Deployed the page to Github Pages.
This commit is contained in:
parent
1d79754e93
commit
2c89899458
62797 changed files with 6551425 additions and 15279 deletions
47
node_modules/readable-stream/LICENSE
generated
vendored
Normal file
47
node_modules/readable-stream/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,47 @@
|
|||
Node.js is licensed for use as follows:
|
||||
|
||||
"""
|
||||
Copyright Node.js contributors. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
||||
|
||||
This license applies to parts of Node.js originating from the
|
||||
https://github.com/joyent/node repository:
|
||||
|
||||
"""
|
||||
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
116
node_modules/readable-stream/README.md
generated
vendored
Normal file
116
node_modules/readable-stream/README.md
generated
vendored
Normal file
|
@ -0,0 +1,116 @@
|
|||
# readable-stream
|
||||
|
||||
**_Node.js core streams for userland_**
|
||||
|
||||
[](https://npm.im/readable-stream)
|
||||
[](https://www.npmjs.org/package/readable-stream)
|
||||
[](https://github.com/nodejs/readable-stream/actions?query=workflow%3ANode.js)
|
||||
[](https://github.com/nodejs/readable-stream/actions?query=workflow%3ABrowsers)
|
||||
|
||||
```bash
|
||||
npm install readable-stream
|
||||
```
|
||||
|
||||
This package is a mirror of the streams implementations in Node.js 18.19.0.
|
||||
|
||||
Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v18.19.0/docs/api/stream.html).
|
||||
|
||||
If you want to guarantee a stable streams base, regardless of what version of
|
||||
Node you, or the users of your libraries are using, use **readable-stream** _only_ and avoid the _"stream"_ module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
|
||||
|
||||
As of version 2.0.0 **readable-stream** uses semantic versioning.
|
||||
|
||||
## Version 4.x.x
|
||||
|
||||
v4.x.x of `readable-stream` is a cut from Node 18. This version supports Node 12, 14, 16 and 18, as well as evergreen browsers.
|
||||
The breaking changes introduced by v4 are composed of the combined breaking changes in:
|
||||
* [Node v12](https://nodejs.org/en/blog/release/v12.0.0/)
|
||||
* [Node v13](https://nodejs.org/en/blog/release/v13.0.0/)
|
||||
* [Node v14](https://nodejs.org/en/blog/release/v14.0.0/)
|
||||
* [Node v15](https://nodejs.org/en/blog/release/v15.0.0/)
|
||||
* [Node v16](https://nodejs.org/en/blog/release/v16.0.0/)
|
||||
* [Node v17](https://nodejs.org/en/blog/release/v17.0.0/)
|
||||
* [Node v18](https://nodejs.org/en/blog/release/v18.0.0/)
|
||||
|
||||
This also includes _many_ new features.
|
||||
|
||||
## Version 3.x.x
|
||||
|
||||
v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows:
|
||||
|
||||
1. Error codes: https://github.com/nodejs/node/pull/13310,
|
||||
https://github.com/nodejs/node/pull/13291,
|
||||
https://github.com/nodejs/node/pull/16589,
|
||||
https://github.com/nodejs/node/pull/15042,
|
||||
https://github.com/nodejs/node/pull/15665,
|
||||
https://github.com/nodejs/readable-stream/pull/344
|
||||
2. 'readable' have precedence over flowing
|
||||
https://github.com/nodejs/node/pull/18994
|
||||
3. make virtual methods errors consistent
|
||||
https://github.com/nodejs/node/pull/18813
|
||||
4. updated streams error handling
|
||||
https://github.com/nodejs/node/pull/18438
|
||||
5. writable.end should return this.
|
||||
https://github.com/nodejs/node/pull/18780
|
||||
6. readable continues to read when push('')
|
||||
https://github.com/nodejs/node/pull/18211
|
||||
7. add custom inspect to BufferList
|
||||
https://github.com/nodejs/node/pull/17907
|
||||
8. always defer 'readable' with nextTick
|
||||
https://github.com/nodejs/node/pull/17979
|
||||
|
||||
## Version 2.x.x
|
||||
|
||||
v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11.
|
||||
|
||||
# Usage
|
||||
|
||||
You can swap your `require('stream')` with `require('readable-stream')`
|
||||
without any changes, if you are just using one of the main classes and
|
||||
functions.
|
||||
|
||||
```js
|
||||
const {
|
||||
Readable,
|
||||
Writable,
|
||||
Transform,
|
||||
Duplex,
|
||||
pipeline,
|
||||
finished
|
||||
} = require('readable-stream')
|
||||
```
|
||||
|
||||
Note that `require('stream')` will return `Stream`, while
|
||||
`require('readable-stream')` will return `Readable`. We discourage using
|
||||
whatever is exported directly, but rather use one of the properties as
|
||||
shown in the example above.
|
||||
|
||||
## Usage In Browsers
|
||||
|
||||
You will need a bundler like [`browserify`](https://github.com/browserify/browserify#readme), [`webpack`](https://webpack.js.org/), [`parcel`](https://github.com/parcel-bundler/parcel#readme) or similar. Polyfills are no longer required since version 4.2.0.
|
||||
|
||||
# Streams Working Group
|
||||
|
||||
`readable-stream` is maintained by the Streams Working Group, which
|
||||
oversees the development and maintenance of the Streams API within
|
||||
Node.js. The responsibilities of the Streams Working Group include:
|
||||
|
||||
- Addressing stream issues on the Node.js issue tracker.
|
||||
- Authoring and editing stream documentation within the Node.js project.
|
||||
- Reviewing changes to stream subclasses within the Node.js project.
|
||||
- Redirecting changes to streams from the Node.js project to this
|
||||
project.
|
||||
- Assisting in the implementation of stream providers within Node.js.
|
||||
- Recommending versions of `readable-stream` to be included in Node.js.
|
||||
- Messaging about the future of streams to give the community advance
|
||||
notice of changes.
|
||||
|
||||
<a name="members"></a>
|
||||
|
||||
## Team Members
|
||||
|
||||
- **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com>
|
||||
- **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com>
|
||||
- Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
|
||||
- **Robert Nagy** ([@ronag](https://github.com/ronag)) <ronagy@icloud.com>
|
||||
- **Vincent Weevers** ([@vweevers](https://github.com/vweevers)) <mail@vincentweevers.nl>
|
4
node_modules/readable-stream/lib/_stream_duplex.js
generated
vendored
Normal file
4
node_modules/readable-stream/lib/_stream_duplex.js
generated
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
'use strict'
|
||||
|
||||
// Keep this file as an alias for the full stream module.
|
||||
module.exports = require('./stream').Duplex
|
4
node_modules/readable-stream/lib/_stream_passthrough.js
generated
vendored
Normal file
4
node_modules/readable-stream/lib/_stream_passthrough.js
generated
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
'use strict'
|
||||
|
||||
// Keep this file as an alias for the full stream module.
|
||||
module.exports = require('./stream').PassThrough
|
4
node_modules/readable-stream/lib/_stream_readable.js
generated
vendored
Normal file
4
node_modules/readable-stream/lib/_stream_readable.js
generated
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
'use strict'
|
||||
|
||||
// Keep this file as an alias for the full stream module.
|
||||
module.exports = require('./stream').Readable
|
4
node_modules/readable-stream/lib/_stream_transform.js
generated
vendored
Normal file
4
node_modules/readable-stream/lib/_stream_transform.js
generated
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
'use strict'
|
||||
|
||||
// Keep this file as an alias for the full stream module.
|
||||
module.exports = require('./stream').Transform
|
4
node_modules/readable-stream/lib/_stream_writable.js
generated
vendored
Normal file
4
node_modules/readable-stream/lib/_stream_writable.js
generated
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
'use strict'
|
||||
|
||||
// Keep this file as an alias for the full stream module.
|
||||
module.exports = require('./stream').Writable
|
52
node_modules/readable-stream/lib/internal/streams/add-abort-signal.js
generated
vendored
Normal file
52
node_modules/readable-stream/lib/internal/streams/add-abort-signal.js
generated
vendored
Normal file
|
@ -0,0 +1,52 @@
|
|||
'use strict'
|
||||
|
||||
const { SymbolDispose } = require('../../ours/primordials')
|
||||
const { AbortError, codes } = require('../../ours/errors')
|
||||
const { isNodeStream, isWebStream, kControllerErrorFunction } = require('./utils')
|
||||
const eos = require('./end-of-stream')
|
||||
const { ERR_INVALID_ARG_TYPE } = codes
|
||||
let addAbortListener
|
||||
|
||||
// This method is inlined here for readable-stream
|
||||
// It also does not allow for signal to not exist on the stream
|
||||
// https://github.com/nodejs/node/pull/36061#discussion_r533718029
|
||||
const validateAbortSignal = (signal, name) => {
|
||||
if (typeof signal !== 'object' || !('aborted' in signal)) {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
|
||||
}
|
||||
}
|
||||
module.exports.addAbortSignal = function addAbortSignal(signal, stream) {
|
||||
validateAbortSignal(signal, 'signal')
|
||||
if (!isNodeStream(stream) && !isWebStream(stream)) {
|
||||
throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream)
|
||||
}
|
||||
return module.exports.addAbortSignalNoValidate(signal, stream)
|
||||
}
|
||||
module.exports.addAbortSignalNoValidate = function (signal, stream) {
|
||||
if (typeof signal !== 'object' || !('aborted' in signal)) {
|
||||
return stream
|
||||
}
|
||||
const onAbort = isNodeStream(stream)
|
||||
? () => {
|
||||
stream.destroy(
|
||||
new AbortError(undefined, {
|
||||
cause: signal.reason
|
||||
})
|
||||
)
|
||||
}
|
||||
: () => {
|
||||
stream[kControllerErrorFunction](
|
||||
new AbortError(undefined, {
|
||||
cause: signal.reason
|
||||
})
|
||||
)
|
||||
}
|
||||
if (signal.aborted) {
|
||||
onAbort()
|
||||
} else {
|
||||
addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
|
||||
const disposable = addAbortListener(signal, onAbort)
|
||||
eos(stream, disposable[SymbolDispose])
|
||||
}
|
||||
return stream
|
||||
}
|
157
node_modules/readable-stream/lib/internal/streams/buffer_list.js
generated
vendored
Normal file
157
node_modules/readable-stream/lib/internal/streams/buffer_list.js
generated
vendored
Normal file
|
@ -0,0 +1,157 @@
|
|||
'use strict'
|
||||
|
||||
const { StringPrototypeSlice, SymbolIterator, TypedArrayPrototypeSet, Uint8Array } = require('../../ours/primordials')
|
||||
const { Buffer } = require('buffer')
|
||||
const { inspect } = require('../../ours/util')
|
||||
module.exports = class BufferList {
|
||||
constructor() {
|
||||
this.head = null
|
||||
this.tail = null
|
||||
this.length = 0
|
||||
}
|
||||
push(v) {
|
||||
const entry = {
|
||||
data: v,
|
||||
next: null
|
||||
}
|
||||
if (this.length > 0) this.tail.next = entry
|
||||
else this.head = entry
|
||||
this.tail = entry
|
||||
++this.length
|
||||
}
|
||||
unshift(v) {
|
||||
const entry = {
|
||||
data: v,
|
||||
next: this.head
|
||||
}
|
||||
if (this.length === 0) this.tail = entry
|
||||
this.head = entry
|
||||
++this.length
|
||||
}
|
||||
shift() {
|
||||
if (this.length === 0) return
|
||||
const ret = this.head.data
|
||||
if (this.length === 1) this.head = this.tail = null
|
||||
else this.head = this.head.next
|
||||
--this.length
|
||||
return ret
|
||||
}
|
||||
clear() {
|
||||
this.head = this.tail = null
|
||||
this.length = 0
|
||||
}
|
||||
join(s) {
|
||||
if (this.length === 0) return ''
|
||||
let p = this.head
|
||||
let ret = '' + p.data
|
||||
while ((p = p.next) !== null) ret += s + p.data
|
||||
return ret
|
||||
}
|
||||
concat(n) {
|
||||
if (this.length === 0) return Buffer.alloc(0)
|
||||
const ret = Buffer.allocUnsafe(n >>> 0)
|
||||
let p = this.head
|
||||
let i = 0
|
||||
while (p) {
|
||||
TypedArrayPrototypeSet(ret, p.data, i)
|
||||
i += p.data.length
|
||||
p = p.next
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
// Consumes a specified amount of bytes or characters from the buffered data.
|
||||
consume(n, hasStrings) {
|
||||
const data = this.head.data
|
||||
if (n < data.length) {
|
||||
// `slice` is the same for buffers and strings.
|
||||
const slice = data.slice(0, n)
|
||||
this.head.data = data.slice(n)
|
||||
return slice
|
||||
}
|
||||
if (n === data.length) {
|
||||
// First chunk is a perfect match.
|
||||
return this.shift()
|
||||
}
|
||||
// Result spans more than one buffer.
|
||||
return hasStrings ? this._getString(n) : this._getBuffer(n)
|
||||
}
|
||||
first() {
|
||||
return this.head.data
|
||||
}
|
||||
*[SymbolIterator]() {
|
||||
for (let p = this.head; p; p = p.next) {
|
||||
yield p.data
|
||||
}
|
||||
}
|
||||
|
||||
// Consumes a specified amount of characters from the buffered data.
|
||||
_getString(n) {
|
||||
let ret = ''
|
||||
let p = this.head
|
||||
let c = 0
|
||||
do {
|
||||
const str = p.data
|
||||
if (n > str.length) {
|
||||
ret += str
|
||||
n -= str.length
|
||||
} else {
|
||||
if (n === str.length) {
|
||||
ret += str
|
||||
++c
|
||||
if (p.next) this.head = p.next
|
||||
else this.head = this.tail = null
|
||||
} else {
|
||||
ret += StringPrototypeSlice(str, 0, n)
|
||||
this.head = p
|
||||
p.data = StringPrototypeSlice(str, n)
|
||||
}
|
||||
break
|
||||
}
|
||||
++c
|
||||
} while ((p = p.next) !== null)
|
||||
this.length -= c
|
||||
return ret
|
||||
}
|
||||
|
||||
// Consumes a specified amount of bytes from the buffered data.
|
||||
_getBuffer(n) {
|
||||
const ret = Buffer.allocUnsafe(n)
|
||||
const retLen = n
|
||||
let p = this.head
|
||||
let c = 0
|
||||
do {
|
||||
const buf = p.data
|
||||
if (n > buf.length) {
|
||||
TypedArrayPrototypeSet(ret, buf, retLen - n)
|
||||
n -= buf.length
|
||||
} else {
|
||||
if (n === buf.length) {
|
||||
TypedArrayPrototypeSet(ret, buf, retLen - n)
|
||||
++c
|
||||
if (p.next) this.head = p.next
|
||||
else this.head = this.tail = null
|
||||
} else {
|
||||
TypedArrayPrototypeSet(ret, new Uint8Array(buf.buffer, buf.byteOffset, n), retLen - n)
|
||||
this.head = p
|
||||
p.data = buf.slice(n)
|
||||
}
|
||||
break
|
||||
}
|
||||
++c
|
||||
} while ((p = p.next) !== null)
|
||||
this.length -= c
|
||||
return ret
|
||||
}
|
||||
|
||||
// Make sure the linked list only shows the minimal necessary information.
|
||||
[Symbol.for('nodejs.util.inspect.custom')](_, options) {
|
||||
return inspect(this, {
|
||||
...options,
|
||||
// Only inspect one level.
|
||||
depth: 0,
|
||||
// It should not recurse.
|
||||
customInspect: false
|
||||
})
|
||||
}
|
||||
}
|
194
node_modules/readable-stream/lib/internal/streams/compose.js
generated
vendored
Normal file
194
node_modules/readable-stream/lib/internal/streams/compose.js
generated
vendored
Normal file
|
@ -0,0 +1,194 @@
|
|||
'use strict'
|
||||
|
||||
const { pipeline } = require('./pipeline')
|
||||
const Duplex = require('./duplex')
|
||||
const { destroyer } = require('./destroy')
|
||||
const {
|
||||
isNodeStream,
|
||||
isReadable,
|
||||
isWritable,
|
||||
isWebStream,
|
||||
isTransformStream,
|
||||
isWritableStream,
|
||||
isReadableStream
|
||||
} = require('./utils')
|
||||
const {
|
||||
AbortError,
|
||||
codes: { ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS }
|
||||
} = require('../../ours/errors')
|
||||
const eos = require('./end-of-stream')
|
||||
module.exports = function compose(...streams) {
|
||||
if (streams.length === 0) {
|
||||
throw new ERR_MISSING_ARGS('streams')
|
||||
}
|
||||
if (streams.length === 1) {
|
||||
return Duplex.from(streams[0])
|
||||
}
|
||||
const orgStreams = [...streams]
|
||||
if (typeof streams[0] === 'function') {
|
||||
streams[0] = Duplex.from(streams[0])
|
||||
}
|
||||
if (typeof streams[streams.length - 1] === 'function') {
|
||||
const idx = streams.length - 1
|
||||
streams[idx] = Duplex.from(streams[idx])
|
||||
}
|
||||
for (let n = 0; n < streams.length; ++n) {
|
||||
if (!isNodeStream(streams[n]) && !isWebStream(streams[n])) {
|
||||
// TODO(ronag): Add checks for non streams.
|
||||
continue
|
||||
}
|
||||
if (
|
||||
n < streams.length - 1 &&
|
||||
!(isReadable(streams[n]) || isReadableStream(streams[n]) || isTransformStream(streams[n]))
|
||||
) {
|
||||
throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be readable')
|
||||
}
|
||||
if (n > 0 && !(isWritable(streams[n]) || isWritableStream(streams[n]) || isTransformStream(streams[n]))) {
|
||||
throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be writable')
|
||||
}
|
||||
}
|
||||
let ondrain
|
||||
let onfinish
|
||||
let onreadable
|
||||
let onclose
|
||||
let d
|
||||
function onfinished(err) {
|
||||
const cb = onclose
|
||||
onclose = null
|
||||
if (cb) {
|
||||
cb(err)
|
||||
} else if (err) {
|
||||
d.destroy(err)
|
||||
} else if (!readable && !writable) {
|
||||
d.destroy()
|
||||
}
|
||||
}
|
||||
const head = streams[0]
|
||||
const tail = pipeline(streams, onfinished)
|
||||
const writable = !!(isWritable(head) || isWritableStream(head) || isTransformStream(head))
|
||||
const readable = !!(isReadable(tail) || isReadableStream(tail) || isTransformStream(tail))
|
||||
|
||||
// TODO(ronag): Avoid double buffering.
|
||||
// Implement Writable/Readable/Duplex traits.
|
||||
// See, https://github.com/nodejs/node/pull/33515.
|
||||
d = new Duplex({
|
||||
// TODO (ronag): highWaterMark?
|
||||
writableObjectMode: !!(head !== null && head !== undefined && head.writableObjectMode),
|
||||
readableObjectMode: !!(tail !== null && tail !== undefined && tail.readableObjectMode),
|
||||
writable,
|
||||
readable
|
||||
})
|
||||
if (writable) {
|
||||
if (isNodeStream(head)) {
|
||||
d._write = function (chunk, encoding, callback) {
|
||||
if (head.write(chunk, encoding)) {
|
||||
callback()
|
||||
} else {
|
||||
ondrain = callback
|
||||
}
|
||||
}
|
||||
d._final = function (callback) {
|
||||
head.end()
|
||||
onfinish = callback
|
||||
}
|
||||
head.on('drain', function () {
|
||||
if (ondrain) {
|
||||
const cb = ondrain
|
||||
ondrain = null
|
||||
cb()
|
||||
}
|
||||
})
|
||||
} else if (isWebStream(head)) {
|
||||
const writable = isTransformStream(head) ? head.writable : head
|
||||
const writer = writable.getWriter()
|
||||
d._write = async function (chunk, encoding, callback) {
|
||||
try {
|
||||
await writer.ready
|
||||
writer.write(chunk).catch(() => {})
|
||||
callback()
|
||||
} catch (err) {
|
||||
callback(err)
|
||||
}
|
||||
}
|
||||
d._final = async function (callback) {
|
||||
try {
|
||||
await writer.ready
|
||||
writer.close().catch(() => {})
|
||||
onfinish = callback
|
||||
} catch (err) {
|
||||
callback(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
const toRead = isTransformStream(tail) ? tail.readable : tail
|
||||
eos(toRead, () => {
|
||||
if (onfinish) {
|
||||
const cb = onfinish
|
||||
onfinish = null
|
||||
cb()
|
||||
}
|
||||
})
|
||||
}
|
||||
if (readable) {
|
||||
if (isNodeStream(tail)) {
|
||||
tail.on('readable', function () {
|
||||
if (onreadable) {
|
||||
const cb = onreadable
|
||||
onreadable = null
|
||||
cb()
|
||||
}
|
||||
})
|
||||
tail.on('end', function () {
|
||||
d.push(null)
|
||||
})
|
||||
d._read = function () {
|
||||
while (true) {
|
||||
const buf = tail.read()
|
||||
if (buf === null) {
|
||||
onreadable = d._read
|
||||
return
|
||||
}
|
||||
if (!d.push(buf)) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (isWebStream(tail)) {
|
||||
const readable = isTransformStream(tail) ? tail.readable : tail
|
||||
const reader = readable.getReader()
|
||||
d._read = async function () {
|
||||
while (true) {
|
||||
try {
|
||||
const { value, done } = await reader.read()
|
||||
if (!d.push(value)) {
|
||||
return
|
||||
}
|
||||
if (done) {
|
||||
d.push(null)
|
||||
return
|
||||
}
|
||||
} catch {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
d._destroy = function (err, callback) {
|
||||
if (!err && onclose !== null) {
|
||||
err = new AbortError()
|
||||
}
|
||||
onreadable = null
|
||||
ondrain = null
|
||||
onfinish = null
|
||||
if (onclose === null) {
|
||||
callback(err)
|
||||
} else {
|
||||
onclose = callback
|
||||
if (isNodeStream(tail)) {
|
||||
destroyer(tail, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
return d
|
||||
}
|
290
node_modules/readable-stream/lib/internal/streams/destroy.js
generated
vendored
Normal file
290
node_modules/readable-stream/lib/internal/streams/destroy.js
generated
vendored
Normal file
|
@ -0,0 +1,290 @@
|
|||
'use strict'
|
||||
|
||||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
|
||||
const {
|
||||
aggregateTwoErrors,
|
||||
codes: { ERR_MULTIPLE_CALLBACK },
|
||||
AbortError
|
||||
} = require('../../ours/errors')
|
||||
const { Symbol } = require('../../ours/primordials')
|
||||
const { kIsDestroyed, isDestroyed, isFinished, isServerRequest } = require('./utils')
|
||||
const kDestroy = Symbol('kDestroy')
|
||||
const kConstruct = Symbol('kConstruct')
|
||||
function checkError(err, w, r) {
|
||||
if (err) {
|
||||
// Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
|
||||
err.stack // eslint-disable-line no-unused-expressions
|
||||
|
||||
if (w && !w.errored) {
|
||||
w.errored = err
|
||||
}
|
||||
if (r && !r.errored) {
|
||||
r.errored = err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Backwards compat. cb() is undocumented and unused in core but
|
||||
// unfortunately might be used by modules.
|
||||
function destroy(err, cb) {
|
||||
const r = this._readableState
|
||||
const w = this._writableState
|
||||
// With duplex streams we use the writable side for state.
|
||||
const s = w || r
|
||||
if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) {
|
||||
if (typeof cb === 'function') {
|
||||
cb()
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
// We set destroyed to true before firing error callbacks in order
|
||||
// to make it re-entrance safe in case destroy() is called within callbacks
|
||||
checkError(err, w, r)
|
||||
if (w) {
|
||||
w.destroyed = true
|
||||
}
|
||||
if (r) {
|
||||
r.destroyed = true
|
||||
}
|
||||
|
||||
// If still constructing then defer calling _destroy.
|
||||
if (!s.constructed) {
|
||||
this.once(kDestroy, function (er) {
|
||||
_destroy(this, aggregateTwoErrors(er, err), cb)
|
||||
})
|
||||
} else {
|
||||
_destroy(this, err, cb)
|
||||
}
|
||||
return this
|
||||
}
|
||||
function _destroy(self, err, cb) {
|
||||
let called = false
|
||||
function onDestroy(err) {
|
||||
if (called) {
|
||||
return
|
||||
}
|
||||
called = true
|
||||
const r = self._readableState
|
||||
const w = self._writableState
|
||||
checkError(err, w, r)
|
||||
if (w) {
|
||||
w.closed = true
|
||||
}
|
||||
if (r) {
|
||||
r.closed = true
|
||||
}
|
||||
if (typeof cb === 'function') {
|
||||
cb(err)
|
||||
}
|
||||
if (err) {
|
||||
process.nextTick(emitErrorCloseNT, self, err)
|
||||
} else {
|
||||
process.nextTick(emitCloseNT, self)
|
||||
}
|
||||
}
|
||||
try {
|
||||
self._destroy(err || null, onDestroy)
|
||||
} catch (err) {
|
||||
onDestroy(err)
|
||||
}
|
||||
}
|
||||
function emitErrorCloseNT(self, err) {
|
||||
emitErrorNT(self, err)
|
||||
emitCloseNT(self)
|
||||
}
|
||||
function emitCloseNT(self) {
|
||||
const r = self._readableState
|
||||
const w = self._writableState
|
||||
if (w) {
|
||||
w.closeEmitted = true
|
||||
}
|
||||
if (r) {
|
||||
r.closeEmitted = true
|
||||
}
|
||||
if ((w !== null && w !== undefined && w.emitClose) || (r !== null && r !== undefined && r.emitClose)) {
|
||||
self.emit('close')
|
||||
}
|
||||
}
|
||||
function emitErrorNT(self, err) {
|
||||
const r = self._readableState
|
||||
const w = self._writableState
|
||||
if ((w !== null && w !== undefined && w.errorEmitted) || (r !== null && r !== undefined && r.errorEmitted)) {
|
||||
return
|
||||
}
|
||||
if (w) {
|
||||
w.errorEmitted = true
|
||||
}
|
||||
if (r) {
|
||||
r.errorEmitted = true
|
||||
}
|
||||
self.emit('error', err)
|
||||
}
|
||||
function undestroy() {
|
||||
const r = this._readableState
|
||||
const w = this._writableState
|
||||
if (r) {
|
||||
r.constructed = true
|
||||
r.closed = false
|
||||
r.closeEmitted = false
|
||||
r.destroyed = false
|
||||
r.errored = null
|
||||
r.errorEmitted = false
|
||||
r.reading = false
|
||||
r.ended = r.readable === false
|
||||
r.endEmitted = r.readable === false
|
||||
}
|
||||
if (w) {
|
||||
w.constructed = true
|
||||
w.destroyed = false
|
||||
w.closed = false
|
||||
w.closeEmitted = false
|
||||
w.errored = null
|
||||
w.errorEmitted = false
|
||||
w.finalCalled = false
|
||||
w.prefinished = false
|
||||
w.ended = w.writable === false
|
||||
w.ending = w.writable === false
|
||||
w.finished = w.writable === false
|
||||
}
|
||||
}
|
||||
function errorOrDestroy(stream, err, sync) {
|
||||
// We have tests that rely on errors being emitted
|
||||
// in the same tick, so changing this is semver major.
|
||||
// For now when you opt-in to autoDestroy we allow
|
||||
// the error to be emitted nextTick. In a future
|
||||
// semver major update we should change the default to this.
|
||||
|
||||
const r = stream._readableState
|
||||
const w = stream._writableState
|
||||
if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) {
|
||||
return this
|
||||
}
|
||||
if ((r !== null && r !== undefined && r.autoDestroy) || (w !== null && w !== undefined && w.autoDestroy))
|
||||
stream.destroy(err)
|
||||
else if (err) {
|
||||
// Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
|
||||
err.stack // eslint-disable-line no-unused-expressions
|
||||
|
||||
if (w && !w.errored) {
|
||||
w.errored = err
|
||||
}
|
||||
if (r && !r.errored) {
|
||||
r.errored = err
|
||||
}
|
||||
if (sync) {
|
||||
process.nextTick(emitErrorNT, stream, err)
|
||||
} else {
|
||||
emitErrorNT(stream, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
function construct(stream, cb) {
|
||||
if (typeof stream._construct !== 'function') {
|
||||
return
|
||||
}
|
||||
const r = stream._readableState
|
||||
const w = stream._writableState
|
||||
if (r) {
|
||||
r.constructed = false
|
||||
}
|
||||
if (w) {
|
||||
w.constructed = false
|
||||
}
|
||||
stream.once(kConstruct, cb)
|
||||
if (stream.listenerCount(kConstruct) > 1) {
|
||||
// Duplex
|
||||
return
|
||||
}
|
||||
process.nextTick(constructNT, stream)
|
||||
}
|
||||
function constructNT(stream) {
|
||||
let called = false
|
||||
function onConstruct(err) {
|
||||
if (called) {
|
||||
errorOrDestroy(stream, err !== null && err !== undefined ? err : new ERR_MULTIPLE_CALLBACK())
|
||||
return
|
||||
}
|
||||
called = true
|
||||
const r = stream._readableState
|
||||
const w = stream._writableState
|
||||
const s = w || r
|
||||
if (r) {
|
||||
r.constructed = true
|
||||
}
|
||||
if (w) {
|
||||
w.constructed = true
|
||||
}
|
||||
if (s.destroyed) {
|
||||
stream.emit(kDestroy, err)
|
||||
} else if (err) {
|
||||
errorOrDestroy(stream, err, true)
|
||||
} else {
|
||||
process.nextTick(emitConstructNT, stream)
|
||||
}
|
||||
}
|
||||
try {
|
||||
stream._construct((err) => {
|
||||
process.nextTick(onConstruct, err)
|
||||
})
|
||||
} catch (err) {
|
||||
process.nextTick(onConstruct, err)
|
||||
}
|
||||
}
|
||||
function emitConstructNT(stream) {
|
||||
stream.emit(kConstruct)
|
||||
}
|
||||
function isRequest(stream) {
|
||||
return (stream === null || stream === undefined ? undefined : stream.setHeader) && typeof stream.abort === 'function'
|
||||
}
|
||||
function emitCloseLegacy(stream) {
|
||||
stream.emit('close')
|
||||
}
|
||||
function emitErrorCloseLegacy(stream, err) {
|
||||
stream.emit('error', err)
|
||||
process.nextTick(emitCloseLegacy, stream)
|
||||
}
|
||||
|
||||
// Normalize destroy for legacy.
|
||||
function destroyer(stream, err) {
|
||||
if (!stream || isDestroyed(stream)) {
|
||||
return
|
||||
}
|
||||
if (!err && !isFinished(stream)) {
|
||||
err = new AbortError()
|
||||
}
|
||||
|
||||
// TODO: Remove isRequest branches.
|
||||
if (isServerRequest(stream)) {
|
||||
stream.socket = null
|
||||
stream.destroy(err)
|
||||
} else if (isRequest(stream)) {
|
||||
stream.abort()
|
||||
} else if (isRequest(stream.req)) {
|
||||
stream.req.abort()
|
||||
} else if (typeof stream.destroy === 'function') {
|
||||
stream.destroy(err)
|
||||
} else if (typeof stream.close === 'function') {
|
||||
// TODO: Don't lose err?
|
||||
stream.close()
|
||||
} else if (err) {
|
||||
process.nextTick(emitErrorCloseLegacy, stream, err)
|
||||
} else {
|
||||
process.nextTick(emitCloseLegacy, stream)
|
||||
}
|
||||
if (!stream.destroyed) {
|
||||
stream[kIsDestroyed] = true
|
||||
}
|
||||
}
|
||||
module.exports = {
|
||||
construct,
|
||||
destroyer,
|
||||
destroy,
|
||||
undestroy,
|
||||
errorOrDestroy
|
||||
}
|
143
node_modules/readable-stream/lib/internal/streams/duplex.js
generated
vendored
Normal file
143
node_modules/readable-stream/lib/internal/streams/duplex.js
generated
vendored
Normal file
|
@ -0,0 +1,143 @@
|
|||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a duplex stream is just a stream that is both readable and writable.
|
||||
// Since JS doesn't have multiple prototype inheritance, this class
|
||||
// prototypically inherits from Readable, and then parasitically from
|
||||
// Writable.
|
||||
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
ObjectDefineProperties,
|
||||
ObjectGetOwnPropertyDescriptor,
|
||||
ObjectKeys,
|
||||
ObjectSetPrototypeOf
|
||||
} = require('../../ours/primordials')
|
||||
module.exports = Duplex
|
||||
const Readable = require('./readable')
|
||||
const Writable = require('./writable')
|
||||
ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype)
|
||||
ObjectSetPrototypeOf(Duplex, Readable)
|
||||
{
  const keys = ObjectKeys(Writable.prototype)
  // Allow the keys array to be GC'ed.
  // Parasitic inheritance: copy every Writable prototype method that the
  // Readable-based prototype chain does not already provide, since JS has
  // no multiple inheritance.
  for (let i = 0; i < keys.length; i++) {
    const method = keys[i]
    if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]
  }
}
|
||||
// Duplex constructor: initializes both the Readable and the Writable side
// on the same instance. Callable with or without `new`.
function Duplex(options) {
  if (!(this instanceof Duplex)) return new Duplex(options)
  Readable.call(this, options)
  Writable.call(this, options)
  if (!options) {
    this.allowHalfOpen = true
    return
  }
  this.allowHalfOpen = options.allowHalfOpen !== false
  if (options.readable === false) {
    // Pre-mark the readable side as fully consumed.
    this._readableState.readable = false
    this._readableState.ended = true
    this._readableState.endEmitted = true
  }
  if (options.writable === false) {
    // Pre-mark the writable side as fully flushed.
    this._writableState.writable = false
    this._writableState.ending = true
    this._writableState.ended = true
    this._writableState.finished = true
  }
}
|
||||
// Re-expose Writable's accessor properties on Duplex. Accessors are not
// copied by the method-copy loop above (it only copies own methods), so each
// descriptor is cloned explicitly. `__proto__: null` keeps the descriptor
// objects prototype-free.
ObjectDefineProperties(Duplex.prototype, {
  writable: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writable')
  },
  writableHighWaterMark: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableHighWaterMark')
  },
  writableObjectMode: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableObjectMode')
  },
  writableBuffer: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableBuffer')
  },
  writableLength: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableLength')
  },
  writableFinished: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableFinished')
  },
  writableCorked: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableCorked')
  },
  writableEnded: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableEnded')
  },
  writableNeedDrain: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableNeedDrain')
  },
  destroyed: {
    __proto__: null,
    get() {
      // Before both sides are initialized the stream cannot be destroyed.
      if (this._readableState === undefined || this._writableState === undefined) {
        return false
      }
      // A duplex counts as destroyed only when BOTH halves are.
      return this._readableState.destroyed && this._writableState.destroyed
    },
    set(value) {
      // Backward compatibility, the user is explicitly
      // managing destroyed.
      if (this._readableState && this._writableState) {
        this._readableState.destroyed = value
        this._writableState.destroyed = value
      }
    }
  }
})
|
||||
let webStreamsAdapters

// Lazy to avoid circular references
// NOTE(review): in this build the adapters object is only ever initialized
// to `{}` — the conversion methods used by fromWeb/toWeb are never attached
// here; confirm whether that is intentional upstream.
function lazyWebStreams() {
  if (webStreamsAdapters === undefined) {
    webStreamsAdapters = {}
  }
  return webStreamsAdapters
}
|
||||
// Builds a Duplex from a { readable, writable } web-stream pair.
// NOTE(review): lazyWebStreams() only ever returns an empty object in this
// file, so this call path would throw at runtime — confirm upstream intent.
Duplex.fromWeb = function (pair, options) {
  return lazyWebStreams().newStreamDuplexFromReadableWritablePair(pair, options)
}

// Converts a Duplex into a { readable, writable } web-stream pair (same
// caveat as fromWeb regarding the empty adapters object).
Duplex.toWeb = function (duplex) {
  return lazyWebStreams().newReadableWritablePairFromDuplex(duplex)
}
|
||||
let duplexify

// Coerces an arbitrary "body" (node stream, iterable, async function,
// promise, blob, web stream, or { readable, writable } pair) into a Duplex.
Duplex.from = function (body) {
  // Loaded lazily to avoid a circular dependency with ./duplexify.
  if (duplexify === undefined) {
    duplexify = require('./duplexify')
  }
  return duplexify(body, 'body')
}
|
378
node_modules/readable-stream/lib/internal/streams/duplexify.js
generated
vendored
Normal file
378
node_modules/readable-stream/lib/internal/streams/duplexify.js
generated
vendored
Normal file
|
@ -0,0 +1,378 @@
|
|||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
|
||||
;('use strict')
|
||||
const bufferModule = require('buffer')
|
||||
const {
|
||||
isReadable,
|
||||
isWritable,
|
||||
isIterable,
|
||||
isNodeStream,
|
||||
isReadableNodeStream,
|
||||
isWritableNodeStream,
|
||||
isDuplexNodeStream,
|
||||
isReadableStream,
|
||||
isWritableStream
|
||||
} = require('./utils')
|
||||
const eos = require('./end-of-stream')
|
||||
const {
|
||||
AbortError,
|
||||
codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE }
|
||||
} = require('../../ours/errors')
|
||||
const { destroyer } = require('./destroy')
|
||||
const Duplex = require('./duplex')
|
||||
const Readable = require('./readable')
|
||||
const Writable = require('./writable')
|
||||
const { createDeferredPromise } = require('../../ours/util')
|
||||
const from = require('./from')
|
||||
const Blob = globalThis.Blob || bufferModule.Blob
|
||||
const isBlob =
|
||||
typeof Blob !== 'undefined'
|
||||
? function isBlob(b) {
|
||||
return b instanceof Blob
|
||||
}
|
||||
: function isBlob(b) {
|
||||
return false
|
||||
}
|
||||
const AbortController = globalThis.AbortController || require('abort-controller').AbortController
|
||||
const { FunctionPrototypeCall } = require('../../ours/primordials')
|
||||
|
||||
// This is needed for pre node 17.
|
||||
// This is needed for pre node 17.
// Duplex subclass that re-applies the readable/writable:false option
// handling after super(), mirroring https://github.com/nodejs/node/pull/34385.
class Duplexify extends Duplex {
  constructor(options) {
    super(options)
    const readableOpt = options === null || options === undefined ? undefined : options.readable
    const writableOpt = options === null || options === undefined ? undefined : options.writable
    if (readableOpt === false) {
      // Readable side disabled: mark it fully consumed.
      this._readableState.readable = false
      this._readableState.ended = true
      this._readableState.endEmitted = true
    }
    if (writableOpt === false) {
      // Writable side disabled: mark it fully flushed.
      this._writableState.writable = false
      this._writableState.ending = true
      this._writableState.ended = true
      this._writableState.finished = true
    }
  }
}
|
||||
// Coerces `body` into a Duplex stream. Tries, in order: existing duplex,
// readable node stream, writable node stream, other node stream, web
// ReadableStream/WritableStream, async function, Blob, iterable,
// { readable, writable } web pair, { readable, writable } node pair, and
// finally a thenable. Throws ERR_INVALID_ARG_TYPE otherwise. `name` is
// used only for error messages.
module.exports = function duplexify(body, name) {
  if (isDuplexNodeStream(body)) {
    return body
  }
  if (isReadableNodeStream(body)) {
    return _duplexify({
      readable: body
    })
  }
  if (isWritableNodeStream(body)) {
    return _duplexify({
      writable: body
    })
  }
  if (isNodeStream(body)) {
    // A node stream that is neither readable nor writable anymore.
    return _duplexify({
      writable: false,
      readable: false
    })
  }
  if (isReadableStream(body)) {
    return _duplexify({
      readable: Readable.fromWeb(body)
    })
  }
  if (isWritableStream(body)) {
    return _duplexify({
      writable: Writable.fromWeb(body)
    })
  }
  if (typeof body === 'function') {
    // Treat the function as an async generator transform: it receives an
    // async iterable of written chunks and yields/readable output.
    const { value, write, final, destroy } = fromAsyncGen(body)
    if (isIterable(value)) {
      return from(Duplexify, value, {
        // TODO (ronag): highWaterMark?
        objectMode: true,
        write,
        final,
        destroy
      })
    }
    const then = value === null || value === undefined ? undefined : value.then
    if (typeof then === 'function') {
      // The function returned a promise: the duplex is write-only and the
      // promise must resolve to null/undefined.
      let d
      const promise = FunctionPrototypeCall(
        then,
        value,
        (val) => {
          if (val != null) {
            throw new ERR_INVALID_RETURN_VALUE('nully', 'body', val)
          }
        },
        (err) => {
          destroyer(d, err)
        }
      )
      return (d = new Duplexify({
        // TODO (ronag): highWaterMark?
        objectMode: true,
        readable: false,
        write,
        final(cb) {
          // Delay 'finish' until the returned promise settles.
          final(async () => {
            try {
              await promise
              process.nextTick(cb, null)
            } catch (err) {
              process.nextTick(cb, err)
            }
          })
        },
        destroy
      }))
    }
    throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or AsyncFunction', name, value)
  }
  if (isBlob(body)) {
    // NOTE(review): the `name` argument is dropped on this recursive call,
    // so a type error on the arrayBuffer promise reports an undefined name.
    return duplexify(body.arrayBuffer())
  }
  if (isIterable(body)) {
    return from(Duplexify, body, {
      // TODO (ronag): highWaterMark?
      objectMode: true,
      writable: false
    })
  }
  if (
    isReadableStream(body === null || body === undefined ? undefined : body.readable) &&
    isWritableStream(body === null || body === undefined ? undefined : body.writable)
  ) {
    return Duplexify.fromWeb(body)
  }
  if (
    typeof (body === null || body === undefined ? undefined : body.writable) === 'object' ||
    typeof (body === null || body === undefined ? undefined : body.readable) === 'object'
  ) {
    // A { readable, writable } pair of node streams (each side coerced
    // recursively when it is not already a node stream).
    const readable =
      body !== null && body !== undefined && body.readable
        ? isReadableNodeStream(body === null || body === undefined ? undefined : body.readable)
          ? body === null || body === undefined
            ? undefined
            : body.readable
          : duplexify(body.readable)
        : undefined
    const writable =
      body !== null && body !== undefined && body.writable
        ? isWritableNodeStream(body === null || body === undefined ? undefined : body.writable)
          ? body === null || body === undefined
            ? undefined
            : body.writable
          : duplexify(body.writable)
        : undefined
    return _duplexify({
      readable,
      writable
    })
  }
  const then = body === null || body === undefined ? undefined : body.then
  if (typeof then === 'function') {
    // Plain thenable: read-only duplex that pushes the resolved value.
    let d
    FunctionPrototypeCall(
      then,
      body,
      (val) => {
        if (val != null) {
          d.push(val)
        }
        d.push(null)
      },
      (err) => {
        destroyer(d, err)
      }
    )
    return (d = new Duplexify({
      objectMode: true,
      writable: false,
      read() {}
    }))
  }
  throw new ERR_INVALID_ARG_TYPE(
    name,
    [
      'Blob',
      'ReadableStream',
      'WritableStream',
      'Stream',
      'Iterable',
      'AsyncIterable',
      'Function',
      '{ readable, writable } pair',
      'Promise'
    ],
    body
  )
}
|
||||
// Bridges writable-stream callbacks to an async iterable consumed by `fn`.
// Each write()/final() resolves the current deferred promise with
// { chunk, done, cb }; the generator awaits it, acks via process.nextTick(cb),
// and re-arms a fresh deferred before yielding. destroy() aborts the signal
// passed to `fn`.
function fromAsyncGen(fn) {
  let { promise, resolve } = createDeferredPromise()
  const ac = new AbortController()
  const signal = ac.signal
  const value = fn(
    (async function* () {
      while (true) {
        const _promise = promise
        // Null out so a premature _read/write mismatch fails loudly.
        promise = null
        const { chunk, done, cb } = await _promise
        process.nextTick(cb)
        if (done) return
        if (signal.aborted)
          throw new AbortError(undefined, {
            cause: signal.reason
          })
        // Re-arm the handshake for the next write before yielding.
        ;({ promise, resolve } = createDeferredPromise())
        yield chunk
      }
    })(),
    {
      signal
    }
  )
  return {
    value,
    write(chunk, encoding, cb) {
      const _resolve = resolve
      resolve = null
      _resolve({
        chunk,
        done: false,
        cb
      })
    },
    final(cb) {
      const _resolve = resolve
      resolve = null
      _resolve({
        done: true,
        cb
      })
    },
    destroy(err, cb) {
      ac.abort()
      cb(err)
    }
  }
}
|
||||
// Builds a Duplex from a { readable, writable } pair of node streams by
// forwarding reads/writes and coordinating teardown of both halves.
function _duplexify(pair) {
  // Wrap old-style readables that lack read().
  const r = pair.readable && typeof pair.readable.read !== 'function' ? Readable.wrap(pair.readable) : pair.readable
  const w = pair.writable
  let readable = !!isReadable(r)
  let writable = !!isWritable(w)
  let ondrain
  let onfinish
  let onreadable
  let onclose
  let d
  // Called when either half ends; if _destroy is pending (onclose set),
  // complete it, otherwise propagate an error to the duplex.
  function onfinished(err) {
    const cb = onclose
    onclose = null
    if (cb) {
      cb(err)
    } else if (err) {
      d.destroy(err)
    }
  }

  // TODO(ronag): Avoid double buffering.
  // Implement Writable/Readable/Duplex traits.
  // See, https://github.com/nodejs/node/pull/33515.
  d = new Duplexify({
    // TODO (ronag): highWaterMark?
    readableObjectMode: !!(r !== null && r !== undefined && r.readableObjectMode),
    writableObjectMode: !!(w !== null && w !== undefined && w.writableObjectMode),
    readable,
    writable
  })
  if (writable) {
    eos(w, (err) => {
      writable = false
      if (err) {
        destroyer(r, err)
      }
      onfinished(err)
    })
    // Forward writes; propagate backpressure by deferring the callback
    // until the inner writable emits 'drain'.
    d._write = function (chunk, encoding, callback) {
      if (w.write(chunk, encoding)) {
        callback()
      } else {
        ondrain = callback
      }
    }
    d._final = function (callback) {
      w.end()
      onfinish = callback
    }
    w.on('drain', function () {
      if (ondrain) {
        const cb = ondrain
        ondrain = null
        cb()
      }
    })
    w.on('finish', function () {
      if (onfinish) {
        const cb = onfinish
        onfinish = null
        cb()
      }
    })
  }
  if (readable) {
    eos(r, (err) => {
      readable = false
      if (err) {
        destroyer(r, err)
      }
      onfinished(err)
    })
    r.on('readable', function () {
      if (onreadable) {
        const cb = onreadable
        onreadable = null
        cb()
      }
    })
    r.on('end', function () {
      d.push(null)
    })
    // Drain the inner readable into the duplex; pause when either the
    // source is empty (wait for 'readable') or the duplex buffer is full.
    d._read = function () {
      while (true) {
        const buf = r.read()
        if (buf === null) {
          onreadable = d._read
          return
        }
        if (!d.push(buf)) {
          return
        }
      }
    }
  }
  d._destroy = function (err, callback) {
    // Destroying an unfinished pair without an error counts as an abort.
    if (!err && onclose !== null) {
      err = new AbortError()
    }
    onreadable = null
    ondrain = null
    onfinish = null
    if (onclose === null) {
      callback(err)
    } else {
      // Wait for both halves to finish (via onfinished) before completing.
      onclose = callback
      destroyer(w, err)
      destroyer(r, err)
    }
  }
  return d
}
|
284
node_modules/readable-stream/lib/internal/streams/end-of-stream.js
generated
vendored
Normal file
284
node_modules/readable-stream/lib/internal/streams/end-of-stream.js
generated
vendored
Normal file
|
@ -0,0 +1,284 @@
|
|||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
// Ported from https://github.com/mafintosh/end-of-stream with
|
||||
// permission from the author, Mathias Buus (@mafintosh).
|
||||
|
||||
;('use strict')
|
||||
const { AbortError, codes } = require('../../ours/errors')
|
||||
const { ERR_INVALID_ARG_TYPE, ERR_STREAM_PREMATURE_CLOSE } = codes
|
||||
const { kEmptyObject, once } = require('../../ours/util')
|
||||
const { validateAbortSignal, validateFunction, validateObject, validateBoolean } = require('../validators')
|
||||
const { Promise, PromisePrototypeThen, SymbolDispose } = require('../../ours/primordials')
|
||||
const {
|
||||
isClosed,
|
||||
isReadable,
|
||||
isReadableNodeStream,
|
||||
isReadableStream,
|
||||
isReadableFinished,
|
||||
isReadableErrored,
|
||||
isWritable,
|
||||
isWritableNodeStream,
|
||||
isWritableStream,
|
||||
isWritableFinished,
|
||||
isWritableErrored,
|
||||
isNodeStream,
|
||||
willEmitClose: _willEmitClose,
|
||||
kIsClosedPromise
|
||||
} = require('./utils')
|
||||
let addAbortListener
|
||||
// Duck-types an http.ClientRequest: anything exposing setHeader plus an
// abort() function. Unlike the destroy.js twin, assumes `stream` is non-null.
function isRequest(stream) {
  const setHeader = stream.setHeader
  return setHeader && typeof stream.abort === 'function'
}

// Shared no-op used as a neutral callback/cleanup placeholder.
const nop = () => {}
||||
// end-of-stream: invokes `callback` exactly once when `stream` is fully
// finished (or errors / closes prematurely). Handles web streams, node
// streams, http requests, and pre-streams3 "legacy" streams. Returns a
// cleanup function that detaches every listener it installed.
function eos(stream, options, callback) {
  var _options$readable, _options$writable
  // eos(stream, cb) shorthand.
  if (arguments.length === 2) {
    callback = options
    options = kEmptyObject
  } else if (options == null) {
    options = kEmptyObject
  } else {
    validateObject(options, 'options')
  }
  validateFunction(callback, 'callback')
  validateAbortSignal(options.signal, 'options.signal')
  callback = once(callback)
  if (isReadableStream(stream) || isWritableStream(stream)) {
    return eosWeb(stream, options, callback)
  }
  if (!isNodeStream(stream)) {
    throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream)
  }
  // Which sides to wait for: explicit option wins, otherwise inferred.
  const readable =
    (_options$readable = options.readable) !== null && _options$readable !== undefined
      ? _options$readable
      : isReadableNodeStream(stream)
  const writable =
    (_options$writable = options.writable) !== null && _options$writable !== undefined
      ? _options$writable
      : isWritableNodeStream(stream)
  const wState = stream._writableState
  const rState = stream._readableState
  const onlegacyfinish = () => {
    if (!stream.writable) {
      onfinish()
    }
  }

  // TODO (ronag): Improve soft detection to include core modules and
  // common ecosystem modules that do properly emit 'close' but fail
  // this generic check.
  // When true, completion is deferred to the 'close' event instead of
  // 'end'/'finish'.
  let willEmitClose =
    _willEmitClose(stream) && isReadableNodeStream(stream) === readable && isWritableNodeStream(stream) === writable
  let writableFinished = isWritableFinished(stream, false)
  const onfinish = () => {
    writableFinished = true
    // Stream should not be destroyed here. If it is that
    // means that user space is doing something differently and
    // we cannot trust willEmitClose.
    if (stream.destroyed) {
      willEmitClose = false
    }
    if (willEmitClose && (!stream.readable || readable)) {
      return
    }
    if (!readable || readableFinished) {
      callback.call(stream)
    }
  }
  let readableFinished = isReadableFinished(stream, false)
  const onend = () => {
    readableFinished = true
    // Stream should not be destroyed here. If it is that
    // means that user space is doing something differently and
    // we cannot trust willEmitClose.
    if (stream.destroyed) {
      willEmitClose = false
    }
    if (willEmitClose && (!stream.writable || writable)) {
      return
    }
    if (!writable || writableFinished) {
      callback.call(stream)
    }
  }
  const onerror = (err) => {
    callback.call(stream, err)
  }
  let closed = isClosed(stream)
  const onclose = () => {
    closed = true
    const errored = isWritableErrored(stream) || isReadableErrored(stream)
    if (errored && typeof errored !== 'boolean') {
      return callback.call(stream, errored)
    }
    // 'close' before the watched side finished means a premature close.
    if (readable && !readableFinished && isReadableNodeStream(stream, true)) {
      if (!isReadableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE())
    }
    if (writable && !writableFinished) {
      if (!isWritableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE())
    }
    callback.call(stream)
  }
  // Like onclose, but without the premature-close checks (used when the
  // stream is already known to be done/errored).
  const onclosed = () => {
    closed = true
    const errored = isWritableErrored(stream) || isReadableErrored(stream)
    if (errored && typeof errored !== 'boolean') {
      return callback.call(stream, errored)
    }
    callback.call(stream)
  }
  const onrequest = () => {
    stream.req.on('finish', onfinish)
  }
  if (isRequest(stream)) {
    stream.on('complete', onfinish)
    if (!willEmitClose) {
      stream.on('abort', onclose)
    }
    if (stream.req) {
      onrequest()
    } else {
      stream.on('request', onrequest)
    }
  } else if (writable && !wState) {
    // legacy streams
    stream.on('end', onlegacyfinish)
    stream.on('close', onlegacyfinish)
  }

  // Not all streams will emit 'close' after 'aborted'.
  if (!willEmitClose && typeof stream.aborted === 'boolean') {
    stream.on('aborted', onclose)
  }
  stream.on('end', onend)
  stream.on('finish', onfinish)
  if (options.error !== false) {
    stream.on('error', onerror)
  }
  stream.on('close', onclose)
  // If the stream is already done, settle asynchronously on the next tick.
  if (closed) {
    process.nextTick(onclose)
  } else if (
    (wState !== null && wState !== undefined && wState.errorEmitted) ||
    (rState !== null && rState !== undefined && rState.errorEmitted)
  ) {
    if (!willEmitClose) {
      process.nextTick(onclosed)
    }
  } else if (
    !readable &&
    (!willEmitClose || isReadable(stream)) &&
    (writableFinished || isWritable(stream) === false)
  ) {
    process.nextTick(onclosed)
  } else if (
    !writable &&
    (!willEmitClose || isWritable(stream)) &&
    (readableFinished || isReadable(stream) === false)
  ) {
    process.nextTick(onclosed)
  } else if (rState && stream.req && stream.aborted) {
    process.nextTick(onclosed)
  }
  const cleanup = () => {
    // Neutralize the callback as well, in case an event is in flight.
    callback = nop
    stream.removeListener('aborted', onclose)
    stream.removeListener('complete', onfinish)
    stream.removeListener('abort', onclose)
    stream.removeListener('request', onrequest)
    if (stream.req) stream.req.removeListener('finish', onfinish)
    stream.removeListener('end', onlegacyfinish)
    stream.removeListener('close', onlegacyfinish)
    stream.removeListener('finish', onfinish)
    stream.removeListener('end', onend)
    stream.removeListener('error', onerror)
    stream.removeListener('close', onclose)
  }
  if (options.signal && !closed) {
    const abort = () => {
      // Keep it because cleanup removes it.
      const endCallback = callback
      cleanup()
      endCallback.call(
        stream,
        new AbortError(undefined, {
          cause: options.signal.reason
        })
      )
    }
    if (options.signal.aborted) {
      process.nextTick(abort)
    } else {
      addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
      const disposable = addAbortListener(options.signal, abort)
      const originalCallback = callback
      // Dispose the abort listener once the stream settles normally.
      callback = once((...args) => {
        disposable[SymbolDispose]()
        originalCallback.apply(stream, args)
      })
    }
  }
  return cleanup
}
|
||||
// eos() for WHATWG web streams: settles when the stream's internal
// kIsClosedPromise resolves or rejects. Returns nop — web streams expose no
// listeners for this path to clean up.
function eosWeb(stream, options, callback) {
  let isAborted = false
  let abort = nop
  if (options.signal) {
    abort = () => {
      isAborted = true
      callback.call(
        stream,
        new AbortError(undefined, {
          cause: options.signal.reason
        })
      )
    }
    if (options.signal.aborted) {
      process.nextTick(abort)
    } else {
      addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
      const disposable = addAbortListener(options.signal, abort)
      const originalCallback = callback
      // Dispose the abort listener once the stream settles normally.
      callback = once((...args) => {
        disposable[SymbolDispose]()
        originalCallback.apply(stream, args)
      })
    }
  }
  // Same handler for fulfill and reject; suppressed after an abort fired.
  const resolverFn = (...args) => {
    if (!isAborted) {
      process.nextTick(() => callback.apply(stream, args))
    }
  }
  PromisePrototypeThen(stream[kIsClosedPromise].promise, resolverFn, resolverFn)
  return nop
}
|
||||
// Promise-based variant of eos(): resolves once the stream has finished,
// rejects with the stream error otherwise. With { cleanup: true } the eos
// listeners are removed as soon as the outcome is known.
function finished(stream, opts) {
  if (opts === null) {
    opts = kEmptyObject
  }
  let autoCleanup = false
  if (opts !== null && opts !== undefined && opts.cleanup) {
    validateBoolean(opts.cleanup, 'cleanup')
    autoCleanup = opts.cleanup
  }
  return new Promise((resolve, reject) => {
    const cleanup = eos(stream, opts, (err) => {
      if (autoCleanup) {
        cleanup()
      }
      if (err) {
        reject(err)
      } else {
        resolve()
      }
    })
  })
}
|
||||
// The module itself is the callback-style eos(); the promise-based variant
// hangs off it as `finished`.
module.exports = eos
module.exports.finished = finished
|
98
node_modules/readable-stream/lib/internal/streams/from.js
generated
vendored
Normal file
98
node_modules/readable-stream/lib/internal/streams/from.js
generated
vendored
Normal file
|
@ -0,0 +1,98 @@
|
|||
'use strict'
|
||||
|
||||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
|
||||
const { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = require('../../ours/primordials')
|
||||
const { Buffer } = require('buffer')
|
||||
const { ERR_INVALID_ARG_TYPE, ERR_STREAM_NULL_VALUES } = require('../../ours/errors').codes
|
||||
// Builds a Readable (of the supplied `Readable` class) from an iterable or
// async iterable. Strings and Buffers are pushed as a single chunk; other
// values are pulled one at a time with backpressure via readable.push().
function from(Readable, iterable, opts) {
  let iterator
  if (typeof iterable === 'string' || iterable instanceof Buffer) {
    return new Readable({
      objectMode: true,
      ...opts,
      read() {
        this.push(iterable)
        this.push(null)
      }
    })
  }
  let isAsync
  if (iterable && iterable[SymbolAsyncIterator]) {
    isAsync = true
    iterator = iterable[SymbolAsyncIterator]()
  } else if (iterable && iterable[SymbolIterator]) {
    isAsync = false
    iterator = iterable[SymbolIterator]()
  } else {
    throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable)
  }
  const readable = new Readable({
    objectMode: true,
    highWaterMark: 1,
    // TODO(ronag): What options should be allowed?
    ...opts
  })

  // Flag to protect against _read
  // being called before last iteration completion.
  let reading = false
  readable._read = function () {
    if (!reading) {
      reading = true
      next()
    }
  }
  readable._destroy = function (error, cb) {
    PromisePrototypeThen(
      close(error),
      () => process.nextTick(cb, error),
      // nextTick is here in case cb throws
      (e) => process.nextTick(cb, e || error)
    )
  }
  // Close out the iterator on destroy: forward the error via throw() when
  // supported, then fall back to return().
  async function close(error) {
    const hadError = error !== undefined && error !== null
    const hasThrow = typeof iterator.throw === 'function'
    if (hadError && hasThrow) {
      const { value, done } = await iterator.throw(error)
      await value
      if (done) {
        return
      }
    }
    if (typeof iterator.return === 'function') {
      const { value } = await iterator.return()
      await value
    }
  }
  // Pull loop: keep pushing while the consumer accepts data; stop (and let
  // the next _read restart) when push() reports backpressure.
  async function next() {
    for (;;) {
      try {
        const { value, done } = isAsync ? await iterator.next() : iterator.next()
        if (done) {
          readable.push(null)
        } else {
          // Awaiting resolves promise-valued items (e.g. from sync
          // iterators yielding promises).
          const res = value && typeof value.then === 'function' ? await value : value
          if (res === null) {
            reading = false
            throw new ERR_STREAM_NULL_VALUES()
          } else if (readable.push(res)) {
            continue
          } else {
            reading = false
          }
        }
      } catch (err) {
        readable.destroy(err)
      }
      break
    }
  }
  return readable
}
|
||||
// Sole export: the iterable-to-Readable factory above.
module.exports = from
|
51
node_modules/readable-stream/lib/internal/streams/lazy_transform.js
generated
vendored
Normal file
51
node_modules/readable-stream/lib/internal/streams/lazy_transform.js
generated
vendored
Normal file
|
@ -0,0 +1,51 @@
|
|||
// LazyTransform is a special type of Transform stream that is lazily loaded.
|
||||
// This is used for performance with bi-API-ship: when two APIs are available
|
||||
// for the stream, one conventional and one non-conventional.
|
||||
'use strict'
|
||||
|
||||
const { ObjectDefineProperties, ObjectDefineProperty, ObjectSetPrototypeOf } = require('../../ours/primordials')
|
||||
const stream = require('../../stream')
|
||||
const { getDefaultEncoding } = require('../crypto/util')
|
||||
module.exports = LazyTransform
|
||||
// LazyTransform defers the real stream.Transform initialization: the
// constructor only stashes the options; the accessor definitions at the
// bottom of this file run the actual constructor on first state access.
function LazyTransform(options) {
  this._options = options
}
// Inherit both instance and static members from stream.Transform.
ObjectSetPrototypeOf(LazyTransform.prototype, stream.Transform.prototype)
ObjectSetPrototypeOf(LazyTransform, stream.Transform)
|
||||
// Builds the lazy-initialization getter for state property `name`: on first
// access it runs the real Transform constructor with the stored options,
// then returns the now-materialized property (the paired setter replaces
// the accessor with a plain data property during construction).
function makeGetter(name) {
  return function () {
    stream.Transform.call(this, this._options)
    this._writableState.decodeStrings = false
    if (!this._options || !this._options.defaultEncoding) {
      this._writableState.defaultEncoding = getDefaultEncoding()
    }
    return this[name]
  }
}
|
||||
// Builds the setter paired with makeGetter: when the Transform constructor
// assigns the state object, replace the lazy accessor with an ordinary own
// data property so later accesses skip the getter entirely.
function makeSetter(name) {
  return function (value) {
    ObjectDefineProperty(this, name, {
      __proto__: null,
      value,
      writable: true,
      enumerable: true,
      configurable: true
    })
  }
}
|
||||
// Install the lazy accessors: touching either state property triggers the
// deferred Transform construction (see makeGetter/makeSetter above).
ObjectDefineProperties(LazyTransform.prototype, {
  _readableState: {
    __proto__: null,
    get: makeGetter('_readableState'),
    set: makeSetter('_readableState'),
    configurable: true,
    enumerable: true
  },
  _writableState: {
    __proto__: null,
    get: makeGetter('_writableState'),
    set: makeSetter('_writableState'),
    configurable: true,
    enumerable: true
  }
})
|
89
node_modules/readable-stream/lib/internal/streams/legacy.js
generated
vendored
Normal file
89
node_modules/readable-stream/lib/internal/streams/legacy.js
generated
vendored
Normal file
|
@ -0,0 +1,89 @@
|
|||
'use strict'
|
||||
|
||||
const { ArrayIsArray, ObjectSetPrototypeOf } = require('../../ours/primordials')
|
||||
const { EventEmitter: EE } = require('events')
|
||||
// Minimal legacy base stream: an EventEmitter augmented with the classic
// pipe() method defined below.
function Stream(opts) {
  EE.call(this, opts)
}
// Inherit instance and static members from EventEmitter.
ObjectSetPrototypeOf(Stream.prototype, EE.prototype)
ObjectSetPrototypeOf(Stream, EE)
|
||||
// Classic streams1 pipe: forward 'data' chunks to `dest`, pausing/resuming
// the source for backpressure, ending/destroying `dest` when the source
// ends, and tearing all listeners down on error or close.
Stream.prototype.pipe = function (dest, options) {
  const source = this
  function ondata(chunk) {
    // write() returning false signals backpressure; pause if we can.
    if (dest.writable && dest.write(chunk) === false && source.pause) {
      source.pause()
    }
  }
  source.on('data', ondata)
  function ondrain() {
    if (source.readable && source.resume) {
      source.resume()
    }
  }
  dest.on('drain', ondrain)

  // If the 'end' option is not supplied, dest.end() will be called when
  // source gets the 'end' or 'close' events. Only dest.end() once.
  if (!dest._isStdio && (!options || options.end !== false)) {
    source.on('end', onend)
    source.on('close', onclose)
  }
  // Guards against ending/destroying dest more than once.
  let didOnEnd = false
  function onend() {
    if (didOnEnd) return
    didOnEnd = true
    dest.end()
  }
  function onclose() {
    if (didOnEnd) return
    didOnEnd = true
    if (typeof dest.destroy === 'function') dest.destroy()
  }

  // Don't leave dangling pipes when there are errors.
  function onerror(er) {
    cleanup()
    // Re-emit only when nobody else is listening, to preserve the
    // "unhandled 'error' throws" behavior.
    if (EE.listenerCount(this, 'error') === 0) {
      this.emit('error', er)
    }
  }
  prependListener(source, 'error', onerror)
  prependListener(dest, 'error', onerror)

  // Remove all the event listeners that were added.
  function cleanup() {
    source.removeListener('data', ondata)
    dest.removeListener('drain', ondrain)
    source.removeListener('end', onend)
    source.removeListener('close', onclose)
    source.removeListener('error', onerror)
    dest.removeListener('error', onerror)
    source.removeListener('end', cleanup)
    source.removeListener('close', cleanup)
    dest.removeListener('close', cleanup)
  }
  source.on('end', cleanup)
  source.on('close', cleanup)
  dest.on('close', cleanup)
  dest.emit('pipe', source)

  // Allow for unix-like usage: A.pipe(B).pipe(C)
  return dest
}
|
||||
function prependListener(emitter, event, fn) {
|
||||
// Sadly this is not cacheable as some libraries bundle their own
|
||||
// event emitter implementation with them.
|
||||
if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn)
|
||||
|
||||
// This is a hack to make sure that our error handler is attached before any
|
||||
// userland ones. NEVER DO THIS. This is here only because this code needs
|
||||
// to continue to work with older versions of Node.js that do not include
|
||||
// the prependListener() method. The goal is to eventually remove this hack.
|
||||
if (!emitter._events || !emitter._events[event]) emitter.on(event, fn)
|
||||
else if (ArrayIsArray(emitter._events[event])) emitter._events[event].unshift(fn)
|
||||
else emitter._events[event] = [fn, emitter._events[event]]
|
||||
}
|
||||
module.exports = {
|
||||
Stream,
|
||||
prependListener
|
||||
}
|
457
node_modules/readable-stream/lib/internal/streams/operators.js
generated
vendored
Normal file
457
node_modules/readable-stream/lib/internal/streams/operators.js
generated
vendored
Normal file
|
@ -0,0 +1,457 @@
|
|||
'use strict'
|
||||
|
||||
const AbortController = globalThis.AbortController || require('abort-controller').AbortController
|
||||
const {
|
||||
codes: { ERR_INVALID_ARG_VALUE, ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE },
|
||||
AbortError
|
||||
} = require('../../ours/errors')
|
||||
const { validateAbortSignal, validateInteger, validateObject } = require('../validators')
|
||||
const kWeakHandler = require('../../ours/primordials').Symbol('kWeak')
|
||||
const kResistStopPropagation = require('../../ours/primordials').Symbol('kResistStopPropagation')
|
||||
const { finished } = require('./end-of-stream')
|
||||
const staticCompose = require('./compose')
|
||||
const { addAbortSignalNoValidate } = require('./add-abort-signal')
|
||||
const { isWritable, isNodeStream } = require('./utils')
|
||||
const { deprecate } = require('../../ours/util')
|
||||
const {
|
||||
ArrayPrototypePush,
|
||||
Boolean,
|
||||
MathFloor,
|
||||
Number,
|
||||
NumberIsNaN,
|
||||
Promise,
|
||||
PromiseReject,
|
||||
PromiseResolve,
|
||||
PromisePrototypeThen,
|
||||
Symbol
|
||||
} = require('../../ours/primordials')
|
||||
const kEmpty = Symbol('kEmpty')
|
||||
const kEof = Symbol('kEof')
|
||||
function compose(stream, options) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
if (isNodeStream(stream) && !isWritable(stream)) {
|
||||
throw new ERR_INVALID_ARG_VALUE('stream', stream, 'must be writable')
|
||||
}
|
||||
const composedStream = staticCompose(this, stream)
|
||||
if (options !== null && options !== undefined && options.signal) {
|
||||
// Not validating as we already validated before
|
||||
addAbortSignalNoValidate(options.signal, composedStream)
|
||||
}
|
||||
return composedStream
|
||||
}
|
||||
function map(fn, options) {
|
||||
if (typeof fn !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
|
||||
}
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
let concurrency = 1
|
||||
if ((options === null || options === undefined ? undefined : options.concurrency) != null) {
|
||||
concurrency = MathFloor(options.concurrency)
|
||||
}
|
||||
let highWaterMark = concurrency - 1
|
||||
if ((options === null || options === undefined ? undefined : options.highWaterMark) != null) {
|
||||
highWaterMark = MathFloor(options.highWaterMark)
|
||||
}
|
||||
validateInteger(concurrency, 'options.concurrency', 1)
|
||||
validateInteger(highWaterMark, 'options.highWaterMark', 0)
|
||||
highWaterMark += concurrency
|
||||
return async function* map() {
|
||||
const signal = require('../../ours/util').AbortSignalAny(
|
||||
[options === null || options === undefined ? undefined : options.signal].filter(Boolean)
|
||||
)
|
||||
const stream = this
|
||||
const queue = []
|
||||
const signalOpt = {
|
||||
signal
|
||||
}
|
||||
let next
|
||||
let resume
|
||||
let done = false
|
||||
let cnt = 0
|
||||
function onCatch() {
|
||||
done = true
|
||||
afterItemProcessed()
|
||||
}
|
||||
function afterItemProcessed() {
|
||||
cnt -= 1
|
||||
maybeResume()
|
||||
}
|
||||
function maybeResume() {
|
||||
if (resume && !done && cnt < concurrency && queue.length < highWaterMark) {
|
||||
resume()
|
||||
resume = null
|
||||
}
|
||||
}
|
||||
async function pump() {
|
||||
try {
|
||||
for await (let val of stream) {
|
||||
if (done) {
|
||||
return
|
||||
}
|
||||
if (signal.aborted) {
|
||||
throw new AbortError()
|
||||
}
|
||||
try {
|
||||
val = fn(val, signalOpt)
|
||||
if (val === kEmpty) {
|
||||
continue
|
||||
}
|
||||
val = PromiseResolve(val)
|
||||
} catch (err) {
|
||||
val = PromiseReject(err)
|
||||
}
|
||||
cnt += 1
|
||||
PromisePrototypeThen(val, afterItemProcessed, onCatch)
|
||||
queue.push(val)
|
||||
if (next) {
|
||||
next()
|
||||
next = null
|
||||
}
|
||||
if (!done && (queue.length >= highWaterMark || cnt >= concurrency)) {
|
||||
await new Promise((resolve) => {
|
||||
resume = resolve
|
||||
})
|
||||
}
|
||||
}
|
||||
queue.push(kEof)
|
||||
} catch (err) {
|
||||
const val = PromiseReject(err)
|
||||
PromisePrototypeThen(val, afterItemProcessed, onCatch)
|
||||
queue.push(val)
|
||||
} finally {
|
||||
done = true
|
||||
if (next) {
|
||||
next()
|
||||
next = null
|
||||
}
|
||||
}
|
||||
}
|
||||
pump()
|
||||
try {
|
||||
while (true) {
|
||||
while (queue.length > 0) {
|
||||
const val = await queue[0]
|
||||
if (val === kEof) {
|
||||
return
|
||||
}
|
||||
if (signal.aborted) {
|
||||
throw new AbortError()
|
||||
}
|
||||
if (val !== kEmpty) {
|
||||
yield val
|
||||
}
|
||||
queue.shift()
|
||||
maybeResume()
|
||||
}
|
||||
await new Promise((resolve) => {
|
||||
next = resolve
|
||||
})
|
||||
}
|
||||
} finally {
|
||||
done = true
|
||||
if (resume) {
|
||||
resume()
|
||||
resume = null
|
||||
}
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
function asIndexedPairs(options = undefined) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
return async function* asIndexedPairs() {
|
||||
let index = 0
|
||||
for await (const val of this) {
|
||||
var _options$signal
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal = options.signal) !== null &&
|
||||
_options$signal !== undefined &&
|
||||
_options$signal.aborted
|
||||
) {
|
||||
throw new AbortError({
|
||||
cause: options.signal.reason
|
||||
})
|
||||
}
|
||||
yield [index++, val]
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
async function some(fn, options = undefined) {
|
||||
for await (const unused of filter.call(this, fn, options)) {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
async function every(fn, options = undefined) {
|
||||
if (typeof fn !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
|
||||
}
|
||||
// https://en.wikipedia.org/wiki/De_Morgan%27s_laws
|
||||
return !(await some.call(
|
||||
this,
|
||||
async (...args) => {
|
||||
return !(await fn(...args))
|
||||
},
|
||||
options
|
||||
))
|
||||
}
|
||||
async function find(fn, options) {
|
||||
for await (const result of filter.call(this, fn, options)) {
|
||||
return result
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
async function forEach(fn, options) {
|
||||
if (typeof fn !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
|
||||
}
|
||||
async function forEachFn(value, options) {
|
||||
await fn(value, options)
|
||||
return kEmpty
|
||||
}
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
for await (const unused of map.call(this, forEachFn, options));
|
||||
}
|
||||
function filter(fn, options) {
|
||||
if (typeof fn !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
|
||||
}
|
||||
async function filterFn(value, options) {
|
||||
if (await fn(value, options)) {
|
||||
return value
|
||||
}
|
||||
return kEmpty
|
||||
}
|
||||
return map.call(this, filterFn, options)
|
||||
}
|
||||
|
||||
// Specific to provide better error to reduce since the argument is only
|
||||
// missing if the stream has no items in it - but the code is still appropriate
|
||||
class ReduceAwareErrMissingArgs extends ERR_MISSING_ARGS {
|
||||
constructor() {
|
||||
super('reduce')
|
||||
this.message = 'Reduce of an empty stream requires an initial value'
|
||||
}
|
||||
}
|
||||
async function reduce(reducer, initialValue, options) {
|
||||
var _options$signal2
|
||||
if (typeof reducer !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('reducer', ['Function', 'AsyncFunction'], reducer)
|
||||
}
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
let hasInitialValue = arguments.length > 1
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal2 = options.signal) !== null &&
|
||||
_options$signal2 !== undefined &&
|
||||
_options$signal2.aborted
|
||||
) {
|
||||
const err = new AbortError(undefined, {
|
||||
cause: options.signal.reason
|
||||
})
|
||||
this.once('error', () => {}) // The error is already propagated
|
||||
await finished(this.destroy(err))
|
||||
throw err
|
||||
}
|
||||
const ac = new AbortController()
|
||||
const signal = ac.signal
|
||||
if (options !== null && options !== undefined && options.signal) {
|
||||
const opts = {
|
||||
once: true,
|
||||
[kWeakHandler]: this,
|
||||
[kResistStopPropagation]: true
|
||||
}
|
||||
options.signal.addEventListener('abort', () => ac.abort(), opts)
|
||||
}
|
||||
let gotAnyItemFromStream = false
|
||||
try {
|
||||
for await (const value of this) {
|
||||
var _options$signal3
|
||||
gotAnyItemFromStream = true
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal3 = options.signal) !== null &&
|
||||
_options$signal3 !== undefined &&
|
||||
_options$signal3.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
if (!hasInitialValue) {
|
||||
initialValue = value
|
||||
hasInitialValue = true
|
||||
} else {
|
||||
initialValue = await reducer(initialValue, value, {
|
||||
signal
|
||||
})
|
||||
}
|
||||
}
|
||||
if (!gotAnyItemFromStream && !hasInitialValue) {
|
||||
throw new ReduceAwareErrMissingArgs()
|
||||
}
|
||||
} finally {
|
||||
ac.abort()
|
||||
}
|
||||
return initialValue
|
||||
}
|
||||
async function toArray(options) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
const result = []
|
||||
for await (const val of this) {
|
||||
var _options$signal4
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal4 = options.signal) !== null &&
|
||||
_options$signal4 !== undefined &&
|
||||
_options$signal4.aborted
|
||||
) {
|
||||
throw new AbortError(undefined, {
|
||||
cause: options.signal.reason
|
||||
})
|
||||
}
|
||||
ArrayPrototypePush(result, val)
|
||||
}
|
||||
return result
|
||||
}
|
||||
function flatMap(fn, options) {
|
||||
const values = map.call(this, fn, options)
|
||||
return async function* flatMap() {
|
||||
for await (const val of values) {
|
||||
yield* val
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
function toIntegerOrInfinity(number) {
|
||||
// We coerce here to align with the spec
|
||||
// https://github.com/tc39/proposal-iterator-helpers/issues/169
|
||||
number = Number(number)
|
||||
if (NumberIsNaN(number)) {
|
||||
return 0
|
||||
}
|
||||
if (number < 0) {
|
||||
throw new ERR_OUT_OF_RANGE('number', '>= 0', number)
|
||||
}
|
||||
return number
|
||||
}
|
||||
function drop(number, options = undefined) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
number = toIntegerOrInfinity(number)
|
||||
return async function* drop() {
|
||||
var _options$signal5
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal5 = options.signal) !== null &&
|
||||
_options$signal5 !== undefined &&
|
||||
_options$signal5.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
for await (const val of this) {
|
||||
var _options$signal6
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal6 = options.signal) !== null &&
|
||||
_options$signal6 !== undefined &&
|
||||
_options$signal6.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
if (number-- <= 0) {
|
||||
yield val
|
||||
}
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
function take(number, options = undefined) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
number = toIntegerOrInfinity(number)
|
||||
return async function* take() {
|
||||
var _options$signal7
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal7 = options.signal) !== null &&
|
||||
_options$signal7 !== undefined &&
|
||||
_options$signal7.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
for await (const val of this) {
|
||||
var _options$signal8
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal8 = options.signal) !== null &&
|
||||
_options$signal8 !== undefined &&
|
||||
_options$signal8.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
if (number-- > 0) {
|
||||
yield val
|
||||
}
|
||||
|
||||
// Don't get another item from iterator in case we reached the end
|
||||
if (number <= 0) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
module.exports.streamReturningOperators = {
|
||||
asIndexedPairs: deprecate(asIndexedPairs, 'readable.asIndexedPairs will be removed in a future version.'),
|
||||
drop,
|
||||
filter,
|
||||
flatMap,
|
||||
map,
|
||||
take,
|
||||
compose
|
||||
}
|
||||
module.exports.promiseReturningOperators = {
|
||||
every,
|
||||
forEach,
|
||||
reduce,
|
||||
toArray,
|
||||
some,
|
||||
find
|
||||
}
|
39
node_modules/readable-stream/lib/internal/streams/passthrough.js
generated
vendored
Normal file
39
node_modules/readable-stream/lib/internal/streams/passthrough.js
generated
vendored
Normal file
|
@ -0,0 +1,39 @@
|
|||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a passthrough stream.
|
||||
// basically just the most minimal sort of Transform stream.
|
||||
// Every written chunk gets output as-is.
|
||||
|
||||
'use strict'
|
||||
|
||||
const { ObjectSetPrototypeOf } = require('../../ours/primordials')
|
||||
module.exports = PassThrough
|
||||
const Transform = require('./transform')
|
||||
ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype)
|
||||
ObjectSetPrototypeOf(PassThrough, Transform)
|
||||
function PassThrough(options) {
|
||||
if (!(this instanceof PassThrough)) return new PassThrough(options)
|
||||
Transform.call(this, options)
|
||||
}
|
||||
PassThrough.prototype._transform = function (chunk, encoding, cb) {
|
||||
cb(null, chunk)
|
||||
}
|
471
node_modules/readable-stream/lib/internal/streams/pipeline.js
generated
vendored
Normal file
471
node_modules/readable-stream/lib/internal/streams/pipeline.js
generated
vendored
Normal file
|
@ -0,0 +1,471 @@
|
|||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
// Ported from https://github.com/mafintosh/pump with
|
||||
// permission from the author, Mathias Buus (@mafintosh).
|
||||
|
||||
;('use strict')
|
||||
const { ArrayIsArray, Promise, SymbolAsyncIterator, SymbolDispose } = require('../../ours/primordials')
|
||||
const eos = require('./end-of-stream')
|
||||
const { once } = require('../../ours/util')
|
||||
const destroyImpl = require('./destroy')
|
||||
const Duplex = require('./duplex')
|
||||
const {
|
||||
aggregateTwoErrors,
|
||||
codes: {
|
||||
ERR_INVALID_ARG_TYPE,
|
||||
ERR_INVALID_RETURN_VALUE,
|
||||
ERR_MISSING_ARGS,
|
||||
ERR_STREAM_DESTROYED,
|
||||
ERR_STREAM_PREMATURE_CLOSE
|
||||
},
|
||||
AbortError
|
||||
} = require('../../ours/errors')
|
||||
const { validateFunction, validateAbortSignal } = require('../validators')
|
||||
const {
|
||||
isIterable,
|
||||
isReadable,
|
||||
isReadableNodeStream,
|
||||
isNodeStream,
|
||||
isTransformStream,
|
||||
isWebStream,
|
||||
isReadableStream,
|
||||
isReadableFinished
|
||||
} = require('./utils')
|
||||
const AbortController = globalThis.AbortController || require('abort-controller').AbortController
|
||||
let PassThrough
|
||||
let Readable
|
||||
let addAbortListener
|
||||
function destroyer(stream, reading, writing) {
|
||||
let finished = false
|
||||
stream.on('close', () => {
|
||||
finished = true
|
||||
})
|
||||
const cleanup = eos(
|
||||
stream,
|
||||
{
|
||||
readable: reading,
|
||||
writable: writing
|
||||
},
|
||||
(err) => {
|
||||
finished = !err
|
||||
}
|
||||
)
|
||||
return {
|
||||
destroy: (err) => {
|
||||
if (finished) return
|
||||
finished = true
|
||||
destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe'))
|
||||
},
|
||||
cleanup
|
||||
}
|
||||
}
|
||||
function popCallback(streams) {
|
||||
// Streams should never be an empty array. It should always contain at least
|
||||
// a single stream. Therefore optimize for the average case instead of
|
||||
// checking for length === 0 as well.
|
||||
validateFunction(streams[streams.length - 1], 'streams[stream.length - 1]')
|
||||
return streams.pop()
|
||||
}
|
||||
function makeAsyncIterable(val) {
|
||||
if (isIterable(val)) {
|
||||
return val
|
||||
} else if (isReadableNodeStream(val)) {
|
||||
// Legacy streams are not Iterable.
|
||||
return fromReadable(val)
|
||||
}
|
||||
throw new ERR_INVALID_ARG_TYPE('val', ['Readable', 'Iterable', 'AsyncIterable'], val)
|
||||
}
|
||||
async function* fromReadable(val) {
|
||||
if (!Readable) {
|
||||
Readable = require('./readable')
|
||||
}
|
||||
yield* Readable.prototype[SymbolAsyncIterator].call(val)
|
||||
}
|
||||
async function pumpToNode(iterable, writable, finish, { end }) {
|
||||
let error
|
||||
let onresolve = null
|
||||
const resume = (err) => {
|
||||
if (err) {
|
||||
error = err
|
||||
}
|
||||
if (onresolve) {
|
||||
const callback = onresolve
|
||||
onresolve = null
|
||||
callback()
|
||||
}
|
||||
}
|
||||
const wait = () =>
|
||||
new Promise((resolve, reject) => {
|
||||
if (error) {
|
||||
reject(error)
|
||||
} else {
|
||||
onresolve = () => {
|
||||
if (error) {
|
||||
reject(error)
|
||||
} else {
|
||||
resolve()
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
writable.on('drain', resume)
|
||||
const cleanup = eos(
|
||||
writable,
|
||||
{
|
||||
readable: false
|
||||
},
|
||||
resume
|
||||
)
|
||||
try {
|
||||
if (writable.writableNeedDrain) {
|
||||
await wait()
|
||||
}
|
||||
for await (const chunk of iterable) {
|
||||
if (!writable.write(chunk)) {
|
||||
await wait()
|
||||
}
|
||||
}
|
||||
if (end) {
|
||||
writable.end()
|
||||
await wait()
|
||||
}
|
||||
finish()
|
||||
} catch (err) {
|
||||
finish(error !== err ? aggregateTwoErrors(error, err) : err)
|
||||
} finally {
|
||||
cleanup()
|
||||
writable.off('drain', resume)
|
||||
}
|
||||
}
|
||||
async function pumpToWeb(readable, writable, finish, { end }) {
|
||||
if (isTransformStream(writable)) {
|
||||
writable = writable.writable
|
||||
}
|
||||
// https://streams.spec.whatwg.org/#example-manual-write-with-backpressure
|
||||
const writer = writable.getWriter()
|
||||
try {
|
||||
for await (const chunk of readable) {
|
||||
await writer.ready
|
||||
writer.write(chunk).catch(() => {})
|
||||
}
|
||||
await writer.ready
|
||||
if (end) {
|
||||
await writer.close()
|
||||
}
|
||||
finish()
|
||||
} catch (err) {
|
||||
try {
|
||||
await writer.abort(err)
|
||||
finish(err)
|
||||
} catch (err) {
|
||||
finish(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
function pipeline(...streams) {
|
||||
return pipelineImpl(streams, once(popCallback(streams)))
|
||||
}
|
||||
function pipelineImpl(streams, callback, opts) {
|
||||
if (streams.length === 1 && ArrayIsArray(streams[0])) {
|
||||
streams = streams[0]
|
||||
}
|
||||
if (streams.length < 2) {
|
||||
throw new ERR_MISSING_ARGS('streams')
|
||||
}
|
||||
const ac = new AbortController()
|
||||
const signal = ac.signal
|
||||
const outerSignal = opts === null || opts === undefined ? undefined : opts.signal
|
||||
|
||||
// Need to cleanup event listeners if last stream is readable
|
||||
// https://github.com/nodejs/node/issues/35452
|
||||
const lastStreamCleanup = []
|
||||
validateAbortSignal(outerSignal, 'options.signal')
|
||||
function abort() {
|
||||
finishImpl(new AbortError())
|
||||
}
|
||||
addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
|
||||
let disposable
|
||||
if (outerSignal) {
|
||||
disposable = addAbortListener(outerSignal, abort)
|
||||
}
|
||||
let error
|
||||
let value
|
||||
const destroys = []
|
||||
let finishCount = 0
|
||||
function finish(err) {
|
||||
finishImpl(err, --finishCount === 0)
|
||||
}
|
||||
function finishImpl(err, final) {
|
||||
var _disposable
|
||||
if (err && (!error || error.code === 'ERR_STREAM_PREMATURE_CLOSE')) {
|
||||
error = err
|
||||
}
|
||||
if (!error && !final) {
|
||||
return
|
||||
}
|
||||
while (destroys.length) {
|
||||
destroys.shift()(error)
|
||||
}
|
||||
;(_disposable = disposable) === null || _disposable === undefined ? undefined : _disposable[SymbolDispose]()
|
||||
ac.abort()
|
||||
if (final) {
|
||||
if (!error) {
|
||||
lastStreamCleanup.forEach((fn) => fn())
|
||||
}
|
||||
process.nextTick(callback, error, value)
|
||||
}
|
||||
}
|
||||
let ret
|
||||
for (let i = 0; i < streams.length; i++) {
|
||||
const stream = streams[i]
|
||||
const reading = i < streams.length - 1
|
||||
const writing = i > 0
|
||||
const end = reading || (opts === null || opts === undefined ? undefined : opts.end) !== false
|
||||
const isLastStream = i === streams.length - 1
|
||||
if (isNodeStream(stream)) {
|
||||
if (end) {
|
||||
const { destroy, cleanup } = destroyer(stream, reading, writing)
|
||||
destroys.push(destroy)
|
||||
if (isReadable(stream) && isLastStream) {
|
||||
lastStreamCleanup.push(cleanup)
|
||||
}
|
||||
}
|
||||
|
||||
// Catch stream errors that occur after pipe/pump has completed.
|
||||
function onError(err) {
|
||||
if (err && err.name !== 'AbortError' && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
|
||||
finish(err)
|
||||
}
|
||||
}
|
||||
stream.on('error', onError)
|
||||
if (isReadable(stream) && isLastStream) {
|
||||
lastStreamCleanup.push(() => {
|
||||
stream.removeListener('error', onError)
|
||||
})
|
||||
}
|
||||
}
|
||||
if (i === 0) {
|
||||
if (typeof stream === 'function') {
|
||||
ret = stream({
|
||||
signal
|
||||
})
|
||||
if (!isIterable(ret)) {
|
||||
throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or Stream', 'source', ret)
|
||||
}
|
||||
} else if (isIterable(stream) || isReadableNodeStream(stream) || isTransformStream(stream)) {
|
||||
ret = stream
|
||||
} else {
|
||||
ret = Duplex.from(stream)
|
||||
}
|
||||
} else if (typeof stream === 'function') {
|
||||
if (isTransformStream(ret)) {
|
||||
var _ret
|
||||
ret = makeAsyncIterable((_ret = ret) === null || _ret === undefined ? undefined : _ret.readable)
|
||||
} else {
|
||||
ret = makeAsyncIterable(ret)
|
||||
}
|
||||
ret = stream(ret, {
|
||||
signal
|
||||
})
|
||||
if (reading) {
|
||||
if (!isIterable(ret, true)) {
|
||||
throw new ERR_INVALID_RETURN_VALUE('AsyncIterable', `transform[${i - 1}]`, ret)
|
||||
}
|
||||
} else {
|
||||
var _ret2
|
||||
if (!PassThrough) {
|
||||
PassThrough = require('./passthrough')
|
||||
}
|
||||
|
||||
// If the last argument to pipeline is not a stream
|
||||
// we must create a proxy stream so that pipeline(...)
|
||||
// always returns a stream which can be further
|
||||
// composed through `.pipe(stream)`.
|
||||
|
||||
const pt = new PassThrough({
|
||||
objectMode: true
|
||||
})
|
||||
|
||||
// Handle Promises/A+ spec, `then` could be a getter that throws on
|
||||
// second use.
|
||||
const then = (_ret2 = ret) === null || _ret2 === undefined ? undefined : _ret2.then
|
||||
if (typeof then === 'function') {
|
||||
finishCount++
|
||||
then.call(
|
||||
ret,
|
||||
(val) => {
|
||||
value = val
|
||||
if (val != null) {
|
||||
pt.write(val)
|
||||
}
|
||||
if (end) {
|
||||
pt.end()
|
||||
}
|
||||
process.nextTick(finish)
|
||||
},
|
||||
(err) => {
|
||||
pt.destroy(err)
|
||||
process.nextTick(finish, err)
|
||||
}
|
||||
)
|
||||
} else if (isIterable(ret, true)) {
|
||||
finishCount++
|
||||
pumpToNode(ret, pt, finish, {
|
||||
end
|
||||
})
|
||||
} else if (isReadableStream(ret) || isTransformStream(ret)) {
|
||||
const toRead = ret.readable || ret
|
||||
finishCount++
|
||||
pumpToNode(toRead, pt, finish, {
|
||||
end
|
||||
})
|
||||
} else {
|
||||
throw new ERR_INVALID_RETURN_VALUE('AsyncIterable or Promise', 'destination', ret)
|
||||
}
|
||||
ret = pt
|
||||
const { destroy, cleanup } = destroyer(ret, false, true)
|
||||
destroys.push(destroy)
|
||||
if (isLastStream) {
|
||||
lastStreamCleanup.push(cleanup)
|
||||
}
|
||||
}
|
||||
} else if (isNodeStream(stream)) {
|
||||
if (isReadableNodeStream(ret)) {
|
||||
finishCount += 2
|
||||
const cleanup = pipe(ret, stream, finish, {
|
||||
end
|
||||
})
|
||||
if (isReadable(stream) && isLastStream) {
|
||||
lastStreamCleanup.push(cleanup)
|
||||
}
|
||||
} else if (isTransformStream(ret) || isReadableStream(ret)) {
|
||||
const toRead = ret.readable || ret
|
||||
finishCount++
|
||||
pumpToNode(toRead, stream, finish, {
|
||||
end
|
||||
})
|
||||
} else if (isIterable(ret)) {
|
||||
finishCount++
|
||||
pumpToNode(ret, stream, finish, {
|
||||
end
|
||||
})
|
||||
} else {
|
||||
throw new ERR_INVALID_ARG_TYPE(
|
||||
'val',
|
||||
['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'],
|
||||
ret
|
||||
)
|
||||
}
|
||||
ret = stream
|
||||
} else if (isWebStream(stream)) {
|
||||
if (isReadableNodeStream(ret)) {
|
||||
finishCount++
|
||||
pumpToWeb(makeAsyncIterable(ret), stream, finish, {
|
||||
end
|
||||
})
|
||||
} else if (isReadableStream(ret) || isIterable(ret)) {
|
||||
finishCount++
|
||||
pumpToWeb(ret, stream, finish, {
|
||||
end
|
||||
})
|
||||
} else if (isTransformStream(ret)) {
|
||||
finishCount++
|
||||
pumpToWeb(ret.readable, stream, finish, {
|
||||
end
|
||||
})
|
||||
} else {
|
||||
throw new ERR_INVALID_ARG_TYPE(
|
||||
'val',
|
||||
['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'],
|
||||
ret
|
||||
)
|
||||
}
|
||||
ret = stream
|
||||
} else {
|
||||
ret = Duplex.from(stream)
|
||||
}
|
||||
}
|
||||
if (
|
||||
(signal !== null && signal !== undefined && signal.aborted) ||
|
||||
(outerSignal !== null && outerSignal !== undefined && outerSignal.aborted)
|
||||
) {
|
||||
process.nextTick(abort)
|
||||
}
|
||||
return ret
|
||||
}
|
||||
/**
 * Wire up one src -> dst link of a pipeline.
 *
 * @param {Readable} src - upstream stream.
 * @param {Writable} dst - downstream stream.
 * @param {Function} finish - pipeline completion callback, invoked with an
 *   optional error once this link settles.
 * @param {{ end: boolean }} options - whether this link should end `dst`
 *   when `src` ends.
 * @returns {Function} the eos() cleanup function registered on `dst`.
 */
function pipe(src, dst, finish, { end }) {
  let ended = false
  dst.on('close', () => {
    if (!ended) {
      // Finish if the destination closes before the source has completed.
      finish(new ERR_STREAM_PREMATURE_CLOSE())
    }
  })
  src.pipe(dst, {
    end: false
  }) // If end is true we already will have a listener to end dst.

  if (end) {
    // Compat. Before node v10.12.0 stdio used to throw an error so
    // pipe() did/does not end() stdio destinations.
    // Now they allow it but "secretly" don't close the underlying fd.

    function endFn() {
      ended = true
      dst.end()
    }
    if (isReadableFinished(src)) {
      // End the destination if the source has already ended.
      process.nextTick(endFn)
    } else {
      src.once('end', endFn)
    }
  } else {
    // This link is not responsible for ending dst; count it as done now.
    finish()
  }
  // Watch the readable side of src for completion or failure.
  eos(
    src,
    {
      readable: true,
      writable: false
    },
    (err) => {
      const rState = src._readableState
      if (
        err &&
        err.code === 'ERR_STREAM_PREMATURE_CLOSE' &&
        rState &&
        rState.ended &&
        !rState.errored &&
        !rState.errorEmitted
      ) {
        // Some readable streams will emit 'close' before 'end'. However, since
        // this is on the readable side 'end' should still be emitted if the
        // stream has been ended and no error emitted. This should be allowed in
        // favor of backwards compatibility. Since the stream is piped to a
        // destination this should not result in any observable difference.
        // We don't need to check if this is a writable premature close since
        // eos will only fail with premature close on the reading side for
        // duplex streams.
        src.once('end', finish).once('error', finish)
      } else {
        finish(err)
      }
    }
  )
  return eos(
    dst,
    {
      readable: false,
      writable: true
    },
    finish
  )
}
|
||||
// Public surface: the callback-style `pipeline` plus the lower-level
// `pipelineImpl` shared with the promisified API.
module.exports = {
  pipelineImpl,
  pipeline
}
|
1288
node_modules/readable-stream/lib/internal/streams/readable.js
generated
vendored
Normal file
1288
node_modules/readable-stream/lib/internal/streams/readable.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
39
node_modules/readable-stream/lib/internal/streams/state.js
generated
vendored
Normal file
39
node_modules/readable-stream/lib/internal/streams/state.js
generated
vendored
Normal file
|
@ -0,0 +1,39 @@
|
|||
'use strict'
|
||||
|
||||
const { MathFloor, NumberIsInteger } = require('../../ours/primordials')
|
||||
const { validateInteger } = require('../validators')
|
||||
const { ERR_INVALID_ARG_VALUE } = require('../../ours/errors').codes
|
||||
// Mutable module-level defaults (see setDefaultHighWaterMark):
// 16 KiB for byte streams, 16 items for object-mode streams.
let defaultHighWaterMarkBytes = 16 * 1024
let defaultHighWaterMarkObjectMode = 16
|
||||
/**
 * Extract the caller-supplied high-water mark from `options`.
 * Prefers the generic `highWaterMark`; for duplex streams falls back to the
 * side-specific key (e.g. `readableHighWaterMark`), otherwise null.
 * @param {object} options - stream options.
 * @param {boolean} isDuplex - whether side-specific keys apply.
 * @param {string} duplexKey - side-specific option name to consult.
 */
function highWaterMarkFrom(options, isDuplex, duplexKey) {
  if (options.highWaterMark != null) {
    return options.highWaterMark
  }
  return isDuplex ? options[duplexKey] : null
}
|
||||
/**
 * Current default high-water mark for new streams.
 * @param {boolean} objectMode - true for object-mode streams (counted in
 *   items rather than bytes).
 */
function getDefaultHighWaterMark(objectMode) {
  if (objectMode) {
    return defaultHighWaterMarkObjectMode
  }
  return defaultHighWaterMarkBytes
}
|
||||
/**
 * Override the process-wide default high-water mark.
 * @param {boolean} objectMode - which default to change.
 * @param {number} value - new mark; must be a non-negative integer
 *   (validated, throws otherwise).
 */
function setDefaultHighWaterMark(objectMode, value) {
  // Reject non-integer or negative marks up front.
  validateInteger(value, 'value', 0)
  if (objectMode) {
    defaultHighWaterMarkObjectMode = value
    return
  }
  defaultHighWaterMarkBytes = value
}
|
||||
/**
 * Resolve the effective high-water mark for a stream state.
 * Validates any user-supplied value and falls back to the module default.
 * @param {object} state - readable/writable state (only `objectMode` is read).
 * @param {object} options - stream options.
 * @param {string} duplexKey - side-specific option name for duplex streams.
 * @param {boolean} isDuplex - whether side-specific keys apply.
 * @throws {ERR_INVALID_ARG_VALUE} when the supplied mark is negative or
 *   not an integer.
 */
function getHighWaterMark(state, options, duplexKey, isDuplex) {
  const requested = highWaterMarkFrom(options, isDuplex, duplexKey)
  if (requested == null) {
    // Nothing supplied: use the process-wide default for this mode.
    return getDefaultHighWaterMark(state.objectMode)
  }
  if (!NumberIsInteger(requested) || requested < 0) {
    const optionName = isDuplex ? `options.${duplexKey}` : 'options.highWaterMark'
    throw new ERR_INVALID_ARG_VALUE(optionName, requested)
  }
  return MathFloor(requested)
}
|
||||
// High-water-mark helpers shared by the Readable/Writable/Duplex states.
module.exports = {
  getHighWaterMark,
  getDefaultHighWaterMark,
  setDefaultHighWaterMark
}
|
180
node_modules/readable-stream/lib/internal/streams/transform.js
generated
vendored
Normal file
180
node_modules/readable-stream/lib/internal/streams/transform.js
generated
vendored
Normal file
|
@ -0,0 +1,180 @@
|
|||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a transform stream is a readable/writable stream where you do
|
||||
// something with the data. Sometimes it's called a "filter",
|
||||
// but that's not a great name for it, since that implies a thing where
|
||||
// some bits pass through, and others are simply ignored. (That would
|
||||
// be a valid example of a transform, of course.)
|
||||
//
|
||||
// While the output is causally related to the input, it's not a
|
||||
// necessarily symmetric or synchronous transformation. For example,
|
||||
// a zlib stream might take multiple plain-text writes(), and then
|
||||
// emit a single compressed chunk some time in the future.
|
||||
//
|
||||
// Here's how this works:
|
||||
//
|
||||
// The Transform stream has all the aspects of the readable and writable
|
||||
// stream classes. When you write(chunk), that calls _write(chunk,cb)
|
||||
// internally, and returns false if there's a lot of pending writes
|
||||
// buffered up. When you call read(), that calls _read(n) until
|
||||
// there's enough pending readable data buffered up.
|
||||
//
|
||||
// In a transform stream, the written data is placed in a buffer. When
|
||||
// _read(n) is called, it transforms the queued up data, calling the
|
||||
// buffered _write cb's as it consumes chunks. If consuming a single
|
||||
// written chunk would result in multiple output chunks, then the first
|
||||
// outputted bit calls the readcb, and subsequent chunks just go into
|
||||
// the read buffer, and will cause it to emit 'readable' if necessary.
|
||||
//
|
||||
// This way, back-pressure is actually determined by the reading side,
|
||||
// since _read has to be called to start processing a new chunk. However,
|
||||
// a pathological inflate type of transform can cause excessive buffering
|
||||
// here. For example, imagine a stream where every byte of input is
|
||||
// interpreted as an integer from 0-255, and then results in that many
|
||||
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
|
||||
// 1kb of data being output. In this case, you could write a very small
|
||||
// amount of input, and end up with a very large amount of output. In
|
||||
// such a pathological inflating mechanism, there'd be no way to tell
|
||||
// the system to stop doing the transform. A single 4MB write could
|
||||
// cause the system to run out of memory.
|
||||
//
|
||||
// However, even in such a pathological case, only a single written chunk
|
||||
// would be consumed, and then the rest would wait (un-transformed) until
|
||||
// the results of the previous transformed chunk were consumed.
|
||||
|
||||
'use strict'
|
||||
|
||||
const { ObjectSetPrototypeOf, Symbol } = require('../../ours/primordials')
|
||||
module.exports = Transform
|
||||
const { ERR_METHOD_NOT_IMPLEMENTED } = require('../../ours/errors').codes
|
||||
const Duplex = require('./duplex')
|
||||
const { getHighWaterMark } = require('./state')
|
||||
ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype)
|
||||
ObjectSetPrototypeOf(Transform, Duplex)
|
||||
const kCallback = Symbol('kCallback')
|
||||
/**
 * Transform stream constructor: a Duplex whose written input is converted
 * into readable output via a user-supplied `_transform` (and optional
 * `_flush`), supplied either as options or by subclassing.
 * @param {object} [options] - Duplex options plus optional `transform`
 *   and `flush` functions.
 */
function Transform(options) {
  if (!(this instanceof Transform)) return new Transform(options)

  // TODO (ronag): This should preferably always be
  // applied but would be semver-major. Or even better;
  // make Transform a Readable with the Writable interface.
  const readableHighWaterMark = options ? getHighWaterMark(this, options, 'readableHighWaterMark', true) : null
  if (readableHighWaterMark === 0) {
    // A Duplex will buffer both on the writable and readable side while
    // a Transform just wants to buffer hwm number of elements. To avoid
    // buffering twice we disable buffering on the writable side.
    options = {
      ...options,
      highWaterMark: null,
      readableHighWaterMark,
      // TODO (ronag): 0 is not optimal since we have
      // a "bug" where we check needDrain before calling _write and not after.
      // Refs: https://github.com/nodejs/node/pull/32887
      // Refs: https://github.com/nodejs/node/pull/35941
      writableHighWaterMark: options.writableHighWaterMark || 0
    }
  }
  Duplex.call(this, options)

  // We have implemented the _read method, and done the other things
  // that Readable wants before the first _read call, so unset the
  // sync guard flag.
  this._readableState.sync = false

  // Parking spot for a _write callback awaiting read-side drain.
  this[kCallback] = null
  if (options) {
    if (typeof options.transform === 'function') this._transform = options.transform
    if (typeof options.flush === 'function') this._flush = options.flush
  }

  // When the writable side finishes, then flush out anything remaining.
  // Backwards compat. Some Transform streams incorrectly implement _final
  // instead of or in addition to _flush. By using 'prefinish' instead of
  // implementing _final we continue supporting this unfortunate use case.
  this.on('prefinish', prefinish)
}
|
||||
/**
 * Shared flush-then-EOF handler (used as the default `_final` and from the
 * 'prefinish' path). Runs the user's `_flush` when present and the stream is
 * not destroyed, pushes whatever it yields, then terminates the readable
 * side with `push(null)`.
 * @param {Function} [cb] - completion callback; when omitted, a flush error
 *   destroys the stream instead of being reported.
 */
function final(cb) {
  const canFlush = typeof this._flush === 'function' && !this.destroyed
  if (!canFlush) {
    // Nothing to flush: just signal EOF on the readable side.
    this.push(null)
    if (cb) {
      cb()
    }
    return
  }
  this._flush((er, data) => {
    if (er) {
      // Prefer reporting through the callback; destroy as a last resort.
      if (cb) {
        cb(er)
      } else {
        this.destroy(er)
      }
      return
    }
    if (data != null) {
      this.push(data)
    }
    this.push(null)
    if (cb) {
      cb()
    }
  })
}
|
||||
// 'prefinish' listener. When the user overrode _final (so Writable will not
// call the default `final`), run the shared flush/EOF logic from here so
// _flush is still honored.
function prefinish() {
  const usesDefaultFinal = this._final === final
  if (!usesDefaultFinal) {
    final.call(this)
  }
}
|
||||
// Use the shared flush-and-EOF handler as the default _final.
Transform.prototype._final = final

// Placeholder _transform: must be supplied via options or a subclass.
Transform.prototype._transform = function (chunk, encoding, callback) {
  throw new ERR_METHOD_NOT_IMPLEMENTED('_transform()')
}
|
||||
/**
 * Writable-side implementation: feed `chunk` through the user `_transform`
 * and apply read-side backpressure by parking the write callback in
 * `this[kCallback]` until `_read` releases it.
 */
Transform.prototype._write = function (chunk, encoding, callback) {
  const rState = this._readableState
  const wState = this._writableState
  // Readable length before transforming, to detect whether any output
  // was produced at all.
  const length = rState.length
  this._transform(chunk, encoding, (err, val) => {
    if (err) {
      callback(err)
      return
    }
    if (val != null) {
      this.push(val)
    }
    // Ack immediately unless the readable buffer grew past the high-water
    // mark; otherwise park the callback until the reader drains.
    if (
      wState.ended ||
      // Backwards compat.
      length === rState.length ||
      // Backwards compat.
      rState.length < rState.highWaterMark
    ) {
      callback()
    } else {
      this[kCallback] = callback
    }
  })
}
|
||||
// The readable side wants data: release a parked _write callback (if any)
// so the next written chunk can be transformed.
Transform.prototype._read = function () {
  const pending = this[kCallback]
  if (pending == null) {
    return
  }
  this[kCallback] = null
  pending()
}
|
329
node_modules/readable-stream/lib/internal/streams/utils.js
generated
vendored
Normal file
329
node_modules/readable-stream/lib/internal/streams/utils.js
generated
vendored
Normal file
|
@ -0,0 +1,329 @@
|
|||
'use strict'
|
||||
|
||||
const { SymbolAsyncIterator, SymbolIterator, SymbolFor } = require('../../ours/primordials')
|
||||
|
||||
// We need to use SymbolFor to make these globally available
|
||||
// for interopt with readable-stream, i.e. readable-stream
|
||||
// and node core needs to be able to read/write private state
|
||||
// from each other for proper interoperability.
|
||||
const kIsDestroyed = SymbolFor('nodejs.stream.destroyed')
|
||||
const kIsErrored = SymbolFor('nodejs.stream.errored')
|
||||
const kIsReadable = SymbolFor('nodejs.stream.readable')
|
||||
const kIsWritable = SymbolFor('nodejs.stream.writable')
|
||||
const kIsDisturbed = SymbolFor('nodejs.stream.disturbed')
|
||||
const kIsClosedPromise = SymbolFor('nodejs.webstream.isClosedPromise')
|
||||
const kControllerErrorFunction = SymbolFor('nodejs.webstream.controllerErrorFunction')
|
||||
/**
 * Duck-type check for a Node.js readable stream.
 * @param {*} obj - candidate value.
 * @param {boolean} [strict=false] - additionally require pause()/resume().
 * @returns {boolean}
 */
function isReadableNodeStream(obj, strict = false) {
  if (!obj || typeof obj.pipe !== 'function' || typeof obj.on !== 'function') {
    return false
  }
  if (strict && (typeof obj.pause !== 'function' || typeof obj.resume !== 'function')) {
    return false
  }
  if (obj._writableState) {
    // Looks like a Duplex: the readable half must exist and must not be
    // explicitly disabled. (Plain Writables have .pipe too.)
    if (obj._readableState?.readable === false) {
      return false
    }
    if (!obj._readableState) {
      return false
    }
  }
  return true
}
|
||||
|
||||
/**
 * Duck-type check for a Node.js writable stream.
 * @param {*} obj - candidate value.
 * @returns {boolean}
 */
function isWritableNodeStream(obj) {
  if (!obj || typeof obj.write !== 'function' || typeof obj.on !== 'function') {
    return false
  }
  // For a Duplex, the writable half must not be explicitly disabled.
  if (obj._readableState && obj._writableState?.writable === false) {
    return false
  }
  return true
}
|
||||
|
||||
/**
 * Duck-type check for a Node.js duplex stream: readable API plus readable
 * state, and the writable API surface.
 * @param {*} obj - candidate value.
 * @returns {boolean}
 */
function isDuplexNodeStream(obj) {
  if (!obj) {
    return false
  }
  const looksReadable = typeof obj.pipe === 'function' && !!obj._readableState
  const looksWritable = typeof obj.on === 'function' && typeof obj.write === 'function'
  return looksReadable && looksWritable
}
|
||||
/**
 * Loose check for any Node.js stream-ish object.
 * NOTE: intentionally NOT coerced to boolean — a nullish/falsy input is
 * returned as-is and a truthy state object may be returned; callers only
 * test truthiness (preserved from the original).
 * @param {*} obj - candidate value.
 */
function isNodeStream(obj) {
  if (!obj) {
    return obj
  }
  const hasWriteApi = typeof obj.write === 'function' && typeof obj.on === 'function'
  const hasPipeApi = typeof obj.pipe === 'function' && typeof obj.on === 'function'
  return obj._readableState || obj._writableState || hasWriteApi || hasPipeApi
}
|
||||
/**
 * Duck-type check for a WHATWG ReadableStream (and not a Node stream).
 * @param {*} obj - candidate value.
 * @returns {boolean}
 */
function isReadableStream(obj) {
  if (!obj || isNodeStream(obj)) {
    return false
  }
  return (
    typeof obj.pipeThrough === 'function' &&
    typeof obj.getReader === 'function' &&
    typeof obj.cancel === 'function'
  )
}
|
||||
/**
 * Duck-type check for a WHATWG WritableStream (and not a Node stream).
 * @param {*} obj - candidate value.
 * @returns {boolean}
 */
function isWritableStream(obj) {
  if (!obj || isNodeStream(obj)) {
    return false
  }
  return typeof obj.getWriter === 'function' && typeof obj.abort === 'function'
}
|
||||
/**
 * Duck-type check for a WHATWG TransformStream: an object pair of
 * `readable` and `writable` sides (and not a Node stream).
 * @param {*} obj - candidate value.
 * @returns {boolean}
 */
function isTransformStream(obj) {
  if (!obj || isNodeStream(obj)) {
    return false
  }
  return typeof obj.readable === 'object' && typeof obj.writable === 'object'
}
|
||||
// Any flavor of WHATWG web stream (Readable, Writable, or Transform).
function isWebStream(obj) {
  if (isReadableStream(obj)) {
    return true
  }
  if (isWritableStream(obj)) {
    return true
  }
  return isTransformStream(obj)
}
|
||||
/**
 * Check whether `obj` is iterable.
 * @param {*} obj - candidate value.
 * @param {boolean} [isAsync] - true: async-iterable only; false:
 *   sync-iterable only; undefined: either.
 * @returns {boolean}
 */
function isIterable(obj, isAsync) {
  if (obj == null) {
    return false
  }
  const hasAsync = () => typeof obj[SymbolAsyncIterator] === 'function'
  const hasSync = () => typeof obj[SymbolIterator] === 'function'
  if (isAsync === true) {
    return hasAsync()
  }
  if (isAsync === false) {
    return hasSync()
  }
  return hasAsync() || hasSync()
}
|
||||
/**
 * Whether the stream has been destroyed (via flag, interop symbol, or
 * internal state). Returns null for non-streams.
 * @param {*} stream - candidate stream.
 * @returns {boolean|null}
 */
function isDestroyed(stream) {
  if (!isNodeStream(stream)) {
    return null
  }
  const state = stream._writableState || stream._readableState
  return !!(stream.destroyed || stream[kIsDestroyed] || state?.destroyed)
}
|
||||
|
||||
// Have been end():d.
|
||||
// Has end() been called (regardless of whether 'finish' has fired)?
// Returns null when undeterminable or not a writable Node stream.
function isWritableEnded(stream) {
  if (!isWritableNodeStream(stream)) {
    return null
  }
  if (stream.writableEnded === true) {
    return true
  }
  const wState = stream._writableState
  if (wState?.errored) {
    return false
  }
  if (typeof wState?.ended !== 'boolean') {
    return null
  }
  return wState.ended
}
|
||||
|
||||
// Have emitted 'finish'.
|
||||
// Has 'finish' been emitted? In non-strict mode (`strict === false`) a
// fully drained, ended stream also counts. Returns null when undeterminable.
function isWritableFinished(stream, strict) {
  if (!isWritableNodeStream(stream)) {
    return null
  }
  if (stream.writableFinished === true) {
    return true
  }
  const wState = stream._writableState
  if (wState?.errored) {
    return false
  }
  if (typeof wState?.finished !== 'boolean') {
    return null
  }
  if (wState.finished) {
    return true
  }
  return strict === false && wState.ended === true && wState.length === 0
}
|
||||
|
||||
// Have been push(null):d.
|
||||
// Has push(null) been seen (EOF queued)? Returns null when undeterminable.
function isReadableEnded(stream) {
  if (!isReadableNodeStream(stream)) {
    return null
  }
  if (stream.readableEnded === true) {
    return true
  }
  const rState = stream._readableState
  if (!rState || rState.errored) {
    return false
  }
  if (typeof rState.ended !== 'boolean') {
    return null
  }
  return rState.ended
}
|
||||
|
||||
// Have emitted 'end'.
|
||||
// Has 'end' been emitted? In non-strict mode (`strict === false`) a fully
// drained, ended stream also counts. Returns null when undeterminable.
function isReadableFinished(stream, strict) {
  if (!isReadableNodeStream(stream)) {
    return null
  }
  const rState = stream._readableState
  if (rState?.errored) {
    return false
  }
  if (typeof rState?.endEmitted !== 'boolean') {
    return null
  }
  if (rState.endEmitted) {
    return true
  }
  return strict === false && rState.ended === true && rState.length === 0
}
|
||||
// Is the stream currently readable? The interop symbol, when set, wins
// over duck typing. Returns null when undeterminable.
function isReadable(stream) {
  if (stream && stream[kIsReadable] != null) {
    return stream[kIsReadable]
  }
  if (typeof stream?.readable !== 'boolean') {
    return null
  }
  if (isDestroyed(stream)) {
    return false
  }
  if (!isReadableNodeStream(stream)) {
    return false
  }
  return stream.readable && !isReadableFinished(stream)
}
|
||||
// Is the stream currently writable? The interop symbol, when set, wins
// over duck typing. Returns null when undeterminable.
function isWritable(stream) {
  if (stream && stream[kIsWritable] != null) {
    return stream[kIsWritable]
  }
  if (typeof stream?.writable !== 'boolean') {
    return null
  }
  if (isDestroyed(stream)) {
    return false
  }
  if (!isWritableNodeStream(stream)) {
    return false
  }
  return stream.writable && !isWritableEnded(stream)
}
|
||||
/**
 * Whether the stream is fully done on the sides the caller cares about.
 * @param {*} stream - candidate stream.
 * @param {{readable?: boolean, writable?: boolean}} [opts] - set a side to
 *   false to exclude it from the check.
 * @returns {boolean|null} null for non-streams.
 */
function isFinished(stream, opts) {
  if (!isNodeStream(stream)) {
    return null
  }
  if (isDestroyed(stream)) {
    return true
  }
  if (opts?.readable !== false && isReadable(stream)) {
    return false
  }
  if (opts?.writable !== false && isWritable(stream)) {
    return false
  }
  return true
}
|
||||
// The writable side's error, if any: the public accessor first, then the
// internal state, else null. Returns null for non-streams.
function isWritableErrored(stream) {
  if (!isNodeStream(stream)) {
    return null
  }
  if (stream.writableErrored) {
    return stream.writableErrored
  }
  return stream._writableState?.errored ?? null
}
|
||||
// The readable side's error, if any: the public accessor first, then the
// internal state, else null. Returns null for non-streams.
function isReadableErrored(stream) {
  if (!isNodeStream(stream)) {
    return null
  }
  if (stream.readableErrored) {
    return stream.readableErrored
  }
  return stream._readableState?.errored ?? null
}
|
||||
/**
 * Whether the stream has fully closed. Consults, in order: the public
 * `closed` flag, either side's internal state, then http OutgoingMessage's
 * `_closed`. Returns null when undeterminable or not a stream.
 */
function isClosed(stream) {
  if (!isNodeStream(stream)) {
    return null
  }
  if (typeof stream.closed === 'boolean') {
    return stream.closed
  }
  const wState = stream._writableState
  const rState = stream._readableState
  const hasStateFlag = typeof wState?.closed === 'boolean' || typeof rState?.closed === 'boolean'
  if (hasStateFlag) {
    return wState?.closed || rState?.closed
  }
  if (typeof stream._closed === 'boolean' && isOutgoingMessage(stream)) {
    return stream._closed
  }
  return null
}
|
||||
// Duck-type http.OutgoingMessage by the boolean private flags it carries.
function isOutgoingMessage(stream) {
  const flags = ['_closed', '_defaultKeepAlive', '_removedConnection', '_removedContLen']
  return flags.every((name) => typeof stream[name] === 'boolean')
}
|
||||
// Duck-type http.ServerResponse: an OutgoingMessage with the _sent100 flag.
function isServerResponse(stream) {
  if (typeof stream._sent100 !== 'boolean') {
    return false
  }
  return isOutgoingMessage(stream)
}
|
||||
// Duck-type a server-side http.IncomingMessage: parser flags present and no
// client-side `req.upgradeOrConnect`.
function isServerRequest(stream) {
  const hasParserFlags = typeof stream._consuming === 'boolean' && typeof stream._dumped === 'boolean'
  if (!hasParserFlags) {
    return false
  }
  return stream.req?.upgradeOrConnect === undefined
}
|
||||
// Will this stream eventually emit 'close' on its own? Returns null for
// non-streams.
function willEmitClose(stream) {
  if (!isNodeStream(stream)) {
    return null
  }
  const state = stream._writableState || stream._readableState
  if (!state) {
    // No internal state: only server responses are known to emit 'close'.
    return isServerResponse(stream)
  }
  return !!(state.autoDestroy && state.emitClose && state.closed === false)
}
|
||||
// Has the stream been read from or aborted? The interop symbol, when set,
// overrides the readable accessors.
function isDisturbed(stream) {
  if (!stream) {
    return false
  }
  const flagged = stream[kIsDisturbed] ?? (stream.readableDidRead || stream.readableAborted)
  return !!flagged
}
|
||||
/**
 * Whether the stream has errored, checking every known error channel.
 * First non-nullish value wins, mirroring the original transpiled
 * nullish-coalescing chain exactly.
 * @param {*} stream - candidate stream.
 * @returns {boolean}
 */
function isErrored(stream) {
  if (!stream) {
    return false
  }
  const errored =
    stream[kIsErrored] ??
    stream.readableErrored ??
    stream.writableErrored ??
    stream._readableState?.errorEmitted ??
    stream._writableState?.errorEmitted ??
    stream._readableState?.errored ??
    stream._writableState?.errored
  return !!errored
}
|
||||
// Stream-introspection predicates plus the shared interop symbols used by
// readable-stream and node core to read each other's private state.
module.exports = {
  isDestroyed,
  kIsDestroyed,
  isDisturbed,
  kIsDisturbed,
  isErrored,
  kIsErrored,
  isReadable,
  kIsReadable,
  kIsClosedPromise,
  kControllerErrorFunction,
  kIsWritable,
  isClosed,
  isDuplexNodeStream,
  isFinished,
  isIterable,
  isReadableNodeStream,
  isReadableStream,
  isReadableEnded,
  isReadableFinished,
  isReadableErrored,
  isNodeStream,
  isWebStream,
  isWritable,
  isWritableNodeStream,
  isWritableStream,
  isWritableEnded,
  isWritableFinished,
  isWritableErrored,
  isServerRequest,
  isServerResponse,
  willEmitClose,
  isTransformStream
}
|
817
node_modules/readable-stream/lib/internal/streams/writable.js
generated
vendored
Normal file
817
node_modules/readable-stream/lib/internal/streams/writable.js
generated
vendored
Normal file
|
@ -0,0 +1,817 @@
|
|||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// A bit simpler than readable streams.
|
||||
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
|
||||
// the drain event emission and buffering.
|
||||
|
||||
;('use strict')
|
||||
const {
|
||||
ArrayPrototypeSlice,
|
||||
Error,
|
||||
FunctionPrototypeSymbolHasInstance,
|
||||
ObjectDefineProperty,
|
||||
ObjectDefineProperties,
|
||||
ObjectSetPrototypeOf,
|
||||
StringPrototypeToLowerCase,
|
||||
Symbol,
|
||||
SymbolHasInstance
|
||||
} = require('../../ours/primordials')
|
||||
module.exports = Writable
|
||||
Writable.WritableState = WritableState
|
||||
const { EventEmitter: EE } = require('events')
|
||||
const Stream = require('./legacy').Stream
|
||||
const { Buffer } = require('buffer')
|
||||
const destroyImpl = require('./destroy')
|
||||
const { addAbortSignal } = require('./add-abort-signal')
|
||||
const { getHighWaterMark, getDefaultHighWaterMark } = require('./state')
|
||||
const {
|
||||
ERR_INVALID_ARG_TYPE,
|
||||
ERR_METHOD_NOT_IMPLEMENTED,
|
||||
ERR_MULTIPLE_CALLBACK,
|
||||
ERR_STREAM_CANNOT_PIPE,
|
||||
ERR_STREAM_DESTROYED,
|
||||
ERR_STREAM_ALREADY_FINISHED,
|
||||
ERR_STREAM_NULL_VALUES,
|
||||
ERR_STREAM_WRITE_AFTER_END,
|
||||
ERR_UNKNOWN_ENCODING
|
||||
} = require('../../ours/errors').codes
|
||||
const { errorOrDestroy } = destroyImpl
|
||||
ObjectSetPrototypeOf(Writable.prototype, Stream.prototype)
|
||||
ObjectSetPrototypeOf(Writable, Stream)
|
||||
// Shared do-nothing callback placeholder.
function nop() {}
|
||||
const kOnFinished = Symbol('kOnFinished')
|
||||
/**
 * Per-stream mutable state for the writable side. Shared by Writable and
 * Duplex; `isDuplex` selects the writable-specific option variants.
 * @param {object|undefined} options - stream options (may be undefined).
 * @param {Writable} stream - the owning stream instance.
 * @param {boolean} [isDuplex] - computed from `stream instanceof Duplex`
 *   when not passed as a boolean.
 */
function WritableState(options, stream, isDuplex) {
  // Duplex streams are both readable and writable, but share
  // the same options object.
  // However, some cases require setting options to different
  // values for the readable and the writable sides of the duplex stream,
  // e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
  if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof require('./duplex')

  // Object stream flag to indicate whether or not this stream
  // contains buffers or objects.
  this.objectMode = !!(options && options.objectMode)
  if (isDuplex) this.objectMode = this.objectMode || !!(options && options.writableObjectMode)

  // The point at which write() starts returning false
  // Note: 0 is a valid value, means that we always return false if
  // the entire buffer is not flushed immediately on write().
  this.highWaterMark = options
    ? getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex)
    : getDefaultHighWaterMark(false)

  // if _final has been called.
  this.finalCalled = false

  // drain event flag.
  this.needDrain = false
  // At the start of calling end()
  this.ending = false
  // When end() has been called, and returned.
  this.ended = false
  // When 'finish' is emitted.
  this.finished = false

  // Has it been destroyed
  this.destroyed = false

  // Should we decode strings into buffers before passing to _write?
  // this is here so that some node-core streams can optimize string
  // handling at a lower level.
  const noDecode = !!(options && options.decodeStrings === false)
  this.decodeStrings = !noDecode

  // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = (options && options.defaultEncoding) || 'utf8'

  // Not an actual buffer we keep track of, but a measurement
  // of how much we're waiting to get pushed to some underlying
  // socket or file.
  this.length = 0

  // A flag to see when we're in the middle of a write.
  this.writing = false

  // When true all writes will be buffered until .uncork() call.
  this.corked = 0

  // A flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick. We set this to true at first, because any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.
  this.sync = true

  // A flag to know if we're processing previously buffered items, which
  // may call the _write() callback in the same tick, so that we don't
  // end up in an overlapped onwrite situation.
  this.bufferProcessing = false

  // The callback that's passed to _write(chunk, cb).
  this.onwrite = onwrite.bind(undefined, stream)

  // The callback that the user supplies to write(chunk, encoding, cb).
  this.writecb = null

  // The amount that is being written when _write is called.
  this.writelen = 0

  // Storage for data passed to the afterWrite() callback in case of
  // synchronous _write() completion.
  this.afterWriteTickInfo = null

  // Initializes buffered/bufferedIndex/allBuffers/allNoop.
  resetBuffer(this)

  // Number of pending user-supplied write callbacks
  // this must be 0 before 'finish' can be emitted.
  this.pendingcb = 0

  // Stream is still being constructed and cannot be
  // destroyed until construction finished or failed.
  // Async construction is opt in, therefore we start as
  // constructed.
  this.constructed = true

  // Emit prefinish if the only thing we're waiting for is _write cbs
  // This is relevant for synchronous Transform streams.
  this.prefinished = false

  // True if the error was already emitted and should not be thrown again.
  this.errorEmitted = false

  // Should close be emitted on destroy. Defaults to true.
  this.emitClose = !options || options.emitClose !== false

  // Should .destroy() be called after 'finish' (and potentially 'end').
  this.autoDestroy = !options || options.autoDestroy !== false

  // Indicates whether the stream has errored. When true all write() calls
  // should return false. This is needed since when autoDestroy
  // is disabled we need a way to tell whether the stream has failed.
  this.errored = null

  // Indicates whether the stream has finished destroying.
  this.closed = false

  // True if close has been emitted or would have been emitted
  // depending on emitClose.
  this.closeEmitted = false

  // Callbacks waiting for 'finish'.
  this[kOnFinished] = []
}
|
||||
// Discard all queued write requests and restore the buffer bookkeeping to
// its pristine state. With an empty queue, the "all chunks are Buffers"
// and "all callbacks are no-ops" invariants hold vacuously again.
function resetBuffer(state) {
  Object.assign(state, {
    buffered: [],
    bufferedIndex: 0,
    allBuffers: true,
    allNoop: true
  })
}
|
||||
// Compat: expose a copy of the not-yet-dispatched tail of the write queue.
WritableState.prototype.getBuffer = function getBuffer() {
  const { buffered, bufferedIndex } = this
  return ArrayPrototypeSlice(buffered, bufferedIndex)
}
|
||||
// Compat: legacy streams exposed the number of queued (not yet dispatched)
// write requests via this accessor.
ObjectDefineProperty(WritableState.prototype, 'bufferedRequestCount', {
  __proto__: null,
  get() {
    // Entries before `bufferedIndex` were already handed to _write/_writev.
    return this.buffered.length - this.bufferedIndex
  }
})
|
||||
// Writable stream constructor. Installs user-supplied hooks from `options`
// and kicks off (possibly async) construction.
function Writable(options) {
  // Writable ctor is applied to Duplexes, too.
  // `realHasInstance` is necessary because using plain `instanceof`
  // would return false, as no `_writableState` property is attached.

  // Trying to use the custom `instanceof` for Writable here will also break the
  // Node.js LazyTransform implementation, which has a non-trivial getter for
  // `_writableState` that would lead to infinite recursion.

  // Checking for a Stream.Duplex instance is faster here instead of inside
  // the WritableState constructor, at least with V8 6.5.
  const isDuplex = this instanceof require('./duplex')
  // Support calling Writable(options) without `new`.
  if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) return new Writable(options)
  this._writableState = new WritableState(options, this, isDuplex)
  if (options) {
    // User-supplied hooks shadow the prototype's default implementations.
    if (typeof options.write === 'function') this._write = options.write
    if (typeof options.writev === 'function') this._writev = options.writev
    if (typeof options.destroy === 'function') this._destroy = options.destroy
    if (typeof options.final === 'function') this._final = options.final
    if (typeof options.construct === 'function') this._construct = options.construct
    if (options.signal) addAbortSignal(options.signal, this)
  }
  Stream.call(this, options)
  // Once construction completes, flush whatever was buffered meanwhile and
  // re-check whether the stream can already finish.
  destroyImpl.construct(this, () => {
    const state = this._writableState
    if (!state.writing) {
      clearBuffer(this, state)
    }
    finishMaybe(this, state)
  })
}
|
||||
// Custom `instanceof` so duck-typed streams (e.g. from another copy of this
// package) that carry a `_writableState` still pass `x instanceof Writable`.
ObjectDefineProperty(Writable, SymbolHasInstance, {
  __proto__: null,
  value: function (object) {
    if (FunctionPrototypeSymbolHasInstance(this, object)) return true
    // Only apply the duck-typing fallback for Writable itself, not subclasses.
    if (this !== Writable) return false
    return object && object._writableState instanceof WritableState
  }
})
|
||||
|
||||
// Otherwise people can pipe Writable streams, which is just wrong.
// Writable is a sink; only Readable implements a usable pipe().
Writable.prototype.pipe = function () {
  errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE())
}
|
||||
// Shared implementation behind write() and end(chunk): normalizes the
// (chunk, encoding, cb) arguments, validates/coerces the chunk, and either
// queues or dispatches it. Returns an Error on write-after-end/destroy,
// otherwise the boolean backpressure result of writeOrBuffer().
function _write(stream, chunk, encoding, cb) {
  const state = stream._writableState
  if (typeof encoding === 'function') {
    // write(chunk, cb) form.
    cb = encoding
    encoding = state.defaultEncoding
  } else {
    if (!encoding) encoding = state.defaultEncoding
    else if (encoding !== 'buffer' && !Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding)
    if (typeof cb !== 'function') cb = nop
  }
  if (chunk === null) {
    throw new ERR_STREAM_NULL_VALUES()
  } else if (!state.objectMode) {
    // In byte mode, everything is normalized to a Buffer unless the user
    // opted out of string decoding.
    if (typeof chunk === 'string') {
      if (state.decodeStrings !== false) {
        chunk = Buffer.from(chunk, encoding)
        encoding = 'buffer'
      }
    } else if (chunk instanceof Buffer) {
      encoding = 'buffer'
    } else if (Stream._isUint8Array(chunk)) {
      chunk = Stream._uint8ArrayToBuffer(chunk)
      encoding = 'buffer'
    } else {
      throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk)
    }
  }
  let err
  if (state.ending) {
    err = new ERR_STREAM_WRITE_AFTER_END()
  } else if (state.destroyed) {
    err = new ERR_STREAM_DESTROYED('write')
  }
  if (err) {
    // The user callback must still run, but always asynchronously.
    process.nextTick(cb, err)
    errorOrDestroy(stream, err, true)
    return err
  }
  // Accounts for the pending cb; decremented once cb has been invoked.
  state.pendingcb++
  return writeOrBuffer(stream, state, chunk, encoding, cb)
}
|
||||
// Public write(): _write() yields true, false, or an Error; only a strict
// `true` means "keep writing without waiting for 'drain'".
Writable.prototype.write = function (chunk, encoding, cb) {
  const result = _write(this, chunk, encoding, cb)
  return result === true
}
|
||||
// Buffer all subsequent writes until a matching uncork() (or end()).
// Calls may nest; each cork() must be balanced by an uncork().
Writable.prototype.cork = function () {
  const state = this._writableState
  state.corked += 1
}
|
||||
// Undo one cork() level; once the count is decremented the buffered writes
// are flushed (unless a write is already in flight, in which case onwrite()
// will flush later).
Writable.prototype.uncork = function () {
  const state = this._writableState
  if (state.corked) {
    state.corked--
    if (!state.writing) clearBuffer(this, state)
  }
}
|
||||
// Set the encoding assumed for string chunks when write() is called without
// an explicit encoding. Throws ERR_UNKNOWN_ENCODING for invalid encodings;
// returns `this` for chaining.
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
  // node::ParseEncoding() requires lower case.
  if (typeof encoding === 'string') encoding = StringPrototypeToLowerCase(encoding)
  if (!Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding)
  this._writableState.defaultEncoding = encoding
  return this
}
|
||||
|
||||
// If we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, chunk, encoding, callback) {
  const len = state.objectMode ? 1 : chunk.length
  state.length += len

  // stream._write resets state.length
  const ret = state.length < state.highWaterMark
  // We must ensure that previous needDrain will not be reset to false.
  if (!ret) state.needDrain = true
  // Queue while busy, corked, errored, or not yet (async-)constructed.
  if (state.writing || state.corked || state.errored || !state.constructed) {
    state.buffered.push({
      chunk,
      encoding,
      callback
    })
    // Track whether a single _writev of raw Buffers is still possible later
    // (see the batch path in clearBuffer()).
    if (state.allBuffers && encoding !== 'buffer') {
      state.allBuffers = false
    }
    if (state.allNoop && callback !== nop) {
      state.allNoop = false
    }
  } else {
    state.writelen = len
    state.writecb = callback
    state.writing = true
    // `sync` is observed by onwrite() to defer callbacks via nextTick when
    // _write() completes synchronously.
    state.sync = true
    stream._write(chunk, encoding, state.onwrite)
    state.sync = false
  }

  // Return false if errored or destroyed in order to break
  // any synchronous while(stream.write(data)) loops.
  return ret && !state.errored && !state.destroyed
}
|
||||
// Dispatch one chunk (or, when `writev` is true, a batch of chunks) to the
// user's _write/_writev implementation, updating the in-flight bookkeeping.
// Short-circuits with an error if the stream was destroyed meanwhile.
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
  state.writelen = len
  state.writecb = cb
  state.writing = true
  state.sync = true
  if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'))
  else if (writev) stream._writev(chunk, state.onwrite)
  else stream._write(chunk, encoding, state.onwrite)
  state.sync = false
}
|
||||
// Error path of onwrite(): deliver the error to the failed write's own
// callback first, fail any still-buffered writes, then surface the error
// on the stream itself.
function onwriteError(stream, state, er, cb) {
  --state.pendingcb
  cb(er)
  // Ensure callbacks are invoked even when autoDestroy is
  // not enabled. Passing `er` here doesn't make sense since
  // it's related to one specific write, not to the buffered
  // writes.
  errorBuffer(state)
  // This can emit error, but error must always follow cb.
  errorOrDestroy(stream, er)
}
|
||||
// Completion callback handed to _write()/_writev() (pre-bound to the stream
// in WritableState). Updates bookkeeping and routes to the error or success
// path while preserving the "callbacks never fire before write() returns"
// ordering guarantee.
function onwrite(stream, er) {
  const state = stream._writableState
  const sync = state.sync
  const cb = state.writecb
  if (typeof cb !== 'function') {
    // onwrite() was invoked more than once for a single write.
    errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK())
    return
  }
  state.writing = false
  state.writecb = null
  state.length -= state.writelen
  state.writelen = 0
  if (er) {
    // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
    er.stack // eslint-disable-line no-unused-expressions

    if (!state.errored) {
      state.errored = er
    }

    // In case of duplex streams we need to notify the readable side of the
    // error.
    if (stream._readableState && !stream._readableState.errored) {
      stream._readableState.errored = er
    }
    if (sync) {
      // _write() completed synchronously: defer so the error callback never
      // runs before write() has returned to the caller.
      process.nextTick(onwriteError, stream, state, er, cb)
    } else {
      onwriteError(stream, state, er, cb)
    }
  } else {
    if (state.buffered.length > state.bufferedIndex) {
      clearBuffer(stream, state)
    }
    if (sync) {
      // It is a common case that the callback passed to .write() is always
      // the same. In that case, we do not schedule a new nextTick(), but
      // rather just increase a counter, to improve performance and avoid
      // memory allocations.
      if (state.afterWriteTickInfo !== null && state.afterWriteTickInfo.cb === cb) {
        state.afterWriteTickInfo.count++
      } else {
        state.afterWriteTickInfo = {
          count: 1,
          cb,
          stream,
          state
        }
        process.nextTick(afterWriteTick, state.afterWriteTickInfo)
      }
    } else {
      afterWrite(stream, state, 1, cb)
    }
  }
}
|
||||
// nextTick trampoline for coalesced synchronous writes: unpack the shared
// tick-info record (see onwrite()), clear it so a new batch can start, and
// forward to afterWrite().
function afterWriteTick(info) {
  const { stream, state, count, cb } = info
  state.afterWriteTickInfo = null
  return afterWrite(stream, state, count, cb)
}
|
||||
// Post-write bookkeeping: emit 'drain' when the buffer has fully emptied,
// invoke `count` pending user callbacks, and re-check whether the stream
// can finish.
function afterWrite(stream, state, count, cb) {
  const needDrain = !state.ending && !stream.destroyed && state.length === 0 && state.needDrain
  if (needDrain) {
    state.needDrain = false
    stream.emit('drain')
  }
  // `cb` may be shared by several coalesced synchronous writes (see the
  // afterWriteTickInfo counter in onwrite()).
  while (count-- > 0) {
    state.pendingcb--
    cb()
  }
  if (state.destroyed) {
    errorBuffer(state)
  }
  finishMaybe(stream, state)
}
|
||||
|
||||
// Fail every still-buffered write, and every pending end() callback, with
// either the stored stream error or a 'write after destroy' error; then
// reset the queue. No-op while a write is still in flight (onwrite() will
// get here again).
function errorBuffer(state) {
  if (state.writing) {
    return
  }
  const { buffered } = state
  for (let n = state.bufferedIndex; n < buffered.length; ++n) {
    const { chunk, callback } = buffered[n]
    state.length -= state.objectMode ? 1 : chunk.length
    const reason = state.errored != null ? state.errored : new ERR_STREAM_DESTROYED('write')
    callback(reason)
  }
  const pendingFinish = state[kOnFinished].splice(0)
  for (let i = 0; i < pendingFinish.length; i++) {
    const reason = state.errored != null ? state.errored : new ERR_STREAM_DESTROYED('end')
    pendingFinish[i](reason)
  }
  resetBuffer(state)
}
|
||||
|
||||
// If there's something in the buffer waiting, then process it.
function clearBuffer(stream, state) {
  // Re-entrancy and lifecycle guards: flushing is a no-op while corked,
  // already flushing, destroyed, or not yet constructed.
  if (state.corked || state.bufferProcessing || state.destroyed || !state.constructed) {
    return
  }
  const { buffered, bufferedIndex, objectMode } = state
  const bufferedLength = buffered.length - bufferedIndex
  if (!bufferedLength) {
    return
  }
  let i = bufferedIndex
  state.bufferProcessing = true
  if (bufferedLength > 1 && stream._writev) {
    // Batch path: collapse all pending chunks into a single _writev() call;
    // only one shared pending callback remains.
    state.pendingcb -= bufferedLength - 1
    const callback = state.allNoop
      ? nop
      : (err) => {
          for (let n = i; n < buffered.length; ++n) {
            buffered[n].callback(err)
          }
        }
    // Make a copy of `buffered` if it's going to be used by `callback` above,
    // since `doWrite` will mutate the array.
    const chunks = state.allNoop && i === 0 ? buffered : ArrayPrototypeSlice(buffered, i)
    chunks.allBuffers = state.allBuffers
    doWrite(stream, state, true, state.length, chunks, '', callback)
    resetBuffer(state)
  } else {
    // One-at-a-time path: keep dispatching until a write goes async.
    do {
      const { chunk, encoding, callback } = buffered[i]
      buffered[i++] = null
      const len = objectMode ? 1 : chunk.length
      doWrite(stream, state, false, len, chunk, encoding, callback)
    } while (i < buffered.length && !state.writing)
    if (i === buffered.length) {
      resetBuffer(state)
    } else if (i > 256) {
      // Compact the array occasionally instead of shifting on every write.
      buffered.splice(0, i)
      state.bufferedIndex = 0
    } else {
      state.bufferedIndex = i
    }
  }
  state.bufferProcessing = false
}
|
||||
// Default _write(): delegate a single chunk to _writev() when the subclass
// provided one; otherwise the subclass implemented neither hook and we must
// bail out.
Writable.prototype._write = function (chunk, encoding, cb) {
  if (!this._writev) {
    throw new ERR_METHOD_NOT_IMPLEMENTED('_write()')
  }
  this._writev([{ chunk, encoding }], cb)
}

// No batched-write support unless the subclass opts in.
Writable.prototype._writev = null
|
||||
// Signal that no more data will be written. Optionally writes one final
// chunk, fully uncorks, and arranges for `cb` to run once the stream has
// finished (or immediately on error). Returns `this` for chaining.
Writable.prototype.end = function (chunk, encoding, cb) {
  const state = this._writableState
  // end(cb) / end(chunk, cb) argument shuffling.
  if (typeof chunk === 'function') {
    cb = chunk
    chunk = null
    encoding = null
  } else if (typeof encoding === 'function') {
    cb = encoding
    encoding = null
  }
  let err
  if (chunk !== null && chunk !== undefined) {
    // No per-write callback here; _write() may return an Error instead.
    const ret = _write(this, chunk, encoding)
    if (ret instanceof Error) {
      err = ret
    }
  }

  // .end() fully uncorks.
  if (state.corked) {
    state.corked = 1
    this.uncork()
  }
  if (err) {
    // Do nothing...
  } else if (!state.errored && !state.ending) {
    // This is forgiving in terms of unnecessary calls to end() and can hide
    // logic errors. However, usually such errors are harmless and causing a
    // hard error can be disproportionately destructive. It is not always
    // trivial for the user to determine whether end() needs to be called
    // or not.

    state.ending = true
    finishMaybe(this, state, true)
    state.ended = true
  } else if (state.finished) {
    err = new ERR_STREAM_ALREADY_FINISHED('end')
  } else if (state.destroyed) {
    err = new ERR_STREAM_DESTROYED('end')
  }
  if (typeof cb === 'function') {
    if (err || state.finished) {
      // Callbacks are always invoked asynchronously.
      process.nextTick(cb, err)
    } else {
      // Deferred until 'finish' (or failure) — see finish()/errorBuffer().
      state[kOnFinished].push(cb)
    }
  }
  return this
}
|
||||
// Pure predicate: 'finish' may fire only once the stream has been ended,
// has fully drained (no bytes accounted, nothing buffered, no write in
// flight), and nothing terminal (error/destroy/close) has happened.
function needFinish(state) {
  return (
    state.ending &&
    state.constructed &&
    !state.destroyed &&
    !state.errored &&
    !state.finished &&
    !state.writing &&
    !state.errorEmitted &&
    !state.closeEmitted &&
    state.length === 0 &&
    state.buffered.length === 0
  )
}
|
||||
// Invoke the user's _final() hook exactly once, guarding against double
// invocation of its callback, synchronous completion, and thrown errors.
function callFinal(stream, state) {
  let called = false
  function onFinish(err) {
    if (called) {
      // _final()'s callback ran twice.
      errorOrDestroy(stream, err !== null && err !== undefined ? err : ERR_MULTIPLE_CALLBACK())
      return
    }
    called = true
    state.pendingcb--
    if (err) {
      // Fail every pending end() callback with the _final() error.
      const onfinishCallbacks = state[kOnFinished].splice(0)
      for (let i = 0; i < onfinishCallbacks.length; i++) {
        onfinishCallbacks[i](err)
      }
      errorOrDestroy(stream, err, state.sync)
    } else if (needFinish(state)) {
      state.prefinished = true
      stream.emit('prefinish')
      // Backwards compat. Don't check state.sync here.
      // Some streams assume 'finish' will be emitted
      // asynchronously relative to _final callback.
      state.pendingcb++
      process.nextTick(finish, stream, state)
    }
  }
  state.sync = true
  state.pendingcb++
  try {
    stream._final(onFinish)
  } catch (err) {
    onFinish(err)
  }
  state.sync = false
}
|
||||
// Run the user's _final() hook (at most once per stream) and/or emit
// 'prefinish'. Idempotent: bails out if either has already happened.
function prefinish(stream, state) {
  if (state.prefinished || state.finalCalled) {
    return
  }
  if (typeof stream._final === 'function' && !state.destroyed) {
    state.finalCalled = true
    callFinal(stream, state)
  } else {
    state.prefinished = true
    stream.emit('prefinish')
  }
}
|
||||
// Emit 'finish' if the stream has fully drained and ended. When `sync` is
// true the emission is deferred a tick so 'finish' never fires
// synchronously from within end().
function finishMaybe(stream, state, sync) {
  if (needFinish(state)) {
    prefinish(stream, state)
    // prefinish() may have scheduled _final(); only proceed once no
    // callbacks remain outstanding.
    if (state.pendingcb === 0) {
      if (sync) {
        state.pendingcb++
        process.nextTick(
          (stream, state) => {
            // State may have changed during the tick; re-check.
            if (needFinish(state)) {
              finish(stream, state)
            } else {
              state.pendingcb--
            }
          },
          stream,
          state
        )
      } else if (needFinish(state)) {
        state.pendingcb++
        finish(stream, state)
      }
    }
  }
}
|
||||
// Emit 'finish', run the queued end() callbacks, and auto-destroy when
// appropriate (for duplexes, only once the readable side is done too).
function finish(stream, state) {
  state.pendingcb--
  state.finished = true
  const onfinishCallbacks = state[kOnFinished].splice(0)
  for (let i = 0; i < onfinishCallbacks.length; i++) {
    onfinishCallbacks[i]()
  }
  stream.emit('finish')
  if (state.autoDestroy) {
    // In case of duplex streams we need a way to detect
    // if the readable side is ready for autoDestroy as well.
    const rState = stream._readableState
    const autoDestroy =
      !rState ||
      (rState.autoDestroy &&
        // We don't expect the readable to ever 'end'
        // if readable is explicitly set to false.
        (rState.endEmitted || rState.readable === false))
    if (autoDestroy) {
      stream.destroy()
    }
  }
}
|
||||
// Public accessors mirroring the internal WritableState. Each getter
// tolerates a missing `_writableState` (compat with uninitialized or
// duck-typed instances); `destroyed` and `writable` keep legacy setters.
ObjectDefineProperties(Writable.prototype, {
  closed: {
    __proto__: null,
    get() {
      return this._writableState ? this._writableState.closed : false
    }
  },
  destroyed: {
    __proto__: null,
    get() {
      return this._writableState ? this._writableState.destroyed : false
    },
    set(value) {
      // Backward compatibility, the user is explicitly managing destroyed.
      if (this._writableState) {
        this._writableState.destroyed = value
      }
    }
  },
  writable: {
    __proto__: null,
    get() {
      const w = this._writableState
      // w.writable === false means that this is part of a Duplex stream
      // where the writable side was disabled upon construction.
      // Compat. The user might manually disable writable side through
      // deprecated setter.
      return !!w && w.writable !== false && !w.destroyed && !w.errored && !w.ending && !w.ended
    },
    set(val) {
      // Backwards compatible.
      if (this._writableState) {
        this._writableState.writable = !!val
      }
    }
  },
  writableFinished: {
    __proto__: null,
    get() {
      return this._writableState ? this._writableState.finished : false
    }
  },
  writableObjectMode: {
    __proto__: null,
    get() {
      return this._writableState ? this._writableState.objectMode : false
    }
  },
  writableBuffer: {
    __proto__: null,
    get() {
      return this._writableState && this._writableState.getBuffer()
    }
  },
  writableEnded: {
    __proto__: null,
    get() {
      return this._writableState ? this._writableState.ending : false
    }
  },
  writableNeedDrain: {
    __proto__: null,
    get() {
      const wState = this._writableState
      if (!wState) return false
      return !wState.destroyed && !wState.ending && wState.needDrain
    }
  },
  writableHighWaterMark: {
    __proto__: null,
    get() {
      return this._writableState && this._writableState.highWaterMark
    }
  },
  writableCorked: {
    __proto__: null,
    get() {
      return this._writableState ? this._writableState.corked : 0
    }
  },
  writableLength: {
    __proto__: null,
    get() {
      return this._writableState && this._writableState.length
    }
  },
  errored: {
    __proto__: null,
    enumerable: false,
    get() {
      return this._writableState ? this._writableState.errored : null
    }
  },
  writableAborted: {
    __proto__: null,
    enumerable: false,
    get: function () {
      // Aborted = the writable side errored or was destroyed before it
      // could finish (and was not disabled at construction).
      return !!(
        this._writableState.writable !== false &&
        (this._writableState.destroyed || this._writableState.errored) &&
        !this._writableState.finished
      )
    }
  }
})
|
||||
const destroy = destroyImpl.destroy
// destroy() override: before delegating to the shared destroy machinery,
// schedule failure of any still-buffered writes and pending end() callbacks
// (asynchronously, to keep callback ordering guarantees).
Writable.prototype.destroy = function (err, cb) {
  const state = this._writableState

  // Invoke pending callbacks.
  if (!state.destroyed && (state.bufferedIndex < state.buffered.length || state[kOnFinished].length)) {
    process.nextTick(errorBuffer, state)
  }
  destroy.call(this, err, cb)
  return this
}
|
||||
Writable.prototype._undestroy = destroyImpl.undestroy

// Default _destroy(): nothing to clean up; just forward the error.
Writable.prototype._destroy = function (err, cb) {
  cb(err)
}

// A rejected promise returned from an 'error'/event listener destroys the
// stream with that rejection reason (EventEmitter captureRejections).
Writable.prototype[EE.captureRejectionSymbol] = function (err) {
  this.destroy(err)
}
|
||||
let webStreamsAdapters

// Lazy to avoid circular references
function lazyWebStreams() {
  // NOTE(review): this initializes to a plain empty object, so the adapter
  // methods consumed by fromWeb()/toWeb() below do not exist on it — those
  // calls will throw a TypeError here. Confirm whether web-stream interop
  // is intentionally unsupported in this build.
  if (webStreamsAdapters === undefined) webStreamsAdapters = {}
  return webStreamsAdapters
}
|
||||
// Wrap a WHATWG WritableStream as a stream.Writable.
// NOTE(review): lazyWebStreams() above returns an empty object in this
// build, so this call path throws — verify before relying on it.
Writable.fromWeb = function (writableStream, options) {
  return lazyWebStreams().newStreamWritableFromWritableStream(writableStream, options)
}

// Wrap a stream.Writable as a WHATWG WritableStream (same caveat as above).
Writable.toWeb = function (streamWritable) {
  return lazyWebStreams().newWritableStreamFromStreamWritable(streamWritable)
}
|
530
node_modules/readable-stream/lib/internal/validators.js
generated
vendored
Normal file
530
node_modules/readable-stream/lib/internal/validators.js
generated
vendored
Normal file
|
@ -0,0 +1,530 @@
|
|||
/* eslint jsdoc/require-jsdoc: "error" */
|
||||
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
ArrayIsArray,
|
||||
ArrayPrototypeIncludes,
|
||||
ArrayPrototypeJoin,
|
||||
ArrayPrototypeMap,
|
||||
NumberIsInteger,
|
||||
NumberIsNaN,
|
||||
NumberMAX_SAFE_INTEGER,
|
||||
NumberMIN_SAFE_INTEGER,
|
||||
NumberParseInt,
|
||||
ObjectPrototypeHasOwnProperty,
|
||||
RegExpPrototypeExec,
|
||||
String,
|
||||
StringPrototypeToUpperCase,
|
||||
StringPrototypeTrim
|
||||
} = require('../ours/primordials')
|
||||
const {
|
||||
hideStackFrames,
|
||||
codes: { ERR_SOCKET_BAD_PORT, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_OUT_OF_RANGE, ERR_UNKNOWN_SIGNAL }
|
||||
} = require('../ours/errors')
|
||||
const { normalizeEncoding } = require('../ours/util')
|
||||
const { isAsyncFunction, isArrayBufferView } = require('../ours/util').types
|
||||
const signals = {}
|
||||
|
||||
/**
 * Tell whether `value` is exactly representable as a signed 32-bit integer.
 * @param {*} value
 * @returns {boolean}
 */
function isInt32(value) {
  // `| 0` truncates to int32; only values already in range survive intact.
  const truncated = value | 0
  return truncated === value
}
|
||||
|
||||
/**
 * Tell whether `value` is exactly representable as an unsigned 32-bit integer.
 * @param {*} value
 * @returns {boolean}
 */
function isUint32(value) {
  // `>>> 0` converts to uint32; only values already in range survive intact.
  const truncated = value >>> 0
  return truncated === value
}
|
||||
const octalReg = /^[0-7]+$/
const modeDesc = 'must be a 32-bit unsigned integer or an octal string'

/**
 * Parse and validate values that will be converted into mode_t (the S_*
 * constants). Only valid numbers and octal strings are allowed. They could be
 * converted to 32-bit unsigned integers or non-negative signed integers in the
 * C++ land, but any value higher than 0o777 will result in platform-specific
 * behaviors.
 * @param {*} value Values to be validated
 * @param {string} name Name of the argument
 * @param {number} [def] If specified, will be returned for invalid values
 * @returns {number}
 */
function parseFileMode(value, name, def) {
  let mode = value === undefined ? def : value
  if (typeof mode === 'string') {
    // Strings are accepted only as octal digit sequences ("777", "0644", ...).
    if (RegExpPrototypeExec(octalReg, mode) === null) {
      throw new ERR_INVALID_ARG_VALUE(name, mode, modeDesc)
    }
    mode = NumberParseInt(mode, 8)
  }
  validateUint32(mode, name)
  return mode
}
|
||||
|
||||
/**
 * @callback validateInteger
 * @param {*} value
 * @param {string} name
 * @param {number} [min]
 * @param {number} [max]
 * @returns {asserts value is number}
 */

/** @type {validateInteger} */
const validateInteger = hideStackFrames((value, name, min = NumberMIN_SAFE_INTEGER, max = NumberMAX_SAFE_INTEGER) => {
  // Check type first, then integrality, then range, so the most specific
  // error is raised.
  if (typeof value !== 'number') {
    throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
  }
  if (!NumberIsInteger(value)) {
    throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
  }
  if (value < min || value > max) {
    throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
  }
})
|
||||
|
||||
/**
 * @callback validateInt32
 * @param {*} value
 * @param {string} name
 * @param {number} [min]
 * @param {number} [max]
 * @returns {asserts value is number}
 */

/** @type {validateInt32} */
const validateInt32 = hideStackFrames((value, name, min = -2147483648, max = 2147483647) => {
  // The defaults for min and max correspond to the limits of 32-bit integers.
  if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
  if (!NumberIsInteger(value)) throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
  if (value < min || value > max) throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
})
|
||||
|
||||
/**
 * @callback validateUint32
 * @param {*} value
 * @param {string} name
 * @param {number|boolean} [positive=false]
 * @returns {asserts value is number}
 */

/** @type {validateUint32} */
const validateUint32 = hideStackFrames((value, name, positive = false) => {
  if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
  if (!NumberIsInteger(value)) throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
  // 2 ** 32 === 4294967296; `positive` additionally excludes zero.
  const min = positive ? 1 : 0
  const max = 4294967295
  if (value < min || value > max) throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
})
|
||||
|
||||
/**
 * @callback validateString
 * @param {*} value
 * @param {string} name
 * @returns {asserts value is string}
 */

/** @type {validateString} */
function validateString(value, name) {
  if (typeof value === 'string') return
  throw new ERR_INVALID_ARG_TYPE(name, 'string', value)
}
|
||||
|
||||
/**
 * @callback validateNumber
 * @param {*} value
 * @param {string} name
 * @param {number} [min]
 * @param {number} [max]
 * @returns {asserts value is number}
 */

/** @type {validateNumber} */
function validateNumber(value, name, min = undefined, max) {
  if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
  const belowMin = min != null && value < min
  const aboveMax = max != null && value > max
  // NaN fails every bound comparison, so test it explicitly when any bound
  // was supplied.
  const nanWithBound = (min != null || max != null) && NumberIsNaN(value)
  if (belowMin || aboveMax || nanWithBound) {
    const lower = min != null ? `>= ${min}` : ''
    const joiner = min != null && max != null ? ' && ' : ''
    const upper = max != null ? `<= ${max}` : ''
    throw new ERR_OUT_OF_RANGE(name, `${lower}${joiner}${upper}`, value)
  }
}
|
||||
|
||||
/**
 * @callback validateOneOf
 * @template T
 * @param {T} value
 * @param {string} name
 * @param {T[]} oneOf
 */

/** @type {validateOneOf} */
const validateOneOf = hideStackFrames((value, name, oneOf) => {
  if (ArrayPrototypeIncludes(oneOf, value)) return
  // Render the allowed set with strings quoted for a readable message.
  const rendered = ArrayPrototypeMap(oneOf, (v) => (typeof v === 'string' ? `'${v}'` : String(v)))
  const reason = 'must be one of: ' + ArrayPrototypeJoin(rendered, ', ')
  throw new ERR_INVALID_ARG_VALUE(name, value, reason)
})
|
||||
|
||||
/**
 * @callback validateBoolean
 * @param {*} value
 * @param {string} name
 * @returns {asserts value is boolean}
 */

/** @type {validateBoolean} */
function validateBoolean(value, name) {
  if (typeof value === 'boolean') return
  throw new ERR_INVALID_ARG_TYPE(name, 'boolean', value)
}
|
||||
|
||||
/**
 * Read an own property from `options`, falling back to `defaultValue` when
 * `options` is nullish or does not own the key (inherited keys are ignored).
 * @param {any} options
 * @param {string} key
 * @param {boolean} defaultValue
 * @returns {boolean}
 */
function getOwnPropertyValueOrDefault(options, key, defaultValue) {
  if (options == null) return defaultValue
  if (!ObjectPrototypeHasOwnProperty(options, key)) return defaultValue
  return options[key]
}
|
||||
|
||||
/**
 * @callback validateObject
 * @param {*} value
 * @param {string} name
 * @param {{
 *   allowArray?: boolean,
 *   allowFunction?: boolean,
 *   nullable?: boolean
 * }} [options]
 */

/** @type {validateObject} */
const validateObject = hideStackFrames((value, name, options = null) => {
  const allowArray = getOwnPropertyValueOrDefault(options, 'allowArray', false)
  const allowFunction = getOwnPropertyValueOrDefault(options, 'allowFunction', false)
  const nullable = getOwnPropertyValueOrDefault(options, 'nullable', false)
  const badNull = !nullable && value === null
  const badArray = !allowArray && ArrayIsArray(value)
  const badType = typeof value !== 'object' && (!allowFunction || typeof value !== 'function')
  if (badNull || badArray || badType) {
    throw new ERR_INVALID_ARG_TYPE(name, 'Object', value)
  }
})
|
||||
|
||||
/**
 * @callback validateDictionary - We are using the Web IDL Standard definition
 *                                of "dictionary" here, which means any value
 *                                whose Type is either Undefined, Null, or
 *                                Object (which includes functions).
 * @param {*} value
 * @param {string} name
 * @see https://webidl.spec.whatwg.org/#es-dictionary
 * @see https://tc39.es/ecma262/#table-typeof-operator-results
 */

/** @type {validateDictionary} */
const validateDictionary = hideStackFrames((value, name) => {
  if (value == null) return
  const valueType = typeof value
  if (valueType === 'object' || valueType === 'function') return
  throw new ERR_INVALID_ARG_TYPE(name, 'a dictionary', value)
})
|
||||
|
||||
/**
 * @callback validateArray
 * @param {*} value
 * @param {string} name
 * @param {number} [minLength]
 * @returns {asserts value is any[]}
 */

/** @type {validateArray} */
const validateArray = hideStackFrames((value, name, minLength = 0) => {
  if (!ArrayIsArray(value)) throw new ERR_INVALID_ARG_TYPE(name, 'Array', value)
  if (value.length < minLength) {
    throw new ERR_INVALID_ARG_VALUE(name, value, `must be longer than ${minLength}`)
  }
})
|
||||
|
||||
/**
 * @callback validateStringArray
 * @param {*} value
 * @param {string} name
 * @returns {asserts value is string[]}
 */

/** @type {validateStringArray} */
function validateStringArray(value, name) {
  validateArray(value, name)
  // Validate each element, reporting the failing index in the name.
  for (let i = 0; i < value.length; ++i) {
    const entry = value[i]
    validateString(entry, `${name}[${i}]`)
  }
}
|
||||
|
||||
/**
 * @callback validateBooleanArray
 * @param {*} value
 * @param {string} name
 * @returns {asserts value is boolean[]}
 */

/** @type {validateBooleanArray} */
function validateBooleanArray(value, name) {
  validateArray(value, name)
  // Validate each element, reporting the failing index in the name.
  for (let i = 0; i < value.length; ++i) {
    const entry = value[i]
    validateBoolean(entry, `${name}[${i}]`)
  }
}
|
||||
|
||||
/**
 * @callback validateAbortSignalArray
 * @param {*} value
 * @param {string} name
 * @returns {asserts value is AbortSignal[]}
 */

/** @type {validateAbortSignalArray} */
function validateAbortSignalArray(value, name) {
  validateArray(value, name)
  for (let i = 0; i < value.length; i++) {
    const indexedName = `${name}[${i}]`
    const signal = value[i]
    // Reject null/undefined up front so the per-signal check sees a value.
    if (signal == null) {
      throw new ERR_INVALID_ARG_TYPE(indexedName, 'AbortSignal', signal)
    }
    validateAbortSignal(signal, indexedName)
  }
}
|
||||
|
||||
/**
|
||||
* @param {*} signal
|
||||
* @param {string} [name='signal']
|
||||
* @returns {asserts signal is keyof signals}
|
||||
*/
|
||||
/**
 * Asserts that `signal` names a known OS signal (a key of `signals`).
 * Gives a targeted hint when only the capitalization is wrong.
 * @param {*} signal
 * @param {string} [name='signal']
 * @returns {asserts signal is keyof signals}
 */
function validateSignalName(signal, name = 'signal') {
  validateString(signal, name)
  if (signals[signal] !== undefined) return
  // Known signal in the wrong case: point the caller at the fix.
  if (signals[StringPrototypeToUpperCase(signal)] !== undefined) {
    throw new ERR_UNKNOWN_SIGNAL(signal + ' (signals must use all capital letters)')
  }
  throw new ERR_UNKNOWN_SIGNAL(signal)
}
|
||||
|
||||
/**
|
||||
* @callback validateBuffer
|
||||
* @param {*} buffer
|
||||
* @param {string} [name='buffer']
|
||||
* @returns {asserts buffer is ArrayBufferView}
|
||||
*/
|
||||
|
||||
/** @type {validateBuffer} */
|
||||
/**
 * Asserts that `buffer` is an ArrayBufferView (Buffer, TypedArray, DataView).
 * @type {validateBuffer}
 */
const validateBuffer = hideStackFrames((buffer, name = 'buffer') => {
  if (isArrayBufferView(buffer)) return
  throw new ERR_INVALID_ARG_TYPE(name, ['Buffer', 'TypedArray', 'DataView'], buffer)
})
|
||||
|
||||
/**
|
||||
* @param {string} data
|
||||
* @param {string} encoding
|
||||
*/
|
||||
/**
 * Asserts that `encoding` is usable for `data`. The only constraint checked
 * here is that hex-encoded data must have an even length.
 * @param {string} data
 * @param {string} encoding
 */
function validateEncoding(data, encoding) {
  const length = data.length
  if (normalizeEncoding(encoding) === 'hex' && length % 2 !== 0) {
    throw new ERR_INVALID_ARG_VALUE('encoding', encoding, `is invalid for data of length ${length}`)
  }
}
|
||||
|
||||
/**
|
||||
* Check that the port number is not NaN when coerced to a number,
|
||||
* is an integer and that it falls within the legal range of port numbers.
|
||||
* @param {*} port
|
||||
* @param {string} [name='Port']
|
||||
* @param {boolean} [allowZero=true]
|
||||
* @returns {number}
|
||||
*/
|
||||
/**
 * Check that the port number is not NaN when coerced to a number,
 * is an integer and that it falls within the legal range of port numbers
 * (0-65535). Returns the port as an integer.
 * @param {*} port
 * @param {string} [name='Port']
 * @param {boolean} [allowZero=true]
 * @returns {number}
 */
function validatePort(port, name = 'Port', allowZero = true) {
  const isAcceptedType = typeof port === 'number' || typeof port === 'string'
  const isBlankString = typeof port === 'string' && StringPrototypeTrim(port).length === 0
  // `>>>` binds tighter than `!==`, so `+port !== +port >>> 0` compares the
  // numeric value with its unsigned-32-bit truncation. It fails for NaN,
  // negative values and non-integers alike.
  const isBadPort = isBlankString || !isAcceptedType || +port !== +port >>> 0 || port > 0xffff || (port === 0 && !allowZero)
  if (isBadPort) {
    throw new ERR_SOCKET_BAD_PORT(name, port, allowZero)
  }
  return port | 0
}
|
||||
|
||||
/**
|
||||
* @callback validateAbortSignal
|
||||
* @param {*} signal
|
||||
* @param {string} name
|
||||
*/
|
||||
|
||||
/** @type {validateAbortSignal} */
|
||||
/**
 * Asserts that `signal`, when present, duck-types as an AbortSignal
 * (non-null object with an `aborted` property). `undefined` is accepted
 * to mean "no signal supplied".
 * @type {validateAbortSignal}
 */
const validateAbortSignal = hideStackFrames((signal, name) => {
  if (signal === undefined) return
  const looksLikeSignal = signal !== null && typeof signal === 'object' && 'aborted' in signal
  if (!looksLikeSignal) {
    throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
  }
})
|
||||
|
||||
/**
|
||||
* @callback validateFunction
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is Function}
|
||||
*/
|
||||
|
||||
/** @type {validateFunction} */
|
||||
/**
 * Asserts that `value` is callable.
 * @type {validateFunction}
 */
const validateFunction = hideStackFrames((value, name) => {
  if (typeof value === 'function') return
  throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
})
|
||||
|
||||
/**
|
||||
* @callback validatePlainFunction
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is Function}
|
||||
*/
|
||||
|
||||
/** @type {validatePlainFunction} */
|
||||
/**
 * Asserts that `value` is a plain (non-async) function.
 * @type {validatePlainFunction}
 */
const validatePlainFunction = hideStackFrames((value, name) => {
  if (typeof value === 'function' && !isAsyncFunction(value)) return
  throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
})
|
||||
|
||||
/**
|
||||
* @callback validateUndefined
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is undefined}
|
||||
*/
|
||||
|
||||
/** @type {validateUndefined} */
|
||||
/**
 * Asserts that `value` is strictly `undefined`.
 * @type {validateUndefined}
 */
const validateUndefined = hideStackFrames((value, name) => {
  if (value === undefined) return
  throw new ERR_INVALID_ARG_TYPE(name, 'undefined', value)
})
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @param {T} value
|
||||
* @param {string} name
|
||||
* @param {T[]} union
|
||||
*/
|
||||
/**
 * Asserts that `value` is one of the members of `union`
 * (strict-equality membership).
 * @template T
 * @param {T} value
 * @param {string} name
 * @param {T[]} union
 */
function validateUnion(value, name, union) {
  if (ArrayPrototypeIncludes(union, value)) return
  throw new ERR_INVALID_ARG_TYPE(name, `('${ArrayPrototypeJoin(union, '|')}')`, value)
}
|
||||
|
||||
/*
|
||||
The rules for the Link header field are described here:
|
||||
https://www.rfc-editor.org/rfc/rfc8288.html#section-3
|
||||
|
||||
This regex validates any string surrounded by angle brackets
|
||||
(not necessarily a valid URI reference) followed by zero or more
|
||||
link-params separated by semicolons.
|
||||
*/
|
||||
const linkValueRegExp = /^(?:<[^>]*>)(?:\s*;\s*[^;"\s]+(?:=(")?[^;"\s]*\1)?)*$/
|
||||
|
||||
/**
|
||||
* @param {any} value
|
||||
* @param {string} name
|
||||
*/
|
||||
/**
 * Asserts that `value` matches the RFC 8288 link-value shape checked by
 * `linkValueRegExp` (an angle-bracketed target plus optional link-params).
 * @param {any} value
 * @param {string} name
 */
function validateLinkHeaderFormat(value, name) {
  const matches = typeof value !== 'undefined' && RegExpPrototypeExec(linkValueRegExp, value)
  if (matches) return
  throw new ERR_INVALID_ARG_VALUE(
    name,
    value,
    'must be an array or string of format "</styles.css>; rel=preload; as=style"'
  )
}
|
||||
|
||||
/**
|
||||
* @param {any} hints
|
||||
* @return {string}
|
||||
*/
|
||||
/**
 * Validates and serializes early-hints link values. A string is validated
 * and returned as-is; an array is validated element-by-element and joined
 * with ", "; anything else throws.
 * @param {any} hints
 * @return {string}
 */
function validateLinkHeaderValue(hints) {
  if (typeof hints === 'string') {
    validateLinkHeaderFormat(hints, 'hints')
    return hints
  }
  if (ArrayIsArray(hints)) {
    // Validate every entry, then join; an empty array yields ''.
    const parts = []
    for (const link of hints) {
      validateLinkHeaderFormat(link, 'hints')
      parts.push(link)
    }
    return parts.join(', ')
  }
  throw new ERR_INVALID_ARG_VALUE(
    'hints',
    hints,
    'must be an array or string of format "</styles.css>; rel=preload; as=style"'
  )
}
|
||||
// Public surface of the validators module. Entries not visible in this
// excerpt (isInt32, isUint32, parseFileMode, validateBoolean,
// validateDictionary, validateInt32, validateInteger, validateNumber,
// validateObject, validateOneOf, validateString, validateUint32) are
// defined earlier in this file.
module.exports = {
  isInt32,
  isUint32,
  parseFileMode,
  validateArray,
  validateStringArray,
  validateBooleanArray,
  validateAbortSignalArray,
  validateBoolean,
  validateBuffer,
  validateDictionary,
  validateEncoding,
  validateFunction,
  validateInt32,
  validateInteger,
  validateNumber,
  validateObject,
  validateOneOf,
  validatePlainFunction,
  validatePort,
  validateSignalName,
  validateString,
  validateUint32,
  validateUndefined,
  validateUnion,
  validateAbortSignal,
  validateLinkHeaderValue
}
|
35
node_modules/readable-stream/lib/ours/browser.js
generated
vendored
Normal file
35
node_modules/readable-stream/lib/ours/browser.js
generated
vendored
Normal file
|
@ -0,0 +1,35 @@
|
|||
'use strict'

// Browser entry point: always re-exports the bundled stream implementation
// (../stream) rather than Node's native `stream` module.
const CustomStream = require('../stream')
const promises = require('../stream/promises')
// Capture Readable.destroy at load time (see the double assignment below).
const originalDestroy = CustomStream.Readable.destroy
module.exports = CustomStream.Readable

// Explicit export naming is needed for ESM
module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer
module.exports._isUint8Array = CustomStream._isUint8Array
module.exports.isDisturbed = CustomStream.isDisturbed
module.exports.isErrored = CustomStream.isErrored
module.exports.isReadable = CustomStream.isReadable
module.exports.Readable = CustomStream.Readable
module.exports.Writable = CustomStream.Writable
module.exports.Duplex = CustomStream.Duplex
module.exports.Transform = CustomStream.Transform
module.exports.PassThrough = CustomStream.PassThrough
module.exports.addAbortSignal = CustomStream.addAbortSignal
module.exports.finished = CustomStream.finished
module.exports.destroy = CustomStream.destroy
// NOTE(review): this immediately overwrites the previous line with the copy
// captured before any later patching of Readable.destroy — presumably
// intentional; confirm before removing either assignment.
module.exports.destroy = originalDestroy
module.exports.pipeline = CustomStream.pipeline
module.exports.compose = CustomStream.compose
// Lazily expose the promise-based API as `stream.promises`.
Object.defineProperty(CustomStream, 'promises', {
  configurable: true,
  enumerable: true,
  get() {
    return promises
  }
})
module.exports.Stream = CustomStream.Stream

// Allow default importing
module.exports.default = module.exports
|
341
node_modules/readable-stream/lib/ours/errors.js
generated
vendored
Normal file
341
node_modules/readable-stream/lib/ours/errors.js
generated
vendored
Normal file
|
@ -0,0 +1,341 @@
|
|||
'use strict'

const { format, inspect, AggregateError: CustomAggregateError } = require('./util')

/*
  This file is a reduced and adapted version of the main lib/internal/errors.js file defined at

  https://github.com/nodejs/node/blob/master/lib/internal/errors.js

  Don't try to replace with the original file and keep it up to date (starting from E(...) definitions)
  with the upstream file.
*/

// Prefer the platform AggregateError; fall back to the simplified
// polyfill from ./util.
const AggregateError = globalThis.AggregateError || CustomAggregateError
// Brand attached to every constructor generated by E() below.
const kIsNodeError = Symbol('kIsNodeError')
// Primitive type names accepted by ERR_INVALID_ARG_TYPE.
const kTypes = [
  'string',
  'function',
  'number',
  'object',
  // Accept 'Function' and 'Object' as alternative to the lower cased version.
  'Function',
  'Object',
  'boolean',
  'bigint',
  'symbol'
]
// Matches PascalCase class names such as 'ArrayBuffer'.
const classRegExp = /^([A-Z][a-z0-9]*)+$/
// Prefix used by hideStackFrames() to mark frames for removal.
const nodeInternalPrefix = '__node_internal_'
// Registry of generated error constructors, keyed by error code.
const codes = {}
|
||||
/**
 * Internal invariant check: throws ERR_INTERNAL_ASSERTION when `value`
 * is falsy.
 */
function assert(value, message) {
  if (value) return
  throw new codes.ERR_INTERNAL_ASSERTION(message)
}
|
||||
|
||||
// Only use this for integers! Decimal numbers do not work with this function.
|
||||
// Only use this for integers! Decimal numbers do not work with this function.
// Inserts '_' thousands separators into a stringified integer,
// e.g. '1234567' -> '1_234_567'.
function addNumericalSeparator(val) {
  const start = val[0] === '-' ? 1 : 0
  let end = val.length
  let grouped = ''
  // Walk backwards in chunks of three digits, leaving at least one leading
  // digit (plus any sign) ungrouped.
  while (end >= start + 4) {
    grouped = `_${val.slice(end - 3, end)}${grouped}`
    end -= 3
  }
  return `${val.slice(0, end)}${grouped}`
}
|
||||
/**
 * Builds the message for error code `key`. `msg` is either a builder
 * function (invoked with `args`) or a printf-style format string whose
 * placeholder count must match `args`.
 */
function getMessage(key, msg, args) {
  if (typeof msg === 'function') {
    // Default options do not count.
    assert(
      msg.length <= args.length,
      `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${msg.length}).`
    )
    return msg(...args)
  }
  // Count the %-placeholders in the format string and require an exact match.
  const expectedLength = (msg.match(/%[dfijoOs]/g) || []).length
  assert(
    expectedLength === args.length,
    `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${expectedLength}).`
  )
  return args.length === 0 ? msg : format(msg, ...args)
}
|
||||
/**
 * Defines a new error code: creates an Error subclass of `Base` whose
 * message comes from getMessage(code, message, args), and registers it in
 * the module-level `codes` registry.
 * @param {string} code - e.g. 'ERR_INVALID_ARG_TYPE'
 * @param {string|Function} message - format string or message builder
 * @param {Function} [Base] - base constructor; defaults to Error
 */
function E(code, message, Base) {
  if (!Base) {
    Base = Error
  }
  class NodeError extends Base {
    constructor(...args) {
      super(getMessage(code, message, args))
    }
    toString() {
      return `${this.name} [${code}]: ${this.message}`
    }
  }
  // The definitions below deliberately replace the class's own `toString`
  // with an equivalent non-enumerable one, and make `name` report the base
  // class name (e.g. 'TypeError') instead of 'NodeError'.
  Object.defineProperties(NodeError.prototype, {
    name: {
      value: Base.name,
      writable: true,
      enumerable: false,
      configurable: true
    },
    toString: {
      value() {
        return `${this.name} [${code}]: ${this.message}`
      },
      writable: true,
      enumerable: false,
      configurable: true
    }
  })
  NodeError.prototype.code = code
  // Brand so these errors can be recognized across module copies.
  NodeError.prototype[kIsNodeError] = true
  codes[code] = NodeError
}
|
||||
/**
 * Marks `fn` so stack-trace post-processing can cut traces off at the
 * outermost hidden frame, by renaming it with the internal prefix.
 */
function hideStackFrames(fn) {
  Object.defineProperty(fn, 'name', {
    value: nodeInternalPrefix + fn.name
  })
  return fn
}
|
||||
/**
 * Combines two errors into one. If both are present and distinct, returns
 * an AggregateError carrying both (reusing `outerError` when it already is
 * one); otherwise returns whichever error exists.
 */
function aggregateTwoErrors(innerError, outerError) {
  if (!innerError || !outerError || innerError === outerError) {
    return innerError || outerError
  }
  if (Array.isArray(outerError.errors)) {
    // If `outerError` is already an `AggregateError`.
    outerError.errors.push(innerError)
    return outerError
  }
  const combined = new AggregateError([outerError, innerError], outerError.message)
  combined.code = outerError.code
  return combined
}
|
||||
/**
 * Error raised when an operation is aborted; mirrors the shape of Node's
 * AbortError (`code` 'ABORT_ERR', `name` 'AbortError').
 */
class AbortError extends Error {
  constructor(message = 'The operation was aborted', options = undefined) {
    const optionsAreValid = options === undefined || typeof options === 'object'
    if (!optionsAreValid) {
      throw new codes.ERR_INVALID_ARG_TYPE('options', 'Object', options)
    }
    super(message, options)
    this.name = 'AbortError'
    this.code = 'ABORT_ERR'
  }
}
|
||||
E('ERR_ASSERTION', '%s', Error)
E(
  'ERR_INVALID_ARG_TYPE',
  // Builds messages like:
  //   The "foo" argument must be of type string. Received type number (3)
  (name, expected, actual) => {
    assert(typeof name === 'string', "'name' must be a string")
    if (!Array.isArray(expected)) {
      expected = [expected]
    }
    let msg = 'The '
    if (name.endsWith(' argument')) {
      // For cases like 'first argument'
      msg += `${name} `
    } else {
      msg += `"${name}" ${name.includes('.') ? 'property' : 'argument'} `
    }
    msg += 'must be '

    // Partition the expected entries into primitive type names,
    // class (instance) names, and everything else.
    const types = []
    const instances = []
    const other = []
    for (const value of expected) {
      assert(typeof value === 'string', 'All expected entries have to be of type string')
      if (kTypes.includes(value)) {
        types.push(value.toLowerCase())
      } else if (classRegExp.test(value)) {
        instances.push(value)
      } else {
        assert(value !== 'object', 'The value "object" should be written as "Object"')
        other.push(value)
      }
    }

    // Special handle `object` in case other instances are allowed to outline
    // the differences between each other.
    if (instances.length > 0) {
      const pos = types.indexOf('object')
      if (pos !== -1) {
        // BUG FIX: was `types.splice(types, pos, 1)`, which coerces the array
        // argument to start index 0 and inserts the number 1 instead of
        // removing the 'object' entry at `pos`.
        types.splice(pos, 1)
        instances.push('Object')
      }
    }
    if (types.length > 0) {
      switch (types.length) {
        case 1:
          msg += `of type ${types[0]}`
          break
        case 2:
          msg += `one of type ${types[0]} or ${types[1]}`
          break
        default: {
          const last = types.pop()
          msg += `one of type ${types.join(', ')}, or ${last}`
        }
      }
      if (instances.length > 0 || other.length > 0) {
        msg += ' or '
      }
    }
    if (instances.length > 0) {
      switch (instances.length) {
        case 1:
          msg += `an instance of ${instances[0]}`
          break
        case 2:
          msg += `an instance of ${instances[0]} or ${instances[1]}`
          break
        default: {
          const last = instances.pop()
          msg += `an instance of ${instances.join(', ')}, or ${last}`
        }
      }
      if (other.length > 0) {
        msg += ' or '
      }
    }
    switch (other.length) {
      case 0:
        break
      case 1:
        if (other[0].toLowerCase() !== other[0]) {
          msg += 'an '
        }
        msg += `${other[0]}`
        break
      case 2:
        msg += `one of ${other[0]} or ${other[1]}`
        break
      default: {
        const last = other.pop()
        msg += `one of ${other.join(', ')}, or ${last}`
      }
    }

    // Describe the value that was actually received.
    if (actual == null) {
      msg += `. Received ${actual}`
    } else if (typeof actual === 'function' && actual.name) {
      msg += `. Received function ${actual.name}`
    } else if (typeof actual === 'object') {
      if (actual.constructor?.name) {
        msg += `. Received an instance of ${actual.constructor.name}`
      } else {
        const inspected = inspect(actual, {
          depth: -1
        })
        msg += `. Received ${inspected}`
      }
    } else {
      let inspected = inspect(actual, {
        colors: false
      })
      if (inspected.length > 25) {
        inspected = `${inspected.slice(0, 25)}...`
      }
      msg += `. Received type ${typeof actual} (${inspected})`
    }
    return msg
  },
  TypeError
)
|
||||
E(
  'ERR_INVALID_ARG_VALUE',
  // "The argument 'x' is invalid. Received <inspected value>", with the
  // inspected value capped at 128 characters.
  (name, value, reason = 'is invalid') => {
    let inspected = inspect(value)
    if (inspected.length > 128) {
      inspected = inspected.slice(0, 128) + '...'
    }
    // Dotted names describe object properties rather than call arguments.
    const type = name.includes('.') ? 'property' : 'argument'
    return `The ${type} '${name}' ${reason}. Received ${inspected}`
  },
  TypeError
)
|
||||
E(
  'ERR_INVALID_RETURN_VALUE',
  // Reports what a callback returned, by constructor name when one is
  // available, otherwise by typeof.
  (input, name, value) => {
    const description = value?.constructor?.name
      ? `instance of ${value.constructor.name}`
      : `type ${typeof value}`
    return `Expected ${input} to be returned from the "${name}"` + ` function but got ${description}.`
  },
  TypeError
)
|
||||
E(
  'ERR_MISSING_ARGS',
  // Builds messages like: The "foo" and "bar" arguments must be specified.
  // A nested array argument means "either of these names", joined with 'or'.
  (...args) => {
    assert(args.length > 0, 'At least one arg needs to be specified')
    const len = args.length
    // BUG FIX: the previous version joined ALL names into one string (so
    // `args[0]` indexed a single character) and never initialized `msg`,
    // producing messages starting with "undefined". Wrap each name
    // individually and start `msg` from 'The '.
    const wrap = (a) => `"${a}"`
    const names = args.map((a) => (Array.isArray(a) ? a.map(wrap).join(' or ') : wrap(a)))
    let msg = 'The '
    switch (len) {
      case 1:
        msg += `${names[0]} argument`
        break
      case 2:
        msg += `${names[0]} and ${names[1]} arguments`
        break
      default: {
        const last = names.pop()
        msg += `${names.join(', ')}, and ${last} arguments`
        break
      }
    }
    return `${msg} must be specified`
  },
  TypeError
)
|
||||
E(
  'ERR_OUT_OF_RANGE',
  // "The value of "x" is out of range. It must be <range>. Received <value>"
  // Large integers and bigints get '_' thousands separators for readability.
  (str, range, input) => {
    assert(range, 'Missing "range" argument')
    const describe = () => {
      if (typeof input === 'bigint') {
        // BigInts never satisfy Number.isInteger, so checking them first
        // preserves the original branch selection.
        const overThreshold = input > 2n ** 32n || input < -(2n ** 32n)
        const digits = overThreshold ? addNumericalSeparator(String(input)) : String(input)
        return `${digits}n`
      }
      if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) {
        return addNumericalSeparator(String(input))
      }
      return inspect(input)
    }
    const received = describe()
    return `The value of "${str}" is out of range. It must be ${range}. Received ${received}`
  },
  RangeError
)
|
||||
// Fixed-message stream error codes ('%s' placeholders are filled by
// getMessage/format at construction time).
E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error)
E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error)
E('ERR_STREAM_ALREADY_FINISHED', 'Cannot call %s after a stream was finished', Error)
E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error)
E('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error)
E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError)
E('ERR_STREAM_PREMATURE_CLOSE', 'Premature close', Error)
E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error)
E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event', Error)
E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error)
E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError)
// `codes` holds every constructor registered via E() above.
module.exports = {
  AbortError,
  aggregateTwoErrors: hideStackFrames(aggregateTwoErrors),
  hideStackFrames,
  codes
}
|
65
node_modules/readable-stream/lib/ours/index.js
generated
vendored
Normal file
65
node_modules/readable-stream/lib/ours/index.js
generated
vendored
Normal file
|
@ -0,0 +1,65 @@
|
|||
'use strict'

const Stream = require('stream')
// Opt-out switch: with READABLE_STREAM=disable, re-export Node's native
// stream module instead of the bundled implementation.
if (Stream && process.env.READABLE_STREAM === 'disable') {
  const promises = Stream.promises

  // Explicit export naming is needed for ESM
  // NOTE(review): unlike the else-branch, this branch never reassigns
  // module.exports to Stream.Readable — confirm this asymmetry is intended.
  module.exports._uint8ArrayToBuffer = Stream._uint8ArrayToBuffer
  module.exports._isUint8Array = Stream._isUint8Array
  module.exports.isDisturbed = Stream.isDisturbed
  module.exports.isErrored = Stream.isErrored
  module.exports.isReadable = Stream.isReadable
  module.exports.Readable = Stream.Readable
  module.exports.Writable = Stream.Writable
  module.exports.Duplex = Stream.Duplex
  module.exports.Transform = Stream.Transform
  module.exports.PassThrough = Stream.PassThrough
  module.exports.addAbortSignal = Stream.addAbortSignal
  module.exports.finished = Stream.finished
  module.exports.destroy = Stream.destroy
  module.exports.pipeline = Stream.pipeline
  module.exports.compose = Stream.compose
  // Lazily expose the promise-based API as `stream.promises`.
  Object.defineProperty(Stream, 'promises', {
    configurable: true,
    enumerable: true,
    get() {
      return promises
    }
  })
  module.exports.Stream = Stream.Stream
} else {
  // Default: use the bundled implementation (../stream).
  const CustomStream = require('../stream')
  const promises = require('../stream/promises')
  // Capture Readable.destroy at load time (see the double assignment below).
  const originalDestroy = CustomStream.Readable.destroy
  module.exports = CustomStream.Readable

  // Explicit export naming is needed for ESM
  module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer
  module.exports._isUint8Array = CustomStream._isUint8Array
  module.exports.isDisturbed = CustomStream.isDisturbed
  module.exports.isErrored = CustomStream.isErrored
  module.exports.isReadable = CustomStream.isReadable
  module.exports.Readable = CustomStream.Readable
  module.exports.Writable = CustomStream.Writable
  module.exports.Duplex = CustomStream.Duplex
  module.exports.Transform = CustomStream.Transform
  module.exports.PassThrough = CustomStream.PassThrough
  module.exports.addAbortSignal = CustomStream.addAbortSignal
  module.exports.finished = CustomStream.finished
  module.exports.destroy = CustomStream.destroy
  // NOTE(review): immediately overwritten with the load-time copy —
  // presumably to undo later patching; confirm before removing.
  module.exports.destroy = originalDestroy
  module.exports.pipeline = CustomStream.pipeline
  module.exports.compose = CustomStream.compose
  Object.defineProperty(CustomStream, 'promises', {
    configurable: true,
    enumerable: true,
    get() {
      return promises
    }
  })
  module.exports.Stream = CustomStream.Stream
}

// Allow default importing
module.exports.default = module.exports
107
node_modules/readable-stream/lib/ours/primordials.js
generated
vendored
Normal file
107
node_modules/readable-stream/lib/ours/primordials.js
generated
vendored
Normal file
|
@ -0,0 +1,107 @@
|
|||
'use strict'
|
||||
|
||||
/*
|
||||
This file is a reduced and adapted version of the main lib/internal/per_context/primordials.js file defined at
|
||||
|
||||
https://github.com/nodejs/node/blob/master/lib/internal/per_context/primordials.js
|
||||
|
||||
Don't try to replace with the original file and keep it up to date with the upstream file.
|
||||
*/
|
||||
// Call-style shims standing in for Node's frozen primordials: each entry
// forwards to the corresponding built-in so calling code does not depend on
// (possibly patched) prototype methods being reachable via `self.method`.
module.exports = {
  ArrayIsArray(self) {
    return Array.isArray(self)
  },
  ArrayPrototypeIncludes(self, el) {
    return self.includes(el)
  },
  ArrayPrototypeIndexOf(self, el) {
    return self.indexOf(el)
  },
  ArrayPrototypeJoin(self, sep) {
    return self.join(sep)
  },
  ArrayPrototypeMap(self, fn) {
    return self.map(fn)
  },
  // `el` is accepted but ignored: Array.prototype.pop takes no arguments.
  ArrayPrototypePop(self, el) {
    return self.pop(el)
  },
  ArrayPrototypePush(self, el) {
    return self.push(el)
  },
  ArrayPrototypeSlice(self, start, end) {
    return self.slice(start, end)
  },
  Error,
  FunctionPrototypeCall(fn, thisArgs, ...args) {
    return fn.call(thisArgs, ...args)
  },
  FunctionPrototypeSymbolHasInstance(self, instance) {
    return Function.prototype[Symbol.hasInstance].call(self, instance)
  },
  MathFloor: Math.floor,
  Number,
  NumberIsInteger: Number.isInteger,
  NumberIsNaN: Number.isNaN,
  NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER,
  NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER,
  NumberParseInt: Number.parseInt,
  ObjectDefineProperties(self, props) {
    return Object.defineProperties(self, props)
  },
  ObjectDefineProperty(self, name, prop) {
    return Object.defineProperty(self, name, prop)
  },
  ObjectGetOwnPropertyDescriptor(self, name) {
    return Object.getOwnPropertyDescriptor(self, name)
  },
  ObjectKeys(obj) {
    return Object.keys(obj)
  },
  ObjectSetPrototypeOf(target, proto) {
    return Object.setPrototypeOf(target, proto)
  },
  Promise,
  PromisePrototypeCatch(self, fn) {
    return self.catch(fn)
  },
  PromisePrototypeThen(self, thenFn, catchFn) {
    return self.then(thenFn, catchFn)
  },
  PromiseReject(err) {
    return Promise.reject(err)
  },
  PromiseResolve(val) {
    return Promise.resolve(val)
  },
  ReflectApply: Reflect.apply,
  RegExpPrototypeTest(self, value) {
    return self.test(value)
  },
  // Note: unlike Node's primordials this is the plain global Set.
  SafeSet: Set,
  String,
  StringPrototypeSlice(self, start, end) {
    return self.slice(start, end)
  },
  StringPrototypeToLowerCase(self) {
    return self.toLowerCase()
  },
  StringPrototypeToUpperCase(self) {
    return self.toUpperCase()
  },
  StringPrototypeTrim(self) {
    return self.trim()
  },
  Symbol,
  SymbolFor: Symbol.for,
  SymbolAsyncIterator: Symbol.asyncIterator,
  SymbolHasInstance: Symbol.hasInstance,
  SymbolIterator: Symbol.iterator,
  // Fall back to private symbols where the explicit-resource-management
  // well-known symbols are not available.
  SymbolDispose: Symbol.dispose || Symbol('Symbol.dispose'),
  SymbolAsyncDispose: Symbol.asyncDispose || Symbol('Symbol.asyncDispose'),
  TypedArrayPrototypeSet(self, buf, len) {
    return self.set(buf, len)
  },
  Boolean: Boolean,
  Uint8Array
}
|
200
node_modules/readable-stream/lib/ours/util.js
generated
vendored
Normal file
200
node_modules/readable-stream/lib/ours/util.js
generated
vendored
Normal file
|
@ -0,0 +1,200 @@
|
|||
'use strict'

const bufferModule = require('buffer')
const { kResistStopPropagation, SymbolDispose } = require('./primordials')
// Prefer the platform globals; fall back to the abort-controller polyfill.
const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
const AbortController = globalThis.AbortController || require('abort-controller').AbortController
// Constructor shared by all `async function` values; used by
// types.isAsyncFunction below.
const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor
const Blob = globalThis.Blob || bufferModule.Blob
/* eslint-disable indent */
// When no Blob implementation exists at all, isBlob is constantly false.
const isBlob =
  typeof Blob !== 'undefined'
    ? function isBlob(b) {
        // eslint-disable-next-line indent
        return b instanceof Blob
      }
    : function isBlob(b) {
        return false
      }
/* eslint-enable indent */

// Local duck-type checks mirroring the validators module.
// NOTE(review): ERR_INVALID_ARG_TYPE is not defined or imported in this
// file, so these throw paths would raise a ReferenceError instead of the
// intended error — confirm against upstream before relying on them.
const validateAbortSignal = (signal, name) => {
  if (signal !== undefined && (signal === null || typeof signal !== 'object' || !('aborted' in signal))) {
    throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
  }
}
const validateFunction = (value, name) => {
  if (typeof value !== 'function') throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
}
|
||||
|
||||
// This is a simplified version of AggregateError
|
||||
// This is a simplified version of AggregateError: its message is the
// concatenation of the stacks of the wrapped errors, one per line.
class AggregateError extends Error {
  constructor(errors) {
    if (!Array.isArray(errors)) {
      throw new TypeError(`Expected input to be an Array, got ${typeof errors}`)
    }
    const stacks = errors.map((err) => ` ${err.stack}\n`)
    super(stacks.join(''))
    this.name = 'AggregateError'
    this.errors = errors
  }
}
|
||||
// Reduced stand-ins for Node's `util` internals used by the bundled
// stream implementation.
module.exports = {
  AggregateError,
  kEmptyObject: Object.freeze({}),
  // Wraps `callback` so only its first invocation runs.
  once(callback) {
    let called = false
    return function (...args) {
      if (called) {
        return
      }
      called = true
      callback.apply(this, args)
    }
  },
  // Returns { promise, resolve, reject } with the settlers exposed.
  createDeferredPromise: function () {
    let resolve
    let reject

    // eslint-disable-next-line promise/param-names
    const promise = new Promise((res, rej) => {
      resolve = res
      reject = rej
    })
    return {
      promise,
      resolve,
      reject
    }
  },
  // NOTE(review): unlike util.promisify, this invokes `fn` immediately and
  // returns a Promise rather than returning a promisified function —
  // presumably intentional for its internal call sites; confirm.
  promisify(fn) {
    return new Promise((resolve, reject) => {
      fn((err, ...args) => {
        if (err) {
          return reject(err)
        }
        return resolve(...args)
      })
    })
  },
  // No-op replacement for util.debuglog.
  debuglog() {
    return function () {}
  },
  format(format, ...args) {
    // Simplified version of https://nodejs.org/api/util.html#utilformatformat-args
    return format.replace(/%([sdifj])/g, function (...[_unused, type]) {
      const replacement = args.shift()
      if (type === 'f') {
        return replacement.toFixed(6)
      } else if (type === 'j') {
        return JSON.stringify(replacement)
      } else if (type === 's' && typeof replacement === 'object') {
        const ctor = replacement.constructor !== Object ? replacement.constructor.name : ''
        return `${ctor} {}`.trim()
      } else {
        return replacement.toString()
      }
    })
  },
  inspect(value) {
    // Vastly simplified version of https://nodejs.org/api/util.html#utilinspectobject-options
    switch (typeof value) {
      case 'string':
        // Pick a quote style that avoids escaping where possible.
        if (value.includes("'")) {
          if (!value.includes('"')) {
            return `"${value}"`
          } else if (!value.includes('`') && !value.includes('${')) {
            return `\`${value}\``
          }
        }
        return `'${value}'`
      case 'number':
        if (isNaN(value)) {
          return 'NaN'
        } else if (Object.is(value, -0)) {
          return String(value)
        }
        return value
      case 'bigint':
        return `${String(value)}n`
      case 'boolean':
      case 'undefined':
        return String(value)
      case 'object':
        return '{}'
    }
  },
  types: {
    isAsyncFunction(fn) {
      return fn instanceof AsyncFunction
    },
    isArrayBufferView(arr) {
      return ArrayBuffer.isView(arr)
    }
  },
  isBlob,
  // Deprecation warnings are dropped in this build; `message` is unused.
  deprecate(fn, message) {
    return fn
  },
  // Prefer the native events.addAbortListener when present.
  addAbortListener:
    require('events').addAbortListener ||
    function addAbortListener(signal, listener) {
      if (signal === undefined) {
        throw new ERR_INVALID_ARG_TYPE('signal', 'AbortSignal', signal)
      }
      validateAbortSignal(signal, 'signal')
      validateFunction(listener, 'listener')
      let removeEventListener
      if (signal.aborted) {
        // Already aborted: fire asynchronously, matching listener semantics.
        queueMicrotask(() => listener())
      } else {
        signal.addEventListener('abort', listener, {
          __proto__: null,
          once: true,
          [kResistStopPropagation]: true
        })
        removeEventListener = () => {
          signal.removeEventListener('abort', listener)
        }
      }
      // Disposable handle: disposing unregisters the listener if still attached.
      return {
        __proto__: null,
        [SymbolDispose]() {
          var _removeEventListener
          ;(_removeEventListener = removeEventListener) === null || _removeEventListener === undefined
            ? undefined
            : _removeEventListener()
        }
      }
    },
  // Prefer the native AbortSignal.any when present.
  AbortSignalAny:
    AbortSignal.any ||
    function AbortSignalAny(signals) {
      // Fast path if there is only one signal.
      if (signals.length === 1) {
        return signals[0]
      }
      const ac = new AbortController()
      const abort = () => ac.abort()
      signals.forEach((signal) => {
        validateAbortSignal(signal, 'signals')
        signal.addEventListener('abort', abort, {
          once: true
        })
      })
      // Once the combined signal aborts, detach from all source signals.
      ac.signal.addEventListener(
        'abort',
        () => {
          signals.forEach((signal) => signal.removeEventListener('abort', abort))
        },
        {
          once: true
        }
      )
      return ac.signal
    }
}
module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom')
|
141
node_modules/readable-stream/lib/stream.js
generated
vendored
Normal file
141
node_modules/readable-stream/lib/stream.js
generated
vendored
Normal file
|
@ -0,0 +1,141 @@
|
|||
/* replacement start */
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
|
||||
/* replacement end */
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
;('use strict')
|
||||
const { ObjectDefineProperty, ObjectKeys, ReflectApply } = require('./ours/primordials')
|
||||
const {
|
||||
promisify: { custom: customPromisify }
|
||||
} = require('./ours/util')
|
||||
const { streamReturningOperators, promiseReturningOperators } = require('./internal/streams/operators')
|
||||
const {
|
||||
codes: { ERR_ILLEGAL_CONSTRUCTOR }
|
||||
} = require('./ours/errors')
|
||||
const compose = require('./internal/streams/compose')
|
||||
const { setDefaultHighWaterMark, getDefaultHighWaterMark } = require('./internal/streams/state')
|
||||
const { pipeline } = require('./internal/streams/pipeline')
|
||||
const { destroyer } = require('./internal/streams/destroy')
|
||||
const eos = require('./internal/streams/end-of-stream')
|
||||
const internalBuffer = {}
|
||||
const promises = require('./stream/promises')
|
||||
const utils = require('./internal/streams/utils')
|
||||
const Stream = (module.exports = require('./internal/streams/legacy').Stream)
|
||||
Stream.isDestroyed = utils.isDestroyed
|
||||
Stream.isDisturbed = utils.isDisturbed
|
||||
Stream.isErrored = utils.isErrored
|
||||
Stream.isReadable = utils.isReadable
|
||||
Stream.isWritable = utils.isWritable
|
||||
Stream.Readable = require('./internal/streams/readable')
|
||||
for (const key of ObjectKeys(streamReturningOperators)) {
|
||||
const op = streamReturningOperators[key]
|
||||
function fn(...args) {
|
||||
if (new.target) {
|
||||
throw ERR_ILLEGAL_CONSTRUCTOR()
|
||||
}
|
||||
return Stream.Readable.from(ReflectApply(op, this, args))
|
||||
}
|
||||
ObjectDefineProperty(fn, 'name', {
|
||||
__proto__: null,
|
||||
value: op.name
|
||||
})
|
||||
ObjectDefineProperty(fn, 'length', {
|
||||
__proto__: null,
|
||||
value: op.length
|
||||
})
|
||||
ObjectDefineProperty(Stream.Readable.prototype, key, {
|
||||
__proto__: null,
|
||||
value: fn,
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: true
|
||||
})
|
||||
}
|
||||
for (const key of ObjectKeys(promiseReturningOperators)) {
|
||||
const op = promiseReturningOperators[key]
|
||||
function fn(...args) {
|
||||
if (new.target) {
|
||||
throw ERR_ILLEGAL_CONSTRUCTOR()
|
||||
}
|
||||
return ReflectApply(op, this, args)
|
||||
}
|
||||
ObjectDefineProperty(fn, 'name', {
|
||||
__proto__: null,
|
||||
value: op.name
|
||||
})
|
||||
ObjectDefineProperty(fn, 'length', {
|
||||
__proto__: null,
|
||||
value: op.length
|
||||
})
|
||||
ObjectDefineProperty(Stream.Readable.prototype, key, {
|
||||
__proto__: null,
|
||||
value: fn,
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: true
|
||||
})
|
||||
}
|
||||
Stream.Writable = require('./internal/streams/writable')
|
||||
Stream.Duplex = require('./internal/streams/duplex')
|
||||
Stream.Transform = require('./internal/streams/transform')
|
||||
Stream.PassThrough = require('./internal/streams/passthrough')
|
||||
Stream.pipeline = pipeline
|
||||
const { addAbortSignal } = require('./internal/streams/add-abort-signal')
|
||||
Stream.addAbortSignal = addAbortSignal
|
||||
Stream.finished = eos
|
||||
Stream.destroy = destroyer
|
||||
Stream.compose = compose
|
||||
Stream.setDefaultHighWaterMark = setDefaultHighWaterMark
|
||||
Stream.getDefaultHighWaterMark = getDefaultHighWaterMark
|
||||
ObjectDefineProperty(Stream, 'promises', {
|
||||
__proto__: null,
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get() {
|
||||
return promises
|
||||
}
|
||||
})
|
||||
ObjectDefineProperty(pipeline, customPromisify, {
|
||||
__proto__: null,
|
||||
enumerable: true,
|
||||
get() {
|
||||
return promises.pipeline
|
||||
}
|
||||
})
|
||||
ObjectDefineProperty(eos, customPromisify, {
|
||||
__proto__: null,
|
||||
enumerable: true,
|
||||
get() {
|
||||
return promises.finished
|
||||
}
|
||||
})
|
||||
|
||||
// Backwards-compat with node 0.4.x
|
||||
Stream.Stream = Stream
|
||||
Stream._isUint8Array = function isUint8Array(value) {
|
||||
return value instanceof Uint8Array
|
||||
}
|
||||
Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) {
|
||||
return Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
|
||||
}
|
43
node_modules/readable-stream/lib/stream/promises.js
generated
vendored
Normal file
43
node_modules/readable-stream/lib/stream/promises.js
generated
vendored
Normal file
|
@ -0,0 +1,43 @@
|
|||
'use strict'
|
||||
|
||||
const { ArrayPrototypePop, Promise } = require('../ours/primordials')
|
||||
const { isIterable, isNodeStream, isWebStream } = require('../internal/streams/utils')
|
||||
const { pipelineImpl: pl } = require('../internal/streams/pipeline')
|
||||
const { finished } = require('../internal/streams/end-of-stream')
|
||||
require('../../lib/stream.js')
|
||||
function pipeline(...streams) {
|
||||
return new Promise((resolve, reject) => {
|
||||
let signal
|
||||
let end
|
||||
const lastArg = streams[streams.length - 1]
|
||||
if (
|
||||
lastArg &&
|
||||
typeof lastArg === 'object' &&
|
||||
!isNodeStream(lastArg) &&
|
||||
!isIterable(lastArg) &&
|
||||
!isWebStream(lastArg)
|
||||
) {
|
||||
const options = ArrayPrototypePop(streams)
|
||||
signal = options.signal
|
||||
end = options.end
|
||||
}
|
||||
pl(
|
||||
streams,
|
||||
(err, value) => {
|
||||
if (err) {
|
||||
reject(err)
|
||||
} else {
|
||||
resolve(value)
|
||||
}
|
||||
},
|
||||
{
|
||||
signal,
|
||||
end
|
||||
}
|
||||
)
|
||||
})
|
||||
}
|
||||
module.exports = {
|
||||
finished,
|
||||
pipeline
|
||||
}
|
73
node_modules/readable-stream/node_modules/buffer/AUTHORS.md
generated
vendored
Normal file
73
node_modules/readable-stream/node_modules/buffer/AUTHORS.md
generated
vendored
Normal file
|
@ -0,0 +1,73 @@
|
|||
# Authors
|
||||
|
||||
#### Ordered by first contribution.
|
||||
|
||||
- Romain Beauxis (toots@rastageeks.org)
|
||||
- Tobias Koppers (tobias.koppers@googlemail.com)
|
||||
- Janus (ysangkok@gmail.com)
|
||||
- Rainer Dreyer (rdrey1@gmail.com)
|
||||
- Tõnis Tiigi (tonistiigi@gmail.com)
|
||||
- James Halliday (mail@substack.net)
|
||||
- Michael Williamson (mike@zwobble.org)
|
||||
- elliottcable (github@elliottcable.name)
|
||||
- rafael (rvalle@livelens.net)
|
||||
- Andrew Kelley (superjoe30@gmail.com)
|
||||
- Andreas Madsen (amwebdk@gmail.com)
|
||||
- Mike Brevoort (mike.brevoort@pearson.com)
|
||||
- Brian White (mscdex@mscdex.net)
|
||||
- Feross Aboukhadijeh (feross@feross.org)
|
||||
- Ruben Verborgh (ruben@verborgh.org)
|
||||
- eliang (eliang.cs@gmail.com)
|
||||
- Jesse Tane (jesse.tane@gmail.com)
|
||||
- Alfonso Boza (alfonso@cloud.com)
|
||||
- Mathias Buus (mathiasbuus@gmail.com)
|
||||
- Devon Govett (devongovett@gmail.com)
|
||||
- Daniel Cousens (github@dcousens.com)
|
||||
- Joseph Dykstra (josephdykstra@gmail.com)
|
||||
- Parsha Pourkhomami (parshap+git@gmail.com)
|
||||
- Damjan Košir (damjan.kosir@gmail.com)
|
||||
- daverayment (dave.rayment@gmail.com)
|
||||
- kawanet (u-suke@kawa.net)
|
||||
- Linus Unnebäck (linus@folkdatorn.se)
|
||||
- Nolan Lawson (nolan.lawson@gmail.com)
|
||||
- Calvin Metcalf (calvin.metcalf@gmail.com)
|
||||
- Koki Takahashi (hakatasiloving@gmail.com)
|
||||
- Guy Bedford (guybedford@gmail.com)
|
||||
- Jan Schär (jscissr@gmail.com)
|
||||
- RaulTsc (tomescu.raul@gmail.com)
|
||||
- Matthieu Monsch (monsch@alum.mit.edu)
|
||||
- Dan Ehrenberg (littledan@chromium.org)
|
||||
- Kirill Fomichev (fanatid@ya.ru)
|
||||
- Yusuke Kawasaki (u-suke@kawa.net)
|
||||
- DC (dcposch@dcpos.ch)
|
||||
- John-David Dalton (john.david.dalton@gmail.com)
|
||||
- adventure-yunfei (adventure030@gmail.com)
|
||||
- Emil Bay (github@tixz.dk)
|
||||
- Sam Sudar (sudar.sam@gmail.com)
|
||||
- Volker Mische (volker.mische@gmail.com)
|
||||
- David Walton (support@geekstocks.com)
|
||||
- Сковорода Никита Андреевич (chalkerx@gmail.com)
|
||||
- greenkeeper[bot] (greenkeeper[bot]@users.noreply.github.com)
|
||||
- ukstv (sergey.ukustov@machinomy.com)
|
||||
- Renée Kooi (renee@kooi.me)
|
||||
- ranbochen (ranbochen@qq.com)
|
||||
- Vladimir Borovik (bobahbdb@gmail.com)
|
||||
- greenkeeper[bot] (23040076+greenkeeper[bot]@users.noreply.github.com)
|
||||
- kumavis (aaron@kumavis.me)
|
||||
- Sergey Ukustov (sergey.ukustov@machinomy.com)
|
||||
- Fei Liu (liu.feiwood@gmail.com)
|
||||
- Blaine Bublitz (blaine.bublitz@gmail.com)
|
||||
- clement (clement@seald.io)
|
||||
- Koushik Dutta (koushd@gmail.com)
|
||||
- Jordan Harband (ljharb@gmail.com)
|
||||
- Niklas Mischkulnig (mischnic@users.noreply.github.com)
|
||||
- Nikolai Vavilov (vvnicholas@gmail.com)
|
||||
- Fedor Nezhivoi (gyzerok@users.noreply.github.com)
|
||||
- shuse2 (shus.toda@gmail.com)
|
||||
- Peter Newman (peternewman@users.noreply.github.com)
|
||||
- mathmakgakpak (44949126+mathmakgakpak@users.noreply.github.com)
|
||||
- jkkang (jkkang@smartauth.kr)
|
||||
- Deklan Webster (deklanw@gmail.com)
|
||||
- Martin Heidegger (martin.heidegger@gmail.com)
|
||||
|
||||
#### Generated by bin/update-authors.sh.
|
21
node_modules/readable-stream/node_modules/buffer/LICENSE
generated
vendored
Normal file
21
node_modules/readable-stream/node_modules/buffer/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,21 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Feross Aboukhadijeh, and other contributors.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
410
node_modules/readable-stream/node_modules/buffer/README.md
generated
vendored
Normal file
410
node_modules/readable-stream/node_modules/buffer/README.md
generated
vendored
Normal file
|
@ -0,0 +1,410 @@
|
|||
# buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url]
|
||||
|
||||
[travis-image]: https://img.shields.io/travis/feross/buffer/master.svg
|
||||
[travis-url]: https://travis-ci.org/feross/buffer
|
||||
[npm-image]: https://img.shields.io/npm/v/buffer.svg
|
||||
[npm-url]: https://npmjs.org/package/buffer
|
||||
[downloads-image]: https://img.shields.io/npm/dm/buffer.svg
|
||||
[downloads-url]: https://npmjs.org/package/buffer
|
||||
[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg
|
||||
[standard-url]: https://standardjs.com
|
||||
|
||||
#### The buffer module from [node.js](https://nodejs.org/), for the browser.
|
||||
|
||||
[![saucelabs][saucelabs-image]][saucelabs-url]
|
||||
|
||||
[saucelabs-image]: https://saucelabs.com/browser-matrix/buffer.svg
|
||||
[saucelabs-url]: https://saucelabs.com/u/buffer
|
||||
|
||||
With [browserify](http://browserify.org), simply `require('buffer')` or use the `Buffer` global and you will get this module.
|
||||
|
||||
The goal is to provide an API that is 100% identical to
|
||||
[node's Buffer API](https://nodejs.org/api/buffer.html). Read the
|
||||
[official docs](https://nodejs.org/api/buffer.html) for the full list of properties,
|
||||
instance methods, and class methods that are supported.
|
||||
|
||||
## features
|
||||
|
||||
- Manipulate binary data like a boss, in all browsers!
|
||||
- Super fast. Backed by Typed Arrays (`Uint8Array`/`ArrayBuffer`, not `Object`)
|
||||
- Extremely small bundle size (**6.75KB minified + gzipped**, 51.9KB with comments)
|
||||
- Excellent browser support (Chrome, Firefox, Edge, Safari 11+, iOS 11+, Android, etc.)
|
||||
- Preserves Node API exactly, with one minor difference (see below)
|
||||
- Square-bracket `buf[4]` notation works!
|
||||
- Does not modify any browser prototypes or put anything on `window`
|
||||
- Comprehensive test suite (including all buffer tests from node.js core)
|
||||
|
||||
## install
|
||||
|
||||
To use this module directly (without browserify), install it:
|
||||
|
||||
```bash
|
||||
npm install buffer
|
||||
```
|
||||
|
||||
This module was previously called **native-buffer-browserify**, but please use **buffer**
|
||||
from now on.
|
||||
|
||||
If you do not use a bundler, you can use the [standalone script](https://bundle.run/buffer).
|
||||
|
||||
## usage
|
||||
|
||||
The module's API is identical to node's `Buffer` API. Read the
|
||||
[official docs](https://nodejs.org/api/buffer.html) for the full list of properties,
|
||||
instance methods, and class methods that are supported.
|
||||
|
||||
As mentioned above, `require('buffer')` or use the `Buffer` global with
|
||||
[browserify](http://browserify.org) and this module will automatically be included
|
||||
in your bundle. Almost any npm module will work in the browser, even if it assumes that
|
||||
the node `Buffer` API will be available.
|
||||
|
||||
To depend on this module explicitly (without browserify), require it like this:
|
||||
|
||||
```js
|
||||
var Buffer = require('buffer/').Buffer // note: the trailing slash is important!
|
||||
```
|
||||
|
||||
To require this module explicitly, use `require('buffer/')` which tells the node.js module
|
||||
lookup algorithm (also used by browserify) to use the **npm module** named `buffer`
|
||||
instead of the **node.js core** module named `buffer`!
|
||||
|
||||
|
||||
## how does it work?
|
||||
|
||||
The Buffer constructor returns instances of `Uint8Array` that have their prototype
|
||||
changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of `Uint8Array`,
|
||||
so the returned instances will have all the node `Buffer` methods and the
|
||||
`Uint8Array` methods. Square bracket notation works as expected -- it returns a
|
||||
single octet.
|
||||
|
||||
The `Uint8Array` prototype remains unmodified.
|
||||
|
||||
|
||||
## tracking the latest node api
|
||||
|
||||
This module tracks the Buffer API in the latest (unstable) version of node.js. The Buffer
|
||||
API is considered **stable** in the
|
||||
[node stability index](https://nodejs.org/docs/latest/api/documentation.html#documentation_stability_index),
|
||||
so it is unlikely that there will ever be breaking changes.
|
||||
Nonetheless, when/if the Buffer API changes in node, this module's API will change
|
||||
accordingly.
|
||||
|
||||
## related packages
|
||||
|
||||
- [`buffer-reverse`](https://www.npmjs.com/package/buffer-reverse) - Reverse a buffer
|
||||
- [`buffer-xor`](https://www.npmjs.com/package/buffer-xor) - Bitwise xor a buffer
|
||||
- [`is-buffer`](https://www.npmjs.com/package/is-buffer) - Determine if an object is a Buffer without including the whole `Buffer` package
|
||||
|
||||
## conversion packages
|
||||
|
||||
### convert typed array to buffer
|
||||
|
||||
Use [`typedarray-to-buffer`](https://www.npmjs.com/package/typedarray-to-buffer) to convert any kind of typed array to a `Buffer`. Does not perform a copy, so it's super fast.
|
||||
|
||||
### convert buffer to typed array
|
||||
|
||||
`Buffer` is a subclass of `Uint8Array` (which is a typed array). So there is no need to explicitly convert to typed array. Just use the buffer as a `Uint8Array`.
|
||||
|
||||
### convert blob to buffer
|
||||
|
||||
Use [`blob-to-buffer`](https://www.npmjs.com/package/blob-to-buffer) to convert a `Blob` to a `Buffer`.
|
||||
|
||||
### convert buffer to blob
|
||||
|
||||
To convert a `Buffer` to a `Blob`, use the `Blob` constructor:
|
||||
|
||||
```js
|
||||
var blob = new Blob([ buffer ])
|
||||
```
|
||||
|
||||
Optionally, specify a mimetype:
|
||||
|
||||
```js
|
||||
var blob = new Blob([ buffer ], { type: 'text/html' })
|
||||
```
|
||||
|
||||
### convert arraybuffer to buffer
|
||||
|
||||
To convert an `ArrayBuffer` to a `Buffer`, use the `Buffer.from` function. Does not perform a copy, so it's super fast.
|
||||
|
||||
```js
|
||||
var buffer = Buffer.from(arrayBuffer)
|
||||
```
|
||||
|
||||
### convert buffer to arraybuffer
|
||||
|
||||
To convert a `Buffer` to an `ArrayBuffer`, use the `.buffer` property (which is present on all `Uint8Array` objects):
|
||||
|
||||
```js
|
||||
var arrayBuffer = buffer.buffer.slice(
|
||||
buffer.byteOffset, buffer.byteOffset + buffer.byteLength
|
||||
)
|
||||
```
|
||||
|
||||
Alternatively, use the [`to-arraybuffer`](https://www.npmjs.com/package/to-arraybuffer) module.
|
||||
|
||||
## performance
|
||||
|
||||
See perf tests in `/perf`.
|
||||
|
||||
`BrowserBuffer` is the browser `buffer` module (this repo). `Uint8Array` is included as a
|
||||
sanity check (since `BrowserBuffer` uses `Uint8Array` under the hood, `Uint8Array` will
|
||||
always be at least a bit faster). Finally, `NodeBuffer` is the node.js buffer module,
|
||||
which is included to compare against.
|
||||
|
||||
NOTE: Performance has improved since these benchmarks were taken. PR welcome to update the README.
|
||||
|
||||
### Chrome 38
|
||||
|
||||
| Method | Operations | Accuracy | Sampled | Fastest |
|
||||
|:-------|:-----------|:---------|:--------|:-------:|
|
||||
| BrowserBuffer#bracket-notation | 11,457,464 ops/sec | ±0.86% | 66 | ✓ |
|
||||
| Uint8Array#bracket-notation | 10,824,332 ops/sec | ±0.74% | 65 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#concat | 450,532 ops/sec | ±0.76% | 68 | |
|
||||
| Uint8Array#concat | 1,368,911 ops/sec | ±1.50% | 62 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16000) | 903,001 ops/sec | ±0.96% | 67 | |
|
||||
| Uint8Array#copy(16000) | 1,422,441 ops/sec | ±1.04% | 66 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16) | 11,431,358 ops/sec | ±0.46% | 69 | |
|
||||
| Uint8Array#copy(16) | 13,944,163 ops/sec | ±1.12% | 68 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16000) | 106,329 ops/sec | ±6.70% | 44 | |
|
||||
| Uint8Array#new(16000) | 131,001 ops/sec | ±2.85% | 31 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16) | 1,554,491 ops/sec | ±1.60% | 65 | |
|
||||
| Uint8Array#new(16) | 6,623,930 ops/sec | ±1.66% | 65 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#readDoubleBE | 112,830 ops/sec | ±0.51% | 69 | ✓ |
|
||||
| DataView#getFloat64 | 93,500 ops/sec | ±0.57% | 68 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readFloatBE | 146,678 ops/sec | ±0.95% | 68 | ✓ |
|
||||
| DataView#getFloat32 | 99,311 ops/sec | ±0.41% | 67 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readUInt32LE | 843,214 ops/sec | ±0.70% | 69 | ✓ |
|
||||
| DataView#getUint32 | 103,024 ops/sec | ±0.64% | 67 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#slice | 1,013,941 ops/sec | ±0.75% | 67 | |
|
||||
| Uint8Array#subarray | 1,903,928 ops/sec | ±0.53% | 67 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#writeFloatBE | 61,387 ops/sec | ±0.90% | 67 | |
|
||||
| DataView#setFloat32 | 141,249 ops/sec | ±0.40% | 66 | ✓ |
|
||||
|
||||
|
||||
### Firefox 33
|
||||
|
||||
| Method | Operations | Accuracy | Sampled | Fastest |
|
||||
|:-------|:-----------|:---------|:--------|:-------:|
|
||||
| BrowserBuffer#bracket-notation | 20,800,421 ops/sec | ±1.84% | 60 | |
|
||||
| Uint8Array#bracket-notation | 20,826,235 ops/sec | ±2.02% | 61 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#concat | 153,076 ops/sec | ±2.32% | 61 | |
|
||||
| Uint8Array#concat | 1,255,674 ops/sec | ±8.65% | 52 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16000) | 1,105,312 ops/sec | ±1.16% | 63 | |
|
||||
| Uint8Array#copy(16000) | 1,615,911 ops/sec | ±0.55% | 66 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16) | 16,357,599 ops/sec | ±0.73% | 68 | |
|
||||
| Uint8Array#copy(16) | 31,436,281 ops/sec | ±1.05% | 68 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16000) | 52,995 ops/sec | ±6.01% | 35 | |
|
||||
| Uint8Array#new(16000) | 87,686 ops/sec | ±5.68% | 45 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16) | 252,031 ops/sec | ±1.61% | 66 | |
|
||||
| Uint8Array#new(16) | 8,477,026 ops/sec | ±0.49% | 68 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#readDoubleBE | 99,871 ops/sec | ±0.41% | 69 | |
|
||||
| DataView#getFloat64 | 285,663 ops/sec | ±0.70% | 68 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#readFloatBE | 115,540 ops/sec | ±0.42% | 69 | |
|
||||
| DataView#getFloat32 | 288,722 ops/sec | ±0.82% | 68 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#readUInt32LE | 633,926 ops/sec | ±1.08% | 67 | ✓ |
|
||||
| DataView#getUint32 | 294,808 ops/sec | ±0.79% | 64 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#slice | 349,425 ops/sec | ±0.46% | 69 | |
|
||||
| Uint8Array#subarray | 5,965,819 ops/sec | ±0.60% | 65 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#writeFloatBE | 59,980 ops/sec | ±0.41% | 67 | |
|
||||
| DataView#setFloat32 | 317,634 ops/sec | ±0.63% | 68 | ✓ |
|
||||
|
||||
### Safari 8
|
||||
|
||||
| Method | Operations | Accuracy | Sampled | Fastest |
|
||||
|:-------|:-----------|:---------|:--------|:-------:|
|
||||
| BrowserBuffer#bracket-notation | 10,279,729 ops/sec | ±2.25% | 56 | ✓ |
|
||||
| Uint8Array#bracket-notation | 10,030,767 ops/sec | ±2.23% | 59 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#concat | 144,138 ops/sec | ±1.38% | 65 | |
|
||||
| Uint8Array#concat | 4,950,764 ops/sec | ±1.70% | 63 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16000) | 1,058,548 ops/sec | ±1.51% | 64 | |
|
||||
| Uint8Array#copy(16000) | 1,409,666 ops/sec | ±1.17% | 65 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16) | 6,282,529 ops/sec | ±1.88% | 58 | |
|
||||
| Uint8Array#copy(16) | 11,907,128 ops/sec | ±2.87% | 58 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16000) | 101,663 ops/sec | ±3.89% | 57 | |
|
||||
| Uint8Array#new(16000) | 22,050,818 ops/sec | ±6.51% | 46 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16) | 176,072 ops/sec | ±2.13% | 64 | |
|
||||
| Uint8Array#new(16) | 24,385,731 ops/sec | ±5.01% | 51 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#readDoubleBE | 41,341 ops/sec | ±1.06% | 67 | |
|
||||
| DataView#getFloat64 | 322,280 ops/sec | ±0.84% | 68 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#readFloatBE | 46,141 ops/sec | ±1.06% | 65 | |
|
||||
| DataView#getFloat32 | 337,025 ops/sec | ±0.43% | 69 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#readUInt32LE | 151,551 ops/sec | ±1.02% | 66 | |
|
||||
| DataView#getUint32 | 308,278 ops/sec | ±0.94% | 67 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#slice | 197,365 ops/sec | ±0.95% | 66 | |
|
||||
| Uint8Array#subarray | 9,558,024 ops/sec | ±3.08% | 58 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#writeFloatBE | 17,518 ops/sec | ±1.03% | 63 | |
|
||||
| DataView#setFloat32 | 319,751 ops/sec | ±0.48% | 68 | ✓ |
|
||||
|
||||
|
||||
### Node 0.11.14
|
||||
|
||||
| Method | Operations | Accuracy | Sampled | Fastest |
|
||||
|:-------|:-----------|:---------|:--------|:-------:|
|
||||
| BrowserBuffer#bracket-notation | 10,489,828 ops/sec | ±3.25% | 90 | |
|
||||
| Uint8Array#bracket-notation | 10,534,884 ops/sec | ±0.81% | 92 | ✓ |
|
||||
| NodeBuffer#bracket-notation | 10,389,910 ops/sec | ±0.97% | 87 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#concat | 487,830 ops/sec | ±2.58% | 88 | |
|
||||
| Uint8Array#concat | 1,814,327 ops/sec | ±1.28% | 88 | ✓ |
|
||||
| NodeBuffer#concat | 1,636,523 ops/sec | ±1.88% | 73 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16000) | 1,073,665 ops/sec | ±0.77% | 90 | |
|
||||
| Uint8Array#copy(16000) | 1,348,517 ops/sec | ±0.84% | 89 | ✓ |
|
||||
| NodeBuffer#copy(16000) | 1,289,533 ops/sec | ±0.82% | 93 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16) | 12,782,706 ops/sec | ±0.74% | 85 | |
|
||||
| Uint8Array#copy(16) | 14,180,427 ops/sec | ±0.93% | 92 | ✓ |
|
||||
| NodeBuffer#copy(16) | 11,083,134 ops/sec | ±1.06% | 89 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16000) | 141,678 ops/sec | ±3.30% | 67 | |
|
||||
| Uint8Array#new(16000) | 161,491 ops/sec | ±2.96% | 60 | |
|
||||
| NodeBuffer#new(16000) | 292,699 ops/sec | ±3.20% | 55 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16) | 1,655,466 ops/sec | ±2.41% | 82 | |
|
||||
| Uint8Array#new(16) | 14,399,926 ops/sec | ±0.91% | 94 | ✓ |
|
||||
| NodeBuffer#new(16) | 3,894,696 ops/sec | ±0.88% | 92 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readDoubleBE | 109,582 ops/sec | ±0.75% | 93 | ✓ |
|
||||
| DataView#getFloat64 | 91,235 ops/sec | ±0.81% | 90 | |
|
||||
| NodeBuffer#readDoubleBE | 88,593 ops/sec | ±0.96% | 81 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readFloatBE | 139,854 ops/sec | ±1.03% | 85 | ✓ |
|
||||
| DataView#getFloat32 | 98,744 ops/sec | ±0.80% | 89 | |
|
||||
| NodeBuffer#readFloatBE | 92,769 ops/sec | ±0.94% | 93 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readUInt32LE | 710,861 ops/sec | ±0.82% | 92 | |
|
||||
| DataView#getUint32 | 117,893 ops/sec | ±0.84% | 91 | |
|
||||
| NodeBuffer#readUInt32LE | 851,412 ops/sec | ±0.72% | 93 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#slice | 1,673,877 ops/sec | ±0.73% | 94 | |
|
||||
| Uint8Array#subarray | 6,919,243 ops/sec | ±0.67% | 90 | ✓ |
|
||||
| NodeBuffer#slice | 4,617,604 ops/sec | ±0.79% | 93 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#writeFloatBE | 66,011 ops/sec | ±0.75% | 93 | |
|
||||
| DataView#setFloat32 | 127,760 ops/sec | ±0.72% | 93 | ✓ |
|
||||
| NodeBuffer#writeFloatBE | 103,352 ops/sec | ±0.83% | 93 | |
|
||||
|
||||
### iojs 1.8.1
|
||||
|
||||
| Method | Operations | Accuracy | Sampled | Fastest |
|
||||
|:-------|:-----------|:---------|:--------|:-------:|
|
||||
| BrowserBuffer#bracket-notation | 10,990,488 ops/sec | ±1.11% | 91 | |
|
||||
| Uint8Array#bracket-notation | 11,268,757 ops/sec | ±0.65% | 97 | |
|
||||
| NodeBuffer#bracket-notation | 11,353,260 ops/sec | ±0.83% | 94 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#concat | 378,954 ops/sec | ±0.74% | 94 | |
|
||||
| Uint8Array#concat | 1,358,288 ops/sec | ±0.97% | 87 | |
|
||||
| NodeBuffer#concat | 1,934,050 ops/sec | ±1.11% | 78 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16000) | 894,538 ops/sec | ±0.56% | 84 | |
|
||||
| Uint8Array#copy(16000) | 1,442,656 ops/sec | ±0.71% | 96 | |
|
||||
| NodeBuffer#copy(16000) | 1,457,898 ops/sec | ±0.53% | 92 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16) | 12,870,457 ops/sec | ±0.67% | 95 | |
|
||||
| Uint8Array#copy(16) | 16,643,989 ops/sec | ±0.61% | 93 | ✓ |
|
||||
| NodeBuffer#copy(16) | 14,885,848 ops/sec | ±0.74% | 94 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16000) | 109,264 ops/sec | ±4.21% | 63 | |
|
||||
| Uint8Array#new(16000) | 138,916 ops/sec | ±1.87% | 61 | |
|
||||
| NodeBuffer#new(16000) | 281,449 ops/sec | ±3.58% | 51 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16) | 1,362,935 ops/sec | ±0.56% | 99 | |
|
||||
| Uint8Array#new(16) | 6,193,090 ops/sec | ±0.64% | 95 | ✓ |
|
||||
| NodeBuffer#new(16) | 4,745,425 ops/sec | ±1.56% | 90 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readDoubleBE | 118,127 ops/sec | ±0.59% | 93 | ✓ |
|
||||
| DataView#getFloat64 | 107,332 ops/sec | ±0.65% | 91 | |
|
||||
| NodeBuffer#readDoubleBE | 116,274 ops/sec | ±0.94% | 95 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readFloatBE | 150,326 ops/sec | ±0.58% | 95 | ✓ |
|
||||
| DataView#getFloat32 | 110,541 ops/sec | ±0.57% | 98 | |
|
||||
| NodeBuffer#readFloatBE | 121,599 ops/sec | ±0.60% | 87 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readUInt32LE | 814,147 ops/sec | ±0.62% | 93 | |
|
||||
| DataView#getUint32 | 137,592 ops/sec | ±0.64% | 90 | |
|
||||
| NodeBuffer#readUInt32LE | 931,650 ops/sec | ±0.71% | 96 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#slice | 878,590 ops/sec | ±0.68% | 93 | |
|
||||
| Uint8Array#subarray | 2,843,308 ops/sec | ±1.02% | 90 | |
|
||||
| NodeBuffer#slice | 4,998,316 ops/sec | ±0.68% | 90 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#writeFloatBE | 65,927 ops/sec | ±0.74% | 93 | |
|
||||
| DataView#setFloat32 | 139,823 ops/sec | ±0.97% | 89 | ✓ |
|
||||
| NodeBuffer#writeFloatBE | 135,763 ops/sec | ±0.65% | 96 | |
|
||||
| | | | |
|
||||
|
||||
## Testing the project
|
||||
|
||||
First, install the project:
|
||||
|
||||
npm install
|
||||
|
||||
Then, to run tests in Node.js, run:
|
||||
|
||||
npm run test-node
|
||||
|
||||
To test locally in a browser, you can run:
|
||||
|
||||
npm run test-browser-es5-local # For ES5 browsers that don't support ES6
|
||||
npm run test-browser-es6-local # For ES6 compliant browsers
|
||||
|
||||
This will print out a URL that you can then open in a browser to run the tests, using [airtap](https://www.npmjs.com/package/airtap).
|
||||
|
||||
To run automated browser tests using Saucelabs, ensure that your `SAUCE_USERNAME` and `SAUCE_ACCESS_KEY` environment variables are set, then run:
|
||||
|
||||
npm test
|
||||
|
||||
This is what's run in Travis, to check against various browsers. The list of browsers is kept in the `bin/airtap-es5.yml` and `bin/airtap-es6.yml` files.
|
||||
|
||||
## JavaScript Standard Style
|
||||
|
||||
This module uses [JavaScript Standard Style](https://github.com/feross/standard).
|
||||
|
||||
[](https://github.com/feross/standard)
|
||||
|
||||
To test that the code conforms to the style, `npm install` and run:
|
||||
|
||||
./node_modules/.bin/standard
|
||||
|
||||
## credit
|
||||
|
||||
This was originally forked from [buffer-browserify](https://github.com/toots/buffer-browserify).
|
||||
|
||||
## Security Policies and Procedures
|
||||
|
||||
The `buffer` team and community take all security bugs in `buffer` seriously. Please see our [security policies and procedures](https://github.com/feross/security) document to learn how to report issues.
|
||||
|
||||
## license
|
||||
|
||||
MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org), and other contributors. Originally forked from an MIT-licensed module by Romain Beauxis.
|
194
node_modules/readable-stream/node_modules/buffer/index.d.ts
generated
vendored
Normal file
194
node_modules/readable-stream/node_modules/buffer/index.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,194 @@
|
|||
export class Buffer extends Uint8Array {
|
||||
length: number
|
||||
write(string: string, offset?: number, length?: number, encoding?: string): number;
|
||||
toString(encoding?: string, start?: number, end?: number): string;
|
||||
toJSON(): { type: 'Buffer', data: any[] };
|
||||
equals(otherBuffer: Buffer): boolean;
|
||||
compare(otherBuffer: Uint8Array, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number;
|
||||
copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
|
||||
slice(start?: number, end?: number): Buffer;
|
||||
writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readUInt8(offset: number, noAssert?: boolean): number;
|
||||
readUInt16LE(offset: number, noAssert?: boolean): number;
|
||||
readUInt16BE(offset: number, noAssert?: boolean): number;
|
||||
readUInt32LE(offset: number, noAssert?: boolean): number;
|
||||
readUInt32BE(offset: number, noAssert?: boolean): number;
|
||||
readBigUInt64LE(offset: number): BigInt;
|
||||
readBigUInt64BE(offset: number): BigInt;
|
||||
readInt8(offset: number, noAssert?: boolean): number;
|
||||
readInt16LE(offset: number, noAssert?: boolean): number;
|
||||
readInt16BE(offset: number, noAssert?: boolean): number;
|
||||
readInt32LE(offset: number, noAssert?: boolean): number;
|
||||
readInt32BE(offset: number, noAssert?: boolean): number;
|
||||
readBigInt64LE(offset: number): BigInt;
|
||||
readBigInt64BE(offset: number): BigInt;
|
||||
readFloatLE(offset: number, noAssert?: boolean): number;
|
||||
readFloatBE(offset: number, noAssert?: boolean): number;
|
||||
readDoubleLE(offset: number, noAssert?: boolean): number;
|
||||
readDoubleBE(offset: number, noAssert?: boolean): number;
|
||||
reverse(): this;
|
||||
swap16(): Buffer;
|
||||
swap32(): Buffer;
|
||||
swap64(): Buffer;
|
||||
writeUInt8(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeUInt16LE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeUInt16BE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeUInt32LE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeUInt32BE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeBigUInt64LE(value: number, offset: number): BigInt;
|
||||
writeBigUInt64BE(value: number, offset: number): BigInt;
|
||||
writeInt8(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeInt16LE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeInt16BE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeInt32LE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeInt32BE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeBigInt64LE(value: number, offset: number): BigInt;
|
||||
writeBigInt64BE(value: number, offset: number): BigInt;
|
||||
writeFloatLE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeFloatBE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeDoubleLE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeDoubleBE(value: number, offset: number, noAssert?: boolean): number;
|
||||
fill(value: any, offset?: number, end?: number): this;
|
||||
indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
|
||||
lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
|
||||
includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean;
|
||||
|
||||
/**
|
||||
* Allocates a new buffer containing the given {str}.
|
||||
*
|
||||
* @param str String to store in buffer.
|
||||
* @param encoding encoding to use, optional. Default is 'utf8'
|
||||
*/
|
||||
constructor (str: string, encoding?: string);
|
||||
/**
|
||||
* Allocates a new buffer of {size} octets.
|
||||
*
|
||||
* @param size count of octets to allocate.
|
||||
*/
|
||||
constructor (size: number);
|
||||
/**
|
||||
* Allocates a new buffer containing the given {array} of octets.
|
||||
*
|
||||
* @param array The octets to store.
|
||||
*/
|
||||
constructor (array: Uint8Array);
|
||||
/**
|
||||
* Produces a Buffer backed by the same allocated memory as
|
||||
* the given {ArrayBuffer}.
|
||||
*
|
||||
*
|
||||
* @param arrayBuffer The ArrayBuffer with which to share memory.
|
||||
*/
|
||||
constructor (arrayBuffer: ArrayBuffer);
|
||||
/**
|
||||
* Allocates a new buffer containing the given {array} of octets.
|
||||
*
|
||||
* @param array The octets to store.
|
||||
*/
|
||||
constructor (array: any[]);
|
||||
/**
|
||||
* Copies the passed {buffer} data onto a new {Buffer} instance.
|
||||
*
|
||||
* @param buffer The buffer to copy.
|
||||
*/
|
||||
constructor (buffer: Buffer);
|
||||
prototype: Buffer;
|
||||
/**
|
||||
* Allocates a new Buffer using an {array} of octets.
|
||||
*
|
||||
* @param array
|
||||
*/
|
||||
static from(array: any[]): Buffer;
|
||||
/**
|
||||
* When passed a reference to the .buffer property of a TypedArray instance,
|
||||
* the newly created Buffer will share the same allocated memory as the TypedArray.
|
||||
* The optional {byteOffset} and {length} arguments specify a memory range
|
||||
* within the {arrayBuffer} that will be shared by the Buffer.
|
||||
*
|
||||
* @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer()
|
||||
* @param byteOffset
|
||||
* @param length
|
||||
*/
|
||||
static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer;
|
||||
/**
|
||||
* Copies the passed {buffer} data onto a new Buffer instance.
|
||||
*
|
||||
* @param buffer
|
||||
*/
|
||||
static from(buffer: Buffer | Uint8Array): Buffer;
|
||||
/**
|
||||
* Creates a new Buffer containing the given JavaScript string {str}.
|
||||
* If provided, the {encoding} parameter identifies the character encoding.
|
||||
* If not provided, {encoding} defaults to 'utf8'.
|
||||
*
|
||||
* @param str
|
||||
*/
|
||||
static from(str: string, encoding?: string): Buffer;
|
||||
/**
|
||||
* Returns true if {obj} is a Buffer
|
||||
*
|
||||
* @param obj object to test.
|
||||
*/
|
||||
static isBuffer(obj: any): obj is Buffer;
|
||||
/**
|
||||
* Returns true if {encoding} is a valid encoding argument.
|
||||
* Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
|
||||
*
|
||||
* @param encoding string to test.
|
||||
*/
|
||||
static isEncoding(encoding: string): boolean;
|
||||
/**
|
||||
* Gives the actual byte length of a string. encoding defaults to 'utf8'.
|
||||
* This is not the same as String.prototype.length since that returns the number of characters in a string.
|
||||
*
|
||||
* @param string string to test.
|
||||
* @param encoding encoding used to evaluate (defaults to 'utf8')
|
||||
*/
|
||||
static byteLength(string: string, encoding?: string): number;
|
||||
/**
|
||||
* Returns a buffer which is the result of concatenating all the buffers in the list together.
|
||||
*
|
||||
* If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer.
|
||||
* If the list has exactly one item, then the first item of the list is returned.
|
||||
* If the list has more than one item, then a new Buffer is created.
|
||||
*
|
||||
* @param list An array of Buffer objects to concatenate
|
||||
* @param totalLength Total length of the buffers when concatenated.
|
||||
* If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly.
|
||||
*/
|
||||
static concat(list: Uint8Array[], totalLength?: number): Buffer;
|
||||
/**
|
||||
* The same as buf1.compare(buf2).
|
||||
*/
|
||||
static compare(buf1: Uint8Array, buf2: Uint8Array): number;
|
||||
/**
|
||||
* Allocates a new buffer of {size} octets.
|
||||
*
|
||||
* @param size count of octets to allocate.
|
||||
* @param fill if specified, buffer will be initialized by calling buf.fill(fill).
|
||||
* If parameter is omitted, buffer will be filled with zeros.
|
||||
* @param encoding encoding used for call to buf.fill while initializing
|
||||
*/
|
||||
static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer;
|
||||
/**
|
||||
* Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents
|
||||
* of the newly created Buffer are unknown and may contain sensitive data.
|
||||
*
|
||||
* @param size count of octets to allocate
|
||||
*/
|
||||
static allocUnsafe(size: number): Buffer;
|
||||
/**
|
||||
* Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents
|
||||
* of the newly created Buffer are unknown and may contain sensitive data.
|
||||
*
|
||||
* @param size count of octets to allocate
|
||||
*/
|
||||
static allocUnsafeSlow(size: number): Buffer;
|
||||
}
|
2106
node_modules/readable-stream/node_modules/buffer/index.js
generated
vendored
Normal file
2106
node_modules/readable-stream/node_modules/buffer/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
93
node_modules/readable-stream/node_modules/buffer/package.json
generated
vendored
Normal file
93
node_modules/readable-stream/node_modules/buffer/package.json
generated
vendored
Normal file
|
@ -0,0 +1,93 @@
|
|||
{
|
||||
"name": "buffer",
|
||||
"description": "Node.js Buffer API, for the browser",
|
||||
"version": "6.0.3",
|
||||
"author": {
|
||||
"name": "Feross Aboukhadijeh",
|
||||
"email": "feross@feross.org",
|
||||
"url": "https://feross.org"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/feross/buffer/issues"
|
||||
},
|
||||
"contributors": [
|
||||
"Romain Beauxis <toots@rastageeks.org>",
|
||||
"James Halliday <mail@substack.net>"
|
||||
],
|
||||
"dependencies": {
|
||||
"base64-js": "^1.3.1",
|
||||
"ieee754": "^1.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"airtap": "^3.0.0",
|
||||
"benchmark": "^2.1.4",
|
||||
"browserify": "^17.0.0",
|
||||
"concat-stream": "^2.0.0",
|
||||
"hyperquest": "^2.1.3",
|
||||
"is-buffer": "^2.0.5",
|
||||
"is-nan": "^1.3.0",
|
||||
"split": "^1.0.1",
|
||||
"standard": "*",
|
||||
"tape": "^5.0.1",
|
||||
"through2": "^4.0.2",
|
||||
"uglify-js": "^3.11.5"
|
||||
},
|
||||
"homepage": "https://github.com/feross/buffer",
|
||||
"jspm": {
|
||||
"map": {
|
||||
"./index.js": {
|
||||
"node": "@node/buffer"
|
||||
}
|
||||
}
|
||||
},
|
||||
"keywords": [
|
||||
"arraybuffer",
|
||||
"browser",
|
||||
"browserify",
|
||||
"buffer",
|
||||
"compatible",
|
||||
"dataview",
|
||||
"uint8array"
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/feross/buffer.git"
|
||||
},
|
||||
"scripts": {
|
||||
"perf": "browserify --debug perf/bracket-notation.js > perf/bundle.js && open perf/index.html",
|
||||
"perf-node": "node perf/bracket-notation.js && node perf/concat.js && node perf/copy-big.js && node perf/copy.js && node perf/new-big.js && node perf/new.js && node perf/readDoubleBE.js && node perf/readFloatBE.js && node perf/readUInt32LE.js && node perf/slice.js && node perf/writeFloatBE.js",
|
||||
"size": "browserify -r ./ | uglifyjs -c -m | gzip | wc -c",
|
||||
"test": "standard && node ./bin/test.js",
|
||||
"test-browser-old": "airtap -- test/*.js",
|
||||
"test-browser-old-local": "airtap --local -- test/*.js",
|
||||
"test-browser-new": "airtap -- test/*.js test/node/*.js",
|
||||
"test-browser-new-local": "airtap --local -- test/*.js test/node/*.js",
|
||||
"test-node": "tape test/*.js test/node/*.js",
|
||||
"update-authors": "./bin/update-authors.sh"
|
||||
},
|
||||
"standard": {
|
||||
"ignore": [
|
||||
"test/node/**/*.js",
|
||||
"test/common.js",
|
||||
"test/_polyfill.js",
|
||||
"perf/**/*.js"
|
||||
]
|
||||
},
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/feross"
|
||||
},
|
||||
{
|
||||
"type": "patreon",
|
||||
"url": "https://www.patreon.com/feross"
|
||||
},
|
||||
{
|
||||
"type": "consulting",
|
||||
"url": "https://feross.org/support"
|
||||
}
|
||||
]
|
||||
}
|
86
node_modules/readable-stream/package.json
generated
vendored
Normal file
86
node_modules/readable-stream/package.json
generated
vendored
Normal file
|
@ -0,0 +1,86 @@
|
|||
{
|
||||
"name": "readable-stream",
|
||||
"version": "4.5.2",
|
||||
"description": "Node.js Streams, a user-land copy of the stream library from Node.js",
|
||||
"homepage": "https://github.com/nodejs/readable-stream",
|
||||
"license": "MIT",
|
||||
"licenses": [
|
||||
{
|
||||
"type": "MIT",
|
||||
"url": "https://choosealicense.com/licenses/mit/"
|
||||
}
|
||||
],
|
||||
"keywords": [
|
||||
"readable",
|
||||
"stream",
|
||||
"pipe"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/nodejs/readable-stream"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/nodejs/readable-stream/issues"
|
||||
},
|
||||
"main": "lib/ours/index.js",
|
||||
"files": [
|
||||
"lib",
|
||||
"LICENSE",
|
||||
"README.md"
|
||||
],
|
||||
"browser": {
|
||||
"util": "./lib/ours/util.js",
|
||||
"./lib/ours/index.js": "./lib/ours/browser.js"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "node build/build.mjs",
|
||||
"postbuild": "prettier -w lib test",
|
||||
"test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js",
|
||||
"test:prepare": "node test/browser/runner-prepare.mjs",
|
||||
"test:browsers": "node test/browser/runner-browser.mjs",
|
||||
"test:bundlers": "node test/browser/runner-node.mjs",
|
||||
"test:readable-stream-only": "node readable-stream-test/runner-prepare.mjs",
|
||||
"coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js",
|
||||
"format": "prettier -w src lib test",
|
||||
"lint": "eslint src"
|
||||
},
|
||||
"dependencies": {
|
||||
"abort-controller": "^3.0.0",
|
||||
"buffer": "^6.0.3",
|
||||
"events": "^3.3.0",
|
||||
"process": "^0.11.10",
|
||||
"string_decoder": "^1.3.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.17.10",
|
||||
"@babel/plugin-proposal-nullish-coalescing-operator": "^7.16.7",
|
||||
"@babel/plugin-proposal-optional-chaining": "^7.16.7",
|
||||
"@rollup/plugin-commonjs": "^22.0.0",
|
||||
"@rollup/plugin-inject": "^4.0.4",
|
||||
"@rollup/plugin-node-resolve": "^13.3.0",
|
||||
"@sinonjs/fake-timers": "^9.1.2",
|
||||
"browserify": "^17.0.0",
|
||||
"c8": "^7.11.2",
|
||||
"esbuild": "^0.19.9",
|
||||
"esbuild-plugin-alias": "^0.2.1",
|
||||
"eslint": "^8.15.0",
|
||||
"eslint-config-standard": "^17.0.0",
|
||||
"eslint-plugin-import": "^2.26.0",
|
||||
"eslint-plugin-n": "^15.2.0",
|
||||
"eslint-plugin-promise": "^6.0.0",
|
||||
"playwright": "^1.21.1",
|
||||
"prettier": "^2.6.2",
|
||||
"rollup": "^2.72.1",
|
||||
"rollup-plugin-polyfill-node": "^0.9.0",
|
||||
"tap": "^16.2.0",
|
||||
"tap-mocha-reporter": "^5.0.3",
|
||||
"tape": "^5.5.3",
|
||||
"tar": "^6.1.11",
|
||||
"undici": "^5.1.1",
|
||||
"webpack": "^5.72.1",
|
||||
"webpack-cli": "^4.9.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
|
||||
}
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue