Updated the files.
This commit is contained in:
parent
1553e6b971
commit
753967d4f5
23418 changed files with 3784666 additions and 0 deletions
1
my-app/node_modules/tar/node_modules/.bin/mkdirp
generated
vendored
Symbolic link
1
my-app/node_modules/tar/node_modules/.bin/mkdirp
generated
vendored
Symbolic link
|
@ -0,0 +1 @@
|
|||
../mkdirp/bin/cmd.js
|
15
my-app/node_modules/tar/node_modules/fs-minipass/LICENSE
generated
vendored
Executable file
15
my-app/node_modules/tar/node_modules/fs-minipass/LICENSE
generated
vendored
Executable file
|
@ -0,0 +1,15 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
70
my-app/node_modules/tar/node_modules/fs-minipass/README.md
generated
vendored
Executable file
70
my-app/node_modules/tar/node_modules/fs-minipass/README.md
generated
vendored
Executable file
|
@ -0,0 +1,70 @@
|
|||
# fs-minipass
|
||||
|
||||
Filesystem streams based on [minipass](http://npm.im/minipass).
|
||||
|
||||
4 classes are exported:
|
||||
|
||||
- ReadStream
|
||||
- ReadStreamSync
|
||||
- WriteStream
|
||||
- WriteStreamSync
|
||||
|
||||
When using `ReadStreamSync`, all of the data is made available
|
||||
immediately upon consuming the stream. Nothing is buffered in memory
|
||||
when the stream is constructed. If the stream is piped to a writer,
|
||||
then it will synchronously `read()` and emit data into the writer as
|
||||
fast as the writer can consume it. (That is, it will respect
|
||||
backpressure.) If you call `stream.read()` then it will read the
|
||||
entire file and return the contents.
|
||||
|
||||
When using `WriteStreamSync`, every write is flushed to the file
|
||||
synchronously. If your writes all come in a single tick, then it'll
|
||||
write it all out in a single tick. It's as synchronous as you are.
|
||||
|
||||
The async versions work much like their node builtin counterparts,
|
||||
with the exception of introducing significantly less Stream machinery
|
||||
overhead.
|
||||
|
||||
## USAGE
|
||||
|
||||
It's just streams, you pipe them or read() them or write() to them.
|
||||
|
||||
```js
|
||||
const fsm = require('fs-minipass')
|
||||
const readStream = new fsm.ReadStream('file.txt')
|
||||
const writeStream = new fsm.WriteStream('output.txt')
|
||||
writeStream.write('some file header or whatever\n')
|
||||
readStream.pipe(writeStream)
|
||||
```
|
||||
|
||||
## ReadStream(path, options)
|
||||
|
||||
Path string is required, but somewhat irrelevant if an open file
|
||||
descriptor is passed in as an option.
|
||||
|
||||
Options:
|
||||
|
||||
- `fd` Pass in a numeric file descriptor, if the file is already open.
|
||||
- `readSize` The size of reads to do, defaults to 16MB
|
||||
- `size` The size of the file, if known. Prevents zero-byte read()
|
||||
call at the end.
|
||||
- `autoClose` Set to `false` to prevent the file descriptor from being
|
||||
closed when the file is done being read.
|
||||
|
||||
## WriteStream(path, options)
|
||||
|
||||
Path string is required, but somewhat irrelevant if an open file
|
||||
descriptor is passed in as an option.
|
||||
|
||||
Options:
|
||||
|
||||
- `fd` Pass in a numeric file descriptor, if the file is already open.
|
||||
- `mode` The mode to create the file with. Defaults to `0o666`.
|
||||
- `start` The position in the file to start reading. If not
|
||||
specified, then the file will start writing at position zero, and be
|
||||
truncated by default.
|
||||
- `autoClose` Set to `false` to prevent the file descriptor from being
|
||||
closed when the stream is ended.
|
||||
- `flags` Flags to use when opening the file. Irrelevant if `fd` is
|
||||
passed in, since file won't be opened in that case. Defaults to
|
||||
`'a'` if a `pos` is specified, or `'w'` otherwise.
|
422
my-app/node_modules/tar/node_modules/fs-minipass/index.js
generated
vendored
Executable file
422
my-app/node_modules/tar/node_modules/fs-minipass/index.js
generated
vendored
Executable file
|
@ -0,0 +1,422 @@
|
|||
'use strict'
|
||||
const MiniPass = require('minipass')
|
||||
const EE = require('events').EventEmitter
|
||||
const fs = require('fs')
|
||||
|
||||
let writev = fs.writev
|
||||
/* istanbul ignore next */
|
||||
if (!writev) {
|
||||
// This entire block can be removed if support for earlier than Node.js
|
||||
// 12.9.0 is not needed.
|
||||
const binding = process.binding('fs')
|
||||
const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback
|
||||
|
||||
writev = (fd, iovec, pos, cb) => {
|
||||
const done = (er, bw) => cb(er, bw, iovec)
|
||||
const req = new FSReqWrap()
|
||||
req.oncomplete = done
|
||||
binding.writeBuffers(fd, iovec, pos, req)
|
||||
}
|
||||
}
|
||||
|
||||
const _autoClose = Symbol('_autoClose')
|
||||
const _close = Symbol('_close')
|
||||
const _ended = Symbol('_ended')
|
||||
const _fd = Symbol('_fd')
|
||||
const _finished = Symbol('_finished')
|
||||
const _flags = Symbol('_flags')
|
||||
const _flush = Symbol('_flush')
|
||||
const _handleChunk = Symbol('_handleChunk')
|
||||
const _makeBuf = Symbol('_makeBuf')
|
||||
const _mode = Symbol('_mode')
|
||||
const _needDrain = Symbol('_needDrain')
|
||||
const _onerror = Symbol('_onerror')
|
||||
const _onopen = Symbol('_onopen')
|
||||
const _onread = Symbol('_onread')
|
||||
const _onwrite = Symbol('_onwrite')
|
||||
const _open = Symbol('_open')
|
||||
const _path = Symbol('_path')
|
||||
const _pos = Symbol('_pos')
|
||||
const _queue = Symbol('_queue')
|
||||
const _read = Symbol('_read')
|
||||
const _readSize = Symbol('_readSize')
|
||||
const _reading = Symbol('_reading')
|
||||
const _remain = Symbol('_remain')
|
||||
const _size = Symbol('_size')
|
||||
const _write = Symbol('_write')
|
||||
const _writing = Symbol('_writing')
|
||||
const _defaultFlag = Symbol('_defaultFlag')
|
||||
const _errored = Symbol('_errored')
|
||||
|
||||
class ReadStream extends MiniPass {
|
||||
constructor (path, opt) {
|
||||
opt = opt || {}
|
||||
super(opt)
|
||||
|
||||
this.readable = true
|
||||
this.writable = false
|
||||
|
||||
if (typeof path !== 'string')
|
||||
throw new TypeError('path must be a string')
|
||||
|
||||
this[_errored] = false
|
||||
this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
|
||||
this[_path] = path
|
||||
this[_readSize] = opt.readSize || 16*1024*1024
|
||||
this[_reading] = false
|
||||
this[_size] = typeof opt.size === 'number' ? opt.size : Infinity
|
||||
this[_remain] = this[_size]
|
||||
this[_autoClose] = typeof opt.autoClose === 'boolean' ?
|
||||
opt.autoClose : true
|
||||
|
||||
if (typeof this[_fd] === 'number')
|
||||
this[_read]()
|
||||
else
|
||||
this[_open]()
|
||||
}
|
||||
|
||||
get fd () { return this[_fd] }
|
||||
get path () { return this[_path] }
|
||||
|
||||
write () {
|
||||
throw new TypeError('this is a readable stream')
|
||||
}
|
||||
|
||||
end () {
|
||||
throw new TypeError('this is a readable stream')
|
||||
}
|
||||
|
||||
[_open] () {
|
||||
fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd))
|
||||
}
|
||||
|
||||
[_onopen] (er, fd) {
|
||||
if (er)
|
||||
this[_onerror](er)
|
||||
else {
|
||||
this[_fd] = fd
|
||||
this.emit('open', fd)
|
||||
this[_read]()
|
||||
}
|
||||
}
|
||||
|
||||
[_makeBuf] () {
|
||||
return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]))
|
||||
}
|
||||
|
||||
[_read] () {
|
||||
if (!this[_reading]) {
|
||||
this[_reading] = true
|
||||
const buf = this[_makeBuf]()
|
||||
/* istanbul ignore if */
|
||||
if (buf.length === 0)
|
||||
return process.nextTick(() => this[_onread](null, 0, buf))
|
||||
fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) =>
|
||||
this[_onread](er, br, buf))
|
||||
}
|
||||
}
|
||||
|
||||
[_onread] (er, br, buf) {
|
||||
this[_reading] = false
|
||||
if (er)
|
||||
this[_onerror](er)
|
||||
else if (this[_handleChunk](br, buf))
|
||||
this[_read]()
|
||||
}
|
||||
|
||||
[_close] () {
|
||||
if (this[_autoClose] && typeof this[_fd] === 'number') {
|
||||
const fd = this[_fd]
|
||||
this[_fd] = null
|
||||
fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
|
||||
}
|
||||
}
|
||||
|
||||
[_onerror] (er) {
|
||||
this[_reading] = true
|
||||
this[_close]()
|
||||
this.emit('error', er)
|
||||
}
|
||||
|
||||
[_handleChunk] (br, buf) {
|
||||
let ret = false
|
||||
// no effect if infinite
|
||||
this[_remain] -= br
|
||||
if (br > 0)
|
||||
ret = super.write(br < buf.length ? buf.slice(0, br) : buf)
|
||||
|
||||
if (br === 0 || this[_remain] <= 0) {
|
||||
ret = false
|
||||
this[_close]()
|
||||
super.end()
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
emit (ev, data) {
|
||||
switch (ev) {
|
||||
case 'prefinish':
|
||||
case 'finish':
|
||||
break
|
||||
|
||||
case 'drain':
|
||||
if (typeof this[_fd] === 'number')
|
||||
this[_read]()
|
||||
break
|
||||
|
||||
case 'error':
|
||||
if (this[_errored])
|
||||
return
|
||||
this[_errored] = true
|
||||
return super.emit(ev, data)
|
||||
|
||||
default:
|
||||
return super.emit(ev, data)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class ReadStreamSync extends ReadStream {
|
||||
[_open] () {
|
||||
let threw = true
|
||||
try {
|
||||
this[_onopen](null, fs.openSync(this[_path], 'r'))
|
||||
threw = false
|
||||
} finally {
|
||||
if (threw)
|
||||
this[_close]()
|
||||
}
|
||||
}
|
||||
|
||||
[_read] () {
|
||||
let threw = true
|
||||
try {
|
||||
if (!this[_reading]) {
|
||||
this[_reading] = true
|
||||
do {
|
||||
const buf = this[_makeBuf]()
|
||||
/* istanbul ignore next */
|
||||
const br = buf.length === 0 ? 0
|
||||
: fs.readSync(this[_fd], buf, 0, buf.length, null)
|
||||
if (!this[_handleChunk](br, buf))
|
||||
break
|
||||
} while (true)
|
||||
this[_reading] = false
|
||||
}
|
||||
threw = false
|
||||
} finally {
|
||||
if (threw)
|
||||
this[_close]()
|
||||
}
|
||||
}
|
||||
|
||||
[_close] () {
|
||||
if (this[_autoClose] && typeof this[_fd] === 'number') {
|
||||
const fd = this[_fd]
|
||||
this[_fd] = null
|
||||
fs.closeSync(fd)
|
||||
this.emit('close')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class WriteStream extends EE {
|
||||
constructor (path, opt) {
|
||||
opt = opt || {}
|
||||
super(opt)
|
||||
this.readable = false
|
||||
this.writable = true
|
||||
this[_errored] = false
|
||||
this[_writing] = false
|
||||
this[_ended] = false
|
||||
this[_needDrain] = false
|
||||
this[_queue] = []
|
||||
this[_path] = path
|
||||
this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
|
||||
this[_mode] = opt.mode === undefined ? 0o666 : opt.mode
|
||||
this[_pos] = typeof opt.start === 'number' ? opt.start : null
|
||||
this[_autoClose] = typeof opt.autoClose === 'boolean' ?
|
||||
opt.autoClose : true
|
||||
|
||||
// truncating makes no sense when writing into the middle
|
||||
const defaultFlag = this[_pos] !== null ? 'r+' : 'w'
|
||||
this[_defaultFlag] = opt.flags === undefined
|
||||
this[_flags] = this[_defaultFlag] ? defaultFlag : opt.flags
|
||||
|
||||
if (this[_fd] === null)
|
||||
this[_open]()
|
||||
}
|
||||
|
||||
emit (ev, data) {
|
||||
if (ev === 'error') {
|
||||
if (this[_errored])
|
||||
return
|
||||
this[_errored] = true
|
||||
}
|
||||
return super.emit(ev, data)
|
||||
}
|
||||
|
||||
|
||||
get fd () { return this[_fd] }
|
||||
get path () { return this[_path] }
|
||||
|
||||
[_onerror] (er) {
|
||||
this[_close]()
|
||||
this[_writing] = true
|
||||
this.emit('error', er)
|
||||
}
|
||||
|
||||
[_open] () {
|
||||
fs.open(this[_path], this[_flags], this[_mode],
|
||||
(er, fd) => this[_onopen](er, fd))
|
||||
}
|
||||
|
||||
[_onopen] (er, fd) {
|
||||
if (this[_defaultFlag] &&
|
||||
this[_flags] === 'r+' &&
|
||||
er && er.code === 'ENOENT') {
|
||||
this[_flags] = 'w'
|
||||
this[_open]()
|
||||
} else if (er)
|
||||
this[_onerror](er)
|
||||
else {
|
||||
this[_fd] = fd
|
||||
this.emit('open', fd)
|
||||
this[_flush]()
|
||||
}
|
||||
}
|
||||
|
||||
end (buf, enc) {
|
||||
if (buf)
|
||||
this.write(buf, enc)
|
||||
|
||||
this[_ended] = true
|
||||
|
||||
// synthetic after-write logic, where drain/finish live
|
||||
if (!this[_writing] && !this[_queue].length &&
|
||||
typeof this[_fd] === 'number')
|
||||
this[_onwrite](null, 0)
|
||||
return this
|
||||
}
|
||||
|
||||
write (buf, enc) {
|
||||
if (typeof buf === 'string')
|
||||
buf = Buffer.from(buf, enc)
|
||||
|
||||
if (this[_ended]) {
|
||||
this.emit('error', new Error('write() after end()'))
|
||||
return false
|
||||
}
|
||||
|
||||
if (this[_fd] === null || this[_writing] || this[_queue].length) {
|
||||
this[_queue].push(buf)
|
||||
this[_needDrain] = true
|
||||
return false
|
||||
}
|
||||
|
||||
this[_writing] = true
|
||||
this[_write](buf)
|
||||
return true
|
||||
}
|
||||
|
||||
[_write] (buf) {
|
||||
fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) =>
|
||||
this[_onwrite](er, bw))
|
||||
}
|
||||
|
||||
[_onwrite] (er, bw) {
|
||||
if (er)
|
||||
this[_onerror](er)
|
||||
else {
|
||||
if (this[_pos] !== null)
|
||||
this[_pos] += bw
|
||||
if (this[_queue].length)
|
||||
this[_flush]()
|
||||
else {
|
||||
this[_writing] = false
|
||||
|
||||
if (this[_ended] && !this[_finished]) {
|
||||
this[_finished] = true
|
||||
this[_close]()
|
||||
this.emit('finish')
|
||||
} else if (this[_needDrain]) {
|
||||
this[_needDrain] = false
|
||||
this.emit('drain')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[_flush] () {
|
||||
if (this[_queue].length === 0) {
|
||||
if (this[_ended])
|
||||
this[_onwrite](null, 0)
|
||||
} else if (this[_queue].length === 1)
|
||||
this[_write](this[_queue].pop())
|
||||
else {
|
||||
const iovec = this[_queue]
|
||||
this[_queue] = []
|
||||
writev(this[_fd], iovec, this[_pos],
|
||||
(er, bw) => this[_onwrite](er, bw))
|
||||
}
|
||||
}
|
||||
|
||||
[_close] () {
|
||||
if (this[_autoClose] && typeof this[_fd] === 'number') {
|
||||
const fd = this[_fd]
|
||||
this[_fd] = null
|
||||
fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class WriteStreamSync extends WriteStream {
|
||||
[_open] () {
|
||||
let fd
|
||||
// only wrap in a try{} block if we know we'll retry, to avoid
|
||||
// the rethrow obscuring the error's source frame in most cases.
|
||||
if (this[_defaultFlag] && this[_flags] === 'r+') {
|
||||
try {
|
||||
fd = fs.openSync(this[_path], this[_flags], this[_mode])
|
||||
} catch (er) {
|
||||
if (er.code === 'ENOENT') {
|
||||
this[_flags] = 'w'
|
||||
return this[_open]()
|
||||
} else
|
||||
throw er
|
||||
}
|
||||
} else
|
||||
fd = fs.openSync(this[_path], this[_flags], this[_mode])
|
||||
|
||||
this[_onopen](null, fd)
|
||||
}
|
||||
|
||||
[_close] () {
|
||||
if (this[_autoClose] && typeof this[_fd] === 'number') {
|
||||
const fd = this[_fd]
|
||||
this[_fd] = null
|
||||
fs.closeSync(fd)
|
||||
this.emit('close')
|
||||
}
|
||||
}
|
||||
|
||||
[_write] (buf) {
|
||||
// throw the original, but try to close if it fails
|
||||
let threw = true
|
||||
try {
|
||||
this[_onwrite](null,
|
||||
fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos]))
|
||||
threw = false
|
||||
} finally {
|
||||
if (threw)
|
||||
try { this[_close]() } catch (_) {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
exports.ReadStream = ReadStream
|
||||
exports.ReadStreamSync = ReadStreamSync
|
||||
|
||||
exports.WriteStream = WriteStream
|
||||
exports.WriteStreamSync = WriteStreamSync
|
15
my-app/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/LICENSE
generated
vendored
Executable file
15
my-app/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/LICENSE
generated
vendored
Executable file
|
@ -0,0 +1,15 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
728
my-app/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/README.md
generated
vendored
Executable file
728
my-app/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/README.md
generated
vendored
Executable file
|
@ -0,0 +1,728 @@
|
|||
# minipass
|
||||
|
||||
A _very_ minimal implementation of a [PassThrough
|
||||
stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough)
|
||||
|
||||
[It's very
|
||||
fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0)
|
||||
for objects, strings, and buffers.
|
||||
|
||||
Supports `pipe()`ing (including multi-`pipe()` and backpressure transmission),
|
||||
buffering data until either a `data` event handler or `pipe()` is added (so
|
||||
you don't lose the first chunk), and most other cases where PassThrough is
|
||||
a good idea.
|
||||
|
||||
There is a `read()` method, but it's much more efficient to consume data
|
||||
from this stream via `'data'` events or by calling `pipe()` into some other
|
||||
stream. Calling `read()` requires the buffer to be flattened in some
|
||||
cases, which requires copying memory.
|
||||
|
||||
If you set `objectMode: true` in the options, then whatever is written will
|
||||
be emitted. Otherwise, it'll do a minimal amount of Buffer copying to
|
||||
ensure proper Streams semantics when `read(n)` is called.
|
||||
|
||||
`objectMode` can also be set by doing `stream.objectMode = true`, or by
|
||||
writing any non-string/non-buffer data. `objectMode` cannot be set to
|
||||
false once it is set.
|
||||
|
||||
This is not a `through` or `through2` stream. It doesn't transform the
|
||||
data, it just passes it right through. If you want to transform the data,
|
||||
extend the class, and override the `write()` method. Once you're done
|
||||
transforming the data however you want, call `super.write()` with the
|
||||
transform output.
|
||||
|
||||
For some examples of streams that extend Minipass in various ways, check
|
||||
out:
|
||||
|
||||
- [minizlib](http://npm.im/minizlib)
|
||||
- [fs-minipass](http://npm.im/fs-minipass)
|
||||
- [tar](http://npm.im/tar)
|
||||
- [minipass-collect](http://npm.im/minipass-collect)
|
||||
- [minipass-flush](http://npm.im/minipass-flush)
|
||||
- [minipass-pipeline](http://npm.im/minipass-pipeline)
|
||||
- [tap](http://npm.im/tap)
|
||||
- [tap-parser](http://npm.im/tap-parser)
|
||||
- [treport](http://npm.im/treport)
|
||||
- [minipass-fetch](http://npm.im/minipass-fetch)
|
||||
- [pacote](http://npm.im/pacote)
|
||||
- [make-fetch-happen](http://npm.im/make-fetch-happen)
|
||||
- [cacache](http://npm.im/cacache)
|
||||
- [ssri](http://npm.im/ssri)
|
||||
- [npm-registry-fetch](http://npm.im/npm-registry-fetch)
|
||||
- [minipass-json-stream](http://npm.im/minipass-json-stream)
|
||||
- [minipass-sized](http://npm.im/minipass-sized)
|
||||
|
||||
## Differences from Node.js Streams
|
||||
|
||||
There are several things that make Minipass streams different from (and in
|
||||
some ways superior to) Node.js core streams.
|
||||
|
||||
Please read these caveats if you are familiar with node-core streams and
|
||||
intend to use Minipass streams in your programs.
|
||||
|
||||
You can avoid most of these differences entirely (for a very
|
||||
small performance penalty) by setting `{async: true}` in the
|
||||
constructor options.
|
||||
|
||||
### Timing
|
||||
|
||||
Minipass streams are designed to support synchronous use-cases. Thus, data
|
||||
is emitted as soon as it is available, always. It is buffered until read,
|
||||
but no longer. Another way to look at it is that Minipass streams are
|
||||
exactly as synchronous as the logic that writes into them.
|
||||
|
||||
This can be surprising if your code relies on `PassThrough.write()` always
|
||||
providing data on the next tick rather than the current one, or being able
|
||||
to call `resume()` and not have the entire buffer disappear immediately.
|
||||
|
||||
However, without this synchronicity guarantee, there would be no way for
|
||||
Minipass to achieve the speeds it does, or support the synchronous use
|
||||
cases that it does. Simply put, waiting takes time.
|
||||
|
||||
This non-deferring approach makes Minipass streams much easier to reason
|
||||
about, especially in the context of Promises and other flow-control
|
||||
mechanisms.
|
||||
|
||||
Example:
|
||||
|
||||
```js
|
||||
const Minipass = require('minipass')
|
||||
const stream = new Minipass({ async: true })
|
||||
stream.on('data', () => console.log('data event'))
|
||||
console.log('before write')
|
||||
stream.write('hello')
|
||||
console.log('after write')
|
||||
// output:
|
||||
// before write
|
||||
// data event
|
||||
// after write
|
||||
```
|
||||
|
||||
### Exception: Async Opt-In
|
||||
|
||||
If you wish to have a Minipass stream with behavior that more
|
||||
closely mimics Node.js core streams, you can set the stream in
|
||||
async mode either by setting `async: true` in the constructor
|
||||
options, or by setting `stream.async = true` later on.
|
||||
|
||||
```js
|
||||
const Minipass = require('minipass')
|
||||
const asyncStream = new Minipass({ async: true })
|
||||
asyncStream.on('data', () => console.log('data event'))
|
||||
console.log('before write')
|
||||
asyncStream.write('hello')
|
||||
console.log('after write')
|
||||
// output:
|
||||
// before write
|
||||
// after write
|
||||
// data event <-- this is deferred until the next tick
|
||||
```
|
||||
|
||||
Switching _out_ of async mode is unsafe, as it could cause data
|
||||
corruption, and so is not enabled. Example:
|
||||
|
||||
```js
|
||||
const Minipass = require('minipass')
|
||||
const stream = new Minipass({ encoding: 'utf8' })
|
||||
stream.on('data', chunk => console.log(chunk))
|
||||
stream.async = true
|
||||
console.log('before writes')
|
||||
stream.write('hello')
|
||||
setStreamSyncAgainSomehow(stream) // <-- this doesn't actually exist!
|
||||
stream.write('world')
|
||||
console.log('after writes')
|
||||
// hypothetical output would be:
|
||||
// before writes
|
||||
// world
|
||||
// after writes
|
||||
// hello
|
||||
// NOT GOOD!
|
||||
```
|
||||
|
||||
To avoid this problem, once set into async mode, any attempt to
|
||||
make the stream sync again will be ignored.
|
||||
|
||||
```js
|
||||
const Minipass = require('minipass')
|
||||
const stream = new Minipass({ encoding: 'utf8' })
|
||||
stream.on('data', chunk => console.log(chunk))
|
||||
stream.async = true
|
||||
console.log('before writes')
|
||||
stream.write('hello')
|
||||
stream.async = false // <-- no-op, stream already async
|
||||
stream.write('world')
|
||||
console.log('after writes')
|
||||
// actual output:
|
||||
// before writes
|
||||
// after writes
|
||||
// hello
|
||||
// world
|
||||
```
|
||||
|
||||
### No High/Low Water Marks
|
||||
|
||||
Node.js core streams will optimistically fill up a buffer, returning `true`
|
||||
on all writes until the limit is hit, even if the data has nowhere to go.
|
||||
Then, they will not attempt to draw more data in until the buffer size dips
|
||||
below a minimum value.
|
||||
|
||||
Minipass streams are much simpler. The `write()` method will return `true`
|
||||
if the data has somewhere to go (which is to say, given the timing
|
||||
guarantees, that the data is already there by the time `write()` returns).
|
||||
|
||||
If the data has nowhere to go, then `write()` returns false, and the data
|
||||
sits in a buffer, to be drained out immediately as soon as anyone consumes
|
||||
it.
|
||||
|
||||
Since nothing is ever buffered unnecessarily, there is much less
|
||||
copying data, and less bookkeeping about buffer capacity levels.
|
||||
|
||||
### Hazards of Buffering (or: Why Minipass Is So Fast)
|
||||
|
||||
Since data written to a Minipass stream is immediately written all the way
|
||||
through the pipeline, and `write()` always returns true/false based on
|
||||
whether the data was fully flushed, backpressure is communicated
|
||||
immediately to the upstream caller. This minimizes buffering.
|
||||
|
||||
Consider this case:
|
||||
|
||||
```js
|
||||
const {PassThrough} = require('stream')
|
||||
const p1 = new PassThrough({ highWaterMark: 1024 })
|
||||
const p2 = new PassThrough({ highWaterMark: 1024 })
|
||||
const p3 = new PassThrough({ highWaterMark: 1024 })
|
||||
const p4 = new PassThrough({ highWaterMark: 1024 })
|
||||
|
||||
p1.pipe(p2).pipe(p3).pipe(p4)
|
||||
p4.on('data', () => console.log('made it through'))
|
||||
|
||||
// this returns false and buffers, then writes to p2 on next tick (1)
|
||||
// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2)
|
||||
// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3)
|
||||
// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain'
|
||||
// on next tick (4)
|
||||
// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and
|
||||
// 'drain' on next tick (5)
|
||||
// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6)
|
||||
// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next
|
||||
// tick (7)
|
||||
|
||||
p1.write(Buffer.alloc(2048)) // returns false
|
||||
```
|
||||
|
||||
Along the way, the data was buffered and deferred at each stage, and
|
||||
multiple event deferrals happened, for an unblocked pipeline where it was
|
||||
perfectly safe to write all the way through!
|
||||
|
||||
Furthermore, setting a `highWaterMark` of `1024` might lead someone reading
|
||||
the code to think an advisory maximum of 1KiB is being set for the
|
||||
pipeline. However, the actual advisory buffering level is the _sum_ of
|
||||
`highWaterMark` values, since each one has its own bucket.
|
||||
|
||||
Consider the Minipass case:
|
||||
|
||||
```js
|
||||
const m1 = new Minipass()
|
||||
const m2 = new Minipass()
|
||||
const m3 = new Minipass()
|
||||
const m4 = new Minipass()
|
||||
|
||||
m1.pipe(m2).pipe(m3).pipe(m4)
|
||||
m4.on('data', () => console.log('made it through'))
|
||||
|
||||
// m1 is flowing, so it writes the data to m2 immediately
|
||||
// m2 is flowing, so it writes the data to m3 immediately
|
||||
// m3 is flowing, so it writes the data to m4 immediately
|
||||
// m4 is flowing, so it fires the 'data' event immediately, returns true
|
||||
// m4's write returned true, so m3 is still flowing, returns true
|
||||
// m3's write returned true, so m2 is still flowing, returns true
|
||||
// m2's write returned true, so m1 is still flowing, returns true
|
||||
// No event deferrals or buffering along the way!
|
||||
|
||||
m1.write(Buffer.alloc(2048)) // returns true
|
||||
```
|
||||
|
||||
It is extremely unlikely that you _don't_ want to buffer any data written,
|
||||
or _ever_ buffer data that can be flushed all the way through. Neither
|
||||
node-core streams nor Minipass ever fail to buffer written data, but
|
||||
node-core streams do a lot of unnecessary buffering and pausing.
|
||||
|
||||
As always, the faster implementation is the one that does less stuff and
|
||||
waits less time to do it.
|
||||
|
||||
### Immediately emit `end` for empty streams (when not paused)
|
||||
|
||||
If a stream is not paused, and `end()` is called before writing any data
|
||||
into it, then it will emit `end` immediately.
|
||||
|
||||
If you have logic that occurs on the `end` event which you don't want to
|
||||
potentially happen immediately (for example, closing file descriptors,
|
||||
moving on to the next entry in an archive parse stream, etc.) then be sure
|
||||
to call `stream.pause()` on creation, and then `stream.resume()` once you
|
||||
are ready to respond to the `end` event.
|
||||
|
||||
However, this is _usually_ not a problem because:
|
||||
|
||||
### Emit `end` When Asked
|
||||
|
||||
One hazard of immediately emitting `'end'` is that you may not yet have had
|
||||
a chance to add a listener. In order to avoid this hazard, Minipass
|
||||
streams safely re-emit the `'end'` event if a new listener is added after
|
||||
`'end'` has been emitted.
|
||||
|
||||
Ie, if you do `stream.on('end', someFunction)`, and the stream has already
|
||||
emitted `end`, then it will call the handler right away. (You can think of
|
||||
this somewhat like attaching a new `.then(fn)` to a previously-resolved
|
||||
Promise.)
|
||||
|
||||
To prevent calling handlers multiple times who would not expect multiple
|
||||
ends to occur, all listeners are removed from the `'end'` event whenever it
|
||||
is emitted.
|
||||
|
||||
### Emit `error` When Asked
|
||||
|
||||
The most recent error object passed to the `'error'` event is
|
||||
stored on the stream. If a new `'error'` event handler is added,
|
||||
and an error was previously emitted, then the event handler will
|
||||
be called immediately (or on `process.nextTick` in the case of
|
||||
async streams).
|
||||
|
||||
This makes it much more difficult to end up trying to interact
|
||||
with a broken stream, if the error handler is added after an
|
||||
error was previously emitted.
|
||||
|
||||
### Impact of "immediate flow" on Tee-streams
|
||||
|
||||
A "tee stream" is a stream piping to multiple destinations:
|
||||
|
||||
```js
|
||||
const tee = new Minipass()
|
||||
t.pipe(dest1)
|
||||
t.pipe(dest2)
|
||||
t.write('foo') // goes to both destinations
|
||||
```
|
||||
|
||||
Since Minipass streams _immediately_ process any pending data through the
|
||||
pipeline when a new pipe destination is added, this can have surprising
|
||||
effects, especially when a stream comes in from some other function and may
|
||||
or may not have data in its buffer.
|
||||
|
||||
```js
|
||||
// WARNING! WILL LOSE DATA!
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone
|
||||
src.pipe(dest2) // gets nothing!
|
||||
```
|
||||
|
||||
One solution is to create a dedicated tee-stream junction that pipes to
|
||||
both locations, and then pipe to _that_ instead.
|
||||
|
||||
```js
|
||||
// Safe example: tee to both places
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
const tee = new Minipass()
|
||||
tee.pipe(dest1)
|
||||
tee.pipe(dest2)
|
||||
src.pipe(tee) // tee gets 'foo', pipes to both locations
|
||||
```
|
||||
|
||||
The same caveat applies to `on('data')` event listeners. The first one
|
||||
added will _immediately_ receive all of the data, leaving nothing for the
|
||||
second:
|
||||
|
||||
```js
|
||||
// WARNING! WILL LOSE DATA!
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
src.on('data', handler1) // receives 'foo' right away
|
||||
src.on('data', handler2) // nothing to see here!
|
||||
```
|
||||
|
||||
Using a dedicated tee-stream can be used in this case as well:
|
||||
|
||||
```js
|
||||
// Safe example: tee to both data handlers
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
const tee = new Minipass()
|
||||
tee.on('data', handler1)
|
||||
tee.on('data', handler2)
|
||||
src.pipe(tee)
|
||||
```
|
||||
|
||||
All of the hazards in this section are avoided by setting `{
|
||||
async: true }` in the Minipass constructor, or by setting
|
||||
`stream.async = true` afterwards. Note that this does add some
|
||||
overhead, so should only be done in cases where you are willing
|
||||
to lose a bit of performance in order to avoid having to refactor
|
||||
program logic.
|
||||
|
||||
## USAGE
|
||||
|
||||
It's a stream! Use it like a stream and it'll most likely do what you
|
||||
want.
|
||||
|
||||
```js
|
||||
const Minipass = require('minipass')
|
||||
const mp = new Minipass(options) // optional: { encoding, objectMode }
|
||||
mp.write('foo')
|
||||
mp.pipe(someOtherStream)
|
||||
mp.end('bar')
|
||||
```
|
||||
|
||||
### OPTIONS
|
||||
|
||||
* `encoding` How would you like the data coming _out_ of the stream to be
|
||||
encoded? Accepts any values that can be passed to `Buffer.toString()`.
|
||||
* `objectMode` Emit data exactly as it comes in. This will be flipped on
|
||||
by default if you write() something other than a string or Buffer at any
|
||||
point. Setting `objectMode: true` will prevent setting any encoding
|
||||
value.
|
||||
* `async` Defaults to `false`. Set to `true` to defer data
|
||||
emission until next tick. This reduces performance slightly,
|
||||
but makes Minipass streams use timing behavior closer to Node
|
||||
core streams. See [Timing](#timing) for more details.
|
||||
|
||||
### API
|
||||
|
||||
Implements the user-facing portions of Node.js's `Readable` and `Writable`
|
||||
streams.
|
||||
|
||||
### Methods
|
||||
|
||||
* `write(chunk, [encoding], [callback])` - Put data in. (Note that, in the
|
||||
base Minipass class, the same data will come out.) Returns `false` if
|
||||
the stream will buffer the next write, or true if it's still in "flowing"
|
||||
mode.
|
||||
* `end([chunk, [encoding]], [callback])` - Signal that you have no more
|
||||
data to write. This will queue an `end` event to be fired when all the
|
||||
data has been consumed.
|
||||
* `setEncoding(encoding)` - Set the encoding for data coming out of the stream.
|
||||
This can only be done once.
|
||||
* `pause()` - No more data for a while, please. This also prevents `end`
|
||||
from being emitted for empty streams until the stream is resumed.
|
||||
* `resume()` - Resume the stream. If there's data in the buffer, it is all
|
||||
discarded. Any buffered events are immediately emitted.
|
||||
* `pipe(dest)` - Send all output to the stream provided. When
|
||||
data is emitted, it is immediately written to any and all pipe
|
||||
destinations. (Or written on next tick in `async` mode.)
|
||||
* `unpipe(dest)` - Stop piping to the destination stream. This
|
||||
is immediate, meaning that any asynchronously queued data will
|
||||
_not_ make it to the destination when running in `async` mode.
|
||||
* `options.end` - Boolean, end the destination stream when
|
||||
the source stream ends. Default `true`.
|
||||
* `options.proxyErrors` - Boolean, proxy `error` events from
|
||||
the source stream to the destination stream. Note that
|
||||
errors are _not_ proxied after the pipeline terminates,
|
||||
either due to the source emitting `'end'` or manually
|
||||
unpiping with `src.unpipe(dest)`. Default `false`.
|
||||
* `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are EventEmitters. Some
|
||||
events are given special treatment, however. (See below under "events".)
|
||||
* `promise()` - Returns a Promise that resolves when the stream emits
|
||||
`end`, or rejects if the stream emits `error`.
|
||||
* `collect()` - Return a Promise that resolves on `end` with an array
|
||||
containing each chunk of data that was emitted, or rejects if the stream
|
||||
emits `error`. Note that this consumes the stream data.
|
||||
* `concat()` - Same as `collect()`, but concatenates the data into a single
|
||||
Buffer object. Will reject the returned promise if the stream is in
|
||||
objectMode, or if it goes into objectMode by the end of the data.
|
||||
* `read(n)` - Consume `n` bytes of data out of the buffer. If `n` is not
|
||||
provided, then consume all of it. If `n` bytes are not available, then
|
||||
it returns null. **Note** consuming streams in this way is less
|
||||
efficient, and can lead to unnecessary Buffer copying.
|
||||
* `destroy([er])` - Destroy the stream. If an error is provided, then an
|
||||
`'error'` event is emitted. If the stream has a `close()` method, and
|
||||
has not emitted a `'close'` event yet, then `stream.close()` will be
|
||||
called. Any Promises returned by `.promise()`, `.collect()` or
|
||||
`.concat()` will be rejected. After being destroyed, writing to the
|
||||
stream will emit an error. No more data will be emitted if the stream is
|
||||
destroyed, even if it was previously buffered.
|
||||
|
||||
### Properties
|
||||
|
||||
* `bufferLength` Read-only. Total number of bytes buffered, or in the case
|
||||
of objectMode, the total number of objects.
|
||||
* `encoding` The encoding that has been set. (Setting this is equivalent
|
||||
to calling `setEncoding(enc)` and has the same prohibition against
|
||||
setting multiple times.)
|
||||
* `flowing` Read-only. Boolean indicating whether a chunk written to the
|
||||
stream will be immediately emitted.
|
||||
* `emittedEnd` Read-only. Boolean indicating whether the end-ish events
|
||||
(ie, `end`, `prefinish`, `finish`) have been emitted. Note that
|
||||
listening on any end-ish event will immediately re-emit it if it has
|
||||
already been emitted.
|
||||
* `writable` Whether the stream is writable. Default `true`. Set to
|
||||
`false` when `end()` is called.
|
||||
* `readable` Whether the stream is readable. Default `true`.
|
||||
* `buffer` A [yallist](http://npm.im/yallist) linked list of chunks written
|
||||
to the stream that have not yet been emitted. (It's probably a bad idea
|
||||
to mess with this.)
|
||||
* `pipes` A [yallist](http://npm.im/yallist) linked list of streams that
|
||||
this stream is piping into. (It's probably a bad idea to mess with
|
||||
this.)
|
||||
* `destroyed` A getter that indicates whether the stream was destroyed.
|
||||
* `paused` True if the stream has been explicitly paused, otherwise false.
|
||||
* `objectMode` Indicates whether the stream is in `objectMode`. Once set
|
||||
to `true`, it cannot be set to `false`.
|
||||
|
||||
### Events
|
||||
|
||||
* `data` Emitted when there's data to read. Argument is the data to read.
|
||||
This is never emitted while not flowing. If a listener is attached, that
|
||||
will resume the stream.
|
||||
* `end` Emitted when there's no more data to read. This will be emitted
|
||||
immediately for empty streams when `end()` is called. If a listener is
|
||||
attached, and `end` was already emitted, then it will be emitted again.
|
||||
All listeners are removed when `end` is emitted.
|
||||
* `prefinish` An end-ish event that follows the same logic as `end` and is
|
||||
emitted in the same conditions where `end` is emitted. Emitted after
|
||||
`'end'`.
|
||||
* `finish` An end-ish event that follows the same logic as `end` and is
|
||||
emitted in the same conditions where `end` is emitted. Emitted after
|
||||
`'prefinish'`.
|
||||
* `close` An indication that an underlying resource has been released.
|
||||
Minipass does not emit this event, but will defer it until after `end`
|
||||
has been emitted, since it throws off some stream libraries otherwise.
|
||||
* `drain` Emitted when the internal buffer empties, and it is again
|
||||
suitable to `write()` into the stream.
|
||||
* `readable` Emitted when data is buffered and ready to be read by a
|
||||
consumer.
|
||||
* `resume` Emitted when stream changes state from buffering to flowing
|
||||
mode. (Ie, when `resume` is called, `pipe` is called, or a `data` event
|
||||
listener is added.)
|
||||
|
||||
### Static Methods
|
||||
|
||||
* `Minipass.isStream(stream)` Returns `true` if the argument is a stream,
|
||||
and false otherwise. To be considered a stream, the object must be
|
||||
either an instance of Minipass, or an EventEmitter that has either a
|
||||
`pipe()` method, or both `write()` and `end()` methods. (Pretty much any
|
||||
stream in node-land will return `true` for this.)
|
||||
|
||||
## EXAMPLES
|
||||
|
||||
Here are some examples of things you can do with Minipass streams.
|
||||
|
||||
### simple "are you done yet" promise
|
||||
|
||||
```js
|
||||
mp.promise().then(() => {
|
||||
// stream is finished
|
||||
}, er => {
|
||||
// stream emitted an error
|
||||
})
|
||||
```
|
||||
|
||||
### collecting
|
||||
|
||||
```js
|
||||
mp.collect().then(all => {
|
||||
// all is an array of all the data emitted
|
||||
// encoding is supported in this case, so
|
||||
// so the result will be a collection of strings if
|
||||
// an encoding is specified, or buffers/objects if not.
|
||||
//
|
||||
// In an async function, you may do
|
||||
// const data = await stream.collect()
|
||||
})
|
||||
```
|
||||
|
||||
### collecting into a single blob
|
||||
|
||||
This is a bit slower because it concatenates the data into one chunk for
|
||||
you, but if you're going to do it yourself anyway, it's convenient this
|
||||
way:
|
||||
|
||||
```js
|
||||
mp.concat().then(onebigchunk => {
|
||||
// onebigchunk is a string if the stream
|
||||
// had an encoding set, or a buffer otherwise.
|
||||
})
|
||||
```
|
||||
|
||||
### iteration
|
||||
|
||||
You can iterate over streams synchronously or asynchronously in platforms
|
||||
that support it.
|
||||
|
||||
Synchronous iteration will end when the currently available data is
|
||||
consumed, even if the `end` event has not been reached. In string and
|
||||
buffer mode, the data is concatenated, so unless multiple writes are
|
||||
occurring in the same tick as the `read()`, sync iteration loops will
|
||||
generally only have a single iteration.
|
||||
|
||||
To consume chunks in this way exactly as they have been written, with no
|
||||
flattening, create the stream with the `{ objectMode: true }` option.
|
||||
|
||||
```js
|
||||
const mp = new Minipass({ objectMode: true })
|
||||
mp.write('a')
|
||||
mp.write('b')
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // a, b
|
||||
}
|
||||
mp.write('c')
|
||||
mp.write('d')
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // c, d
|
||||
}
|
||||
mp.write('e')
|
||||
mp.end()
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // e
|
||||
}
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // nothing
|
||||
}
|
||||
```
|
||||
|
||||
Asynchronous iteration will continue until the end event is reached,
|
||||
consuming all of the data.
|
||||
|
||||
```js
|
||||
const mp = new Minipass({ encoding: 'utf8' })
|
||||
|
||||
// some source of some data
|
||||
let i = 5
|
||||
const inter = setInterval(() => {
|
||||
if (i-- > 0)
|
||||
mp.write(Buffer.from('foo\n', 'utf8'))
|
||||
else {
|
||||
mp.end()
|
||||
clearInterval(inter)
|
||||
}
|
||||
}, 100)
|
||||
|
||||
// consume the data with asynchronous iteration
|
||||
async function consume () {
|
||||
for await (let chunk of mp) {
|
||||
console.log(chunk)
|
||||
}
|
||||
return 'ok'
|
||||
}
|
||||
|
||||
consume().then(res => console.log(res))
|
||||
// logs `foo\n` 5 times, and then `ok`
|
||||
```
|
||||
|
||||
### subclass that `console.log()`s everything written into it
|
||||
|
||||
```js
|
||||
class Logger extends Minipass {
|
||||
write (chunk, encoding, callback) {
|
||||
console.log('WRITE', chunk, encoding)
|
||||
return super.write(chunk, encoding, callback)
|
||||
}
|
||||
end (chunk, encoding, callback) {
|
||||
console.log('END', chunk, encoding)
|
||||
return super.end(chunk, encoding, callback)
|
||||
}
|
||||
}
|
||||
|
||||
someSource.pipe(new Logger()).pipe(someDest)
|
||||
```
|
||||
|
||||
### same thing, but using an inline anonymous class
|
||||
|
||||
```js
|
||||
// js classes are fun
|
||||
someSource
|
||||
.pipe(new (class extends Minipass {
|
||||
emit (ev, ...data) {
|
||||
// let's also log events, because debugging some weird thing
|
||||
console.log('EMIT', ev)
|
||||
return super.emit(ev, ...data)
|
||||
}
|
||||
write (chunk, encoding, callback) {
|
||||
console.log('WRITE', chunk, encoding)
|
||||
return super.write(chunk, encoding, callback)
|
||||
}
|
||||
end (chunk, encoding, callback) {
|
||||
console.log('END', chunk, encoding)
|
||||
return super.end(chunk, encoding, callback)
|
||||
}
|
||||
}))
|
||||
.pipe(someDest)
|
||||
```
|
||||
|
||||
### subclass that defers 'end' for some reason
|
||||
|
||||
```js
|
||||
class SlowEnd extends Minipass {
|
||||
emit (ev, ...args) {
|
||||
if (ev === 'end') {
|
||||
console.log('going to end, hold on a sec')
|
||||
setTimeout(() => {
|
||||
console.log('ok, ready to end now')
|
||||
super.emit('end', ...args)
|
||||
}, 100)
|
||||
} else {
|
||||
return super.emit(ev, ...args)
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### transform that creates newline-delimited JSON
|
||||
|
||||
```js
|
||||
class NDJSONEncode extends Minipass {
|
||||
write (obj, cb) {
|
||||
try {
|
||||
// JSON.stringify can throw, emit an error on that
|
||||
return super.write(JSON.stringify(obj) + '\n', 'utf8', cb)
|
||||
} catch (er) {
|
||||
this.emit('error', er)
|
||||
}
|
||||
}
|
||||
end (obj, cb) {
|
||||
if (typeof obj === 'function') {
|
||||
cb = obj
|
||||
obj = undefined
|
||||
}
|
||||
if (obj !== undefined) {
|
||||
this.write(obj)
|
||||
}
|
||||
return super.end(cb)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### transform that parses newline-delimited JSON
|
||||
|
||||
```js
|
||||
class NDJSONDecode extends Minipass {
|
||||
constructor (options) {
|
||||
// always be in object mode, as far as Minipass is concerned
|
||||
super({ objectMode: true })
|
||||
this._jsonBuffer = ''
|
||||
}
|
||||
write (chunk, encoding, cb) {
|
||||
if (typeof chunk === 'string' &&
|
||||
typeof encoding === 'string' &&
|
||||
encoding !== 'utf8') {
|
||||
chunk = Buffer.from(chunk, encoding).toString()
|
||||
    } else if (Buffer.isBuffer(chunk)) {
      chunk = chunk.toString()
    }
|
||||
if (typeof encoding === 'function') {
|
||||
cb = encoding
|
||||
}
|
||||
const jsonData = (this._jsonBuffer + chunk).split('\n')
|
||||
this._jsonBuffer = jsonData.pop()
|
||||
for (let i = 0; i < jsonData.length; i++) {
|
||||
try {
|
||||
// JSON.parse can throw, emit an error on that
|
||||
super.write(JSON.parse(jsonData[i]))
|
||||
} catch (er) {
|
||||
this.emit('error', er)
|
||||
continue
|
||||
}
|
||||
}
|
||||
if (cb)
|
||||
cb()
|
||||
}
|
||||
}
|
||||
```
|
155
my-app/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/index.d.ts
generated
vendored
Executable file
155
my-app/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/index.d.ts
generated
vendored
Executable file
|
@ -0,0 +1,155 @@
|
|||
/// <reference types="node" />
|
||||
import { EventEmitter } from 'events'
|
||||
import { Stream } from 'stream'
|
||||
|
||||
declare namespace Minipass {
|
||||
type Encoding = BufferEncoding | 'buffer' | null
|
||||
|
||||
interface Writable extends EventEmitter {
|
||||
end(): any
|
||||
write(chunk: any, ...args: any[]): any
|
||||
}
|
||||
|
||||
interface Readable extends EventEmitter {
|
||||
pause(): any
|
||||
resume(): any
|
||||
pipe(): any
|
||||
}
|
||||
|
||||
interface Pipe<R, W> {
|
||||
src: Minipass<R, W>
|
||||
dest: Writable
|
||||
opts: PipeOptions
|
||||
}
|
||||
|
||||
type DualIterable<T> = Iterable<T> & AsyncIterable<T>
|
||||
|
||||
type ContiguousData = Buffer | ArrayBufferLike | ArrayBufferView | string
|
||||
|
||||
type BufferOrString = Buffer | string
|
||||
|
||||
interface StringOptions {
|
||||
encoding: BufferEncoding
|
||||
objectMode?: boolean
|
||||
async?: boolean
|
||||
}
|
||||
|
||||
interface BufferOptions {
|
||||
encoding?: null | 'buffer'
|
||||
objectMode?: boolean
|
||||
async?: boolean
|
||||
}
|
||||
|
||||
interface ObjectModeOptions {
|
||||
objectMode: true
|
||||
async?: boolean
|
||||
}
|
||||
|
||||
interface PipeOptions {
|
||||
end?: boolean
|
||||
proxyErrors?: boolean
|
||||
}
|
||||
|
||||
type Options<T> = T extends string
|
||||
? StringOptions
|
||||
: T extends Buffer
|
||||
? BufferOptions
|
||||
: ObjectModeOptions
|
||||
}
|
||||
|
||||
declare class Minipass<
|
||||
RType extends any = Buffer,
|
||||
WType extends any = RType extends Minipass.BufferOrString
|
||||
? Minipass.ContiguousData
|
||||
: RType
|
||||
>
|
||||
extends Stream
|
||||
implements Minipass.DualIterable<RType>
|
||||
{
|
||||
static isStream(stream: any): stream is Minipass.Readable | Minipass.Writable
|
||||
|
||||
readonly bufferLength: number
|
||||
readonly flowing: boolean
|
||||
readonly writable: boolean
|
||||
readonly readable: boolean
|
||||
readonly paused: boolean
|
||||
readonly emittedEnd: boolean
|
||||
readonly destroyed: boolean
|
||||
|
||||
/**
|
||||
* Not technically private or readonly, but not safe to mutate.
|
||||
*/
|
||||
private readonly buffer: RType[]
|
||||
private readonly pipes: Minipass.Pipe<RType, WType>[]
|
||||
|
||||
/**
|
||||
* Technically writable, but mutating it can change the type,
|
||||
* so is not safe to do in TypeScript.
|
||||
*/
|
||||
readonly objectMode: boolean
|
||||
async: boolean
|
||||
|
||||
/**
|
||||
* Note: encoding is not actually read-only, and setEncoding(enc)
|
||||
* exists. However, this type definition will insist that TypeScript
|
||||
* programs declare the type of a Minipass stream up front, and if
|
||||
* that type is string, then an encoding MUST be set in the ctor. If
|
||||
* the type is Buffer, then the encoding must be missing, or set to
|
||||
* 'buffer' or null. If the type is anything else, then objectMode
|
||||
* must be set in the constructor options. So there is effectively
|
||||
* no allowed way that a TS program can set the encoding after
|
||||
* construction, as doing so will destroy any hope of type safety.
|
||||
* TypeScript does not provide many options for changing the type of
|
||||
* an object at run-time, which is what changing the encoding does.
|
||||
*/
|
||||
readonly encoding: Minipass.Encoding
|
||||
// setEncoding(encoding: Encoding): void
|
||||
|
||||
// Options required if not reading buffers
|
||||
constructor(
|
||||
...args: RType extends Buffer
|
||||
? [] | [Minipass.Options<RType>]
|
||||
: [Minipass.Options<RType>]
|
||||
)
|
||||
|
||||
write(chunk: WType, cb?: () => void): boolean
|
||||
write(chunk: WType, encoding?: Minipass.Encoding, cb?: () => void): boolean
|
||||
read(size?: number): RType
|
||||
end(cb?: () => void): this
|
||||
end(chunk: any, cb?: () => void): this
|
||||
end(chunk: any, encoding?: Minipass.Encoding, cb?: () => void): this
|
||||
pause(): void
|
||||
resume(): void
|
||||
promise(): Promise<void>
|
||||
collect(): Promise<RType[]>
|
||||
|
||||
concat(): RType extends Minipass.BufferOrString ? Promise<RType> : never
|
||||
destroy(er?: any): void
|
||||
pipe<W extends Minipass.Writable>(dest: W, opts?: Minipass.PipeOptions): W
|
||||
unpipe<W extends Minipass.Writable>(dest: W): void
|
||||
|
||||
/**
|
||||
* alias for on()
|
||||
*/
|
||||
addEventHandler(event: string, listener: (...args: any[]) => any): this
|
||||
|
||||
on(event: string, listener: (...args: any[]) => any): this
|
||||
on(event: 'data', listener: (chunk: RType) => any): this
|
||||
on(event: 'error', listener: (error: any) => any): this
|
||||
on(
|
||||
event:
|
||||
| 'readable'
|
||||
| 'drain'
|
||||
| 'resume'
|
||||
| 'end'
|
||||
| 'prefinish'
|
||||
| 'finish'
|
||||
| 'close',
|
||||
listener: () => any
|
||||
): this
|
||||
|
||||
[Symbol.iterator](): Iterator<RType>
|
||||
[Symbol.asyncIterator](): AsyncIterator<RType>
|
||||
}
|
||||
|
||||
export = Minipass
|
649
my-app/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/index.js
generated
vendored
Executable file
649
my-app/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/index.js
generated
vendored
Executable file
|
@ -0,0 +1,649 @@
|
|||
'use strict'
|
||||
const proc = typeof process === 'object' && process ? process : {
|
||||
stdout: null,
|
||||
stderr: null,
|
||||
}
|
||||
const EE = require('events')
|
||||
const Stream = require('stream')
|
||||
const SD = require('string_decoder').StringDecoder
|
||||
|
||||
const EOF = Symbol('EOF')
|
||||
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
|
||||
const EMITTED_END = Symbol('emittedEnd')
|
||||
const EMITTING_END = Symbol('emittingEnd')
|
||||
const EMITTED_ERROR = Symbol('emittedError')
|
||||
const CLOSED = Symbol('closed')
|
||||
const READ = Symbol('read')
|
||||
const FLUSH = Symbol('flush')
|
||||
const FLUSHCHUNK = Symbol('flushChunk')
|
||||
const ENCODING = Symbol('encoding')
|
||||
const DECODER = Symbol('decoder')
|
||||
const FLOWING = Symbol('flowing')
|
||||
const PAUSED = Symbol('paused')
|
||||
const RESUME = Symbol('resume')
|
||||
const BUFFERLENGTH = Symbol('bufferLength')
|
||||
const BUFFERPUSH = Symbol('bufferPush')
|
||||
const BUFFERSHIFT = Symbol('bufferShift')
|
||||
const OBJECTMODE = Symbol('objectMode')
|
||||
const DESTROYED = Symbol('destroyed')
|
||||
const EMITDATA = Symbol('emitData')
|
||||
const EMITEND = Symbol('emitEnd')
|
||||
const EMITEND2 = Symbol('emitEnd2')
|
||||
const ASYNC = Symbol('async')
|
||||
|
||||
const defer = fn => Promise.resolve().then(fn)
|
||||
|
||||
// TODO remove when Node v8 support drops
|
||||
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
|
||||
const ASYNCITERATOR = doIter && Symbol.asyncIterator
|
||||
|| Symbol('asyncIterator not implemented')
|
||||
const ITERATOR = doIter && Symbol.iterator
|
||||
|| Symbol('iterator not implemented')
|
||||
|
||||
// events that mean 'the stream is over'
|
||||
// these are treated specially, and re-emitted
|
||||
// if they are listened for after emitting.
|
||||
const isEndish = ev =>
|
||||
ev === 'end' ||
|
||||
ev === 'finish' ||
|
||||
ev === 'prefinish'
|
||||
|
||||
const isArrayBuffer = b => b instanceof ArrayBuffer ||
|
||||
typeof b === 'object' &&
|
||||
b.constructor &&
|
||||
b.constructor.name === 'ArrayBuffer' &&
|
||||
b.byteLength >= 0
|
||||
|
||||
const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
|
||||
|
||||
class Pipe {
|
||||
constructor (src, dest, opts) {
|
||||
this.src = src
|
||||
this.dest = dest
|
||||
this.opts = opts
|
||||
this.ondrain = () => src[RESUME]()
|
||||
dest.on('drain', this.ondrain)
|
||||
}
|
||||
unpipe () {
|
||||
this.dest.removeListener('drain', this.ondrain)
|
||||
}
|
||||
// istanbul ignore next - only here for the prototype
|
||||
proxyErrors () {}
|
||||
end () {
|
||||
this.unpipe()
|
||||
if (this.opts.end)
|
||||
this.dest.end()
|
||||
}
|
||||
}
|
||||
|
||||
class PipeProxyErrors extends Pipe {
|
||||
unpipe () {
|
||||
this.src.removeListener('error', this.proxyErrors)
|
||||
super.unpipe()
|
||||
}
|
||||
constructor (src, dest, opts) {
|
||||
super(src, dest, opts)
|
||||
this.proxyErrors = er => dest.emit('error', er)
|
||||
src.on('error', this.proxyErrors)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = class Minipass extends Stream {
|
||||
constructor (options) {
|
||||
super()
|
||||
this[FLOWING] = false
|
||||
// whether we're explicitly paused
|
||||
this[PAUSED] = false
|
||||
this.pipes = []
|
||||
this.buffer = []
|
||||
this[OBJECTMODE] = options && options.objectMode || false
|
||||
if (this[OBJECTMODE])
|
||||
this[ENCODING] = null
|
||||
else
|
||||
this[ENCODING] = options && options.encoding || null
|
||||
if (this[ENCODING] === 'buffer')
|
||||
this[ENCODING] = null
|
||||
this[ASYNC] = options && !!options.async || false
|
||||
this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
|
||||
this[EOF] = false
|
||||
this[EMITTED_END] = false
|
||||
this[EMITTING_END] = false
|
||||
this[CLOSED] = false
|
||||
this[EMITTED_ERROR] = null
|
||||
this.writable = true
|
||||
this.readable = true
|
||||
this[BUFFERLENGTH] = 0
|
||||
this[DESTROYED] = false
|
||||
}
|
||||
|
||||
get bufferLength () { return this[BUFFERLENGTH] }
|
||||
|
||||
get encoding () { return this[ENCODING] }
|
||||
set encoding (enc) {
|
||||
if (this[OBJECTMODE])
|
||||
throw new Error('cannot set encoding in objectMode')
|
||||
|
||||
if (this[ENCODING] && enc !== this[ENCODING] &&
|
||||
(this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
|
||||
throw new Error('cannot change encoding')
|
||||
|
||||
if (this[ENCODING] !== enc) {
|
||||
this[DECODER] = enc ? new SD(enc) : null
|
||||
if (this.buffer.length)
|
||||
this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
|
||||
}
|
||||
|
||||
this[ENCODING] = enc
|
||||
}
|
||||
|
||||
setEncoding (enc) {
|
||||
this.encoding = enc
|
||||
}
|
||||
|
||||
get objectMode () { return this[OBJECTMODE] }
|
||||
set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
|
||||
|
||||
get ['async'] () { return this[ASYNC] }
|
||||
set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a }
|
||||
|
||||
write (chunk, encoding, cb) {
|
||||
if (this[EOF])
|
||||
throw new Error('write after end')
|
||||
|
||||
if (this[DESTROYED]) {
|
||||
this.emit('error', Object.assign(
|
||||
new Error('Cannot call write after a stream was destroyed'),
|
||||
{ code: 'ERR_STREAM_DESTROYED' }
|
||||
))
|
||||
return true
|
||||
}
|
||||
|
||||
if (typeof encoding === 'function')
|
||||
cb = encoding, encoding = 'utf8'
|
||||
|
||||
if (!encoding)
|
||||
encoding = 'utf8'
|
||||
|
||||
const fn = this[ASYNC] ? defer : f => f()
|
||||
|
||||
// convert array buffers and typed array views into buffers
|
||||
// at some point in the future, we may want to do the opposite!
|
||||
// leave strings and buffers as-is
|
||||
// anything else switches us into object mode
|
||||
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
|
||||
if (isArrayBufferView(chunk))
|
||||
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
|
||||
else if (isArrayBuffer(chunk))
|
||||
chunk = Buffer.from(chunk)
|
||||
else if (typeof chunk !== 'string')
|
||||
// use the setter so we throw if we have encoding set
|
||||
this.objectMode = true
|
||||
}
|
||||
|
||||
// handle object mode up front, since it's simpler
|
||||
// this yields better performance, fewer checks later.
|
||||
if (this[OBJECTMODE]) {
|
||||
/* istanbul ignore if - maybe impossible? */
|
||||
if (this.flowing && this[BUFFERLENGTH] !== 0)
|
||||
this[FLUSH](true)
|
||||
|
||||
if (this.flowing)
|
||||
this.emit('data', chunk)
|
||||
else
|
||||
this[BUFFERPUSH](chunk)
|
||||
|
||||
if (this[BUFFERLENGTH] !== 0)
|
||||
this.emit('readable')
|
||||
|
||||
if (cb)
|
||||
fn(cb)
|
||||
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
// at this point the chunk is a buffer or string
|
||||
// don't buffer it up or send it to the decoder
|
||||
if (!chunk.length) {
|
||||
if (this[BUFFERLENGTH] !== 0)
|
||||
this.emit('readable')
|
||||
if (cb)
|
||||
fn(cb)
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
// fast-path writing strings of same encoding to a stream with
|
||||
// an empty buffer, skipping the buffer/decoder dance
|
||||
if (typeof chunk === 'string' &&
|
||||
// unless it is a string already ready for us to use
|
||||
!(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
|
||||
chunk = Buffer.from(chunk, encoding)
|
||||
}
|
||||
|
||||
if (Buffer.isBuffer(chunk) && this[ENCODING])
|
||||
chunk = this[DECODER].write(chunk)
|
||||
|
||||
// Note: flushing CAN potentially switch us into not-flowing mode
|
||||
if (this.flowing && this[BUFFERLENGTH] !== 0)
|
||||
this[FLUSH](true)
|
||||
|
||||
if (this.flowing)
|
||||
this.emit('data', chunk)
|
||||
else
|
||||
this[BUFFERPUSH](chunk)
|
||||
|
||||
if (this[BUFFERLENGTH] !== 0)
|
||||
this.emit('readable')
|
||||
|
||||
if (cb)
|
||||
fn(cb)
|
||||
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
read (n) {
|
||||
if (this[DESTROYED])
|
||||
return null
|
||||
|
||||
if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
|
||||
this[MAYBE_EMIT_END]()
|
||||
return null
|
||||
}
|
||||
|
||||
if (this[OBJECTMODE])
|
||||
n = null
|
||||
|
||||
if (this.buffer.length > 1 && !this[OBJECTMODE]) {
|
||||
if (this.encoding)
|
||||
this.buffer = [this.buffer.join('')]
|
||||
else
|
||||
this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])]
|
||||
}
|
||||
|
||||
const ret = this[READ](n || null, this.buffer[0])
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[READ] (n, chunk) {
|
||||
if (n === chunk.length || n === null)
|
||||
this[BUFFERSHIFT]()
|
||||
else {
|
||||
this.buffer[0] = chunk.slice(n)
|
||||
chunk = chunk.slice(0, n)
|
||||
this[BUFFERLENGTH] -= n
|
||||
}
|
||||
|
||||
this.emit('data', chunk)
|
||||
|
||||
if (!this.buffer.length && !this[EOF])
|
||||
this.emit('drain')
|
||||
|
||||
return chunk
|
||||
}
|
||||
|
||||
end (chunk, encoding, cb) {
|
||||
if (typeof chunk === 'function')
|
||||
cb = chunk, chunk = null
|
||||
if (typeof encoding === 'function')
|
||||
cb = encoding, encoding = 'utf8'
|
||||
if (chunk)
|
||||
this.write(chunk, encoding)
|
||||
if (cb)
|
||||
this.once('end', cb)
|
||||
this[EOF] = true
|
||||
this.writable = false
|
||||
|
||||
// if we haven't written anything, then go ahead and emit,
|
||||
// even if we're not reading.
|
||||
// we'll re-emit if a new 'end' listener is added anyway.
|
||||
// This makes MP more suitable to write-only use cases.
|
||||
if (this.flowing || !this[PAUSED])
|
||||
this[MAYBE_EMIT_END]()
|
||||
return this
|
||||
}
|
||||
|
||||
// don't let the internal resume be overwritten
|
||||
[RESUME] () {
|
||||
if (this[DESTROYED])
|
||||
return
|
||||
|
||||
this[PAUSED] = false
|
||||
this[FLOWING] = true
|
||||
this.emit('resume')
|
||||
if (this.buffer.length)
|
||||
this[FLUSH]()
|
||||
else if (this[EOF])
|
||||
this[MAYBE_EMIT_END]()
|
||||
else
|
||||
this.emit('drain')
|
||||
}
|
||||
|
||||
resume () {
|
||||
return this[RESUME]()
|
||||
}
|
||||
|
||||
pause () {
|
||||
this[FLOWING] = false
|
||||
this[PAUSED] = true
|
||||
}
|
||||
|
||||
get destroyed () {
|
||||
return this[DESTROYED]
|
||||
}
|
||||
|
||||
get flowing () {
|
||||
return this[FLOWING]
|
||||
}
|
||||
|
||||
get paused () {
|
||||
return this[PAUSED]
|
||||
}
|
||||
|
||||
[BUFFERPUSH] (chunk) {
|
||||
if (this[OBJECTMODE])
|
||||
this[BUFFERLENGTH] += 1
|
||||
else
|
||||
this[BUFFERLENGTH] += chunk.length
|
||||
this.buffer.push(chunk)
|
||||
}
|
||||
|
||||
[BUFFERSHIFT] () {
|
||||
if (this.buffer.length) {
|
||||
if (this[OBJECTMODE])
|
||||
this[BUFFERLENGTH] -= 1
|
||||
else
|
||||
this[BUFFERLENGTH] -= this.buffer[0].length
|
||||
}
|
||||
return this.buffer.shift()
|
||||
}
|
||||
|
||||
[FLUSH] (noDrain) {
|
||||
do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
|
||||
|
||||
if (!noDrain && !this.buffer.length && !this[EOF])
|
||||
this.emit('drain')
|
||||
}
|
||||
|
||||
[FLUSHCHUNK] (chunk) {
|
||||
return chunk ? (this.emit('data', chunk), this.flowing) : false
|
||||
}
|
||||
|
||||
pipe (dest, opts) {
|
||||
if (this[DESTROYED])
|
||||
return
|
||||
|
||||
const ended = this[EMITTED_END]
|
||||
opts = opts || {}
|
||||
if (dest === proc.stdout || dest === proc.stderr)
|
||||
opts.end = false
|
||||
else
|
||||
opts.end = opts.end !== false
|
||||
opts.proxyErrors = !!opts.proxyErrors
|
||||
|
||||
// piping an ended stream ends immediately
|
||||
if (ended) {
|
||||
if (opts.end)
|
||||
dest.end()
|
||||
} else {
|
||||
this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts)
|
||||
: new PipeProxyErrors(this, dest, opts))
|
||||
if (this[ASYNC])
|
||||
defer(() => this[RESUME]())
|
||||
else
|
||||
this[RESUME]()
|
||||
}
|
||||
|
||||
return dest
|
||||
}
|
||||
|
||||
unpipe (dest) {
|
||||
const p = this.pipes.find(p => p.dest === dest)
|
||||
if (p) {
|
||||
this.pipes.splice(this.pipes.indexOf(p), 1)
|
||||
p.unpipe()
|
||||
}
|
||||
}
|
||||
|
||||
addListener (ev, fn) {
|
||||
return this.on(ev, fn)
|
||||
}
|
||||
|
||||
on (ev, fn) {
|
||||
const ret = super.on(ev, fn)
|
||||
if (ev === 'data' && !this.pipes.length && !this.flowing)
|
||||
this[RESUME]()
|
||||
else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
|
||||
super.emit('readable')
|
||||
else if (isEndish(ev) && this[EMITTED_END]) {
|
||||
super.emit(ev)
|
||||
this.removeAllListeners(ev)
|
||||
} else if (ev === 'error' && this[EMITTED_ERROR]) {
|
||||
if (this[ASYNC])
|
||||
defer(() => fn.call(this, this[EMITTED_ERROR]))
|
||||
else
|
||||
fn.call(this, this[EMITTED_ERROR])
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
get emittedEnd () {
|
||||
return this[EMITTED_END]
|
||||
}
|
||||
|
||||
[MAYBE_EMIT_END] () {
|
||||
if (!this[EMITTING_END] &&
|
||||
!this[EMITTED_END] &&
|
||||
!this[DESTROYED] &&
|
||||
this.buffer.length === 0 &&
|
||||
this[EOF]) {
|
||||
this[EMITTING_END] = true
|
||||
this.emit('end')
|
||||
this.emit('prefinish')
|
||||
this.emit('finish')
|
||||
if (this[CLOSED])
|
||||
this.emit('close')
|
||||
this[EMITTING_END] = false
|
||||
}
|
||||
}
|
||||
|
||||
emit (ev, data, ...extra) {
|
||||
// error and close are only events allowed after calling destroy()
|
||||
if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
|
||||
return
|
||||
else if (ev === 'data') {
|
||||
return !data ? false
|
||||
: this[ASYNC] ? defer(() => this[EMITDATA](data))
|
||||
: this[EMITDATA](data)
|
||||
} else if (ev === 'end') {
|
||||
return this[EMITEND]()
|
||||
} else if (ev === 'close') {
|
||||
this[CLOSED] = true
|
||||
// don't emit close before 'end' and 'finish'
|
||||
if (!this[EMITTED_END] && !this[DESTROYED])
|
||||
return
|
||||
const ret = super.emit('close')
|
||||
this.removeAllListeners('close')
|
||||
return ret
|
||||
} else if (ev === 'error') {
|
||||
this[EMITTED_ERROR] = data
|
||||
const ret = super.emit('error', data)
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
} else if (ev === 'resume') {
|
||||
const ret = super.emit('resume')
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
} else if (ev === 'finish' || ev === 'prefinish') {
|
||||
const ret = super.emit(ev)
|
||||
this.removeAllListeners(ev)
|
||||
return ret
|
||||
}
|
||||
|
||||
// Some other unknown event
|
||||
const ret = super.emit(ev, data, ...extra)
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[EMITDATA] (data) {
|
||||
for (const p of this.pipes) {
|
||||
if (p.dest.write(data) === false)
|
||||
this.pause()
|
||||
}
|
||||
const ret = super.emit('data', data)
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[EMITEND] () {
|
||||
if (this[EMITTED_END])
|
||||
return
|
||||
|
||||
this[EMITTED_END] = true
|
||||
this.readable = false
|
||||
if (this[ASYNC])
|
||||
defer(() => this[EMITEND2]())
|
||||
else
|
||||
this[EMITEND2]()
|
||||
}
|
||||
|
||||
[EMITEND2] () {
|
||||
if (this[DECODER]) {
|
||||
const data = this[DECODER].end()
|
||||
if (data) {
|
||||
for (const p of this.pipes) {
|
||||
p.dest.write(data)
|
||||
}
|
||||
super.emit('data', data)
|
||||
}
|
||||
}
|
||||
|
||||
for (const p of this.pipes) {
|
||||
p.end()
|
||||
}
|
||||
const ret = super.emit('end')
|
||||
this.removeAllListeners('end')
|
||||
return ret
|
||||
}
|
||||
|
||||
// const all = await stream.collect()
|
||||
collect () {
|
||||
const buf = []
|
||||
if (!this[OBJECTMODE])
|
||||
buf.dataLength = 0
|
||||
// set the promise first, in case an error is raised
|
||||
// by triggering the flow here.
|
||||
const p = this.promise()
|
||||
this.on('data', c => {
|
||||
buf.push(c)
|
||||
if (!this[OBJECTMODE])
|
||||
buf.dataLength += c.length
|
||||
})
|
||||
return p.then(() => buf)
|
||||
}
|
||||
|
||||
// const data = await stream.concat()
|
||||
concat () {
|
||||
return this[OBJECTMODE]
|
||||
? Promise.reject(new Error('cannot concat in objectMode'))
|
||||
: this.collect().then(buf =>
|
||||
this[OBJECTMODE]
|
||||
? Promise.reject(new Error('cannot concat in objectMode'))
|
||||
: this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
|
||||
}
|
||||
|
||||
// stream.promise().then(() => done, er => emitted error)
|
||||
promise () {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.on(DESTROYED, () => reject(new Error('stream destroyed')))
|
||||
this.on('error', er => reject(er))
|
||||
this.on('end', () => resolve())
|
||||
})
|
||||
}
|
||||
|
||||
// for await (let chunk of stream)
|
||||
[ASYNCITERATOR] () {
|
||||
const next = () => {
|
||||
const res = this.read()
|
||||
if (res !== null)
|
||||
return Promise.resolve({ done: false, value: res })
|
||||
|
||||
if (this[EOF])
|
||||
return Promise.resolve({ done: true })
|
||||
|
||||
let resolve = null
|
||||
let reject = null
|
||||
const onerr = er => {
|
||||
this.removeListener('data', ondata)
|
||||
this.removeListener('end', onend)
|
||||
reject(er)
|
||||
}
|
||||
const ondata = value => {
|
||||
this.removeListener('error', onerr)
|
||||
this.removeListener('end', onend)
|
||||
this.pause()
|
||||
resolve({ value: value, done: !!this[EOF] })
|
||||
}
|
||||
const onend = () => {
|
||||
this.removeListener('error', onerr)
|
||||
this.removeListener('data', ondata)
|
||||
resolve({ done: true })
|
||||
}
|
||||
const ondestroy = () => onerr(new Error('stream destroyed'))
|
||||
return new Promise((res, rej) => {
|
||||
reject = rej
|
||||
resolve = res
|
||||
this.once(DESTROYED, ondestroy)
|
||||
this.once('error', onerr)
|
||||
this.once('end', onend)
|
||||
this.once('data', ondata)
|
||||
})
|
||||
}
|
||||
|
||||
return { next }
|
||||
}
|
||||
|
||||
// for (let chunk of stream)
|
||||
[ITERATOR] () {
|
||||
const next = () => {
|
||||
const value = this.read()
|
||||
const done = value === null
|
||||
return { value, done }
|
||||
}
|
||||
return { next }
|
||||
}
|
||||
|
||||
destroy (er) {
|
||||
if (this[DESTROYED]) {
|
||||
if (er)
|
||||
this.emit('error', er)
|
||||
else
|
||||
this.emit(DESTROYED)
|
||||
return this
|
||||
}
|
||||
|
||||
this[DESTROYED] = true
|
||||
|
||||
// throw away all buffered data, it's never coming out
|
||||
this.buffer.length = 0
|
||||
this[BUFFERLENGTH] = 0
|
||||
|
||||
if (typeof this.close === 'function' && !this[CLOSED])
|
||||
this.close()
|
||||
|
||||
if (er)
|
||||
this.emit('error', er)
|
||||
else // if no error to emit, still reject pending promises
|
||||
this.emit(DESTROYED)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
static isStream (s) {
|
||||
return !!s && (s instanceof Minipass || s instanceof Stream ||
|
||||
s instanceof EE && (
|
||||
typeof s.pipe === 'function' || // readable
|
||||
(typeof s.write === 'function' && typeof s.end === 'function') // writable
|
||||
))
|
||||
}
|
||||
}
|
56
my-app/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/package.json
generated
vendored
Executable file
56
my-app/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/package.json
generated
vendored
Executable file
|
@ -0,0 +1,56 @@
|
|||
{
|
||||
"name": "minipass",
|
||||
"version": "3.3.6",
|
||||
"description": "minimal implementation of a PassThrough stream",
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"dependencies": {
|
||||
"yallist": "^4.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^17.0.41",
|
||||
"end-of-stream": "^1.4.0",
|
||||
"prettier": "^2.6.2",
|
||||
"tap": "^16.2.0",
|
||||
"through2": "^2.0.3",
|
||||
"ts-node": "^10.8.1",
|
||||
"typescript": "^4.7.3"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "tap",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"postpublish": "git push origin --follow-tags"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/isaacs/minipass.git"
|
||||
},
|
||||
"keywords": [
|
||||
"passthrough",
|
||||
"stream"
|
||||
],
|
||||
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
|
||||
"license": "ISC",
|
||||
"files": [
|
||||
"index.d.ts",
|
||||
"index.js"
|
||||
],
|
||||
"tap": {
|
||||
"check-coverage": true
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"prettier": {
|
||||
"semi": false,
|
||||
"printWidth": 80,
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"singleQuote": true,
|
||||
"jsxSingleQuote": false,
|
||||
"bracketSameLine": true,
|
||||
"arrowParens": "avoid",
|
||||
"endOfLine": "lf"
|
||||
}
|
||||
}
|
39
my-app/node_modules/tar/node_modules/fs-minipass/package.json
generated
vendored
Executable file
39
my-app/node_modules/tar/node_modules/fs-minipass/package.json
generated
vendored
Executable file
|
@ -0,0 +1,39 @@
|
|||
{
|
||||
"name": "fs-minipass",
|
||||
"version": "2.1.0",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "tap",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"postpublish": "git push origin --follow-tags"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
|
||||
"license": "ISC",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/npm/fs-minipass.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/npm/fs-minipass/issues"
|
||||
},
|
||||
"homepage": "https://github.com/npm/fs-minipass#readme",
|
||||
"description": "fs read and write streams based on minipass",
|
||||
"dependencies": {
|
||||
"minipass": "^3.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"mutate-fs": "^2.0.1",
|
||||
"tap": "^14.6.4"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"tap": {
|
||||
"check-coverage": true
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
}
|
||||
}
|
15
my-app/node_modules/tar/node_modules/minipass/LICENSE
generated
vendored
Executable file
15
my-app/node_modules/tar/node_modules/minipass/LICENSE
generated
vendored
Executable file
|
@ -0,0 +1,15 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
769
my-app/node_modules/tar/node_modules/minipass/README.md
generated
vendored
Executable file
769
my-app/node_modules/tar/node_modules/minipass/README.md
generated
vendored
Executable file
|
@ -0,0 +1,769 @@
|
|||
# minipass
|
||||
|
||||
A _very_ minimal implementation of a [PassThrough
|
||||
stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough)
|
||||
|
||||
[It's very
|
||||
fast](https://docs.google.com/spreadsheets/d/1K_HR5oh3r80b8WVMWCPPjfuWXUgfkmhlX7FGI6JJ8tY/edit?usp=sharing)
|
||||
for objects, strings, and buffers.
|
||||
|
||||
Supports `pipe()`ing (including multi-`pipe()` and backpressure
|
||||
transmission), buffering data until either a `data` event handler
|
||||
or `pipe()` is added (so you don't lose the first chunk), and
|
||||
most other cases where PassThrough is a good idea.
|
||||
|
||||
There is a `read()` method, but it's much more efficient to
|
||||
consume data from this stream via `'data'` events or by calling
|
||||
`pipe()` into some other stream. Calling `read()` requires the
|
||||
buffer to be flattened in some cases, which requires copying
|
||||
memory.
|
||||
|
||||
If you set `objectMode: true` in the options, then whatever is
|
||||
written will be emitted. Otherwise, it'll do a minimal amount of
|
||||
Buffer copying to ensure proper Streams semantics when `read(n)`
|
||||
is called.
|
||||
|
||||
`objectMode` can also be set by doing `stream.objectMode = true`,
|
||||
or by writing any non-string/non-buffer data. `objectMode` cannot
|
||||
be set to false once it is set.
|
||||
|
||||
This is not a `through` or `through2` stream. It doesn't
|
||||
transform the data, it just passes it right through. If you want
|
||||
to transform the data, extend the class, and override the
|
||||
`write()` method. Once you're done transforming the data however
|
||||
you want, call `super.write()` with the transform output.
|
||||
|
||||
For some examples of streams that extend Minipass in various
|
||||
ways, check out:
|
||||
|
||||
- [minizlib](http://npm.im/minizlib)
|
||||
- [fs-minipass](http://npm.im/fs-minipass)
|
||||
- [tar](http://npm.im/tar)
|
||||
- [minipass-collect](http://npm.im/minipass-collect)
|
||||
- [minipass-flush](http://npm.im/minipass-flush)
|
||||
- [minipass-pipeline](http://npm.im/minipass-pipeline)
|
||||
- [tap](http://npm.im/tap)
|
||||
- [tap-parser](http://npm.im/tap-parser)
|
||||
- [treport](http://npm.im/treport)
|
||||
- [minipass-fetch](http://npm.im/minipass-fetch)
|
||||
- [pacote](http://npm.im/pacote)
|
||||
- [make-fetch-happen](http://npm.im/make-fetch-happen)
|
||||
- [cacache](http://npm.im/cacache)
|
||||
- [ssri](http://npm.im/ssri)
|
||||
- [npm-registry-fetch](http://npm.im/npm-registry-fetch)
|
||||
- [minipass-json-stream](http://npm.im/minipass-json-stream)
|
||||
- [minipass-sized](http://npm.im/minipass-sized)
|
||||
|
||||
## Differences from Node.js Streams
|
||||
|
||||
There are several things that make Minipass streams different
|
||||
from (and in some ways superior to) Node.js core streams.
|
||||
|
||||
Please read these caveats if you are familiar with node-core
|
||||
streams and intend to use Minipass streams in your programs.
|
||||
|
||||
You can avoid most of these differences entirely (for a very
|
||||
small performance penalty) by setting `{async: true}` in the
|
||||
constructor options.
|
||||
|
||||
### Timing
|
||||
|
||||
Minipass streams are designed to support synchronous use-cases.
|
||||
Thus, data is emitted as soon as it is available, always. It is
|
||||
buffered until read, but no longer. Another way to look at it is
|
||||
that Minipass streams are exactly as synchronous as the logic
|
||||
that writes into them.
|
||||
|
||||
This can be surprising if your code relies on
|
||||
`PassThrough.write()` always providing data on the next tick
|
||||
rather than the current one, or being able to call `resume()` and
|
||||
not have the entire buffer disappear immediately.
|
||||
|
||||
However, without this synchronicity guarantee, there would be no
|
||||
way for Minipass to achieve the speeds it does, or support the
|
||||
synchronous use cases that it does. Simply put, waiting takes
|
||||
time.
|
||||
|
||||
This non-deferring approach makes Minipass streams much easier to
|
||||
reason about, especially in the context of Promises and other
|
||||
flow-control mechanisms.
|
||||
|
||||
Example:
|
||||
|
||||
```js
|
||||
// hybrid module, either works
|
||||
import { Minipass } from 'minipass'
|
||||
// or:
|
||||
const { Minipass } = require('minipass')
|
||||
|
||||
const stream = new Minipass()
|
||||
stream.on('data', () => console.log('data event'))
|
||||
console.log('before write')
|
||||
stream.write('hello')
|
||||
console.log('after write')
|
||||
// output:
|
||||
// before write
|
||||
// data event
|
||||
// after write
|
||||
```
|
||||
|
||||
### Exception: Async Opt-In
|
||||
|
||||
If you wish to have a Minipass stream with behavior that more
|
||||
closely mimics Node.js core streams, you can set the stream in
|
||||
async mode either by setting `async: true` in the constructor
|
||||
options, or by setting `stream.async = true` later on.
|
||||
|
||||
```js
|
||||
// hybrid module, either works
|
||||
import { Minipass } from 'minipass'
|
||||
// or:
|
||||
const { Minipass } = require('minipass')
|
||||
|
||||
const asyncStream = new Minipass({ async: true })
|
||||
asyncStream.on('data', () => console.log('data event'))
|
||||
console.log('before write')
|
||||
asyncStream.write('hello')
|
||||
console.log('after write')
|
||||
// output:
|
||||
// before write
|
||||
// after write
|
||||
// data event <-- this is deferred until the next tick
|
||||
```
|
||||
|
||||
Switching _out_ of async mode is unsafe, as it could cause data
|
||||
corruption, and so is not enabled. Example:
|
||||
|
||||
```js
|
||||
import { Minipass } from 'minipass'
|
||||
const stream = new Minipass({ encoding: 'utf8' })
|
||||
stream.on('data', chunk => console.log(chunk))
|
||||
stream.async = true
|
||||
console.log('before writes')
|
||||
stream.write('hello')
|
||||
setStreamSyncAgainSomehow(stream) // <-- this doesn't actually exist!
|
||||
stream.write('world')
|
||||
console.log('after writes')
|
||||
// hypothetical output would be:
|
||||
// before writes
|
||||
// world
|
||||
// after writes
|
||||
// hello
|
||||
// NOT GOOD!
|
||||
```
|
||||
|
||||
To avoid this problem, once set into async mode, any attempt to
|
||||
make the stream sync again will be ignored.
|
||||
|
||||
```js
|
||||
const { Minipass } = require('minipass')
|
||||
const stream = new Minipass({ encoding: 'utf8' })
|
||||
stream.on('data', chunk => console.log(chunk))
|
||||
stream.async = true
|
||||
console.log('before writes')
|
||||
stream.write('hello')
|
||||
stream.async = false // <-- no-op, stream already async
|
||||
stream.write('world')
|
||||
console.log('after writes')
|
||||
// actual output:
|
||||
// before writes
|
||||
// after writes
|
||||
// hello
|
||||
// world
|
||||
```
|
||||
|
||||
### No High/Low Water Marks
|
||||
|
||||
Node.js core streams will optimistically fill up a buffer,
|
||||
returning `true` on all writes until the limit is hit, even if
|
||||
the data has nowhere to go. Then, they will not attempt to draw
|
||||
more data in until the buffer size dips below a minimum value.
|
||||
|
||||
Minipass streams are much simpler. The `write()` method will
|
||||
return `true` if the data has somewhere to go (which is to say,
|
||||
given the timing guarantees, that the data is already there by
|
||||
the time `write()` returns).
|
||||
|
||||
If the data has nowhere to go, then `write()` returns false, and
|
||||
the data sits in a buffer, to be drained out immediately as soon
|
||||
as anyone consumes it.
|
||||
|
||||
Since nothing is ever buffered unnecessarily, there is much less
|
||||
copying data, and less bookkeeping about buffer capacity levels.
|
||||
|
||||
### Hazards of Buffering (or: Why Minipass Is So Fast)
|
||||
|
||||
Since data written to a Minipass stream is immediately written
|
||||
all the way through the pipeline, and `write()` always returns
|
||||
true/false based on whether the data was fully flushed,
|
||||
backpressure is communicated immediately to the upstream caller.
|
||||
This minimizes buffering.
|
||||
|
||||
Consider this case:
|
||||
|
||||
```js
|
||||
const { PassThrough } = require('stream')
|
||||
const p1 = new PassThrough({ highWaterMark: 1024 })
|
||||
const p2 = new PassThrough({ highWaterMark: 1024 })
|
||||
const p3 = new PassThrough({ highWaterMark: 1024 })
|
||||
const p4 = new PassThrough({ highWaterMark: 1024 })
|
||||
|
||||
p1.pipe(p2).pipe(p3).pipe(p4)
|
||||
p4.on('data', () => console.log('made it through'))
|
||||
|
||||
// this returns false and buffers, then writes to p2 on next tick (1)
|
||||
// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2)
|
||||
// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3)
|
||||
// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain'
|
||||
// on next tick (4)
|
||||
// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and
|
||||
// 'drain' on next tick (5)
|
||||
// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6)
|
||||
// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next
|
||||
// tick (7)
|
||||
|
||||
p1.write(Buffer.alloc(2048)) // returns false
|
||||
```
|
||||
|
||||
Along the way, the data was buffered and deferred at each stage,
|
||||
and multiple event deferrals happened, for an unblocked pipeline
|
||||
where it was perfectly safe to write all the way through!
|
||||
|
||||
Furthermore, setting a `highWaterMark` of `1024` might lead
|
||||
someone reading the code to think an advisory maximum of 1KiB is
|
||||
being set for the pipeline. However, the actual advisory
|
||||
buffering level is the _sum_ of `highWaterMark` values, since
|
||||
each one has its own bucket.
|
||||
|
||||
Consider the Minipass case:
|
||||
|
||||
```js
|
||||
const m1 = new Minipass()
|
||||
const m2 = new Minipass()
|
||||
const m3 = new Minipass()
|
||||
const m4 = new Minipass()
|
||||
|
||||
m1.pipe(m2).pipe(m3).pipe(m4)
|
||||
m4.on('data', () => console.log('made it through'))
|
||||
|
||||
// m1 is flowing, so it writes the data to m2 immediately
|
||||
// m2 is flowing, so it writes the data to m3 immediately
|
||||
// m3 is flowing, so it writes the data to m4 immediately
|
||||
// m4 is flowing, so it fires the 'data' event immediately, returns true
|
||||
// m4's write returned true, so m3 is still flowing, returns true
|
||||
// m3's write returned true, so m2 is still flowing, returns true
|
||||
// m2's write returned true, so m1 is still flowing, returns true
|
||||
// No event deferrals or buffering along the way!
|
||||
|
||||
m1.write(Buffer.alloc(2048)) // returns true
|
||||
```
|
||||
|
||||
It is extremely unlikely that you _don't_ want to buffer any data
|
||||
written, or _ever_ buffer data that can be flushed all the way
|
||||
through. Neither node-core streams nor Minipass ever fail to
|
||||
buffer written data, but node-core streams do a lot of
|
||||
unnecessary buffering and pausing.
|
||||
|
||||
As always, the faster implementation is the one that does less
|
||||
stuff and waits less time to do it.
|
||||
|
||||
### Immediately emit `end` for empty streams (when not paused)
|
||||
|
||||
If a stream is not paused, and `end()` is called before writing
|
||||
any data into it, then it will emit `end` immediately.
|
||||
|
||||
If you have logic that occurs on the `end` event which you don't
|
||||
want to potentially happen immediately (for example, closing file
|
||||
descriptors, moving on to the next entry in an archive parse
|
||||
stream, etc.) then be sure to call `stream.pause()` on creation,
|
||||
and then `stream.resume()` once you are ready to respond to the
|
||||
`end` event.
|
||||
|
||||
However, this is _usually_ not a problem because:
|
||||
|
||||
### Emit `end` When Asked
|
||||
|
||||
One hazard of immediately emitting `'end'` is that you may not
|
||||
yet have had a chance to add a listener. In order to avoid this
|
||||
hazard, Minipass streams safely re-emit the `'end'` event if a
|
||||
new listener is added after `'end'` has been emitted.
|
||||
|
||||
Ie, if you do `stream.on('end', someFunction)`, and the stream
|
||||
has already emitted `end`, then it will call the handler right
|
||||
away. (You can think of this somewhat like attaching a new
|
||||
`.then(fn)` to a previously-resolved Promise.)
|
||||
|
||||
To prevent calling handlers multiple times who would not expect
|
||||
multiple ends to occur, all listeners are removed from the
|
||||
`'end'` event whenever it is emitted.
|
||||
|
||||
### Emit `error` When Asked
|
||||
|
||||
The most recent error object passed to the `'error'` event is
|
||||
stored on the stream. If a new `'error'` event handler is added,
|
||||
and an error was previously emitted, then the event handler will
|
||||
be called immediately (or on `process.nextTick` in the case of
|
||||
async streams).
|
||||
|
||||
This makes it much more difficult to end up trying to interact
|
||||
with a broken stream, if the error handler is added after an
|
||||
error was previously emitted.
|
||||
|
||||
### Impact of "immediate flow" on Tee-streams
|
||||
|
||||
A "tee stream" is a stream piping to multiple destinations:
|
||||
|
||||
```js
|
||||
const tee = new Minipass()
|
||||
t.pipe(dest1)
|
||||
t.pipe(dest2)
|
||||
t.write('foo') // goes to both destinations
|
||||
```
|
||||
|
||||
Since Minipass streams _immediately_ process any pending data
|
||||
through the pipeline when a new pipe destination is added, this
|
||||
can have surprising effects, especially when a stream comes in
|
||||
from some other function and may or may not have data in its
|
||||
buffer.
|
||||
|
||||
```js
|
||||
// WARNING! WILL LOSE DATA!
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone
|
||||
src.pipe(dest2) // gets nothing!
|
||||
```
|
||||
|
||||
One solution is to create a dedicated tee-stream junction that
|
||||
pipes to both locations, and then pipe to _that_ instead.
|
||||
|
||||
```js
|
||||
// Safe example: tee to both places
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
const tee = new Minipass()
|
||||
tee.pipe(dest1)
|
||||
tee.pipe(dest2)
|
||||
src.pipe(tee) // tee gets 'foo', pipes to both locations
|
||||
```
|
||||
|
||||
The same caveat applies to `on('data')` event listeners. The
|
||||
first one added will _immediately_ receive all of the data,
|
||||
leaving nothing for the second:
|
||||
|
||||
```js
|
||||
// WARNING! WILL LOSE DATA!
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
src.on('data', handler1) // receives 'foo' right away
|
||||
src.on('data', handler2) // nothing to see here!
|
||||
```
|
||||
|
||||
Using a dedicated tee-stream can be used in this case as well:
|
||||
|
||||
```js
|
||||
// Safe example: tee to both data handlers
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
const tee = new Minipass()
|
||||
tee.on('data', handler1)
|
||||
tee.on('data', handler2)
|
||||
src.pipe(tee)
|
||||
```
|
||||
|
||||
All of the hazards in this section are avoided by setting `{
|
||||
async: true }` in the Minipass constructor, or by setting
|
||||
`stream.async = true` afterwards. Note that this does add some
|
||||
overhead, so should only be done in cases where you are willing
|
||||
to lose a bit of performance in order to avoid having to refactor
|
||||
program logic.
|
||||
|
||||
## USAGE
|
||||
|
||||
It's a stream! Use it like a stream and it'll most likely do what
|
||||
you want.
|
||||
|
||||
```js
|
||||
import { Minipass } from 'minipass'
|
||||
const mp = new Minipass(options) // optional: { encoding, objectMode }
|
||||
mp.write('foo')
|
||||
mp.pipe(someOtherStream)
|
||||
mp.end('bar')
|
||||
```
|
||||
|
||||
### OPTIONS
|
||||
|
||||
- `encoding` How would you like the data coming _out_ of the
|
||||
stream to be encoded? Accepts any values that can be passed to
|
||||
`Buffer.toString()`.
|
||||
- `objectMode` Emit data exactly as it comes in. This will be
|
||||
flipped on by default if you write() something other than a
|
||||
string or Buffer at any point. Setting `objectMode: true` will
|
||||
prevent setting any encoding value.
|
||||
- `async` Defaults to `false`. Set to `true` to defer data
|
||||
emission until next tick. This reduces performance slightly,
|
||||
but makes Minipass streams use timing behavior closer to Node
|
||||
core streams. See [Timing](#timing) for more details.
|
||||
- `signal` An `AbortSignal` that will cause the stream to unhook
|
||||
itself from everything and become as inert as possible. Note
|
||||
that providing a `signal` parameter will make `'error'` events
|
||||
no longer throw if they are unhandled, but they will still be
|
||||
emitted to handlers if any are attached.
|
||||
|
||||
### API
|
||||
|
||||
Implements the user-facing portions of Node.js's `Readable` and
|
||||
`Writable` streams.
|
||||
|
||||
### Methods
|
||||
|
||||
- `write(chunk, [encoding], [callback])` - Put data in. (Note
|
||||
that, in the base Minipass class, the same data will come out.)
|
||||
Returns `false` if the stream will buffer the next write, or
|
||||
true if it's still in "flowing" mode.
|
||||
- `end([chunk, [encoding]], [callback])` - Signal that you have
|
||||
no more data to write. This will queue an `end` event to be
|
||||
fired when all the data has been consumed.
|
||||
- `setEncoding(encoding)` - Set the encoding for data coming of
|
||||
the stream. This can only be done once.
|
||||
- `pause()` - No more data for a while, please. This also
|
||||
prevents `end` from being emitted for empty streams until the
|
||||
stream is resumed.
|
||||
- `resume()` - Resume the stream. If there's data in the buffer,
|
||||
it is all discarded. Any buffered events are immediately
|
||||
emitted.
|
||||
- `pipe(dest)` - Send all output to the stream provided. When
|
||||
data is emitted, it is immediately written to any and all pipe
|
||||
destinations. (Or written on next tick in `async` mode.)
|
||||
- `unpipe(dest)` - Stop piping to the destination stream. This is
|
||||
immediate, meaning that any asynchronously queued data will
|
||||
_not_ make it to the destination when running in `async` mode.
|
||||
- `options.end` - Boolean, end the destination stream when the
|
||||
source stream ends. Default `true`.
|
||||
- `options.proxyErrors` - Boolean, proxy `error` events from
|
||||
the source stream to the destination stream. Note that errors
|
||||
are _not_ proxied after the pipeline terminates, either due
|
||||
to the source emitting `'end'` or manually unpiping with
|
||||
`src.unpipe(dest)`. Default `false`.
|
||||
- `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are
|
||||
EventEmitters. Some events are given special treatment,
|
||||
however. (See below under "events".)
|
||||
- `promise()` - Returns a Promise that resolves when the stream
|
||||
emits `end`, or rejects if the stream emits `error`.
|
||||
- `collect()` - Return a Promise that resolves on `end` with an
|
||||
array containing each chunk of data that was emitted, or
|
||||
rejects if the stream emits `error`. Note that this consumes
|
||||
the stream data.
|
||||
- `concat()` - Same as `collect()`, but concatenates the data
|
||||
into a single Buffer object. Will reject the returned promise
|
||||
if the stream is in objectMode, or if it goes into objectMode
|
||||
by the end of the data.
|
||||
- `read(n)` - Consume `n` bytes of data out of the buffer. If `n`
|
||||
is not provided, then consume all of it. If `n` bytes are not
|
||||
available, then it returns null. **Note** consuming streams in
|
||||
this way is less efficient, and can lead to unnecessary Buffer
|
||||
copying.
|
||||
- `destroy([er])` - Destroy the stream. If an error is provided,
|
||||
then an `'error'` event is emitted. If the stream has a
|
||||
`close()` method, and has not emitted a `'close'` event yet,
|
||||
then `stream.close()` will be called. Any Promises returned by
|
||||
`.promise()`, `.collect()` or `.concat()` will be rejected.
|
||||
After being destroyed, writing to the stream will emit an
|
||||
error. No more data will be emitted if the stream is destroyed,
|
||||
even if it was previously buffered.
|
||||
|
||||
### Properties
|
||||
|
||||
- `bufferLength` Read-only. Total number of bytes buffered, or in
|
||||
the case of objectMode, the total number of objects.
|
||||
- `encoding` The encoding that has been set. (Setting this is
|
||||
equivalent to calling `setEncoding(enc)` and has the same
|
||||
prohibition against setting multiple times.)
|
||||
- `flowing` Read-only. Boolean indicating whether a chunk written
|
||||
to the stream will be immediately emitted.
|
||||
- `emittedEnd` Read-only. Boolean indicating whether the end-ish
|
||||
events (ie, `end`, `prefinish`, `finish`) have been emitted.
|
||||
Note that listening on any end-ish event will immediateyl
|
||||
re-emit it if it has already been emitted.
|
||||
- `writable` Whether the stream is writable. Default `true`. Set
|
||||
to `false` when `end()`
|
||||
- `readable` Whether the stream is readable. Default `true`.
|
||||
- `pipes` An array of Pipe objects referencing streams that this
|
||||
stream is piping into.
|
||||
- `destroyed` A getter that indicates whether the stream was
|
||||
destroyed.
|
||||
- `paused` True if the stream has been explicitly paused,
|
||||
otherwise false.
|
||||
- `objectMode` Indicates whether the stream is in `objectMode`.
|
||||
Once set to `true`, it cannot be set to `false`.
|
||||
- `aborted` Readonly property set when the `AbortSignal`
|
||||
dispatches an `abort` event.
|
||||
|
||||
### Events
|
||||
|
||||
- `data` Emitted when there's data to read. Argument is the data
|
||||
to read. This is never emitted while not flowing. If a listener
|
||||
is attached, that will resume the stream.
|
||||
- `end` Emitted when there's no more data to read. This will be
|
||||
emitted immediately for empty streams when `end()` is called.
|
||||
If a listener is attached, and `end` was already emitted, then
|
||||
it will be emitted again. All listeners are removed when `end`
|
||||
is emitted.
|
||||
- `prefinish` An end-ish event that follows the same logic as
|
||||
`end` and is emitted in the same conditions where `end` is
|
||||
emitted. Emitted after `'end'`.
|
||||
- `finish` An end-ish event that follows the same logic as `end`
|
||||
and is emitted in the same conditions where `end` is emitted.
|
||||
Emitted after `'prefinish'`.
|
||||
- `close` An indication that an underlying resource has been
|
||||
released. Minipass does not emit this event, but will defer it
|
||||
until after `end` has been emitted, since it throws off some
|
||||
stream libraries otherwise.
|
||||
- `drain` Emitted when the internal buffer empties, and it is
|
||||
again suitable to `write()` into the stream.
|
||||
- `readable` Emitted when data is buffered and ready to be read
|
||||
by a consumer.
|
||||
- `resume` Emitted when stream changes state from buffering to
|
||||
flowing mode. (Ie, when `resume` is called, `pipe` is called,
|
||||
or a `data` event listener is added.)
|
||||
|
||||
### Static Methods
|
||||
|
||||
- `Minipass.isStream(stream)` Returns `true` if the argument is a
|
||||
stream, and false otherwise. To be considered a stream, the
|
||||
object must be either an instance of Minipass, or an
|
||||
EventEmitter that has either a `pipe()` method, or both
|
||||
`write()` and `end()` methods. (Pretty much any stream in
|
||||
node-land will return `true` for this.)
|
||||
|
||||
## EXAMPLES
|
||||
|
||||
Here are some examples of things you can do with Minipass
|
||||
streams.
|
||||
|
||||
### simple "are you done yet" promise
|
||||
|
||||
```js
|
||||
mp.promise().then(
|
||||
() => {
|
||||
// stream is finished
|
||||
},
|
||||
er => {
|
||||
// stream emitted an error
|
||||
}
|
||||
)
|
||||
```
|
||||
|
||||
### collecting
|
||||
|
||||
```js
|
||||
mp.collect().then(all => {
|
||||
// all is an array of all the data emitted
|
||||
// encoding is supported in this case, so
|
||||
// so the result will be a collection of strings if
|
||||
// an encoding is specified, or buffers/objects if not.
|
||||
//
|
||||
// In an async function, you may do
|
||||
// const data = await stream.collect()
|
||||
})
|
||||
```
|
||||
|
||||
### collecting into a single blob
|
||||
|
||||
This is a bit slower because it concatenates the data into one
|
||||
chunk for you, but if you're going to do it yourself anyway, it's
|
||||
convenient this way:
|
||||
|
||||
```js
|
||||
mp.concat().then(onebigchunk => {
|
||||
// onebigchunk is a string if the stream
|
||||
// had an encoding set, or a buffer otherwise.
|
||||
})
|
||||
```
|
||||
|
||||
### iteration
|
||||
|
||||
You can iterate over streams synchronously or asynchronously in
|
||||
platforms that support it.
|
||||
|
||||
Synchronous iteration will end when the currently available data
|
||||
is consumed, even if the `end` event has not been reached. In
|
||||
string and buffer mode, the data is concatenated, so unless
|
||||
multiple writes are occurring in the same tick as the `read()`,
|
||||
sync iteration loops will generally only have a single iteration.
|
||||
|
||||
To consume chunks in this way exactly as they have been written,
|
||||
with no flattening, create the stream with the `{ objectMode:
|
||||
true }` option.
|
||||
|
||||
```js
|
||||
const mp = new Minipass({ objectMode: true })
|
||||
mp.write('a')
|
||||
mp.write('b')
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // a, b
|
||||
}
|
||||
mp.write('c')
|
||||
mp.write('d')
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // c, d
|
||||
}
|
||||
mp.write('e')
|
||||
mp.end()
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // e
|
||||
}
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // nothing
|
||||
}
|
||||
```
|
||||
|
||||
Asynchronous iteration will continue until the end event is reached,
|
||||
consuming all of the data.
|
||||
|
||||
```js
|
||||
const mp = new Minipass({ encoding: 'utf8' })
|
||||
|
||||
// some source of some data
|
||||
let i = 5
|
||||
const inter = setInterval(() => {
|
||||
if (i-- > 0) mp.write(Buffer.from('foo\n', 'utf8'))
|
||||
else {
|
||||
mp.end()
|
||||
clearInterval(inter)
|
||||
}
|
||||
}, 100)
|
||||
|
||||
// consume the data with asynchronous iteration
|
||||
async function consume() {
|
||||
for await (let chunk of mp) {
|
||||
console.log(chunk)
|
||||
}
|
||||
return 'ok'
|
||||
}
|
||||
|
||||
consume().then(res => console.log(res))
|
||||
// logs `foo\n` 5 times, and then `ok`
|
||||
```
|
||||
|
||||
### subclass that `console.log()`s everything written into it
|
||||
|
||||
```js
|
||||
class Logger extends Minipass {
|
||||
write(chunk, encoding, callback) {
|
||||
console.log('WRITE', chunk, encoding)
|
||||
return super.write(chunk, encoding, callback)
|
||||
}
|
||||
end(chunk, encoding, callback) {
|
||||
console.log('END', chunk, encoding)
|
||||
return super.end(chunk, encoding, callback)
|
||||
}
|
||||
}
|
||||
|
||||
someSource.pipe(new Logger()).pipe(someDest)
|
||||
```
|
||||
|
||||
### same thing, but using an inline anonymous class
|
||||
|
||||
```js
|
||||
// js classes are fun
|
||||
someSource
|
||||
.pipe(
|
||||
new (class extends Minipass {
|
||||
emit(ev, ...data) {
|
||||
// let's also log events, because debugging some weird thing
|
||||
console.log('EMIT', ev)
|
||||
return super.emit(ev, ...data)
|
||||
}
|
||||
write(chunk, encoding, callback) {
|
||||
console.log('WRITE', chunk, encoding)
|
||||
return super.write(chunk, encoding, callback)
|
||||
}
|
||||
end(chunk, encoding, callback) {
|
||||
console.log('END', chunk, encoding)
|
||||
return super.end(chunk, encoding, callback)
|
||||
}
|
||||
})()
|
||||
)
|
||||
.pipe(someDest)
|
||||
```
|
||||
|
||||
### subclass that defers 'end' for some reason
|
||||
|
||||
```js
|
||||
class SlowEnd extends Minipass {
|
||||
emit(ev, ...args) {
|
||||
if (ev === 'end') {
|
||||
console.log('going to end, hold on a sec')
|
||||
setTimeout(() => {
|
||||
console.log('ok, ready to end now')
|
||||
super.emit('end', ...args)
|
||||
}, 100)
|
||||
} else {
|
||||
return super.emit(ev, ...args)
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### transform that creates newline-delimited JSON
|
||||
|
||||
```js
|
||||
class NDJSONEncode extends Minipass {
|
||||
write(obj, cb) {
|
||||
try {
|
||||
// JSON.stringify can throw, emit an error on that
|
||||
return super.write(JSON.stringify(obj) + '\n', 'utf8', cb)
|
||||
} catch (er) {
|
||||
this.emit('error', er)
|
||||
}
|
||||
}
|
||||
end(obj, cb) {
|
||||
if (typeof obj === 'function') {
|
||||
cb = obj
|
||||
obj = undefined
|
||||
}
|
||||
if (obj !== undefined) {
|
||||
this.write(obj)
|
||||
}
|
||||
return super.end(cb)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### transform that parses newline-delimited JSON
|
||||
|
||||
```js
|
||||
class NDJSONDecode extends Minipass {
|
||||
constructor (options) {
|
||||
// always be in object mode, as far as Minipass is concerned
|
||||
super({ objectMode: true })
|
||||
this._jsonBuffer = ''
|
||||
}
|
||||
write (chunk, encoding, cb) {
|
||||
if (typeof chunk === 'string' &&
|
||||
typeof encoding === 'string' &&
|
||||
encoding !== 'utf8') {
|
||||
chunk = Buffer.from(chunk, encoding).toString()
|
||||
} else if (Buffer.isBuffer(chunk)) {
|
||||
chunk = chunk.toString()
|
||||
}
|
||||
if (typeof encoding === 'function') {
|
||||
cb = encoding
|
||||
}
|
||||
const jsonData = (this._jsonBuffer + chunk).split('\n')
|
||||
this._jsonBuffer = jsonData.pop()
|
||||
for (let i = 0; i < jsonData.length; i++) {
|
||||
try {
|
||||
// JSON.parse can throw, emit an error on that
|
||||
super.write(JSON.parse(jsonData[i]))
|
||||
} catch (er) {
|
||||
this.emit('error', er)
|
||||
continue
|
||||
}
|
||||
}
|
||||
if (cb)
|
||||
cb()
|
||||
}
|
||||
}
|
||||
```
|
152
my-app/node_modules/tar/node_modules/minipass/index.d.ts
generated
vendored
Executable file
152
my-app/node_modules/tar/node_modules/minipass/index.d.ts
generated
vendored
Executable file
|
@ -0,0 +1,152 @@
|
|||
/// <reference types="node" />
|
||||
|
||||
// Note: marking anything protected or private in the exported
|
||||
// class will limit Minipass's ability to be used as the base
|
||||
// for mixin classes.
|
||||
import { EventEmitter } from 'events'
|
||||
import { Stream } from 'stream'
|
||||
|
||||
export namespace Minipass {
|
||||
export type Encoding = BufferEncoding | 'buffer' | null
|
||||
|
||||
export interface Writable extends EventEmitter {
|
||||
end(): any
|
||||
write(chunk: any, ...args: any[]): any
|
||||
}
|
||||
|
||||
export interface Readable extends EventEmitter {
|
||||
pause(): any
|
||||
resume(): any
|
||||
pipe(): any
|
||||
}
|
||||
|
||||
export type DualIterable<T> = Iterable<T> & AsyncIterable<T>
|
||||
|
||||
export type ContiguousData =
|
||||
| Buffer
|
||||
| ArrayBufferLike
|
||||
| ArrayBufferView
|
||||
| string
|
||||
|
||||
export type BufferOrString = Buffer | string
|
||||
|
||||
export interface SharedOptions {
|
||||
async?: boolean
|
||||
signal?: AbortSignal
|
||||
}
|
||||
|
||||
export interface StringOptions extends SharedOptions {
|
||||
encoding: BufferEncoding
|
||||
objectMode?: boolean
|
||||
}
|
||||
|
||||
export interface BufferOptions extends SharedOptions {
|
||||
encoding?: null | 'buffer'
|
||||
objectMode?: boolean
|
||||
}
|
||||
|
||||
export interface ObjectModeOptions extends SharedOptions {
|
||||
objectMode: true
|
||||
}
|
||||
|
||||
export interface PipeOptions {
|
||||
end?: boolean
|
||||
proxyErrors?: boolean
|
||||
}
|
||||
|
||||
export type Options<T> = T extends string
|
||||
? StringOptions
|
||||
: T extends Buffer
|
||||
? BufferOptions
|
||||
: ObjectModeOptions
|
||||
}
|
||||
|
||||
export class Minipass<
|
||||
RType extends any = Buffer,
|
||||
WType extends any = RType extends Minipass.BufferOrString
|
||||
? Minipass.ContiguousData
|
||||
: RType
|
||||
>
|
||||
extends Stream
|
||||
implements Minipass.DualIterable<RType>
|
||||
{
|
||||
static isStream(stream: any): stream is Minipass.Readable | Minipass.Writable
|
||||
|
||||
readonly bufferLength: number
|
||||
readonly flowing: boolean
|
||||
readonly writable: boolean
|
||||
readonly readable: boolean
|
||||
readonly aborted: boolean
|
||||
readonly paused: boolean
|
||||
readonly emittedEnd: boolean
|
||||
readonly destroyed: boolean
|
||||
|
||||
/**
|
||||
* Technically writable, but mutating it can change the type,
|
||||
* so is not safe to do in TypeScript.
|
||||
*/
|
||||
readonly objectMode: boolean
|
||||
async: boolean
|
||||
|
||||
/**
|
||||
* Note: encoding is not actually read-only, and setEncoding(enc)
|
||||
* exists. However, this type definition will insist that TypeScript
|
||||
* programs declare the type of a Minipass stream up front, and if
|
||||
* that type is string, then an encoding MUST be set in the ctor. If
|
||||
* the type is Buffer, then the encoding must be missing, or set to
|
||||
* 'buffer' or null. If the type is anything else, then objectMode
|
||||
* must be set in the constructor options. So there is effectively
|
||||
* no allowed way that a TS program can set the encoding after
|
||||
* construction, as doing so will destroy any hope of type safety.
|
||||
* TypeScript does not provide many options for changing the type of
|
||||
* an object at run-time, which is what changing the encoding does.
|
||||
*/
|
||||
readonly encoding: Minipass.Encoding
|
||||
// setEncoding(encoding: Encoding): void
|
||||
|
||||
// Options required if not reading buffers
|
||||
constructor(
|
||||
...args: RType extends Buffer
|
||||
? [] | [Minipass.Options<RType>]
|
||||
: [Minipass.Options<RType>]
|
||||
)
|
||||
|
||||
write(chunk: WType, cb?: () => void): boolean
|
||||
write(chunk: WType, encoding?: Minipass.Encoding, cb?: () => void): boolean
|
||||
read(size?: number): RType
|
||||
end(cb?: () => void): this
|
||||
end(chunk: any, cb?: () => void): this
|
||||
end(chunk: any, encoding?: Minipass.Encoding, cb?: () => void): this
|
||||
pause(): void
|
||||
resume(): void
|
||||
promise(): Promise<void>
|
||||
collect(): Promise<RType[]>
|
||||
|
||||
concat(): RType extends Minipass.BufferOrString ? Promise<RType> : never
|
||||
destroy(er?: any): void
|
||||
pipe<W extends Minipass.Writable>(dest: W, opts?: Minipass.PipeOptions): W
|
||||
unpipe<W extends Minipass.Writable>(dest: W): void
|
||||
|
||||
/**
|
||||
* alias for on()
|
||||
*/
|
||||
addEventHandler(event: string, listener: (...args: any[]) => any): this
|
||||
|
||||
on(event: string, listener: (...args: any[]) => any): this
|
||||
on(event: 'data', listener: (chunk: RType) => any): this
|
||||
on(event: 'error', listener: (error: any) => any): this
|
||||
on(
|
||||
event:
|
||||
| 'readable'
|
||||
| 'drain'
|
||||
| 'resume'
|
||||
| 'end'
|
||||
| 'prefinish'
|
||||
| 'finish'
|
||||
| 'close',
|
||||
listener: () => any
|
||||
): this
|
||||
|
||||
[Symbol.iterator](): Generator<RType, void, void>
|
||||
[Symbol.asyncIterator](): AsyncGenerator<RType, void, void>
|
||||
}
|
702
my-app/node_modules/tar/node_modules/minipass/index.js
generated
vendored
Executable file
702
my-app/node_modules/tar/node_modules/minipass/index.js
generated
vendored
Executable file
|
@ -0,0 +1,702 @@
|
|||
'use strict'
|
||||
const proc =
|
||||
typeof process === 'object' && process
|
||||
? process
|
||||
: {
|
||||
stdout: null,
|
||||
stderr: null,
|
||||
}
|
||||
const EE = require('events')
|
||||
const Stream = require('stream')
|
||||
const stringdecoder = require('string_decoder')
|
||||
const SD = stringdecoder.StringDecoder
|
||||
|
||||
const EOF = Symbol('EOF')
|
||||
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
|
||||
const EMITTED_END = Symbol('emittedEnd')
|
||||
const EMITTING_END = Symbol('emittingEnd')
|
||||
const EMITTED_ERROR = Symbol('emittedError')
|
||||
const CLOSED = Symbol('closed')
|
||||
const READ = Symbol('read')
|
||||
const FLUSH = Symbol('flush')
|
||||
const FLUSHCHUNK = Symbol('flushChunk')
|
||||
const ENCODING = Symbol('encoding')
|
||||
const DECODER = Symbol('decoder')
|
||||
const FLOWING = Symbol('flowing')
|
||||
const PAUSED = Symbol('paused')
|
||||
const RESUME = Symbol('resume')
|
||||
const BUFFER = Symbol('buffer')
|
||||
const PIPES = Symbol('pipes')
|
||||
const BUFFERLENGTH = Symbol('bufferLength')
|
||||
const BUFFERPUSH = Symbol('bufferPush')
|
||||
const BUFFERSHIFT = Symbol('bufferShift')
|
||||
const OBJECTMODE = Symbol('objectMode')
|
||||
// internal event when stream is destroyed
|
||||
const DESTROYED = Symbol('destroyed')
|
||||
// internal event when stream has an error
|
||||
const ERROR = Symbol('error')
|
||||
const EMITDATA = Symbol('emitData')
|
||||
const EMITEND = Symbol('emitEnd')
|
||||
const EMITEND2 = Symbol('emitEnd2')
|
||||
const ASYNC = Symbol('async')
|
||||
const ABORT = Symbol('abort')
|
||||
const ABORTED = Symbol('aborted')
|
||||
const SIGNAL = Symbol('signal')
|
||||
|
||||
const defer = fn => Promise.resolve().then(fn)
|
||||
|
||||
// TODO remove when Node v8 support drops
|
||||
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
|
||||
const ASYNCITERATOR =
|
||||
(doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
|
||||
const ITERATOR =
|
||||
(doIter && Symbol.iterator) || Symbol('iterator not implemented')
|
||||
|
||||
// events that mean 'the stream is over'
|
||||
// these are treated specially, and re-emitted
|
||||
// if they are listened for after emitting.
|
||||
const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
|
||||
|
||||
const isArrayBuffer = b =>
|
||||
b instanceof ArrayBuffer ||
|
||||
(typeof b === 'object' &&
|
||||
b.constructor &&
|
||||
b.constructor.name === 'ArrayBuffer' &&
|
||||
b.byteLength >= 0)
|
||||
|
||||
const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
|
||||
|
||||
class Pipe {
|
||||
constructor(src, dest, opts) {
|
||||
this.src = src
|
||||
this.dest = dest
|
||||
this.opts = opts
|
||||
this.ondrain = () => src[RESUME]()
|
||||
dest.on('drain', this.ondrain)
|
||||
}
|
||||
unpipe() {
|
||||
this.dest.removeListener('drain', this.ondrain)
|
||||
}
|
||||
// istanbul ignore next - only here for the prototype
|
||||
proxyErrors() {}
|
||||
end() {
|
||||
this.unpipe()
|
||||
if (this.opts.end) this.dest.end()
|
||||
}
|
||||
}
|
||||
|
||||
class PipeProxyErrors extends Pipe {
|
||||
unpipe() {
|
||||
this.src.removeListener('error', this.proxyErrors)
|
||||
super.unpipe()
|
||||
}
|
||||
constructor(src, dest, opts) {
|
||||
super(src, dest, opts)
|
||||
this.proxyErrors = er => dest.emit('error', er)
|
||||
src.on('error', this.proxyErrors)
|
||||
}
|
||||
}
|
||||
|
||||
class Minipass extends Stream {
|
||||
constructor(options) {
|
||||
super()
|
||||
this[FLOWING] = false
|
||||
// whether we're explicitly paused
|
||||
this[PAUSED] = false
|
||||
this[PIPES] = []
|
||||
this[BUFFER] = []
|
||||
this[OBJECTMODE] = (options && options.objectMode) || false
|
||||
if (this[OBJECTMODE]) this[ENCODING] = null
|
||||
else this[ENCODING] = (options && options.encoding) || null
|
||||
if (this[ENCODING] === 'buffer') this[ENCODING] = null
|
||||
this[ASYNC] = (options && !!options.async) || false
|
||||
this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
|
||||
this[EOF] = false
|
||||
this[EMITTED_END] = false
|
||||
this[EMITTING_END] = false
|
||||
this[CLOSED] = false
|
||||
this[EMITTED_ERROR] = null
|
||||
this.writable = true
|
||||
this.readable = true
|
||||
this[BUFFERLENGTH] = 0
|
||||
this[DESTROYED] = false
|
||||
if (options && options.debugExposeBuffer === true) {
|
||||
Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })
|
||||
}
|
||||
if (options && options.debugExposePipes === true) {
|
||||
Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
|
||||
}
|
||||
this[SIGNAL] = options && options.signal
|
||||
this[ABORTED] = false
|
||||
if (this[SIGNAL]) {
|
||||
this[SIGNAL].addEventListener('abort', () => this[ABORT]())
|
||||
if (this[SIGNAL].aborted) {
|
||||
this[ABORT]()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
get bufferLength() {
|
||||
return this[BUFFERLENGTH]
|
||||
}
|
||||
|
||||
get encoding() {
|
||||
return this[ENCODING]
|
||||
}
|
||||
set encoding(enc) {
|
||||
if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
|
||||
|
||||
if (
|
||||
this[ENCODING] &&
|
||||
enc !== this[ENCODING] &&
|
||||
((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
|
||||
)
|
||||
throw new Error('cannot change encoding')
|
||||
|
||||
if (this[ENCODING] !== enc) {
|
||||
this[DECODER] = enc ? new SD(enc) : null
|
||||
if (this[BUFFER].length)
|
||||
this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk))
|
||||
}
|
||||
|
||||
this[ENCODING] = enc
|
||||
}
|
||||
|
||||
setEncoding(enc) {
|
||||
this.encoding = enc
|
||||
}
|
||||
|
||||
get objectMode() {
|
||||
return this[OBJECTMODE]
|
||||
}
|
||||
set objectMode(om) {
|
||||
this[OBJECTMODE] = this[OBJECTMODE] || !!om
|
||||
}
|
||||
|
||||
get ['async']() {
|
||||
return this[ASYNC]
|
||||
}
|
||||
set ['async'](a) {
|
||||
this[ASYNC] = this[ASYNC] || !!a
|
||||
}
|
||||
|
||||
// drop everything and get out of the flow completely
|
||||
[ABORT]() {
|
||||
this[ABORTED] = true
|
||||
this.emit('abort', this[SIGNAL].reason)
|
||||
this.destroy(this[SIGNAL].reason)
|
||||
}
|
||||
|
||||
get aborted() {
|
||||
return this[ABORTED]
|
||||
}
|
||||
set aborted(_) {}
|
||||
|
||||
write(chunk, encoding, cb) {
|
||||
if (this[ABORTED]) return false
|
||||
if (this[EOF]) throw new Error('write after end')
|
||||
|
||||
if (this[DESTROYED]) {
|
||||
this.emit(
|
||||
'error',
|
||||
Object.assign(
|
||||
new Error('Cannot call write after a stream was destroyed'),
|
||||
{ code: 'ERR_STREAM_DESTROYED' }
|
||||
)
|
||||
)
|
||||
return true
|
||||
}
|
||||
|
||||
if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
|
||||
|
||||
if (!encoding) encoding = 'utf8'
|
||||
|
||||
const fn = this[ASYNC] ? defer : f => f()
|
||||
|
||||
// convert array buffers and typed array views into buffers
|
||||
// at some point in the future, we may want to do the opposite!
|
||||
// leave strings and buffers as-is
|
||||
// anything else switches us into object mode
|
||||
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
|
||||
if (isArrayBufferView(chunk))
|
||||
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
|
||||
else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
|
||||
else if (typeof chunk !== 'string')
|
||||
// use the setter so we throw if we have encoding set
|
||||
this.objectMode = true
|
||||
}
|
||||
|
||||
// handle object mode up front, since it's simpler
|
||||
// this yields better performance, fewer checks later.
|
||||
if (this[OBJECTMODE]) {
|
||||
/* istanbul ignore if - maybe impossible? */
|
||||
if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
|
||||
|
||||
if (this.flowing) this.emit('data', chunk)
|
||||
else this[BUFFERPUSH](chunk)
|
||||
|
||||
if (this[BUFFERLENGTH] !== 0) this.emit('readable')
|
||||
|
||||
if (cb) fn(cb)
|
||||
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
// at this point the chunk is a buffer or string
|
||||
// don't buffer it up or send it to the decoder
|
||||
if (!chunk.length) {
|
||||
if (this[BUFFERLENGTH] !== 0) this.emit('readable')
|
||||
if (cb) fn(cb)
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
// fast-path writing strings of same encoding to a stream with
|
||||
// an empty buffer, skipping the buffer/decoder dance
|
||||
if (
|
||||
typeof chunk === 'string' &&
|
||||
// unless it is a string already ready for us to use
|
||||
!(encoding === this[ENCODING] && !this[DECODER].lastNeed)
|
||||
) {
|
||||
chunk = Buffer.from(chunk, encoding)
|
||||
}
|
||||
|
||||
if (Buffer.isBuffer(chunk) && this[ENCODING])
|
||||
chunk = this[DECODER].write(chunk)
|
||||
|
||||
// Note: flushing CAN potentially switch us into not-flowing mode
|
||||
if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
|
||||
|
||||
if (this.flowing) this.emit('data', chunk)
|
||||
else this[BUFFERPUSH](chunk)
|
||||
|
||||
if (this[BUFFERLENGTH] !== 0) this.emit('readable')
|
||||
|
||||
if (cb) fn(cb)
|
||||
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
read(n) {
|
||||
if (this[DESTROYED]) return null
|
||||
|
||||
if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
|
||||
this[MAYBE_EMIT_END]()
|
||||
return null
|
||||
}
|
||||
|
||||
if (this[OBJECTMODE]) n = null
|
||||
|
||||
if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
|
||||
if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
|
||||
else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
|
||||
}
|
||||
|
||||
const ret = this[READ](n || null, this[BUFFER][0])
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[READ](n, chunk) {
|
||||
if (n === chunk.length || n === null) this[BUFFERSHIFT]()
|
||||
else {
|
||||
this[BUFFER][0] = chunk.slice(n)
|
||||
chunk = chunk.slice(0, n)
|
||||
this[BUFFERLENGTH] -= n
|
||||
}
|
||||
|
||||
this.emit('data', chunk)
|
||||
|
||||
if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
|
||||
|
||||
return chunk
|
||||
}
|
||||
|
||||
end(chunk, encoding, cb) {
|
||||
if (typeof chunk === 'function') (cb = chunk), (chunk = null)
|
||||
if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
|
||||
if (chunk) this.write(chunk, encoding)
|
||||
if (cb) this.once('end', cb)
|
||||
this[EOF] = true
|
||||
this.writable = false
|
||||
|
||||
// if we haven't written anything, then go ahead and emit,
|
||||
// even if we're not reading.
|
||||
// we'll re-emit if a new 'end' listener is added anyway.
|
||||
// This makes MP more suitable to write-only use cases.
|
||||
if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
|
||||
return this
|
||||
}
|
||||
|
||||
// don't let the internal resume be overwritten
|
||||
[RESUME]() {
|
||||
if (this[DESTROYED]) return
|
||||
|
||||
this[PAUSED] = false
|
||||
this[FLOWING] = true
|
||||
this.emit('resume')
|
||||
if (this[BUFFER].length) this[FLUSH]()
|
||||
else if (this[EOF]) this[MAYBE_EMIT_END]()
|
||||
else this.emit('drain')
|
||||
}
|
||||
|
||||
resume() {
|
||||
return this[RESUME]()
|
||||
}
|
||||
|
||||
pause() {
|
||||
this[FLOWING] = false
|
||||
this[PAUSED] = true
|
||||
}
|
||||
|
||||
get destroyed() {
|
||||
return this[DESTROYED]
|
||||
}
|
||||
|
||||
get flowing() {
|
||||
return this[FLOWING]
|
||||
}
|
||||
|
||||
get paused() {
|
||||
return this[PAUSED]
|
||||
}
|
||||
|
||||
[BUFFERPUSH](chunk) {
|
||||
if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
|
||||
else this[BUFFERLENGTH] += chunk.length
|
||||
this[BUFFER].push(chunk)
|
||||
}
|
||||
|
||||
[BUFFERSHIFT]() {
|
||||
if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
|
||||
else this[BUFFERLENGTH] -= this[BUFFER][0].length
|
||||
return this[BUFFER].shift()
|
||||
}
|
||||
|
||||
[FLUSH](noDrain) {
|
||||
do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
|
||||
|
||||
if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
|
||||
}
|
||||
|
||||
[FLUSHCHUNK](chunk) {
|
||||
this.emit('data', chunk)
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
pipe(dest, opts) {
|
||||
if (this[DESTROYED]) return
|
||||
|
||||
const ended = this[EMITTED_END]
|
||||
opts = opts || {}
|
||||
if (dest === proc.stdout || dest === proc.stderr) opts.end = false
|
||||
else opts.end = opts.end !== false
|
||||
opts.proxyErrors = !!opts.proxyErrors
|
||||
|
||||
// piping an ended stream ends immediately
|
||||
if (ended) {
|
||||
if (opts.end) dest.end()
|
||||
} else {
|
||||
this[PIPES].push(
|
||||
!opts.proxyErrors
|
||||
? new Pipe(this, dest, opts)
|
||||
: new PipeProxyErrors(this, dest, opts)
|
||||
)
|
||||
if (this[ASYNC]) defer(() => this[RESUME]())
|
||||
else this[RESUME]()
|
||||
}
|
||||
|
||||
return dest
|
||||
}
|
||||
|
||||
unpipe(dest) {
|
||||
const p = this[PIPES].find(p => p.dest === dest)
|
||||
if (p) {
|
||||
this[PIPES].splice(this[PIPES].indexOf(p), 1)
|
||||
p.unpipe()
|
||||
}
|
||||
}
|
||||
|
||||
addListener(ev, fn) {
|
||||
return this.on(ev, fn)
|
||||
}
|
||||
|
||||
on(ev, fn) {
|
||||
const ret = super.on(ev, fn)
|
||||
if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
|
||||
else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
|
||||
super.emit('readable')
|
||||
else if (isEndish(ev) && this[EMITTED_END]) {
|
||||
super.emit(ev)
|
||||
this.removeAllListeners(ev)
|
||||
} else if (ev === 'error' && this[EMITTED_ERROR]) {
|
||||
if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
|
||||
else fn.call(this, this[EMITTED_ERROR])
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
get emittedEnd() {
|
||||
return this[EMITTED_END]
|
||||
}
|
||||
|
||||
[MAYBE_EMIT_END]() {
|
||||
if (
|
||||
!this[EMITTING_END] &&
|
||||
!this[EMITTED_END] &&
|
||||
!this[DESTROYED] &&
|
||||
this[BUFFER].length === 0 &&
|
||||
this[EOF]
|
||||
) {
|
||||
this[EMITTING_END] = true
|
||||
this.emit('end')
|
||||
this.emit('prefinish')
|
||||
this.emit('finish')
|
||||
if (this[CLOSED]) this.emit('close')
|
||||
this[EMITTING_END] = false
|
||||
}
|
||||
}
|
||||
|
||||
emit(ev, data, ...extra) {
|
||||
// error and close are only events allowed after calling destroy()
|
||||
if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
|
||||
return
|
||||
else if (ev === 'data') {
|
||||
return !this[OBJECTMODE] && !data
|
||||
? false
|
||||
: this[ASYNC]
|
||||
? defer(() => this[EMITDATA](data))
|
||||
: this[EMITDATA](data)
|
||||
} else if (ev === 'end') {
|
||||
return this[EMITEND]()
|
||||
} else if (ev === 'close') {
|
||||
this[CLOSED] = true
|
||||
// don't emit close before 'end' and 'finish'
|
||||
if (!this[EMITTED_END] && !this[DESTROYED]) return
|
||||
const ret = super.emit('close')
|
||||
this.removeAllListeners('close')
|
||||
return ret
|
||||
} else if (ev === 'error') {
|
||||
this[EMITTED_ERROR] = data
|
||||
super.emit(ERROR, data)
|
||||
const ret =
|
||||
!this[SIGNAL] || this.listeners('error').length
|
||||
? super.emit('error', data)
|
||||
: false
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
} else if (ev === 'resume') {
|
||||
const ret = super.emit('resume')
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
} else if (ev === 'finish' || ev === 'prefinish') {
|
||||
const ret = super.emit(ev)
|
||||
this.removeAllListeners(ev)
|
||||
return ret
|
||||
}
|
||||
|
||||
// Some other unknown event
|
||||
const ret = super.emit(ev, data, ...extra)
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[EMITDATA](data) {
|
||||
for (const p of this[PIPES]) {
|
||||
if (p.dest.write(data) === false) this.pause()
|
||||
}
|
||||
const ret = super.emit('data', data)
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[EMITEND]() {
|
||||
if (this[EMITTED_END]) return
|
||||
|
||||
this[EMITTED_END] = true
|
||||
this.readable = false
|
||||
if (this[ASYNC]) defer(() => this[EMITEND2]())
|
||||
else this[EMITEND2]()
|
||||
}
|
||||
|
||||
[EMITEND2]() {
|
||||
if (this[DECODER]) {
|
||||
const data = this[DECODER].end()
|
||||
if (data) {
|
||||
for (const p of this[PIPES]) {
|
||||
p.dest.write(data)
|
||||
}
|
||||
super.emit('data', data)
|
||||
}
|
||||
}
|
||||
|
||||
for (const p of this[PIPES]) {
|
||||
p.end()
|
||||
}
|
||||
const ret = super.emit('end')
|
||||
this.removeAllListeners('end')
|
||||
return ret
|
||||
}
|
||||
|
||||
// const all = await stream.collect()
|
||||
collect() {
|
||||
const buf = []
|
||||
if (!this[OBJECTMODE]) buf.dataLength = 0
|
||||
// set the promise first, in case an error is raised
|
||||
// by triggering the flow here.
|
||||
const p = this.promise()
|
||||
this.on('data', c => {
|
||||
buf.push(c)
|
||||
if (!this[OBJECTMODE]) buf.dataLength += c.length
|
||||
})
|
||||
return p.then(() => buf)
|
||||
}
|
||||
|
||||
// const data = await stream.concat()
|
||||
concat() {
|
||||
return this[OBJECTMODE]
|
||||
? Promise.reject(new Error('cannot concat in objectMode'))
|
||||
: this.collect().then(buf =>
|
||||
this[OBJECTMODE]
|
||||
? Promise.reject(new Error('cannot concat in objectMode'))
|
||||
: this[ENCODING]
|
||||
? buf.join('')
|
||||
: Buffer.concat(buf, buf.dataLength)
|
||||
)
|
||||
}
|
||||
|
||||
// stream.promise().then(() => done, er => emitted error)
|
||||
promise() {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.on(DESTROYED, () => reject(new Error('stream destroyed')))
|
||||
this.on('error', er => reject(er))
|
||||
this.on('end', () => resolve())
|
||||
})
|
||||
}
|
||||
|
||||
// for await (let chunk of stream)
|
||||
[ASYNCITERATOR]() {
|
||||
let stopped = false
|
||||
const stop = () => {
|
||||
this.pause()
|
||||
stopped = true
|
||||
return Promise.resolve({ done: true })
|
||||
}
|
||||
const next = () => {
|
||||
if (stopped) return stop()
|
||||
const res = this.read()
|
||||
if (res !== null) return Promise.resolve({ done: false, value: res })
|
||||
|
||||
if (this[EOF]) return stop()
|
||||
|
||||
let resolve = null
|
||||
let reject = null
|
||||
const onerr = er => {
|
||||
this.removeListener('data', ondata)
|
||||
this.removeListener('end', onend)
|
||||
this.removeListener(DESTROYED, ondestroy)
|
||||
stop()
|
||||
reject(er)
|
||||
}
|
||||
const ondata = value => {
|
||||
this.removeListener('error', onerr)
|
||||
this.removeListener('end', onend)
|
||||
this.removeListener(DESTROYED, ondestroy)
|
||||
this.pause()
|
||||
resolve({ value: value, done: !!this[EOF] })
|
||||
}
|
||||
const onend = () => {
|
||||
this.removeListener('error', onerr)
|
||||
this.removeListener('data', ondata)
|
||||
this.removeListener(DESTROYED, ondestroy)
|
||||
stop()
|
||||
resolve({ done: true })
|
||||
}
|
||||
const ondestroy = () => onerr(new Error('stream destroyed'))
|
||||
return new Promise((res, rej) => {
|
||||
reject = rej
|
||||
resolve = res
|
||||
this.once(DESTROYED, ondestroy)
|
||||
this.once('error', onerr)
|
||||
this.once('end', onend)
|
||||
this.once('data', ondata)
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
next,
|
||||
throw: stop,
|
||||
return: stop,
|
||||
[ASYNCITERATOR]() {
|
||||
return this
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// for (let chunk of stream)
|
||||
[ITERATOR]() {
|
||||
let stopped = false
|
||||
const stop = () => {
|
||||
this.pause()
|
||||
this.removeListener(ERROR, stop)
|
||||
this.removeListener(DESTROYED, stop)
|
||||
this.removeListener('end', stop)
|
||||
stopped = true
|
||||
return { done: true }
|
||||
}
|
||||
|
||||
const next = () => {
|
||||
if (stopped) return stop()
|
||||
const value = this.read()
|
||||
return value === null ? stop() : { value }
|
||||
}
|
||||
this.once('end', stop)
|
||||
this.once(ERROR, stop)
|
||||
this.once(DESTROYED, stop)
|
||||
|
||||
return {
|
||||
next,
|
||||
throw: stop,
|
||||
return: stop,
|
||||
[ITERATOR]() {
|
||||
return this
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
destroy(er) {
|
||||
if (this[DESTROYED]) {
|
||||
if (er) this.emit('error', er)
|
||||
else this.emit(DESTROYED)
|
||||
return this
|
||||
}
|
||||
|
||||
this[DESTROYED] = true
|
||||
|
||||
// throw away all buffered data, it's never coming out
|
||||
this[BUFFER].length = 0
|
||||
this[BUFFERLENGTH] = 0
|
||||
|
||||
if (typeof this.close === 'function' && !this[CLOSED]) this.close()
|
||||
|
||||
if (er) this.emit('error', er)
|
||||
// if no error to emit, still reject pending promises
|
||||
else this.emit(DESTROYED)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
static isStream(s) {
|
||||
return (
|
||||
!!s &&
|
||||
(s instanceof Minipass ||
|
||||
s instanceof Stream ||
|
||||
(s instanceof EE &&
|
||||
// readable
|
||||
(typeof s.pipe === 'function' ||
|
||||
// writable
|
||||
(typeof s.write === 'function' && typeof s.end === 'function'))))
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
exports.Minipass = Minipass
|
702
my-app/node_modules/tar/node_modules/minipass/index.mjs
generated
vendored
Executable file
702
my-app/node_modules/tar/node_modules/minipass/index.mjs
generated
vendored
Executable file
|
@ -0,0 +1,702 @@
|
|||
'use strict'
|
||||
const proc =
|
||||
typeof process === 'object' && process
|
||||
? process
|
||||
: {
|
||||
stdout: null,
|
||||
stderr: null,
|
||||
}
|
||||
import EE from 'events'
|
||||
import Stream from 'stream'
|
||||
import stringdecoder from 'string_decoder'
|
||||
const SD = stringdecoder.StringDecoder
|
||||
|
||||
const EOF = Symbol('EOF')
|
||||
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
|
||||
const EMITTED_END = Symbol('emittedEnd')
|
||||
const EMITTING_END = Symbol('emittingEnd')
|
||||
const EMITTED_ERROR = Symbol('emittedError')
|
||||
const CLOSED = Symbol('closed')
|
||||
const READ = Symbol('read')
|
||||
const FLUSH = Symbol('flush')
|
||||
const FLUSHCHUNK = Symbol('flushChunk')
|
||||
const ENCODING = Symbol('encoding')
|
||||
const DECODER = Symbol('decoder')
|
||||
const FLOWING = Symbol('flowing')
|
||||
const PAUSED = Symbol('paused')
|
||||
const RESUME = Symbol('resume')
|
||||
const BUFFER = Symbol('buffer')
|
||||
const PIPES = Symbol('pipes')
|
||||
const BUFFERLENGTH = Symbol('bufferLength')
|
||||
const BUFFERPUSH = Symbol('bufferPush')
|
||||
const BUFFERSHIFT = Symbol('bufferShift')
|
||||
const OBJECTMODE = Symbol('objectMode')
|
||||
// internal event when stream is destroyed
|
||||
const DESTROYED = Symbol('destroyed')
|
||||
// internal event when stream has an error
|
||||
const ERROR = Symbol('error')
|
||||
const EMITDATA = Symbol('emitData')
|
||||
const EMITEND = Symbol('emitEnd')
|
||||
const EMITEND2 = Symbol('emitEnd2')
|
||||
const ASYNC = Symbol('async')
|
||||
const ABORT = Symbol('abort')
|
||||
const ABORTED = Symbol('aborted')
|
||||
const SIGNAL = Symbol('signal')
|
||||
|
||||
const defer = fn => Promise.resolve().then(fn)
|
||||
|
||||
// TODO remove when Node v8 support drops
|
||||
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
|
||||
const ASYNCITERATOR =
|
||||
(doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
|
||||
const ITERATOR =
|
||||
(doIter && Symbol.iterator) || Symbol('iterator not implemented')
|
||||
|
||||
// events that mean 'the stream is over'
|
||||
// these are treated specially, and re-emitted
|
||||
// if they are listened for after emitting.
|
||||
const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
|
||||
|
||||
const isArrayBuffer = b =>
|
||||
b instanceof ArrayBuffer ||
|
||||
(typeof b === 'object' &&
|
||||
b.constructor &&
|
||||
b.constructor.name === 'ArrayBuffer' &&
|
||||
b.byteLength >= 0)
|
||||
|
||||
const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
|
||||
|
||||
class Pipe {
|
||||
constructor(src, dest, opts) {
|
||||
this.src = src
|
||||
this.dest = dest
|
||||
this.opts = opts
|
||||
this.ondrain = () => src[RESUME]()
|
||||
dest.on('drain', this.ondrain)
|
||||
}
|
||||
unpipe() {
|
||||
this.dest.removeListener('drain', this.ondrain)
|
||||
}
|
||||
// istanbul ignore next - only here for the prototype
|
||||
proxyErrors() {}
|
||||
end() {
|
||||
this.unpipe()
|
||||
if (this.opts.end) this.dest.end()
|
||||
}
|
||||
}
|
||||
|
||||
class PipeProxyErrors extends Pipe {
|
||||
unpipe() {
|
||||
this.src.removeListener('error', this.proxyErrors)
|
||||
super.unpipe()
|
||||
}
|
||||
constructor(src, dest, opts) {
|
||||
super(src, dest, opts)
|
||||
this.proxyErrors = er => dest.emit('error', er)
|
||||
src.on('error', this.proxyErrors)
|
||||
}
|
||||
}
|
||||
|
||||
export class Minipass extends Stream {
|
||||
constructor(options) {
|
||||
super()
|
||||
this[FLOWING] = false
|
||||
// whether we're explicitly paused
|
||||
this[PAUSED] = false
|
||||
this[PIPES] = []
|
||||
this[BUFFER] = []
|
||||
this[OBJECTMODE] = (options && options.objectMode) || false
|
||||
if (this[OBJECTMODE]) this[ENCODING] = null
|
||||
else this[ENCODING] = (options && options.encoding) || null
|
||||
if (this[ENCODING] === 'buffer') this[ENCODING] = null
|
||||
this[ASYNC] = (options && !!options.async) || false
|
||||
this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
|
||||
this[EOF] = false
|
||||
this[EMITTED_END] = false
|
||||
this[EMITTING_END] = false
|
||||
this[CLOSED] = false
|
||||
this[EMITTED_ERROR] = null
|
||||
this.writable = true
|
||||
this.readable = true
|
||||
this[BUFFERLENGTH] = 0
|
||||
this[DESTROYED] = false
|
||||
if (options && options.debugExposeBuffer === true) {
|
||||
Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })
|
||||
}
|
||||
if (options && options.debugExposePipes === true) {
|
||||
Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
|
||||
}
|
||||
this[SIGNAL] = options && options.signal
|
||||
this[ABORTED] = false
|
||||
if (this[SIGNAL]) {
|
||||
this[SIGNAL].addEventListener('abort', () => this[ABORT]())
|
||||
if (this[SIGNAL].aborted) {
|
||||
this[ABORT]()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
get bufferLength() {
|
||||
return this[BUFFERLENGTH]
|
||||
}
|
||||
|
||||
get encoding() {
|
||||
return this[ENCODING]
|
||||
}
|
||||
set encoding(enc) {
|
||||
if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
|
||||
|
||||
if (
|
||||
this[ENCODING] &&
|
||||
enc !== this[ENCODING] &&
|
||||
((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
|
||||
)
|
||||
throw new Error('cannot change encoding')
|
||||
|
||||
if (this[ENCODING] !== enc) {
|
||||
this[DECODER] = enc ? new SD(enc) : null
|
||||
if (this[BUFFER].length)
|
||||
this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk))
|
||||
}
|
||||
|
||||
this[ENCODING] = enc
|
||||
}
|
||||
|
||||
setEncoding(enc) {
|
||||
this.encoding = enc
|
||||
}
|
||||
|
||||
get objectMode() {
|
||||
return this[OBJECTMODE]
|
||||
}
|
||||
set objectMode(om) {
|
||||
this[OBJECTMODE] = this[OBJECTMODE] || !!om
|
||||
}
|
||||
|
||||
get ['async']() {
|
||||
return this[ASYNC]
|
||||
}
|
||||
set ['async'](a) {
|
||||
this[ASYNC] = this[ASYNC] || !!a
|
||||
}
|
||||
|
||||
// drop everything and get out of the flow completely
|
||||
[ABORT]() {
|
||||
this[ABORTED] = true
|
||||
this.emit('abort', this[SIGNAL].reason)
|
||||
this.destroy(this[SIGNAL].reason)
|
||||
}
|
||||
|
||||
get aborted() {
|
||||
return this[ABORTED]
|
||||
}
|
||||
set aborted(_) {}
|
||||
|
||||
write(chunk, encoding, cb) {
|
||||
if (this[ABORTED]) return false
|
||||
if (this[EOF]) throw new Error('write after end')
|
||||
|
||||
if (this[DESTROYED]) {
|
||||
this.emit(
|
||||
'error',
|
||||
Object.assign(
|
||||
new Error('Cannot call write after a stream was destroyed'),
|
||||
{ code: 'ERR_STREAM_DESTROYED' }
|
||||
)
|
||||
)
|
||||
return true
|
||||
}
|
||||
|
||||
if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
|
||||
|
||||
if (!encoding) encoding = 'utf8'
|
||||
|
||||
const fn = this[ASYNC] ? defer : f => f()
|
||||
|
||||
// convert array buffers and typed array views into buffers
|
||||
// at some point in the future, we may want to do the opposite!
|
||||
// leave strings and buffers as-is
|
||||
// anything else switches us into object mode
|
||||
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
|
||||
if (isArrayBufferView(chunk))
|
||||
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
|
||||
else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
|
||||
else if (typeof chunk !== 'string')
|
||||
// use the setter so we throw if we have encoding set
|
||||
this.objectMode = true
|
||||
}
|
||||
|
||||
// handle object mode up front, since it's simpler
|
||||
// this yields better performance, fewer checks later.
|
||||
if (this[OBJECTMODE]) {
|
||||
/* istanbul ignore if - maybe impossible? */
|
||||
if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
|
||||
|
||||
if (this.flowing) this.emit('data', chunk)
|
||||
else this[BUFFERPUSH](chunk)
|
||||
|
||||
if (this[BUFFERLENGTH] !== 0) this.emit('readable')
|
||||
|
||||
if (cb) fn(cb)
|
||||
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
// at this point the chunk is a buffer or string
|
||||
// don't buffer it up or send it to the decoder
|
||||
if (!chunk.length) {
|
||||
if (this[BUFFERLENGTH] !== 0) this.emit('readable')
|
||||
if (cb) fn(cb)
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
// fast-path writing strings of same encoding to a stream with
|
||||
// an empty buffer, skipping the buffer/decoder dance
|
||||
if (
|
||||
typeof chunk === 'string' &&
|
||||
// unless it is a string already ready for us to use
|
||||
!(encoding === this[ENCODING] && !this[DECODER].lastNeed)
|
||||
) {
|
||||
chunk = Buffer.from(chunk, encoding)
|
||||
}
|
||||
|
||||
if (Buffer.isBuffer(chunk) && this[ENCODING])
|
||||
chunk = this[DECODER].write(chunk)
|
||||
|
||||
// Note: flushing CAN potentially switch us into not-flowing mode
|
||||
if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
|
||||
|
||||
if (this.flowing) this.emit('data', chunk)
|
||||
else this[BUFFERPUSH](chunk)
|
||||
|
||||
if (this[BUFFERLENGTH] !== 0) this.emit('readable')
|
||||
|
||||
if (cb) fn(cb)
|
||||
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
read(n) {
|
||||
if (this[DESTROYED]) return null
|
||||
|
||||
if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
|
||||
this[MAYBE_EMIT_END]()
|
||||
return null
|
||||
}
|
||||
|
||||
if (this[OBJECTMODE]) n = null
|
||||
|
||||
if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
|
||||
if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
|
||||
else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
|
||||
}
|
||||
|
||||
const ret = this[READ](n || null, this[BUFFER][0])
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[READ](n, chunk) {
|
||||
if (n === chunk.length || n === null) this[BUFFERSHIFT]()
|
||||
else {
|
||||
this[BUFFER][0] = chunk.slice(n)
|
||||
chunk = chunk.slice(0, n)
|
||||
this[BUFFERLENGTH] -= n
|
||||
}
|
||||
|
||||
this.emit('data', chunk)
|
||||
|
||||
if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
|
||||
|
||||
return chunk
|
||||
}
|
||||
|
||||
end(chunk, encoding, cb) {
|
||||
if (typeof chunk === 'function') (cb = chunk), (chunk = null)
|
||||
if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
|
||||
if (chunk) this.write(chunk, encoding)
|
||||
if (cb) this.once('end', cb)
|
||||
this[EOF] = true
|
||||
this.writable = false
|
||||
|
||||
// if we haven't written anything, then go ahead and emit,
|
||||
// even if we're not reading.
|
||||
// we'll re-emit if a new 'end' listener is added anyway.
|
||||
// This makes MP more suitable to write-only use cases.
|
||||
if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
|
||||
return this
|
||||
}
|
||||
|
||||
// don't let the internal resume be overwritten
|
||||
[RESUME]() {
|
||||
if (this[DESTROYED]) return
|
||||
|
||||
this[PAUSED] = false
|
||||
this[FLOWING] = true
|
||||
this.emit('resume')
|
||||
if (this[BUFFER].length) this[FLUSH]()
|
||||
else if (this[EOF]) this[MAYBE_EMIT_END]()
|
||||
else this.emit('drain')
|
||||
}
|
||||
|
||||
resume() {
|
||||
return this[RESUME]()
|
||||
}
|
||||
|
||||
pause() {
|
||||
this[FLOWING] = false
|
||||
this[PAUSED] = true
|
||||
}
|
||||
|
||||
get destroyed() {
|
||||
return this[DESTROYED]
|
||||
}
|
||||
|
||||
get flowing() {
|
||||
return this[FLOWING]
|
||||
}
|
||||
|
||||
get paused() {
|
||||
return this[PAUSED]
|
||||
}
|
||||
|
||||
[BUFFERPUSH](chunk) {
|
||||
if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
|
||||
else this[BUFFERLENGTH] += chunk.length
|
||||
this[BUFFER].push(chunk)
|
||||
}
|
||||
|
||||
[BUFFERSHIFT]() {
|
||||
if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
|
||||
else this[BUFFERLENGTH] -= this[BUFFER][0].length
|
||||
return this[BUFFER].shift()
|
||||
}
|
||||
|
||||
[FLUSH](noDrain) {
|
||||
do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
|
||||
|
||||
if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
|
||||
}
|
||||
|
||||
[FLUSHCHUNK](chunk) {
|
||||
this.emit('data', chunk)
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
pipe(dest, opts) {
|
||||
if (this[DESTROYED]) return
|
||||
|
||||
const ended = this[EMITTED_END]
|
||||
opts = opts || {}
|
||||
if (dest === proc.stdout || dest === proc.stderr) opts.end = false
|
||||
else opts.end = opts.end !== false
|
||||
opts.proxyErrors = !!opts.proxyErrors
|
||||
|
||||
// piping an ended stream ends immediately
|
||||
if (ended) {
|
||||
if (opts.end) dest.end()
|
||||
} else {
|
||||
this[PIPES].push(
|
||||
!opts.proxyErrors
|
||||
? new Pipe(this, dest, opts)
|
||||
: new PipeProxyErrors(this, dest, opts)
|
||||
)
|
||||
if (this[ASYNC]) defer(() => this[RESUME]())
|
||||
else this[RESUME]()
|
||||
}
|
||||
|
||||
return dest
|
||||
}
|
||||
|
||||
unpipe(dest) {
|
||||
const p = this[PIPES].find(p => p.dest === dest)
|
||||
if (p) {
|
||||
this[PIPES].splice(this[PIPES].indexOf(p), 1)
|
||||
p.unpipe()
|
||||
}
|
||||
}
|
||||
|
||||
addListener(ev, fn) {
|
||||
return this.on(ev, fn)
|
||||
}
|
||||
|
||||
on(ev, fn) {
|
||||
const ret = super.on(ev, fn)
|
||||
if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
|
||||
else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
|
||||
super.emit('readable')
|
||||
else if (isEndish(ev) && this[EMITTED_END]) {
|
||||
super.emit(ev)
|
||||
this.removeAllListeners(ev)
|
||||
} else if (ev === 'error' && this[EMITTED_ERROR]) {
|
||||
if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
|
||||
else fn.call(this, this[EMITTED_ERROR])
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
get emittedEnd() {
|
||||
return this[EMITTED_END]
|
||||
}
|
||||
|
||||
[MAYBE_EMIT_END]() {
|
||||
if (
|
||||
!this[EMITTING_END] &&
|
||||
!this[EMITTED_END] &&
|
||||
!this[DESTROYED] &&
|
||||
this[BUFFER].length === 0 &&
|
||||
this[EOF]
|
||||
) {
|
||||
this[EMITTING_END] = true
|
||||
this.emit('end')
|
||||
this.emit('prefinish')
|
||||
this.emit('finish')
|
||||
if (this[CLOSED]) this.emit('close')
|
||||
this[EMITTING_END] = false
|
||||
}
|
||||
}
|
||||
|
||||
emit(ev, data, ...extra) {
|
||||
// error and close are only events allowed after calling destroy()
|
||||
if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
|
||||
return
|
||||
else if (ev === 'data') {
|
||||
return !this[OBJECTMODE] && !data
|
||||
? false
|
||||
: this[ASYNC]
|
||||
? defer(() => this[EMITDATA](data))
|
||||
: this[EMITDATA](data)
|
||||
} else if (ev === 'end') {
|
||||
return this[EMITEND]()
|
||||
} else if (ev === 'close') {
|
||||
this[CLOSED] = true
|
||||
// don't emit close before 'end' and 'finish'
|
||||
if (!this[EMITTED_END] && !this[DESTROYED]) return
|
||||
const ret = super.emit('close')
|
||||
this.removeAllListeners('close')
|
||||
return ret
|
||||
} else if (ev === 'error') {
|
||||
this[EMITTED_ERROR] = data
|
||||
super.emit(ERROR, data)
|
||||
const ret =
|
||||
!this[SIGNAL] || this.listeners('error').length
|
||||
? super.emit('error', data)
|
||||
: false
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
} else if (ev === 'resume') {
|
||||
const ret = super.emit('resume')
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
} else if (ev === 'finish' || ev === 'prefinish') {
|
||||
const ret = super.emit(ev)
|
||||
this.removeAllListeners(ev)
|
||||
return ret
|
||||
}
|
||||
|
||||
// Some other unknown event
|
||||
const ret = super.emit(ev, data, ...extra)
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[EMITDATA](data) {
|
||||
for (const p of this[PIPES]) {
|
||||
if (p.dest.write(data) === false) this.pause()
|
||||
}
|
||||
const ret = super.emit('data', data)
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[EMITEND]() {
|
||||
if (this[EMITTED_END]) return
|
||||
|
||||
this[EMITTED_END] = true
|
||||
this.readable = false
|
||||
if (this[ASYNC]) defer(() => this[EMITEND2]())
|
||||
else this[EMITEND2]()
|
||||
}
|
||||
|
||||
[EMITEND2]() {
|
||||
if (this[DECODER]) {
|
||||
const data = this[DECODER].end()
|
||||
if (data) {
|
||||
for (const p of this[PIPES]) {
|
||||
p.dest.write(data)
|
||||
}
|
||||
super.emit('data', data)
|
||||
}
|
||||
}
|
||||
|
||||
for (const p of this[PIPES]) {
|
||||
p.end()
|
||||
}
|
||||
const ret = super.emit('end')
|
||||
this.removeAllListeners('end')
|
||||
return ret
|
||||
}
|
||||
|
||||
// const all = await stream.collect()
|
||||
collect() {
|
||||
const buf = []
|
||||
if (!this[OBJECTMODE]) buf.dataLength = 0
|
||||
// set the promise first, in case an error is raised
|
||||
// by triggering the flow here.
|
||||
const p = this.promise()
|
||||
this.on('data', c => {
|
||||
buf.push(c)
|
||||
if (!this[OBJECTMODE]) buf.dataLength += c.length
|
||||
})
|
||||
return p.then(() => buf)
|
||||
}
|
||||
|
||||
// const data = await stream.concat()
|
||||
concat() {
|
||||
return this[OBJECTMODE]
|
||||
? Promise.reject(new Error('cannot concat in objectMode'))
|
||||
: this.collect().then(buf =>
|
||||
this[OBJECTMODE]
|
||||
? Promise.reject(new Error('cannot concat in objectMode'))
|
||||
: this[ENCODING]
|
||||
? buf.join('')
|
||||
: Buffer.concat(buf, buf.dataLength)
|
||||
)
|
||||
}
|
||||
|
||||
// stream.promise().then(() => done, er => emitted error)
|
||||
promise() {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.on(DESTROYED, () => reject(new Error('stream destroyed')))
|
||||
this.on('error', er => reject(er))
|
||||
this.on('end', () => resolve())
|
||||
})
|
||||
}
|
||||
|
||||
// for await (let chunk of stream)
|
||||
[ASYNCITERATOR]() {
|
||||
let stopped = false
|
||||
const stop = () => {
|
||||
this.pause()
|
||||
stopped = true
|
||||
return Promise.resolve({ done: true })
|
||||
}
|
||||
const next = () => {
|
||||
if (stopped) return stop()
|
||||
const res = this.read()
|
||||
if (res !== null) return Promise.resolve({ done: false, value: res })
|
||||
|
||||
if (this[EOF]) return stop()
|
||||
|
||||
let resolve = null
|
||||
let reject = null
|
||||
const onerr = er => {
|
||||
this.removeListener('data', ondata)
|
||||
this.removeListener('end', onend)
|
||||
this.removeListener(DESTROYED, ondestroy)
|
||||
stop()
|
||||
reject(er)
|
||||
}
|
||||
const ondata = value => {
|
||||
this.removeListener('error', onerr)
|
||||
this.removeListener('end', onend)
|
||||
this.removeListener(DESTROYED, ondestroy)
|
||||
this.pause()
|
||||
resolve({ value: value, done: !!this[EOF] })
|
||||
}
|
||||
const onend = () => {
|
||||
this.removeListener('error', onerr)
|
||||
this.removeListener('data', ondata)
|
||||
this.removeListener(DESTROYED, ondestroy)
|
||||
stop()
|
||||
resolve({ done: true })
|
||||
}
|
||||
const ondestroy = () => onerr(new Error('stream destroyed'))
|
||||
return new Promise((res, rej) => {
|
||||
reject = rej
|
||||
resolve = res
|
||||
this.once(DESTROYED, ondestroy)
|
||||
this.once('error', onerr)
|
||||
this.once('end', onend)
|
||||
this.once('data', ondata)
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
next,
|
||||
throw: stop,
|
||||
return: stop,
|
||||
[ASYNCITERATOR]() {
|
||||
return this
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// for (let chunk of stream)
|
||||
[ITERATOR]() {
|
||||
let stopped = false
|
||||
const stop = () => {
|
||||
this.pause()
|
||||
this.removeListener(ERROR, stop)
|
||||
this.removeListener(DESTROYED, stop)
|
||||
this.removeListener('end', stop)
|
||||
stopped = true
|
||||
return { done: true }
|
||||
}
|
||||
|
||||
const next = () => {
|
||||
if (stopped) return stop()
|
||||
const value = this.read()
|
||||
return value === null ? stop() : { value }
|
||||
}
|
||||
this.once('end', stop)
|
||||
this.once(ERROR, stop)
|
||||
this.once(DESTROYED, stop)
|
||||
|
||||
return {
|
||||
next,
|
||||
throw: stop,
|
||||
return: stop,
|
||||
[ITERATOR]() {
|
||||
return this
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
destroy(er) {
|
||||
if (this[DESTROYED]) {
|
||||
if (er) this.emit('error', er)
|
||||
else this.emit(DESTROYED)
|
||||
return this
|
||||
}
|
||||
|
||||
this[DESTROYED] = true
|
||||
|
||||
// throw away all buffered data, it's never coming out
|
||||
this[BUFFER].length = 0
|
||||
this[BUFFERLENGTH] = 0
|
||||
|
||||
if (typeof this.close === 'function' && !this[CLOSED]) this.close()
|
||||
|
||||
if (er) this.emit('error', er)
|
||||
// if no error to emit, still reject pending promises
|
||||
else this.emit(DESTROYED)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
static isStream(s) {
|
||||
return (
|
||||
!!s &&
|
||||
(s instanceof Minipass ||
|
||||
s instanceof Stream ||
|
||||
(s instanceof EE &&
|
||||
// readable
|
||||
(typeof s.pipe === 'function' ||
|
||||
// writable
|
||||
(typeof s.write === 'function' && typeof s.end === 'function'))))
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
76
my-app/node_modules/tar/node_modules/minipass/package.json
generated
vendored
Executable file
76
my-app/node_modules/tar/node_modules/minipass/package.json
generated
vendored
Executable file
|
@ -0,0 +1,76 @@
|
|||
{
|
||||
"name": "minipass",
|
||||
"version": "5.0.0",
|
||||
"description": "minimal implementation of a PassThrough stream",
|
||||
"main": "./index.js",
|
||||
"module": "./index.mjs",
|
||||
"types": "./index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"import": {
|
||||
"types": "./index.d.ts",
|
||||
"default": "./index.mjs"
|
||||
},
|
||||
"require": {
|
||||
"types": "./index.d.ts",
|
||||
"default": "./index.js"
|
||||
}
|
||||
},
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^17.0.41",
|
||||
"end-of-stream": "^1.4.0",
|
||||
"node-abort-controller": "^3.1.1",
|
||||
"prettier": "^2.6.2",
|
||||
"tap": "^16.2.0",
|
||||
"through2": "^2.0.3",
|
||||
"ts-node": "^10.8.1",
|
||||
"typedoc": "^0.23.24",
|
||||
"typescript": "^4.7.3"
|
||||
},
|
||||
"scripts": {
|
||||
"pretest": "npm run prepare",
|
||||
"presnap": "npm run prepare",
|
||||
"prepare": "node ./scripts/transpile-to-esm.js",
|
||||
"snap": "tap",
|
||||
"test": "tap",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"postpublish": "git push origin --follow-tags",
|
||||
"typedoc": "typedoc ./index.d.ts",
|
||||
"format": "prettier --write . --loglevel warn"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/isaacs/minipass.git"
|
||||
},
|
||||
"keywords": [
|
||||
"passthrough",
|
||||
"stream"
|
||||
],
|
||||
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
|
||||
"license": "ISC",
|
||||
"files": [
|
||||
"index.d.ts",
|
||||
"index.js",
|
||||
"index.mjs"
|
||||
],
|
||||
"tap": {
|
||||
"check-coverage": true
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"prettier": {
|
||||
"semi": false,
|
||||
"printWidth": 80,
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"singleQuote": true,
|
||||
"jsxSingleQuote": false,
|
||||
"bracketSameLine": true,
|
||||
"arrowParens": "avoid",
|
||||
"endOfLine": "lf"
|
||||
}
|
||||
}
|
15
my-app/node_modules/tar/node_modules/mkdirp/CHANGELOG.md
generated
vendored
Executable file
15
my-app/node_modules/tar/node_modules/mkdirp/CHANGELOG.md
generated
vendored
Executable file
|
@ -0,0 +1,15 @@
|
|||
# Changers Lorgs!
|
||||
|
||||
## 1.0
|
||||
|
||||
Full rewrite. Essentially a brand new module.
|
||||
|
||||
- Return a promise instead of taking a callback.
|
||||
- Use native `fs.mkdir(path, { recursive: true })` when available.
|
||||
- Drop support for outdated Node.js versions. (Technically still works on
|
||||
Node.js v8, but only 10 and above are officially supported.)
|
||||
|
||||
## 0.x
|
||||
|
||||
Original and most widely used recursive directory creation implementation
|
||||
in JavaScript, dating back to 2010.
|
21
my-app/node_modules/tar/node_modules/mkdirp/LICENSE
generated
vendored
Executable file
21
my-app/node_modules/tar/node_modules/mkdirp/LICENSE
generated
vendored
Executable file
|
@ -0,0 +1,21 @@
|
|||
Copyright James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
|
||||
|
||||
This project is free software released under the MIT license:
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
68
my-app/node_modules/tar/node_modules/mkdirp/bin/cmd.js
generated
vendored
Executable file
68
my-app/node_modules/tar/node_modules/mkdirp/bin/cmd.js
generated
vendored
Executable file
|
@ -0,0 +1,68 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
const usage = () => `
|
||||
usage: mkdirp [DIR1,DIR2..] {OPTIONS}
|
||||
|
||||
Create each supplied directory including any necessary parent directories
|
||||
that don't yet exist.
|
||||
|
||||
If the directory already exists, do nothing.
|
||||
|
||||
OPTIONS are:
|
||||
|
||||
-m<mode> If a directory needs to be created, set the mode as an octal
|
||||
--mode=<mode> permission string.
|
||||
|
||||
-v --version Print the mkdirp version number
|
||||
|
||||
-h --help Print this helpful banner
|
||||
|
||||
-p --print Print the first directories created for each path provided
|
||||
|
||||
--manual Use manual implementation, even if native is available
|
||||
`
|
||||
|
||||
const dirs = []
|
||||
const opts = {}
|
||||
let print = false
|
||||
let dashdash = false
|
||||
let manual = false
|
||||
for (const arg of process.argv.slice(2)) {
|
||||
if (dashdash)
|
||||
dirs.push(arg)
|
||||
else if (arg === '--')
|
||||
dashdash = true
|
||||
else if (arg === '--manual')
|
||||
manual = true
|
||||
else if (/^-h/.test(arg) || /^--help/.test(arg)) {
|
||||
console.log(usage())
|
||||
process.exit(0)
|
||||
} else if (arg === '-v' || arg === '--version') {
|
||||
console.log(require('../package.json').version)
|
||||
process.exit(0)
|
||||
} else if (arg === '-p' || arg === '--print') {
|
||||
print = true
|
||||
} else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
|
||||
const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8)
|
||||
if (isNaN(mode)) {
|
||||
console.error(`invalid mode argument: ${arg}\nMust be an octal number.`)
|
||||
process.exit(1)
|
||||
}
|
||||
opts.mode = mode
|
||||
} else
|
||||
dirs.push(arg)
|
||||
}
|
||||
|
||||
const mkdirp = require('../')
|
||||
const impl = manual ? mkdirp.manual : mkdirp
|
||||
if (dirs.length === 0)
|
||||
console.error(usage())
|
||||
|
||||
Promise.all(dirs.map(dir => impl(dir, opts)))
|
||||
.then(made => print ? made.forEach(m => m && console.log(m)) : null)
|
||||
.catch(er => {
|
||||
console.error(er.message)
|
||||
if (er.code)
|
||||
console.error(' code: ' + er.code)
|
||||
process.exit(1)
|
||||
})
|
31
my-app/node_modules/tar/node_modules/mkdirp/index.js
generated
vendored
Executable file
31
my-app/node_modules/tar/node_modules/mkdirp/index.js
generated
vendored
Executable file
|
@ -0,0 +1,31 @@
|
|||
const optsArg = require('./lib/opts-arg.js')
|
||||
const pathArg = require('./lib/path-arg.js')
|
||||
|
||||
const {mkdirpNative, mkdirpNativeSync} = require('./lib/mkdirp-native.js')
|
||||
const {mkdirpManual, mkdirpManualSync} = require('./lib/mkdirp-manual.js')
|
||||
const {useNative, useNativeSync} = require('./lib/use-native.js')
|
||||
|
||||
|
||||
const mkdirp = (path, opts) => {
|
||||
path = pathArg(path)
|
||||
opts = optsArg(opts)
|
||||
return useNative(opts)
|
||||
? mkdirpNative(path, opts)
|
||||
: mkdirpManual(path, opts)
|
||||
}
|
||||
|
||||
const mkdirpSync = (path, opts) => {
|
||||
path = pathArg(path)
|
||||
opts = optsArg(opts)
|
||||
return useNativeSync(opts)
|
||||
? mkdirpNativeSync(path, opts)
|
||||
: mkdirpManualSync(path, opts)
|
||||
}
|
||||
|
||||
mkdirp.sync = mkdirpSync
|
||||
mkdirp.native = (path, opts) => mkdirpNative(pathArg(path), optsArg(opts))
|
||||
mkdirp.manual = (path, opts) => mkdirpManual(pathArg(path), optsArg(opts))
|
||||
mkdirp.nativeSync = (path, opts) => mkdirpNativeSync(pathArg(path), optsArg(opts))
|
||||
mkdirp.manualSync = (path, opts) => mkdirpManualSync(pathArg(path), optsArg(opts))
|
||||
|
||||
module.exports = mkdirp
|
29
my-app/node_modules/tar/node_modules/mkdirp/lib/find-made.js
generated
vendored
Executable file
29
my-app/node_modules/tar/node_modules/mkdirp/lib/find-made.js
generated
vendored
Executable file
|
@ -0,0 +1,29 @@
|
|||
const {dirname} = require('path')
|
||||
|
||||
const findMade = (opts, parent, path = undefined) => {
|
||||
// we never want the 'made' return value to be a root directory
|
||||
if (path === parent)
|
||||
return Promise.resolve()
|
||||
|
||||
return opts.statAsync(parent).then(
|
||||
st => st.isDirectory() ? path : undefined, // will fail later
|
||||
er => er.code === 'ENOENT'
|
||||
? findMade(opts, dirname(parent), parent)
|
||||
: undefined
|
||||
)
|
||||
}
|
||||
|
||||
const findMadeSync = (opts, parent, path = undefined) => {
|
||||
if (path === parent)
|
||||
return undefined
|
||||
|
||||
try {
|
||||
return opts.statSync(parent).isDirectory() ? path : undefined
|
||||
} catch (er) {
|
||||
return er.code === 'ENOENT'
|
||||
? findMadeSync(opts, dirname(parent), parent)
|
||||
: undefined
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {findMade, findMadeSync}
|
64
my-app/node_modules/tar/node_modules/mkdirp/lib/mkdirp-manual.js
generated
vendored
Executable file
64
my-app/node_modules/tar/node_modules/mkdirp/lib/mkdirp-manual.js
generated
vendored
Executable file
|
@ -0,0 +1,64 @@
|
|||
const {dirname} = require('path')
|
||||
|
||||
const mkdirpManual = (path, opts, made) => {
|
||||
opts.recursive = false
|
||||
const parent = dirname(path)
|
||||
if (parent === path) {
|
||||
return opts.mkdirAsync(path, opts).catch(er => {
|
||||
// swallowed by recursive implementation on posix systems
|
||||
// any other error is a failure
|
||||
if (er.code !== 'EISDIR')
|
||||
throw er
|
||||
})
|
||||
}
|
||||
|
||||
return opts.mkdirAsync(path, opts).then(() => made || path, er => {
|
||||
if (er.code === 'ENOENT')
|
||||
return mkdirpManual(parent, opts)
|
||||
.then(made => mkdirpManual(path, opts, made))
|
||||
if (er.code !== 'EEXIST' && er.code !== 'EROFS')
|
||||
throw er
|
||||
return opts.statAsync(path).then(st => {
|
||||
if (st.isDirectory())
|
||||
return made
|
||||
else
|
||||
throw er
|
||||
}, () => { throw er })
|
||||
})
|
||||
}
|
||||
|
||||
// Synchronous recursive mkdir that never uses the native recursive option.
// Returns the first directory created, or undefined for roots / paths
// that already existed as directories.
const mkdirpManualSync = (path, opts, made) => {
  const parent = dirname(path)
  opts.recursive = false

  // Root: delegate directly; swallow the posix EISDIR, re-raise the rest.
  if (parent === path) {
    try {
      return opts.mkdirSync(path, opts)
    } catch (er) {
      if (er.code === 'EISDIR')
        return
      throw er
    }
  }

  try {
    opts.mkdirSync(path, opts)
    return made || path
  } catch (er) {
    // Missing parent: build it first, then retry this path.
    if (er.code === 'ENOENT')
      return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made))
    if (er.code !== 'EEXIST' && er.code !== 'EROFS')
      throw er
    // EEXIST/EROFS is tolerable only when a directory is already present.
    try {
      if (!opts.statSync(path).isDirectory())
        throw er
    } catch (_) {
      throw er
    }
  }
}
|
||||
|
||||
module.exports = {mkdirpManual, mkdirpManualSync}
|
39
my-app/node_modules/tar/node_modules/mkdirp/lib/mkdirp-native.js
generated
vendored
Executable file
39
my-app/node_modules/tar/node_modules/mkdirp/lib/mkdirp-native.js
generated
vendored
Executable file
|
@ -0,0 +1,39 @@
|
|||
const {dirname} = require('path')
|
||||
const {findMade, findMadeSync} = require('./find-made.js')
|
||||
const {mkdirpManual, mkdirpManualSync} = require('./mkdirp-manual.js')
|
||||
|
||||
// Recursive mkdir using the platform's native { recursive: true } support.
// `findMade` pre-computes the first directory that will be created, since
// the native call does not report it.  Falls back to the manual
// implementation on ENOENT, where native errors can be misleading.
const mkdirpNative = (path, opts) => {
  opts.recursive = true
  if (dirname(path) === path) {
    // Root directory: let the fs layer decide success/failure.
    return opts.mkdirAsync(path, opts)
  }

  return findMade(opts, path).then(made =>
    opts.mkdirAsync(path, opts)
      .then(() => made)
      .catch(er => {
        if (er.code !== 'ENOENT')
          throw er
        return mkdirpManual(path, opts)
      })
  )
}
|
||||
|
||||
// Synchronous twin of mkdirpNative: use the native recursive mkdirSync,
// pre-computing `made` and falling back to the manual walker on ENOENT.
const mkdirpNativeSync = (path, opts) => {
  opts.recursive = true
  if (dirname(path) === path) {
    // Root directory goes straight to the fs layer.
    return opts.mkdirSync(path, opts)
  }

  const made = findMadeSync(opts, path)
  try {
    opts.mkdirSync(path, opts)
    return made
  } catch (er) {
    if (er.code !== 'ENOENT')
      throw er
    return mkdirpManualSync(path, opts)
  }
}
|
||||
|
||||
module.exports = {mkdirpNative, mkdirpNativeSync}
|
23
my-app/node_modules/tar/node_modules/mkdirp/lib/opts-arg.js
generated
vendored
Executable file
23
my-app/node_modules/tar/node_modules/mkdirp/lib/opts-arg.js
generated
vendored
Executable file
|
@ -0,0 +1,23 @@
|
|||
const { promisify } = require('util')
|
||||
const fs = require('fs')
|
||||
// Normalize the user-supplied options into a fully-populated options
// object.  Accepts nothing, a plain object, a numeric mode, or an octal
// mode string; anything else is rejected.  Guarantees mkdir/stat entry
// points (sync + promisified async) and a default 0o777 mode.
const optsArg = opts => {
  if (!opts) {
    opts = { mode: 0o777, fs }
  } else {
    switch (typeof opts) {
      case 'object':
        opts = { mode: 0o777, fs, ...opts }
        break
      case 'number':
        opts = { mode: opts, fs }
        break
      case 'string':
        opts = { mode: parseInt(opts, 8), fs }
        break
      default:
        throw new TypeError('invalid options argument')
    }
  }

  // Resolve each fs entry point: explicit option > opts.fs > node's fs.
  const pick = (explicit, name) => explicit || opts.fs[name] || fs[name]
  opts.mkdir = pick(opts.mkdir, 'mkdir')
  opts.mkdirAsync = promisify(opts.mkdir)
  opts.stat = pick(opts.stat, 'stat')
  opts.statAsync = promisify(opts.stat)
  opts.statSync = pick(opts.statSync, 'statSync')
  opts.mkdirSync = pick(opts.mkdirSync, 'mkdirSync')
  return opts
}
|
||||
module.exports = optsArg
|
29
my-app/node_modules/tar/node_modules/mkdirp/lib/path-arg.js
generated
vendored
Executable file
29
my-app/node_modules/tar/node_modules/mkdirp/lib/path-arg.js
generated
vendored
Executable file
|
@ -0,0 +1,29 @@
|
|||
const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform
|
||||
const { resolve, parse } = require('path')
|
||||
// Validate and normalize the target path: reject embedded null bytes
// (mirroring node's own error), resolve to an absolute path, and on
// Windows reject characters that are illegal in path components.
const pathArg = path => {
  // honors the test-override env var, same as the module-level lookup
  const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform

  if (/\0/.test(path)) {
    // simulate the same failure that node raises
    throw Object.assign(
      new TypeError('path must be a string without null bytes'),
      {
        path,
        code: 'ERR_INVALID_ARG_VALUE',
      }
    )
  }

  path = resolve(path)
  if (platform === 'win32') {
    const badWinChars = /[*|"<>?:]/
    const { root } = parse(path)
    // the drive-letter colon inside the root is fine; anything after is not
    if (badWinChars.test(path.substr(root.length))) {
      throw Object.assign(new Error('Illegal characters in path.'), {
        path,
        code: 'EINVAL',
      })
    }
  }

  return path
}
|
||||
module.exports = pathArg
|
10
my-app/node_modules/tar/node_modules/mkdirp/lib/use-native.js
generated
vendored
Executable file
10
my-app/node_modules/tar/node_modules/mkdirp/lib/use-native.js
generated
vendored
Executable file
|
@ -0,0 +1,10 @@
|
|||
const fs = require('fs')
|
||||
|
||||
// Decide whether the native fs.mkdir({ recursive: true }) can be used:
// it shipped in node v10.12.0, and only applies when the caller has not
// swapped in a custom mkdir/mkdirSync implementation.
const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version
const [major, minor] = version.replace(/^v/, '').split('.')
const hasNative = +major > 10 || (+major === 10 && +minor >= 12)

// Without native support, always answer "no"; otherwise, native is usable
// exactly when the stock fs implementation is still in place.
const useNative = hasNative ? opts => opts.mkdir === fs.mkdir : () => false
const useNativeSync = hasNative ? opts => opts.mkdirSync === fs.mkdirSync : () => false
|
||||
|
||||
module.exports = {useNative, useNativeSync}
|
44
my-app/node_modules/tar/node_modules/mkdirp/package.json
generated
vendored
Executable file
44
my-app/node_modules/tar/node_modules/mkdirp/package.json
generated
vendored
Executable file
|
@ -0,0 +1,44 @@
|
|||
{
|
||||
"name": "mkdirp",
|
||||
"description": "Recursively mkdir, like `mkdir -p`",
|
||||
"version": "1.0.4",
|
||||
"main": "index.js",
|
||||
"keywords": [
|
||||
"mkdir",
|
||||
"directory",
|
||||
"make dir",
|
||||
"make",
|
||||
"dir",
|
||||
"recursive",
|
||||
"native"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/isaacs/node-mkdirp.git"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "tap",
|
||||
"snap": "tap",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"postpublish": "git push origin --follow-tags"
|
||||
},
|
||||
"tap": {
|
||||
"check-coverage": true,
|
||||
"coverage-map": "map.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"require-inject": "^1.4.4",
|
||||
"tap": "^14.10.7"
|
||||
},
|
||||
"bin": "bin/cmd.js",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"files": [
|
||||
"bin",
|
||||
"lib",
|
||||
"index.js"
|
||||
]
|
||||
}
|
266
my-app/node_modules/tar/node_modules/mkdirp/readme.markdown
generated
vendored
Executable file
266
my-app/node_modules/tar/node_modules/mkdirp/readme.markdown
generated
vendored
Executable file
|
@ -0,0 +1,266 @@
|
|||
# mkdirp
|
||||
|
||||
Like `mkdir -p`, but in Node.js!
|
||||
|
||||
Now with a modern API and no\* bugs!
|
||||
|
||||
<small>\* may contain some bugs</small>
|
||||
|
||||
# example
|
||||
|
||||
## pow.js
|
||||
|
||||
```js
|
||||
const mkdirp = require('mkdirp')
|
||||
|
||||
// return value is a Promise resolving to the first directory created
|
||||
mkdirp('/tmp/foo/bar/baz').then(made =>
|
||||
console.log(`made directories, starting with ${made}`))
|
||||
```
|
||||
|
||||
Output (where `/tmp/foo` already exists)
|
||||
|
||||
```
|
||||
made directories, starting with /tmp/foo/bar
|
||||
```
|
||||
|
||||
Or, if you don't have time to wait around for promises:
|
||||
|
||||
```js
|
||||
const mkdirp = require('mkdirp')
|
||||
|
||||
// return value is the first directory created
|
||||
const made = mkdirp.sync('/tmp/foo/bar/baz')
|
||||
console.log(`made directories, starting with ${made}`)
|
||||
```
|
||||
|
||||
And now /tmp/foo/bar/baz exists, huzzah!
|
||||
|
||||
# methods
|
||||
|
||||
```js
|
||||
const mkdirp = require('mkdirp')
|
||||
```
|
||||
|
||||
## mkdirp(dir, [opts]) -> Promise<String | undefined>
|
||||
|
||||
Create a new directory and any necessary subdirectories at `dir` with octal
|
||||
permission string `opts.mode`. If `opts` is a string or number, it will be
|
||||
treated as the `opts.mode`.
|
||||
|
||||
If `opts.mode` isn't specified, it defaults to `0o777 &
|
||||
(~process.umask())`.
|
||||
|
||||
Promise resolves to first directory `made` that had to be created, or
|
||||
`undefined` if everything already exists. Promise rejects if any errors
|
||||
are encountered. Note that, in the case of promise rejection, some
|
||||
directories _may_ have been created, as recursive directory creation is not
|
||||
an atomic operation.
|
||||
|
||||
You can optionally pass in an alternate `fs` implementation by passing in
|
||||
`opts.fs`. Your implementation should have `opts.fs.mkdir(path, opts, cb)`
|
||||
and `opts.fs.stat(path, cb)`.
|
||||
|
||||
You can also override just one or the other of `mkdir` and `stat` by
|
||||
passing in `opts.stat` or `opts.mkdir`, or providing an `fs` option that
|
||||
only overrides one of these.
|
||||
|
||||
## mkdirp.sync(dir, [opts]) -> String | undefined
|
||||
|
||||
Synchronously create a new directory and any necessary subdirectories at
|
||||
`dir` with octal permission string `opts.mode`. If `opts` is a string or
|
||||
number, it will be treated as the `opts.mode`.
|
||||
|
||||
If `opts.mode` isn't specified, it defaults to `0o777 &
|
||||
(~process.umask())`.
|
||||
|
||||
Returns the first directory that had to be created, or undefined if
|
||||
everything already exists.
|
||||
|
||||
You can optionally pass in an alternate `fs` implementation by passing in
|
||||
`opts.fs`. Your implementation should have `opts.fs.mkdirSync(path, mode)`
|
||||
and `opts.fs.statSync(path)`.
|
||||
|
||||
You can also override just one or the other of `mkdirSync` and `statSync`
|
||||
by passing in `opts.statSync` or `opts.mkdirSync`, or providing an `fs`
|
||||
option that only overrides one of these.
|
||||
|
||||
## mkdirp.manual, mkdirp.manualSync
|
||||
|
||||
Use the manual implementation (not the native one). This is the default
|
||||
when the native implementation is not available or the stat/mkdir
|
||||
implementation is overridden.
|
||||
|
||||
## mkdirp.native, mkdirp.nativeSync
|
||||
|
||||
Use the native implementation (not the manual one). This is the default
|
||||
when the native implementation is available and stat/mkdir are not
|
||||
overridden.
|
||||
|
||||
# implementation
|
||||
|
||||
On Node.js v10.12.0 and above, use the native `fs.mkdir(p,
|
||||
{recursive:true})` option, unless `fs.mkdir`/`fs.mkdirSync` has been
|
||||
overridden by an option.
|
||||
|
||||
## native implementation
|
||||
|
||||
- If the path is a root directory, then pass it to the underlying
|
||||
implementation and return the result/error. (In this case, it'll either
|
||||
succeed or fail, but we aren't actually creating any dirs.)
|
||||
- Walk up the path statting each directory, to find the first path that
|
||||
will be created, `made`.
|
||||
- Call `fs.mkdir(path, { recursive: true })` (or `fs.mkdirSync`)
|
||||
- If error, raise it to the caller.
|
||||
- Return `made`.
|
||||
|
||||
## manual implementation
|
||||
|
||||
- Call underlying `fs.mkdir` implementation, with `recursive: false`
|
||||
- If error:
|
||||
- If path is a root directory, raise to the caller and do not handle it
|
||||
- If ENOENT, mkdirp parent dir, store result as `made`
|
||||
- stat(path)
|
||||
- If error, raise original `mkdir` error
|
||||
- If directory, return `made`
|
||||
- Else, raise original `mkdir` error
|
||||
- else
|
||||
- return `undefined` if a root dir, or `made` if set, or `path`
|
||||
|
||||
## windows vs unix caveat
|
||||
|
||||
On Windows file systems, attempts to create a root directory (ie, a drive
|
||||
letter or root UNC path) will fail. If the root directory exists, then it
|
||||
will fail with `EPERM`. If the root directory does not exist, then it will
|
||||
fail with `ENOENT`.
|
||||
|
||||
On posix file systems, attempts to create a root directory (in recursive
|
||||
mode) will succeed silently, as it is treated like just another directory
|
||||
that already exists. (In non-recursive mode, of course, it fails with
|
||||
`EEXIST`.)
|
||||
|
||||
In order to preserve this system-specific behavior (and because it's not as
|
||||
if we can create the parent of a root directory anyway), attempts to create
|
||||
a root directory are passed directly to the `fs` implementation, and any
|
||||
errors encountered are not handled.
|
||||
|
||||
## native error caveat
|
||||
|
||||
The native implementation (as of at least Node.js v13.4.0) does not provide
|
||||
appropriate errors in some cases (see
|
||||
[nodejs/node#31481](https://github.com/nodejs/node/issues/31481) and
|
||||
[nodejs/node#28015](https://github.com/nodejs/node/issues/28015)).
|
||||
|
||||
In order to work around this issue, the native implementation will fall
|
||||
back to the manual implementation if an `ENOENT` error is encountered.
|
||||
|
||||
# choosing a recursive mkdir implementation
|
||||
|
||||
There are a few to choose from! Use the one that suits your needs best :D
|
||||
|
||||
## use `fs.mkdir(path, {recursive: true}, cb)` if:
|
||||
|
||||
- You wish to optimize performance even at the expense of other factors.
|
||||
- You don't need to know the first dir created.
|
||||
- You are ok with getting `ENOENT` as the error when some other problem is
|
||||
the actual cause.
|
||||
- You can limit your platforms to Node.js v10.12 and above.
|
||||
- You're ok with using callbacks instead of promises.
|
||||
- You don't need/want a CLI.
|
||||
- You don't need to override the `fs` methods in use.
|
||||
|
||||
## use this module (mkdirp 1.x) if:
|
||||
|
||||
- You need to know the first directory that was created.
|
||||
- You wish to use the native implementation if available, but fall back
|
||||
when it's not.
|
||||
- You prefer promise-returning APIs to callback-taking APIs.
|
||||
- You want more useful error messages than the native recursive mkdir
|
||||
provides (at least as of Node.js v13.4), and are ok with re-trying on
|
||||
`ENOENT` to achieve this.
|
||||
- You need (or at least, are ok with) a CLI.
|
||||
- You need to override the `fs` methods in use.
|
||||
|
||||
## use [`make-dir`](http://npm.im/make-dir) if:
|
||||
|
||||
- You do not need to know the first dir created (and wish to save a few
|
||||
`stat` calls when using the native implementation for this reason).
|
||||
- You wish to use the native implementation if available, but fall back
|
||||
when it's not.
|
||||
- You prefer promise-returning APIs to callback-taking APIs.
|
||||
- You are ok with occasionally getting `ENOENT` errors for failures that
|
||||
are actually related to something other than a missing file system entry.
|
||||
- You don't need/want a CLI.
|
||||
- You need to override the `fs` methods in use.
|
||||
|
||||
## use mkdirp 0.x if:
|
||||
|
||||
- You need to know the first directory that was created.
|
||||
- You need (or at least, are ok with) a CLI.
|
||||
- You need to override the `fs` methods in use.
|
||||
- You're ok with using callbacks instead of promises.
|
||||
- You are not running on Windows, where the root-level ENOENT errors can
|
||||
lead to infinite regress.
|
||||
- You think vinyl just sounds warmer and richer for some weird reason.
|
||||
- You are supporting truly ancient Node.js versions, before even the advent
|
||||
of a `Promise` language primitive. (Please don't. You deserve better.)
|
||||
|
||||
# cli
|
||||
|
||||
This package also ships with a `mkdirp` command.
|
||||
|
||||
```
|
||||
$ mkdirp -h
|
||||
|
||||
usage: mkdirp [DIR1,DIR2..] {OPTIONS}
|
||||
|
||||
Create each supplied directory including any necessary parent directories
|
||||
that don't yet exist.
|
||||
|
||||
If the directory already exists, do nothing.
|
||||
|
||||
OPTIONS are:
|
||||
|
||||
-m<mode> If a directory needs to be created, set the mode as an octal
|
||||
--mode=<mode> permission string.
|
||||
|
||||
-v --version Print the mkdirp version number
|
||||
|
||||
-h --help Print this helpful banner
|
||||
|
||||
-p --print Print the first directories created for each path provided
|
||||
|
||||
--manual Use manual implementation, even if native is available
|
||||
```
|
||||
|
||||
# install
|
||||
|
||||
With [npm](http://npmjs.org) do:
|
||||
|
||||
```
|
||||
npm install mkdirp
|
||||
```
|
||||
|
||||
to get the library locally, or
|
||||
|
||||
```
|
||||
npm install -g mkdirp
|
||||
```
|
||||
|
||||
to get the command everywhere, or
|
||||
|
||||
```
|
||||
npx mkdirp ...
|
||||
```
|
||||
|
||||
to run the command without installing it globally.
|
||||
|
||||
# platform support
|
||||
|
||||
This module works on node v8, but only v10 and above are officially
|
||||
supported, as Node v8 reached its LTS end of life 2020-01-01, which is in
|
||||
the past, as of this writing.
|
||||
|
||||
# license
|
||||
|
||||
MIT
|
15
my-app/node_modules/tar/node_modules/yallist/LICENSE
generated
vendored
Executable file
15
my-app/node_modules/tar/node_modules/yallist/LICENSE
generated
vendored
Executable file
|
@ -0,0 +1,15 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
204
my-app/node_modules/tar/node_modules/yallist/README.md
generated
vendored
Executable file
204
my-app/node_modules/tar/node_modules/yallist/README.md
generated
vendored
Executable file
|
@ -0,0 +1,204 @@
|
|||
# yallist
|
||||
|
||||
Yet Another Linked List
|
||||
|
||||
There are many doubly-linked list implementations like it, but this
|
||||
one is mine.
|
||||
|
||||
For when an array would be too big, and a Map can't be iterated in
|
||||
reverse order.
|
||||
|
||||
|
||||
[](https://travis-ci.org/isaacs/yallist) [](https://coveralls.io/github/isaacs/yallist)
|
||||
|
||||
## basic usage
|
||||
|
||||
```javascript
|
||||
var yallist = require('yallist')
|
||||
var myList = yallist.create([1, 2, 3])
|
||||
myList.push('foo')
|
||||
myList.unshift('bar')
|
||||
// of course pop() and shift() are there, too
|
||||
console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo']
|
||||
myList.forEach(function (k) {
|
||||
// walk the list head to tail
|
||||
})
|
||||
myList.forEachReverse(function (k, index, list) {
|
||||
// walk the list tail to head
|
||||
})
|
||||
var myDoubledList = myList.map(function (k) {
|
||||
return k + k
|
||||
})
|
||||
// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo']
|
||||
// mapReverse is also a thing
|
||||
var myDoubledListReverse = myList.mapReverse(function (k) {
|
||||
return k + k
|
||||
}) // ['foofoo', 6, 4, 2, 'barbar']
|
||||
|
||||
var reduced = myList.reduce(function (set, entry) {
|
||||
set += entry
|
||||
return set
|
||||
}, 'start')
|
||||
console.log(reduced) // 'startfoo123bar'
|
||||
```
|
||||
|
||||
## api
|
||||
|
||||
The whole API is considered "public".
|
||||
|
||||
Functions with the same name as an Array method work more or less the
|
||||
same way.
|
||||
|
||||
There's reverse versions of most things because that's the point.
|
||||
|
||||
### Yallist
|
||||
|
||||
Default export, the class that holds and manages a list.
|
||||
|
||||
Call it with either a forEach-able (like an array) or a set of
|
||||
arguments, to initialize the list.
|
||||
|
||||
The Array-ish methods all act like you'd expect. No magic length,
|
||||
though, so if you change that it won't automatically prune or add
|
||||
empty spots.
|
||||
|
||||
### Yallist.create(..)
|
||||
|
||||
Alias for Yallist function. Some people like factories.
|
||||
|
||||
#### yallist.head
|
||||
|
||||
The first node in the list
|
||||
|
||||
#### yallist.tail
|
||||
|
||||
The last node in the list
|
||||
|
||||
#### yallist.length
|
||||
|
||||
The number of nodes in the list. (Change this at your peril. It is
|
||||
not magic like Array length.)
|
||||
|
||||
#### yallist.toArray()
|
||||
|
||||
Convert the list to an array.
|
||||
|
||||
#### yallist.forEach(fn, [thisp])
|
||||
|
||||
Call a function on each item in the list.
|
||||
|
||||
#### yallist.forEachReverse(fn, [thisp])
|
||||
|
||||
Call a function on each item in the list, in reverse order.
|
||||
|
||||
#### yallist.get(n)
|
||||
|
||||
Get the data at position `n` in the list. If you use this a lot,
|
||||
probably better off just using an Array.
|
||||
|
||||
#### yallist.getReverse(n)
|
||||
|
||||
Get the data at position `n`, counting from the tail.
|
||||
|
||||
#### yallist.map(fn, thisp)
|
||||
|
||||
Create a new Yallist with the result of calling the function on each
|
||||
item.
|
||||
|
||||
#### yallist.mapReverse(fn, thisp)
|
||||
|
||||
Same as `map`, but in reverse.
|
||||
|
||||
#### yallist.pop()
|
||||
|
||||
Get the data from the list tail, and remove the tail from the list.
|
||||
|
||||
#### yallist.push(item, ...)
|
||||
|
||||
Insert one or more items to the tail of the list.
|
||||
|
||||
#### yallist.reduce(fn, initialValue)
|
||||
|
||||
Like Array.reduce.
|
||||
|
||||
#### yallist.reduceReverse
|
||||
|
||||
Like Array.reduce, but in reverse.
|
||||
|
||||
#### yallist.reverse
|
||||
|
||||
Reverse the list in place.
|
||||
|
||||
#### yallist.shift()
|
||||
|
||||
Get the data from the list head, and remove the head from the list.
|
||||
|
||||
#### yallist.slice([from], [to])
|
||||
|
||||
Just like Array.slice, but returns a new Yallist.
|
||||
|
||||
#### yallist.sliceReverse([from], [to])
|
||||
|
||||
Just like yallist.slice, but the result is returned in reverse.
|
||||
|
||||
#### yallist.toArray()
|
||||
|
||||
Create an array representation of the list.
|
||||
|
||||
#### yallist.toArrayReverse()
|
||||
|
||||
Create a reversed array representation of the list.
|
||||
|
||||
#### yallist.unshift(item, ...)
|
||||
|
||||
Insert one or more items to the head of the list.
|
||||
|
||||
#### yallist.unshiftNode(node)
|
||||
|
||||
Move a Node object to the front of the list. (That is, pull it out of
|
||||
wherever it lives, and make it the new head.)
|
||||
|
||||
If the node belongs to a different list, then that list will remove it
|
||||
first.
|
||||
|
||||
#### yallist.pushNode(node)
|
||||
|
||||
Move a Node object to the end of the list. (That is, pull it out of
|
||||
wherever it lives, and make it the new tail.)
|
||||
|
||||
If the node belongs to a list already, then that list will remove it
|
||||
first.
|
||||
|
||||
#### yallist.removeNode(node)
|
||||
|
||||
Remove a node from the list, preserving referential integrity of head
|
||||
and tail and other nodes.
|
||||
|
||||
Will throw an error if you try to have a list remove a node that
|
||||
doesn't belong to it.
|
||||
|
||||
### Yallist.Node
|
||||
|
||||
The class that holds the data and the links; Node instances are what the list is actually made of.
|
||||
|
||||
Call with `var n = new Node(value, previousNode, nextNode)`
|
||||
|
||||
Note that if you do direct operations on Nodes themselves, it's very
|
||||
easy to get into weird states where the list is broken. Be careful :)
|
||||
|
||||
#### node.next
|
||||
|
||||
The next node in the list.
|
||||
|
||||
#### node.prev
|
||||
|
||||
The previous node in the list.
|
||||
|
||||
#### node.value
|
||||
|
||||
The data the node contains.
|
||||
|
||||
#### node.list
|
||||
|
||||
The list to which this node belongs. (Null if it does not belong to
|
||||
any list.)
|
8
my-app/node_modules/tar/node_modules/yallist/iterator.js
generated
vendored
Executable file
8
my-app/node_modules/tar/node_modules/yallist/iterator.js
generated
vendored
Executable file
|
@ -0,0 +1,8 @@
|
|||
'use strict'
|
||||
module.exports = function (Yallist) {
|
||||
Yallist.prototype[Symbol.iterator] = function* () {
|
||||
for (let walker = this.head; walker; walker = walker.next) {
|
||||
yield walker.value
|
||||
}
|
||||
}
|
||||
}
|
29
my-app/node_modules/tar/node_modules/yallist/package.json
generated
vendored
Executable file
29
my-app/node_modules/tar/node_modules/yallist/package.json
generated
vendored
Executable file
|
@ -0,0 +1,29 @@
|
|||
{
|
||||
"name": "yallist",
|
||||
"version": "4.0.0",
|
||||
"description": "Yet Another Linked List",
|
||||
"main": "yallist.js",
|
||||
"directories": {
|
||||
"test": "test"
|
||||
},
|
||||
"files": [
|
||||
"yallist.js",
|
||||
"iterator.js"
|
||||
],
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"tap": "^12.1.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "tap test/*.js --100",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"postpublish": "git push origin --all; git push origin --tags"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/isaacs/yallist.git"
|
||||
},
|
||||
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
|
||||
"license": "ISC"
|
||||
}
|
426
my-app/node_modules/tar/node_modules/yallist/yallist.js
generated
vendored
Executable file
426
my-app/node_modules/tar/node_modules/yallist/yallist.js
generated
vendored
Executable file
|
@ -0,0 +1,426 @@
|
|||
'use strict'
|
||||
module.exports = Yallist
|
||||
|
||||
Yallist.Node = Node
|
||||
Yallist.create = Yallist
|
||||
|
||||
// Doubly-linked list.  Callable with or without `new`; accepts either a
// single forEach-able collection or individual items as arguments.
function Yallist (list) {
  const self = this instanceof Yallist ? this : new Yallist()

  self.tail = null
  self.head = null
  self.length = 0

  if (list && typeof list.forEach === 'function') {
    // seed from any forEach-able (array, another Yallist, ...)
    list.forEach(function (item) {
      self.push(item)
    })
  } else if (arguments.length > 0) {
    // seed from positional arguments
    for (let i = 0; i < arguments.length; i++) {
      self.push(arguments[i])
    }
  }

  return self
}
|
||||
|
||||
// Detach `node` from this list, stitching its neighbors together and
// fixing up head/tail/length.  Returns the node that followed it.
// Throws if the node belongs to some other list (or none).
Yallist.prototype.removeNode = function (node) {
  if (node.list !== this) {
    throw new Error('removing node which does not belong to this list')
  }

  const next = node.next
  const prev = node.prev

  if (next) {
    next.prev = prev
  }
  if (prev) {
    prev.next = next
  }

  if (node === this.head) {
    this.head = next
  }
  if (node === this.tail) {
    this.tail = prev
  }

  node.list.length--
  // fully orphan the node so it can be re-inserted safely
  node.next = null
  node.prev = null
  node.list = null

  return next
}
|
||||
|
||||
// Move an existing Node object to the head of this list, detaching it
// from whatever list it currently belongs to.  No-op if already the head.
Yallist.prototype.unshiftNode = function (node) {
  if (node === this.head) {
    return
  }

  if (node.list) {
    node.list.removeNode(node)
  }

  const oldHead = this.head
  node.list = this
  node.next = oldHead
  if (oldHead) {
    oldHead.prev = node
  }

  this.head = node
  if (!this.tail) {
    // list was empty: the new node is both ends
    this.tail = node
  }
  this.length++
}

// Move an existing Node object to the tail of this list, detaching it
// from whatever list it currently belongs to.  No-op if already the tail.
Yallist.prototype.pushNode = function (node) {
  if (node === this.tail) {
    return
  }

  if (node.list) {
    node.list.removeNode(node)
  }

  const oldTail = this.tail
  node.list = this
  node.prev = oldTail
  if (oldTail) {
    oldTail.next = node
  }

  this.tail = node
  if (!this.head) {
    // list was empty: the new node is both ends
    this.head = node
  }
  this.length++
}
|
||||
|
||||
// Append each argument to the tail; returns the new length.
Yallist.prototype.push = function (...items) {
  for (const item of items) {
    push(this, item)
  }
  return this.length
}

// Prepend each argument to the head; returns the new length.
Yallist.prototype.unshift = function (...items) {
  for (const item of items) {
    unshift(this, item)
  }
  return this.length
}
|
||||
|
||||
// Remove and return the value at the tail (undefined when empty).
Yallist.prototype.pop = function () {
  if (!this.tail) {
    return undefined
  }

  const value = this.tail.value
  this.tail = this.tail.prev
  if (this.tail) {
    this.tail.next = null
  } else {
    // list is now empty
    this.head = null
  }
  this.length--
  return value
}

// Remove and return the value at the head (undefined when empty).
Yallist.prototype.shift = function () {
  if (!this.head) {
    return undefined
  }

  const value = this.head.value
  this.head = this.head.next
  if (this.head) {
    this.head.prev = null
  } else {
    // list is now empty
    this.tail = null
  }
  this.length--
  return value
}
|
||||
|
||||
// Invoke fn(value, index, list) for each entry, walking head to tail.
Yallist.prototype.forEach = function (fn, thisp) {
  thisp = thisp || this
  let i = 0
  for (let walker = this.head; walker !== null; walker = walker.next) {
    fn.call(thisp, walker.value, i++, this)
  }
}

// Invoke fn(value, index, list) for each entry, walking tail to head
// (indexes count down from length - 1).
Yallist.prototype.forEachReverse = function (fn, thisp) {
  thisp = thisp || this
  let i = this.length - 1
  for (let walker = this.tail; walker !== null; walker = walker.prev) {
    fn.call(thisp, walker.value, i--, this)
  }
}
|
||||
|
||||
// Return the value at 0-based position n from the head, or undefined if
// the list is shorter than that.  O(n) — use an Array for frequent access.
Yallist.prototype.get = function (n) {
  let walker = this.head
  let i = 0
  while (walker !== null && i < n) {
    // abort out of the list early if we hit a cycle
    walker = walker.next
    i++
  }
  if (walker !== null && i === n) {
    return walker.value
  }
}

// Return the value at 0-based position n counting back from the tail.
Yallist.prototype.getReverse = function (n) {
  let walker = this.tail
  let i = 0
  while (walker !== null && i < n) {
    // abort out of the list early if we hit a cycle
    walker = walker.prev
    i++
  }
  if (walker !== null && i === n) {
    return walker.value
  }
}
|
||||
|
||||
// Build a new Yallist of fn(value, list) applied to each entry, in order.
Yallist.prototype.map = function (fn, thisp) {
  thisp = thisp || this
  const result = new Yallist()
  let walker = this.head
  while (walker !== null) {
    result.push(fn.call(thisp, walker.value, this))
    walker = walker.next
  }
  return result
}

// Like map, but walks tail to head, so the result comes out reversed.
Yallist.prototype.mapReverse = function (fn, thisp) {
  thisp = thisp || this
  const result = new Yallist()
  let walker = this.tail
  while (walker !== null) {
    result.push(fn.call(thisp, walker.value, this))
    walker = walker.prev
  }
  return result
}
|
||||
|
||||
// Like Array#reduce, head to tail.  Throws if the list is empty and no
// initial value was supplied.
Yallist.prototype.reduce = function (fn, initial) {
  let acc
  let walker = this.head
  if (arguments.length > 1) {
    acc = initial
  } else if (this.head) {
    // seed with the first value and start walking from the second
    walker = this.head.next
    acc = this.head.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }

  let i = 0
  while (walker !== null) {
    acc = fn(acc, walker.value, i)
    i++
    walker = walker.next
  }

  return acc
}

// Like Array#reduce, but tail to head (indexes count down).
Yallist.prototype.reduceReverse = function (fn, initial) {
  let acc
  let walker = this.tail
  if (arguments.length > 1) {
    acc = initial
  } else if (this.tail) {
    // seed with the last value and start walking from the second-to-last
    walker = this.tail.prev
    acc = this.tail.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }

  let i = this.length - 1
  while (walker !== null) {
    acc = fn(acc, walker.value, i)
    i--
    walker = walker.prev
  }

  return acc
}
|
||||
|
||||
// Snapshot the list into a plain array, head to tail.
Yallist.prototype.toArray = function () {
  const arr = new Array(this.length)
  let walker = this.head
  for (let i = 0; walker !== null; i++) {
    arr[i] = walker.value
    walker = walker.next
  }
  return arr
}

// Snapshot the list into a plain array, tail to head.
Yallist.prototype.toArrayReverse = function () {
  const arr = new Array(this.length)
  let walker = this.tail
  for (let i = 0; walker !== null; i++) {
    arr[i] = walker.value
    walker = walker.prev
  }
  return arr
}
|
||||
|
||||
// Return a new Yallist of the values in the half-open range [from, to),
// following Array.prototype.slice conventions: negative indices count back
// from the end, out-of-range bounds are clamped, and an inverted range
// yields an empty list.
Yallist.prototype.slice = function (from, to) {
  // Only a missing `to` means "through the end"; `to === 0` is a real bound.
  // (The previous `to = to || this.length` made slice(x, 0) return the whole
  // tail instead of an empty list, unlike Array.prototype.slice.)
  if (to === undefined || to === null) {
    to = this.length
  }
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }
  var ret = new Yallist()
  if (to < from || to < 0) {
    return ret
  }
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }
  // Skip ahead to the node at index `from`...
  for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
    walker = walker.next
  }
  // ...then copy values until index `to` (exclusive).
  for (; walker !== null && i < to; i++, walker = walker.next) {
    ret.push(walker.value)
  }
  return ret
}
|
||||
|
||||
// Return a new Yallist of the values in the half-open range [from, to),
// collected tail-first, so the result is in reverse order relative to
// slice(). Index handling follows Array.prototype.slice conventions:
// negative indices count back from the end and bounds are clamped.
Yallist.prototype.sliceReverse = function (from, to) {
  // Only a missing `to` means "through the end"; `to === 0` is a real bound.
  // (The previous `to = to || this.length` made sliceReverse(x, 0) return
  // the whole tail instead of an empty list.)
  if (to === undefined || to === null) {
    to = this.length
  }
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }
  var ret = new Yallist()
  if (to < from || to < 0) {
    return ret
  }
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }
  // Skip back to the node at index `to - 1`...
  for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
    walker = walker.prev
  }
  // ...then copy values backwards down to index `from` (inclusive).
  for (; walker !== null && i > from; i--, walker = walker.prev) {
    ret.push(walker.value)
  }
  return ret
}
|
||||
|
||||
// Remove `deleteCount` values starting at index `start`, insert the given
// `nodes` in their place, and return the removed values as a plain array —
// modeled on Array.prototype.splice.
Yallist.prototype.splice = function (start, deleteCount, ...nodes) {
  // Clamp an out-of-range start. NOTE(review): start > length is pulled back
  // to the *last index* (length - 1), whereas Array.prototype.splice clamps
  // to length (a pure append) — confirm this asymmetry is intentional.
  if (start > this.length) {
    start = this.length - 1
  }
  // A negative start counts back from the end of the list.
  if (start < 0) {
    start = this.length + start;
  }

  // Walk from the head to the node currently at index `start`.
  for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
    walker = walker.next
  }

  // Collect and unlink up to `deleteCount` values. removeNode's return value
  // becomes the next walker — presumably the removed node's successor
  // (removeNode is defined earlier in this file; verify).
  var ret = []
  for (var i = 0; walker && i < deleteCount; i++) {
    ret.push(walker.value)
    walker = this.removeNode(walker)
  }
  // Deletion may have run off the end of the list; fall back to the tail as
  // the insertion anchor so the inserts below land at the end.
  if (walker === null) {
    walker = this.tail
  }

  // insert() links new nodes *after* its anchor (except at the head, where
  // it prepends), so step back one node to insert at the splice point.
  if (walker !== this.head && walker !== this.tail) {
    walker = walker.prev
  }

  // Thread each new value in at the splice point, in order.
  for (var i = 0; i < nodes.length; i++) {
    walker = insert(this, walker, nodes[i])
  }
  return ret;
}
|
||||
|
||||
// Reverse the list in place by swapping each node's prev/next links and then
// swapping head and tail. Returns the list for chaining.
Yallist.prototype.reverse = function () {
  var oldHead = this.head
  var oldTail = this.tail
  var node = oldHead
  while (node !== null) {
    var next = node.next
    node.next = node.prev
    node.prev = next
    node = next
  }
  this.head = oldTail
  this.tail = oldHead
  return this
}
|
||||
|
||||
// Link a new Node holding `value` into list `self`: before `node` when it is
// the head, otherwise immediately after `node`. Fixes up head/tail and
// length, and returns the freshly created node.
function insert (self, node, value) {
  var inserted
  if (node === self.head) {
    inserted = new Node(value, null, node, self)
  } else {
    inserted = new Node(value, node, node.next, self)
  }

  // A node with no successor is the new tail; no predecessor, the new head.
  if (inserted.next === null) {
    self.tail = inserted
  }
  if (inserted.prev === null) {
    self.head = inserted
  }

  self.length++
  return inserted
}
|
||||
|
||||
// Append `item` at the tail of list `self`; a formerly empty list gets its
// head pointed at the same new node.
function push (self, item) {
  var node = new Node(item, self.tail, null, self)
  self.tail = node
  if (!self.head) {
    self.head = node
  }
  self.length++
}
|
||||
|
||||
// Prepend `item` at the head of list `self`; a formerly empty list gets its
// tail pointed at the same new node.
function unshift (self, item) {
  var node = new Node(item, null, self.head, self)
  self.head = node
  if (!self.tail) {
    self.tail = node
  }
  self.length++
}
|
||||
|
||||
// Doubly-linked list node. Works with or without `new`; when `prev` or
// `next` neighbors are given, it wires itself into their links as well.
function Node (value, prev, next, list) {
  if (!(this instanceof Node)) {
    return new Node(value, prev, next, list)
  }

  this.list = list
  this.value = value

  this.prev = prev || null
  if (prev) {
    prev.next = this
  }

  this.next = next || null
  if (next) {
    next.prev = this
  }
}
|
||||
|
||||
// Optionally install Symbol.iterator support on Yallist. The sibling module
// is allowed to fail to load (e.g. on engines without Symbol.iterator) and
// the failure is deliberately ignored so the rest of the library still works.
try {
  // add if support for Symbol.iterator is present
  require('./iterator.js')(Yallist)
} catch (er) {}
|
Loading…
Add table
Add a link
Reference in a new issue