Updated the files.
This commit is contained in:
parent
1553e6b971
commit
753967d4f5
23418 changed files with 3784666 additions and 0 deletions
15
my-app/node_modules/pacote/LICENSE
generated
vendored
Executable file
15
my-app/node_modules/pacote/LICENSE
generated
vendored
Executable file
|
@ -0,0 +1,15 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) Isaac Z. Schlueter, Kat Marchán, npm, Inc., and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
285
my-app/node_modules/pacote/README.md
generated
vendored
Executable file
285
my-app/node_modules/pacote/README.md
generated
vendored
Executable file
|
@ -0,0 +1,285 @@
|
|||
# pacote
|
||||
|
||||
Fetches package manifests and tarballs from the npm registry.
|
||||
|
||||
## USAGE
|
||||
|
||||
```js
|
||||
const pacote = require('pacote')
|
||||
|
||||
// get a package manifest
|
||||
pacote.manifest('foo@1.x').then(manifest => console.log('got it', manifest))
|
||||
|
||||
// extract a package into a folder
|
||||
pacote.extract('github:npm/cli', 'some/path', options)
|
||||
.then(({from, resolved, integrity}) => {
|
||||
console.log('extracted!', from, resolved, integrity)
|
||||
})
|
||||
|
||||
pacote.tarball('https://server.com/package.tgz').then(data => {
|
||||
console.log('got ' + data.length + ' bytes of tarball data')
|
||||
})
|
||||
```
|
||||
|
||||
`pacote` works with any kind of package specifier that npm can install. If
|
||||
you can pass it to the npm CLI, you can pass it to pacote. (In fact, that's
|
||||
exactly what the npm CLI does.)
|
||||
|
||||
Anything that you can do with one kind of package, you can do with another.
|
||||
|
||||
Data that isn't relevant (like a packument for a tarball) will be
|
||||
simulated.
|
||||
|
||||
`prepare` scripts will be run when generating tarballs from `git` and
|
||||
`directory` locations, to simulate what _would_ be published to the
|
||||
registry, so that you get a working package instead of just raw source
|
||||
code that might need to be transpiled.
|
||||
|
||||
## CLI
|
||||
|
||||
This module exports a command line interface that can do most of what is
|
||||
described below. Run `pacote -h` to learn more.
|
||||
|
||||
```
|
||||
Pacote - The JavaScript Package Handler, v10.1.1
|
||||
|
||||
Usage:
|
||||
|
||||
pacote resolve <spec>
|
||||
Resolve a specifier and output the fully resolved target
|
||||
Returns integrity and from if '--long' flag is set.
|
||||
|
||||
pacote manifest <spec>
|
||||
Fetch a manifest and print to stdout
|
||||
|
||||
pacote packument <spec>
|
||||
Fetch a full packument and print to stdout
|
||||
|
||||
pacote tarball <spec> [<filename>]
|
||||
Fetch a package tarball and save to <filename>
|
||||
If <filename> is missing or '-', the tarball will be streamed to stdout.
|
||||
|
||||
pacote extract <spec> <folder>
|
||||
Extract a package to the destination folder.
|
||||
|
||||
Configuration values all match the names of configs passed to npm, or
|
||||
options passed to Pacote. Additional flags for this executable:
|
||||
|
||||
--long Print an object from 'resolve', including integrity and spec.
|
||||
--json Print result objects as JSON rather than node's default.
|
||||
(This is the default if stdout is not a TTY.)
|
||||
--help -h Print this helpful text.
|
||||
|
||||
For example '--cache=/path/to/folder' will use that folder as the cache.
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
The `spec` refers to any kind of package specifier that npm can install.
|
||||
If you can pass it to the npm CLI, you can pass it to pacote. (In fact,
|
||||
that's exactly what the npm CLI does.)
|
||||
|
||||
See below for valid `opts` values.
|
||||
|
||||
* `pacote.resolve(spec, opts)` Resolve a specifier like `foo@latest` or
|
||||
`github:user/project` all the way to a tarball url, tarball file, or git
|
||||
repo with commit hash.
|
||||
|
||||
* `pacote.extract(spec, dest, opts)` Extract a package's tarball into a
|
||||
destination folder. Returns a promise that resolves to the
|
||||
`{from,resolved,integrity}` of the extracted package.
|
||||
|
||||
* `pacote.manifest(spec, opts)` Fetch (or simulate) a package's manifest
|
||||
(basically, the `package.json` file, plus a bit of metadata).
|
||||
See below for more on manifests and packuments. Returns a Promise that
|
||||
resolves to the manifest object.
|
||||
|
||||
* `pacote.packument(spec, opts)` Fetch (or simulate) a package's packument
|
||||
(basically, the top-level package document listing all the manifests that
|
||||
the registry returns). See below for more on manifests and packuments.
|
||||
Returns a Promise that resolves to the packument object.
|
||||
|
||||
* `pacote.tarball(spec, opts)` Get a package tarball data as a buffer in
|
||||
memory. Returns a Promise that resolves to the tarball data Buffer, with
|
||||
`from`, `resolved`, and `integrity` fields attached.
|
||||
|
||||
* `pacote.tarball.file(spec, dest, opts)` Save a package tarball data to
|
||||
a file on disk. Returns a Promise that resolves to
|
||||
`{from,integrity,resolved}` of the fetched tarball.
|
||||
|
||||
* `pacote.tarball.stream(spec, streamHandler, opts)` Fetch a tarball and
|
||||
make the stream available to the `streamHandler` function.
|
||||
|
||||
This is mostly an internal function, but it is exposed because it does
|
||||
provide some functionality that may be difficult to achieve otherwise.
|
||||
|
||||
The `streamHandler` function MUST return a Promise that resolves when
|
||||
the stream (and all associated work) is ended, or rejects if the stream
|
||||
has an error.
|
||||
|
||||
The `streamHandler` function MAY be called multiple times, as Pacote
|
||||
retries requests in some scenarios, such as cache corruption or
|
||||
retriable network failures.
|
||||
|
||||
### Options
|
||||
|
||||
Options are passed to
|
||||
[`npm-registry-fetch`](http://npm.im/npm-registry-fetch) and
|
||||
[`cacache`](http://npm.im/cacache), so in addition to these, anything for
|
||||
those modules can be given to pacote as well.
|
||||
|
||||
Options object is cloned, and mutated along the way to add integrity,
|
||||
resolved, and other properties, as they are determined.
|
||||
|
||||
* `cache` Where to store cache entries and temp files. Passed to
|
||||
[`cacache`](http://npm.im/cacache). Defaults to the same cache directory
|
||||
that npm will use by default, based on platform and environment.
|
||||
* `where` Base folder for resolving relative `file:` dependencies.
|
||||
* `resolved` Shortcut for looking up resolved values. Should be specified
|
||||
if known.
|
||||
* `integrity` Expected integrity of fetched package tarball. If specified,
|
||||
tarballs with mismatched integrity values will raise an `EINTEGRITY`
|
||||
error.
|
||||
* `umask` Permission mode mask for extracted files and directories.
|
||||
Defaults to `0o22`. See "Extracted File Modes" below.
|
||||
* `fmode` Minimum permission mode for extracted files. Defaults to
|
||||
`0o666`. See "Extracted File Modes" below.
|
||||
* `dmode` Minimum permission mode for extracted directories. Defaults to
|
||||
`0o777`. See "Extracted File Modes" below.
|
||||
* `preferOnline` Prefer to revalidate cache entries, even when it would not
|
||||
be strictly necessary. Default `false`.
|
||||
* `before` When picking a manifest from a packument, only consider
|
||||
packages published before the specified date. Default `null`.
|
||||
* `defaultTag` The default `dist-tag` to use when choosing a manifest from a
|
||||
packument. Defaults to `latest`.
|
||||
* `registry` The npm registry to use by default. Defaults to
|
||||
`https://registry.npmjs.org/`.
|
||||
* `fullMetadata` Fetch the full metadata from the registry for packuments,
|
||||
including information not strictly required for installation (author,
|
||||
description, etc.) Defaults to `true` when `before` is set, since the
|
||||
version publish time is part of the extended packument metadata.
|
||||
* `fullReadJson` Use the slower `read-package-json` package instead of
|
||||
`read-package-json-fast` in order to include extra fields like "readme" in
|
||||
the manifest. Defaults to `false`.
|
||||
* `packumentCache` For registry packuments only, you may provide a `Map`
|
||||
object which will be used to cache packument requests between pacote
|
||||
calls. This allows you to easily avoid hitting the registry multiple
|
||||
times (even just to validate the cache) for a given packument, since it
|
||||
is unlikely to change in the span of a single command.
|
||||
* `silent` A boolean that determines whether the banner is displayed
|
||||
when calling `@npmcli/run-script`.
|
||||
* `verifySignatures` A boolean that will make pacote verify the
|
||||
integrity signature of a manifest, if present. There must be a
|
||||
configured `_keys` entry in the config that is scoped to the
|
||||
registry the manifest is being fetched from.
|
||||
* `verifyAttestations` A boolean that will make pacote verify Sigstore
|
||||
attestations, if present. There must be a configured `_keys` entry in the
|
||||
config that is scoped to the registry the manifest is being fetched from.
|
||||
* `tufCache` Where to store metadata/target files when retrieving the package
|
||||
attestation key material via TUF. Defaults to the same cache directory that
|
||||
npm will use by default, based on platform and environment.
|
||||
|
||||
### Advanced API
|
||||
|
||||
Each different type of fetcher is exposed for more advanced usage such as
|
||||
using helper methods from these classes:
|
||||
|
||||
* `DirFetcher`
|
||||
* `FileFetcher`
|
||||
* `GitFetcher`
|
||||
* `RegistryFetcher`
|
||||
* `RemoteFetcher`
|
||||
|
||||
## Extracted File Modes
|
||||
|
||||
Files are extracted with a mode matching the following formula:
|
||||
|
||||
```
|
||||
( (tarball entry mode value) | (minimum mode option) ) ~ (umask)
|
||||
```
|
||||
|
||||
This is in order to prevent unreadable files or unlistable directories from
|
||||
cluttering a project's `node_modules` folder, even if the package tarball
|
||||
specifies that the file should be inaccessible.
|
||||
|
||||
It also prevents files from being group- or world-writable without explicit
|
||||
opt-in by the user, because all file and directory modes are masked against
|
||||
the `umask` value.
|
||||
|
||||
So, a file which is `0o771` in the tarball, using the default `fmode` of
|
||||
`0o666` and `umask` of `0o22`, will result in a file mode of `0o755`:
|
||||
|
||||
```
|
||||
(0o771 | 0o666) => 0o777
|
||||
(0o777 ~ 0o22) => 0o755
|
||||
```
|
||||
|
||||
In almost every case, the defaults are appropriate. To respect exactly
|
||||
what is in the package tarball (even if this makes an unusable system), set
|
||||
both `dmode` and `fmode` options to `0`. Otherwise, the `umask` config
|
||||
should be used in most cases where file mode modifications are required,
|
||||
and this functions more or less the same as the `umask` value in most Unix
|
||||
systems.
|
||||
|
||||
## Extracted File Ownership
|
||||
|
||||
When running as `root` on Unix systems, all extracted files and folders
|
||||
will have their owning `uid` and `gid` values set to match the ownership
|
||||
of the containing folder.
|
||||
|
||||
This prevents `root`-owned files showing up in a project's `node_modules`
|
||||
folder when a user runs `sudo npm install`.
|
||||
|
||||
## Manifests
|
||||
|
||||
A `manifest` is similar to a `package.json` file. However, it has a few
|
||||
pieces of extra metadata, and sometimes lacks metadata that is inessential
|
||||
to package installation.
|
||||
|
||||
In addition to the common `package.json` fields, manifests include:
|
||||
|
||||
* `manifest._resolved` The tarball url or file path where the package
|
||||
artifact can be found.
|
||||
* `manifest._from` A normalized form of the spec passed in as an argument.
|
||||
* `manifest._integrity` The integrity value for the package artifact.
|
||||
* `manifest._id` The canonical spec of this package version: name@version.
|
||||
* `manifest.dist` Registry manifests (those included in a packument) have a
|
||||
`dist` object. Only `tarball` is required, though at least one of
|
||||
`shasum` or `integrity` is almost always present.
|
||||
|
||||
* `tarball` The url to the associated package artifact. (Copied by
|
||||
Pacote to `manifest._resolved`.)
|
||||
* `integrity` The integrity SRI string for the artifact. This may not
|
||||
be present for older packages on the npm registry. (Copied by Pacote
|
||||
to `manifest._integrity`.)
|
||||
* `shasum` Legacy integrity value. Hexadecimal-encoded sha1 hash.
|
||||
(Converted to an SRI string and copied by Pacote to
|
||||
`manifest._integrity` when `dist.integrity` is not present.)
|
||||
* `fileCount` Number of files in the tarball.
|
||||
* `unpackedSize` Size on disk of the package when unpacked.
|
||||
* `signatures` Signatures of the shasum. Includes the keyid that
|
||||
correlates to a [`key from the npm
|
||||
registry`](https://registry.npmjs.org/-/npm/v1/keys)
|
||||
|
||||
## Packuments
|
||||
|
||||
A packument is the top-level package document that lists the set of
|
||||
manifests for available versions for a package.
|
||||
|
||||
When a packument is fetched with `accept:
|
||||
application/vnd.npm.install-v1+json` in the HTTP headers, only the most
|
||||
minimum necessary metadata is returned. Additional metadata is returned
|
||||
when fetched with only `accept: application/json`.
|
||||
|
||||
For Pacote's purposes, the following fields are relevant:
|
||||
|
||||
* `versions` An object where each key is a version, and each value is the
|
||||
manifest for that version.
|
||||
* `dist-tags` An object mapping dist-tags to version numbers. This is how
|
||||
`foo@latest` gets turned into `foo@1.2.3`.
|
||||
* `time` In the full packument, an object mapping version numbers to
|
||||
publication times, for the `opts.before` functionality.
|
||||
|
||||
Pacote adds the following field, regardless of the accept header:
|
||||
|
||||
* `_contentLength` The size of the packument.
|
158
my-app/node_modules/pacote/lib/bin.js
generated
vendored
Executable file
158
my-app/node_modules/pacote/lib/bin.js
generated
vendored
Executable file
|
@ -0,0 +1,158 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
// Dispatch a parsed CLI config to the matching pacote API call.
// conf._ holds the positional args: [command, spec, (optional) extra arg].
// Returns the Promise from the underlying pacote method; throws
// synchronously on an unknown command.
const run = conf => {
  const pacote = require('../')
  switch (conf._[0]) {
    case 'resolve':
    case 'manifest':
    case 'packument':
      // 'resolve --long' is served via manifest() so that integrity
      // and from can be reported alongside the resolved value.
      if (conf._[0] === 'resolve' && conf.long) {
        return pacote.manifest(conf._[1], conf).then(mani => ({
          resolved: mani._resolved,
          integrity: mani._integrity,
          from: mani._from,
        }))
      }
      // command name doubles as the pacote method name here
      return pacote[conf._[0]](conf._[1], conf)

    case 'tarball':
      // no filename (or '-') means stream the tarball to stdout
      if (!conf._[2] || conf._[2] === '-') {
        return pacote.tarball.stream(conf._[1], stream => {
          stream.pipe(
            conf.testStdout ||
            /* istanbul ignore next */
            process.stdout
          )
          // make sure it resolves something falsey
          return stream.promise().then(() => {
            return false
          })
        }, conf)
      } else {
        return pacote.tarball.file(conf._[1], conf._[2], conf)
      }

    case 'extract':
      return pacote.extract(conf._[1], conf._[2], conf)

    default: /* istanbul ignore next */ {
      throw new Error(`bad command: ${conf._[0]}`)
    }
  }
}
|
||||
|
||||
// Version string is read from this package's own package.json so the
// help banner always matches the installed version.
const version = require('../package.json').version

// Full CLI help text, printed for --help/-h and after a synchronous
// command error in main().
// NOTE(review): the scrape this was recovered from lost leading
// whitespace inside the template literal — indentation below is
// reconstructed; confirm against the published bin.js.
const usage = () =>
  `Pacote - The JavaScript Package Handler, v${version}

  Usage:

  pacote resolve <spec>
    Resolve a specifier and output the fully resolved target
    Returns integrity and from if '--long' flag is set.

  pacote manifest <spec>
    Fetch a manifest and print to stdout

  pacote packument <spec>
    Fetch a full packument and print to stdout

  pacote tarball <spec> [<filename>]
    Fetch a package tarball and save to <filename>
    If <filename> is missing or '-', the tarball will be streamed to stdout.

  pacote extract <spec> <folder>
    Extract a package to the destination folder.

  Configuration values all match the names of configs passed to npm, or
  options passed to Pacote.  Additional flags for this executable:

  --long     Print an object from 'resolve', including integrity and spec.
  --json     Print result objects as JSON rather than node's default.
             (This is the default if stdout is not a TTY.)
  --help -h  Print this helpful text.

  For example '--cache=/path/to/folder' will use that folder as the cache.
`
|
||||
|
||||
// Decide whether `result` should be printed as JSON: either --json was
// passed, or stdout is not a TTY (output is piped), --json was not
// explicitly disabled, and the result is a non-null object.
const shouldJSON = (conf, result) =>
  conf.json ||
  !process.stdout.isTTY &&
  conf.json === undefined &&
  result &&
  typeof result === 'object'

// Render a result for printing: pretty 2-space JSON when shouldJSON
// says so, otherwise the raw value (console.log formats it).
// Fix: pass `null` (not `0`) as the JSON.stringify replacer argument —
// same output, but the idiomatic spelling of "no replacer".
const pretty = (conf, result) =>
  shouldJSON(conf, result) ? JSON.stringify(result, null, 2) : result
|
||||
|
||||
// Guard so the process-level 'log' listener is attached only once,
// even if main() is invoked multiple times (e.g. from tests).
let addedLogListener = false

// CLI entry point: parse argv, print help if requested, forward 'log'
// events to stderr, then run the command and print its result.
// Async command failures exit(1); synchronous failures print the error
// message plus usage (without setting a nonzero exit code here).
const main = args => {
  const conf = parse(args)
  if (conf.help || conf.h) {
    return console.log(usage())
  }

  if (!addedLogListener) {
    process.on('log', console.error)
    addedLogListener = true
  }

  try {
    return run(conf)
      // falsey results (e.g. the streamed-tarball case) print nothing
      .then(result => result && console.log(pretty(conf, result)))
      .catch(er => {
        console.error(er)
        process.exit(1)
      })
  } catch (er) {
    console.error(er.message)
    console.error(usage())
  }
}
|
||||
|
||||
// Parse a single '--key[=value]' flag into { key, value }.
// '--no-foo' with no value yields { key: 'foo', value: false }; a
// valueless '--foo' yields true.  'tag' is aliased to 'defaultTag',
// kebab-case keys are camelized, and a leading '~' in the value is
// expanded to $HOME.
const parseArg = arg => {
  const body = arg.slice(2)
  const eq = body.indexOf('=')
  const rawKey = eq === -1 ? body : body.slice(0, eq)
  const rawVal = eq === -1 ? '' : body.slice(eq + 1)
  // '--no-foo' counts as a negation only when no explicit value follows
  const negated = !rawVal && rawKey.startsWith('no-')
  const base = negated ? rawKey.slice(3) : rawKey
  const key = (base === 'tag' ? 'defaultTag' : base)
    .replace(/-([a-z])/g, (_, c) => c.toUpperCase())
  const value = rawVal
    ? rawVal.replace(/^~/, process.env.HOME)
    : !negated
  return { key, value }
}
|
||||
|
||||
// Parse a raw argv array into a flat config object.  Positional args
// collect in conf._; a bare '--' stops flag parsing; '-h' sets help;
// anything starting with '--' is delegated to parseArg.  The cache
// defaults to $HOME/.npm/_cacache.
const parse = args => {
  const conf = {
    _: [],
    cache: process.env.HOME + '/.npm/_cacache',
  }
  let sawDashDash = false
  for (const arg of args) {
    if (sawDashDash) {
      conf._.push(arg)
      continue
    }
    if (arg === '--') {
      sawDashDash = true
    } else if (arg === '-h') {
      conf.help = true
    } else if (arg.startsWith('--')) {
      const { key, value } = parseArg(arg)
      conf[key] = value
    } else {
      conf._.push(arg)
    }
  }
  return conf
}
|
||||
|
||||
// When executed directly, act as the CLI; when require()d (as in
// tests), export the internals instead of running anything.
if (module === require.main) {
  main(process.argv.slice(2))
} else {
  module.exports = {
    main,
    run,
    usage,
    parseArg,
    parse,
  }
}
|
108
my-app/node_modules/pacote/lib/dir.js
generated
vendored
Executable file
108
my-app/node_modules/pacote/lib/dir.js
generated
vendored
Executable file
|
@ -0,0 +1,108 @@
|
|||
const Fetcher = require('./fetcher.js')
|
||||
const FileFetcher = require('./file.js')
|
||||
const { Minipass } = require('minipass')
|
||||
const tarCreateOptions = require('./util/tar-create-options.js')
|
||||
const packlist = require('npm-packlist')
|
||||
const tar = require('tar')
|
||||
const _prepareDir = Symbol('_prepareDir')
|
||||
const { resolve } = require('path')
|
||||
const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson')
|
||||
|
||||
const runScript = require('@npmcli/run-script')
|
||||
|
||||
const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
|
||||
// Fetcher for 'directory' specs: packs a local folder (running its
// 'prepare' script first, if any) into a tarball stream on demand.
class DirFetcher extends Fetcher {
  constructor (spec, opts) {
    super(spec, opts)
    // just the fully resolved filename
    this.resolved = this.spec.fetchSpec

    // at least one of tree/Arborist must be provided before packing;
    // enforced in [_tarballFromResolved] below.
    this.tree = opts.tree || null
    this.Arborist = opts.Arborist || null
  }

  // exposes tarCreateOptions as public API
  static tarCreateOptions (manifest) {
    return tarCreateOptions(manifest)
  }

  get types () {
    return ['directory']
  }

  // Run the package's 'prepare' script (when present) in the source
  // directory, so the packed tarball matches what would be published.
  // Resolves to undefined when there is no prepare script.
  [_prepareDir] () {
    return this.manifest().then(mani => {
      if (!mani.scripts || !mani.scripts.prepare) {
        return
      }

      // we *only* run prepare.
      // pre/post-pack is run by the npm CLI for publish and pack,
      // but this function is *also* run when installing git deps
      const stdio = this.opts.foregroundScripts ? 'inherit' : 'pipe'

      // hide the banner if silent opt is passed in, or if prepare running
      // in the background.
      const banner = this.opts.silent ? false : stdio === 'inherit'

      return runScript({
        pkg: mani,
        event: 'prepare',
        path: this.resolved,
        stdio,
        banner,
        env: {
          npm_package_resolved: this.resolved,
          npm_package_integrity: this.integrity,
          npm_package_json: resolve(this.resolved, 'package.json'),
        },
      })
    })
  }

  // Produce the tarball stream for this directory.  The Minipass
  // stream is returned synchronously; the async prepare/packlist/tar
  // work feeds it (and proxies any errors into it) afterwards.
  [_tarballFromResolved] () {
    if (!this.tree && !this.Arborist) {
      throw new Error('DirFetcher requires either a tree or an Arborist constructor to pack')
    }

    const stream = new Minipass()
    stream.resolved = this.resolved
    stream.integrity = this.integrity

    const { prefix, workspaces } = this.opts

    // run the prepare script, get the list of files, and tar it up
    // pipe to the stream, and proxy errors the chain.
    this[_prepareDir]()
      .then(async () => {
        // lazily build the actual tree via Arborist when none was given
        if (!this.tree) {
          const arb = new this.Arborist({ path: this.resolved })
          this.tree = await arb.loadActual()
        }
        return packlist(this.tree, { path: this.resolved, prefix, workspaces })
      })
      .then(files => tar.c(tarCreateOptions(this.package), files)
        .on('error', er => stream.emit('error', er)).pipe(stream))
      .catch(er => stream.emit('error', er))
    return stream
  }

  // Read package.json from the directory, decorated with the _resolved
  // / _integrity / _from fields.  Memoized on this.package.
  manifest () {
    if (this.package) {
      return Promise.resolve(this.package)
    }

    return this[_readPackageJson](this.resolved + '/package.json')
      .then(mani => this.package = {
        ...mani,
        _integrity: this.integrity && String(this.integrity),
        _resolved: this.resolved,
        _from: this.from,
      })
  }

  // a directory has no registry packument; borrow FileFetcher's
  // simulated packument implementation
  packument () {
    return FileFetcher.prototype.packument.apply(this)
  }
}
module.exports = DirFetcher
|
505
my-app/node_modules/pacote/lib/fetcher.js
generated
vendored
Executable file
505
my-app/node_modules/pacote/lib/fetcher.js
generated
vendored
Executable file
|
@ -0,0 +1,505 @@
|
|||
// This is the base class that the other fetcher types in lib
|
||||
// all descend from.
|
||||
// It handles the unpacking and retry logic that is shared among
|
||||
// all of the other Fetcher types.
|
||||
|
||||
const npa = require('npm-package-arg')
|
||||
const ssri = require('ssri')
|
||||
const { promisify } = require('util')
|
||||
const { basename, dirname } = require('path')
|
||||
const tar = require('tar')
|
||||
const log = require('proc-log')
|
||||
const retry = require('promise-retry')
|
||||
const fs = require('fs/promises')
|
||||
const fsm = require('fs-minipass')
|
||||
const cacache = require('cacache')
|
||||
const isPackageBin = require('./util/is-package-bin.js')
|
||||
const removeTrailingSlashes = require('./util/trailing-slashes.js')
|
||||
const getContents = require('@npmcli/installed-package-contents')
|
||||
const readPackageJsonFast = require('read-package-json-fast')
|
||||
const readPackageJson = promisify(require('read-package-json'))
|
||||
const { Minipass } = require('minipass')
|
||||
|
||||
const cacheDir = require('./util/cache-dir.js')
|
||||
|
||||
// Private methods.
|
||||
// Child classes should not have to override these.
|
||||
// Users should never call them.
|
||||
// Plain Symbol() keys are truly private to this module; the
// Symbol.for() keys below are registered globally so the fetcher
// subclasses in sibling files (e.g. dir.js) can reference the same
// members.
const _extract = Symbol('_extract')
const _mkdir = Symbol('_mkdir')
const _empty = Symbol('_empty')
const _toFile = Symbol('_toFile')
const _tarxOptions = Symbol('_tarxOptions')
const _entryMode = Symbol('_entryMode')
const _istream = Symbol('_istream')
const _assertType = Symbol('_assertType')
const _tarballFromCache = Symbol('_tarballFromCache')
const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches')
const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson')
|
||||
|
||||
class FetcherBase {
|
||||
constructor (spec, opts) {
|
||||
if (!opts || typeof opts !== 'object') {
|
||||
throw new TypeError('options object is required')
|
||||
}
|
||||
this.spec = npa(spec, opts.where)
|
||||
|
||||
this.allowGitIgnore = !!opts.allowGitIgnore
|
||||
|
||||
// a bit redundant because presumably the caller already knows this,
|
||||
// but it makes it easier to not have to keep track of the requested
|
||||
// spec when we're dispatching thousands of these at once, and normalizing
|
||||
// is nice. saveSpec is preferred if set, because it turns stuff like
|
||||
// x/y#committish into github:x/y#committish. use name@rawSpec for
|
||||
// registry deps so that we turn xyz and xyz@ -> xyz@
|
||||
this.from = this.spec.registry
|
||||
? `${this.spec.name}@${this.spec.rawSpec}` : this.spec.saveSpec
|
||||
|
||||
this[_assertType]()
|
||||
// clone the opts object so that others aren't upset when we mutate it
|
||||
// by adding/modifying the integrity value.
|
||||
this.opts = { ...opts }
|
||||
|
||||
this.cache = opts.cache || cacheDir().cacache
|
||||
this.tufCache = opts.tufCache || cacheDir().tufcache
|
||||
this.resolved = opts.resolved || null
|
||||
|
||||
// default to caching/verifying with sha512, that's what we usually have
|
||||
// need to change this default, or start overriding it, when sha512
|
||||
// is no longer strong enough.
|
||||
this.defaultIntegrityAlgorithm = opts.defaultIntegrityAlgorithm || 'sha512'
|
||||
|
||||
if (typeof opts.integrity === 'string') {
|
||||
this.opts.integrity = ssri.parse(opts.integrity)
|
||||
}
|
||||
|
||||
this.package = null
|
||||
this.type = this.constructor.name
|
||||
this.fmode = opts.fmode || 0o666
|
||||
this.dmode = opts.dmode || 0o777
|
||||
// we don't need a default umask, because we don't chmod files coming
|
||||
// out of package tarballs. they're forced to have a mode that is
|
||||
// valid, regardless of what's in the tarball entry, and then we let
|
||||
// the process's umask setting do its job. but if configured, we do
|
||||
// respect it.
|
||||
this.umask = opts.umask || 0
|
||||
|
||||
this.preferOnline = !!opts.preferOnline
|
||||
this.preferOffline = !!opts.preferOffline
|
||||
this.offline = !!opts.offline
|
||||
|
||||
this.before = opts.before
|
||||
this.fullMetadata = this.before ? true : !!opts.fullMetadata
|
||||
this.fullReadJson = !!opts.fullReadJson
|
||||
if (this.fullReadJson) {
|
||||
this[_readPackageJson] = readPackageJson
|
||||
} else {
|
||||
this[_readPackageJson] = readPackageJsonFast
|
||||
}
|
||||
|
||||
// rrh is a registry hostname or 'never' or 'always'
|
||||
// defaults to registry.npmjs.org
|
||||
this.replaceRegistryHost = (!opts.replaceRegistryHost || opts.replaceRegistryHost === 'npmjs') ?
|
||||
'registry.npmjs.org' : opts.replaceRegistryHost
|
||||
|
||||
this.defaultTag = opts.defaultTag || 'latest'
|
||||
this.registry = removeTrailingSlashes(opts.registry || 'https://registry.npmjs.org')
|
||||
|
||||
// command to run 'prepare' scripts on directories and git dirs
|
||||
// To use pacote with yarn, for example, set npmBin to 'yarn'
|
||||
// and npmCliConfig with yarn's equivalents.
|
||||
this.npmBin = opts.npmBin || 'npm'
|
||||
|
||||
// command to install deps for preparing
|
||||
this.npmInstallCmd = opts.npmInstallCmd || ['install', '--force']
|
||||
|
||||
// XXX fill more of this in based on what we know from this.opts
|
||||
// we explicitly DO NOT fill in --tag, though, since we are often
|
||||
// going to be packing in the context of a publish, which may set
|
||||
// a dist-tag, but certainly wants to keep defaulting to latest.
|
||||
this.npmCliConfig = opts.npmCliConfig || [
|
||||
`--cache=${dirname(this.cache)}`,
|
||||
`--prefer-offline=${!!this.preferOffline}`,
|
||||
`--prefer-online=${!!this.preferOnline}`,
|
||||
`--offline=${!!this.offline}`,
|
||||
...(this.before ? [`--before=${this.before.toISOString()}`] : []),
|
||||
'--no-progress',
|
||||
'--no-save',
|
||||
'--no-audit',
|
||||
// override any omit settings from the environment
|
||||
'--include=dev',
|
||||
'--include=peer',
|
||||
'--include=optional',
|
||||
// we need the actual things, not just the lockfile
|
||||
'--no-package-lock-only',
|
||||
'--no-dry-run',
|
||||
]
|
||||
}
|
||||
|
||||
get integrity () {
|
||||
return this.opts.integrity || null
|
||||
}
|
||||
|
||||
set integrity (i) {
|
||||
if (!i) {
|
||||
return
|
||||
}
|
||||
|
||||
i = ssri.parse(i)
|
||||
const current = this.opts.integrity
|
||||
|
||||
// do not ever update an existing hash value, but do
|
||||
// merge in NEW algos and hashes that we don't already have.
|
||||
if (current) {
|
||||
current.merge(i)
|
||||
} else {
|
||||
this.opts.integrity = i
|
||||
}
|
||||
}
|
||||
|
||||
get notImplementedError () {
|
||||
return new Error('not implemented in this fetcher type: ' + this.type)
|
||||
}
|
||||
|
||||
// override in child classes
|
||||
// Returns a Promise that resolves to this.resolved string value
|
||||
resolve () {
|
||||
return this.resolved ? Promise.resolve(this.resolved)
|
||||
: Promise.reject(this.notImplementedError)
|
||||
}
|
||||
|
||||
packument () {
|
||||
return Promise.reject(this.notImplementedError)
|
||||
}
|
||||
|
||||
// override in child class
|
||||
// returns a manifest containing:
|
||||
// - name
|
||||
// - version
|
||||
// - _resolved
|
||||
// - _integrity
|
||||
// - plus whatever else was in there (corgi, full metadata, or pj file)
|
||||
manifest () {
|
||||
return Promise.reject(this.notImplementedError)
|
||||
}
|
||||
|
||||
// private, should be overridden.
|
||||
// Note that they should *not* calculate or check integrity or cache,
|
||||
// but *just* return the raw tarball data stream.
|
||||
[_tarballFromResolved] () {
|
||||
throw this.notImplementedError
|
||||
}
|
||||
|
||||
// public, should not be overridden
|
||||
tarball () {
|
||||
return this.tarballStream(stream => stream.concat().then(data => {
|
||||
data.integrity = this.integrity && String(this.integrity)
|
||||
data.resolved = this.resolved
|
||||
data.from = this.from
|
||||
return data
|
||||
}))
|
||||
}
|
||||
|
||||
// private
|
||||
// Note: cacache will raise a EINTEGRITY error if the integrity doesn't match
|
||||
[_tarballFromCache] () {
|
||||
return cacache.get.stream.byDigest(this.cache, this.integrity, this.opts)
|
||||
}
|
||||
|
||||
get [_cacheFetches] () {
|
||||
return true
|
||||
}
|
||||
|
||||
// Wrap a raw tarball stream so that its integrity is calculated as it
// flows, and (when caching is enabled) the bytes are also written
// through to the cache. Returns a stream the caller consumes.
[_istream] (stream) {
  // if not caching this, just return it
  if (!this.opts.cache || !this[_cacheFetches]) {
    // instead of creating a new integrity stream, we only piggyback on the
    // provided stream's events
    if (stream.hasIntegrityEmitter) {
      stream.on('integrity', i => this.integrity = i)
      return stream
    }

    const istream = ssri.integrityStream(this.opts)
    istream.on('integrity', i => this.integrity = i)
    // proxy read-side failures so consumers of istream see them
    stream.on('error', err => istream.emit('error', err))
    return stream.pipe(istream)
  }

  // we have to return a stream that gets ALL the data, and proxies errors,
  // but then pipe from the original tarball stream into the cache as well.
  // To do this without losing any data, and since the cacache put stream
  // is not a passthrough, we have to pipe from the original stream into
  // the cache AFTER we pipe into the middleStream. Since the cache stream
  // has an asynchronous flush to write its contents to disk, we need to
  // defer the middleStream end until the cache stream ends.
  const middleStream = new Minipass()
  stream.on('error', err => middleStream.emit('error', err))
  stream.pipe(middleStream, { end: false })
  const cstream = cacache.put.stream(
    this.opts.cache,
    `pacote:tarball:${this.from}`,
    this.opts
  )
  // the cache stream computes the digest; record it when it arrives
  cstream.on('integrity', i => this.integrity = i)
  cstream.on('error', err => stream.emit('error', err))
  stream.pipe(cstream)

  // eslint-disable-next-line promise/catch-or-return
  cstream.promise().catch(() => {}).then(() => middleStream.end())
  return middleStream
}
|
||||
|
||||
pickIntegrityAlgorithm () {
|
||||
return this.integrity ? this.integrity.pickAlgorithm(this.opts)
|
||||
: this.defaultIntegrityAlgorithm
|
||||
}
|
||||
|
||||
// TODO: check error class, once those are rolled out to our deps
|
||||
isDataCorruptionError (er) {
|
||||
return er.code === 'EINTEGRITY' || er.code === 'Z_DATA_ERROR'
|
||||
}
|
||||
|
||||
// override the types getter
// The base class supports no spec types; subclasses return an array of
// the npm-package-arg spec types they handle (e.g. ['file'], ['git']).
get types () {
  return false
}
|
||||
|
||||
[_assertType] () {
|
||||
if (this.types && !this.types.includes(this.spec.type)) {
|
||||
throw new TypeError(`Wrong spec type (${
|
||||
this.spec.type
|
||||
}) for ${
|
||||
this.constructor.name
|
||||
}. Supported types: ${this.types.join(', ')}`)
|
||||
}
|
||||
}
|
||||
|
||||
// We allow ENOENTs from cacache, but not anywhere else.
|
||||
// An ENOENT trying to read a tgz file, for example, is Right Out.
|
||||
isRetriableError (er) {
|
||||
// TODO: check error class, once those are rolled out to our deps
|
||||
return this.isDataCorruptionError(er) ||
|
||||
er.code === 'ENOENT' ||
|
||||
er.code === 'EISDIR'
|
||||
}
|
||||
|
||||
// Mostly internal, but has some uses
// Pass in a function which returns a promise
// Function will be called 1 or more times with streams that may fail.
// Retries:
// Function MUST handle errors on the stream by rejecting the promise,
// so that retry logic can pick it up and either retry or fail whatever
// promise it was making (ie, failing extraction, etc.)
//
// The return value of this method is a Promise that resolves the same
// as whatever the streamHandler resolves to.
//
// This should never be overridden by child classes, but it is public.
tarballStream (streamHandler) {
  // Only short-circuit via cache if we have everything else we'll need,
  // and the user has not expressed a preference for checking online.

  const fromCache = (
    !this.preferOnline &&
    this.integrity &&
    this.resolved
  ) ? streamHandler(this[_tarballFromCache]()).catch(er => {
    if (this.isDataCorruptionError(er)) {
      log.warn('tarball', `cached data for ${
        this.spec
      } (${this.integrity}) seems to be corrupted. Refreshing cache.`)
      // drop the bad cache entry, then rethrow so fromResolved runs
      return this.cleanupCached().then(() => {
        throw er
      })
    } else {
      throw er
    }
  }) : null

  // fallback path: resolve the spec and stream the tarball from its
  // origin, retrying once on corruption/ENOENT-style failures
  const fromResolved = er => {
    if (er) {
      if (!this.isRetriableError(er)) {
        throw er
      }
      log.silly('tarball', `no local data for ${
        this.spec
      }. Extracting by manifest.`)
    }
    return this.resolve().then(() => retry(tryAgain =>
      streamHandler(this[_istream](this[_tarballFromResolved]()))
        .catch(streamErr => {
          // Most likely data integrity. A cache ENOENT error is unlikely
          // here, since we're definitely not reading from the cache, but it
          // IS possible that the fetch subsystem accessed the cache, and the
          // entry got blown away or something. Try one more time to be sure.
          if (this.isRetriableError(streamErr)) {
            log.warn('tarball', `tarball data for ${
              this.spec
            } (${this.integrity}) seems to be corrupted. Trying again.`)
            return this.cleanupCached().then(() => tryAgain(streamErr))
          }
          throw streamErr
        }), { retries: 1, minTimeout: 0, maxTimeout: 0 }))
  }

  return fromCache ? fromCache.catch(fromResolved) : fromResolved()
}
|
||||
|
||||
// Remove the (presumably corrupted) cached tarball content for our
// integrity digest so the next fetch pulls fresh data.
cleanupCached () {
  return cacache.rm.content(this.cache, this.integrity, this.opts)
}
|
||||
|
||||
[_empty] (path) {
|
||||
return getContents({ path, depth: 1 }).then(contents => Promise.all(
|
||||
contents.map(entry => fs.rm(entry, { recursive: true, force: true }))))
|
||||
}
|
||||
|
||||
async [_mkdir] (dest) {
|
||||
await this[_empty](dest)
|
||||
return await fs.mkdir(dest, { recursive: true })
|
||||
}
|
||||
|
||||
// extraction is always the same. the only difference is where
|
||||
// the tarball comes from.
|
||||
async extract (dest) {
|
||||
await this[_mkdir](dest)
|
||||
return this.tarballStream((tarball) => this[_extract](dest, tarball))
|
||||
}
|
||||
|
||||
// Stream the tarball to a file on disk, resolving with the same
// { integrity, resolved, from } shape the other public methods use.
[_toFile] (dest) {
  return this.tarballStream(str => new Promise((res, rej) => {
    const writer = new fsm.WriteStream(dest)
    // forward read-side failures into the writer so we reject below
    str.on('error', er => writer.emit('error', er))
    writer.on('error', er => rej(er))
    writer.on('close', () => res({
      integrity: this.integrity && String(this.integrity),
      resolved: this.resolved,
      from: this.from,
    }))
    str.pipe(writer)
  }))
}
|
||||
|
||||
// don't use this[_mkdir] because we don't want to rimraf anything
|
||||
async tarballFile (dest) {
|
||||
const dir = dirname(dest)
|
||||
await fs.mkdir(dir, { recursive: true })
|
||||
return this[_toFile](dest)
|
||||
}
|
||||
|
||||
// Unpack a tarball stream into dest with node-tar, resolving with
// { resolved, integrity, from } once extraction finishes. Rejects on
// either a tar error or a failure on the source tarball stream.
[_extract] (dest, tarball) {
  const extractor = tar.x(this[_tarxOptions]({ cwd: dest }))
  const p = new Promise((resolve, reject) => {
    extractor.on('end', () => {
      resolve({
        resolved: this.resolved,
        integrity: this.integrity && String(this.integrity),
        from: this.from,
      })
    })

    extractor.on('error', er => {
      log.warn('tar', er.message)
      log.silly('tar', er)
      reject(er)
    })

    // failures reading the tarball itself also fail the extraction
    tarball.on('error', er => reject(er))
  })

  tarball.pipe(extractor)
  return p
}
|
||||
|
||||
// always ensure that entries are at least as permissive as our configured
// dmode/fmode, but never more permissive than the umask allows.
// Returns the mode to apply to this tar entry.
[_entryMode] (path, mode, type) {
  // pick the baseline mode by entry type: dirs get dmode, files fmode
  const m = /Directory|GNUDumpDir/.test(type) ? this.dmode
    : /File$/.test(type) ? this.fmode
    : /* istanbul ignore next - should never happen in a pkg */ 0

  // make sure package bins are executable
  const exe = isPackageBin(this.package, path) ? 0o111 : 0
  // always ensure that files are read/writable by the owner
  return ((mode | m) & ~this.umask) | exe | 0o600
}
|
||||
|
||||
// Build the options object passed to tar.x() for extraction into cwd.
[_tarxOptions] ({ cwd }) {
  const sawIgnores = new Set()
  return {
    cwd,
    noChmod: true,
    noMtime: true,
    filter: (name, entry) => {
      // never extract symlinks or hardlinks from package tarballs
      if (/Link$/.test(entry.type)) {
        return false
      }
      entry.mode = this[_entryMode](entry.path, entry.mode, entry.type)
      // this replicates the npm pack behavior where .gitignore files
      // are treated like .npmignore files, but only if a .npmignore
      // file is not present.
      if (/File$/.test(entry.type)) {
        const base = basename(entry.path)
        if (base === '.npmignore') {
          sawIgnores.add(entry.path)
        } else if (base === '.gitignore' && !this.allowGitIgnore) {
          // rename, but only if there's not already a .npmignore
          const ni = entry.path.replace(/\.gitignore$/, '.npmignore')
          if (sawIgnores.has(ni)) {
            return false
          }
          entry.path = ni
        }
        return true
      }
      // NOTE(review): non-link, non-file entries fall through and
      // return undefined here (falsy) — confirm against tar's filter
      // semantics before changing.
    },
    // strip the top-level "package/" directory from npm tarballs
    strip: 1,
    onwarn: /* istanbul ignore next - we can trust that tar logs */
    (code, msg, data) => {
      log.warn('tar', code, msg)
      log.silly('tar', code, msg, data)
    },
    umask: this.umask,
    // always ignore ownership info from tarball metadata
    preserveOwner: false,
  }
}
|
||||
}
|
||||
|
||||
module.exports = FetcherBase
|
||||
|
||||
// Child classes
|
||||
const GitFetcher = require('./git.js')
|
||||
const RegistryFetcher = require('./registry.js')
|
||||
const FileFetcher = require('./file.js')
|
||||
const DirFetcher = require('./dir.js')
|
||||
const RemoteFetcher = require('./remote.js')
|
||||
|
||||
// Get an appropriate fetcher object from a spec and options
FetcherBase.get = (rawSpec, opts = {}) => {
  const spec = npa(rawSpec, opts.where)
  const { type } = spec
  if (type === 'git') {
    return new GitFetcher(spec, opts)
  }
  if (type === 'remote') {
    return new RemoteFetcher(spec, opts)
  }
  if (type === 'version' || type === 'range' || type === 'tag' || type === 'alias') {
    // aliases carry the real registry target in subSpec
    return new RegistryFetcher(spec.subSpec || spec, opts)
  }
  if (type === 'file') {
    return new FileFetcher(spec, opts)
  }
  if (type === 'directory') {
    return new DirFetcher(spec, opts)
  }
  throw new TypeError('Unknown spec type: ' + type)
}
|
96
my-app/node_modules/pacote/lib/file.js
generated
vendored
Executable file
96
my-app/node_modules/pacote/lib/file.js
generated
vendored
Executable file
|
@ -0,0 +1,96 @@
|
|||
const Fetcher = require('./fetcher.js')
|
||||
const fsm = require('fs-minipass')
|
||||
const cacache = require('cacache')
|
||||
const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
|
||||
const _exeBins = Symbol('_exeBins')
|
||||
const { resolve } = require('path')
|
||||
const fs = require('fs')
|
||||
const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson')
|
||||
|
||||
// Fetcher for `file:` specs pointing at a local .tgz tarball.
class FileFetcher extends Fetcher {
  constructor (spec, opts) {
    super(spec, opts)
    // just the fully resolved filename
    this.resolved = this.spec.fetchSpec
  }

  get types () {
    return ['file']
  }

  // Read the manifest by unpacking the tarball into a temp dir and
  // reading its package.json; memoized on this.package.
  manifest () {
    if (this.package) {
      return Promise.resolve(this.package)
    }

    // have to unpack the tarball for this.
    return cacache.tmp.withTmp(this.cache, this.opts, dir =>
      this.extract(dir)
        .then(() => this[_readPackageJson](dir + '/package.json'))
        .then(mani => this.package = {
          ...mani,
          _integrity: this.integrity && String(this.integrity),
          _resolved: this.resolved,
          _from: this.from,
        }))
  }

  // Mark every script listed in pkg.bin as executable under dest.
  [_exeBins] (pkg, dest) {
    if (!pkg.bin) {
      return Promise.resolve()
    }

    return Promise.all(Object.keys(pkg.bin).map(k => new Promise(res => {
      const script = resolve(dest, pkg.bin[k])
      // Best effort. Ignore errors here, the only result is that
      // a bin script is not executable. But if it's missing or
      // something, we just leave it for a later stage to trip over
      // when we can provide a more useful contextual error.
      fs.stat(script, (er, st) => {
        if (er) {
          return res()
        }
        const mode = st.mode | 0o111
        if (mode === st.mode) {
          // already executable; nothing to do
          return res()
        }
        fs.chmod(script, mode, res)
      })
    })))
  }

  extract (dest) {
    // if we've already loaded the manifest, then the super got it.
    // but if not, read the unpacked manifest and chmod properly.
    return super.extract(dest)
      .then(result => this.package ? result
        : this[_readPackageJson](dest + '/package.json').then(pkg =>
          this[_exeBins](pkg, dest)).then(() => result))
  }

  [_tarballFromResolved] () {
    // create a read stream and return it
    return new fsm.ReadStream(this.resolved)
  }

  // Synthesize a single-version packument from the manifest.
  packument () {
    // simulate based on manifest
    return this.manifest().then(mani => ({
      name: mani.name,
      'dist-tags': {
        [this.defaultTag]: mani.version,
      },
      versions: {
        [mani.version]: {
          ...mani,
          dist: {
            tarball: `file:${this.resolved}`,
            integrity: this.integrity && String(this.integrity),
          },
        },
      },
    }))
  }
}
|
||||
|
||||
module.exports = FileFetcher
|
327
my-app/node_modules/pacote/lib/git.js
generated
vendored
Executable file
327
my-app/node_modules/pacote/lib/git.js
generated
vendored
Executable file
|
@ -0,0 +1,327 @@
|
|||
const Fetcher = require('./fetcher.js')
|
||||
const FileFetcher = require('./file.js')
|
||||
const RemoteFetcher = require('./remote.js')
|
||||
const DirFetcher = require('./dir.js')
|
||||
const hashre = /^[a-f0-9]{40}$/
|
||||
const git = require('@npmcli/git')
|
||||
const pickManifest = require('npm-pick-manifest')
|
||||
const npa = require('npm-package-arg')
|
||||
const { Minipass } = require('minipass')
|
||||
const cacache = require('cacache')
|
||||
const log = require('proc-log')
|
||||
const npm = require('./util/npm.js')
|
||||
|
||||
const _resolvedFromRepo = Symbol('_resolvedFromRepo')
|
||||
const _resolvedFromHosted = Symbol('_resolvedFromHosted')
|
||||
const _resolvedFromClone = Symbol('_resolvedFromClone')
|
||||
const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
|
||||
const _addGitSha = Symbol('_addGitSha')
|
||||
const addGitSha = require('./util/add-git-sha.js')
|
||||
const _clone = Symbol('_clone')
|
||||
const _cloneHosted = Symbol('_cloneHosted')
|
||||
const _cloneRepo = Symbol('_cloneRepo')
|
||||
const _setResolvedWithSha = Symbol('_setResolvedWithSha')
|
||||
const _prepareDir = Symbol('_prepareDir')
|
||||
const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson')
|
||||
|
||||
// get the repository url.
// prefer https if there's auth, since ssh will drop that.
// otherwise, prefer ssh if available (more secure).
// We have to add the git+ back because npa suppresses it.
// NOTE: this relies on && binding tighter than || — the sshurl branch
// only wins when sshurl exists and we aren't forced onto https by auth.
const repoUrl = (h, opts) =>
  h.sshurl && !(h.https && h.auth) && addGitPlus(h.sshurl(opts)) ||
  h.https && addGitPlus(h.https(opts))
|
||||
|
||||
// add git+ to the url, but only one time.
// Falsy inputs (null/undefined/empty) are returned unchanged.
const addGitPlus = url => {
  if (!url) {
    return url
  }
  return `git+${url}`.replace(/^(git\+)+/, 'git+')
}
|
||||
|
||||
// Fetcher for git specs: hosted shortcuts (github:, etc.), git URLs,
// and git+ssh/git+https URLs.
class GitFetcher extends Fetcher {
  constructor (spec, opts) {
    super(spec, opts)

    // we never want to compare integrity for git dependencies: npm/rfcs#525
    if (this.opts.integrity) {
      delete this.opts.integrity
      log.warn(`skipping integrity check for git dependency ${this.spec.fetchSpec}`)
    }

    this.resolvedRef = null
    if (this.spec.hosted) {
      this.from = this.spec.hosted.shortcut({ noCommittish: false })
    }

    // shortcut: avoid full clone when we can go straight to the tgz
    // if we have the full sha and it's a hosted git platform
    if (this.spec.gitCommittish && hashre.test(this.spec.gitCommittish)) {
      this.resolvedSha = this.spec.gitCommittish
      // use hosted.tarball() when we shell to RemoteFetcher later
      this.resolved = this.spec.hosted
        ? repoUrl(this.spec.hosted, { noCommittish: false })
        : this.spec.rawSpec
    } else {
      this.resolvedSha = ''
    }

    // Arborist is needed to build a node_modules tree for prepare scripts
    this.Arborist = opts.Arborist || null
  }

  // just exposed to make it easier to test all the combinations
  static repoUrl (hosted, opts) {
    return repoUrl(hosted, opts)
  }

  get types () {
    return ['git']
  }

  resolve () {
    // likely a hosted git repo with a sha, so get the tarball url
    // but in general, no reason to resolve() more than necessary!
    if (this.resolved) {
      return super.resolve()
    }

    // fetch the git repo and then look at the current hash
    const h = this.spec.hosted
    // try to use ssh, fall back to git.
    return h ? this[_resolvedFromHosted](h)
      : this[_resolvedFromRepo](this.spec.fetchSpec)
  }

  // first try https, since that's faster and passphrase-less for
  // public repos, and supports private repos when auth is provided.
  // Fall back to SSH to support private repos
  // NB: we always store the https url in resolved field if auth
  // is present, otherwise ssh if the hosted type provides it
  [_resolvedFromHosted] (hosted) {
    return this[_resolvedFromRepo](hosted.https && hosted.https())
      .catch(er => {
        // Throw early since we know pathspec errors will fail again if retried
        if (er instanceof git.errors.GitPathspecError) {
          throw er
        }
        const ssh = hosted.sshurl && hosted.sshurl()
        // no fallthrough if we can't fall through or have https auth
        if (!ssh || hosted.auth) {
          throw er
        }
        return this[_resolvedFromRepo](ssh)
      })
  }

  // Resolve a committish/range against the remote's advertised refs
  // (git ls-remote style), falling back to a full clone when the
  // committish can't be found in the ref list.
  [_resolvedFromRepo] (gitRemote) {
    // XXX make this a custom error class
    if (!gitRemote) {
      return Promise.reject(new Error(`No git url for ${this.spec}`))
    }
    const gitRange = this.spec.gitRange
    const name = this.spec.name
    return git.revs(gitRemote, this.opts).then(remoteRefs => {
      return gitRange ? pickManifest({
        versions: remoteRefs.versions,
        'dist-tags': remoteRefs['dist-tags'],
        name,
      }, gitRange, this.opts)
        : this.spec.gitCommittish ?
          remoteRefs.refs[this.spec.gitCommittish] ||
          remoteRefs.refs[remoteRefs.shas[this.spec.gitCommittish]]
          : remoteRefs.refs.HEAD // no git committish, get default head
    }).then(revDoc => {
      // the committish provided isn't in the rev list
      // things like HEAD~3 or @yesterday can land here.
      if (!revDoc || !revDoc.sha) {
        return this[_resolvedFromClone]()
      }

      this.resolvedRef = revDoc
      this.resolvedSha = revDoc.sha
      this[_addGitSha](revDoc.sha)
      return this.resolved
    })
  }

  [_setResolvedWithSha] (withSha) {
    // we haven't cloned, so a tgz download is still faster
    // of course, if it's not a known host, we can't do that.
    this.resolved = !this.spec.hosted ? withSha
      : repoUrl(npa(withSha).hosted, { noCommittish: false })
  }

  // when we get the git sha, we affix it to our spec to build up
  // either a git url with a hash, or a tarball download URL
  [_addGitSha] (sha) {
    this[_setResolvedWithSha](addGitSha(this.spec, sha))
  }

  [_resolvedFromClone] () {
    // do a full or shallow clone, then look at the HEAD
    // kind of wasteful, but no other option, really
    return this[_clone](dir => this.resolved)
  }

  // Install the cloned checkout's deps so its prepare scripts can run,
  // guarding against prepare cycles via _PACOTE_NO_PREPARE_.
  [_prepareDir] (dir) {
    return this[_readPackageJson](dir + '/package.json').then(mani => {
      // no need if we aren't going to do any preparation.
      const scripts = mani.scripts
      if (!mani.workspaces && (!scripts || !(
        scripts.postinstall ||
        scripts.build ||
        scripts.preinstall ||
        scripts.install ||
        scripts.prepack ||
        scripts.prepare))) {
        return
      }

      // to avoid cases where we have an cycle of git deps that depend
      // on one another, we only ever do preparation for one instance
      // of a given git dep along the chain of installations.
      // Note that this does mean that a dependency MAY in theory end up
      // trying to run its prepare script using a dependency that has not
      // been properly prepared itself, but that edge case is smaller
      // and less hazardous than a fork bomb of npm and git commands.
      const noPrepare = !process.env._PACOTE_NO_PREPARE_ ? []
        : process.env._PACOTE_NO_PREPARE_.split('\n')
      if (noPrepare.includes(this.resolved)) {
        log.info('prepare', 'skip prepare, already seen', this.resolved)
        return
      }
      noPrepare.push(this.resolved)

      // the DirFetcher will do its own preparation to run the prepare scripts
      // All we have to do is put the deps in place so that it can succeed.
      return npm(
        this.npmBin,
        [].concat(this.npmInstallCmd).concat(this.npmCliConfig),
        dir,
        { ...process.env, _PACOTE_NO_PREPARE_: noPrepare.join('\n') },
        { message: 'git dep preparation failed' }
      )
    })
  }

  // Produce a tarball stream by cloning, preparing the checkout, and
  // delegating packing to DirFetcher.
  [_tarballFromResolved] () {
    const stream = new Minipass()
    stream.resolved = this.resolved
    stream.from = this.from

    // check it out and then shell out to the DirFetcher tarball packer
    this[_clone](dir => this[_prepareDir](dir)
      .then(() => new Promise((res, rej) => {
        if (!this.Arborist) {
          throw new Error('GitFetcher requires an Arborist constructor to pack a tarball')
        }
        const df = new DirFetcher(`file:${dir}`, {
          ...this.opts,
          Arborist: this.Arborist,
          resolved: null,
          integrity: null,
        })
        const dirStream = df[_tarballFromResolved]()
        dirStream.on('error', rej)
        dirStream.on('end', res)
        dirStream.pipe(stream)
      }))).catch(
      /* istanbul ignore next: very unlikely and hard to test */
      er => stream.emit('error', er)
    )
    return stream
  }

  // clone a git repo into a temp folder (or fetch and unpack if possible)
  // handler accepts a directory, and returns a promise that resolves
  // when we're done with it, at which point, cacache deletes it
  //
  // TODO: after cloning, create a tarball of the folder, and add to the cache
  // with cacache.put.stream(), using a key that's deterministic based on the
  // spec and repo, so that we don't ever clone the same thing multiple times.
  [_clone] (handler, tarballOk = true) {
    const o = { tmpPrefix: 'git-clone' }
    const ref = this.resolvedSha || this.spec.gitCommittish
    const h = this.spec.hosted
    const resolved = this.resolved

    // can be set manually to false to fall back to actual git clone
    tarballOk = tarballOk &&
      h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball

    return cacache.tmp.withTmp(this.cache, o, async tmp => {
      // if we're resolved, and have a tarball url, shell out to RemoteFetcher
      if (tarballOk) {
        const nameat = this.spec.name ? `${this.spec.name}@` : ''
        return new RemoteFetcher(h.tarball({ noCommittish: false }), {
          ...this.opts,
          allowGitIgnore: true,
          pkgid: `git:${nameat}${this.resolved}`,
          resolved: this.resolved,
          integrity: null, // it'll always be different, if we have one
        }).extract(tmp).then(() => handler(tmp), er => {
          // fall back to ssh download if tarball fails
          if (er.constructor.name.match(/^Http/)) {
            return this[_clone](handler, false)
          } else {
            throw er
          }
        })
      }

      const sha = await (
        h ? this[_cloneHosted](ref, tmp)
          : this[_cloneRepo](this.spec.fetchSpec, ref, tmp)
      )
      this.resolvedSha = sha
      if (!this.resolved) {
        await this[_addGitSha](sha)
      }
      return handler(tmp)
    })
  }

  // first try https, since that's faster and passphrase-less for
  // public repos, and supports private repos when auth is provided.
  // Fall back to SSH to support private repos
  // NB: we always store the https url in resolved field if auth
  // is present, otherwise ssh if the hosted type provides it
  [_cloneHosted] (ref, tmp) {
    const hosted = this.spec.hosted
    return this[_cloneRepo](hosted.https({ noCommittish: true }), ref, tmp)
      .catch(er => {
        // Throw early since we know pathspec errors will fail again if retried
        if (er instanceof git.errors.GitPathspecError) {
          throw er
        }
        const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true })
        // no fallthrough if we can't fall through or have https auth
        if (!ssh || hosted.auth) {
          throw er
        }
        return this[_cloneRepo](ssh, ref, tmp)
      })
  }

  [_cloneRepo] (repo, ref, tmp) {
    const { opts, spec } = this
    return git.clone(repo, ref, tmp, { ...opts, spec })
  }

  // Hosted + resolved specs can reuse FileFetcher's tarball-based
  // manifest; everything else needs a clone to read package.json.
  manifest () {
    if (this.package) {
      return Promise.resolve(this.package)
    }

    return this.spec.hosted && this.resolved
      ? FileFetcher.prototype.manifest.apply(this)
      : this[_clone](dir =>
        this[_readPackageJson](dir + '/package.json')
          .then(mani => this.package = {
            ...mani,
            _resolved: this.resolved,
            _from: this.from,
          }))
  }

  packument () {
    return FileFetcher.prototype.packument.apply(this)
  }
}
|
||||
module.exports = GitFetcher
|
23
my-app/node_modules/pacote/lib/index.js
generated
vendored
Executable file
23
my-app/node_modules/pacote/lib/index.js
generated
vendored
Executable file
|
@ -0,0 +1,23 @@
|
|||
const { get } = require('./fetcher.js')
|
||||
const GitFetcher = require('./git.js')
|
||||
const RegistryFetcher = require('./registry.js')
|
||||
const FileFetcher = require('./file.js')
|
||||
const DirFetcher = require('./dir.js')
|
||||
const RemoteFetcher = require('./remote.js')
|
||||
|
||||
// Public pacote API: the fetcher classes themselves, plus one-shot
// helpers that build the right fetcher for a spec and invoke it.
module.exports = {
  GitFetcher,
  RegistryFetcher,
  FileFetcher,
  DirFetcher,
  RemoteFetcher,
  resolve: (spec, opts) => get(spec, opts).resolve(),
  extract: (spec, dest, opts) => get(spec, opts).extract(dest),
  manifest: (spec, opts) => get(spec, opts).manifest(),
  tarball: (spec, opts) => get(spec, opts).tarball(),
  packument: (spec, opts) => get(spec, opts).packument(),
}
// sub-commands hung off of tarball(): stream through a handler, or
// write directly to a file on disk.
module.exports.tarball.stream = (spec, handler, opts) =>
  get(spec, opts).tarballStream(handler)
module.exports.tarball.file = (spec, dest, opts) =>
  get(spec, opts).tarballFile(dest)
|
368
my-app/node_modules/pacote/lib/registry.js
generated
vendored
Executable file
368
my-app/node_modules/pacote/lib/registry.js
generated
vendored
Executable file
|
@ -0,0 +1,368 @@
|
|||
const Fetcher = require('./fetcher.js')
|
||||
const RemoteFetcher = require('./remote.js')
|
||||
const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
|
||||
const pacoteVersion = require('../package.json').version
|
||||
const removeTrailingSlashes = require('./util/trailing-slashes.js')
|
||||
const rpj = require('read-package-json-fast')
|
||||
const pickManifest = require('npm-pick-manifest')
|
||||
const ssri = require('ssri')
|
||||
const crypto = require('crypto')
|
||||
const npa = require('npm-package-arg')
|
||||
const sigstore = require('sigstore')
|
||||
|
||||
// Corgis are cute. 🐕🐶
|
||||
const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
|
||||
const fullDoc = 'application/json'
|
||||
|
||||
// Some really old packages have no time field in their packument so we need a
|
||||
// cutoff date.
|
||||
const MISSING_TIME_CUTOFF = '2015-01-01T00:00:00.000Z'
|
||||
|
||||
const fetch = require('npm-registry-fetch')
|
||||
|
||||
const _headers = Symbol('_headers')
|
||||
class RegistryFetcher extends Fetcher {
|
||||
constructor (spec, opts) {
  super(spec, opts)

  // you usually don't want to fetch the same packument multiple times in
  // the span of a given script or command, no matter how many pacote calls
  // are made, so this lets us avoid doing that. It's only relevant for
  // registry fetchers, because other types simulate their packument from
  // the manifest, which they memoize on this.package, so it's very cheap
  // already.
  this.packumentCache = this.opts.packumentCache || null

  this.registry = fetch.pickRegistry(spec, opts)
  // packument URL: <registry>/<escaped package name>
  this.packumentUrl = removeTrailingSlashes(this.registry) + '/' +
    this.spec.escapedName

  const parsed = new URL(this.registry)
  const regKey = `//${parsed.host}${parsed.pathname}`
  // unlike the nerf-darted auth keys, this one does *not* allow a mismatch
  // of trailing slashes.  It must match exactly.
  if (this.opts[`${regKey}:_keys`]) {
    // registry signing keys used for signature verification
    this.registryKeys = this.opts[`${regKey}:_keys`]
  }

  // XXX pacote <=9 has some logic to ignore opts.resolved if
  // the resolved URL doesn't go to the same registry.
  // Consider reproducing that here, to throw away this.resolved
  // in that case.
}
|
||||
|
||||
async resolve () {
|
||||
// fetching the manifest sets resolved and (if present) integrity
|
||||
await this.manifest()
|
||||
if (!this.resolved) {
|
||||
throw Object.assign(
|
||||
new Error('Invalid package manifest: no `dist.tarball` field'),
|
||||
{ package: this.spec.toString() }
|
||||
)
|
||||
}
|
||||
return this.resolved
|
||||
}
|
||||
|
||||
// Build the HTTP headers sent with packument requests, including the
// corgi/full-metadata accept header selection.
[_headers] () {
  return {
    // npm will override UA, but ensure that we always send *something*
    'user-agent': this.opts.userAgent ||
      `pacote/${pacoteVersion} node/${process.version}`,
    ...(this.opts.headers || {}),
    'pacote-version': pacoteVersion,
    'pacote-req-type': 'packument',
    'pacote-pkg-id': `registry:${this.spec.name}`,
    accept: this.fullMetadata ? fullDoc : corgiDoc,
  }
}
|
||||
|
||||
// Fetch the packument from the registry, memoizing through
// this.packumentCache, and falling back from the corgi (compressed)
// document to full metadata when the registry 404s on corgis.
async packument () {
  // note this might be either an in-flight promise for a request,
  // or the actual packument, but we never want to make more than
  // one request at a time for the same thing regardless.
  if (this.packumentCache && this.packumentCache.has(this.packumentUrl)) {
    return this.packumentCache.get(this.packumentUrl)
  }

  // npm-registry-fetch the packument
  // set the appropriate header for corgis if fullMetadata isn't set
  // return the res.json() promise
  try {
    const res = await fetch(this.packumentUrl, {
      ...this.opts,
      headers: this[_headers](),
      spec: this.spec,
      // never check integrity for packuments themselves
      integrity: null,
    })
    const packument = await res.json()
    packument._contentLength = +res.headers.get('content-length')
    if (this.packumentCache) {
      this.packumentCache.set(this.packumentUrl, packument)
    }
    return packument
  } catch (err) {
    // drop any stale/in-flight cache entry so a later call can retry
    if (this.packumentCache) {
      this.packumentCache.delete(this.packumentUrl)
    }
    if (err.code !== 'E404' || this.fullMetadata) {
      throw err
    }
    // possible that corgis are not supported by this registry
    this.fullMetadata = true
    return this.packument()
  }
}
|
||||
|
||||
  // Resolve this spec to a single version's manifest, normalizing it and
  // attaching the pacote-internal fields (_resolved, _integrity, _from,
  // _time, _signatures, _attestations).  When opts.verifySignatures or
  // opts.verifyAttestations is set, registry signatures / sigstore
  // attestations are cryptographically verified here and an error with a
  // specific `code` is thrown on any failure.  The result is memoized on
  // this.package.
  async manifest () {
    // memoized from a previous call
    if (this.package) {
      return this.package
    }

    // When verifying signatures, we need to fetch the full/uncompressed
    // packument to get publish time as this is not included in the
    // corgi/compressed packument.
    if (this.opts.verifySignatures) {
      this.fullMetadata = true
    }

    // pick the best-matching version out of the packument
    const packument = await this.packument()
    let mani = await pickManifest(packument, this.spec.fetchSpec, {
      ...this.opts,
      defaultTag: this.defaultTag,
      before: this.before,
    })
    mani = rpj.normalize(mani)
    /* XXX add ETARGET and E403 revalidation of cached packuments here */

    // add _time from packument if fetched with fullMetadata
    const time = packument.time?.[mani.version]
    if (time) {
      mani._time = time
    }

    // add _resolved and _integrity from dist object
    const { dist } = mani
    if (dist) {
      this.resolved = mani._resolved = dist.tarball
      mani._from = this.from
      // prefer the modern integrity field; fall back to the legacy sha1
      // shasum for older publishes
      const distIntegrity = dist.integrity ? ssri.parse(dist.integrity)
        : dist.shasum ? ssri.fromHex(dist.shasum, 'sha1', { ...this.opts })
        : null
      if (distIntegrity) {
        if (this.integrity && !this.integrity.match(distIntegrity)) {
          // only bork if they have algos in common.
          // otherwise we end up breaking if we have saved a sha512
          // previously for the tarball, but the manifest only
          // provides a sha1, which is possible for older publishes.
          // Otherwise, this is almost certainly a case of holding it
          // wrong, and will result in weird or insecure behavior
          // later on when building package tree.
          for (const algo of Object.keys(this.integrity)) {
            if (distIntegrity[algo]) {
              throw Object.assign(new Error(
                `Integrity checksum failed when using ${algo}: ` +
                `wanted ${this.integrity} but got ${distIntegrity}.`
              ), { code: 'EINTEGRITY' })
            }
          }
        }
        // made it this far, the integrity is worthwhile. accept it.
        // the setter here will take care of merging it into what we already
        // had.
        this.integrity = distIntegrity
      }
    }
    if (this.integrity) {
      mani._integrity = String(this.integrity)
      if (dist.signatures) {
        if (this.opts.verifySignatures) {
          // validate and throw on error, then set _signatures
          // the signed message is "<package id>:<integrity string>"
          const message = `${mani._id}:${mani._integrity}`
          for (const signature of dist.signatures) {
            // look up the registry public key matching this signature's keyid
            const publicKey = this.registryKeys &&
              this.registryKeys.filter(key => (key.keyid === signature.keyid))[0]
            if (!publicKey) {
              throw Object.assign(new Error(
                `${mani._id} has a registry signature with keyid: ${signature.keyid} ` +
                'but no corresponding public key can be found'
              ), { code: 'EMISSINGSIGNATUREKEY' })
            }

            // a key is only valid if it had not expired when the version
            // was published (falling back to MISSING_TIME_CUTOFF when the
            // packument carries no publish time)
            const publishedTime = Date.parse(mani._time || MISSING_TIME_CUTOFF)
            const validPublicKey = !publicKey.expires ||
              publishedTime < Date.parse(publicKey.expires)
            if (!validPublicKey) {
              throw Object.assign(new Error(
                `${mani._id} has a registry signature with keyid: ${signature.keyid} ` +
                `but the corresponding public key has expired ${publicKey.expires}`
              ), { code: 'EEXPIREDSIGNATUREKEY' })
            }
            // verify the base64 ECDSA signature over the message
            const verifier = crypto.createVerify('SHA256')
            verifier.write(message)
            verifier.end()
            const valid = verifier.verify(
              publicKey.pemkey,
              signature.sig,
              'base64'
            )
            if (!valid) {
              throw Object.assign(new Error(
                `${mani._id} has an invalid registry signature with ` +
                `keyid: ${publicKey.keyid} and signature: ${signature.sig}`
              ), {
                code: 'EINTEGRITYSIGNATURE',
                keyid: publicKey.keyid,
                signature: signature.sig,
                resolved: mani._resolved,
                integrity: mani._integrity,
              })
            }
          }
          mani._signatures = dist.signatures
        } else {
          // not verifying: pass the signatures through untouched
          mani._signatures = dist.signatures
        }
      }

      if (dist.attestations) {
        if (this.opts.verifyAttestations) {
          // Always fetch attestations from the current registry host
          const attestationsPath = new URL(dist.attestations.url).pathname
          const attestationsUrl = removeTrailingSlashes(this.registry) + attestationsPath
          const res = await fetch(attestationsUrl, {
            ...this.opts,
            // disable integrity check for attestations json payload, we check the
            // integrity in the verification steps below
            integrity: null,
          })
          const { attestations } = await res.json()
          // unpack each DSSE envelope into its statement, keyid and signature
          const bundles = attestations.map(({ predicateType, bundle }) => {
            const statement = JSON.parse(
              Buffer.from(bundle.dsseEnvelope.payload, 'base64').toString('utf8')
            )
            const keyid = bundle.dsseEnvelope.signatures[0].keyid
            const signature = bundle.dsseEnvelope.signatures[0].sig

            return {
              predicateType,
              bundle,
              statement,
              keyid,
              signature,
            }
          })

          // collect the registry keys referenced by any attestation keyid
          const attestationKeyIds = bundles.map((b) => b.keyid).filter((k) => !!k)
          const attestationRegistryKeys = (this.registryKeys || [])
            .filter(key => attestationKeyIds.includes(key.keyid))
          if (!attestationRegistryKeys.length) {
            throw Object.assign(new Error(
              `${mani._id} has attestations but no corresponding public key(s) can be found`
            ), { code: 'EMISSINGSIGNATUREKEY' })
          }

          for (const { predicateType, bundle, keyid, signature, statement } of bundles) {
            const publicKey = attestationRegistryKeys.find(key => key.keyid === keyid)
            // Publish attestations have a keyid set and a valid public key must be found
            if (keyid) {
              if (!publicKey) {
                throw Object.assign(new Error(
                  `${mani._id} has attestations with keyid: ${keyid} ` +
                  'but no corresponding public key can be found'
                ), { code: 'EMISSINGSIGNATUREKEY' })
              }

              // key validity is checked against the transparency-log
              // integration time (seconds -> ms)
              const integratedTime = new Date(
                Number(
                  bundle.verificationMaterial.tlogEntries[0].integratedTime
                ) * 1000
              )
              const validPublicKey = !publicKey.expires ||
                (integratedTime < Date.parse(publicKey.expires))
              if (!validPublicKey) {
                throw Object.assign(new Error(
                  `${mani._id} has attestations with keyid: ${keyid} ` +
                  `but the corresponding public key has expired ${publicKey.expires}`
                ), { code: 'EEXPIREDSIGNATUREKEY' })
              }
            }

            const subject = {
              name: statement.subject[0].name,
              sha512: statement.subject[0].digest.sha512,
            }

            // Only type 'version' can be turned into a PURL
            const purl = this.spec.type === 'version' ? npa.toPurl(this.spec) : this.spec
            // Verify the statement subject matches the package, version
            if (subject.name !== purl) {
              throw Object.assign(new Error(
                `${mani._id} package name and version (PURL): ${purl} ` +
                `doesn't match what was signed: ${subject.name}`
              ), { code: 'EATTESTATIONSUBJECT' })
            }

            // Verify the statement subject matches the tarball integrity
            const integrityHexDigest = ssri.parse(this.integrity).hexDigest()
            if (subject.sha512 !== integrityHexDigest) {
              throw Object.assign(new Error(
                `${mani._id} package integrity (hex digest): ` +
                `${integrityHexDigest} ` +
                `doesn't match what was signed: ${subject.sha512}`
              ), { code: 'EATTESTATIONSUBJECT' })
            }

            try {
              // Provenance attestations are signed with a signing certificate
              // (including the key) so we don't need to return a public key.
              //
              // Publish attestations are signed with a keyid so we need to
              // specify a public key from the keys endpoint: `registry-host.tld/-/npm/v1/keys`
              const options = {
                tufCachePath: this.tufCache,
                keySelector: publicKey ? () => publicKey.pemkey : undefined,
              }
              await sigstore.verify(bundle, options)
            } catch (e) {
              // wrap any sigstore failure in a pacote-coded error
              throw Object.assign(new Error(
                `${mani._id} failed to verify attestation: ${e.message}`
              ), {
                code: 'EATTESTATIONVERIFY',
                predicateType,
                keyid,
                signature,
                resolved: mani._resolved,
                integrity: mani._integrity,
              })
            }
          }
          mani._attestations = dist.attestations
        } else {
          // not verifying: pass the attestations through untouched
          mani._attestations = dist.attestations
        }
      }
    }

    // memoize for subsequent calls
    this.package = mani
    return this.package
  }
|
||||
|
||||
[_tarballFromResolved] () {
|
||||
// we use a RemoteFetcher to get the actual tarball stream
|
||||
return new RemoteFetcher(this.resolved, {
|
||||
...this.opts,
|
||||
resolved: this.resolved,
|
||||
pkgid: `registry:${this.spec.name}@${this.resolved}`,
|
||||
})[_tarballFromResolved]()
|
||||
}
|
||||
|
||||
get types () {
|
||||
return [
|
||||
'tag',
|
||||
'version',
|
||||
'range',
|
||||
]
|
||||
}
|
||||
}
|
||||
module.exports = RegistryFetcher
|
91
my-app/node_modules/pacote/lib/remote.js
generated
vendored
Executable file
91
my-app/node_modules/pacote/lib/remote.js
generated
vendored
Executable file
|
@ -0,0 +1,91 @@
|
|||
const Fetcher = require('./fetcher.js')
|
||||
const FileFetcher = require('./file.js')
|
||||
const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
|
||||
const pacoteVersion = require('../package.json').version
|
||||
const fetch = require('npm-registry-fetch')
|
||||
const { Minipass } = require('minipass')
|
||||
|
||||
const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches')
|
||||
const _headers = Symbol('_headers')
|
||||
// Fetcher for arbitrary remote tarball URLs.  The tarball is streamed
// through npm-registry-fetch / make-fetch-happen, which also handles the
// HTTP cache, so pacote adds no extra caching of its own for these.
class RemoteFetcher extends Fetcher {
  constructor (spec, opts) {
    super(spec, opts)
    this.resolved = this.spec.fetchSpec
    const parsed = new URL(this.resolved)

    // optionally swap the URL's host for the configured registry host,
    // governed by the replaceRegistryHost setting
    const hostMatches = this.replaceRegistryHost === 'always' ||
      this.replaceRegistryHost === parsed.host
    if (this.replaceRegistryHost !== 'never' && hostMatches) {
      this.resolved = new URL(parsed.pathname, this.registry).href
    }

    // nam is a fermented pork sausage that is good to eat
    const namePrefix = this.spec.name ? `${this.spec.name}@` : ''
    this.pkgid = opts.pkgid ? opts.pkgid : `remote:${namePrefix}${this.resolved}`
  }

  // Don't need to cache tarball fetches in pacote, because make-fetch-happen
  // will write into cacache anyway.
  get [_cacheFetches] () {
    return false
  }

  [_tarballFromResolved] () {
    const output = new Minipass()
    output.hasIntegrityEmitter = true

    const fetchOpts = {
      ...this.opts,
      headers: this[_headers](),
      spec: this.spec,
      integrity: this.integrity,
      algorithms: [this.pickIntegrityAlgorithm()],
    }

    // kick off the fetch and forward the body into our stream; any
    // failure (request or body) surfaces as an 'error' on the stream
    // eslint-disable-next-line promise/always-return
    fetch(this.resolved, fetchOpts).then(res => {
      res.body.on('error',
        /* istanbul ignore next - exceedingly rare and hard to simulate */
        er => output.emit('error', er)
      )

      // record the verified integrity and re-emit it downstream
      res.body.on('integrity', sri => {
        this.integrity = sri
        output.emit('integrity', sri)
      })

      res.body.pipe(output)
    }).catch(er => output.emit('error', er))

    return output
  }

  [_headers] () {
    // npm will override this, but ensure that we always send *something*
    const headers = {
      'user-agent': this.opts.userAgent ||
        `pacote/${pacoteVersion} node/${process.version}`,
      ...(this.opts.headers || {}),
      'pacote-version': pacoteVersion,
      'pacote-req-type': 'tarball',
      'pacote-pkg-id': this.pkgid,
    }
    if (this.integrity) {
      headers['pacote-integrity'] = String(this.integrity)
    }
    return { ...headers, ...(this.opts.headers || {}) }
  }

  get types () {
    return ['remote']
  }

  // getting a packument and/or manifest is the same as with a file: spec.
  // unpack the tarball stream, and then read from the package.json file.
  packument () {
    return FileFetcher.prototype.packument.apply(this)
  }

  manifest () {
    return FileFetcher.prototype.manifest.apply(this)
  }
}
module.exports = RemoteFetcher
|
15
my-app/node_modules/pacote/lib/util/add-git-sha.js
generated
vendored
Executable file
15
my-app/node_modules/pacote/lib/util/add-git-sha.js
generated
vendored
Executable file
|
@ -0,0 +1,15 @@
|
|||
// add a sha to a git remote url spec
|
||||
// Append a commit sha to a git remote url spec, replacing any existing
// committish.  Hosted specs (github: etc.) are re-rendered without their
// committish first; everything else gets plain string surgery.
const addGitSha = (spec, sha) => {
  const { hosted } = spec
  if (!hosted) {
    // don't use new URL for this, because it doesn't handle scp urls
    const stripped = spec.rawSpec.replace(/#.*$/, '')
    return `${stripped}#${sha}`
  }
  const opt = { noCommittish: true }
  // prefer the authenticated https form when credentials are embedded
  const base = (hosted.https && hosted.auth) ? hosted.https(opt) : hosted.shortcut(opt)
  return `${base}#${sha}`
}
|
||||
|
||||
module.exports = addGitSha
|
15
my-app/node_modules/pacote/lib/util/cache-dir.js
generated
vendored
Executable file
15
my-app/node_modules/pacote/lib/util/cache-dir.js
generated
vendored
Executable file
|
@ -0,0 +1,15 @@
|
|||
const os = require('os')
|
||||
const { resolve } = require('path')
|
||||
|
||||
module.exports = (fakePlatform = false) => {
|
||||
const temp = os.tmpdir()
|
||||
const uidOrPid = process.getuid ? process.getuid() : process.pid
|
||||
const home = os.homedir() || resolve(temp, 'npm-' + uidOrPid)
|
||||
const platform = fakePlatform || process.platform
|
||||
const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm'
|
||||
const cacheRoot = (platform === 'win32' && process.env.LOCALAPPDATA) || home
|
||||
return {
|
||||
cacache: resolve(cacheRoot, cacheExtra, '_cacache'),
|
||||
tufcache: resolve(cacheRoot, cacheExtra, '_tuf'),
|
||||
}
|
||||
}
|
25
my-app/node_modules/pacote/lib/util/is-package-bin.js
generated
vendored
Executable file
25
my-app/node_modules/pacote/lib/util/is-package-bin.js
generated
vendored
Executable file
|
@ -0,0 +1,25 @@
|
|||
// Function to determine whether a path is in the package.bin set.
|
||||
// Used to prevent issues when people publish a package from a
|
||||
// windows machine, and then install with --no-bin-links.
|
||||
//
|
||||
// Note: this is not possible in remote or file fetchers, since
|
||||
// we don't have the manifest until AFTER we've unpacked. But the
|
||||
// main use case is registry fetching with git a distant second,
|
||||
// so that's an acceptable edge case to not handle.
|
||||
|
||||
// Normalize the "bin" field: a bare string means a single bin named
// after the package itself.
const binObj = (name, bin) =>
  typeof bin === 'string' ? { [name]: bin } : bin

// True if the tarball entry at `path` is one of the package's bin targets.
const hasBin = (pkg, path) => {
  const bin = binObj(pkg.name, pkg.bin)
  // strip the leading tarball prefix (usually "package/")
  const rel = path.replace(/^[^\\/]*\//, '')
  return Object.values(bin).includes(rel)
}
|
||||
|
||||
module.exports = (pkg, path) =>
|
||||
pkg && pkg.bin ? hasBin(pkg, path) : false
|
14
my-app/node_modules/pacote/lib/util/npm.js
generated
vendored
Executable file
14
my-app/node_modules/pacote/lib/util/npm.js
generated
vendored
Executable file
|
@ -0,0 +1,14 @@
|
|||
// run an npm command
|
||||
const spawn = require('@npmcli/promise-spawn')
|
||||
|
||||
module.exports = (npmBin, npmCommand, cwd, env, extra) => {
|
||||
const isJS = npmBin.endsWith('.js')
|
||||
const cmd = isJS ? process.execPath : npmBin
|
||||
const args = (isJS ? [npmBin] : []).concat(npmCommand)
|
||||
// when installing to run the `prepare` script for a git dep, we need
|
||||
// to ensure that we don't run into a cycle of checking out packages
|
||||
// in temp directories. this lets us link previously-seen repos that
|
||||
// are also being prepared.
|
||||
|
||||
return spawn(cmd, args, { cwd, env }, extra)
|
||||
}
|
31
my-app/node_modules/pacote/lib/util/tar-create-options.js
generated
vendored
Executable file
31
my-app/node_modules/pacote/lib/util/tar-create-options.js
generated
vendored
Executable file
|
@ -0,0 +1,31 @@
|
|||
const isPackageBin = require('./is-package-bin.js')
|
||||
|
||||
// Build the tar.create() options used when packing a manifest's
// extracted tree into a reproducible tarball.
const tarCreateOptions = manifest => {
  return {
    cwd: manifest._resolved,
    prefix: 'package/',
    portable: true,
    gzip: {
      // force level 9 to dodge platform-specific zlib optimizations that
      // would otherwise produce different bytes (and integrity mismatch
      // errors) for the same input
      level: 9,
    },

    // ensure that package bins are always executable.  npm-packlist has
    // already filtered out anything that is not a regular file, ignored
    // by .npmignore or package.json "files", etc.
    filter: (path, stat) => {
      if (isPackageBin(manifest, path)) {
        stat.mode |= 0o111
      }
      return true
    },

    // Provide a specific date in the 1980s for the benefit of zip,
    // which is confounded by files dated at the Unix epoch 0.
    mtime: new Date('1985-10-26T08:15:00.000Z'),
  }
}
|
||||
|
||||
module.exports = tarCreateOptions
|
10
my-app/node_modules/pacote/lib/util/trailing-slashes.js
generated
vendored
Executable file
10
my-app/node_modules/pacote/lib/util/trailing-slashes.js
generated
vendored
Executable file
|
@ -0,0 +1,10 @@
|
|||
// Strip all trailing '/' characters from a string.  A simple scan is
// used instead of a regexp to sidestep ReDoS-pattern detection.
const removeTrailingSlashes = (input) => {
  let end = input.length
  while (end > 0 && input[end - 1] === '/') {
    end -= 1
  }
  return end === input.length ? input : input.slice(0, end)
}
|
||||
|
||||
module.exports = removeTrailingSlashes
|
79
my-app/node_modules/pacote/package.json
generated
vendored
Executable file
79
my-app/node_modules/pacote/package.json
generated
vendored
Executable file
|
@ -0,0 +1,79 @@
|
|||
{
|
||||
"name": "pacote",
|
||||
"version": "17.0.5",
|
||||
"description": "JavaScript package downloader",
|
||||
"author": "GitHub Inc.",
|
||||
"bin": {
|
||||
"pacote": "lib/bin.js"
|
||||
},
|
||||
"license": "ISC",
|
||||
"main": "lib/index.js",
|
||||
"scripts": {
|
||||
"test": "tap",
|
||||
"snap": "tap",
|
||||
"lint": "eslint \"**/*.js\"",
|
||||
"postlint": "template-oss-check",
|
||||
"lintfix": "npm run lint -- --fix",
|
||||
"posttest": "npm run lint",
|
||||
"template-oss-apply": "template-oss-apply --force"
|
||||
},
|
||||
"tap": {
|
||||
"timeout": 300,
|
||||
"nyc-arg": [
|
||||
"--exclude",
|
||||
"tap-snapshots/**"
|
||||
]
|
||||
},
|
||||
"devDependencies": {
|
||||
"@npmcli/arborist": "^7.1.0",
|
||||
"@npmcli/eslint-config": "^4.0.0",
|
||||
"@npmcli/template-oss": "4.19.0",
|
||||
"hosted-git-info": "^7.0.0",
|
||||
"mutate-fs": "^2.1.1",
|
||||
"nock": "^13.2.4",
|
||||
"npm-registry-mock": "^1.3.2",
|
||||
"tap": "^16.0.1"
|
||||
},
|
||||
"files": [
|
||||
"bin/",
|
||||
"lib/"
|
||||
],
|
||||
"keywords": [
|
||||
"packages",
|
||||
"npm",
|
||||
"git"
|
||||
],
|
||||
"dependencies": {
|
||||
"@npmcli/git": "^5.0.0",
|
||||
"@npmcli/installed-package-contents": "^2.0.1",
|
||||
"@npmcli/promise-spawn": "^7.0.0",
|
||||
"@npmcli/run-script": "^7.0.0",
|
||||
"cacache": "^18.0.0",
|
||||
"fs-minipass": "^3.0.0",
|
||||
"minipass": "^7.0.2",
|
||||
"npm-package-arg": "^11.0.0",
|
||||
"npm-packlist": "^8.0.0",
|
||||
"npm-pick-manifest": "^9.0.0",
|
||||
"npm-registry-fetch": "^16.0.0",
|
||||
"proc-log": "^3.0.0",
|
||||
"promise-retry": "^2.0.1",
|
||||
"read-package-json": "^7.0.0",
|
||||
"read-package-json-fast": "^3.0.0",
|
||||
"sigstore": "^2.0.0",
|
||||
"ssri": "^10.0.0",
|
||||
"tar": "^6.1.11"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^16.14.0 || >=18.0.0"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/npm/pacote.git"
|
||||
},
|
||||
"templateOSS": {
|
||||
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
|
||||
"version": "4.19.0",
|
||||
"windowsCI": false,
|
||||
"publish": "true"
|
||||
}
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue