Deployed the page to GitHub Pages.

Batuhan Berk Başoğlu 2024-11-03 21:30:09 -05:00
parent 1d79754e93
commit 2c89899458
Signed by: batuhan-basoglu
SSH key fingerprint: SHA256:kEsnuHX+qbwhxSAXPUQ4ox535wFHu/hIRaa53FzxRpo
62,797 changed files with 6,551,425 additions and 15,279 deletions

node_modules/pacote/lib/dir.js (generated, vendored, new file)

@@ -0,0 +1,100 @@
const { resolve } = require('node:path')
const packlist = require('npm-packlist')
const runScript = require('@npmcli/run-script')
const tar = require('tar')
const { Minipass } = require('minipass')
const Fetcher = require('./fetcher.js')
const FileFetcher = require('./file.js')
const _ = require('./util/protected.js')
const tarCreateOptions = require('./util/tar-create-options.js')
class DirFetcher extends Fetcher {
constructor (spec, opts) {
super(spec, opts)
// just the fully resolved filename
this.resolved = this.spec.fetchSpec
this.tree = opts.tree || null
this.Arborist = opts.Arborist || null
}
// exposes tarCreateOptions as public API
static tarCreateOptions (manifest) {
return tarCreateOptions(manifest)
}
get types () {
return ['directory']
}
#prepareDir () {
return this.manifest().then(mani => {
if (!mani.scripts || !mani.scripts.prepare) {
return
}
// we *only* run prepare.
// pre/post-pack is run by the npm CLI for publish and pack,
// but this function is *also* run when installing git deps
const stdio = this.opts.foregroundScripts ? 'inherit' : 'pipe'
return runScript({
pkg: mani,
event: 'prepare',
path: this.resolved,
stdio,
env: {
npm_package_resolved: this.resolved,
npm_package_integrity: this.integrity,
npm_package_json: resolve(this.resolved, 'package.json'),
},
})
})
}
[_.tarballFromResolved] () {
if (!this.tree && !this.Arborist) {
throw new Error('DirFetcher requires either a tree or an Arborist constructor to pack')
}
const stream = new Minipass()
stream.resolved = this.resolved
stream.integrity = this.integrity
const { prefix, workspaces } = this.opts
// run the prepare script, get the list of files, and tar it up
// pipe to the stream, and proxy errors to the chain.
this.#prepareDir()
.then(async () => {
if (!this.tree) {
const arb = new this.Arborist({ path: this.resolved })
this.tree = await arb.loadActual()
}
return packlist(this.tree, { path: this.resolved, prefix, workspaces })
})
.then(files => tar.c(tarCreateOptions(this.package), files)
.on('error', er => stream.emit('error', er)).pipe(stream))
.catch(er => stream.emit('error', er))
return stream
}
manifest () {
if (this.package) {
return Promise.resolve(this.package)
}
return this[_.readPackageJson](this.resolved)
.then(mani => this.package = {
...mani,
_integrity: this.integrity && String(this.integrity),
_resolved: this.resolved,
_from: this.from,
})
}
packument () {
return FileFetcher.prototype.packument.apply(this)
}
}
module.exports = DirFetcher
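
The class above refuses to pack unless it is given an Arborist constructor (or a pre-loaded tree), so a minimal usage sketch through pacote's public API looks like the following — assuming pacote and @npmcli/arborist are installed, with './my-pkg' standing in for a real local package directory:

const pacote = require('pacote')
const Arborist = require('@npmcli/arborist')

// a directory spec routes to DirFetcher; Arborist loads the actual tree
// that npm-packlist uses to decide which files go into the tarball
pacote.tarball('file:./my-pkg', { Arborist })
  .then(data => console.log(data.resolved, data.integrity))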

node_modules/pacote/lib/fetcher.js (generated, vendored, new file)

@@ -0,0 +1,489 @@
// This is the base class that the other fetcher types in lib
// all descend from.
// It handles the unpacking and retry logic that is shared among
// all of the other Fetcher types.
const { basename, dirname } = require('node:path')
const { rm, mkdir } = require('node:fs/promises')
const PackageJson = require('@npmcli/package-json')
const cacache = require('cacache')
const fsm = require('fs-minipass')
const getContents = require('@npmcli/installed-package-contents')
const npa = require('npm-package-arg')
const retry = require('promise-retry')
const ssri = require('ssri')
const tar = require('tar')
const { Minipass } = require('minipass')
const { log } = require('proc-log')
const _ = require('./util/protected.js')
const cacheDir = require('./util/cache-dir.js')
const isPackageBin = require('./util/is-package-bin.js')
const removeTrailingSlashes = require('./util/trailing-slashes.js')
// Pacote is only concerned with the package.json contents
const packageJsonPrepare = (p) => PackageJson.prepare(p).then(pkg => pkg.content)
const packageJsonNormalize = (p) => PackageJson.normalize(p).then(pkg => pkg.content)
class FetcherBase {
constructor (spec, opts) {
if (!opts || typeof opts !== 'object') {
throw new TypeError('options object is required')
}
this.spec = npa(spec, opts.where)
this.allowGitIgnore = !!opts.allowGitIgnore
// a bit redundant because presumably the caller already knows this,
// but it makes it easier to not have to keep track of the requested
// spec when we're dispatching thousands of these at once, and normalizing
// is nice. saveSpec is preferred if set, because it turns stuff like
// x/y#committish into github:x/y#committish. use name@rawSpec for
// registry deps so that we turn xyz and xyz@ -> xyz@
this.from = this.spec.registry
? `${this.spec.name}@${this.spec.rawSpec}` : this.spec.saveSpec
this.#assertType()
// clone the opts object so that others aren't upset when we mutate it
// by adding/modifying the integrity value.
this.opts = { ...opts }
this.cache = opts.cache || cacheDir().cacache
this.tufCache = opts.tufCache || cacheDir().tufcache
this.resolved = opts.resolved || null
// default to caching/verifying with sha512, that's what we usually have
// need to change this default, or start overriding it, when sha512
// is no longer strong enough.
this.defaultIntegrityAlgorithm = opts.defaultIntegrityAlgorithm || 'sha512'
if (typeof opts.integrity === 'string') {
this.opts.integrity = ssri.parse(opts.integrity)
}
this.package = null
this.type = this.constructor.name
this.fmode = opts.fmode || 0o666
this.dmode = opts.dmode || 0o777
// we don't need a default umask, because we don't chmod files coming
// out of package tarballs. they're forced to have a mode that is
// valid, regardless of what's in the tarball entry, and then we let
// the process's umask setting do its job. but if configured, we do
// respect it.
this.umask = opts.umask || 0
this.preferOnline = !!opts.preferOnline
this.preferOffline = !!opts.preferOffline
this.offline = !!opts.offline
this.before = opts.before
this.fullMetadata = this.before ? true : !!opts.fullMetadata
this.fullReadJson = !!opts.fullReadJson
this[_.readPackageJson] = this.fullReadJson
? packageJsonPrepare
: packageJsonNormalize
// rrh is a registry hostname or 'never' or 'always'
// defaults to registry.npmjs.org
this.replaceRegistryHost = (!opts.replaceRegistryHost || opts.replaceRegistryHost === 'npmjs') ?
'registry.npmjs.org' : opts.replaceRegistryHost
this.defaultTag = opts.defaultTag || 'latest'
this.registry = removeTrailingSlashes(opts.registry || 'https://registry.npmjs.org')
// command to run 'prepare' scripts on directories and git dirs
// To use pacote with yarn, for example, set npmBin to 'yarn'
// and npmCliConfig with yarn's equivalents.
this.npmBin = opts.npmBin || 'npm'
// command to install deps for preparing
this.npmInstallCmd = opts.npmInstallCmd || ['install', '--force']
// XXX fill more of this in based on what we know from this.opts
// we explicitly DO NOT fill in --tag, though, since we are often
// going to be packing in the context of a publish, which may set
// a dist-tag, but certainly wants to keep defaulting to latest.
this.npmCliConfig = opts.npmCliConfig || [
`--cache=${dirname(this.cache)}`,
`--prefer-offline=${!!this.preferOffline}`,
`--prefer-online=${!!this.preferOnline}`,
`--offline=${!!this.offline}`,
...(this.before ? [`--before=${this.before.toISOString()}`] : []),
'--no-progress',
'--no-save',
'--no-audit',
// override any omit settings from the environment
'--include=dev',
'--include=peer',
'--include=optional',
// we need the actual things, not just the lockfile
'--no-package-lock-only',
'--no-dry-run',
]
}
get integrity () {
return this.opts.integrity || null
}
set integrity (i) {
if (!i) {
return
}
i = ssri.parse(i)
const current = this.opts.integrity
// do not ever update an existing hash value, but do
// merge in NEW algos and hashes that we don't already have.
if (current) {
current.merge(i)
} else {
this.opts.integrity = i
}
}
get notImplementedError () {
return new Error('not implemented in this fetcher type: ' + this.type)
}
// override in child classes
// Returns a Promise that resolves to this.resolved string value
resolve () {
return this.resolved ? Promise.resolve(this.resolved)
: Promise.reject(this.notImplementedError)
}
packument () {
return Promise.reject(this.notImplementedError)
}
// override in child class
// returns a manifest containing:
// - name
// - version
// - _resolved
// - _integrity
// - plus whatever else was in there (corgi, full metadata, or pj file)
manifest () {
return Promise.reject(this.notImplementedError)
}
// private, should be overridden.
// Note that they should *not* calculate or check integrity or cache,
// but *just* return the raw tarball data stream.
[_.tarballFromResolved] () {
throw this.notImplementedError
}
// public, should not be overridden
tarball () {
return this.tarballStream(stream => stream.concat().then(data => {
data.integrity = this.integrity && String(this.integrity)
data.resolved = this.resolved
data.from = this.from
return data
}))
}
// private
// Note: cacache will raise an EINTEGRITY error if the integrity doesn't match
#tarballFromCache () {
return cacache.get.stream.byDigest(this.cache, this.integrity, this.opts)
}
get [_.cacheFetches] () {
return true
}
#istream (stream) {
// if not caching this, just return it
if (!this.opts.cache || !this[_.cacheFetches]) {
// instead of creating a new integrity stream, we only piggyback on the
// provided stream's events
if (stream.hasIntegrityEmitter) {
stream.on('integrity', i => this.integrity = i)
return stream
}
const istream = ssri.integrityStream(this.opts)
istream.on('integrity', i => this.integrity = i)
stream.on('error', err => istream.emit('error', err))
return stream.pipe(istream)
}
// we have to return a stream that gets ALL the data, and proxies errors,
// but then pipe from the original tarball stream into the cache as well.
// To do this without losing any data, and since the cacache put stream
// is not a passthrough, we have to pipe from the original stream into
// the cache AFTER we pipe into the middleStream. Since the cache stream
// has an asynchronous flush to write its contents to disk, we need to
// defer the middleStream end until the cache stream ends.
const middleStream = new Minipass()
stream.on('error', err => middleStream.emit('error', err))
stream.pipe(middleStream, { end: false })
const cstream = cacache.put.stream(
this.opts.cache,
`pacote:tarball:${this.from}`,
this.opts
)
cstream.on('integrity', i => this.integrity = i)
cstream.on('error', err => stream.emit('error', err))
stream.pipe(cstream)
// eslint-disable-next-line promise/catch-or-return
cstream.promise().catch(() => {}).then(() => middleStream.end())
return middleStream
}
pickIntegrityAlgorithm () {
return this.integrity ? this.integrity.pickAlgorithm(this.opts)
: this.defaultIntegrityAlgorithm
}
// TODO: check error class, once those are rolled out to our deps
isDataCorruptionError (er) {
return er.code === 'EINTEGRITY' || er.code === 'Z_DATA_ERROR'
}
// override the types getter
get types () {
return false
}
#assertType () {
if (this.types && !this.types.includes(this.spec.type)) {
throw new TypeError(`Wrong spec type (${
this.spec.type
}) for ${
this.constructor.name
}. Supported types: ${this.types.join(', ')}`)
}
}
// We allow ENOENTs from cacache, but not anywhere else.
// An ENOENT trying to read a tgz file, for example, is Right Out.
isRetriableError (er) {
// TODO: check error class, once those are rolled out to our deps
return this.isDataCorruptionError(er) ||
er.code === 'ENOENT' ||
er.code === 'EISDIR'
}
// Mostly internal, but has some uses
// Pass in a function which returns a promise
// Function will be called 1 or more times with streams that may fail.
// Retries:
// Function MUST handle errors on the stream by rejecting the promise,
// so that retry logic can pick it up and either retry or fail whatever
// promise it was making (ie, failing extraction, etc.)
//
// The return value of this method is a Promise that resolves the same
// as whatever the streamHandler resolves to.
//
// This should never be overridden by child classes, but it is public.
tarballStream (streamHandler) {
// Only short-circuit via cache if we have everything else we'll need,
// and the user has not expressed a preference for checking online.
const fromCache = (
!this.preferOnline &&
this.integrity &&
this.resolved
) ? streamHandler(this.#tarballFromCache()).catch(er => {
if (this.isDataCorruptionError(er)) {
log.warn('tarball', `cached data for ${
this.spec
} (${this.integrity}) seems to be corrupted. Refreshing cache.`)
return this.cleanupCached().then(() => {
throw er
})
} else {
throw er
}
}) : null
const fromResolved = er => {
if (er) {
if (!this.isRetriableError(er)) {
throw er
}
log.silly('tarball', `no local data for ${
this.spec
}. Extracting by manifest.`)
}
return this.resolve().then(() => retry(tryAgain =>
streamHandler(this.#istream(this[_.tarballFromResolved]()))
.catch(streamErr => {
// Most likely a data integrity error. A cache ENOENT error is unlikely
// here, since we're definitely not reading from the cache, but it
// IS possible that the fetch subsystem accessed the cache, and the
// entry got blown away or something. Try one more time to be sure.
if (this.isRetriableError(streamErr)) {
log.warn('tarball', `tarball data for ${
this.spec
} (${this.integrity}) seems to be corrupted. Trying again.`)
return this.cleanupCached().then(() => tryAgain(streamErr))
}
throw streamErr
}), { retries: 1, minTimeout: 0, maxTimeout: 0 }))
}
return fromCache ? fromCache.catch(fromResolved) : fromResolved()
}
cleanupCached () {
return cacache.rm.content(this.cache, this.integrity, this.opts)
}
#empty (path) {
return getContents({ path, depth: 1 }).then(contents => Promise.all(
contents.map(entry => rm(entry, { recursive: true, force: true }))))
}
async #mkdir (dest) {
await this.#empty(dest)
return await mkdir(dest, { recursive: true })
}
// extraction is always the same. the only difference is where
// the tarball comes from.
async extract (dest) {
await this.#mkdir(dest)
return this.tarballStream((tarball) => this.#extract(dest, tarball))
}
#toFile (dest) {
return this.tarballStream(str => new Promise((res, rej) => {
const writer = new fsm.WriteStream(dest)
str.on('error', er => writer.emit('error', er))
writer.on('error', er => rej(er))
writer.on('close', () => res({
integrity: this.integrity && String(this.integrity),
resolved: this.resolved,
from: this.from,
}))
str.pipe(writer)
}))
}
// don't use this.#mkdir because we don't want to rimraf anything
async tarballFile (dest) {
const dir = dirname(dest)
await mkdir(dir, { recursive: true })
return this.#toFile(dest)
}
#extract (dest, tarball) {
const extractor = tar.x(this.#tarxOptions({ cwd: dest }))
const p = new Promise((resolve, reject) => {
extractor.on('end', () => {
resolve({
resolved: this.resolved,
integrity: this.integrity && String(this.integrity),
from: this.from,
})
})
extractor.on('error', er => {
log.warn('tar', er.message)
log.silly('tar', er)
reject(er)
})
tarball.on('error', er => reject(er))
})
tarball.pipe(extractor)
return p
}
// always ensure that entries are at least as permissive as our configured
// dmode/fmode, but never more permissive than the umask allows.
#entryMode (path, mode, type) {
const m = /Directory|GNUDumpDir/.test(type) ? this.dmode
: /File$/.test(type) ? this.fmode
: /* istanbul ignore next - should never happen in a pkg */ 0
// make sure package bins are executable
const exe = isPackageBin(this.package, path) ? 0o111 : 0
// always ensure that files are read/writable by the owner
return ((mode | m) & ~this.umask) | exe | 0o600
}
#tarxOptions ({ cwd }) {
const sawIgnores = new Set()
return {
cwd,
noChmod: true,
noMtime: true,
filter: (name, entry) => {
if (/Link$/.test(entry.type)) {
return false
}
entry.mode = this.#entryMode(entry.path, entry.mode, entry.type)
// this replicates the npm pack behavior where .gitignore files
// are treated like .npmignore files, but only if a .npmignore
// file is not present.
if (/File$/.test(entry.type)) {
const base = basename(entry.path)
if (base === '.npmignore') {
sawIgnores.add(entry.path)
} else if (base === '.gitignore' && !this.allowGitIgnore) {
// rename, but only if there's not already a .npmignore
const ni = entry.path.replace(/\.gitignore$/, '.npmignore')
if (sawIgnores.has(ni)) {
return false
}
entry.path = ni
}
return true
}
},
strip: 1,
onwarn: /* istanbul ignore next - we can trust that tar logs */
(code, msg, data) => {
log.warn('tar', code, msg)
log.silly('tar', code, msg, data)
},
umask: this.umask,
// always ignore ownership info from tarball metadata
preserveOwner: false,
}
}
}
module.exports = FetcherBase
// Child classes
const GitFetcher = require('./git.js')
const RegistryFetcher = require('./registry.js')
const FileFetcher = require('./file.js')
const DirFetcher = require('./dir.js')
const RemoteFetcher = require('./remote.js')
// Get an appropriate fetcher object from a spec and options
FetcherBase.get = (rawSpec, opts = {}) => {
const spec = npa(rawSpec, opts.where)
switch (spec.type) {
case 'git':
return new GitFetcher(spec, opts)
case 'remote':
return new RemoteFetcher(spec, opts)
case 'version':
case 'range':
case 'tag':
case 'alias':
return new RegistryFetcher(spec.subSpec || spec, opts)
case 'file':
return new FileFetcher(spec, opts)
case 'directory':
return new DirFetcher(spec, opts)
default:
throw new TypeError('Unknown spec type: ' + spec.type)
}
}
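
FetcherBase.get dispatches purely on the spec type assigned by npm-package-arg, so the routing can be previewed without fetching anything. A small sketch, assuming npm-package-arg is available (the specs themselves are illustrative):

const npa = require('npm-package-arg')

const specs = ['abbrev@^1.0.0', 'github:npm/abbrev', 'file:./a.tgz', './some-dir', 'https://example.com/a.tgz']
for (const raw of specs) {
  // range, git, file, directory, remote -> RegistryFetcher, GitFetcher,
  // FileFetcher, DirFetcher, RemoteFetcher respectively
  console.log(raw, '->', npa(raw).type)
}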

node_modules/pacote/lib/file.js (generated, vendored, new file)

@@ -0,0 +1,94 @@
const { resolve } = require('node:path')
const { stat, chmod } = require('node:fs/promises')
const cacache = require('cacache')
const fsm = require('fs-minipass')
const Fetcher = require('./fetcher.js')
const _ = require('./util/protected.js')
class FileFetcher extends Fetcher {
constructor (spec, opts) {
super(spec, opts)
// just the fully resolved filename
this.resolved = this.spec.fetchSpec
}
get types () {
return ['file']
}
manifest () {
if (this.package) {
return Promise.resolve(this.package)
}
// have to unpack the tarball for this.
return cacache.tmp.withTmp(this.cache, this.opts, dir =>
this.extract(dir)
.then(() => this[_.readPackageJson](dir))
.then(mani => this.package = {
...mani,
_integrity: this.integrity && String(this.integrity),
_resolved: this.resolved,
_from: this.from,
}))
}
#exeBins (pkg, dest) {
if (!pkg.bin) {
return Promise.resolve()
}
return Promise.all(Object.keys(pkg.bin).map(async k => {
const script = resolve(dest, pkg.bin[k])
// Best effort. Ignore errors here, the only result is that
// a bin script is not executable. But if it's missing or
// something, we just leave it for a later stage to trip over
// when we can provide a more useful contextual error.
try {
const st = await stat(script)
const mode = st.mode | 0o111
if (mode === st.mode) {
return
}
await chmod(script, mode)
} catch {
// Ignore errors here
}
}))
}
extract (dest) {
// if we've already loaded the manifest, then the super got it.
// but if not, read the unpacked manifest and chmod properly.
return super.extract(dest)
.then(result => this.package ? result
: this[_.readPackageJson](dest).then(pkg =>
this.#exeBins(pkg, dest)).then(() => result))
}
[_.tarballFromResolved] () {
// create a read stream and return it
return new fsm.ReadStream(this.resolved)
}
packument () {
// simulate based on manifest
return this.manifest().then(mani => ({
name: mani.name,
'dist-tags': {
[this.defaultTag]: mani.version,
},
versions: {
[mani.version]: {
...mani,
dist: {
tarball: `file:${this.resolved}`,
integrity: this.integrity && String(this.integrity),
},
},
},
}))
}
}
module.exports = FileFetcher
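
Since packument() above is simulated from the manifest, a file: tarball always yields a single-version packument keyed under defaultTag. A sketch, with './demo-1.0.0.tgz' as a hypothetical local tarball:

const pacote = require('pacote')

pacote.packument('file:./demo-1.0.0.tgz').then(doc => {
  console.log(doc['dist-tags'])           // { latest: '1.0.0' } unless defaultTag is overridden
  console.log(Object.keys(doc.versions))  // [ '1.0.0' ]
})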

node_modules/pacote/lib/git.js (generated, vendored, new file)

@@ -0,0 +1,317 @@
const cacache = require('cacache')
const git = require('@npmcli/git')
const npa = require('npm-package-arg')
const pickManifest = require('npm-pick-manifest')
const { Minipass } = require('minipass')
const { log } = require('proc-log')
const DirFetcher = require('./dir.js')
const Fetcher = require('./fetcher.js')
const FileFetcher = require('./file.js')
const RemoteFetcher = require('./remote.js')
const _ = require('./util/protected.js')
const addGitSha = require('./util/add-git-sha.js')
const npm = require('./util/npm.js')
const hashre = /^[a-f0-9]{40}$/
// get the repository url.
// prefer https if there's auth, since ssh will drop that.
// otherwise, prefer ssh if available (more secure).
// We have to add the git+ back because npa suppresses it.
const repoUrl = (h, opts) =>
h.sshurl && !(h.https && h.auth) && addGitPlus(h.sshurl(opts)) ||
h.https && addGitPlus(h.https(opts))
// add git+ to the url, but only one time.
const addGitPlus = url => url && `git+${url}`.replace(/^(git\+)+/, 'git+')
class GitFetcher extends Fetcher {
constructor (spec, opts) {
super(spec, opts)
// we never want to compare integrity for git dependencies: npm/rfcs#525
if (this.opts.integrity) {
delete this.opts.integrity
log.warn(`skipping integrity check for git dependency ${this.spec.fetchSpec}`)
}
this.resolvedRef = null
if (this.spec.hosted) {
this.from = this.spec.hosted.shortcut({ noCommittish: false })
}
// shortcut: avoid full clone when we can go straight to the tgz
// if we have the full sha and it's a hosted git platform
if (this.spec.gitCommittish && hashre.test(this.spec.gitCommittish)) {
this.resolvedSha = this.spec.gitCommittish
// use hosted.tarball() when we shell to RemoteFetcher later
this.resolved = this.spec.hosted
? repoUrl(this.spec.hosted, { noCommittish: false })
: this.spec.rawSpec
} else {
this.resolvedSha = ''
}
this.Arborist = opts.Arborist || null
}
// just exposed to make it easier to test all the combinations
static repoUrl (hosted, opts) {
return repoUrl(hosted, opts)
}
get types () {
return ['git']
}
resolve () {
// likely a hosted git repo with a sha, so get the tarball url
// but in general, no reason to resolve() more than necessary!
if (this.resolved) {
return super.resolve()
}
// fetch the git repo and then look at the current hash
const h = this.spec.hosted
// try to use ssh, fall back to git.
return h
? this.#resolvedFromHosted(h)
: this.#resolvedFromRepo(this.spec.fetchSpec)
}
// first try https, since that's faster and passphrase-less for
// public repos, and supports private repos when auth is provided.
// Fall back to SSH to support private repos
// NB: we always store the https url in resolved field if auth
// is present, otherwise ssh if the hosted type provides it
#resolvedFromHosted (hosted) {
return this.#resolvedFromRepo(hosted.https && hosted.https()).catch(er => {
// Throw early since we know pathspec errors will fail again if retried
if (er instanceof git.errors.GitPathspecError) {
throw er
}
const ssh = hosted.sshurl && hosted.sshurl()
// no fallthrough if we can't fall through or have https auth
if (!ssh || hosted.auth) {
throw er
}
return this.#resolvedFromRepo(ssh)
})
}
#resolvedFromRepo (gitRemote) {
// XXX make this a custom error class
if (!gitRemote) {
return Promise.reject(new Error(`No git url for ${this.spec}`))
}
const gitRange = this.spec.gitRange
const name = this.spec.name
return git.revs(gitRemote, this.opts).then(remoteRefs => {
return gitRange ? pickManifest({
versions: remoteRefs.versions,
'dist-tags': remoteRefs['dist-tags'],
name,
}, gitRange, this.opts)
: this.spec.gitCommittish ?
remoteRefs.refs[this.spec.gitCommittish] ||
remoteRefs.refs[remoteRefs.shas[this.spec.gitCommittish]]
: remoteRefs.refs.HEAD // no git committish, get default head
}).then(revDoc => {
// the committish provided isn't in the rev list
// things like HEAD~3 or @yesterday can land here.
if (!revDoc || !revDoc.sha) {
return this.#resolvedFromClone()
}
this.resolvedRef = revDoc
this.resolvedSha = revDoc.sha
this.#addGitSha(revDoc.sha)
return this.resolved
})
}
#setResolvedWithSha (withSha) {
// we haven't cloned, so a tgz download is still faster
// of course, if it's not a known host, we can't do that.
this.resolved = !this.spec.hosted ? withSha
: repoUrl(npa(withSha).hosted, { noCommittish: false })
}
// when we get the git sha, we affix it to our spec to build up
// either a git url with a hash, or a tarball download URL
#addGitSha (sha) {
this.#setResolvedWithSha(addGitSha(this.spec, sha))
}
#resolvedFromClone () {
// do a full or shallow clone, then look at the HEAD
// kind of wasteful, but no other option, really
return this.#clone(() => this.resolved)
}
#prepareDir (dir) {
return this[_.readPackageJson](dir).then(mani => {
// no need if we aren't going to do any preparation.
const scripts = mani.scripts
if (!mani.workspaces && (!scripts || !(
scripts.postinstall ||
scripts.build ||
scripts.preinstall ||
scripts.install ||
scripts.prepack ||
scripts.prepare))) {
return
}
// to avoid cases where we have a cycle of git deps that depend
// on one another, we only ever do preparation for one instance
// of a given git dep along the chain of installations.
// Note that this does mean that a dependency MAY in theory end up
// trying to run its prepare script using a dependency that has not
// been properly prepared itself, but that edge case is smaller
// and less hazardous than a fork bomb of npm and git commands.
const noPrepare = !process.env._PACOTE_NO_PREPARE_ ? []
: process.env._PACOTE_NO_PREPARE_.split('\n')
if (noPrepare.includes(this.resolved)) {
log.info('prepare', 'skip prepare, already seen', this.resolved)
return
}
noPrepare.push(this.resolved)
// the DirFetcher will do its own preparation to run the prepare scripts
// All we have to do is put the deps in place so that it can succeed.
return npm(
this.npmBin,
[].concat(this.npmInstallCmd).concat(this.npmCliConfig),
dir,
{ ...process.env, _PACOTE_NO_PREPARE_: noPrepare.join('\n') },
{ message: 'git dep preparation failed' }
)
})
}
[_.tarballFromResolved] () {
const stream = new Minipass()
stream.resolved = this.resolved
stream.from = this.from
// check it out and then shell out to the DirFetcher tarball packer
this.#clone(dir => this.#prepareDir(dir)
.then(() => new Promise((res, rej) => {
if (!this.Arborist) {
throw new Error('GitFetcher requires an Arborist constructor to pack a tarball')
}
const df = new DirFetcher(`file:${dir}`, {
...this.opts,
Arborist: this.Arborist,
resolved: null,
integrity: null,
})
const dirStream = df[_.tarballFromResolved]()
dirStream.on('error', rej)
dirStream.on('end', res)
dirStream.pipe(stream)
}))).catch(
/* istanbul ignore next: very unlikely and hard to test */
er => stream.emit('error', er)
)
return stream
}
// clone a git repo into a temp folder (or fetch and unpack if possible)
// handler accepts a directory, and returns a promise that resolves
// when we're done with it, at which point, cacache deletes it
//
// TODO: after cloning, create a tarball of the folder, and add to the cache
// with cacache.put.stream(), using a key that's deterministic based on the
// spec and repo, so that we don't ever clone the same thing multiple times.
#clone (handler, tarballOk = true) {
const o = { tmpPrefix: 'git-clone' }
const ref = this.resolvedSha || this.spec.gitCommittish
const h = this.spec.hosted
const resolved = this.resolved
// can be set manually to false to fall back to actual git clone
tarballOk = tarballOk &&
h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball
return cacache.tmp.withTmp(this.cache, o, async tmp => {
// if we're resolved, and have a tarball url, shell out to RemoteFetcher
if (tarballOk) {
const nameat = this.spec.name ? `${this.spec.name}@` : ''
return new RemoteFetcher(h.tarball({ noCommittish: false }), {
...this.opts,
allowGitIgnore: true,
pkgid: `git:${nameat}${this.resolved}`,
resolved: this.resolved,
integrity: null, // it'll always be different, if we have one
}).extract(tmp).then(() => handler(tmp), er => {
// fall back to ssh download if tarball fails
if (er.constructor.name.match(/^Http/)) {
return this.#clone(handler, false)
} else {
throw er
}
})
}
const sha = await (
h ? this.#cloneHosted(ref, tmp)
: this.#cloneRepo(this.spec.fetchSpec, ref, tmp)
)
this.resolvedSha = sha
if (!this.resolved) {
await this.#addGitSha(sha)
}
return handler(tmp)
})
}
// first try https, since that's faster and passphrase-less for
// public repos, and supports private repos when auth is provided.
// Fall back to SSH to support private repos
// NB: we always store the https url in resolved field if auth
// is present, otherwise ssh if the hosted type provides it
#cloneHosted (ref, tmp) {
const hosted = this.spec.hosted
return this.#cloneRepo(hosted.https({ noCommittish: true }), ref, tmp)
.catch(er => {
// Throw early since we know pathspec errors will fail again if retried
if (er instanceof git.errors.GitPathspecError) {
throw er
}
const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true })
// no fallthrough if we can't fall through or have https auth
if (!ssh || hosted.auth) {
throw er
}
return this.#cloneRepo(ssh, ref, tmp)
})
}
#cloneRepo (repo, ref, tmp) {
const { opts, spec } = this
return git.clone(repo, ref, tmp, { ...opts, spec })
}
manifest () {
if (this.package) {
return Promise.resolve(this.package)
}
return this.spec.hosted && this.resolved
? FileFetcher.prototype.manifest.apply(this)
: this.#clone(dir =>
this[_.readPackageJson](dir)
.then(mani => this.package = {
...mani,
_resolved: this.resolved,
_from: this.from,
}))
}
packument () {
return FileFetcher.prototype.packument.apply(this)
}
}
module.exports = GitFetcher
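
When a hosted spec carries no full sha, resolve() above lists the remote refs and pins the matching sha into a git+https URL. A network-touching sketch (npm/abbrev is only an example repository):

const pacote = require('pacote')

pacote.resolve('github:npm/abbrev')
  // prints something like git+https://github.com/npm/abbrev.git#<40-char sha>
  .then(resolved => console.log(resolved))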

node_modules/pacote/lib/index.js (generated, vendored, new file)

@@ -0,0 +1,23 @@
const { get } = require('./fetcher.js')
const GitFetcher = require('./git.js')
const RegistryFetcher = require('./registry.js')
const FileFetcher = require('./file.js')
const DirFetcher = require('./dir.js')
const RemoteFetcher = require('./remote.js')

const tarball = (spec, opts) => get(spec, opts).tarball()
tarball.stream = (spec, handler, opts) => get(spec, opts).tarballStream(handler)
tarball.file = (spec, dest, opts) => get(spec, opts).tarballFile(dest)

module.exports = {
  GitFetcher,
  RegistryFetcher,
  FileFetcher,
  DirFetcher,
  RemoteFetcher,
  resolve: (spec, opts) => get(spec, opts).resolve(),
  extract: (spec, dest, opts) => get(spec, opts).extract(dest),
  manifest: (spec, opts) => get(spec, opts).manifest(),
  packument: (spec, opts) => get(spec, opts).packument(),
  tarball,
}
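
Each export above is a thin wrapper that builds the right fetcher per call via get(). A quick tour, assuming network access and a writable './unpacked' directory:

const pacote = require('pacote')

pacote.manifest('abbrev@latest')
  .then(mani => console.log(mani.name, mani.version, mani._resolved))

pacote.extract('abbrev@1.1.1', './unpacked')
  .then(({ resolved, integrity }) => console.log(resolved, integrity))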

node_modules/pacote/lib/registry.js (generated, vendored, new file)

@@ -0,0 +1,369 @@
const crypto = require('node:crypto')
const PackageJson = require('@npmcli/package-json')
const pickManifest = require('npm-pick-manifest')
const ssri = require('ssri')
const npa = require('npm-package-arg')
const sigstore = require('sigstore')
const fetch = require('npm-registry-fetch')
const Fetcher = require('./fetcher.js')
const RemoteFetcher = require('./remote.js')
const pacoteVersion = require('../package.json').version
const removeTrailingSlashes = require('./util/trailing-slashes.js')
const _ = require('./util/protected.js')
// Corgis are cute. 🐕🐶
const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
const fullDoc = 'application/json'
// Some really old packages have no time field in their packument so we need a
// cutoff date.
const MISSING_TIME_CUTOFF = '2015-01-01T00:00:00.000Z'
class RegistryFetcher extends Fetcher {
#cacheKey
constructor (spec, opts) {
super(spec, opts)
// you usually don't want to fetch the same packument multiple times in
// the span of a given script or command, no matter how many pacote calls
// are made, so this lets us avoid doing that. It's only relevant for
// registry fetchers, because other types simulate their packument from
// the manifest, which they memoize on this.package, so it's very cheap
// already.
this.packumentCache = this.opts.packumentCache || null
this.registry = fetch.pickRegistry(spec, opts)
this.packumentUrl = `${removeTrailingSlashes(this.registry)}/${this.spec.escapedName}`
this.#cacheKey = `${this.fullMetadata ? 'full' : 'corgi'}:${this.packumentUrl}`
const parsed = new URL(this.registry)
const regKey = `//${parsed.host}${parsed.pathname}`
// unlike the nerf-darted auth keys, this one does *not* allow a mismatch
// of trailing slashes. It must match exactly.
if (this.opts[`${regKey}:_keys`]) {
this.registryKeys = this.opts[`${regKey}:_keys`]
}
// XXX pacote <=9 has some logic to ignore opts.resolved if
// the resolved URL doesn't go to the same registry.
// Consider reproducing that here, to throw away this.resolved
// in that case.
}
async resolve () {
// fetching the manifest sets resolved and (if present) integrity
await this.manifest()
if (!this.resolved) {
throw Object.assign(
new Error('Invalid package manifest: no `dist.tarball` field'),
{ package: this.spec.toString() }
)
}
return this.resolved
}
#headers () {
return {
// npm will override UA, but ensure that we always send *something*
'user-agent': this.opts.userAgent ||
`pacote/${pacoteVersion} node/${process.version}`,
...(this.opts.headers || {}),
'pacote-version': pacoteVersion,
'pacote-req-type': 'packument',
'pacote-pkg-id': `registry:${this.spec.name}`,
accept: this.fullMetadata ? fullDoc : corgiDoc,
}
}
async packument () {
// note this might be either an in-flight promise for a request,
// or the actual packument, but we never want to make more than
// one request at a time for the same thing regardless.
if (this.packumentCache?.has(this.#cacheKey)) {
return this.packumentCache.get(this.#cacheKey)
}
// npm-registry-fetch the packument
// set the appropriate header for corgis if fullMetadata isn't set
// return the res.json() promise
try {
const res = await fetch(this.packumentUrl, {
...this.opts,
headers: this.#headers(),
spec: this.spec,
// never check integrity for packuments themselves
integrity: null,
})
const packument = await res.json()
const contentLength = res.headers.get('content-length')
if (contentLength) {
packument._contentLength = Number(contentLength)
}
this.packumentCache?.set(this.#cacheKey, packument)
return packument
} catch (err) {
this.packumentCache?.delete(this.#cacheKey)
if (err.code !== 'E404' || this.fullMetadata) {
throw err
}
// possible that corgis are not supported by this registry
this.fullMetadata = true
return this.packument()
}
}
async manifest () {
if (this.package) {
return this.package
}
// When verifying signatures, we need to fetch the full/uncompressed
// packument to get publish time as this is not included in the
// corgi/compressed packument.
if (this.opts.verifySignatures) {
this.fullMetadata = true
}
const packument = await this.packument()
const steps = PackageJson.normalizeSteps.filter(s => s !== '_attributes')
const mani = await new PackageJson().fromContent(pickManifest(packument, this.spec.fetchSpec, {
...this.opts,
defaultTag: this.defaultTag,
before: this.before,
})).normalize({ steps }).then(p => p.content)
/* XXX add ETARGET and E403 revalidation of cached packuments here */
// add _time from packument if fetched with fullMetadata
const time = packument.time?.[mani.version]
if (time) {
mani._time = time
}
// add _resolved and _integrity from dist object
const { dist } = mani
if (dist) {
this.resolved = mani._resolved = dist.tarball
mani._from = this.from
const distIntegrity = dist.integrity ? ssri.parse(dist.integrity)
: dist.shasum ? ssri.fromHex(dist.shasum, 'sha1', { ...this.opts })
: null
if (distIntegrity) {
if (this.integrity && !this.integrity.match(distIntegrity)) {
// only bork if they have algos in common.
// otherwise we end up breaking if we have saved a sha512
// previously for the tarball, but the manifest only
// provides a sha1, which is possible for older publishes.
// Otherwise, this is almost certainly a case of holding it
// wrong, and will result in weird or insecure behavior
// later on when building package tree.
for (const algo of Object.keys(this.integrity)) {
if (distIntegrity[algo]) {
throw Object.assign(new Error(
`Integrity checksum failed when using ${algo}: ` +
`wanted ${this.integrity} but got ${distIntegrity}.`
), { code: 'EINTEGRITY' })
}
}
}
// made it this far, the integrity is worthwhile. accept it.
// the setter here will take care of merging it into what we already
// had.
this.integrity = distIntegrity
}
}
if (this.integrity) {
mani._integrity = String(this.integrity)
if (dist.signatures) {
if (this.opts.verifySignatures) {
// validate and throw on error, then set _signatures
const message = `${mani._id}:${mani._integrity}`
for (const signature of dist.signatures) {
const publicKey = this.registryKeys &&
this.registryKeys.filter(key => (key.keyid === signature.keyid))[0]
if (!publicKey) {
throw Object.assign(new Error(
`${mani._id} has a registry signature with keyid: ${signature.keyid} ` +
'but no corresponding public key can be found'
), { code: 'EMISSINGSIGNATUREKEY' })
}
const publishedTime = Date.parse(mani._time || MISSING_TIME_CUTOFF)
const validPublicKey = !publicKey.expires ||
publishedTime < Date.parse(publicKey.expires)
if (!validPublicKey) {
throw Object.assign(new Error(
`${mani._id} has a registry signature with keyid: ${signature.keyid} ` +
`but the corresponding public key has expired ${publicKey.expires}`
), { code: 'EEXPIREDSIGNATUREKEY' })
}
const verifier = crypto.createVerify('SHA256')
verifier.write(message)
verifier.end()
const valid = verifier.verify(
publicKey.pemkey,
signature.sig,
'base64'
)
if (!valid) {
throw Object.assign(new Error(
`${mani._id} has an invalid registry signature with ` +
`keyid: ${publicKey.keyid} and signature: ${signature.sig}`
), {
code: 'EINTEGRITYSIGNATURE',
keyid: publicKey.keyid,
signature: signature.sig,
resolved: mani._resolved,
integrity: mani._integrity,
})
}
}
mani._signatures = dist.signatures
} else {
mani._signatures = dist.signatures
}
}
if (dist.attestations) {
if (this.opts.verifyAttestations) {
// Always fetch attestations from the current registry host
const attestationsPath = new URL(dist.attestations.url).pathname
const attestationsUrl = removeTrailingSlashes(this.registry) + attestationsPath
const res = await fetch(attestationsUrl, {
...this.opts,
// disable integrity check for attestations json payload, we check the
// integrity in the verification steps below
integrity: null,
})
const { attestations } = await res.json()
const bundles = attestations.map(({ predicateType, bundle }) => {
const statement = JSON.parse(
Buffer.from(bundle.dsseEnvelope.payload, 'base64').toString('utf8')
)
const keyid = bundle.dsseEnvelope.signatures[0].keyid
const signature = bundle.dsseEnvelope.signatures[0].sig
return {
predicateType,
bundle,
statement,
keyid,
signature,
}
})
const attestationKeyIds = bundles.map((b) => b.keyid).filter((k) => !!k)
const attestationRegistryKeys = (this.registryKeys || [])
.filter(key => attestationKeyIds.includes(key.keyid))
if (!attestationRegistryKeys.length) {
throw Object.assign(new Error(
`${mani._id} has attestations but no corresponding public key(s) can be found`
), { code: 'EMISSINGSIGNATUREKEY' })
}
for (const { predicateType, bundle, keyid, signature, statement } of bundles) {
const publicKey = attestationRegistryKeys.find(key => key.keyid === keyid)
// Publish attestations have a keyid set and a valid public key must be found
if (keyid) {
if (!publicKey) {
throw Object.assign(new Error(
`${mani._id} has attestations with keyid: ${keyid} ` +
'but no corresponding public key can be found'
), { code: 'EMISSINGSIGNATUREKEY' })
}
const integratedTime = new Date(
Number(
bundle.verificationMaterial.tlogEntries[0].integratedTime
) * 1000
)
const validPublicKey = !publicKey.expires ||
(integratedTime < Date.parse(publicKey.expires))
if (!validPublicKey) {
throw Object.assign(new Error(
`${mani._id} has attestations with keyid: ${keyid} ` +
`but the corresponding public key has expired ${publicKey.expires}`
), { code: 'EEXPIREDSIGNATUREKEY' })
}
}
const subject = {
name: statement.subject[0].name,
sha512: statement.subject[0].digest.sha512,
}
// Only type 'version' can be turned into a PURL
const purl = this.spec.type === 'version' ? npa.toPurl(this.spec) : this.spec
// Verify the statement subject matches the package, version
if (subject.name !== purl) {
throw Object.assign(new Error(
`${mani._id} package name and version (PURL): ${purl} ` +
`doesn't match what was signed: ${subject.name}`
), { code: 'EATTESTATIONSUBJECT' })
}
// Verify the statement subject matches the tarball integrity
const integrityHexDigest = ssri.parse(this.integrity).hexDigest()
if (subject.sha512 !== integrityHexDigest) {
throw Object.assign(new Error(
`${mani._id} package integrity (hex digest): ` +
`${integrityHexDigest} ` +
`doesn't match what was signed: ${subject.sha512}`
), { code: 'EATTESTATIONSUBJECT' })
}
try {
// Provenance attestations are signed with a signing certificate
// (including the key) so we don't need to return a public key.
//
// Publish attestations are signed with a keyid so we need to
// specify a public key from the keys endpoint: `registry-host.tld/-/npm/v1/keys`
const options = {
tufCachePath: this.tufCache,
tufForceCache: true,
keySelector: publicKey ? () => publicKey.pemkey : undefined,
}
await sigstore.verify(bundle, options)
} catch (e) {
throw Object.assign(new Error(
`${mani._id} failed to verify attestation: ${e.message}`
), {
code: 'EATTESTATIONVERIFY',
predicateType,
keyid,
signature,
resolved: mani._resolved,
integrity: mani._integrity,
})
}
}
mani._attestations = dist.attestations
} else {
mani._attestations = dist.attestations
}
}
}
this.package = mani
return this.package
}
[_.tarballFromResolved] () {
// we use a RemoteFetcher to get the actual tarball stream
return new RemoteFetcher(this.resolved, {
...this.opts,
resolved: this.resolved,
pkgid: `registry:${this.spec.name}@${this.resolved}`,
})[_.tarballFromResolved]()
}
get types () {
return [
'tag',
'version',
'range',
]
}
}
module.exports = RegistryFetcher
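
Signature verification only engages when verifySignatures is set and the registry's public keys arrive under the '//<host><path>:_keys' option that the constructor reads. A hedged sketch with placeholder key material — a real run needs the actual key from the registry's /-/npm/v1/keys endpoint:

const pacote = require('pacote')

pacote.manifest('abbrev@1.1.1', {
  verifySignatures: true,
  '//registry.npmjs.org/:_keys': [{
    keyid: 'SHA256:placeholder-key-id',       // placeholder, not a real keyid
    pemkey: '-----BEGIN PUBLIC KEY-----...',  // placeholder PEM body
    expires: null,
  }],
}).then(mani => console.log(mani._signatures))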

node_modules/pacote/lib/remote.js (generated, vendored, new file)

@@ -0,0 +1,89 @@
const fetch = require('npm-registry-fetch')
const { Minipass } = require('minipass')
const Fetcher = require('./fetcher.js')
const FileFetcher = require('./file.js')
const _ = require('./util/protected.js')
const pacoteVersion = require('../package.json').version
class RemoteFetcher extends Fetcher {
constructor (spec, opts) {
super(spec, opts)
this.resolved = this.spec.fetchSpec
const resolvedURL = new URL(this.resolved)
if (this.replaceRegistryHost !== 'never'
&& (this.replaceRegistryHost === 'always'
|| this.replaceRegistryHost === resolvedURL.host)) {
this.resolved = new URL(resolvedURL.pathname, this.registry).href
}
// nam is a fermented pork sausage that is good to eat
const nameat = this.spec.name ? `${this.spec.name}@` : ''
this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}`
}
// Don't need to cache tarball fetches in pacote, because make-fetch-happen
// will write into cacache anyway.
get [_.cacheFetches] () {
return false
}
[_.tarballFromResolved] () {
const stream = new Minipass()
stream.hasIntegrityEmitter = true
const fetchOpts = {
...this.opts,
headers: this.#headers(),
spec: this.spec,
integrity: this.integrity,
algorithms: [this.pickIntegrityAlgorithm()],
}
// eslint-disable-next-line promise/always-return
fetch(this.resolved, fetchOpts).then(res => {
res.body.on('error',
/* istanbul ignore next - exceedingly rare and hard to simulate */
er => stream.emit('error', er)
)
res.body.on('integrity', i => {
this.integrity = i
stream.emit('integrity', i)
})
res.body.pipe(stream)
}).catch(er => stream.emit('error', er))
return stream
}
#headers () {
return {
// npm will override this, but ensure that we always send *something*
'user-agent': this.opts.userAgent ||
`pacote/${pacoteVersion} node/${process.version}`,
...(this.opts.headers || {}),
'pacote-version': pacoteVersion,
'pacote-req-type': 'tarball',
'pacote-pkg-id': this.pkgid,
...(this.integrity ? { 'pacote-integrity': String(this.integrity) }
: {}),
...(this.opts.headers || {}),
}
}
get types () {
return ['remote']
}
// getting a packument and/or manifest is the same as with a file: spec.
// unpack the tarball stream, and then read from the package.json file.
packument () {
return FileFetcher.prototype.packument.apply(this)
}
manifest () {
return FileFetcher.prototype.manifest.apply(this)
}
}
module.exports = RemoteFetcher
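
Any direct tarball URL is a remote: spec, and extraction just streams the response body with the integrity events proxied as above. A sketch (the URL follows the npm registry's conventional tarball path for an example package):

const pacote = require('pacote')

pacote.extract('https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz', './from-remote')
  .then(({ from, integrity }) => console.log(from, integrity))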

node_modules/pacote/lib/util/add-git-sha.js (generated, vendored, new file)

@@ -0,0 +1,15 @@
// add a sha to a git remote url spec
const addGitSha = (spec, sha) => {
  if (spec.hosted) {
    const h = spec.hosted
    const opt = { noCommittish: true }
    const base = h.https && h.auth ? h.https(opt) : h.shortcut(opt)
    return `${base}#${sha}`
  } else {
    // don't use new URL for this, because it doesn't handle scp urls
    return spec.rawSpec.replace(/#.*$/, '') + `#${sha}`
  }
}
module.exports = addGitSha
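
An illustrative call, assuming npm-package-arg is available and that a deep require into pacote's lib directory works (neither is published API):

const npa = require('npm-package-arg')
const addGitSha = require('pacote/lib/util/add-git-sha.js')

const sha = 'deadbeef'.repeat(5) // 40 hex chars, standing in for a real commit
console.log(addGitSha(npa('github:npm/abbrev'), sha))
// -> github:npm/abbrev#deadbeef... (the https form is used only when auth is embedded)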

node_modules/pacote/lib/util/cache-dir.js (generated, vendored, new file)

@@ -0,0 +1,15 @@
const { resolve } = require('node:path')
const { tmpdir, homedir } = require('node:os')
module.exports = (fakePlatform = false) => {
  const temp = tmpdir()
  const uidOrPid = process.getuid ? process.getuid() : process.pid
  const home = homedir() || resolve(temp, 'npm-' + uidOrPid)
  const platform = fakePlatform || process.platform
  const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm'
  const cacheRoot = (platform === 'win32' && process.env.LOCALAPPDATA) || home
  return {
    cacache: resolve(cacheRoot, cacheExtra, '_cacache'),
    tufcache: resolve(cacheRoot, cacheExtra, '_tuf'),
  }
}
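
The fakePlatform parameter exists so the win32 branch can be exercised anywhere. A sketch, again assuming a deep require works:

const cacheDir = require('pacote/lib/util/cache-dir.js')

console.log(cacheDir())        // e.g. { cacache: '/home/me/.npm/_cacache', tufcache: '/home/me/.npm/_tuf' }
console.log(cacheDir('win32')) // forces the npm-cache layout, honoring LOCALAPPDATA when set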

node_modules/pacote/lib/util/is-package-bin.js (generated, vendored, new file)

@@ -0,0 +1,25 @@
// Function to determine whether a path is in the package.bin set.
// Used to prevent issues when people publish a package from a
// windows machine, and then install with --no-bin-links.
//
// Note: this is not possible in remote or file fetchers, since
// we don't have the manifest until AFTER we've unpacked. But the
// main use case is registry fetching with git a distant second,
// so that's an acceptable edge case to not handle.
const binObj = (name, bin) =>
  typeof bin === 'string' ? { [name]: bin } : bin

const hasBin = (pkg, path) => {
  const bin = binObj(pkg.name, pkg.bin)
  const p = path.replace(/^[^\\/]*\//, '')
  for (const kv of Object.entries(bin)) {
    if (kv[1] === p) {
      return true
    }
  }
  return false
}

module.exports = (pkg, path) =>
  pkg && pkg.bin ? hasBin(pkg, path) : false
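
The matching rule strips the leading path segment (the 'package/' prefix inside npm tarballs) before comparing against the bin map. A sketch (deep require assumed):

const isPackageBin = require('pacote/lib/util/is-package-bin.js')

const pkg = { name: 'demo', bin: 'cli.js' } // string form normalizes to { demo: 'cli.js' }
console.log(isPackageBin(pkg, 'package/cli.js'))     // true
console.log(isPackageBin(pkg, 'package/lib/cli.js')) // false: not listed in bin
console.log(isPackageBin(null, 'package/cli.js'))    // false: no pkg, no bin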

node_modules/pacote/lib/util/npm.js (generated, vendored, new file)

@@ -0,0 +1,14 @@
// run an npm command
const spawn = require('@npmcli/promise-spawn')
module.exports = (npmBin, npmCommand, cwd, env, extra) => {
  const isJS = npmBin.endsWith('.js')
  const cmd = isJS ? process.execPath : npmBin
  const args = (isJS ? [npmBin] : []).concat(npmCommand)
  // when installing to run the `prepare` script for a git dep, we need
  // to ensure that we don't run into a cycle of checking out packages
  // in temp directories. this lets us link previously-seen repos that
  // are also being prepared.
  return spawn(cmd, args, { cwd, env }, extra)
}
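
A sketch of how GitFetcher drives this helper during git-dep preparation — every value below is illustrative, and the extra object's fields are attached to the spawn result or error (that is how the 'git dep preparation failed' message surfaces):

const npm = require('pacote/lib/util/npm.js') // deep require, an assumption

npm('npm', ['install', '--force', '--no-audit'], '/tmp/git-clone-xyz',
  { ...process.env, _PACOTE_NO_PREPARE_: '' },
  { message: 'git dep preparation failed' })
  .then(res => console.log('exit code', res.code))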

node_modules/pacote/lib/util/protected.js (generated, vendored, new file)

@@ -0,0 +1,5 @@
module.exports = {
  cacheFetches: Symbol.for('pacote.Fetcher._cacheFetches'),
  readPackageJson: Symbol.for('package.Fetcher._readPackageJson'),
  tarballFromResolved: Symbol.for('pacote.Fetcher._tarballFromResolved'),
}
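
These symbols act as shared "protected" member keys: only code that can name the symbol can touch the member. Note that Symbol.for uses the global symbol registry, so this is privacy by convention rather than enforcement — a tiny illustration of the pattern:

const _ = { tarballFromResolved: Symbol.for('pacote.Fetcher._tarballFromResolved') }

class Demo {
  [_.tarballFromResolved] () { return 'reachable only by symbol holders' }
}
console.log(new Demo()[_.tarballFromResolved]())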

node_modules/pacote/lib/util/tar-create-options.js (generated, vendored, new file)

@@ -0,0 +1,31 @@
const isPackageBin = require('./is-package-bin.js')
const tarCreateOptions = manifest => ({
  cwd: manifest._resolved,
  prefix: 'package/',
  portable: true,
  gzip: {
    // forcing the level to 9 seems to avoid some
    // platform specific optimizations that cause
    // integrity mismatch errors due to differing
    // end results after compression
    level: 9,
  },

  // ensure that package bins are always executable
  // Note that npm-packlist is already filtering out
  // anything that is not a regular file, ignored by
  // .npmignore or package.json "files", etc.
  filter: (path, stat) => {
    if (isPackageBin(manifest, path)) {
      stat.mode |= 0o111
    }
    return true
  },

  // Provide a specific date in the 1980s for the benefit of zip,
  // which is confounded by files dated at the Unix epoch 0.
  mtime: new Date('1985-10-26T08:15:00.000Z'),
})
module.exports = tarCreateOptions
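
The fixed gzip level and hard-coded 1985 mtime make packing reproducible: identical file contents always produce a byte-identical tarball. A sketch of packing with these options (paths and manifest are hypothetical, deep require assumed):

const tar = require('tar')
const { createWriteStream } = require('node:fs')
const tarCreateOptions = require('pacote/lib/util/tar-create-options.js')

const manifest = { _resolved: './my-pkg', name: 'demo', bin: { demo: 'cli.js' } }
tar.c(tarCreateOptions(manifest), ['package.json', 'cli.js'])
  .pipe(createWriteStream('demo.tgz'))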

node_modules/pacote/lib/util/trailing-slashes.js (generated, vendored, new file)

@@ -0,0 +1,10 @@
const removeTrailingSlashes = (input) => {
  // strip with a loop rather than a trailing-slash regex, which linters
  // flag as potentially vulnerable to ReDoS
  let output = input
  while (output.endsWith('/')) {
    output = output.slice(0, -1)
  }
  return output
}
module.exports = removeTrailingSlashes
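
Behavior sketch (deep require assumed):

const removeTrailingSlashes = require('pacote/lib/util/trailing-slashes.js')

console.log(removeTrailingSlashes('https://registry.npmjs.org///')) // https://registry.npmjs.org
console.log(removeTrailingSlashes('no-slashes'))                    // unchanged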