Deployed the page to Github Pages.
This commit is contained in:
parent
1d79754e93
commit
2c89899458
62797 changed files with 6551425 additions and 15279 deletions
40
node_modules/@npmcli/agent/README.md
generated
vendored
Normal file
40
node_modules/@npmcli/agent/README.md
generated
vendored
Normal file
|
@ -0,0 +1,40 @@
|
|||
## @npmcli/agent
|
||||
|
||||
A pair of Agent implementations for nodejs that provide consistent keep-alives, granular timeouts, dns caching, and proxy support.
|
||||
|
||||
### Usage
|
||||
|
||||
```js
|
||||
const { getAgent, HttpAgent } = require('@npmcli/agent')
|
||||
const fetch = require('minipass-fetch')
|
||||
|
||||
const main = async () => {
|
||||
// if you know what agent you need, you can create one directly
|
||||
const agent = new HttpAgent(agentOptions)
|
||||
// or you can use the getAgent helper, it will determine and create an Agent
|
||||
// instance for you as well as reuse that agent for new requests as appropriate
|
||||
const agent = getAgent('https://registry.npmjs.org/npm', agentOptions)
|
||||
// minipass-fetch is just an example, this will work for any http client that
|
||||
// supports node's Agents
|
||||
const res = await fetch('https://registry.npmjs.org/npm', { agent })
|
||||
}
|
||||
|
||||
main()
|
||||
```
|
||||
|
||||
### Options
|
||||
|
||||
All options supported by the node Agent implementations are supported here, see [the docs](https://nodejs.org/api/http.html#new-agentoptions) for those.
|
||||
|
||||
Options that have been added by this module include:
|
||||
|
||||
- `family`: what tcp family to use, can be `4` for IPv4, `6` for IPv6 or `0` for both.
|
||||
- `proxy`: a URL to a supported proxy, currently supports `HTTP CONNECT` based http/https proxies as well as socks4 and 5.
|
||||
- `dns`: configuration for the built-in dns cache
|
||||
- `ttl`: how long (in milliseconds) to keep cached dns entries, defaults to `5 * 60 * 100 (5 minutes)`
|
||||
- `lookup`: optional function to override how dns lookups are performed, defaults to `require('dns').lookup`
|
||||
- `timeouts`: a set of granular timeouts, all default to `0`
|
||||
- `connection`: time between initiating connection and actually connecting
|
||||
- `idle`: time between data packets (if a top level `timeout` is provided, it will be copied here)
|
||||
- `response`: time between sending a request and receiving a response
|
||||
- `transfer`: time between starting to receive a request and consuming the response fully
|
206
node_modules/@npmcli/agent/lib/agents.js
generated
vendored
Normal file
206
node_modules/@npmcli/agent/lib/agents.js
generated
vendored
Normal file
|
@ -0,0 +1,206 @@
|
|||
'use strict'
|
||||
|
||||
const net = require('net')
|
||||
const tls = require('tls')
|
||||
const { once } = require('events')
|
||||
const timers = require('timers/promises')
|
||||
const { normalizeOptions, cacheOptions } = require('./options')
|
||||
const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js')
|
||||
const Errors = require('./errors.js')
|
||||
const { Agent: AgentBase } = require('agent-base')
|
||||
|
||||
module.exports = class Agent extends AgentBase {
|
||||
#options
|
||||
#timeouts
|
||||
#proxy
|
||||
#noProxy
|
||||
#ProxyAgent
|
||||
|
||||
constructor (options = {}) {
|
||||
const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options)
|
||||
|
||||
super(normalizedOptions)
|
||||
|
||||
this.#options = normalizedOptions
|
||||
this.#timeouts = timeouts
|
||||
|
||||
if (proxy) {
|
||||
this.#proxy = new URL(proxy)
|
||||
this.#noProxy = noProxy
|
||||
this.#ProxyAgent = getProxyAgent(proxy)
|
||||
}
|
||||
}
|
||||
|
||||
get proxy () {
|
||||
return this.#proxy ? { url: this.#proxy } : {}
|
||||
}
|
||||
|
||||
#getProxy (options) {
|
||||
if (!this.#proxy) {
|
||||
return
|
||||
}
|
||||
|
||||
const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, {
|
||||
proxy: this.#proxy,
|
||||
noProxy: this.#noProxy,
|
||||
})
|
||||
|
||||
if (!proxy) {
|
||||
return
|
||||
}
|
||||
|
||||
const cacheKey = cacheOptions({
|
||||
...options,
|
||||
...this.#options,
|
||||
timeouts: this.#timeouts,
|
||||
proxy,
|
||||
})
|
||||
|
||||
if (proxyCache.has(cacheKey)) {
|
||||
return proxyCache.get(cacheKey)
|
||||
}
|
||||
|
||||
let ProxyAgent = this.#ProxyAgent
|
||||
if (Array.isArray(ProxyAgent)) {
|
||||
ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0]
|
||||
}
|
||||
|
||||
const proxyAgent = new ProxyAgent(proxy, {
|
||||
...this.#options,
|
||||
socketOptions: { family: this.#options.family },
|
||||
})
|
||||
proxyCache.set(cacheKey, proxyAgent)
|
||||
|
||||
return proxyAgent
|
||||
}
|
||||
|
||||
// takes an array of promises and races them against the connection timeout
|
||||
// which will throw the necessary error if it is hit. This will return the
|
||||
// result of the promise race.
|
||||
async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) {
|
||||
if (timeout) {
|
||||
const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal })
|
||||
.then(() => {
|
||||
throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`)
|
||||
}).catch((err) => {
|
||||
if (err.name === 'AbortError') {
|
||||
return
|
||||
}
|
||||
throw err
|
||||
})
|
||||
promises.push(connectionTimeout)
|
||||
}
|
||||
|
||||
let result
|
||||
try {
|
||||
result = await Promise.race(promises)
|
||||
ac.abort()
|
||||
} catch (err) {
|
||||
ac.abort()
|
||||
throw err
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
async connect (request, options) {
|
||||
// if the connection does not have its own lookup function
|
||||
// set, then use the one from our options
|
||||
options.lookup ??= this.#options.lookup
|
||||
|
||||
let socket
|
||||
let timeout = this.#timeouts.connection
|
||||
const isSecureEndpoint = this.isSecureEndpoint(options)
|
||||
|
||||
const proxy = this.#getProxy(options)
|
||||
if (proxy) {
|
||||
// some of the proxies will wait for the socket to fully connect before
|
||||
// returning so we have to await this while also racing it against the
|
||||
// connection timeout.
|
||||
const start = Date.now()
|
||||
socket = await this.#timeoutConnection({
|
||||
options,
|
||||
timeout,
|
||||
promises: [proxy.connect(request, options)],
|
||||
})
|
||||
// see how much time proxy.connect took and subtract it from
|
||||
// the timeout
|
||||
if (timeout) {
|
||||
timeout = timeout - (Date.now() - start)
|
||||
}
|
||||
} else {
|
||||
socket = (isSecureEndpoint ? tls : net).connect(options)
|
||||
}
|
||||
|
||||
socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs)
|
||||
socket.setNoDelay(this.keepAlive)
|
||||
|
||||
const abortController = new AbortController()
|
||||
const { signal } = abortController
|
||||
|
||||
const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting']
|
||||
? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal })
|
||||
: Promise.resolve()
|
||||
|
||||
await this.#timeoutConnection({
|
||||
options,
|
||||
timeout,
|
||||
promises: [
|
||||
connectPromise,
|
||||
once(socket, 'error', { signal }).then((err) => {
|
||||
throw err[0]
|
||||
}),
|
||||
],
|
||||
}, abortController)
|
||||
|
||||
if (this.#timeouts.idle) {
|
||||
socket.setTimeout(this.#timeouts.idle, () => {
|
||||
socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`))
|
||||
})
|
||||
}
|
||||
|
||||
return socket
|
||||
}
|
||||
|
||||
addRequest (request, options) {
|
||||
const proxy = this.#getProxy(options)
|
||||
// it would be better to call proxy.addRequest here but this causes the
|
||||
// http-proxy-agent to call its super.addRequest which causes the request
|
||||
// to be added to the agent twice. since we only support 3 agents
|
||||
// currently (see the required agents in proxy.js) we have manually
|
||||
// checked that the only public methods we need to call are called in the
|
||||
// next block. this could change in the future and presumably we would get
|
||||
// failing tests until we have properly called the necessary methods on
|
||||
// each of our proxy agents
|
||||
if (proxy?.setRequestProps) {
|
||||
proxy.setRequestProps(request, options)
|
||||
}
|
||||
|
||||
request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close')
|
||||
|
||||
if (this.#timeouts.response) {
|
||||
let responseTimeout
|
||||
request.once('finish', () => {
|
||||
setTimeout(() => {
|
||||
request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy))
|
||||
}, this.#timeouts.response)
|
||||
})
|
||||
request.once('response', () => {
|
||||
clearTimeout(responseTimeout)
|
||||
})
|
||||
}
|
||||
|
||||
if (this.#timeouts.transfer) {
|
||||
let transferTimeout
|
||||
request.once('response', (res) => {
|
||||
setTimeout(() => {
|
||||
res.destroy(new Errors.TransferTimeoutError(request, this.#proxy))
|
||||
}, this.#timeouts.transfer)
|
||||
res.once('close', () => {
|
||||
clearTimeout(transferTimeout)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
return super.addRequest(request, options)
|
||||
}
|
||||
}
|
53
node_modules/@npmcli/agent/lib/dns.js
generated
vendored
Normal file
53
node_modules/@npmcli/agent/lib/dns.js
generated
vendored
Normal file
|
@ -0,0 +1,53 @@
|
|||
'use strict'
|
||||
|
||||
const { LRUCache } = require('lru-cache')
|
||||
const dns = require('dns')
|
||||
|
||||
// this is a factory so that each request can have its own opts (i.e. ttl)
|
||||
// while still sharing the cache across all requests
|
||||
const cache = new LRUCache({ max: 50 })
|
||||
|
||||
const getOptions = ({
|
||||
family = 0,
|
||||
hints = dns.ADDRCONFIG,
|
||||
all = false,
|
||||
verbatim = undefined,
|
||||
ttl = 5 * 60 * 1000,
|
||||
lookup = dns.lookup,
|
||||
}) => ({
|
||||
// hints and lookup are returned since both are top level properties to (net|tls).connect
|
||||
hints,
|
||||
lookup: (hostname, ...args) => {
|
||||
const callback = args.pop() // callback is always last arg
|
||||
const lookupOptions = args[0] ?? {}
|
||||
|
||||
const options = {
|
||||
family,
|
||||
hints,
|
||||
all,
|
||||
verbatim,
|
||||
...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions),
|
||||
}
|
||||
|
||||
const key = JSON.stringify({ hostname, ...options })
|
||||
|
||||
if (cache.has(key)) {
|
||||
const cached = cache.get(key)
|
||||
return process.nextTick(callback, null, ...cached)
|
||||
}
|
||||
|
||||
lookup(hostname, options, (err, ...result) => {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
|
||||
cache.set(key, result, { ttl })
|
||||
return callback(null, ...result)
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
module.exports = {
|
||||
cache,
|
||||
getOptions,
|
||||
}
|
61
node_modules/@npmcli/agent/lib/errors.js
generated
vendored
Normal file
61
node_modules/@npmcli/agent/lib/errors.js
generated
vendored
Normal file
|
@ -0,0 +1,61 @@
|
|||
'use strict'
|
||||
|
||||
class InvalidProxyProtocolError extends Error {
|
||||
constructor (url) {
|
||||
super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``)
|
||||
this.code = 'EINVALIDPROXY'
|
||||
this.proxy = url
|
||||
}
|
||||
}
|
||||
|
||||
class ConnectionTimeoutError extends Error {
|
||||
constructor (host) {
|
||||
super(`Timeout connecting to host \`${host}\``)
|
||||
this.code = 'ECONNECTIONTIMEOUT'
|
||||
this.host = host
|
||||
}
|
||||
}
|
||||
|
||||
class IdleTimeoutError extends Error {
|
||||
constructor (host) {
|
||||
super(`Idle timeout reached for host \`${host}\``)
|
||||
this.code = 'EIDLETIMEOUT'
|
||||
this.host = host
|
||||
}
|
||||
}
|
||||
|
||||
class ResponseTimeoutError extends Error {
|
||||
constructor (request, proxy) {
|
||||
let msg = 'Response timeout '
|
||||
if (proxy) {
|
||||
msg += `from proxy \`${proxy.host}\` `
|
||||
}
|
||||
msg += `connecting to host \`${request.host}\``
|
||||
super(msg)
|
||||
this.code = 'ERESPONSETIMEOUT'
|
||||
this.proxy = proxy
|
||||
this.request = request
|
||||
}
|
||||
}
|
||||
|
||||
class TransferTimeoutError extends Error {
|
||||
constructor (request, proxy) {
|
||||
let msg = 'Transfer timeout '
|
||||
if (proxy) {
|
||||
msg += `from proxy \`${proxy.host}\` `
|
||||
}
|
||||
msg += `for \`${request.host}\``
|
||||
super(msg)
|
||||
this.code = 'ETRANSFERTIMEOUT'
|
||||
this.proxy = proxy
|
||||
this.request = request
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
InvalidProxyProtocolError,
|
||||
ConnectionTimeoutError,
|
||||
IdleTimeoutError,
|
||||
ResponseTimeoutError,
|
||||
TransferTimeoutError,
|
||||
}
|
56
node_modules/@npmcli/agent/lib/index.js
generated
vendored
Normal file
56
node_modules/@npmcli/agent/lib/index.js
generated
vendored
Normal file
|
@ -0,0 +1,56 @@
|
|||
'use strict'
|
||||
|
||||
const { LRUCache } = require('lru-cache')
|
||||
const { normalizeOptions, cacheOptions } = require('./options')
|
||||
const { getProxy, proxyCache } = require('./proxy.js')
|
||||
const dns = require('./dns.js')
|
||||
const Agent = require('./agents.js')
|
||||
|
||||
const agentCache = new LRUCache({ max: 20 })
|
||||
|
||||
const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => {
|
||||
// false has meaning so this can't be a simple truthiness check
|
||||
if (agent != null) {
|
||||
return agent
|
||||
}
|
||||
|
||||
url = new URL(url)
|
||||
|
||||
const proxyForUrl = getProxy(url, { proxy, noProxy })
|
||||
const normalizedOptions = {
|
||||
...normalizeOptions(options),
|
||||
proxy: proxyForUrl,
|
||||
}
|
||||
|
||||
const cacheKey = cacheOptions({
|
||||
...normalizedOptions,
|
||||
secureEndpoint: url.protocol === 'https:',
|
||||
})
|
||||
|
||||
if (agentCache.has(cacheKey)) {
|
||||
return agentCache.get(cacheKey)
|
||||
}
|
||||
|
||||
const newAgent = new Agent(normalizedOptions)
|
||||
agentCache.set(cacheKey, newAgent)
|
||||
|
||||
return newAgent
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getAgent,
|
||||
Agent,
|
||||
// these are exported for backwards compatability
|
||||
HttpAgent: Agent,
|
||||
HttpsAgent: Agent,
|
||||
cache: {
|
||||
proxy: proxyCache,
|
||||
agent: agentCache,
|
||||
dns: dns.cache,
|
||||
clear: () => {
|
||||
proxyCache.clear()
|
||||
agentCache.clear()
|
||||
dns.cache.clear()
|
||||
},
|
||||
},
|
||||
}
|
86
node_modules/@npmcli/agent/lib/options.js
generated
vendored
Normal file
86
node_modules/@npmcli/agent/lib/options.js
generated
vendored
Normal file
|
@ -0,0 +1,86 @@
|
|||
'use strict'
|
||||
|
||||
const dns = require('./dns')
|
||||
|
||||
const normalizeOptions = (opts) => {
|
||||
const family = parseInt(opts.family ?? '0', 10)
|
||||
const keepAlive = opts.keepAlive ?? true
|
||||
|
||||
const normalized = {
|
||||
// nodejs http agent options. these are all the defaults
|
||||
// but kept here to increase the likelihood of cache hits
|
||||
// https://nodejs.org/api/http.html#new-agentoptions
|
||||
keepAliveMsecs: keepAlive ? 1000 : undefined,
|
||||
maxSockets: opts.maxSockets ?? 15,
|
||||
maxTotalSockets: Infinity,
|
||||
maxFreeSockets: keepAlive ? 256 : undefined,
|
||||
scheduling: 'fifo',
|
||||
// then spread the rest of the options
|
||||
...opts,
|
||||
// we already set these to their defaults that we want
|
||||
family,
|
||||
keepAlive,
|
||||
// our custom timeout options
|
||||
timeouts: {
|
||||
// the standard timeout option is mapped to our idle timeout
|
||||
// and then deleted below
|
||||
idle: opts.timeout ?? 0,
|
||||
connection: 0,
|
||||
response: 0,
|
||||
transfer: 0,
|
||||
...opts.timeouts,
|
||||
},
|
||||
// get the dns options that go at the top level of socket connection
|
||||
...dns.getOptions({ family, ...opts.dns }),
|
||||
}
|
||||
|
||||
// remove timeout since we already used it to set our own idle timeout
|
||||
delete normalized.timeout
|
||||
|
||||
return normalized
|
||||
}
|
||||
|
||||
const createKey = (obj) => {
|
||||
let key = ''
|
||||
const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0])
|
||||
for (let [k, v] of sorted) {
|
||||
if (v == null) {
|
||||
v = 'null'
|
||||
} else if (v instanceof URL) {
|
||||
v = v.toString()
|
||||
} else if (typeof v === 'object') {
|
||||
v = createKey(v)
|
||||
}
|
||||
key += `${k}:${v}:`
|
||||
}
|
||||
return key
|
||||
}
|
||||
|
||||
const cacheOptions = ({ secureEndpoint, ...options }) => createKey({
|
||||
secureEndpoint: !!secureEndpoint,
|
||||
// socket connect options
|
||||
family: options.family,
|
||||
hints: options.hints,
|
||||
localAddress: options.localAddress,
|
||||
// tls specific connect options
|
||||
strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false,
|
||||
ca: secureEndpoint ? options.ca : null,
|
||||
cert: secureEndpoint ? options.cert : null,
|
||||
key: secureEndpoint ? options.key : null,
|
||||
// http agent options
|
||||
keepAlive: options.keepAlive,
|
||||
keepAliveMsecs: options.keepAliveMsecs,
|
||||
maxSockets: options.maxSockets,
|
||||
maxTotalSockets: options.maxTotalSockets,
|
||||
maxFreeSockets: options.maxFreeSockets,
|
||||
scheduling: options.scheduling,
|
||||
// timeout options
|
||||
timeouts: options.timeouts,
|
||||
// proxy
|
||||
proxy: options.proxy,
|
||||
})
|
||||
|
||||
module.exports = {
|
||||
normalizeOptions,
|
||||
cacheOptions,
|
||||
}
|
88
node_modules/@npmcli/agent/lib/proxy.js
generated
vendored
Normal file
88
node_modules/@npmcli/agent/lib/proxy.js
generated
vendored
Normal file
|
@ -0,0 +1,88 @@
|
|||
'use strict'
|
||||
|
||||
const { HttpProxyAgent } = require('http-proxy-agent')
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent')
|
||||
const { SocksProxyAgent } = require('socks-proxy-agent')
|
||||
const { LRUCache } = require('lru-cache')
|
||||
const { InvalidProxyProtocolError } = require('./errors.js')
|
||||
|
||||
const PROXY_CACHE = new LRUCache({ max: 20 })
|
||||
|
||||
const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols)
|
||||
|
||||
const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy'])
|
||||
|
||||
const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => {
|
||||
key = key.toLowerCase()
|
||||
if (PROXY_ENV_KEYS.has(key)) {
|
||||
acc[key] = value
|
||||
}
|
||||
return acc
|
||||
}, {})
|
||||
|
||||
const getProxyAgent = (url) => {
|
||||
url = new URL(url)
|
||||
|
||||
const protocol = url.protocol.slice(0, -1)
|
||||
if (SOCKS_PROTOCOLS.has(protocol)) {
|
||||
return SocksProxyAgent
|
||||
}
|
||||
if (protocol === 'https' || protocol === 'http') {
|
||||
return [HttpProxyAgent, HttpsProxyAgent]
|
||||
}
|
||||
|
||||
throw new InvalidProxyProtocolError(url)
|
||||
}
|
||||
|
||||
const isNoProxy = (url, noProxy) => {
|
||||
if (typeof noProxy === 'string') {
|
||||
noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean)
|
||||
}
|
||||
|
||||
if (!noProxy || !noProxy.length) {
|
||||
return false
|
||||
}
|
||||
|
||||
const hostSegments = url.hostname.split('.').reverse()
|
||||
|
||||
return noProxy.some((no) => {
|
||||
const noSegments = no.split('.').filter(Boolean).reverse()
|
||||
if (!noSegments.length) {
|
||||
return false
|
||||
}
|
||||
|
||||
for (let i = 0; i < noSegments.length; i++) {
|
||||
if (hostSegments[i] !== noSegments[i]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
const getProxy = (url, { proxy, noProxy }) => {
|
||||
url = new URL(url)
|
||||
|
||||
if (!proxy) {
|
||||
proxy = url.protocol === 'https:'
|
||||
? PROXY_ENV.https_proxy
|
||||
: PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy
|
||||
}
|
||||
|
||||
if (!noProxy) {
|
||||
noProxy = PROXY_ENV.no_proxy
|
||||
}
|
||||
|
||||
if (!proxy || isNoProxy(url, noProxy)) {
|
||||
return null
|
||||
}
|
||||
|
||||
return new URL(proxy)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getProxyAgent,
|
||||
getProxy,
|
||||
proxyCache: PROXY_CACHE,
|
||||
}
|
15
node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE
generated
vendored
Normal file
15
node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,15 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
331
node_modules/@npmcli/agent/node_modules/lru-cache/README.md
generated
vendored
Normal file
331
node_modules/@npmcli/agent/node_modules/lru-cache/README.md
generated
vendored
Normal file
|
@ -0,0 +1,331 @@
|
|||
# lru-cache
|
||||
|
||||
A cache object that deletes the least-recently-used items.
|
||||
|
||||
Specify a max number of the most recently used items that you
|
||||
want to keep, and this cache will keep that many of the most
|
||||
recently accessed items.
|
||||
|
||||
This is not primarily a TTL cache, and does not make strong TTL
|
||||
guarantees. There is no preemptive pruning of expired items by
|
||||
default, but you _may_ set a TTL on the cache or on a single
|
||||
`set`. If you do so, it will treat expired items as missing, and
|
||||
delete them when fetched. If you are more interested in TTL
|
||||
caching than LRU caching, check out
|
||||
[@isaacs/ttlcache](http://npm.im/@isaacs/ttlcache).
|
||||
|
||||
As of version 7, this is one of the most performant LRU
|
||||
implementations available in JavaScript, and supports a wide
|
||||
diversity of use cases. However, note that using some of the
|
||||
features will necessarily impact performance, by causing the
|
||||
cache to have to do more work. See the "Performance" section
|
||||
below.
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
npm install lru-cache --save
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
// hybrid module, either works
|
||||
import { LRUCache } from 'lru-cache'
|
||||
// or:
|
||||
const { LRUCache } = require('lru-cache')
|
||||
// or in minified form for web browsers:
|
||||
import { LRUCache } from 'http://unpkg.com/lru-cache@9/dist/mjs/index.min.mjs'
|
||||
|
||||
// At least one of 'max', 'ttl', or 'maxSize' is required, to prevent
|
||||
// unsafe unbounded storage.
|
||||
//
|
||||
// In most cases, it's best to specify a max for performance, so all
|
||||
// the required memory allocation is done up-front.
|
||||
//
|
||||
// All the other options are optional, see the sections below for
|
||||
// documentation on what each one does. Most of them can be
|
||||
// overridden for specific items in get()/set()
|
||||
const options = {
|
||||
max: 500,
|
||||
|
||||
// for use with tracking overall storage size
|
||||
maxSize: 5000,
|
||||
sizeCalculation: (value, key) => {
|
||||
return 1
|
||||
},
|
||||
|
||||
// for use when you need to clean up something when objects
|
||||
// are evicted from the cache
|
||||
dispose: (value, key) => {
|
||||
freeFromMemoryOrWhatever(value)
|
||||
},
|
||||
|
||||
// how long to live in ms
|
||||
ttl: 1000 * 60 * 5,
|
||||
|
||||
// return stale items before removing from cache?
|
||||
allowStale: false,
|
||||
|
||||
updateAgeOnGet: false,
|
||||
updateAgeOnHas: false,
|
||||
|
||||
// async method to use for cache.fetch(), for
|
||||
// stale-while-revalidate type of behavior
|
||||
fetchMethod: async (
|
||||
key,
|
||||
staleValue,
|
||||
{ options, signal, context }
|
||||
) => {},
|
||||
}
|
||||
|
||||
const cache = new LRUCache(options)
|
||||
|
||||
cache.set('key', 'value')
|
||||
cache.get('key') // "value"
|
||||
|
||||
// non-string keys ARE fully supported
|
||||
// but note that it must be THE SAME object, not
|
||||
// just a JSON-equivalent object.
|
||||
var someObject = { a: 1 }
|
||||
cache.set(someObject, 'a value')
|
||||
// Object keys are not toString()-ed
|
||||
cache.set('[object Object]', 'a different value')
|
||||
assert.equal(cache.get(someObject), 'a value')
|
||||
// A similar object with same keys/values won't work,
|
||||
// because it's a different object identity
|
||||
assert.equal(cache.get({ a: 1 }), undefined)
|
||||
|
||||
cache.clear() // empty the cache
|
||||
```
|
||||
|
||||
If you put more stuff in the cache, then less recently used items
|
||||
will fall out. That's what an LRU cache is.
|
||||
|
||||
For full description of the API and all options, please see [the
|
||||
LRUCache typedocs](https://isaacs.github.io/node-lru-cache/)
|
||||
|
||||
## Storage Bounds Safety
|
||||
|
||||
This implementation aims to be as flexible as possible, within
|
||||
the limits of safe memory consumption and optimal performance.
|
||||
|
||||
At initial object creation, storage is allocated for `max` items.
|
||||
If `max` is set to zero, then some performance is lost, and item
|
||||
count is unbounded. Either `maxSize` or `ttl` _must_ be set if
|
||||
`max` is not specified.
|
||||
|
||||
If `maxSize` is set, then this creates a safe limit on the
|
||||
maximum storage consumed, but without the performance benefits of
|
||||
pre-allocation. When `maxSize` is set, every item _must_ provide
|
||||
a size, either via the `sizeCalculation` method provided to the
|
||||
constructor, or via a `size` or `sizeCalculation` option provided
|
||||
to `cache.set()`. The size of every item _must_ be a positive
|
||||
integer.
|
||||
|
||||
If neither `max` nor `maxSize` are set, then `ttl` tracking must
|
||||
be enabled. Note that, even when tracking item `ttl`, items are
|
||||
_not_ preemptively deleted when they become stale, unless
|
||||
`ttlAutopurge` is enabled. Instead, they are only purged the
|
||||
next time the key is requested. Thus, if `ttlAutopurge`, `max`,
|
||||
and `maxSize` are all not set, then the cache will potentially
|
||||
grow unbounded.
|
||||
|
||||
In this case, a warning is printed to standard error. Future
|
||||
versions may require the use of `ttlAutopurge` if `max` and
|
||||
`maxSize` are not specified.
|
||||
|
||||
If you truly wish to use a cache that is bound _only_ by TTL
|
||||
expiration, consider using a `Map` object, and calling
|
||||
`setTimeout` to delete entries when they expire. It will perform
|
||||
much better than an LRU cache.
|
||||
|
||||
Here is an implementation you may use, under the same
|
||||
[license](./LICENSE) as this package:
|
||||
|
||||
```js
|
||||
// a storage-unbounded ttl cache that is not an lru-cache
|
||||
const cache = {
|
||||
data: new Map(),
|
||||
timers: new Map(),
|
||||
set: (k, v, ttl) => {
|
||||
if (cache.timers.has(k)) {
|
||||
clearTimeout(cache.timers.get(k))
|
||||
}
|
||||
cache.timers.set(
|
||||
k,
|
||||
setTimeout(() => cache.delete(k), ttl)
|
||||
)
|
||||
cache.data.set(k, v)
|
||||
},
|
||||
get: k => cache.data.get(k),
|
||||
has: k => cache.data.has(k),
|
||||
delete: k => {
|
||||
if (cache.timers.has(k)) {
|
||||
clearTimeout(cache.timers.get(k))
|
||||
}
|
||||
cache.timers.delete(k)
|
||||
return cache.data.delete(k)
|
||||
},
|
||||
clear: () => {
|
||||
cache.data.clear()
|
||||
for (const v of cache.timers.values()) {
|
||||
clearTimeout(v)
|
||||
}
|
||||
cache.timers.clear()
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
If that isn't to your liking, check out
|
||||
[@isaacs/ttlcache](http://npm.im/@isaacs/ttlcache).
|
||||
|
||||
## Storing Undefined Values
|
||||
|
||||
This cache never stores undefined values, as `undefined` is used
|
||||
internally in a few places to indicate that a key is not in the
|
||||
cache.
|
||||
|
||||
You may call `cache.set(key, undefined)`, but this is just
|
||||
an alias for `cache.delete(key)`. Note that this has the effect
|
||||
that `cache.has(key)` will return _false_ after setting it to
|
||||
undefined.
|
||||
|
||||
```js
|
||||
cache.set(myKey, undefined)
|
||||
cache.has(myKey) // false!
|
||||
```
|
||||
|
||||
If you need to track `undefined` values, and still note that the
|
||||
key is in the cache, an easy workaround is to use a sigil object
|
||||
of your own.
|
||||
|
||||
```js
|
||||
import { LRUCache } from 'lru-cache'
|
||||
const undefinedValue = Symbol('undefined')
|
||||
const cache = new LRUCache(...)
|
||||
const mySet = (key, value) =>
|
||||
cache.set(key, value === undefined ? undefinedValue : value)
|
||||
const myGet = (key, value) => {
|
||||
const v = cache.get(key)
|
||||
return v === undefinedValue ? undefined : v
|
||||
}
|
||||
```
|
||||
|
||||
## Performance
|
||||
|
||||
As of January 2022, version 7 of this library is one of the most
|
||||
performant LRU cache implementations in JavaScript.
|
||||
|
||||
Benchmarks can be extremely difficult to get right. In
|
||||
particular, the performance of set/get/delete operations on
|
||||
objects will vary _wildly_ depending on the type of key used. V8
|
||||
is highly optimized for objects with keys that are short strings,
|
||||
especially integer numeric strings. Thus any benchmark which
|
||||
tests _solely_ using numbers as keys will tend to find that an
|
||||
object-based approach performs the best.
|
||||
|
||||
Note that coercing _anything_ to strings to use as object keys is
|
||||
unsafe, unless you can be 100% certain that no other type of
|
||||
value will be used. For example:
|
||||
|
||||
```js
|
||||
const myCache = {}
|
||||
const set = (k, v) => (myCache[k] = v)
|
||||
const get = k => myCache[k]
|
||||
|
||||
set({}, 'please hang onto this for me')
|
||||
set('[object Object]', 'oopsie')
|
||||
```
|
||||
|
||||
Also beware of "Just So" stories regarding performance. Garbage
|
||||
collection of large (especially: deep) object graphs can be
|
||||
incredibly costly, with several "tipping points" where it
|
||||
increases exponentially. As a result, putting that off until
|
||||
later can make it much worse, and less predictable. If a library
|
||||
performs well, but only in a scenario where the object graph is
|
||||
kept shallow, then that won't help you if you are using large
|
||||
objects as keys.
|
||||
|
||||
In general, when attempting to use a library to improve
|
||||
performance (such as a cache like this one), it's best to choose
|
||||
an option that will perform well in the sorts of scenarios where
|
||||
you'll actually use it.
|
||||
|
||||
This library is optimized for repeated gets and minimizing
|
||||
eviction time, since that is the expected need of a LRU. Set
|
||||
operations are somewhat slower on average than a few other
|
||||
options, in part because of that optimization. It is assumed
|
||||
that you'll be caching some costly operation, ideally as rarely
|
||||
as possible, so optimizing set over get would be unwise.
|
||||
|
||||
If performance matters to you:
|
||||
|
||||
1. If it's at all possible to use small integer values as keys,
|
||||
and you can guarantee that no other types of values will be
|
||||
used as keys, then do that, and use a cache such as
|
||||
[lru-fast](https://npmjs.com/package/lru-fast), or
|
||||
[mnemonist's
|
||||
LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache)
|
||||
which uses an Object as its data store.
|
||||
|
||||
2. Failing that, if at all possible, use short non-numeric
|
||||
strings (ie, less than 256 characters) as your keys, and use
|
||||
[mnemonist's
|
||||
LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache).
|
||||
|
||||
3. If the types of your keys will be anything else, especially
|
||||
long strings, strings that look like floats, objects, or some
|
||||
mix of types, or if you aren't sure, then this library will
|
||||
work well for you.
|
||||
|
||||
If you do not need the features that this library provides
|
||||
(like asynchronous fetching, a variety of TTL staleness
|
||||
options, and so on), then [mnemonist's
|
||||
LRUMap](https://yomguithereal.github.io/mnemonist/lru-map) is
|
||||
a very good option, and just slightly faster than this module
|
||||
(since it does considerably less).
|
||||
|
||||
4. Do not use a `dispose` function, size tracking, or especially
|
||||
ttl behavior, unless absolutely needed. These features are
|
||||
convenient, and necessary in some use cases, and every attempt
|
||||
has been made to make the performance impact minimal, but it
|
||||
isn't nothing.
|
||||
|
||||
## Breaking Changes in Version 7
|
||||
|
||||
This library changed to a different algorithm and internal data
|
||||
structure in version 7, yielding significantly better
|
||||
performance, albeit with some subtle changes as a result.
|
||||
|
||||
If you were relying on the internals of LRUCache in version 6 or
|
||||
before, it probably will not work in version 7 and above.
|
||||
|
||||
## Breaking Changes in Version 8
|
||||
|
||||
- The `fetchContext` option was renamed to `context`, and may no
|
||||
longer be set on the cache instance itself.
|
||||
- Rewritten in TypeScript, so pretty much all the types moved
|
||||
around a lot.
|
||||
- The AbortController/AbortSignal polyfill was removed. For this
|
||||
reason, **Node version 16.14.0 or higher is now required**.
|
||||
- Internal properties were moved to actual private class
|
||||
properties.
|
||||
- Keys and values must not be `null` or `undefined`.
|
||||
- Minified export available at `'lru-cache/min'`, for both CJS
|
||||
and MJS builds.
|
||||
|
||||
## Breaking Changes in Version 9
|
||||
|
||||
- Named export only, no default export.
|
||||
- AbortController polyfill returned, albeit with a warning when
|
||||
used.
|
||||
|
||||
## Breaking Changes in Version 10
|
||||
|
||||
- `cache.fetch()` return type is now `Promise<V | undefined>`
|
||||
instead of `Promise<V | void>`. This is an irrelevant change
|
||||
practically speaking, but can require changes for TypeScript
|
||||
users.
|
||||
|
||||
For more info, see the [change log](CHANGELOG.md).
|
1277
node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.d.ts
generated
vendored
Normal file
1277
node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
1
node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.d.ts.map
generated
vendored
Normal file
1
node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.d.ts.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1546
node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.js
generated
vendored
Normal file
1546
node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
1
node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.js.map
generated
vendored
Normal file
1
node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.min.js
generated
vendored
Normal file
2
node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.min.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
7
node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.min.js.map
generated
vendored
Normal file
7
node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.min.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/package.json
generated
vendored
Normal file
3
node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/package.json
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"type": "commonjs"
|
||||
}
|
1277
node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.d.ts
generated
vendored
Normal file
1277
node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
1
node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.d.ts.map
generated
vendored
Normal file
1
node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.d.ts.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1542
node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.js
generated
vendored
Normal file
1542
node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
1
node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.js.map
generated
vendored
Normal file
1
node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.min.js
generated
vendored
Normal file
2
node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.min.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
7
node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.min.js.map
generated
vendored
Normal file
7
node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.min.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/package.json
generated
vendored
Normal file
3
node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/package.json
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"type": "module"
|
||||
}
|
116
node_modules/@npmcli/agent/node_modules/lru-cache/package.json
generated
vendored
Normal file
116
node_modules/@npmcli/agent/node_modules/lru-cache/package.json
generated
vendored
Normal file
|
@ -0,0 +1,116 @@
|
|||
{
|
||||
"name": "lru-cache",
|
||||
"publishConfig": {
|
||||
"tag": "legacy-v10"
|
||||
},
|
||||
"description": "A cache object that deletes the least-recently-used items.",
|
||||
"version": "10.4.3",
|
||||
"author": "Isaac Z. Schlueter <i@izs.me>",
|
||||
"keywords": [
|
||||
"mru",
|
||||
"lru",
|
||||
"cache"
|
||||
],
|
||||
"sideEffects": false,
|
||||
"scripts": {
|
||||
"build": "npm run prepare",
|
||||
"prepare": "tshy && bash fixup.sh",
|
||||
"pretest": "npm run prepare",
|
||||
"presnap": "npm run prepare",
|
||||
"test": "tap",
|
||||
"snap": "tap",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"prepublishOnly": "git push origin --follow-tags",
|
||||
"format": "prettier --write .",
|
||||
"typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
|
||||
"benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
|
||||
"prebenchmark": "npm run prepare",
|
||||
"benchmark": "make -C benchmark",
|
||||
"preprofile": "npm run prepare",
|
||||
"profile": "make -C benchmark profile"
|
||||
},
|
||||
"main": "./dist/commonjs/index.js",
|
||||
"types": "./dist/commonjs/index.d.ts",
|
||||
"tshy": {
|
||||
"exports": {
|
||||
".": "./src/index.ts",
|
||||
"./min": {
|
||||
"import": {
|
||||
"types": "./dist/esm/index.d.ts",
|
||||
"default": "./dist/esm/index.min.js"
|
||||
},
|
||||
"require": {
|
||||
"types": "./dist/commonjs/index.d.ts",
|
||||
"default": "./dist/commonjs/index.min.js"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/isaacs/node-lru-cache.git"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^20.2.5",
|
||||
"@types/tap": "^15.0.6",
|
||||
"benchmark": "^2.1.4",
|
||||
"esbuild": "^0.17.11",
|
||||
"eslint-config-prettier": "^8.5.0",
|
||||
"marked": "^4.2.12",
|
||||
"mkdirp": "^2.1.5",
|
||||
"prettier": "^2.6.2",
|
||||
"tap": "^20.0.3",
|
||||
"tshy": "^2.0.0",
|
||||
"tslib": "^2.4.0",
|
||||
"typedoc": "^0.25.3",
|
||||
"typescript": "^5.2.2"
|
||||
},
|
||||
"license": "ISC",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"prettier": {
|
||||
"semi": false,
|
||||
"printWidth": 70,
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"singleQuote": true,
|
||||
"jsxSingleQuote": false,
|
||||
"bracketSameLine": true,
|
||||
"arrowParens": "avoid",
|
||||
"endOfLine": "lf"
|
||||
},
|
||||
"tap": {
|
||||
"node-arg": [
|
||||
"--expose-gc"
|
||||
],
|
||||
"plugin": [
|
||||
"@tapjs/clock"
|
||||
]
|
||||
},
|
||||
"exports": {
|
||||
".": {
|
||||
"import": {
|
||||
"types": "./dist/esm/index.d.ts",
|
||||
"default": "./dist/esm/index.js"
|
||||
},
|
||||
"require": {
|
||||
"types": "./dist/commonjs/index.d.ts",
|
||||
"default": "./dist/commonjs/index.js"
|
||||
}
|
||||
},
|
||||
"./min": {
|
||||
"import": {
|
||||
"types": "./dist/esm/index.d.ts",
|
||||
"default": "./dist/esm/index.min.js"
|
||||
},
|
||||
"require": {
|
||||
"types": "./dist/commonjs/index.d.ts",
|
||||
"default": "./dist/commonjs/index.min.js"
|
||||
}
|
||||
}
|
||||
},
|
||||
"type": "module",
|
||||
"module": "./dist/esm/index.js"
|
||||
}
|
60
node_modules/@npmcli/agent/package.json
generated
vendored
Normal file
60
node_modules/@npmcli/agent/package.json
generated
vendored
Normal file
|
@ -0,0 +1,60 @@
|
|||
{
|
||||
"name": "@npmcli/agent",
|
||||
"version": "2.2.2",
|
||||
"description": "the http/https agent used by the npm cli",
|
||||
"main": "lib/index.js",
|
||||
"scripts": {
|
||||
"gencerts": "bash scripts/create-cert.sh",
|
||||
"test": "tap",
|
||||
"lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
|
||||
"postlint": "template-oss-check",
|
||||
"template-oss-apply": "template-oss-apply --force",
|
||||
"lintfix": "npm run lint -- --fix",
|
||||
"snap": "tap",
|
||||
"posttest": "npm run lint"
|
||||
},
|
||||
"author": "GitHub Inc.",
|
||||
"license": "ISC",
|
||||
"bugs": {
|
||||
"url": "https://github.com/npm/agent/issues"
|
||||
},
|
||||
"homepage": "https://github.com/npm/agent#readme",
|
||||
"files": [
|
||||
"bin/",
|
||||
"lib/"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^16.14.0 || >=18.0.0"
|
||||
},
|
||||
"templateOSS": {
|
||||
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
|
||||
"version": "4.21.3",
|
||||
"publish": "true"
|
||||
},
|
||||
"dependencies": {
|
||||
"agent-base": "^7.1.0",
|
||||
"http-proxy-agent": "^7.0.0",
|
||||
"https-proxy-agent": "^7.0.1",
|
||||
"lru-cache": "^10.0.1",
|
||||
"socks-proxy-agent": "^8.0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@npmcli/eslint-config": "^4.0.0",
|
||||
"@npmcli/template-oss": "4.21.3",
|
||||
"minipass-fetch": "^3.0.3",
|
||||
"nock": "^13.2.7",
|
||||
"semver": "^7.5.4",
|
||||
"simple-socks": "^3.1.0",
|
||||
"tap": "^16.3.0"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/npm/agent.git"
|
||||
},
|
||||
"tap": {
|
||||
"nyc-arg": [
|
||||
"--exclude",
|
||||
"tap-snapshots/**"
|
||||
]
|
||||
}
|
||||
}
|
20
node_modules/@npmcli/fs/LICENSE.md
generated
vendored
Normal file
20
node_modules/@npmcli/fs/LICENSE.md
generated
vendored
Normal file
|
@ -0,0 +1,20 @@
|
|||
<!-- This file is automatically added by @npmcli/template-oss. Do not edit. -->
|
||||
|
||||
ISC License
|
||||
|
||||
Copyright npm, Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this
|
||||
software for any purpose with or without fee is hereby
|
||||
granted, provided that the above copyright notice and this
|
||||
permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
|
||||
WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
|
||||
EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
|
||||
WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
|
||||
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
|
||||
USE OR PERFORMANCE OF THIS SOFTWARE.
|
97
node_modules/@npmcli/fs/README.md
generated
vendored
Normal file
97
node_modules/@npmcli/fs/README.md
generated
vendored
Normal file
|
@ -0,0 +1,97 @@
|
|||
# @npmcli/fs
|
||||
|
||||
polyfills, and extensions, of the core `fs` module.
|
||||
|
||||
## Features
|
||||
|
||||
- `fs.cp` polyfill for node < 16.7.0
|
||||
- `fs.withTempDir` added
|
||||
- `fs.readdirScoped` added
|
||||
- `fs.moveFile` added
|
||||
|
||||
## `fs.withTempDir(root, fn, options) -> Promise`
|
||||
|
||||
### Parameters
|
||||
|
||||
- `root`: the directory in which to create the temporary directory
|
||||
- `fn`: a function that will be called with the path to the temporary directory
|
||||
- `options`
|
||||
- `tmpPrefix`: a prefix to be used in the generated directory name
|
||||
|
||||
### Usage
|
||||
|
||||
The `withTempDir` function creates a temporary directory, runs the provided
|
||||
function (`fn`), then removes the temporary directory and resolves or rejects
|
||||
based on the result of `fn`.
|
||||
|
||||
```js
|
||||
const fs = require('@npmcli/fs')
|
||||
const os = require('os')
|
||||
|
||||
// this function will be called with the full path to the temporary directory
|
||||
// it is called with `await` behind the scenes, so can be async if desired.
|
||||
const myFunction = async (tempPath) => {
|
||||
return 'done!'
|
||||
}
|
||||
|
||||
const main = async () => {
|
||||
const result = await fs.withTempDir(os.tmpdir(), myFunction)
|
||||
// result === 'done!'
|
||||
}
|
||||
|
||||
main()
|
||||
```
|
||||
|
||||
## `fs.readdirScoped(root) -> Promise`
|
||||
|
||||
### Parameters
|
||||
|
||||
- `root`: the directory to read
|
||||
|
||||
### Usage
|
||||
|
||||
Like `fs.readdir` but handling `@org/module` dirs as if they were
|
||||
a single entry.
|
||||
|
||||
```javascript
|
||||
const { readdirScoped } = require('@npmcli/fs')
|
||||
const entries = await readdirScoped('node_modules')
|
||||
// entries will be something like: ['a', '@org/foo', '@org/bar']
|
||||
```
|
||||
|
||||
## `fs.moveFile(source, dest, options) -> Promise`
|
||||
|
||||
A fork of [move-file](https://github.com/sindresorhus/move-file) with
|
||||
support for Common JS.
|
||||
|
||||
### Highlights
|
||||
|
||||
- Promise API.
|
||||
- Supports moving a file across partitions and devices.
|
||||
- Optionally prevent overwriting an existing file.
|
||||
- Creates non-existent destination directories for you.
|
||||
- Automatically recurses when source is a directory.
|
||||
|
||||
### Parameters
|
||||
|
||||
- `source`: File, or directory, you want to move.
|
||||
- `dest`: Where you want the file or directory moved.
|
||||
- `options`
|
||||
- `overwrite` (`boolean`, default: `true`): Overwrite existing destination file(s).
|
||||
|
||||
### Usage
|
||||
|
||||
The built-in
|
||||
[`fs.rename()`](https://nodejs.org/api/fs.html#fs_fs_rename_oldpath_newpath_callback)
|
||||
is just a JavaScript wrapper for the C `rename(2)` function, which doesn't
|
||||
support moving files across partitions or devices. This module is what you
|
||||
would have expected `fs.rename()` to be.
|
||||
|
||||
```js
|
||||
const { moveFile } = require('@npmcli/fs');
|
||||
|
||||
(async () => {
|
||||
await moveFile('source/unicorn.png', 'destination/unicorn.png');
|
||||
console.log('The file has been moved');
|
||||
})();
|
||||
```
|
20
node_modules/@npmcli/fs/lib/common/get-options.js
generated
vendored
Normal file
20
node_modules/@npmcli/fs/lib/common/get-options.js
generated
vendored
Normal file
|
@ -0,0 +1,20 @@
|
|||
// given an input that may or may not be an object, return an object that has
|
||||
// a copy of every defined property listed in 'copy'. if the input is not an
|
||||
// object, assign it to the property named by 'wrap'
|
||||
const getOptions = (input, { copy, wrap }) => {
|
||||
const result = {}
|
||||
|
||||
if (input && typeof input === 'object') {
|
||||
for (const prop of copy) {
|
||||
if (input[prop] !== undefined) {
|
||||
result[prop] = input[prop]
|
||||
}
|
||||
}
|
||||
} else {
|
||||
result[wrap] = input
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
module.exports = getOptions
|
9
node_modules/@npmcli/fs/lib/common/node.js
generated
vendored
Normal file
9
node_modules/@npmcli/fs/lib/common/node.js
generated
vendored
Normal file
|
@ -0,0 +1,9 @@
|
|||
const semver = require('semver')
|
||||
|
||||
const satisfies = (range) => {
|
||||
return semver.satisfies(process.version, range, { includePrerelease: true })
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
satisfies,
|
||||
}
|
15
node_modules/@npmcli/fs/lib/cp/LICENSE
generated
vendored
Normal file
15
node_modules/@npmcli/fs/lib/cp/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,15 @@
|
|||
(The MIT License)
|
||||
|
||||
Copyright (c) 2011-2017 JP Richardson
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
|
||||
(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
|
||||
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
|
||||
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
|
||||
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
||||
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
129
node_modules/@npmcli/fs/lib/cp/errors.js
generated
vendored
Normal file
129
node_modules/@npmcli/fs/lib/cp/errors.js
generated
vendored
Normal file
|
@ -0,0 +1,129 @@
|
|||
'use strict'
|
||||
const { inspect } = require('util')
|
||||
|
||||
// adapted from node's internal/errors
|
||||
// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js
|
||||
|
||||
// close copy of node's internal SystemError class.
|
||||
class SystemError {
|
||||
constructor (code, prefix, context) {
|
||||
// XXX context.code is undefined in all constructors used in cp/polyfill
|
||||
// that may be a bug copied from node, maybe the constructor should use
|
||||
// `code` not `errno`? nodejs/node#41104
|
||||
let message = `${prefix}: ${context.syscall} returned ` +
|
||||
`${context.code} (${context.message})`
|
||||
|
||||
if (context.path !== undefined) {
|
||||
message += ` ${context.path}`
|
||||
}
|
||||
if (context.dest !== undefined) {
|
||||
message += ` => ${context.dest}`
|
||||
}
|
||||
|
||||
this.code = code
|
||||
Object.defineProperties(this, {
|
||||
name: {
|
||||
value: 'SystemError',
|
||||
enumerable: false,
|
||||
writable: true,
|
||||
configurable: true,
|
||||
},
|
||||
message: {
|
||||
value: message,
|
||||
enumerable: false,
|
||||
writable: true,
|
||||
configurable: true,
|
||||
},
|
||||
info: {
|
||||
value: context,
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: false,
|
||||
},
|
||||
errno: {
|
||||
get () {
|
||||
return context.errno
|
||||
},
|
||||
set (value) {
|
||||
context.errno = value
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
},
|
||||
syscall: {
|
||||
get () {
|
||||
return context.syscall
|
||||
},
|
||||
set (value) {
|
||||
context.syscall = value
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
},
|
||||
})
|
||||
|
||||
if (context.path !== undefined) {
|
||||
Object.defineProperty(this, 'path', {
|
||||
get () {
|
||||
return context.path
|
||||
},
|
||||
set (value) {
|
||||
context.path = value
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
})
|
||||
}
|
||||
|
||||
if (context.dest !== undefined) {
|
||||
Object.defineProperty(this, 'dest', {
|
||||
get () {
|
||||
return context.dest
|
||||
},
|
||||
set (value) {
|
||||
context.dest = value
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
toString () {
|
||||
return `${this.name} [${this.code}]: ${this.message}`
|
||||
}
|
||||
|
||||
[Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) {
|
||||
return inspect(this, {
|
||||
...ctx,
|
||||
getters: true,
|
||||
customInspect: false,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function E (code, message) {
|
||||
module.exports[code] = class NodeError extends SystemError {
|
||||
constructor (ctx) {
|
||||
super(code, message, ctx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory')
|
||||
E('ERR_FS_CP_EEXIST', 'Target already exists')
|
||||
E('ERR_FS_CP_EINVAL', 'Invalid src or dest')
|
||||
E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe')
|
||||
E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory')
|
||||
E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file')
|
||||
E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self')
|
||||
E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type')
|
||||
E('ERR_FS_EISDIR', 'Path is a directory')
|
||||
|
||||
module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error {
|
||||
constructor (name, expected, actual) {
|
||||
super()
|
||||
this.code = 'ERR_INVALID_ARG_TYPE'
|
||||
this.message = `The ${name} argument must be ${expected}. Received ${typeof actual}`
|
||||
}
|
||||
}
|
22
node_modules/@npmcli/fs/lib/cp/index.js
generated
vendored
Normal file
22
node_modules/@npmcli/fs/lib/cp/index.js
generated
vendored
Normal file
|
@ -0,0 +1,22 @@
|
|||
const fs = require('fs/promises')
|
||||
const getOptions = require('../common/get-options.js')
|
||||
const node = require('../common/node.js')
|
||||
const polyfill = require('./polyfill.js')
|
||||
|
||||
// node 16.7.0 added fs.cp
|
||||
const useNative = node.satisfies('>=16.7.0')
|
||||
|
||||
const cp = async (src, dest, opts) => {
|
||||
const options = getOptions(opts, {
|
||||
copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'],
|
||||
})
|
||||
|
||||
// the polyfill is tested separately from this module, no need to hack
|
||||
// process.version to try to trigger it just for coverage
|
||||
// istanbul ignore next
|
||||
return useNative
|
||||
? fs.cp(src, dest, options)
|
||||
: polyfill(src, dest, options)
|
||||
}
|
||||
|
||||
module.exports = cp
|
428
node_modules/@npmcli/fs/lib/cp/polyfill.js
generated
vendored
Normal file
428
node_modules/@npmcli/fs/lib/cp/polyfill.js
generated
vendored
Normal file
|
@ -0,0 +1,428 @@
|
|||
// this file is a modified version of the code in node 17.2.0
|
||||
// which is, in turn, a modified version of the fs-extra module on npm
|
||||
// node core changes:
|
||||
// - Use of the assert module has been replaced with core's error system.
|
||||
// - All code related to the glob dependency has been removed.
|
||||
// - Bring your own custom fs module is not currently supported.
|
||||
// - Some basic code cleanup.
|
||||
// changes here:
|
||||
// - remove all callback related code
|
||||
// - drop sync support
|
||||
// - change assertions back to non-internal methods (see options.js)
|
||||
// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
ERR_FS_CP_DIR_TO_NON_DIR,
|
||||
ERR_FS_CP_EEXIST,
|
||||
ERR_FS_CP_EINVAL,
|
||||
ERR_FS_CP_FIFO_PIPE,
|
||||
ERR_FS_CP_NON_DIR_TO_DIR,
|
||||
ERR_FS_CP_SOCKET,
|
||||
ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY,
|
||||
ERR_FS_CP_UNKNOWN,
|
||||
ERR_FS_EISDIR,
|
||||
ERR_INVALID_ARG_TYPE,
|
||||
} = require('./errors.js')
|
||||
const {
|
||||
constants: {
|
||||
errno: {
|
||||
EEXIST,
|
||||
EISDIR,
|
||||
EINVAL,
|
||||
ENOTDIR,
|
||||
},
|
||||
},
|
||||
} = require('os')
|
||||
const {
|
||||
chmod,
|
||||
copyFile,
|
||||
lstat,
|
||||
mkdir,
|
||||
readdir,
|
||||
readlink,
|
||||
stat,
|
||||
symlink,
|
||||
unlink,
|
||||
utimes,
|
||||
} = require('fs/promises')
|
||||
const {
|
||||
dirname,
|
||||
isAbsolute,
|
||||
join,
|
||||
parse,
|
||||
resolve,
|
||||
sep,
|
||||
toNamespacedPath,
|
||||
} = require('path')
|
||||
const { fileURLToPath } = require('url')
|
||||
|
||||
const defaultOptions = {
|
||||
dereference: false,
|
||||
errorOnExist: false,
|
||||
filter: undefined,
|
||||
force: true,
|
||||
preserveTimestamps: false,
|
||||
recursive: false,
|
||||
}
|
||||
|
||||
async function cp (src, dest, opts) {
|
||||
if (opts != null && typeof opts !== 'object') {
|
||||
throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts)
|
||||
}
|
||||
return cpFn(
|
||||
toNamespacedPath(getValidatedPath(src)),
|
||||
toNamespacedPath(getValidatedPath(dest)),
|
||||
{ ...defaultOptions, ...opts })
|
||||
}
|
||||
|
||||
function getValidatedPath (fileURLOrPath) {
|
||||
const path = fileURLOrPath != null && fileURLOrPath.href
|
||||
&& fileURLOrPath.origin
|
||||
? fileURLToPath(fileURLOrPath)
|
||||
: fileURLOrPath
|
||||
return path
|
||||
}
|
||||
|
||||
async function cpFn (src, dest, opts) {
|
||||
// Warn about using preserveTimestamps on 32-bit node
|
||||
// istanbul ignore next
|
||||
if (opts.preserveTimestamps && process.arch === 'ia32') {
|
||||
const warning = 'Using the preserveTimestamps option in 32-bit ' +
|
||||
'node is not recommended'
|
||||
process.emitWarning(warning, 'TimestampPrecisionWarning')
|
||||
}
|
||||
const stats = await checkPaths(src, dest, opts)
|
||||
const { srcStat, destStat } = stats
|
||||
await checkParentPaths(src, srcStat, dest)
|
||||
if (opts.filter) {
|
||||
return handleFilter(checkParentDir, destStat, src, dest, opts)
|
||||
}
|
||||
return checkParentDir(destStat, src, dest, opts)
|
||||
}
|
||||
|
||||
async function checkPaths (src, dest, opts) {
|
||||
const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts)
|
||||
if (destStat) {
|
||||
if (areIdentical(srcStat, destStat)) {
|
||||
throw new ERR_FS_CP_EINVAL({
|
||||
message: 'src and dest cannot be the same',
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: EINVAL,
|
||||
})
|
||||
}
|
||||
if (srcStat.isDirectory() && !destStat.isDirectory()) {
|
||||
throw new ERR_FS_CP_DIR_TO_NON_DIR({
|
||||
message: `cannot overwrite directory ${src} ` +
|
||||
`with non-directory ${dest}`,
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: EISDIR,
|
||||
})
|
||||
}
|
||||
if (!srcStat.isDirectory() && destStat.isDirectory()) {
|
||||
throw new ERR_FS_CP_NON_DIR_TO_DIR({
|
||||
message: `cannot overwrite non-directory ${src} ` +
|
||||
`with directory ${dest}`,
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: ENOTDIR,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
|
||||
throw new ERR_FS_CP_EINVAL({
|
||||
message: `cannot copy ${src} to a subdirectory of self ${dest}`,
|
||||
path: dest,
|
||||
syscall: 'cp',
|
||||
errno: EINVAL,
|
||||
})
|
||||
}
|
||||
return { srcStat, destStat }
|
||||
}
|
||||
|
||||
function areIdentical (srcStat, destStat) {
|
||||
return destStat.ino && destStat.dev && destStat.ino === srcStat.ino &&
|
||||
destStat.dev === srcStat.dev
|
||||
}
|
||||
|
||||
function getStats (src, dest, opts) {
|
||||
const statFunc = opts.dereference ?
|
||||
(file) => stat(file, { bigint: true }) :
|
||||
(file) => lstat(file, { bigint: true })
|
||||
return Promise.all([
|
||||
statFunc(src),
|
||||
statFunc(dest).catch((err) => {
|
||||
// istanbul ignore next: unsure how to cover.
|
||||
if (err.code === 'ENOENT') {
|
||||
return null
|
||||
}
|
||||
// istanbul ignore next: unsure how to cover.
|
||||
throw err
|
||||
}),
|
||||
])
|
||||
}
|
||||
|
||||
async function checkParentDir (destStat, src, dest, opts) {
|
||||
const destParent = dirname(dest)
|
||||
const dirExists = await pathExists(destParent)
|
||||
if (dirExists) {
|
||||
return getStatsForCopy(destStat, src, dest, opts)
|
||||
}
|
||||
await mkdir(destParent, { recursive: true })
|
||||
return getStatsForCopy(destStat, src, dest, opts)
|
||||
}
|
||||
|
||||
function pathExists (dest) {
|
||||
return stat(dest).then(
|
||||
() => true,
|
||||
// istanbul ignore next: not sure when this would occur
|
||||
(err) => (err.code === 'ENOENT' ? false : Promise.reject(err)))
|
||||
}
|
||||
|
||||
// Recursively check if dest parent is a subdirectory of src.
// It works for all file types including symlinks since it
// checks the src and dest inodes. It starts from the deepest
// parent and stops once it reaches the src parent or the root path.
async function checkParentPaths (src, srcStat, dest) {
  const srcParent = resolve(dirname(src))
  const destParent = resolve(dirname(dest))
  const reachedTop = destParent === srcParent || destParent === parse(destParent).root
  if (reachedTop) {
    return
  }
  let destStat
  try {
    destStat = await stat(destParent, { bigint: true })
  } catch (err) {
    // istanbul ignore else: not sure when this would occur
    if (err.code === 'ENOENT') {
      return
    }
    // istanbul ignore next: not sure when this would occur
    throw err
  }
  if (areIdentical(srcStat, destStat)) {
    throw new ERR_FS_CP_EINVAL({
      message: `cannot copy ${src} to a subdirectory of self ${dest}`,
      path: dest,
      syscall: 'cp',
      errno: EINVAL,
    })
  }
  // walk one level up and repeat until the root is reached
  return checkParentPaths(src, srcStat, destParent)
}
|
||||
|
||||
// Split an absolute, resolved path into its non-empty segments.
const normalizePathToArray = (path) => {
  return resolve(path).split(sep).filter(Boolean)
}
|
||||
|
||||
// Return true if dest is a subdir of src, otherwise false.
// It only checks the path strings.
function isSrcSubdir (src, dest) {
  const srcSegments = normalizePathToArray(src)
  const destSegments = normalizePathToArray(dest)
  return srcSegments.every((segment, i) => destSegments[i] === segment)
}
|
||||
|
||||
// Run the user-supplied filter; only invoke `onInclude` when it accepts
// the src/dest pair (resolves undefined for filtered-out items).
async function handleFilter (onInclude, destStat, src, dest, opts, cb) {
  const include = await opts.filter(src, dest)
  if (!include) {
    return
  }
  return onInclude(destStat, src, dest, opts, cb)
}
|
||||
|
||||
// Entry point for copying a single item; applies opts.filter when present.
function startCopy (destStat, src, dest, opts) {
  return opts.filter
    ? handleFilter(getStatsForCopy, destStat, src, dest, opts)
    : getStatsForCopy(destStat, src, dest, opts)
}
|
||||
|
||||
// Stat `src` (honoring opts.dereference) and dispatch to the handler for
// its file type.  Throws typed fs errors for types that cannot be copied
// (non-recursive directories, sockets, FIFOs, unknown types).
async function getStatsForCopy (destStat, src, dest, opts) {
  const statFn = opts.dereference ? stat : lstat
  const srcStat = await statFn(src)
  // istanbul ignore else: can't portably test FIFO
  if (srcStat.isDirectory() && opts.recursive) {
    return onDir(srcStat, destStat, src, dest, opts)
  } else if (srcStat.isDirectory()) {
    // directories require opts.recursive; refuse otherwise
    throw new ERR_FS_EISDIR({
      message: `${src} is a directory (not copied)`,
      path: src,
      syscall: 'cp',
      errno: EINVAL,
    })
  } else if (srcStat.isFile() ||
            srcStat.isCharacterDevice() ||
            srcStat.isBlockDevice()) {
    // regular files and device nodes are copied byte-wise
    return onFile(srcStat, destStat, src, dest, opts)
  } else if (srcStat.isSymbolicLink()) {
    return onLink(destStat, src, dest)
  } else if (srcStat.isSocket()) {
    throw new ERR_FS_CP_SOCKET({
      message: `cannot copy a socket file: ${dest}`,
      path: dest,
      syscall: 'cp',
      errno: EINVAL,
    })
  } else if (srcStat.isFIFO()) {
    throw new ERR_FS_CP_FIFO_PIPE({
      message: `cannot copy a FIFO pipe: ${dest}`,
      path: dest,
      syscall: 'cp',
      errno: EINVAL,
    })
  }
  // istanbul ignore next: should be unreachable
  throw new ERR_FS_CP_UNKNOWN({
    message: `cannot copy an unknown file type: ${dest}`,
    path: dest,
    syscall: 'cp',
    errno: EINVAL,
  })
}
|
||||
|
||||
// Copy a file; when something already exists at dest, defer to
// mayCopyFile to apply the force/errorOnExist policy.
function onFile (srcStat, destStat, src, dest, opts) {
  return destStat
    ? mayCopyFile(srcStat, src, dest, opts)
    : _copyFile(srcStat, src, dest, opts)
}
|
||||
|
||||
// Dest already exists: overwrite when opts.force, raise when
// opts.errorOnExist, otherwise silently skip this file.
async function mayCopyFile (srcStat, src, dest, opts) {
  if (opts.force) {
    await unlink(dest)
    return _copyFile(srcStat, src, dest, opts)
  }
  if (opts.errorOnExist) {
    throw new ERR_FS_CP_EEXIST({
      message: `${dest} already exists`,
      path: dest,
      syscall: 'cp',
      errno: EEXIST,
    })
  }
}
|
||||
|
||||
// Perform the actual byte copy, then propagate the source mode (and
// optionally its timestamps) to the destination.
async function _copyFile (srcStat, src, dest, opts) {
  await copyFile(src, dest)
  return opts.preserveTimestamps
    ? handleTimestampsAndMode(srcStat.mode, src, dest)
    : setDestMode(dest, srcStat.mode)
}
|
||||
|
||||
// Make sure the file is writable before setting the timestamp,
// otherwise open fails with EPERM when invoked with 'r+'
// (through utimes call)
async function handleTimestampsAndMode (srcMode, src, dest) {
  if (fileIsNotWritable(srcMode)) {
    await makeFileWritable(dest, srcMode)
  }
  return setDestTimestampsAndMode(srcMode, src, dest)
}
|
||||
|
||||
// True when the owner-write bit (0o200) is absent from the mode.
function fileIsNotWritable (srcMode) {
  return !(srcMode & 0o200)
}
|
||||
|
||||
// Add the owner-write bit so chmod/utimes can succeed on the copy.
function makeFileWritable (dest, srcMode) {
  const writableMode = srcMode | 0o200
  return setDestMode(dest, writableMode)
}
|
||||
|
||||
// Copy timestamps first, then restore the final mode (which may remove
// the temporary write permission).
async function setDestTimestampsAndMode (srcMode, src, dest) {
  await setDestTimestamps(src, dest)
  return setDestMode(dest, srcMode)
}
|
||||
|
||||
// Apply the given permission bits to the destination path.
function setDestMode (dest, srcMode) {
  return chmod(dest, srcMode)
}
|
||||
|
||||
async function setDestTimestamps (src, dest) {
  // The initial srcStat.atime cannot be trusted because it is modified
  // by the read(2) system call
  // (See https://nodejs.org/api/fs.html#fs_stat_time_values),
  // so re-stat the source right before copying its times.
  const { atime, mtime } = await stat(src)
  return utimes(dest, atime, mtime)
}
|
||||
|
||||
// Copy a directory, creating it first when nothing exists at dest yet.
function onDir (srcStat, destStat, src, dest, opts) {
  return destStat
    ? copyDir(src, dest, opts)
    : mkDirAndCopy(srcStat.mode, src, dest, opts)
}
|
||||
|
||||
// Create the destination directory, copy its contents, then copy the
// source directory's mode onto it.
async function mkDirAndCopy (srcMode, src, dest, opts) {
  await mkdir(dest)
  await copyDir(src, dest, opts)
  return setDestMode(dest, srcMode)
}
|
||||
|
||||
// Copy every entry of a directory sequentially, re-validating each
// src/dest pair before copying it.
async function copyDir (src, dest, opts) {
  for (const item of await readdir(src)) {
    const srcItem = join(src, item)
    const destItem = join(dest, item)
    const { destStat } = await checkPaths(srcItem, destItem, opts)
    await startCopy(destStat, srcItem, destItem, opts)
  }
}
|
||||
|
||||
// Copy a symlink.  The link target is re-resolved relative to the link's
// own directory, and safety checks prevent creating a link that would
// point into (or be clobbered by) the tree being copied.
async function onLink (destStat, src, dest) {
  let resolvedSrc = await readlink(src)
  // relative targets are interpreted from the source link's directory
  if (!isAbsolute(resolvedSrc)) {
    resolvedSrc = resolve(dirname(src), resolvedSrc)
  }
  // nothing at dest yet: just create the link
  if (!destStat) {
    return symlink(resolvedSrc, dest)
  }
  let resolvedDest
  try {
    resolvedDest = await readlink(dest)
  } catch (err) {
    // Dest exists and is a regular file or directory,
    // Windows may throw UNKNOWN error. If dest already exists,
    // fs throws error anyway, so no need to guard against it here.
    // istanbul ignore next: can only test on windows
    if (err.code === 'EINVAL' || err.code === 'UNKNOWN') {
      return symlink(resolvedSrc, dest)
    }
    // istanbul ignore next: should not be possible
    throw err
  }
  if (!isAbsolute(resolvedDest)) {
    resolvedDest = resolve(dirname(dest), resolvedDest)
  }
  // copying a link whose target contains dest would self-reference
  if (isSrcSubdir(resolvedSrc, resolvedDest)) {
    throw new ERR_FS_CP_EINVAL({
      message: `cannot copy ${resolvedSrc} to a subdirectory of self ` +
        `${resolvedDest}`,
      path: dest,
      syscall: 'cp',
      errno: EINVAL,
    })
  }
  // Do not copy if src is a subdir of dest since unlinking
  // dest in this case would result in removing src contents
  // and therefore a broken symlink would be created.
  const srcStat = await stat(src)
  if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) {
    throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({
      message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`,
      path: dest,
      syscall: 'cp',
      errno: EINVAL,
    })
  }
  return copyLink(resolvedSrc, dest)
}
|
||||
|
||||
// Replace whatever link currently sits at dest with a fresh symlink
// pointing at resolvedSrc.
async function copyLink (resolvedSrc, dest) {
  await unlink(dest)
  return symlink(resolvedSrc, dest)
}
|
||||
|
||||
module.exports = cp
|
13
node_modules/@npmcli/fs/lib/index.js
generated
vendored
Normal file
13
node_modules/@npmcli/fs/lib/index.js
generated
vendored
Normal file
|
@ -0,0 +1,13 @@
|
|||
'use strict'
|
||||
|
||||
const cp = require('./cp/index.js')
|
||||
const withTempDir = require('./with-temp-dir.js')
|
||||
const readdirScoped = require('./readdir-scoped.js')
|
||||
const moveFile = require('./move-file.js')
|
||||
|
||||
module.exports = {
|
||||
cp,
|
||||
withTempDir,
|
||||
readdirScoped,
|
||||
moveFile,
|
||||
}
|
78
node_modules/@npmcli/fs/lib/move-file.js
generated
vendored
Normal file
78
node_modules/@npmcli/fs/lib/move-file.js
generated
vendored
Normal file
|
@ -0,0 +1,78 @@
|
|||
const { dirname, join, resolve, relative, isAbsolute } = require('path')
|
||||
const fs = require('fs/promises')
|
||||
|
||||
// Existence check that treats only ENOENT as "missing"; any other
// access failure (e.g. EACCES) still counts as existing.
const pathExists = async path => {
  const missing = await fs.access(path).then(
    () => false,
    (er) => er.code === 'ENOENT'
  )
  return !missing
}
|
||||
|
||||
// Move `source` to `destination`, preferring a fast rename and falling
// back to copy+delete when the rename fails with EXDEV (cross-device) or
// EPERM.  Directories recurse; symlinks are collected in `symlinks` and
// recreated after everything else so their targets can be rewritten, and
// the original tree is removed at the end.  `root` and `symlinks` are
// internal recursion state — callers pass only (source, destination,
// options).  Throws TypeError on missing args and Error when the
// destination exists and options.overwrite is false (default: true).
const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => {
  if (!source || !destination) {
    throw new TypeError('`source` and `destination` file required')
  }

  options = {
    overwrite: true,
    ...options,
  }

  if (!options.overwrite && await pathExists(destination)) {
    throw new Error(`The destination file exists: ${destination}`)
  }

  await fs.mkdir(dirname(destination), { recursive: true })

  try {
    await fs.rename(source, destination)
  } catch (error) {
    if (error.code === 'EXDEV' || error.code === 'EPERM') {
      // rename not possible: fall back to a manual copy
      const sourceStat = await fs.lstat(source)
      if (sourceStat.isDirectory()) {
        const files = await fs.readdir(source)
        await Promise.all(files.map((file) =>
          moveFile(join(source, file), join(destination, file), options, false, symlinks)
        ))
      } else if (sourceStat.isSymbolicLink()) {
        // recreate links last, once their targets have been moved
        symlinks.push({ source, destination })
      } else {
        await fs.copyFile(source, destination)
      }
    } else {
      throw error
    }
  }

  if (root) {
    await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => {
      let target = await fs.readlink(symSource)
      // junction symlinks in windows will be absolute paths, so we need to
      // make sure they point to the symlink destination
      if (isAbsolute(target)) {
        target = resolve(symDestination, relative(symSource, target))
      }
      // try to determine what the actual file is so we can create the correct
      // type of symlink in windows
      // BUGFIX: previously the Stats object itself was left in the variable
      // that is passed to fs.symlink as the `type` argument whenever the
      // target was a non-directory; always pass a valid type string instead.
      let symlinkType = 'file'
      try {
        const targetStat = await fs.stat(resolve(dirname(symSource), target))
        if (targetStat.isDirectory()) {
          symlinkType = 'junction'
        }
      } catch {
        // target missing or unreadable: symlinkType remains 'file'
      }
      await fs.symlink(
        target,
        symDestination,
        symlinkType
      )
    }))
    await fs.rm(source, { recursive: true, force: true })
  }
}
|
||||
|
||||
module.exports = moveFile
|
20
node_modules/@npmcli/fs/lib/readdir-scoped.js
generated
vendored
Normal file
20
node_modules/@npmcli/fs/lib/readdir-scoped.js
generated
vendored
Normal file
|
@ -0,0 +1,20 @@
|
|||
const { readdir } = require('fs/promises')
|
||||
const { join } = require('path')
|
||||
|
||||
// List a directory's entries, expanding '@scope' directories one level so
// scoped npm packages come back as '@scope/name'.
const readdirScoped = async (dir) => {
  const entries = []

  for (const item of await readdir(dir)) {
    if (!item.startsWith('@')) {
      entries.push(item)
      continue
    }
    for (const scopedItem of await readdir(join(dir, item))) {
      entries.push(join(item, scopedItem))
    }
  }

  return entries
}
|
||||
|
||||
module.exports = readdirScoped
|
39
node_modules/@npmcli/fs/lib/with-temp-dir.js
generated
vendored
Normal file
39
node_modules/@npmcli/fs/lib/with-temp-dir.js
generated
vendored
Normal file
|
@ -0,0 +1,39 @@
|
|||
const { join, sep } = require('path')
|
||||
|
||||
const getOptions = require('./common/get-options.js')
|
||||
const { mkdir, mkdtemp, rm } = require('fs/promises')
|
||||
|
||||
// create a temp directory, ensure its permissions match its parent, then call
// the supplied function passing it the path to the directory. clean up after
// the function finishes, whether it throws or not
const withTempDir = async (root, fn, opts) => {
  const options = getOptions(opts, {
    copy: ['tmpPrefix'],
  })
  // make sure the root exists, then create a unique directory inside it
  await mkdir(root, { recursive: true })
  const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || ''))

  try {
    return await fn(target)
  } finally {
    // best-effort cleanup: a failure to remove the temp dir never masks
    // the result (or error) of fn
    await rm(target, { force: true, recursive: true }).catch(() => {})
  }
}
|
||||
|
||||
module.exports = withTempDir
|
52
node_modules/@npmcli/fs/package.json
generated
vendored
Normal file
52
node_modules/@npmcli/fs/package.json
generated
vendored
Normal file
|
@ -0,0 +1,52 @@
|
|||
{
|
||||
"name": "@npmcli/fs",
|
||||
"version": "3.1.1",
|
||||
"description": "filesystem utilities for the npm cli",
|
||||
"main": "lib/index.js",
|
||||
"files": [
|
||||
"bin/",
|
||||
"lib/"
|
||||
],
|
||||
"scripts": {
|
||||
"snap": "tap",
|
||||
"test": "tap",
|
||||
"npmclilint": "npmcli-lint",
|
||||
"lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
|
||||
"lintfix": "npm run lint -- --fix",
|
||||
"posttest": "npm run lint",
|
||||
"postsnap": "npm run lintfix --",
|
||||
"postlint": "template-oss-check",
|
||||
"template-oss-apply": "template-oss-apply --force"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/npm/fs.git"
|
||||
},
|
||||
"keywords": [
|
||||
"npm",
|
||||
"oss"
|
||||
],
|
||||
"author": "GitHub Inc.",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@npmcli/eslint-config": "^4.0.0",
|
||||
"@npmcli/template-oss": "4.22.0",
|
||||
"tap": "^16.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"semver": "^7.3.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
|
||||
},
|
||||
"templateOSS": {
|
||||
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
|
||||
"version": "4.22.0"
|
||||
},
|
||||
"tap": {
|
||||
"nyc-arg": [
|
||||
"--exclude",
|
||||
"tap-snapshots/**"
|
||||
]
|
||||
}
|
||||
}
|
15
node_modules/@npmcli/git/LICENSE
generated
vendored
Normal file
15
node_modules/@npmcli/git/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,15 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) npm, Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH
|
||||
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
|
||||
OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
|
||||
DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
|
||||
ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
|
||||
SOFTWARE.
|
158
node_modules/@npmcli/git/README.md
generated
vendored
Normal file
158
node_modules/@npmcli/git/README.md
generated
vendored
Normal file
|
@ -0,0 +1,158 @@
|
|||
# @npmcli/git
|
||||
|
||||
A utility for spawning git from npm CLI contexts.
|
||||
|
||||
This is _not_ an implementation of git itself, it's just a thing that
|
||||
spawns child processes to tell the system git CLI implementation to do
|
||||
stuff.
|
||||
|
||||
## USAGE
|
||||
|
||||
```js
|
||||
const git = require('@npmcli/git')
|
||||
git.clone('git://foo/bar.git', 'some-branch', 'some-path', opts) // clone a repo
|
||||
.then(() => git.spawn(['checkout', 'some-branch'], {cwd: 'bar'}))
|
||||
.then(() => git.spawn(['you get the idea']))
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
Most methods take an options object. Options are described below.
|
||||
|
||||
### `git.spawn(args, opts = {})`
|
||||
|
||||
Launch a `git` subprocess with the arguments specified.
|
||||
|
||||
All the other functions call this one at some point.
|
||||
|
||||
Processes are launched using
|
||||
[`@npmcli/promise-spawn`](http://npm.im/@npmcli/promise-spawn), with the
|
||||
`stdioString: true` option enabled by default, since git output is
|
||||
generally in readable string format.
|
||||
|
||||
Return value is a `Promise` that resolves to a result object with `{cmd,
|
||||
args, code, signal, stdout, stderr}` members, or rejects with an error with
|
||||
the same fields, passed back from
|
||||
[`@npmcli/promise-spawn`](http://npm.im/@npmcli/promise-spawn).
|
||||
|
||||
### `git.clone(repo, ref = 'HEAD', target = null, opts = {})` -> `Promise<sha String>`
|
||||
|
||||
Clone the repository into `target` path (or the default path for the name
|
||||
of the repository), checking out `ref`.
|
||||
|
||||
Return value is the sha of the current HEAD in the locally cloned
|
||||
repository.
|
||||
|
||||
In lieu of a specific `ref`, you may also pass in a `spec` option, which is
|
||||
a [`npm-package-arg`](http://npm.im/npm-package-arg) object for a `git`
|
||||
package dependency reference. In this way, you can select SemVer tags
|
||||
within a range, or any git committish value. For example:
|
||||
|
||||
```js
|
||||
const npa = require('npm-package-arg')
|
||||
git.clone('git@github.com:npm/git.git', '', null, {
|
||||
spec: npa('github:npm/git#semver:1.x'),
|
||||
})
|
||||
|
||||
// only gitRange and gitCommittish are relevant, so this works, too
|
||||
git.clone('git@github.com:npm/git.git', null, null, {
|
||||
spec: { gitRange: '1.x' }
|
||||
})
|
||||
```
|
||||
|
||||
This will automatically do a shallow `--depth=1` clone on any hosts that
|
||||
are known to support it. To force a shallow or deep clone, you can set the
|
||||
`gitShallow` option to `true` or `false` respectively.
|
||||
|
||||
### `git.revs(repo, opts = {})` -> `Promise<rev doc Object>`
|
||||
|
||||
Fetch a representation of all of the named references in a given
|
||||
repository. The resulting doc is intentionally somewhat
|
||||
[packument](https://www.npmjs.com/package/pacote#packuments)-like, so that
|
||||
git semver ranges can be applied using the same
|
||||
[`npm-pick-manifest`](http://npm.im/npm-pick-manifest) logic.
|
||||
|
||||
The resulting object looks like:
|
||||
|
||||
```js
|
||||
revs = {
|
||||
versions: {
|
||||
// all semver-looking tags go in here...
|
||||
// version: { sha, ref, rawRef, type }
|
||||
'1.0.0': {
|
||||
sha: '1bc5fba3353f8e1b56493b266bc459276ab23139',
|
||||
ref: 'v1.0.0',
|
||||
rawRef: 'refs/tags/v1.0.0',
|
||||
type: 'tag',
|
||||
},
|
||||
},
|
||||
'dist-tags': {
|
||||
HEAD: '1.0.0',
|
||||
latest: '1.0.0',
|
||||
},
|
||||
refs: {
|
||||
// all the advertised refs that can be cloned down remotely
|
||||
HEAD: { sha, ref, rawRef, type: 'head' },
|
||||
master: { ... },
|
||||
'v1.0.0': { ... },
|
||||
'refs/tags/v1.0.0': { ... },
|
||||
},
|
||||
shas: {
|
||||
// all named shas referenced above
|
||||
// sha: [list, of, refs]
|
||||
'6b2501f9183a1753027a9bf89a184b7d3d4602c7': [
|
||||
'HEAD',
|
||||
'master',
|
||||
'refs/heads/master',
|
||||
],
|
||||
'1bc5fba3353f8e1b56493b266bc459276ab23139': [ 'v1.0.0', 'refs/tags/v1.0.0' ],
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### `git.is(opts)` -> `Promise<Boolean>`
|
||||
|
||||
Resolve to `true` if the path argument refers to the root of a git
|
||||
repository.
|
||||
|
||||
It does this by checking for the existence of `${path}/.git`, which is
not an airtight indicator, but is a good gut-check that there is any git
repository at that path before doing more expensive work.
|
||||
|
||||
### `git.find(opts)` -> `Promise<String | null>`
|
||||
|
||||
Given a path, walk up the file system tree until a git repo working
|
||||
directory is found. Since this calls `stat` a bunch of times, it's
|
||||
probably best to only call it if you're reasonably sure you're likely to be
|
||||
in a git project somewhere. Pass in `opts.root` to stop checking at that
|
||||
directory.
|
||||
|
||||
Resolves to `null` if not in a git project.
|
||||
|
||||
### `git.isClean(opts = {})` -> `Promise<Boolean>`
|
||||
|
||||
Return true if in a git dir, and that git dir is free of changes. This
|
||||
will resolve `true` if the git working dir is clean, or `false` if not, and
|
||||
reject if the path is not within a git directory or some other error
|
||||
occurs.
|
||||
|
||||
## OPTIONS
|
||||
|
||||
- `retry` An object to configure retry behavior for transient network
|
||||
errors with exponential backoff.
|
||||
- `retries`: Defaults to `opts.fetchRetries` or 2
|
||||
- `factor`: Defaults to `opts.fetchRetryFactor` or 10
|
||||
- `maxTimeout`: Defaults to `opts.fetchRetryMaxtimeout` or 60000
|
||||
- `minTimeout`: Defaults to `opts.fetchRetryMintimeout` or 1000
|
||||
- `git` Path to the `git` binary to use. Will look up the first `git` in
|
||||
the `PATH` if not specified.
|
||||
- `spec` The [`npm-package-arg`](http://npm.im/npm-package-arg) specifier
|
||||
object for the thing being fetched (if relevant).
|
||||
- `fakePlatform` set to a fake value of `process.platform` to use. (Just
|
||||
for testing `win32` behavior on Unix, and vice versa.)
|
||||
- `cwd` The current working dir for the git command. Particularly for
|
||||
`find` and `is` and `isClean`, it's good to know that this defaults to
|
||||
`process.cwd()`, as one might expect.
|
||||
- Any other options that can be passed to
|
||||
[`@npmcli/promise-spawn`](http://npm.im/@npmcli/promise-spawn), or
|
||||
`child_process.spawn()`.
|
172
node_modules/@npmcli/git/lib/clone.js
generated
vendored
Normal file
172
node_modules/@npmcli/git/lib/clone.js
generated
vendored
Normal file
|
@ -0,0 +1,172 @@
|
|||
// The goal here is to minimize both git workload and
|
||||
// the number of refs we download over the network.
|
||||
//
|
||||
// Every method ends up with the checked out working dir
|
||||
// at the specified ref, and resolves with the git sha.
|
||||
|
||||
// Only certain whitelisted hosts get shallow cloning.
// Many hosts (including GHE) don't always support it.
// A failed shallow fetch takes a LOT longer than a full
// fetch in most cases, so we skip it entirely.
// Set opts.gitShallow = true/false to force this behavior
// one way or the other.
// Hostnames (as parsed from the repo url) eligible for --depth=1 clones.
const shallowHosts = new Set([
  'github.com',
  'gist.github.com',
  'gitlab.com',
  'bitbucket.com',
  'bitbucket.org',
])
|
||||
// we have to use url.parse until we add the same shim that hosted-git-info has
|
||||
// to handle scp:// urls
|
||||
const { parse } = require('url') // eslint-disable-line node/no-deprecated-api
|
||||
const path = require('path')
|
||||
|
||||
const getRevs = require('./revs.js')
|
||||
const spawn = require('./spawn.js')
|
||||
const { isWindows } = require('./utils.js')
|
||||
|
||||
const pickManifest = require('npm-pick-manifest')
|
||||
const fs = require('fs/promises')
|
||||
|
||||
module.exports = (repo, ref = 'HEAD', target = null, opts = {}) =>
|
||||
getRevs(repo, opts).then(revs => clone(
|
||||
repo,
|
||||
revs,
|
||||
ref,
|
||||
resolveRef(revs, ref, opts),
|
||||
target || defaultTarget(repo, opts.cwd),
|
||||
opts
|
||||
))
|
||||
|
||||
// Decide whether to do a --depth=1 clone: an explicit opts.gitShallow
// (true or false) wins, otherwise only whitelisted hosts qualify.
const maybeShallow = (repo, opts) => {
  const forced = opts.gitShallow === false || opts.gitShallow
  return forced ? opts.gitShallow : shallowHosts.has(parse(repo).host)
}
|
||||
|
||||
// Derive a checkout directory from the repo url: its basename minus any
// trailing '.git', resolved against cwd.
const defaultTarget = (repo, /* istanbul ignore next */ cwd = process.cwd()) => {
  const repoName = path.basename(repo.replace(/[/\\]?\.git$/, ''))
  return path.resolve(cwd, repoName)
}
|
||||
|
||||
// Pick the cheapest clone strategy for the resolved ref.
const clone = (repo, revs, ref, revDoc, target, opts) => {
  // ref not advertised by the remote: full mirror clone + checkout
  if (!revDoc) {
    return unresolved(repo, ref, target, opts)
  }
  // ref is HEAD itself: a plain clone suffices
  if (revDoc.sha === revs.refs.HEAD.sha) {
    return plain(repo, revDoc, target, opts)
  }
  // named tag or branch: clone with -b
  if (revDoc.type === 'tag' || revDoc.type === 'branch') {
    return branch(repo, revDoc, target, opts)
  }
  // anything else advertised (e.g. a pull ref): init + fetch + checkout
  return other(repo, revDoc, target, opts)
}
|
||||
|
||||
// Map a requested committish onto a rev doc from the ls-remote summary.
// Precedence: spec.gitCommittish overrides the ref argument; semver
// ranges go through pickManifest; otherwise fall back to HEAD, then an
// exact ref name, then a sha lookup; null when nothing matches.
const resolveRef = (revs, ref, opts) => {
  const { spec = {} } = opts
  ref = spec.gitCommittish || ref
  /* istanbul ignore next - will fail anyway, can't pull */
  if (!revs) {
    return null
  }
  if (spec.gitRange) {
    return pickManifest(revs, spec.gitRange, opts)
  }
  if (!ref) {
    return revs.refs.HEAD
  }
  if (revs.refs[ref]) {
    return revs.refs[ref]
  }
  if (revs.shas[ref]) {
    return revs.refs[revs.shas[ref][0]]
  }
  return null
}
|
||||
|
||||
// pull request or some other kind of advertised ref: init an empty repo,
// fetch just that ref (shallow when allowed), and check out its sha.
const other = async (repo, revDoc, target, opts) => {
  const shallow = maybeShallow(repo, opts)

  const fetchOrigin = ['fetch', 'origin', revDoc.rawRef]
    .concat(shallow ? ['--depth=1'] : [])

  const git = (args) => spawn(args, { ...opts, cwd: target })
  await fs.mkdir(target, { recursive: true })
  await git(['init'])
  if (isWindows(opts)) {
    // long path support for deep checkouts on windows
    await git(['config', '--local', '--add', 'core.longpaths', 'true'])
  }
  await git(['remote', 'add', 'origin', repo])
  await git(fetchOrigin)
  await git(['checkout', revDoc.sha])
  await updateSubmodules(target, opts)
  return revDoc.sha
}
|
||||
|
||||
// tag or branches. use -b
const branch = (repo, revDoc, target, opts) => {
  const args = ['clone', '-b', revDoc.ref, repo, target, '--recurse-submodules']
  if (maybeShallow(repo, opts)) {
    args.push('--depth=1')
  }
  if (isWindows(opts)) {
    args.push('--config', 'core.longpaths=true')
  }
  return spawn(args, opts).then(() => revDoc.sha)
}
|
||||
|
||||
// just the head. clone it
const plain = (repo, revDoc, target, opts) => {
  const args = ['clone', repo, target, '--recurse-submodules']
  if (maybeShallow(repo, opts)) {
    args.push('--depth=1')
  }
  if (isWindows(opts)) {
    args.push('--config', 'core.longpaths=true')
  }
  return spawn(args, opts).then(() => revDoc.sha)
}
|
||||
|
||||
// Initialize and update submodules recursively, but only when the
// checkout actually contains a .gitmodules file.
const updateSubmodules = async (target, opts) => {
  const hasSubmodules = await fs.stat(`${target}/.gitmodules`)
    .then(() => true, () => false)
  if (!hasSubmodules) {
    return null
  }
  return spawn(
    ['submodule', 'update', '-q', '--init', '--recursive'],
    { ...opts, cwd: target }
  )
}
|
||||
|
||||
const unresolved = async (repo, ref, target, opts) => {
  // can't do this one shallowly, because the ref isn't advertised
  // but we can avoid checking out the working dir twice, at least
  const lp = isWindows(opts) ? ['--config', 'core.longpaths=true'] : []
  const cloneArgs = ['clone', '--mirror', '-q', repo, target + '/.git']
  const git = (args) => spawn(args, { ...opts, cwd: target })
  await fs.mkdir(target, { recursive: true })
  await git(cloneArgs.concat(lp))
  await git(['init'])
  await git(['checkout', ref])
  await updateSubmodules(target, opts)
  const { stdout } = await git(['rev-parse', '--revs-only', 'HEAD'])
  return stdout.trim()
}
|
36
node_modules/@npmcli/git/lib/errors.js
generated
vendored
Normal file
36
node_modules/@npmcli/git/lib/errors.js
generated
vendored
Normal file
|
@ -0,0 +1,36 @@
|
|||
|
||||
// how many times a retriable git error may be retried before giving up
const maxRetry = 3

// Base class for classified git failures; non-retriable by default.
class GitError extends Error {
  shouldRetry () {
    return false
  }
}

// A transient connection failure talking to the remote; callers may
// retry up to maxRetry times.
class GitConnectionError extends GitError {
  constructor () {
    super('A git connection error occurred')
  }

  shouldRetry (number) {
    return number < maxRetry
  }
}

// The requested ref/pathspec does not exist in the repository.
class GitPathspecError extends GitError {
  constructor () {
    super('The git reference could not be found')
  }
}

// A git failure that could not be classified more specifically.
class GitUnknownError extends GitError {
  constructor () {
    super('An unknown git error occurred')
  }
}
|
||||
|
||||
module.exports = {
|
||||
GitConnectionError,
|
||||
GitPathspecError,
|
||||
GitUnknownError,
|
||||
}
|
15
node_modules/@npmcli/git/lib/find.js
generated
vendored
Normal file
15
node_modules/@npmcli/git/lib/find.js
generated
vendored
Normal file
|
@ -0,0 +1,15 @@
|
|||
const is = require('./is.js')
|
||||
const { dirname } = require('path')
|
||||
|
||||
module.exports = async ({ cwd = process.cwd(), root } = {}) => {
|
||||
while (true) {
|
||||
if (await is({ cwd })) {
|
||||
return cwd
|
||||
}
|
||||
const next = dirname(cwd)
|
||||
if (cwd === root || cwd === next) {
|
||||
return null
|
||||
}
|
||||
cwd = next
|
||||
}
|
||||
}
|
9
node_modules/@npmcli/git/lib/index.js
generated
vendored
Normal file
9
node_modules/@npmcli/git/lib/index.js
generated
vendored
Normal file
|
@ -0,0 +1,9 @@
|
|||
module.exports = {
|
||||
clone: require('./clone.js'),
|
||||
revs: require('./revs.js'),
|
||||
spawn: require('./spawn.js'),
|
||||
is: require('./is.js'),
|
||||
find: require('./find.js'),
|
||||
isClean: require('./is-clean.js'),
|
||||
errors: require('./errors.js'),
|
||||
}
|
6
node_modules/@npmcli/git/lib/is-clean.js
generated
vendored
Normal file
6
node_modules/@npmcli/git/lib/is-clean.js
generated
vendored
Normal file
|
@ -0,0 +1,6 @@
|
|||
const spawn = require('./spawn.js')
|
||||
|
||||
// Resolve true when `git status` (ignoring untracked files) reports no
// changed entries; rejects when not inside a git dir.
module.exports = (opts = {}) =>
  spawn(['status', '--porcelain=v1', '-uno'], opts)
    .then(res => {
      const dirty = res.stdout.trim().split(/\r?\n+/)
        .map(l => l.trim())
        .filter(l => l)
      return dirty.length === 0
    })
|
4
node_modules/@npmcli/git/lib/is.js
generated
vendored
Normal file
4
node_modules/@npmcli/git/lib/is.js
generated
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
// not an airtight indicator, but a good gut-check to even bother trying
const { stat } = require('fs/promises')
// resolves true when `${cwd}/.git` exists (never rejects)
module.exports = ({ cwd = process.cwd() } = {}) =>
  stat(`${cwd}/.git`).then(() => true, () => false)
|
147
node_modules/@npmcli/git/lib/lines-to-revs.js
generated
vendored
Normal file
147
node_modules/@npmcli/git/lib/lines-to-revs.js
generated
vendored
Normal file
|
@ -0,0 +1,147 @@
|
|||
// turn an array of lines from `git ls-remote` into a thing
|
||||
// vaguely resembling a packument, where docs are a resolved ref
|
||||
|
||||
const semver = require('semver')
|
||||
|
||||
module.exports = lines => finish(lines.reduce(linesToRevsReducer, {
|
||||
versions: {},
|
||||
'dist-tags': {},
|
||||
refs: {},
|
||||
shas: {},
|
||||
}))
|
||||
|
||||
// post-process the reduced revs: peel annotated tags, index refs by sha,
// then synthesize dist-tags
const finish = revs => distTags(shaList(peelTags(revs)))
|
||||
|
||||
// We can check out shallow clones on specific SHAs if we have a ref
// (build the sha -> [refs] index in place and return revs)
const shaList = revs => {
  for (const ref of Object.keys(revs.refs)) {
    const { sha } = revs.refs[ref]
    if (revs.shas[sha]) {
      revs.shas[sha].push(ref)
    } else {
      revs.shas[sha] = [ref]
    }
  }
  return revs
}
|
||||
|
||||
// Replace any tags with their ^{} counterparts, if those exist
// (the peeled ref carries the commit sha the tag points at)
const peelTags = revs => {
  const peeledRefs = Object.keys(revs.refs).filter(ref => ref.endsWith('^{}'))
  for (const ref of peeledRefs) {
    const unpeeled = revs.refs[ref.replace(/\^\{\}$/, '')]
    if (unpeeled) {
      unpeeled.sha = revs.refs[ref].sha
      delete revs.refs[ref]
    }
  }
  return revs
}
|
||||
|
||||
const distTags = revs => {
|
||||
// not entirely sure what situations would result in an
|
||||
// ichabod repo, but best to be careful in Sleepy Hollow anyway
|
||||
const HEAD = revs.refs.HEAD || /* istanbul ignore next */ {}
|
||||
const versions = Object.keys(revs.versions)
|
||||
versions.forEach(v => {
|
||||
// simulate a dist-tags with latest pointing at the
|
||||
// 'latest' branch if one exists and is a version,
|
||||
// or HEAD if not.
|
||||
const ver = revs.versions[v]
|
||||
if (revs.refs.latest && ver.sha === revs.refs.latest.sha) {
|
||||
revs['dist-tags'].latest = v
|
||||
} else if (ver.sha === HEAD.sha) {
|
||||
revs['dist-tags'].HEAD = v
|
||||
if (!revs.refs.latest) {
|
||||
revs['dist-tags'].latest = v
|
||||
}
|
||||
}
|
||||
})
|
||||
return revs
|
||||
}
|
||||
|
||||
const refType = ref => {
|
||||
if (ref.startsWith('refs/tags/')) {
|
||||
return 'tag'
|
||||
}
|
||||
if (ref.startsWith('refs/heads/')) {
|
||||
return 'branch'
|
||||
}
|
||||
if (ref.startsWith('refs/pull/')) {
|
||||
return 'pull'
|
||||
}
|
||||
if (ref === 'HEAD') {
|
||||
return 'head'
|
||||
}
|
||||
// Could be anything, ignore for now
|
||||
/* istanbul ignore next */
|
||||
return 'other'
|
||||
}
|
||||
|
||||
// return the doc, or null if we should ignore it.
|
||||
const lineToRevDoc = line => {
|
||||
const split = line.trim().split(/\s+/, 2)
|
||||
if (split.length < 2) {
|
||||
return null
|
||||
}
|
||||
|
||||
const sha = split[0].trim()
|
||||
const rawRef = split[1].trim()
|
||||
const type = refType(rawRef)
|
||||
|
||||
if (type === 'tag') {
|
||||
// refs/tags/foo^{} is the 'peeled tag', ie the commit
|
||||
// that is tagged by refs/tags/foo they resolve to the same
|
||||
// content, just different objects in git's data structure.
|
||||
// But, we care about the thing the tag POINTS to, not the tag
|
||||
// object itself, so we only look at the peeled tag refs, and
|
||||
// ignore the pointer.
|
||||
// For now, though, we have to save both, because some tags
|
||||
// don't have peels, if they were not annotated.
|
||||
const ref = rawRef.slice('refs/tags/'.length)
|
||||
return { sha, ref, rawRef, type }
|
||||
}
|
||||
|
||||
if (type === 'branch') {
|
||||
const ref = rawRef.slice('refs/heads/'.length)
|
||||
return { sha, ref, rawRef, type }
|
||||
}
|
||||
|
||||
if (type === 'pull') {
|
||||
// NB: merged pull requests installable with #pull/123/merge
|
||||
// for the merged pr, or #pull/123 for the PR head
|
||||
const ref = rawRef.slice('refs/'.length).replace(/\/head$/, '')
|
||||
return { sha, ref, rawRef, type }
|
||||
}
|
||||
|
||||
if (type === 'head') {
|
||||
const ref = 'HEAD'
|
||||
return { sha, ref, rawRef, type }
|
||||
}
|
||||
|
||||
// at this point, all we can do is leave the ref un-munged
|
||||
return { sha, ref: rawRef, rawRef, type }
|
||||
}
|
||||
|
||||
const linesToRevsReducer = (revs, line) => {
|
||||
const doc = lineToRevDoc(line)
|
||||
|
||||
if (!doc) {
|
||||
return revs
|
||||
}
|
||||
|
||||
revs.refs[doc.ref] = doc
|
||||
revs.refs[doc.rawRef] = doc
|
||||
|
||||
if (doc.type === 'tag') {
|
||||
// try to pull a semver value out of tags like `release-v1.2.3`
|
||||
// which is a pretty common pattern.
|
||||
const match = !doc.ref.endsWith('^{}') &&
|
||||
doc.ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/)
|
||||
if (match && semver.valid(match[1], true)) {
|
||||
revs.versions[semver.clean(match[1], true)] = doc
|
||||
}
|
||||
}
|
||||
|
||||
return revs
|
||||
}
|
33
node_modules/@npmcli/git/lib/make-error.js
generated
vendored
Normal file
33
node_modules/@npmcli/git/lib/make-error.js
generated
vendored
Normal file
|
@ -0,0 +1,33 @@
|
|||
const {
|
||||
GitConnectionError,
|
||||
GitPathspecError,
|
||||
GitUnknownError,
|
||||
} = require('./errors.js')
|
||||
|
||||
const connectionErrorRe = new RegExp([
|
||||
'remote error: Internal Server Error',
|
||||
'The remote end hung up unexpectedly',
|
||||
'Connection timed out',
|
||||
'Operation timed out',
|
||||
'Failed to connect to .* Timed out',
|
||||
'Connection reset by peer',
|
||||
'SSL_ERROR_SYSCALL',
|
||||
'The requested URL returned error: 503',
|
||||
].join('|'))
|
||||
|
||||
const missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/
|
||||
|
||||
function makeError (er) {
|
||||
const message = er.stderr
|
||||
let gitEr
|
||||
if (connectionErrorRe.test(message)) {
|
||||
gitEr = new GitConnectionError(message)
|
||||
} else if (missingPathspecRe.test(message)) {
|
||||
gitEr = new GitPathspecError(message)
|
||||
} else {
|
||||
gitEr = new GitUnknownError(message)
|
||||
}
|
||||
return Object.assign(gitEr, er)
|
||||
}
|
||||
|
||||
module.exports = makeError
|
57
node_modules/@npmcli/git/lib/opts.js
generated
vendored
Normal file
57
node_modules/@npmcli/git/lib/opts.js
generated
vendored
Normal file
|
@ -0,0 +1,57 @@
|
|||
const fs = require('node:fs')
|
||||
const os = require('node:os')
|
||||
const path = require('node:path')
|
||||
const ini = require('ini')
|
||||
|
||||
const gitConfigPath = path.join(os.homedir(), '.gitconfig')
|
||||
|
||||
let cachedConfig = null
|
||||
|
||||
// Function to load and cache the git config
|
||||
const loadGitConfig = () => {
|
||||
if (cachedConfig === null) {
|
||||
try {
|
||||
cachedConfig = {}
|
||||
if (fs.existsSync(gitConfigPath)) {
|
||||
const configContent = fs.readFileSync(gitConfigPath, 'utf-8')
|
||||
cachedConfig = ini.parse(configContent)
|
||||
}
|
||||
} catch (error) {
|
||||
cachedConfig = {}
|
||||
}
|
||||
}
|
||||
return cachedConfig
|
||||
}
|
||||
|
||||
const checkGitConfigs = () => {
|
||||
const config = loadGitConfig()
|
||||
return {
|
||||
sshCommandSetInConfig: config?.core?.sshCommand !== undefined,
|
||||
askPassSetInConfig: config?.core?.askpass !== undefined,
|
||||
}
|
||||
}
|
||||
|
||||
const sshCommandSetInEnv = process.env.GIT_SSH_COMMAND !== undefined
|
||||
const askPassSetInEnv = process.env.GIT_ASKPASS !== undefined
|
||||
const { sshCommandSetInConfig, askPassSetInConfig } = checkGitConfigs()
|
||||
|
||||
// Values we want to set if they're not already defined by the end user
|
||||
// This defaults to accepting new ssh host key fingerprints
|
||||
const finalGitEnv = {
|
||||
...(askPassSetInEnv || askPassSetInConfig ? {} : {
|
||||
GIT_ASKPASS: 'echo',
|
||||
}),
|
||||
...(sshCommandSetInEnv || sshCommandSetInConfig ? {} : {
|
||||
GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new',
|
||||
}),
|
||||
}
|
||||
|
||||
module.exports = (opts = {}) => ({
|
||||
stdioString: true,
|
||||
...opts,
|
||||
shell: false,
|
||||
env: opts.env || { ...finalGitEnv, ...process.env },
|
||||
})
|
||||
|
||||
// Export the loadGitConfig function for testing
|
||||
module.exports.loadGitConfig = loadGitConfig
|
28
node_modules/@npmcli/git/lib/revs.js
generated
vendored
Normal file
28
node_modules/@npmcli/git/lib/revs.js
generated
vendored
Normal file
|
@ -0,0 +1,28 @@
|
|||
const pinflight = require('promise-inflight')
|
||||
const spawn = require('./spawn.js')
|
||||
const { LRUCache } = require('lru-cache')
|
||||
|
||||
const revsCache = new LRUCache({
|
||||
max: 100,
|
||||
ttl: 5 * 60 * 1000,
|
||||
})
|
||||
|
||||
const linesToRevs = require('./lines-to-revs.js')
|
||||
|
||||
module.exports = async (repo, opts = {}) => {
|
||||
if (!opts.noGitRevCache) {
|
||||
const cached = revsCache.get(repo)
|
||||
if (cached) {
|
||||
return cached
|
||||
}
|
||||
}
|
||||
|
||||
return pinflight(`ls-remote:${repo}`, () =>
|
||||
spawn(['ls-remote', repo], opts)
|
||||
.then(({ stdout }) => linesToRevs(stdout.trim().split('\n')))
|
||||
.then(revs => {
|
||||
revsCache.set(repo, revs)
|
||||
return revs
|
||||
})
|
||||
)
|
||||
}
|
44
node_modules/@npmcli/git/lib/spawn.js
generated
vendored
Normal file
44
node_modules/@npmcli/git/lib/spawn.js
generated
vendored
Normal file
|
@ -0,0 +1,44 @@
|
|||
const spawn = require('@npmcli/promise-spawn')
|
||||
const promiseRetry = require('promise-retry')
|
||||
const { log } = require('proc-log')
|
||||
const makeError = require('./make-error.js')
|
||||
const makeOpts = require('./opts.js')
|
||||
|
||||
module.exports = (gitArgs, opts = {}) => {
|
||||
const whichGit = require('./which.js')
|
||||
const gitPath = whichGit(opts)
|
||||
|
||||
if (gitPath instanceof Error) {
|
||||
return Promise.reject(gitPath)
|
||||
}
|
||||
|
||||
// undocumented option, mostly only here for tests
|
||||
const args = opts.allowReplace || gitArgs[0] === '--no-replace-objects'
|
||||
? gitArgs
|
||||
: ['--no-replace-objects', ...gitArgs]
|
||||
|
||||
let retryOpts = opts.retry
|
||||
if (retryOpts === null || retryOpts === undefined) {
|
||||
retryOpts = {
|
||||
retries: opts.fetchRetries || 2,
|
||||
factor: opts.fetchRetryFactor || 10,
|
||||
maxTimeout: opts.fetchRetryMaxtimeout || 60000,
|
||||
minTimeout: opts.fetchRetryMintimeout || 1000,
|
||||
}
|
||||
}
|
||||
return promiseRetry((retryFn, number) => {
|
||||
if (number !== 1) {
|
||||
log.silly('git', `Retrying git command: ${
|
||||
args.join(' ')} attempt # ${number}`)
|
||||
}
|
||||
|
||||
return spawn(gitPath, args, makeOpts(opts))
|
||||
.catch(er => {
|
||||
const gitError = makeError(er)
|
||||
if (!gitError.shouldRetry(number)) {
|
||||
throw gitError
|
||||
}
|
||||
retryFn(gitError)
|
||||
})
|
||||
}, retryOpts)
|
||||
}
|
3
node_modules/@npmcli/git/lib/utils.js
generated
vendored
Normal file
3
node_modules/@npmcli/git/lib/utils.js
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32'
|
||||
|
||||
exports.isWindows = isWindows
|
18
node_modules/@npmcli/git/lib/which.js
generated
vendored
Normal file
18
node_modules/@npmcli/git/lib/which.js
generated
vendored
Normal file
|
@ -0,0 +1,18 @@
|
|||
const which = require('which')
|
||||
|
||||
let gitPath
|
||||
try {
|
||||
gitPath = which.sync('git')
|
||||
} catch {
|
||||
// ignore errors
|
||||
}
|
||||
|
||||
module.exports = (opts = {}) => {
|
||||
if (opts.git) {
|
||||
return opts.git
|
||||
}
|
||||
if (!gitPath || opts.git === false) {
|
||||
return Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' })
|
||||
}
|
||||
return gitPath
|
||||
}
|
1
node_modules/@npmcli/git/node_modules/.bin/node-which
generated
vendored
Symbolic link
1
node_modules/@npmcli/git/node_modules/.bin/node-which
generated
vendored
Symbolic link
|
@ -0,0 +1 @@
|
|||
../which/bin/which.js
|
15
node_modules/@npmcli/git/node_modules/isexe/LICENSE
generated
vendored
Normal file
15
node_modules/@npmcli/git/node_modules/isexe/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,15 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) 2016-2022 Isaac Z. Schlueter and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
74
node_modules/@npmcli/git/node_modules/isexe/README.md
generated
vendored
Normal file
74
node_modules/@npmcli/git/node_modules/isexe/README.md
generated
vendored
Normal file
|
@ -0,0 +1,74 @@
|
|||
# isexe
|
||||
|
||||
Minimal module to check if a file is executable, and a normal file.
|
||||
|
||||
Uses `fs.stat` and tests against the `PATHEXT` environment variable on
|
||||
Windows.
|
||||
|
||||
## USAGE
|
||||
|
||||
```js
|
||||
import { isexe, sync } from 'isexe'
|
||||
// or require() works too
|
||||
// const { isexe } = require('isexe')
|
||||
isexe('some-file-name').then(isExe => {
|
||||
if (isExe) {
|
||||
console.error('this thing can be run')
|
||||
} else {
|
||||
console.error('cannot be run')
|
||||
}
|
||||
}, (err) => {
|
||||
console.error('probably file doesnt exist or something')
|
||||
})
|
||||
|
||||
// same thing but synchronous, throws errors
|
||||
isExe = sync('some-file-name')
|
||||
|
||||
// treat errors as just "not executable"
|
||||
const isExe = await isexe('maybe-missing-file', { ignoreErrors: true })
|
||||
const isExe = sync('maybe-missing-file', { ignoreErrors: true })
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### `isexe(path, [options]) => Promise<boolean>`
|
||||
|
||||
Check if the path is executable.
|
||||
|
||||
Will raise whatever errors may be raised by `fs.stat`, unless
|
||||
`options.ignoreErrors` is set to true.
|
||||
|
||||
### `sync(path, [options]) => boolean`
|
||||
|
||||
Same as `isexe` but returns the value and throws any errors raised.
|
||||
|
||||
## Platform Specific Implementations
|
||||
|
||||
If for some reason you want to use the implementation for a
|
||||
specific platform, you can do that.
|
||||
|
||||
```js
|
||||
import { win32, posix } from 'isexe'
|
||||
win32.isexe(...)
|
||||
win32.sync(...)
|
||||
// etc
|
||||
|
||||
// or:
|
||||
import { isexe, sync } from 'isexe/posix'
|
||||
```
|
||||
|
||||
The default exported implementation will be chosen based on
|
||||
`process.platform`.
|
||||
|
||||
### Options
|
||||
|
||||
```ts
|
||||
import type IsexeOptions from 'isexe'
|
||||
```
|
||||
|
||||
* `ignoreErrors` Treat all errors as "no, this is not
|
||||
executable", but don't raise them.
|
||||
* `uid` Number to use as the user id on posix
|
||||
* `gid` Number to use as the group id on posix
|
||||
* `pathExt` List of path extensions to use instead of `PATHEXT`
|
||||
environment variable on Windows.
|
14
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/index.d.ts
generated
vendored
Normal file
14
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/index.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
import * as posix from './posix.js';
|
||||
import * as win32 from './win32.js';
|
||||
export * from './options.js';
|
||||
export { win32, posix };
|
||||
/**
|
||||
* Determine whether a path is executable on the current platform.
|
||||
*/
|
||||
export declare const isexe: (path: string, options?: import("./options.js").IsexeOptions) => Promise<boolean>;
|
||||
/**
|
||||
* Synchronously determine whether a path is executable on the
|
||||
* current platform.
|
||||
*/
|
||||
export declare const sync: (path: string, options?: import("./options.js").IsexeOptions) => boolean;
|
||||
//# sourceMappingURL=index.d.ts.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/index.d.ts.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/index.d.ts.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,YAAY,CAAA;AACnC,OAAO,KAAK,KAAK,MAAM,YAAY,CAAA;AACnC,cAAc,cAAc,CAAA;AAC5B,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,CAAA;AAKvB;;GAEG;AACH,eAAO,MAAM,KAAK,mFAAa,CAAA;AAC/B;;;GAGG;AACH,eAAO,MAAM,IAAI,0EAAY,CAAA"}
|
46
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/index.js
generated
vendored
Normal file
46
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/index.js
generated
vendored
Normal file
|
@ -0,0 +1,46 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
||||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.sync = exports.isexe = exports.posix = exports.win32 = void 0;
|
||||
const posix = __importStar(require("./posix.js"));
|
||||
exports.posix = posix;
|
||||
const win32 = __importStar(require("./win32.js"));
|
||||
exports.win32 = win32;
|
||||
__exportStar(require("./options.js"), exports);
|
||||
const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform;
|
||||
const impl = platform === 'win32' ? win32 : posix;
|
||||
/**
|
||||
* Determine whether a path is executable on the current platform.
|
||||
*/
|
||||
exports.isexe = impl.isexe;
|
||||
/**
|
||||
* Synchronously determine whether a path is executable on the
|
||||
* current platform.
|
||||
*/
|
||||
exports.sync = impl.sync;
|
||||
//# sourceMappingURL=index.js.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/index.js.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/index.js.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,kDAAmC;AAGnB,sBAAK;AAFrB,kDAAmC;AAE1B,sBAAK;AADd,+CAA4B;AAG5B,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,qBAAqB,IAAI,OAAO,CAAC,QAAQ,CAAA;AACtE,MAAM,IAAI,GAAG,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAA;AAEjD;;GAEG;AACU,QAAA,KAAK,GAAG,IAAI,CAAC,KAAK,CAAA;AAC/B;;;GAGG;AACU,QAAA,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA","sourcesContent":["import * as posix from './posix.js'\nimport * as win32 from './win32.js'\nexport * from './options.js'\nexport { win32, posix }\n\nconst platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform\nconst impl = platform === 'win32' ? win32 : posix\n\n/**\n * Determine whether a path is executable on the current platform.\n */\nexport const isexe = impl.isexe\n/**\n * Synchronously determine whether a path is executable on the\n * current platform.\n */\nexport const sync = impl.sync\n"]}
|
32
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/options.d.ts
generated
vendored
Normal file
32
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/options.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,32 @@
|
|||
export interface IsexeOptions {
|
||||
/**
|
||||
* Ignore errors arising from attempting to get file access status
|
||||
* Note that EACCES is always ignored, because that just means
|
||||
* it's not executable. If this is not set, then attempting to check
|
||||
* the executable-ness of a nonexistent file will raise ENOENT, for
|
||||
* example.
|
||||
*/
|
||||
ignoreErrors?: boolean;
|
||||
/**
|
||||
* effective uid when checking executable mode flags on posix
|
||||
* Defaults to process.getuid()
|
||||
*/
|
||||
uid?: number;
|
||||
/**
|
||||
* effective gid when checking executable mode flags on posix
|
||||
* Defaults to process.getgid()
|
||||
*/
|
||||
gid?: number;
|
||||
/**
|
||||
* effective group ID list to use when checking executable mode flags
|
||||
* on posix
|
||||
* Defaults to process.getgroups()
|
||||
*/
|
||||
groups?: number[];
|
||||
/**
|
||||
* The ;-delimited path extension list for win32 implementation.
|
||||
* Defaults to process.env.PATHEXT
|
||||
*/
|
||||
pathExt?: string;
|
||||
}
|
||||
//# sourceMappingURL=options.d.ts.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/options.d.ts.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/options.d.ts.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"options.d.ts","sourceRoot":"","sources":["../../src/options.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,YAAY;IAC3B;;;;;;OAMG;IACH,YAAY,CAAC,EAAE,OAAO,CAAA;IAEtB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,CAAA;IAEZ;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,CAAA;IAEZ;;;;OAIG;IACH,MAAM,CAAC,EAAE,MAAM,EAAE,CAAA;IAEjB;;;OAGG;IACH,OAAO,CAAC,EAAE,MAAM,CAAA;CACjB"}
|
3
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/options.js
generated
vendored
Normal file
3
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/options.js
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
//# sourceMappingURL=options.js.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/options.js.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/options.js.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"options.js","sourceRoot":"","sources":["../../src/options.ts"],"names":[],"mappings":"","sourcesContent":["export interface IsexeOptions {\n /**\n * Ignore errors arising from attempting to get file access status\n * Note that EACCES is always ignored, because that just means\n * it's not executable. If this is not set, then attempting to check\n * the executable-ness of a nonexistent file will raise ENOENT, for\n * example.\n */\n ignoreErrors?: boolean\n\n /**\n * effective uid when checking executable mode flags on posix\n * Defaults to process.getuid()\n */\n uid?: number\n\n /**\n * effective gid when checking executable mode flags on posix\n * Defaults to process.getgid()\n */\n gid?: number\n\n /**\n * effective group ID list to use when checking executable mode flags\n * on posix\n * Defaults to process.getgroups()\n */\n groups?: number[]\n\n /**\n * The ;-delimited path extension list for win32 implementation.\n * Defaults to process.env.PATHEXT\n */\n pathExt?: string\n}\n"]}
|
3
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/package.json
generated
vendored
Normal file
3
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/package.json
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"type": "commonjs"
|
||||
}
|
18
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/posix.d.ts
generated
vendored
Normal file
18
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/posix.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,18 @@
|
|||
/**
|
||||
* This is the Posix implementation of isexe, which uses the file
|
||||
* mode and uid/gid values.
|
||||
*
|
||||
* @module
|
||||
*/
|
||||
import { IsexeOptions } from './options';
|
||||
/**
|
||||
* Determine whether a path is executable according to the mode and
|
||||
* current (or specified) user and group IDs.
|
||||
*/
|
||||
export declare const isexe: (path: string, options?: IsexeOptions) => Promise<boolean>;
|
||||
/**
|
||||
* Synchronously determine whether a path is executable according to
|
||||
* the mode and current (or specified) user and group IDs.
|
||||
*/
|
||||
export declare const sync: (path: string, options?: IsexeOptions) => boolean;
|
||||
//# sourceMappingURL=posix.d.ts.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/posix.d.ts.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/posix.d.ts.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"posix.d.ts","sourceRoot":"","sources":["../../src/posix.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAIH,OAAO,EAAE,YAAY,EAAE,MAAM,WAAW,CAAA;AAExC;;;GAGG;AACH,eAAO,MAAM,KAAK,SACV,MAAM,YACH,YAAY,KACpB,QAAQ,OAAO,CASjB,CAAA;AAED;;;GAGG;AACH,eAAO,MAAM,IAAI,SACT,MAAM,YACH,YAAY,KACpB,OASF,CAAA"}
|
67
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/posix.js
generated
vendored
Normal file
67
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/posix.js
generated
vendored
Normal file
|
@ -0,0 +1,67 @@
|
|||
"use strict";
|
||||
/**
|
||||
* This is the Posix implementation of isexe, which uses the file
|
||||
* mode and uid/gid values.
|
||||
*
|
||||
* @module
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.sync = exports.isexe = void 0;
|
||||
const fs_1 = require("fs");
|
||||
const promises_1 = require("fs/promises");
|
||||
/**
|
||||
* Determine whether a path is executable according to the mode and
|
||||
* current (or specified) user and group IDs.
|
||||
*/
|
||||
const isexe = async (path, options = {}) => {
|
||||
const { ignoreErrors = false } = options;
|
||||
try {
|
||||
return checkStat(await (0, promises_1.stat)(path), options);
|
||||
}
|
||||
catch (e) {
|
||||
const er = e;
|
||||
if (ignoreErrors || er.code === 'EACCES')
|
||||
return false;
|
||||
throw er;
|
||||
}
|
||||
};
|
||||
exports.isexe = isexe;
|
||||
/**
|
||||
* Synchronously determine whether a path is executable according to
|
||||
* the mode and current (or specified) user and group IDs.
|
||||
*/
|
||||
const sync = (path, options = {}) => {
|
||||
const { ignoreErrors = false } = options;
|
||||
try {
|
||||
return checkStat((0, fs_1.statSync)(path), options);
|
||||
}
|
||||
catch (e) {
|
||||
const er = e;
|
||||
if (ignoreErrors || er.code === 'EACCES')
|
||||
return false;
|
||||
throw er;
|
||||
}
|
||||
};
|
||||
exports.sync = sync;
|
||||
const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options);
|
||||
const checkMode = (stat, options) => {
|
||||
const myUid = options.uid ?? process.getuid?.();
|
||||
const myGroups = options.groups ?? process.getgroups?.() ?? [];
|
||||
const myGid = options.gid ?? process.getgid?.() ?? myGroups[0];
|
||||
if (myUid === undefined || myGid === undefined) {
|
||||
throw new Error('cannot get uid or gid');
|
||||
}
|
||||
const groups = new Set([myGid, ...myGroups]);
|
||||
const mod = stat.mode;
|
||||
const uid = stat.uid;
|
||||
const gid = stat.gid;
|
||||
const u = parseInt('100', 8);
|
||||
const g = parseInt('010', 8);
|
||||
const o = parseInt('001', 8);
|
||||
const ug = u | g;
|
||||
return !!(mod & o ||
|
||||
(mod & g && groups.has(gid)) ||
|
||||
(mod & u && uid === myUid) ||
|
||||
(mod & ug && myUid === 0));
|
||||
};
|
||||
//# sourceMappingURL=posix.js.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/posix.js.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/posix.js.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"posix.js","sourceRoot":"","sources":["../../src/posix.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;AAEH,2BAAoC;AACpC,0CAAkC;AAGlC;;;GAGG;AACI,MAAM,KAAK,GAAG,KAAK,EACxB,IAAY,EACZ,UAAwB,EAAE,EACR,EAAE;IACpB,MAAM,EAAE,YAAY,GAAG,KAAK,EAAE,GAAG,OAAO,CAAA;IACxC,IAAI;QACF,OAAO,SAAS,CAAC,MAAM,IAAA,eAAI,EAAC,IAAI,CAAC,EAAE,OAAO,CAAC,CAAA;KAC5C;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,EAAE,GAAG,CAA0B,CAAA;QACrC,IAAI,YAAY,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ;YAAE,OAAO,KAAK,CAAA;QACtD,MAAM,EAAE,CAAA;KACT;AACH,CAAC,CAAA;AAZY,QAAA,KAAK,SAYjB;AAED;;;GAGG;AACI,MAAM,IAAI,GAAG,CAClB,IAAY,EACZ,UAAwB,EAAE,EACjB,EAAE;IACX,MAAM,EAAE,YAAY,GAAG,KAAK,EAAE,GAAG,OAAO,CAAA;IACxC,IAAI;QACF,OAAO,SAAS,CAAC,IAAA,aAAQ,EAAC,IAAI,CAAC,EAAE,OAAO,CAAC,CAAA;KAC1C;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,EAAE,GAAG,CAA0B,CAAA;QACrC,IAAI,YAAY,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ;YAAE,OAAO,KAAK,CAAA;QACtD,MAAM,EAAE,CAAA;KACT;AACH,CAAC,CAAA;AAZY,QAAA,IAAI,QAYhB;AAED,MAAM,SAAS,GAAG,CAAC,IAAW,EAAE,OAAqB,EAAE,EAAE,CACvD,IAAI,CAAC,MAAM,EAAE,IAAI,SAAS,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;AAE3C,MAAM,SAAS,GAAG,CAAC,IAAW,EAAE,OAAqB,EAAE,EAAE;IACvD,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,MAAM,EAAE,EAAE,CAAA;IAC/C,MAAM,QAAQ,GAAG,OAAO,CAAC,MAAM,IAAI,OAAO,CAAC,SAAS,EAAE,EAAE,IAAI,EAAE,CAAA;IAC9D,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,MAAM,EAAE,EAAE,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAA;IAC9D,IAAI,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,EAAE;QAC9C,MAAM,IAAI,KAAK,CAAC,uBAAuB,CAAC,CAAA;KACzC;IAED,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC,CAAC,KAAK,EAAE,GAAG,QAAQ,CAAC,CAAC,CAAA;IAE5C,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAA;IACrB,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAA;IACpB,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAA;IAEpB,MAAM,CAAC,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAA;IAC5B,MAAM,CAAC,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAA;IAC5B,MAAM,CAAC,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAA;IAC5B,MAAM,EAAE,GAAG,CAAC,GAAG,CAAC,CAAA;IAEhB,OAAO,CAAC,CAAC,CACP,GAAG,GAAG,CAAC;QACP,CAAC,GAAG,GAAG,CAAC,IAAI,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QAC5B,CAAC,GAAG,GAAG,CAAC,IAAI,GAAG,KAAK,KAAK,CAAC;Q
AC1B,CAAC,GAAG,GAAG,EAAE,IAAI,KAAK,KAAK,CAAC,CAAC,CAC1B,CAAA;AACH,CAAC,CAAA","sourcesContent":["/**\n * This is the Posix implementation of isexe, which uses the file\n * mode and uid/gid values.\n *\n * @module\n */\n\nimport { Stats, statSync } from 'fs'\nimport { stat } from 'fs/promises'\nimport { IsexeOptions } from './options'\n\n/**\n * Determine whether a path is executable according to the mode and\n * current (or specified) user and group IDs.\n */\nexport const isexe = async (\n path: string,\n options: IsexeOptions = {}\n): Promise<boolean> => {\n const { ignoreErrors = false } = options\n try {\n return checkStat(await stat(path), options)\n } catch (e) {\n const er = e as NodeJS.ErrnoException\n if (ignoreErrors || er.code === 'EACCES') return false\n throw er\n }\n}\n\n/**\n * Synchronously determine whether a path is executable according to\n * the mode and current (or specified) user and group IDs.\n */\nexport const sync = (\n path: string,\n options: IsexeOptions = {}\n): boolean => {\n const { ignoreErrors = false } = options\n try {\n return checkStat(statSync(path), options)\n } catch (e) {\n const er = e as NodeJS.ErrnoException\n if (ignoreErrors || er.code === 'EACCES') return false\n throw er\n }\n}\n\nconst checkStat = (stat: Stats, options: IsexeOptions) =>\n stat.isFile() && checkMode(stat, options)\n\nconst checkMode = (stat: Stats, options: IsexeOptions) => {\n const myUid = options.uid ?? process.getuid?.()\n const myGroups = options.groups ?? process.getgroups?.() ?? []\n const myGid = options.gid ?? process.getgid?.() ?? 
myGroups[0]\n if (myUid === undefined || myGid === undefined) {\n throw new Error('cannot get uid or gid')\n }\n\n const groups = new Set([myGid, ...myGroups])\n\n const mod = stat.mode\n const uid = stat.uid\n const gid = stat.gid\n\n const u = parseInt('100', 8)\n const g = parseInt('010', 8)\n const o = parseInt('001', 8)\n const ug = u | g\n\n return !!(\n mod & o ||\n (mod & g && groups.has(gid)) ||\n (mod & u && uid === myUid) ||\n (mod & ug && myUid === 0)\n )\n}\n"]}
|
18
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/win32.d.ts
generated
vendored
Normal file
18
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/win32.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,18 @@
|
|||
/**
|
||||
* This is the Windows implementation of isexe, which uses the file
|
||||
* extension and PATHEXT setting.
|
||||
*
|
||||
* @module
|
||||
*/
|
||||
import { IsexeOptions } from './options';
|
||||
/**
|
||||
* Determine whether a path is executable based on the file extension
|
||||
* and PATHEXT environment variable (or specified pathExt option)
|
||||
*/
|
||||
export declare const isexe: (path: string, options?: IsexeOptions) => Promise<boolean>;
|
||||
/**
|
||||
* Synchronously determine whether a path is executable based on the file
|
||||
* extension and PATHEXT environment variable (or specified pathExt option)
|
||||
*/
|
||||
export declare const sync: (path: string, options?: IsexeOptions) => boolean;
|
||||
//# sourceMappingURL=win32.d.ts.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/win32.d.ts.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/win32.d.ts.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"win32.d.ts","sourceRoot":"","sources":["../../src/win32.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAIH,OAAO,EAAE,YAAY,EAAE,MAAM,WAAW,CAAA;AAExC;;;GAGG;AACH,eAAO,MAAM,KAAK,SACV,MAAM,YACH,YAAY,KACpB,QAAQ,OAAO,CASjB,CAAA;AAED;;;GAGG;AACH,eAAO,MAAM,IAAI,SACT,MAAM,YACH,YAAY,KACpB,OASF,CAAA"}
|
62
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/win32.js
generated
vendored
Normal file
62
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/win32.js
generated
vendored
Normal file
|
@ -0,0 +1,62 @@
|
|||
"use strict";
|
||||
/**
|
||||
* This is the Windows implementation of isexe, which uses the file
|
||||
* extension and PATHEXT setting.
|
||||
*
|
||||
* @module
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.sync = exports.isexe = void 0;
|
||||
const fs_1 = require("fs");
|
||||
const promises_1 = require("fs/promises");
|
||||
/**
|
||||
* Determine whether a path is executable based on the file extension
|
||||
* and PATHEXT environment variable (or specified pathExt option)
|
||||
*/
|
||||
const isexe = async (path, options = {}) => {
|
||||
const { ignoreErrors = false } = options;
|
||||
try {
|
||||
return checkStat(await (0, promises_1.stat)(path), path, options);
|
||||
}
|
||||
catch (e) {
|
||||
const er = e;
|
||||
if (ignoreErrors || er.code === 'EACCES')
|
||||
return false;
|
||||
throw er;
|
||||
}
|
||||
};
|
||||
exports.isexe = isexe;
|
||||
/**
|
||||
* Synchronously determine whether a path is executable based on the file
|
||||
* extension and PATHEXT environment variable (or specified pathExt option)
|
||||
*/
|
||||
const sync = (path, options = {}) => {
|
||||
const { ignoreErrors = false } = options;
|
||||
try {
|
||||
return checkStat((0, fs_1.statSync)(path), path, options);
|
||||
}
|
||||
catch (e) {
|
||||
const er = e;
|
||||
if (ignoreErrors || er.code === 'EACCES')
|
||||
return false;
|
||||
throw er;
|
||||
}
|
||||
};
|
||||
exports.sync = sync;
|
||||
const checkPathExt = (path, options) => {
|
||||
const { pathExt = process.env.PATHEXT || '' } = options;
|
||||
const peSplit = pathExt.split(';');
|
||||
if (peSplit.indexOf('') !== -1) {
|
||||
return true;
|
||||
}
|
||||
for (let i = 0; i < peSplit.length; i++) {
|
||||
const p = peSplit[i].toLowerCase();
|
||||
const ext = path.substring(path.length - p.length).toLowerCase();
|
||||
if (p && ext === p) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options);
|
||||
//# sourceMappingURL=win32.js.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/win32.js.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/cjs/win32.js.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"win32.js","sourceRoot":"","sources":["../../src/win32.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;AAEH,2BAAoC;AACpC,0CAAkC;AAGlC;;;GAGG;AACI,MAAM,KAAK,GAAG,KAAK,EACxB,IAAY,EACZ,UAAwB,EAAE,EACR,EAAE;IACpB,MAAM,EAAE,YAAY,GAAG,KAAK,EAAE,GAAG,OAAO,CAAA;IACxC,IAAI;QACF,OAAO,SAAS,CAAC,MAAM,IAAA,eAAI,EAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,CAAA;KAClD;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,EAAE,GAAG,CAA0B,CAAA;QACrC,IAAI,YAAY,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ;YAAE,OAAO,KAAK,CAAA;QACtD,MAAM,EAAE,CAAA;KACT;AACH,CAAC,CAAA;AAZY,QAAA,KAAK,SAYjB;AAED;;;GAGG;AACI,MAAM,IAAI,GAAG,CAClB,IAAY,EACZ,UAAwB,EAAE,EACjB,EAAE;IACX,MAAM,EAAE,YAAY,GAAG,KAAK,EAAE,GAAG,OAAO,CAAA;IACxC,IAAI;QACF,OAAO,SAAS,CAAC,IAAA,aAAQ,EAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,CAAA;KAChD;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,EAAE,GAAG,CAA0B,CAAA;QACrC,IAAI,YAAY,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ;YAAE,OAAO,KAAK,CAAA;QACtD,MAAM,EAAE,CAAA;KACT;AACH,CAAC,CAAA;AAZY,QAAA,IAAI,QAYhB;AAED,MAAM,YAAY,GAAG,CAAC,IAAY,EAAE,OAAqB,EAAE,EAAE;IAC3D,MAAM,EAAE,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,OAAO,IAAI,EAAE,EAAE,GAAG,OAAO,CAAA;IACvD,MAAM,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;IAClC,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;QAC9B,OAAO,IAAI,CAAA;KACZ;IAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACvC,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,CAAA;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,WAAW,EAAE,CAAA;QAEhE,IAAI,CAAC,IAAI,GAAG,KAAK,CAAC,EAAE;YAClB,OAAO,IAAI,CAAA;SACZ;KACF;IACD,OAAO,KAAK,CAAA;AACd,CAAC,CAAA;AAED,MAAM,SAAS,GAAG,CAAC,IAAW,EAAE,IAAY,EAAE,OAAqB,EAAE,EAAE,CACrE,IAAI,CAAC,MAAM,EAAE,IAAI,YAAY,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA","sourcesContent":["/**\n * This is the Windows implementation of isexe, which uses the file\n * extension and PATHEXT setting.\n *\n * @module\n */\n\nimport { Stats, statSync } from 'fs'\nimport { stat } from 'fs/promises'\nimport { IsexeOptions } from './options'\n\n/**\n * Determine whether a path is 
executable based on the file extension\n * and PATHEXT environment variable (or specified pathExt option)\n */\nexport const isexe = async (\n path: string,\n options: IsexeOptions = {}\n): Promise<boolean> => {\n const { ignoreErrors = false } = options\n try {\n return checkStat(await stat(path), path, options)\n } catch (e) {\n const er = e as NodeJS.ErrnoException\n if (ignoreErrors || er.code === 'EACCES') return false\n throw er\n }\n}\n\n/**\n * Synchronously determine whether a path is executable based on the file\n * extension and PATHEXT environment variable (or specified pathExt option)\n */\nexport const sync = (\n path: string,\n options: IsexeOptions = {}\n): boolean => {\n const { ignoreErrors = false } = options\n try {\n return checkStat(statSync(path), path, options)\n } catch (e) {\n const er = e as NodeJS.ErrnoException\n if (ignoreErrors || er.code === 'EACCES') return false\n throw er\n }\n}\n\nconst checkPathExt = (path: string, options: IsexeOptions) => {\n const { pathExt = process.env.PATHEXT || '' } = options\n const peSplit = pathExt.split(';')\n if (peSplit.indexOf('') !== -1) {\n return true\n }\n\n for (let i = 0; i < peSplit.length; i++) {\n const p = peSplit[i].toLowerCase()\n const ext = path.substring(path.length - p.length).toLowerCase()\n\n if (p && ext === p) {\n return true\n }\n }\n return false\n}\n\nconst checkStat = (stat: Stats, path: string, options: IsexeOptions) =>\n stat.isFile() && checkPathExt(path, options)\n"]}
|
14
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/index.d.ts
generated
vendored
Normal file
14
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/index.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
import * as posix from './posix.js';
|
||||
import * as win32 from './win32.js';
|
||||
export * from './options.js';
|
||||
export { win32, posix };
|
||||
/**
|
||||
* Determine whether a path is executable on the current platform.
|
||||
*/
|
||||
export declare const isexe: (path: string, options?: import("./options.js").IsexeOptions) => Promise<boolean>;
|
||||
/**
|
||||
* Synchronously determine whether a path is executable on the
|
||||
* current platform.
|
||||
*/
|
||||
export declare const sync: (path: string, options?: import("./options.js").IsexeOptions) => boolean;
|
||||
//# sourceMappingURL=index.d.ts.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/index.d.ts.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/index.d.ts.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,YAAY,CAAA;AACnC,OAAO,KAAK,KAAK,MAAM,YAAY,CAAA;AACnC,cAAc,cAAc,CAAA;AAC5B,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,CAAA;AAKvB;;GAEG;AACH,eAAO,MAAM,KAAK,mFAAa,CAAA;AAC/B;;;GAGG;AACH,eAAO,MAAM,IAAI,0EAAY,CAAA"}
|
16
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/index.js
generated
vendored
Normal file
16
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/index.js
generated
vendored
Normal file
|
@ -0,0 +1,16 @@
|
|||
import * as posix from './posix.js';
|
||||
import * as win32 from './win32.js';
|
||||
export * from './options.js';
|
||||
export { win32, posix };
|
||||
const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform;
|
||||
const impl = platform === 'win32' ? win32 : posix;
|
||||
/**
|
||||
* Determine whether a path is executable on the current platform.
|
||||
*/
|
||||
export const isexe = impl.isexe;
|
||||
/**
|
||||
* Synchronously determine whether a path is executable on the
|
||||
* current platform.
|
||||
*/
|
||||
export const sync = impl.sync;
|
||||
//# sourceMappingURL=index.js.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/index.js.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/index.js.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,YAAY,CAAA;AACnC,OAAO,KAAK,KAAK,MAAM,YAAY,CAAA;AACnC,cAAc,cAAc,CAAA;AAC5B,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,CAAA;AAEvB,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,qBAAqB,IAAI,OAAO,CAAC,QAAQ,CAAA;AACtE,MAAM,IAAI,GAAG,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAA;AAEjD;;GAEG;AACH,MAAM,CAAC,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAA;AAC/B;;;GAGG;AACH,MAAM,CAAC,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA","sourcesContent":["import * as posix from './posix.js'\nimport * as win32 from './win32.js'\nexport * from './options.js'\nexport { win32, posix }\n\nconst platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform\nconst impl = platform === 'win32' ? win32 : posix\n\n/**\n * Determine whether a path is executable on the current platform.\n */\nexport const isexe = impl.isexe\n/**\n * Synchronously determine whether a path is executable on the\n * current platform.\n */\nexport const sync = impl.sync\n"]}
|
32
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/options.d.ts
generated
vendored
Normal file
32
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/options.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,32 @@
|
|||
export interface IsexeOptions {
|
||||
/**
|
||||
* Ignore errors arising from attempting to get file access status
|
||||
* Note that EACCES is always ignored, because that just means
|
||||
* it's not executable. If this is not set, then attempting to check
|
||||
* the executable-ness of a nonexistent file will raise ENOENT, for
|
||||
* example.
|
||||
*/
|
||||
ignoreErrors?: boolean;
|
||||
/**
|
||||
* effective uid when checking executable mode flags on posix
|
||||
* Defaults to process.getuid()
|
||||
*/
|
||||
uid?: number;
|
||||
/**
|
||||
* effective gid when checking executable mode flags on posix
|
||||
* Defaults to process.getgid()
|
||||
*/
|
||||
gid?: number;
|
||||
/**
|
||||
* effective group ID list to use when checking executable mode flags
|
||||
* on posix
|
||||
* Defaults to process.getgroups()
|
||||
*/
|
||||
groups?: number[];
|
||||
/**
|
||||
* The ;-delimited path extension list for win32 implementation.
|
||||
* Defaults to process.env.PATHEXT
|
||||
*/
|
||||
pathExt?: string;
|
||||
}
|
||||
//# sourceMappingURL=options.d.ts.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/options.d.ts.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/options.d.ts.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"options.d.ts","sourceRoot":"","sources":["../../src/options.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,YAAY;IAC3B;;;;;;OAMG;IACH,YAAY,CAAC,EAAE,OAAO,CAAA;IAEtB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,CAAA;IAEZ;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,CAAA;IAEZ;;;;OAIG;IACH,MAAM,CAAC,EAAE,MAAM,EAAE,CAAA;IAEjB;;;OAGG;IACH,OAAO,CAAC,EAAE,MAAM,CAAA;CACjB"}
|
2
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/options.js
generated
vendored
Normal file
2
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/options.js
generated
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
export {};
|
||||
//# sourceMappingURL=options.js.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/options.js.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/options.js.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"options.js","sourceRoot":"","sources":["../../src/options.ts"],"names":[],"mappings":"","sourcesContent":["export interface IsexeOptions {\n /**\n * Ignore errors arising from attempting to get file access status\n * Note that EACCES is always ignored, because that just means\n * it's not executable. If this is not set, then attempting to check\n * the executable-ness of a nonexistent file will raise ENOENT, for\n * example.\n */\n ignoreErrors?: boolean\n\n /**\n * effective uid when checking executable mode flags on posix\n * Defaults to process.getuid()\n */\n uid?: number\n\n /**\n * effective gid when checking executable mode flags on posix\n * Defaults to process.getgid()\n */\n gid?: number\n\n /**\n * effective group ID list to use when checking executable mode flags\n * on posix\n * Defaults to process.getgroups()\n */\n groups?: number[]\n\n /**\n * The ;-delimited path extension list for win32 implementation.\n * Defaults to process.env.PATHEXT\n */\n pathExt?: string\n}\n"]}
|
3
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/package.json
generated
vendored
Normal file
3
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/package.json
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"type": "module"
|
||||
}
|
18
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/posix.d.ts
generated
vendored
Normal file
18
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/posix.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,18 @@
|
|||
/**
|
||||
* This is the Posix implementation of isexe, which uses the file
|
||||
* mode and uid/gid values.
|
||||
*
|
||||
* @module
|
||||
*/
|
||||
import { IsexeOptions } from './options';
|
||||
/**
|
||||
* Determine whether a path is executable according to the mode and
|
||||
* current (or specified) user and group IDs.
|
||||
*/
|
||||
export declare const isexe: (path: string, options?: IsexeOptions) => Promise<boolean>;
|
||||
/**
|
||||
* Synchronously determine whether a path is executable according to
|
||||
* the mode and current (or specified) user and group IDs.
|
||||
*/
|
||||
export declare const sync: (path: string, options?: IsexeOptions) => boolean;
|
||||
//# sourceMappingURL=posix.d.ts.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/posix.d.ts.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/posix.d.ts.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"posix.d.ts","sourceRoot":"","sources":["../../src/posix.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAIH,OAAO,EAAE,YAAY,EAAE,MAAM,WAAW,CAAA;AAExC;;;GAGG;AACH,eAAO,MAAM,KAAK,SACV,MAAM,YACH,YAAY,KACpB,QAAQ,OAAO,CASjB,CAAA;AAED;;;GAGG;AACH,eAAO,MAAM,IAAI,SACT,MAAM,YACH,YAAY,KACpB,OASF,CAAA"}
|
62
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/posix.js
generated
vendored
Normal file
62
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/posix.js
generated
vendored
Normal file
|
@ -0,0 +1,62 @@
|
|||
/**
|
||||
* This is the Posix implementation of isexe, which uses the file
|
||||
* mode and uid/gid values.
|
||||
*
|
||||
* @module
|
||||
*/
|
||||
import { statSync } from 'fs';
|
||||
import { stat } from 'fs/promises';
|
||||
/**
|
||||
* Determine whether a path is executable according to the mode and
|
||||
* current (or specified) user and group IDs.
|
||||
*/
|
||||
export const isexe = async (path, options = {}) => {
|
||||
const { ignoreErrors = false } = options;
|
||||
try {
|
||||
return checkStat(await stat(path), options);
|
||||
}
|
||||
catch (e) {
|
||||
const er = e;
|
||||
if (ignoreErrors || er.code === 'EACCES')
|
||||
return false;
|
||||
throw er;
|
||||
}
|
||||
};
|
||||
/**
|
||||
* Synchronously determine whether a path is executable according to
|
||||
* the mode and current (or specified) user and group IDs.
|
||||
*/
|
||||
export const sync = (path, options = {}) => {
|
||||
const { ignoreErrors = false } = options;
|
||||
try {
|
||||
return checkStat(statSync(path), options);
|
||||
}
|
||||
catch (e) {
|
||||
const er = e;
|
||||
if (ignoreErrors || er.code === 'EACCES')
|
||||
return false;
|
||||
throw er;
|
||||
}
|
||||
};
|
||||
const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options);
|
||||
const checkMode = (stat, options) => {
|
||||
const myUid = options.uid ?? process.getuid?.();
|
||||
const myGroups = options.groups ?? process.getgroups?.() ?? [];
|
||||
const myGid = options.gid ?? process.getgid?.() ?? myGroups[0];
|
||||
if (myUid === undefined || myGid === undefined) {
|
||||
throw new Error('cannot get uid or gid');
|
||||
}
|
||||
const groups = new Set([myGid, ...myGroups]);
|
||||
const mod = stat.mode;
|
||||
const uid = stat.uid;
|
||||
const gid = stat.gid;
|
||||
const u = parseInt('100', 8);
|
||||
const g = parseInt('010', 8);
|
||||
const o = parseInt('001', 8);
|
||||
const ug = u | g;
|
||||
return !!(mod & o ||
|
||||
(mod & g && groups.has(gid)) ||
|
||||
(mod & u && uid === myUid) ||
|
||||
(mod & ug && myUid === 0));
|
||||
};
|
||||
//# sourceMappingURL=posix.js.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/posix.js.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/posix.js.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"posix.js","sourceRoot":"","sources":["../../src/posix.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EAAS,QAAQ,EAAE,MAAM,IAAI,CAAA;AACpC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAGlC;;;GAGG;AACH,MAAM,CAAC,MAAM,KAAK,GAAG,KAAK,EACxB,IAAY,EACZ,UAAwB,EAAE,EACR,EAAE;IACpB,MAAM,EAAE,YAAY,GAAG,KAAK,EAAE,GAAG,OAAO,CAAA;IACxC,IAAI;QACF,OAAO,SAAS,CAAC,MAAM,IAAI,CAAC,IAAI,CAAC,EAAE,OAAO,CAAC,CAAA;KAC5C;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,EAAE,GAAG,CAA0B,CAAA;QACrC,IAAI,YAAY,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ;YAAE,OAAO,KAAK,CAAA;QACtD,MAAM,EAAE,CAAA;KACT;AACH,CAAC,CAAA;AAED;;;GAGG;AACH,MAAM,CAAC,MAAM,IAAI,GAAG,CAClB,IAAY,EACZ,UAAwB,EAAE,EACjB,EAAE;IACX,MAAM,EAAE,YAAY,GAAG,KAAK,EAAE,GAAG,OAAO,CAAA;IACxC,IAAI;QACF,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,OAAO,CAAC,CAAA;KAC1C;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,EAAE,GAAG,CAA0B,CAAA;QACrC,IAAI,YAAY,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ;YAAE,OAAO,KAAK,CAAA;QACtD,MAAM,EAAE,CAAA;KACT;AACH,CAAC,CAAA;AAED,MAAM,SAAS,GAAG,CAAC,IAAW,EAAE,OAAqB,EAAE,EAAE,CACvD,IAAI,CAAC,MAAM,EAAE,IAAI,SAAS,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;AAE3C,MAAM,SAAS,GAAG,CAAC,IAAW,EAAE,OAAqB,EAAE,EAAE;IACvD,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,MAAM,EAAE,EAAE,CAAA;IAC/C,MAAM,QAAQ,GAAG,OAAO,CAAC,MAAM,IAAI,OAAO,CAAC,SAAS,EAAE,EAAE,IAAI,EAAE,CAAA;IAC9D,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,MAAM,EAAE,EAAE,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAA;IAC9D,IAAI,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,EAAE;QAC9C,MAAM,IAAI,KAAK,CAAC,uBAAuB,CAAC,CAAA;KACzC;IAED,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC,CAAC,KAAK,EAAE,GAAG,QAAQ,CAAC,CAAC,CAAA;IAE5C,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAA;IACrB,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAA;IACpB,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAA;IAEpB,MAAM,CAAC,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAA;IAC5B,MAAM,CAAC,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAA;IAC5B,MAAM,CAAC,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAA;IAC5B,MAAM,EAAE,GAAG,CAAC,GAAG,CAAC,CAAA;IAEhB,OAAO,CAAC,CAAC,CACP,GAAG,GAAG,CAAC;QACP,CAAC,GAAG,GAAG,CAAC,IAAI,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QAC5B,CAAC,GAAG,GAAG,CAAC,IAAI,
GAAG,KAAK,KAAK,CAAC;QAC1B,CAAC,GAAG,GAAG,EAAE,IAAI,KAAK,KAAK,CAAC,CAAC,CAC1B,CAAA;AACH,CAAC,CAAA","sourcesContent":["/**\n * This is the Posix implementation of isexe, which uses the file\n * mode and uid/gid values.\n *\n * @module\n */\n\nimport { Stats, statSync } from 'fs'\nimport { stat } from 'fs/promises'\nimport { IsexeOptions } from './options'\n\n/**\n * Determine whether a path is executable according to the mode and\n * current (or specified) user and group IDs.\n */\nexport const isexe = async (\n path: string,\n options: IsexeOptions = {}\n): Promise<boolean> => {\n const { ignoreErrors = false } = options\n try {\n return checkStat(await stat(path), options)\n } catch (e) {\n const er = e as NodeJS.ErrnoException\n if (ignoreErrors || er.code === 'EACCES') return false\n throw er\n }\n}\n\n/**\n * Synchronously determine whether a path is executable according to\n * the mode and current (or specified) user and group IDs.\n */\nexport const sync = (\n path: string,\n options: IsexeOptions = {}\n): boolean => {\n const { ignoreErrors = false } = options\n try {\n return checkStat(statSync(path), options)\n } catch (e) {\n const er = e as NodeJS.ErrnoException\n if (ignoreErrors || er.code === 'EACCES') return false\n throw er\n }\n}\n\nconst checkStat = (stat: Stats, options: IsexeOptions) =>\n stat.isFile() && checkMode(stat, options)\n\nconst checkMode = (stat: Stats, options: IsexeOptions) => {\n const myUid = options.uid ?? process.getuid?.()\n const myGroups = options.groups ?? process.getgroups?.() ?? []\n const myGid = options.gid ?? process.getgid?.() ?? 
myGroups[0]\n if (myUid === undefined || myGid === undefined) {\n throw new Error('cannot get uid or gid')\n }\n\n const groups = new Set([myGid, ...myGroups])\n\n const mod = stat.mode\n const uid = stat.uid\n const gid = stat.gid\n\n const u = parseInt('100', 8)\n const g = parseInt('010', 8)\n const o = parseInt('001', 8)\n const ug = u | g\n\n return !!(\n mod & o ||\n (mod & g && groups.has(gid)) ||\n (mod & u && uid === myUid) ||\n (mod & ug && myUid === 0)\n )\n}\n"]}
|
18
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/win32.d.ts
generated
vendored
Normal file
18
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/win32.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,18 @@
|
|||
/**
|
||||
* This is the Windows implementation of isexe, which uses the file
|
||||
* extension and PATHEXT setting.
|
||||
*
|
||||
* @module
|
||||
*/
|
||||
import { IsexeOptions } from './options';
|
||||
/**
|
||||
* Determine whether a path is executable based on the file extension
|
||||
* and PATHEXT environment variable (or specified pathExt option)
|
||||
*/
|
||||
export declare const isexe: (path: string, options?: IsexeOptions) => Promise<boolean>;
|
||||
/**
|
||||
* Synchronously determine whether a path is executable based on the file
|
||||
* extension and PATHEXT environment variable (or specified pathExt option)
|
||||
*/
|
||||
export declare const sync: (path: string, options?: IsexeOptions) => boolean;
|
||||
//# sourceMappingURL=win32.d.ts.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/win32.d.ts.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/win32.d.ts.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"win32.d.ts","sourceRoot":"","sources":["../../src/win32.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAIH,OAAO,EAAE,YAAY,EAAE,MAAM,WAAW,CAAA;AAExC;;;GAGG;AACH,eAAO,MAAM,KAAK,SACV,MAAM,YACH,YAAY,KACpB,QAAQ,OAAO,CASjB,CAAA;AAED;;;GAGG;AACH,eAAO,MAAM,IAAI,SACT,MAAM,YACH,YAAY,KACpB,OASF,CAAA"}
|
57
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/win32.js
generated
vendored
Normal file
57
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/win32.js
generated
vendored
Normal file
|
@ -0,0 +1,57 @@
|
|||
/**
|
||||
* This is the Windows implementation of isexe, which uses the file
|
||||
* extension and PATHEXT setting.
|
||||
*
|
||||
* @module
|
||||
*/
|
||||
import { statSync } from 'fs';
|
||||
import { stat } from 'fs/promises';
|
||||
/**
|
||||
* Determine whether a path is executable based on the file extension
|
||||
* and PATHEXT environment variable (or specified pathExt option)
|
||||
*/
|
||||
export const isexe = async (path, options = {}) => {
|
||||
const { ignoreErrors = false } = options;
|
||||
try {
|
||||
return checkStat(await stat(path), path, options);
|
||||
}
|
||||
catch (e) {
|
||||
const er = e;
|
||||
if (ignoreErrors || er.code === 'EACCES')
|
||||
return false;
|
||||
throw er;
|
||||
}
|
||||
};
|
||||
/**
|
||||
* Synchronously determine whether a path is executable based on the file
|
||||
* extension and PATHEXT environment variable (or specified pathExt option)
|
||||
*/
|
||||
export const sync = (path, options = {}) => {
|
||||
const { ignoreErrors = false } = options;
|
||||
try {
|
||||
return checkStat(statSync(path), path, options);
|
||||
}
|
||||
catch (e) {
|
||||
const er = e;
|
||||
if (ignoreErrors || er.code === 'EACCES')
|
||||
return false;
|
||||
throw er;
|
||||
}
|
||||
};
|
||||
const checkPathExt = (path, options) => {
|
||||
const { pathExt = process.env.PATHEXT || '' } = options;
|
||||
const peSplit = pathExt.split(';');
|
||||
if (peSplit.indexOf('') !== -1) {
|
||||
return true;
|
||||
}
|
||||
for (let i = 0; i < peSplit.length; i++) {
|
||||
const p = peSplit[i].toLowerCase();
|
||||
const ext = path.substring(path.length - p.length).toLowerCase();
|
||||
if (p && ext === p) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options);
|
||||
//# sourceMappingURL=win32.js.map
|
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/win32.js.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/isexe/dist/mjs/win32.js.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"win32.js","sourceRoot":"","sources":["../../src/win32.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EAAS,QAAQ,EAAE,MAAM,IAAI,CAAA;AACpC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAGlC;;;GAGG;AACH,MAAM,CAAC,MAAM,KAAK,GAAG,KAAK,EACxB,IAAY,EACZ,UAAwB,EAAE,EACR,EAAE;IACpB,MAAM,EAAE,YAAY,GAAG,KAAK,EAAE,GAAG,OAAO,CAAA;IACxC,IAAI;QACF,OAAO,SAAS,CAAC,MAAM,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,CAAA;KAClD;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,EAAE,GAAG,CAA0B,CAAA;QACrC,IAAI,YAAY,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ;YAAE,OAAO,KAAK,CAAA;QACtD,MAAM,EAAE,CAAA;KACT;AACH,CAAC,CAAA;AAED;;;GAGG;AACH,MAAM,CAAC,MAAM,IAAI,GAAG,CAClB,IAAY,EACZ,UAAwB,EAAE,EACjB,EAAE;IACX,MAAM,EAAE,YAAY,GAAG,KAAK,EAAE,GAAG,OAAO,CAAA;IACxC,IAAI;QACF,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,CAAA;KAChD;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,EAAE,GAAG,CAA0B,CAAA;QACrC,IAAI,YAAY,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ;YAAE,OAAO,KAAK,CAAA;QACtD,MAAM,EAAE,CAAA;KACT;AACH,CAAC,CAAA;AAED,MAAM,YAAY,GAAG,CAAC,IAAY,EAAE,OAAqB,EAAE,EAAE;IAC3D,MAAM,EAAE,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,OAAO,IAAI,EAAE,EAAE,GAAG,OAAO,CAAA;IACvD,MAAM,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;IAClC,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;QAC9B,OAAO,IAAI,CAAA;KACZ;IAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACvC,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,CAAA;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,WAAW,EAAE,CAAA;QAEhE,IAAI,CAAC,IAAI,GAAG,KAAK,CAAC,EAAE;YAClB,OAAO,IAAI,CAAA;SACZ;KACF;IACD,OAAO,KAAK,CAAA;AACd,CAAC,CAAA;AAED,MAAM,SAAS,GAAG,CAAC,IAAW,EAAE,IAAY,EAAE,OAAqB,EAAE,EAAE,CACrE,IAAI,CAAC,MAAM,EAAE,IAAI,YAAY,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA","sourcesContent":["/**\n * This is the Windows implementation of isexe, which uses the file\n * extension and PATHEXT setting.\n *\n * @module\n */\n\nimport { Stats, statSync } from 'fs'\nimport { stat } from 'fs/promises'\nimport { IsexeOptions } from './options'\n\n/**\n * Determine 
whether a path is executable based on the file extension\n * and PATHEXT environment variable (or specified pathExt option)\n */\nexport const isexe = async (\n path: string,\n options: IsexeOptions = {}\n): Promise<boolean> => {\n const { ignoreErrors = false } = options\n try {\n return checkStat(await stat(path), path, options)\n } catch (e) {\n const er = e as NodeJS.ErrnoException\n if (ignoreErrors || er.code === 'EACCES') return false\n throw er\n }\n}\n\n/**\n * Synchronously determine whether a path is executable based on the file\n * extension and PATHEXT environment variable (or specified pathExt option)\n */\nexport const sync = (\n path: string,\n options: IsexeOptions = {}\n): boolean => {\n const { ignoreErrors = false } = options\n try {\n return checkStat(statSync(path), path, options)\n } catch (e) {\n const er = e as NodeJS.ErrnoException\n if (ignoreErrors || er.code === 'EACCES') return false\n throw er\n }\n}\n\nconst checkPathExt = (path: string, options: IsexeOptions) => {\n const { pathExt = process.env.PATHEXT || '' } = options\n const peSplit = pathExt.split(';')\n if (peSplit.indexOf('') !== -1) {\n return true\n }\n\n for (let i = 0; i < peSplit.length; i++) {\n const p = peSplit[i].toLowerCase()\n const ext = path.substring(path.length - p.length).toLowerCase()\n\n if (p && ext === p) {\n return true\n }\n }\n return false\n}\n\nconst checkStat = (stat: Stats, path: string, options: IsexeOptions) =>\n stat.isFile() && checkPathExt(path, options)\n"]}
|
96
node_modules/@npmcli/git/node_modules/isexe/package.json
generated
vendored
Normal file
96
node_modules/@npmcli/git/node_modules/isexe/package.json
generated
vendored
Normal file
|
@ -0,0 +1,96 @@
|
|||
{
|
||||
"name": "isexe",
|
||||
"version": "3.1.1",
|
||||
"description": "Minimal module to check if a file is executable.",
|
||||
"main": "./dist/cjs/index.js",
|
||||
"module": "./dist/mjs/index.js",
|
||||
"types": "./dist/cjs/index.js",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"exports": {
|
||||
".": {
|
||||
"import": {
|
||||
"types": "./dist/mjs/index.d.ts",
|
||||
"default": "./dist/mjs/index.js"
|
||||
},
|
||||
"require": {
|
||||
"types": "./dist/cjs/index.d.ts",
|
||||
"default": "./dist/cjs/index.js"
|
||||
}
|
||||
},
|
||||
"./posix": {
|
||||
"import": {
|
||||
"types": "./dist/mjs/posix.d.ts",
|
||||
"default": "./dist/mjs/posix.js"
|
||||
},
|
||||
"require": {
|
||||
"types": "./dist/cjs/posix.d.ts",
|
||||
"default": "./dist/cjs/posix.js"
|
||||
}
|
||||
},
|
||||
"./win32": {
|
||||
"import": {
|
||||
"types": "./dist/mjs/win32.d.ts",
|
||||
"default": "./dist/mjs/win32.js"
|
||||
},
|
||||
"require": {
|
||||
"types": "./dist/cjs/win32.d.ts",
|
||||
"default": "./dist/cjs/win32.js"
|
||||
}
|
||||
},
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^20.4.5",
|
||||
"@types/tap": "^15.0.8",
|
||||
"c8": "^8.0.1",
|
||||
"mkdirp": "^0.5.1",
|
||||
"prettier": "^2.8.8",
|
||||
"rimraf": "^2.5.0",
|
||||
"sync-content": "^1.0.2",
|
||||
"tap": "^16.3.8",
|
||||
"ts-node": "^10.9.1",
|
||||
"typedoc": "^0.24.8",
|
||||
"typescript": "^5.1.6"
|
||||
},
|
||||
"scripts": {
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"prepublishOnly": "git push origin --follow-tags",
|
||||
"prepare": "tsc -p tsconfig/cjs.json && tsc -p tsconfig/esm.json && bash ./scripts/fixup.sh",
|
||||
"pretest": "npm run prepare",
|
||||
"presnap": "npm run prepare",
|
||||
"test": "c8 tap",
|
||||
"snap": "c8 tap",
|
||||
"format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache",
|
||||
"typedoc": "typedoc --tsconfig tsconfig/esm.json ./src/*.ts"
|
||||
},
|
||||
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
|
||||
"license": "ISC",
|
||||
"tap": {
|
||||
"coverage": false,
|
||||
"node-arg": [
|
||||
"--enable-source-maps",
|
||||
"--no-warnings",
|
||||
"--loader",
|
||||
"ts-node/esm"
|
||||
],
|
||||
"ts": false
|
||||
},
|
||||
"prettier": {
|
||||
"semi": false,
|
||||
"printWidth": 75,
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"singleQuote": true,
|
||||
"jsxSingleQuote": false,
|
||||
"bracketSameLine": true,
|
||||
"arrowParens": "avoid",
|
||||
"endOfLine": "lf"
|
||||
},
|
||||
"repository": "https://github.com/isaacs/isexe",
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
}
|
||||
}
|
15
node_modules/@npmcli/git/node_modules/lru-cache/LICENSE
generated
vendored
Normal file
15
node_modules/@npmcli/git/node_modules/lru-cache/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,15 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
331
node_modules/@npmcli/git/node_modules/lru-cache/README.md
generated
vendored
Normal file
331
node_modules/@npmcli/git/node_modules/lru-cache/README.md
generated
vendored
Normal file
|
@ -0,0 +1,331 @@
|
|||
# lru-cache
|
||||
|
||||
A cache object that deletes the least-recently-used items.
|
||||
|
||||
Specify a max number of the most recently used items that you
|
||||
want to keep, and this cache will keep that many of the most
|
||||
recently accessed items.
|
||||
|
||||
This is not primarily a TTL cache, and does not make strong TTL
|
||||
guarantees. There is no preemptive pruning of expired items by
|
||||
default, but you _may_ set a TTL on the cache or on a single
|
||||
`set`. If you do so, it will treat expired items as missing, and
|
||||
delete them when fetched. If you are more interested in TTL
|
||||
caching than LRU caching, check out
|
||||
[@isaacs/ttlcache](http://npm.im/@isaacs/ttlcache).
|
||||
|
||||
As of version 7, this is one of the most performant LRU
|
||||
implementations available in JavaScript, and supports a wide
|
||||
diversity of use cases. However, note that using some of the
|
||||
features will necessarily impact performance, by causing the
|
||||
cache to have to do more work. See the "Performance" section
|
||||
below.
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
npm install lru-cache --save
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
// hybrid module, either works
|
||||
import { LRUCache } from 'lru-cache'
|
||||
// or:
|
||||
const { LRUCache } = require('lru-cache')
|
||||
// or in minified form for web browsers:
|
||||
import { LRUCache } from 'http://unpkg.com/lru-cache@9/dist/mjs/index.min.mjs'
|
||||
|
||||
// At least one of 'max', 'ttl', or 'maxSize' is required, to prevent
|
||||
// unsafe unbounded storage.
|
||||
//
|
||||
// In most cases, it's best to specify a max for performance, so all
|
||||
// the required memory allocation is done up-front.
|
||||
//
|
||||
// All the other options are optional, see the sections below for
|
||||
// documentation on what each one does. Most of them can be
|
||||
// overridden for specific items in get()/set()
|
||||
const options = {
|
||||
max: 500,
|
||||
|
||||
// for use with tracking overall storage size
|
||||
maxSize: 5000,
|
||||
sizeCalculation: (value, key) => {
|
||||
return 1
|
||||
},
|
||||
|
||||
// for use when you need to clean up something when objects
|
||||
// are evicted from the cache
|
||||
dispose: (value, key) => {
|
||||
freeFromMemoryOrWhatever(value)
|
||||
},
|
||||
|
||||
// how long to live in ms
|
||||
ttl: 1000 * 60 * 5,
|
||||
|
||||
// return stale items before removing from cache?
|
||||
allowStale: false,
|
||||
|
||||
updateAgeOnGet: false,
|
||||
updateAgeOnHas: false,
|
||||
|
||||
// async method to use for cache.fetch(), for
|
||||
// stale-while-revalidate type of behavior
|
||||
fetchMethod: async (
|
||||
key,
|
||||
staleValue,
|
||||
{ options, signal, context }
|
||||
) => {},
|
||||
}
|
||||
|
||||
const cache = new LRUCache(options)
|
||||
|
||||
cache.set('key', 'value')
|
||||
cache.get('key') // "value"
|
||||
|
||||
// non-string keys ARE fully supported
|
||||
// but note that it must be THE SAME object, not
|
||||
// just a JSON-equivalent object.
|
||||
var someObject = { a: 1 }
|
||||
cache.set(someObject, 'a value')
|
||||
// Object keys are not toString()-ed
|
||||
cache.set('[object Object]', 'a different value')
|
||||
assert.equal(cache.get(someObject), 'a value')
|
||||
// A similar object with same keys/values won't work,
|
||||
// because it's a different object identity
|
||||
assert.equal(cache.get({ a: 1 }), undefined)
|
||||
|
||||
cache.clear() // empty the cache
|
||||
```
|
||||
|
||||
If you put more stuff in the cache, then less recently used items
|
||||
will fall out. That's what an LRU cache is.
|
||||
|
||||
For full description of the API and all options, please see [the
|
||||
LRUCache typedocs](https://isaacs.github.io/node-lru-cache/)
|
||||
|
||||
## Storage Bounds Safety
|
||||
|
||||
This implementation aims to be as flexible as possible, within
|
||||
the limits of safe memory consumption and optimal performance.
|
||||
|
||||
At initial object creation, storage is allocated for `max` items.
|
||||
If `max` is set to zero, then some performance is lost, and item
|
||||
count is unbounded. Either `maxSize` or `ttl` _must_ be set if
|
||||
`max` is not specified.
|
||||
|
||||
If `maxSize` is set, then this creates a safe limit on the
|
||||
maximum storage consumed, but without the performance benefits of
|
||||
pre-allocation. When `maxSize` is set, every item _must_ provide
|
||||
a size, either via the `sizeCalculation` method provided to the
|
||||
constructor, or via a `size` or `sizeCalculation` option provided
|
||||
to `cache.set()`. The size of every item _must_ be a positive
|
||||
integer.
|
||||
|
||||
If neither `max` nor `maxSize` are set, then `ttl` tracking must
|
||||
be enabled. Note that, even when tracking item `ttl`, items are
|
||||
_not_ preemptively deleted when they become stale, unless
|
||||
`ttlAutopurge` is enabled. Instead, they are only purged the
|
||||
next time the key is requested. Thus, if `ttlAutopurge`, `max`,
|
||||
and `maxSize` are all not set, then the cache will potentially
|
||||
grow unbounded.
|
||||
|
||||
In this case, a warning is printed to standard error. Future
|
||||
versions may require the use of `ttlAutopurge` if `max` and
|
||||
`maxSize` are not specified.
|
||||
|
||||
If you truly wish to use a cache that is bound _only_ by TTL
|
||||
expiration, consider using a `Map` object, and calling
|
||||
`setTimeout` to delete entries when they expire. It will perform
|
||||
much better than an LRU cache.
|
||||
|
||||
Here is an implementation you may use, under the same
|
||||
[license](./LICENSE) as this package:
|
||||
|
||||
```js
|
||||
// a storage-unbounded ttl cache that is not an lru-cache
|
||||
const cache = {
|
||||
data: new Map(),
|
||||
timers: new Map(),
|
||||
set: (k, v, ttl) => {
|
||||
if (cache.timers.has(k)) {
|
||||
clearTimeout(cache.timers.get(k))
|
||||
}
|
||||
cache.timers.set(
|
||||
k,
|
||||
setTimeout(() => cache.delete(k), ttl)
|
||||
)
|
||||
cache.data.set(k, v)
|
||||
},
|
||||
get: k => cache.data.get(k),
|
||||
has: k => cache.data.has(k),
|
||||
delete: k => {
|
||||
if (cache.timers.has(k)) {
|
||||
clearTimeout(cache.timers.get(k))
|
||||
}
|
||||
cache.timers.delete(k)
|
||||
return cache.data.delete(k)
|
||||
},
|
||||
clear: () => {
|
||||
cache.data.clear()
|
||||
for (const v of cache.timers.values()) {
|
||||
clearTimeout(v)
|
||||
}
|
||||
cache.timers.clear()
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
If that isn't to your liking, check out
|
||||
[@isaacs/ttlcache](http://npm.im/@isaacs/ttlcache).
|
||||
|
||||
## Storing Undefined Values
|
||||
|
||||
This cache never stores undefined values, as `undefined` is used
|
||||
internally in a few places to indicate that a key is not in the
|
||||
cache.
|
||||
|
||||
You may call `cache.set(key, undefined)`, but this is just
|
||||
an alias for `cache.delete(key)`. Note that this has the effect
|
||||
that `cache.has(key)` will return _false_ after setting it to
|
||||
undefined.
|
||||
|
||||
```js
|
||||
cache.set(myKey, undefined)
|
||||
cache.has(myKey) // false!
|
||||
```
|
||||
|
||||
If you need to track `undefined` values, and still note that the
|
||||
key is in the cache, an easy workaround is to use a sigil object
|
||||
of your own.
|
||||
|
||||
```js
|
||||
import { LRUCache } from 'lru-cache'
|
||||
const undefinedValue = Symbol('undefined')
|
||||
const cache = new LRUCache(...)
|
||||
const mySet = (key, value) =>
|
||||
cache.set(key, value === undefined ? undefinedValue : value)
|
||||
const myGet = (key, value) => {
|
||||
const v = cache.get(key)
|
||||
return v === undefinedValue ? undefined : v
|
||||
}
|
||||
```
|
||||
|
||||
## Performance
|
||||
|
||||
As of January 2022, version 7 of this library is one of the most
|
||||
performant LRU cache implementations in JavaScript.
|
||||
|
||||
Benchmarks can be extremely difficult to get right. In
|
||||
particular, the performance of set/get/delete operations on
|
||||
objects will vary _wildly_ depending on the type of key used. V8
|
||||
is highly optimized for objects with keys that are short strings,
|
||||
especially integer numeric strings. Thus any benchmark which
|
||||
tests _solely_ using numbers as keys will tend to find that an
|
||||
object-based approach performs the best.
|
||||
|
||||
Note that coercing _anything_ to strings to use as object keys is
|
||||
unsafe, unless you can be 100% certain that no other type of
|
||||
value will be used. For example:
|
||||
|
||||
```js
|
||||
const myCache = {}
|
||||
const set = (k, v) => (myCache[k] = v)
|
||||
const get = k => myCache[k]
|
||||
|
||||
set({}, 'please hang onto this for me')
|
||||
set('[object Object]', 'oopsie')
|
||||
```
|
||||
|
||||
Also beware of "Just So" stories regarding performance. Garbage
|
||||
collection of large (especially: deep) object graphs can be
|
||||
incredibly costly, with several "tipping points" where it
|
||||
increases exponentially. As a result, putting that off until
|
||||
later can make it much worse, and less predictable. If a library
|
||||
performs well, but only in a scenario where the object graph is
|
||||
kept shallow, then that won't help you if you are using large
|
||||
objects as keys.
|
||||
|
||||
In general, when attempting to use a library to improve
|
||||
performance (such as a cache like this one), it's best to choose
|
||||
an option that will perform well in the sorts of scenarios where
|
||||
you'll actually use it.
|
||||
|
||||
This library is optimized for repeated gets and minimizing
|
||||
eviction time, since that is the expected need of a LRU. Set
|
||||
operations are somewhat slower on average than a few other
|
||||
options, in part because of that optimization. It is assumed
|
||||
that you'll be caching some costly operation, ideally as rarely
|
||||
as possible, so optimizing set over get would be unwise.
|
||||
|
||||
If performance matters to you:
|
||||
|
||||
1. If it's at all possible to use small integer values as keys,
|
||||
and you can guarantee that no other types of values will be
|
||||
used as keys, then do that, and use a cache such as
|
||||
[lru-fast](https://npmjs.com/package/lru-fast), or
|
||||
[mnemonist's
|
||||
LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache)
|
||||
which uses an Object as its data store.
|
||||
|
||||
2. Failing that, if at all possible, use short non-numeric
|
||||
strings (ie, less than 256 characters) as your keys, and use
|
||||
[mnemonist's
|
||||
LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache).
|
||||
|
||||
3. If the types of your keys will be anything else, especially
|
||||
long strings, strings that look like floats, objects, or some
|
||||
mix of types, or if you aren't sure, then this library will
|
||||
work well for you.
|
||||
|
||||
If you do not need the features that this library provides
|
||||
(like asynchronous fetching, a variety of TTL staleness
|
||||
options, and so on), then [mnemonist's
|
||||
LRUMap](https://yomguithereal.github.io/mnemonist/lru-map) is
|
||||
a very good option, and just slightly faster than this module
|
||||
(since it does considerably less).
|
||||
|
||||
4. Do not use a `dispose` function, size tracking, or especially
|
||||
ttl behavior, unless absolutely needed. These features are
|
||||
convenient, and necessary in some use cases, and every attempt
|
||||
has been made to make the performance impact minimal, but it
|
||||
isn't nothing.
|
||||
|
||||
## Breaking Changes in Version 7
|
||||
|
||||
This library changed to a different algorithm and internal data
|
||||
structure in version 7, yielding significantly better
|
||||
performance, albeit with some subtle changes as a result.
|
||||
|
||||
If you were relying on the internals of LRUCache in version 6 or
|
||||
before, it probably will not work in version 7 and above.
|
||||
|
||||
## Breaking Changes in Version 8
|
||||
|
||||
- The `fetchContext` option was renamed to `context`, and may no
|
||||
longer be set on the cache instance itself.
|
||||
- Rewritten in TypeScript, so pretty much all the types moved
|
||||
around a lot.
|
||||
- The AbortController/AbortSignal polyfill was removed. For this
|
||||
reason, **Node version 16.14.0 or higher is now required**.
|
||||
- Internal properties were moved to actual private class
|
||||
properties.
|
||||
- Keys and values must not be `null` or `undefined`.
|
||||
- Minified export available at `'lru-cache/min'`, for both CJS
|
||||
and MJS builds.
|
||||
|
||||
## Breaking Changes in Version 9
|
||||
|
||||
- Named export only, no default export.
|
||||
- AbortController polyfill returned, albeit with a warning when
|
||||
used.
|
||||
|
||||
## Breaking Changes in Version 10
|
||||
|
||||
- `cache.fetch()` return type is now `Promise<V | undefined>`
|
||||
instead of `Promise<V | void>`. This is an irrelevant change
|
||||
practically speaking, but can require changes for TypeScript
|
||||
users.
|
||||
|
||||
For more info, see the [change log](CHANGELOG.md).
|
1277
node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.d.ts
generated
vendored
Normal file
1277
node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
1
node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.d.ts.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.d.ts.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1546
node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.js
generated
vendored
Normal file
1546
node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
1
node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.js.map
generated
vendored
Normal file
1
node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.min.js
generated
vendored
Normal file
2
node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.min.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
7
node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.min.js.map
generated
vendored
Normal file
7
node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.min.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/package.json
generated
vendored
Normal file
3
node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/package.json
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"type": "commonjs"
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue