Updated the vendored node_modules files for my-app.

Batuhan Berk Başoğlu 2024-02-08 19:38:41 -05:00
parent 1553e6b971
commit 753967d4f5
23418 changed files with 3784666 additions and 0 deletions

my-app/node_modules/make-fetch-happen/lib/cache/entry.js generated vendored Executable file

@@ -0,0 +1,469 @@
const { Request, Response } = require('minipass-fetch')
const { Minipass } = require('minipass')
const MinipassFlush = require('minipass-flush')
const cacache = require('cacache')
const url = require('url')
const CachingMinipassPipeline = require('../pipeline.js')
const CachePolicy = require('./policy.js')
const cacheKey = require('./key.js')
const remote = require('../remote.js')
const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
// allow list for request headers that will be written to the cache index
// note: we will also store any request headers
// that are named in a response's vary header
const KEEP_REQUEST_HEADERS = [
'accept-charset',
'accept-encoding',
'accept-language',
'accept',
'cache-control',
]
// allow list for response headers that will be written to the cache index
// note: we must not store the real response's age header, or when we load
// a cache policy based on the metadata it will think the cached response
// is always stale
const KEEP_RESPONSE_HEADERS = [
'cache-control',
'content-encoding',
'content-language',
'content-type',
'date',
'etag',
'expires',
'last-modified',
'link',
'location',
'pragma',
'vary',
]
// return an object containing all metadata to be written to the index
const getMetadata = (request, response, options) => {
const metadata = {
time: Date.now(),
url: request.url,
reqHeaders: {},
resHeaders: {},
// options on which we must match the request and vary the response
options: {
compress: options.compress != null ? options.compress : request.compress,
},
}
// only save the status if it's not a 200 or 304
if (response.status !== 200 && response.status !== 304) {
metadata.status = response.status
}
for (const name of KEEP_REQUEST_HEADERS) {
if (request.headers.has(name)) {
metadata.reqHeaders[name] = request.headers.get(name)
}
}
// if the request's host header differs from the host in the url
// we need to keep it, otherwise it's just noise and we ignore it
const host = request.headers.get('host')
const parsedUrl = new url.URL(request.url)
if (host && parsedUrl.host !== host) {
metadata.reqHeaders.host = host
}
// if the response has a vary header, make sure
// we store the relevant request headers too
if (response.headers.has('vary')) {
const vary = response.headers.get('vary')
// a vary of "*" means every header causes a different response.
// in that scenario, we do not include any additional headers
// as the freshness check will always fail anyway and we don't
// want to bloat the cache indexes
if (vary !== '*') {
// copy any other request headers that will vary the response
const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
for (const name of varyHeaders) {
if (request.headers.has(name)) {
metadata.reqHeaders[name] = request.headers.get(name)
}
}
}
}
for (const name of KEEP_RESPONSE_HEADERS) {
if (response.headers.has(name)) {
metadata.resHeaders[name] = response.headers.get(name)
}
}
for (const name of options.cacheAdditionalHeaders) {
if (response.headers.has(name)) {
metadata.resHeaders[name] = response.headers.get(name)
}
}
return metadata
}
// symbols used to hide objects that may be lazily evaluated in a getter
const _request = Symbol('request')
const _response = Symbol('response')
const _policy = Symbol('policy')
class CacheEntry {
constructor ({ entry, request, response, options }) {
if (entry) {
this.key = entry.key
this.entry = entry
// previous versions of this module didn't write an explicit timestamp in
// the metadata, so fall back to the entry's timestamp. we can't use the
// entry timestamp to determine staleness because cacache will update it
// when it verifies its data
this.entry.metadata.time = this.entry.metadata.time || this.entry.time
} else {
this.key = cacheKey(request)
}
this.options = options
// these properties are behind getters that lazily evaluate
this[_request] = request
this[_response] = response
this[_policy] = null
}
// returns a CacheEntry instance that satisfies the given request
// or undefined if no existing entry satisfies
static async find (request, options) {
try {
// compacts the index and returns an array of unique entries
var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
const entryA = new CacheEntry({ entry: A, options })
const entryB = new CacheEntry({ entry: B, options })
return entryA.policy.satisfies(entryB.request)
}, {
validateEntry: (entry) => {
// clean out entries with a buggy content-encoding value
if (entry.metadata &&
entry.metadata.resHeaders &&
entry.metadata.resHeaders['content-encoding'] === null) {
return false
}
// if an integrity is null, it needs to have a status specified
if (entry.integrity === null) {
return !!(entry.metadata && entry.metadata.status)
}
return true
},
})
} catch (err) {
// if the compact request fails, ignore the error and return
return
}
// a cache mode of 'reload' means to behave as though we have no cache
// on the way to the network. return undefined to allow cacheFetch to
// create a brand new request no matter what.
if (options.cache === 'reload') {
return
}
// find the specific entry that satisfies the request
let match
for (const entry of matches) {
const _entry = new CacheEntry({
entry,
options,
})
if (_entry.policy.satisfies(request)) {
match = _entry
break
}
}
return match
}
// if the user made a PUT/POST/PATCH then we invalidate our
// cache for the same url by deleting the index entirely
static async invalidate (request, options) {
const key = cacheKey(request)
try {
await cacache.rm.entry(options.cachePath, key, { removeFully: true })
} catch (err) {
// ignore errors
}
}
get request () {
if (!this[_request]) {
this[_request] = new Request(this.entry.metadata.url, {
method: 'GET',
headers: this.entry.metadata.reqHeaders,
...this.entry.metadata.options,
})
}
return this[_request]
}
get response () {
if (!this[_response]) {
this[_response] = new Response(null, {
url: this.entry.metadata.url,
counter: this.options.counter,
status: this.entry.metadata.status || 200,
headers: {
...this.entry.metadata.resHeaders,
'content-length': this.entry.size,
},
})
}
return this[_response]
}
get policy () {
if (!this[_policy]) {
this[_policy] = new CachePolicy({
entry: this.entry,
request: this.request,
response: this.response,
options: this.options,
})
}
return this[_policy]
}
// wraps the response in a pipeline that stores the data
// in the cache while the user consumes it
async store (status) {
// if we got a status other than 200, 301, or 308,
// or the CachePolicy forbids storage, append the
// cache status header and return it untouched
if (
this.request.method !== 'GET' ||
![200, 301, 308].includes(this.response.status) ||
!this.policy.storable()
) {
this.response.headers.set('x-local-cache-status', 'skip')
return this.response
}
const size = this.response.headers.get('content-length')
const cacheOpts = {
algorithms: this.options.algorithms,
metadata: getMetadata(this.request, this.response, this.options),
size,
integrity: this.options.integrity,
integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
}
let body = null
// we only set a body if the status is a 200, redirects are
// stored as metadata only
if (this.response.status === 200) {
let cacheWriteResolve, cacheWriteReject
const cacheWritePromise = new Promise((resolve, reject) => {
cacheWriteResolve = resolve
cacheWriteReject = reject
})
body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
flush () {
return cacheWritePromise
},
}))
// this is always true since if we aren't reusing the one from the remote fetch, we
// are using the one from cacache
body.hasIntegrityEmitter = true
const onResume = () => {
const tee = new Minipass()
const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
// re-emit the integrity and size events on our new response body so they can be reused
cacheStream.on('integrity', i => body.emit('integrity', i))
cacheStream.on('size', s => body.emit('size', s))
// stick a flag on here so downstream users will know if they can expect integrity events
tee.pipe(cacheStream)
// TODO if the cache write fails, log a warning but return the response anyway
// eslint-disable-next-line promise/catch-or-return
cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
body.unshift(tee)
body.unshift(this.response.body)
}
body.once('resume', onResume)
body.once('end', () => body.removeListener('resume', onResume))
} else {
await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
}
// note: we do not set the x-local-cache-hash header because we do not know
// the hash value until after the write to the cache completes, which doesn't
// happen until after the response has been sent and it's too late to write
// the header anyway
this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
this.response.headers.set('x-local-cache-mode', 'stream')
this.response.headers.set('x-local-cache-status', status)
this.response.headers.set('x-local-cache-time', new Date().toISOString())
const newResponse = new Response(body, {
url: this.response.url,
status: this.response.status,
headers: this.response.headers,
counter: this.options.counter,
})
return newResponse
}
// use the cached data to create a response and return it
async respond (method, options, status) {
let response
if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
// if the request is a HEAD, or the response is a redirect,
// then the metadata in the entry already includes everything
// we need to build a response
response = this.response
} else {
// we're responding with a full cached response, so create a body
// that reads from cacache and attach it to a new Response
const body = new Minipass()
const headers = { ...this.policy.responseHeaders() }
const onResume = () => {
const cacheStream = cacache.get.stream.byDigest(
this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
)
cacheStream.on('error', async (err) => {
cacheStream.pause()
if (err.code === 'EINTEGRITY') {
await cacache.rm.content(
this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
)
}
if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
await CacheEntry.invalidate(this.request, this.options)
}
body.emit('error', err)
cacheStream.resume()
})
// emit the integrity and size events based on our metadata so we're consistent
body.emit('integrity', this.entry.integrity)
body.emit('size', Number(headers['content-length']))
cacheStream.pipe(body)
}
body.once('resume', onResume)
body.once('end', () => body.removeListener('resume', onResume))
response = new Response(body, {
url: this.entry.metadata.url,
counter: options.counter,
status: 200,
headers,
})
}
response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
response.headers.set('x-local-cache-mode', 'stream')
response.headers.set('x-local-cache-status', status)
response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
return response
}
// use the provided request along with this cache entry to
// revalidate the stored response. returns a response, either
// from the cache or from the update
async revalidate (request, options) {
const revalidateRequest = new Request(request, {
headers: this.policy.revalidationHeaders(request),
})
try {
// NOTE: be sure to remove the headers property from the
// user supplied options, since we have already defined
// them on the new request object. if they're still in the
// options then those will overwrite the ones from the policy
var response = await remote(revalidateRequest, {
...options,
headers: undefined,
})
} catch (err) {
// if the network fetch fails, return the stale
// cached response unless it has a cache-control
// of 'must-revalidate'
if (!this.policy.mustRevalidate) {
return this.respond(request.method, options, 'stale')
}
throw err
}
if (this.policy.revalidated(revalidateRequest, response)) {
// we got a 304, write a new index to the cache and respond from cache
const metadata = getMetadata(request, response, options)
// 304 responses do not include headers that are specific to the response data
// since they do not include a body, so we copy values for headers that were
// in the old cache entry to the new one, if the new metadata does not already
// include that header
for (const name of KEEP_RESPONSE_HEADERS) {
if (
!hasOwnProperty(metadata.resHeaders, name) &&
hasOwnProperty(this.entry.metadata.resHeaders, name)
) {
metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
}
}
for (const name of options.cacheAdditionalHeaders) {
const inMeta = hasOwnProperty(metadata.resHeaders, name)
const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
const inPolicy = hasOwnProperty(this.policy.response.headers, name)
// if the header is in the existing entry, but it is not in the metadata
// then we need to write it to the metadata as this will refresh the on-disk cache
if (!inMeta && inEntry) {
metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
}
// if the header is in the metadata, but not in the policy, then we need to set
// it in the policy so that it's included in the immediate response. future
// responses will load a new cache entry, so we don't need to change that
if (!inPolicy && inMeta) {
this.policy.response.headers[name] = metadata.resHeaders[name]
}
}
try {
await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
size: this.entry.size,
metadata,
})
} catch (err) {
// if updating the cache index fails, we ignore it and
// respond anyway
}
return this.respond(request.method, options, 'revalidated')
}
// if we got a modified response, create a new entry based on it
const newEntry = new CacheEntry({
request,
response,
options,
})
// respond with the new entry while writing it to the cache
return newEntry.store('updated')
}
}
module.exports = CacheEntry
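
Taken together, the class above is driven in three ways by lib/cache/index.js further down in this diff: find() looks up a satisfying entry, respond()/revalidate() serve or refresh it, and store() writes a fresh network response through. A minimal sketch of that life cycle, assuming an options object already normalized by lib/options.js (the helper name here is illustrative only, not part of the module):

const CacheEntry = require('./entry.js')

// illustrative helper: lib/cache/index.js drives this flow for real
async function lookupSketch (request, options) {
  const entry = await CacheEntry.find(request, options)
  if (!entry) {
    return null // cacheFetch would fall through to the network and call store('miss')
  }
  return entry.policy.needsRevalidation(request)
    ? entry.revalidate(request, options) // conditional request; may still serve 'stale'
    : entry.respond(request.method, options, 'hit') // stream the body straight from cacache
}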

my-app/node_modules/make-fetch-happen/lib/cache/errors.js generated vendored Executable file

@@ -0,0 +1,11 @@
class NotCachedError extends Error {
constructor (url) {
/* eslint-disable-next-line max-len */
super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
this.code = 'ENOTCACHED'
}
}
module.exports = {
NotCachedError,
}
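
The ENOTCACHED code is what lets callers tell a cache miss under 'only-if-cached' apart from a network failure. A hedged usage sketch through the public entry point; the URL and cache path are placeholders:

const fetch = require('make-fetch-happen')

async function cachedOnly (url) {
  try {
    return await fetch(url, { cachePath: './my-cache', cache: 'only-if-cached' })
  } catch (err) {
    if (err.code === 'ENOTCACHED') {
      return null // nothing in the local cache satisfied this request
    }
    throw err
  }
}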

my-app/node_modules/make-fetch-happen/lib/cache/index.js generated vendored Executable file

@@ -0,0 +1,49 @@
const { NotCachedError } = require('./errors.js')
const CacheEntry = require('./entry.js')
const remote = require('../remote.js')
// do whatever is necessary to get a Response and return it
const cacheFetch = async (request, options) => {
// try to find a cached entry that satisfies this request
const entry = await CacheEntry.find(request, options)
if (!entry) {
// no cached result, if the cache mode is 'only-if-cached' that's a failure
if (options.cache === 'only-if-cached') {
throw new NotCachedError(request.url)
}
// otherwise, we make a request, store it and return it
const response = await remote(request, options)
const newEntry = new CacheEntry({ request, response, options })
return newEntry.store('miss')
}
// we have a cached response that satisfies this request, however if the cache
// mode is 'no-cache' then we send the revalidation request no matter what
if (options.cache === 'no-cache') {
return entry.revalidate(request, options)
}
// if the cached entry is not stale, or if the cache mode is 'force-cache' or
// 'only-if-cached' we can respond with the cached entry. set the status
// based on the result of needsRevalidation and respond
const _needsRevalidation = entry.policy.needsRevalidation(request)
if (options.cache === 'force-cache' ||
options.cache === 'only-if-cached' ||
!_needsRevalidation) {
return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
}
// if we got here, the cache entry is stale so revalidate it
return entry.revalidate(request, options)
}
cacheFetch.invalidate = async (request, options) => {
if (!options.cachePath) {
return
}
return CacheEntry.invalidate(request, options)
}
module.exports = cacheFetch
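
Besides being called from lib/fetch.js after a successful state-changing request, cacheFetch.invalidate can be used directly; it only needs the request's url to build the cache key. A minimal sketch, with the cache path as a placeholder:

const cacheFetch = require('./index.js')

async function dropCachedUrl (url) {
  // removes every cached variant of this URL from the index (removeFully: true)
  await cacheFetch.invalidate({ url }, { cachePath: './my-cache' })
}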

my-app/node_modules/make-fetch-happen/lib/cache/key.js generated vendored Executable file

@@ -0,0 +1,17 @@
const { URL, format } = require('url')
// options passed to url.format() when generating a key
const formatOptions = {
auth: false,
fragment: false,
search: true,
unicode: false,
}
// returns a string to be used as the cache key for the Request
const cacheKey = (request) => {
const parsed = new URL(request.url)
return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
}
module.exports = cacheKey
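
Because auth and fragments are excluded while the query string is kept, two requests that differ only in credentials or hash share a cache entry, while different query strings do not. A quick sketch of the resulting key shape (cacheKey only reads request.url, so a plain object stands in for a Request here):

const cacheKey = require('./key.js')

console.log(cacheKey({ url: 'https://user:pass@registry.npmjs.org/abbrev?foo=bar#frag' }))
// → 'make-fetch-happen:request-cache:https://registry.npmjs.org/abbrev?foo=bar'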

my-app/node_modules/make-fetch-happen/lib/cache/policy.js generated vendored Executable file

@@ -0,0 +1,161 @@
const CacheSemantics = require('http-cache-semantics')
const Negotiator = require('negotiator')
const ssri = require('ssri')
// options passed to http-cache-semantics constructor
const policyOptions = {
shared: false,
ignoreCargoCult: true,
}
// a fake empty response, used when only testing the
// request for storability
const emptyResponse = { status: 200, headers: {} }
// returns a plain object representation of the Request
const requestObject = (request) => {
const _obj = {
method: request.method,
url: request.url,
headers: {},
compress: request.compress,
}
request.headers.forEach((value, key) => {
_obj.headers[key] = value
})
return _obj
}
// returns a plain object representation of the Response
const responseObject = (response) => {
const _obj = {
status: response.status,
headers: {},
}
response.headers.forEach((value, key) => {
_obj.headers[key] = value
})
return _obj
}
class CachePolicy {
constructor ({ entry, request, response, options }) {
this.entry = entry
this.request = requestObject(request)
this.response = responseObject(response)
this.options = options
this.policy = new CacheSemantics(this.request, this.response, policyOptions)
if (this.entry) {
// if we have an entry, copy the timestamp to the _responseTime
// this is necessary because the CacheSemantics constructor forces
// the value to Date.now() which means a policy created from a
// cache entry is likely to always identify itself as stale
this.policy._responseTime = this.entry.metadata.time
}
}
// static method to quickly determine if a request alone is storable
static storable (request, options) {
// no cachePath means no caching
if (!options.cachePath) {
return false
}
// user explicitly asked not to cache
if (options.cache === 'no-store') {
return false
}
// we only cache GET and HEAD requests
if (!['GET', 'HEAD'].includes(request.method)) {
return false
}
// otherwise, let http-cache-semantics make the decision
// based on the request's headers
const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
return policy.storable()
}
// returns true if the policy satisfies the request
satisfies (request) {
const _req = requestObject(request)
if (this.request.headers.host !== _req.headers.host) {
return false
}
if (this.request.compress !== _req.compress) {
return false
}
const negotiatorA = new Negotiator(this.request)
const negotiatorB = new Negotiator(_req)
if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
return false
}
if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
return false
}
if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
return false
}
if (this.options.integrity) {
return ssri.parse(this.options.integrity).match(this.entry.integrity)
}
return true
}
// returns true if the request and response allow caching
storable () {
return this.policy.storable()
}
// NOTE: this is a hack to avoid parsing the cache-control
// header ourselves, it returns true if the response's
// cache-control contains must-revalidate
get mustRevalidate () {
return !!this.policy._rescc['must-revalidate']
}
// returns true if the cached response requires revalidation
// for the given request
needsRevalidation (request) {
const _req = requestObject(request)
// force method to GET because we only cache GETs
// but can serve a HEAD from a cached GET
_req.method = 'GET'
return !this.policy.satisfiesWithoutRevalidation(_req)
}
responseHeaders () {
return this.policy.responseHeaders()
}
// returns a new object containing the appropriate headers
// to send a revalidation request
revalidationHeaders (request) {
const _req = requestObject(request)
return this.policy.revalidationHeaders(_req)
}
// returns true if the request/response was revalidated
// successfully. returns false if a new response was received
revalidated (request, response) {
const _req = requestObject(request)
const _res = responseObject(response)
const policy = this.policy.revalidatedPolicy(_req, _res)
return !policy.modified
}
}
module.exports = CachePolicy
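
The static storable() check is the cheap gate lib/fetch.js applies before touching the cache at all; the instance methods are only used once an entry or response exists. A minimal sketch of the static path, assuming minipass-fetch is available and the cache path is a placeholder:

const { Request } = require('minipass-fetch')
const CachePolicy = require('./policy.js')

const req = new Request('https://registry.npmjs.org/abbrev')
console.log(CachePolicy.storable(req, { cachePath: './my-cache', cache: 'default' }))  // true: plain GET
console.log(CachePolicy.storable(req, { cachePath: './my-cache', cache: 'no-store' })) // false: caching disabled
console.log(CachePolicy.storable(req, { cache: 'default' }))                           // false: no cachePath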

my-app/node_modules/make-fetch-happen/lib/fetch.js generated vendored Executable file

@@ -0,0 +1,118 @@
'use strict'
const { FetchError, Request, isRedirect } = require('minipass-fetch')
const url = require('url')
const CachePolicy = require('./cache/policy.js')
const cache = require('./cache/index.js')
const remote = require('./remote.js')
// given a Request, a Response and user options
// return true if the response is a redirect that
// can be followed. we throw errors that will result
// in the fetch being rejected if the redirect is
// possible but invalid for some reason
const canFollowRedirect = (request, response, options) => {
if (!isRedirect(response.status)) {
return false
}
if (options.redirect === 'manual') {
return false
}
if (options.redirect === 'error') {
throw new FetchError(`redirect mode is set to error: ${request.url}`,
'no-redirect', { code: 'ENOREDIRECT' })
}
if (!response.headers.has('location')) {
throw new FetchError(`redirect location header missing for: ${request.url}`,
'no-location', { code: 'EINVALIDREDIRECT' })
}
if (request.counter >= request.follow) {
throw new FetchError(`maximum redirect reached at: ${request.url}`,
'max-redirect', { code: 'EMAXREDIRECT' })
}
return true
}
// given a Request, a Response, and the user's options return an object
// with a new Request and a new options object that will be used for
// following the redirect
const getRedirect = (request, response, options) => {
const _opts = { ...options }
const location = response.headers.get('location')
const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
// Comment below is used under the following license:
/**
* @license
* Copyright (c) 2010-2012 Mikeal Rogers
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS
* IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
// Remove authorization if changing hostnames (but not if just
// changing ports or protocols). This matches the behavior of request:
// https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
request.headers.delete('authorization')
request.headers.delete('cookie')
}
// for POST request with 301/302 response, or any request with 303 response,
// use GET when following redirect
if (
response.status === 303 ||
(request.method === 'POST' && [301, 302].includes(response.status))
) {
_opts.method = 'GET'
_opts.body = null
request.headers.delete('content-length')
}
_opts.headers = {}
request.headers.forEach((value, key) => {
_opts.headers[key] = value
})
_opts.counter = ++request.counter
const redirectReq = new Request(url.format(redirectUrl), _opts)
return {
request: redirectReq,
options: _opts,
}
}
const fetch = async (request, options) => {
const response = CachePolicy.storable(request, options)
? await cache(request, options)
: await remote(request, options)
// if the request wasn't a GET or HEAD, and the response
// status is between 200 and 399 inclusive, invalidate the
// request url
if (!['GET', 'HEAD'].includes(request.method) &&
response.status >= 200 &&
response.status <= 399) {
await cache.invalidate(request, options)
}
if (!canFollowRedirect(request, response, options)) {
return response
}
const redirect = getRedirect(request, response, options)
return fetch(redirect.request, redirect.options)
}
module.exports = fetch
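
One subtle behavior in getRedirect worth highlighting is that authorization and cookie headers are dropped only when the redirect crosses hostnames, not when it merely changes port or protocol. An isolated sketch of that comparison (illustrative only):

const { URL } = require('url')

// mirrors the hostname check in getRedirect above
const crossesHost = (fromUrl, location) =>
  new URL(fromUrl).hostname !== new URL(location, fromUrl).hostname

console.log(crossesHost('https://a.example/pkg', 'https://b.example/pkg'))   // true  → strip authorization/cookie
console.log(crossesHost('https://a.example/pkg', '/other/path'))             // false → headers are kept
console.log(crossesHost('http://a.example/pkg', 'https://a.example:8443/'))  // false → port/protocol changes don't count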

my-app/node_modules/make-fetch-happen/lib/index.js generated vendored Executable file

@@ -0,0 +1,41 @@
const { FetchError, Headers, Request, Response } = require('minipass-fetch')
const configureOptions = require('./options.js')
const fetch = require('./fetch.js')
const makeFetchHappen = (url, opts) => {
const options = configureOptions(opts)
const request = new Request(url, options)
return fetch(request, options)
}
makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
if (typeof defaultUrl === 'object') {
defaultOptions = defaultUrl
defaultUrl = null
}
const defaultedFetch = (url, options = {}) => {
const finalUrl = url || defaultUrl
const finalOptions = {
...defaultOptions,
...options,
headers: {
...defaultOptions.headers,
...options.headers,
},
}
return wrappedFetch(finalUrl, finalOptions)
}
defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
return defaultedFetch
}
module.exports = makeFetchHappen
module.exports.FetchError = FetchError
module.exports.Headers = Headers
module.exports.Request = Request
module.exports.Response = Response
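
The exported function is just configureOptions plus a Request constructor in front of lib/fetch.js, and .defaults() layers option objects (with headers merged one level deep) for reuse. A hedged usage sketch; the URL, user-agent, and cache directory are placeholders:

const makeFetchHappen = require('make-fetch-happen')

// a client with shared defaults; per-call options and headers are merged on top
const fetch = makeFetchHappen.defaults({
  cachePath: './my-cache',
  headers: { 'user-agent': 'my-app/1.0.0' },
})

async function demo () {
  const res = await fetch('https://registry.npmjs.org/abbrev', { cache: 'no-cache' })
  console.log(res.status, res.headers.get('x-local-cache-status')) // e.g. 200 'revalidated'
}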

my-app/node_modules/make-fetch-happen/lib/options.js generated vendored Executable file

@@ -0,0 +1,54 @@
const dns = require('dns')
const conditionalHeaders = [
'if-modified-since',
'if-none-match',
'if-unmodified-since',
'if-match',
'if-range',
]
const configureOptions = (opts) => {
const { strictSSL, ...options } = { ...opts }
options.method = options.method ? options.method.toUpperCase() : 'GET'
options.rejectUnauthorized = strictSSL !== false
if (!options.retry) {
options.retry = { retries: 0 }
} else if (typeof options.retry === 'string') {
const retries = parseInt(options.retry, 10)
if (isFinite(retries)) {
options.retry = { retries }
} else {
options.retry = { retries: 0 }
}
} else if (typeof options.retry === 'number') {
options.retry = { retries: options.retry }
} else {
options.retry = { retries: 0, ...options.retry }
}
options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
options.cache = options.cache || 'default'
if (options.cache === 'default') {
const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
return conditionalHeaders.includes(name.toLowerCase())
})
if (hasConditionalHeader) {
options.cache = 'no-store'
}
}
options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
// cacheManager is deprecated, but if it's set and
// cachePath is not we should copy it to the new field
if (options.cacheManager && !options.cachePath) {
options.cachePath = options.cacheManager
}
return options
}
module.exports = configureOptions
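
Most of this module is retry normalization plus the rule that a 'default' cache mode becomes 'no-store' when the caller already sent its own conditional headers. A few hedged examples of what configureOptions produces:

const configureOptions = require('./options.js')

console.log(configureOptions({ retry: '3' }).retry)   // { retries: 3 }
console.log(configureOptions({ retry: 2 }).retry)     // { retries: 2 }
console.log(configureOptions({ retry: false }).retry) // { retries: 0 }
console.log(configureOptions({ headers: { 'If-None-Match': '"abc"' } }).cache) // 'no-store'
console.log(configureOptions({ strictSSL: false }).rejectUnauthorized)         // false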

my-app/node_modules/make-fetch-happen/lib/pipeline.js generated vendored Executable file

@@ -0,0 +1,41 @@
'use strict'
const MinipassPipeline = require('minipass-pipeline')
class CachingMinipassPipeline extends MinipassPipeline {
#events = []
#data = new Map()
constructor (opts, ...streams) {
// CRITICAL: do NOT pass the streams to the call to super(), this will start
// the flow of data and potentially cause the events we need to catch to emit
// before we've finished our own setup. instead we call super() with no args,
// finish our setup, and then push the streams into ourselves to start the
// data flow
super()
this.#events = opts.events
/* istanbul ignore next - coverage disabled because this is pointless to test here */
if (streams.length) {
this.push(...streams)
}
}
on (event, handler) {
if (this.#events.includes(event) && this.#data.has(event)) {
return handler(...this.#data.get(event))
}
return super.on(event, handler)
}
emit (event, ...data) {
if (this.#events.includes(event)) {
this.#data.set(event, data)
}
return super.emit(event, ...data)
}
}
module.exports = CachingMinipassPipeline
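
The practical effect is that a listener attached after one of the allow-listed events has already fired still receives it, which is how the integrity and size values survive until cacache attaches its own handlers. A small sketch, assuming minipass is installed and using a placeholder integrity string:

const { Minipass } = require('minipass')
const CachingMinipassPipeline = require('./pipeline.js')

const pipeline = new CachingMinipassPipeline({ events: ['integrity'] }, new Minipass())

pipeline.emit('integrity', 'sha512-example') // payload is cached because 'integrity' is allow-listed
// a listener added afterwards is invoked immediately with the cached payload
pipeline.on('integrity', (i) => console.log('late listener saw', i))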

my-app/node_modules/make-fetch-happen/lib/remote.js generated vendored Executable file

@@ -0,0 +1,127 @@
const { Minipass } = require('minipass')
const fetch = require('minipass-fetch')
const promiseRetry = require('promise-retry')
const ssri = require('ssri')
const CachingMinipassPipeline = require('./pipeline.js')
const { getAgent } = require('@npmcli/agent')
const pkg = require('../package.json')
const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
const RETRY_ERRORS = [
'ECONNRESET', // remote socket closed on us
'ECONNREFUSED', // remote host refused to open connection
'EADDRINUSE', // failed to bind to a local port (proxy?)
'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
// from @npmcli/agent
'ECONNECTIONTIMEOUT',
'EIDLETIMEOUT',
'ERESPONSETIMEOUT',
'ETRANSFERTIMEOUT',
// Known codes we do NOT retry on:
// ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
// EINVALIDPROXY // invalid protocol from @npmcli/agent
// EINVALIDRESPONSE // invalid status code from @npmcli/agent
]
const RETRY_TYPES = [
'request-timeout',
]
// make a request directly to the remote source,
// retrying certain classes of errors as well as
// following redirects (through the cache if necessary)
// and verifying response integrity
const remoteFetch = (request, options) => {
const agent = getAgent(request.url, options)
if (!request.headers.has('connection')) {
request.headers.set('connection', agent ? 'keep-alive' : 'close')
}
if (!request.headers.has('user-agent')) {
request.headers.set('user-agent', USER_AGENT)
}
// keep our own options since we're overriding the agent
// and the redirect mode
const _opts = {
...options,
agent,
redirect: 'manual',
}
return promiseRetry(async (retryHandler, attemptNum) => {
const req = new fetch.Request(request, _opts)
try {
let res = await fetch(req, _opts)
if (_opts.integrity && res.status === 200) {
// we got a 200 response and the user has specified an expected
// integrity value, so wrap the response in an ssri stream to verify it
const integrityStream = ssri.integrityStream({
algorithms: _opts.algorithms,
integrity: _opts.integrity,
size: _opts.size,
})
const pipeline = new CachingMinipassPipeline({
events: ['integrity', 'size'],
}, res.body, integrityStream)
// we also propagate the integrity and size events out to the pipeline so we can use
// this new response body as an integrityEmitter for cacache
integrityStream.on('integrity', i => pipeline.emit('integrity', i))
integrityStream.on('size', s => pipeline.emit('size', s))
res = new fetch.Response(pipeline, res)
// set an explicit flag so we know if our response body will emit integrity and size
res.body.hasIntegrityEmitter = true
}
res.headers.set('x-fetch-attempts', attemptNum)
// do not retry POST requests, or requests with a streaming body
// do retry requests with a 408, 420, 429 or 500+ status in the response
const isStream = Minipass.isStream(req.body)
const isRetriable = req.method !== 'POST' &&
!isStream &&
([408, 420, 429].includes(res.status) || res.status >= 500)
if (isRetriable) {
if (typeof options.onRetry === 'function') {
options.onRetry(res)
}
return retryHandler(res)
}
return res
} catch (err) {
const code = (err.code === 'EPROMISERETRY')
? err.retried.code
: err.code
// err.retried will be the thing that was thrown from above
// if it's a response, we just got a bad status code and we
// can re-throw to allow the retry
const isRetryError = err.retried instanceof fetch.Response ||
(RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
if (req.method === 'POST' || isRetryError) {
throw err
}
if (typeof options.onRetry === 'function') {
options.onRetry(err)
}
return retryHandler(err)
}
}, options.retry).catch((err) => {
// don't reject for http errors, just return them
if (err.status >= 400 && err.type !== 'system') {
return err
}
throw err
})
}
module.exports = remoteFetch
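
Retries only happen for idempotent, non-streaming requests and only for the codes and statuses listed above; when options.onRetry is provided it is called with either the failed Response or the thrown error before each new attempt. A hedged sketch of wiring that up through the public API (the retry values are placeholders handed straight to promise-retry):

const fetch = require('make-fetch-happen')

async function fetchWithRetries (url) {
  return fetch(url, {
    retry: { retries: 3, minTimeout: 1000 },
    onRetry: (cause) => {
      // cause is a Response (retriable status) or an Error (retriable network failure)
      console.warn('retrying after', cause.status || cause.code)
    },
  })
}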