Updated the files.
This commit is contained in:
parent
1553e6b971
commit
753967d4f5
23418 changed files with 3784666 additions and 0 deletions
29
my-app/node_modules/cacache/lib/content/path.js
generated
vendored
Executable file
29
my-app/node_modules/cacache/lib/content/path.js
generated
vendored
Executable file
|
@ -0,0 +1,29 @@
|
|||
'use strict'

// Maps a cache directory plus an ssri digest to the on-disk location of
// the corresponding content file.
//
// Current format of content file path:
//
// sha512-BaSE64Hex= ->
// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee

const contentVer = require('../../package.json')['cache-version'].content
const hashToSegments = require('../util/hash-to-segments')
const path = require('path')
const ssri = require('ssri')

module.exports = contentPath

// Resolve the full content path for `integrity` inside `cache`.
// contentPath is based on the *strongest* algorithm carried by the sri.
function contentPath (cache, integrity) {
  const sri = ssri.parse(integrity, { single: true })
  const segments = hashToSegments(sri.hexDigest())
  return path.join(contentDir(cache), sri.algorithm, ...segments)
}

module.exports.contentDir = contentDir

// Root directory holding all content files for this cache version.
function contentDir (cache) {
  return path.join(cache, `content-v${contentVer}`)
}
|
165
my-app/node_modules/cacache/lib/content/read.js
generated
vendored
Executable file
165
my-app/node_modules/cacache/lib/content/read.js
generated
vendored
Executable file
|
@ -0,0 +1,165 @@
|
|||
'use strict'

const fs = require('fs/promises')
const fsm = require('fs-minipass')
const ssri = require('ssri')
const contentPath = require('./path')
const Pipeline = require('minipass-pipeline')

module.exports = read

// Content at or below this size is read with a single readFile call;
// anything larger is streamed through a verifying pipeline instead.
const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024

// Read and verify the content addressed by `integrity`, resolving to a
// Buffer. Throws EBADSIZE on a size mismatch and EINTEGRITY when the
// digest does not check out.
async function read (cache, integrity, opts = {}) {
  const { size } = opts
  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
    // get size
    const stat = size ? { size } : await fs.stat(cpath)
    return { stat, cpath, sri }
  })

  if (stat.size > MAX_SINGLE_READ_SIZE) {
    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
  }

  const data = await fs.readFile(cpath, { encoding: null })

  if (stat.size !== data.length) {
    throw sizeError(stat.size, data.length)
  }

  if (!ssri.checkData(data, sri)) {
    throw integrityError(sri, cpath)
  }

  return data
}

// Assemble (but do not await) a pipeline that reads `cpath` while
// verifying size and integrity as the bytes flow through.
const readPipeline = (cpath, size, sri, stream) => {
  stream.push(
    new fsm.ReadStream(cpath, {
      size,
      readSize: MAX_SINGLE_READ_SIZE,
    }),
    ssri.integrityStream({
      integrity: sri,
      size,
    })
  )
  return stream
}

module.exports.stream = readStream
module.exports.readStream = readStream

// Streaming variant of read(). All errors — including integrity
// failures — are emitted on the returned stream.
function readStream (cache, integrity, opts = {}) {
  const { size } = opts
  const stream = new Pipeline()
  // Set all this up to run on the stream and then just return the stream
  Promise.resolve().then(async () => {
    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
      // get size
      const stat = size ? { size } : await fs.stat(cpath)
      return { stat, cpath, sri }
    })

    return readPipeline(cpath, stat.size, sri, stream)
  }).catch(err => stream.emit('error', err))

  return stream
}

module.exports.copy = copy

// Copy the content file for `integrity` to `dest`.
function copy (cache, integrity, dest) {
  return withContentSri(cache, integrity, (cpath, sri) => {
    return fs.copyFile(cpath, dest)
  })
}

module.exports.hasContent = hasContent

// Report whether content for `integrity` exists on disk: resolves to
// false when missing (or unreadable on Windows), otherwise to
// { size, sri, stat } for the digest that was found.
async function hasContent (cache, integrity) {
  if (!integrity) {
    return false
  }

  try {
    return await withContentSri(cache, integrity, async (cpath, sri) => {
      const stat = await fs.stat(cpath)
      return { size: stat.size, sri, stat }
    })
  } catch (err) {
    if (err.code === 'ENOENT') {
      return false
    }

    if (err.code === 'EPERM') {
      /* istanbul ignore else */
      if (process.platform !== 'win32') {
        throw err
      } else {
        return false
      }
    }
  }
}

// Resolve `integrity` to a concrete content path and call `fn(cpath, sri)`.
// When the sri carries multiple digests for its strongest algorithm, each
// digest is tried and the first non-error result wins.
async function withContentSri (cache, integrity, fn) {
  const sri = ssri.parse(integrity)
  // If `integrity` has multiple entries, pick the first digest
  // with available local data.
  const algo = sri.pickAlgorithm()
  const digests = sri[algo]

  if (digests.length <= 1) {
    const cpath = contentPath(cache, digests[0])
    return fn(cpath, digests[0])
  } else {
    // Can't use race here because a generic error can happen before
    // a ENOENT error, and can happen before a valid result
    const results = await Promise.all(digests.map(async (meta) => {
      try {
        return await withContentSri(cache, meta, fn)
      } catch (err) {
        if (err.code === 'ENOENT') {
          return Object.assign(
            new Error('No matching content found for ' + sri.toString()),
            { code: 'ENOENT' }
          )
        }
        return err
      }
    }))
    // Return the first non error if it is found
    const result = results.find((r) => !(r instanceof Error))
    if (result) {
      return result
    }

    // Throw the No matching content found error
    const enoentError = results.find((r) => r.code === 'ENOENT')
    if (enoentError) {
      throw enoentError
    }

    // Throw generic error
    throw results.find((r) => r instanceof Error)
  }
}
|
||||
|
||||
// Build an EBADSIZE error for a mismatch between the expected and
// actual byte counts of cached content. The `expected`/`found` values
// are attached as properties for programmatic inspection.
function sizeError (expected, found) {
  /* eslint-disable-next-line max-len */
  return Object.assign(
    new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`),
    { expected, found, code: 'EBADSIZE' }
  )
}
|
||||
|
||||
// Build an EINTEGRITY error for content whose digest did not match the
// requested sri. The sri and offending path are attached as properties.
function integrityError (sri, path) {
  return Object.assign(
    new Error(`Integrity verification failed for ${sri} (${path})`),
    { code: 'EINTEGRITY', sri, path }
  )
}
|
18
my-app/node_modules/cacache/lib/content/rm.js
generated
vendored
Executable file
18
my-app/node_modules/cacache/lib/content/rm.js
generated
vendored
Executable file
|
@ -0,0 +1,18 @@
|
|||
'use strict'

const fs = require('fs/promises')
const contentPath = require('./path')
const { hasContent } = require('./read')

module.exports = rm

// Remove the content file addressed by `integrity` from `cache`.
// Resolves to true when something was deleted, false when there was
// nothing to delete.
async function rm (cache, integrity) {
  const content = await hasContent(cache, integrity)
  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
  if (!content || !content.sri) {
    return false
  }
  await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
  return true
}
|
206
my-app/node_modules/cacache/lib/content/write.js
generated
vendored
Executable file
206
my-app/node_modules/cacache/lib/content/write.js
generated
vendored
Executable file
|
@ -0,0 +1,206 @@
|
|||
'use strict'

const events = require('events')

const contentPath = require('./path')
const fs = require('fs/promises')
const { moveFile } = require('@npmcli/fs')
const { Minipass } = require('minipass')
const Pipeline = require('minipass-pipeline')
const Flush = require('minipass-flush')
const path = require('path')
const ssri = require('ssri')
const uniqueFilename = require('unique-filename')
const fsm = require('fs-minipass')

module.exports = write

// Cache of move operations in process so we don't duplicate
const moveOperations = new Map()

// Write `data` into the content store, validating opts.size and
// opts.integrity first. Resolves to { integrity, size } for what was
// stored. Each digest is written via a temp file moved into place.
async function write (cache, data, opts = {}) {
  const { algorithms, size, integrity } = opts

  if (typeof size === 'number' && data.length !== size) {
    throw sizeError(size, data.length)
  }

  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
  if (integrity && !ssri.checkData(data, integrity, opts)) {
    throw checksumError(integrity, sri)
  }

  for (const algo in sri) {
    const tmp = await makeTmp(cache, opts)
    const hash = sri[algo].toString()
    try {
      // 'wx': fail rather than clobber if the temp target already exists
      await fs.writeFile(tmp.target, data, { flag: 'wx' })
      await moveToDestination(tmp, cache, hash, opts)
    } finally {
      if (!tmp.moved) {
        await fs.rm(tmp.target, { recursive: true, force: true })
      }
    }
  }
  return { integrity: sri, size: data.length }
}

module.exports.stream = writeStream

// writes proxied to the 'inputStream' that is passed to the Promise
// 'end' is deferred until content is handled.
class CacacheWriteStream extends Flush {
  constructor (cache, opts) {
    super()
    this.opts = opts
    this.cache = cache
    this.inputStream = new Minipass()
    this.inputStream.on('error', er => this.emit('error', er))
    this.inputStream.on('drain', () => this.emit('drain'))
    this.handleContentP = null
  }

  write (chunk, encoding, cb) {
    // Lazily start content handling on the first write so that a fully
    // empty stream can be detected in flush().
    if (!this.handleContentP) {
      this.handleContentP = handleContent(
        this.inputStream,
        this.cache,
        this.opts
      )
      this.handleContentP.catch(error => this.emit('error', error))
    }
    return this.inputStream.write(chunk, encoding, cb)
  }

  flush (cb) {
    this.inputStream.end(() => {
      if (!this.handleContentP) {
        const e = new Error('Cache input stream was empty')
        e.code = 'ENODATA'
        // empty streams are probably emitting end right away.
        // defer this one tick by rejecting a promise on it.
        return Promise.reject(e).catch(cb)
      }
      // eslint-disable-next-line promise/catch-or-return
      this.handleContentP.then(
        (res) => {
          res.integrity && this.emit('integrity', res.integrity)
          // eslint-disable-next-line promise/always-return
          res.size !== null && this.emit('size', res.size)
          cb()
        },
        (er) => cb(er)
      )
    })
  }
}

// Factory for the streaming write interface.
function writeStream (cache, opts = {}) {
  return new CacacheWriteStream(cache, opts)
}

// Pipe a stream into a temp file, then move the result into the content
// store, cleaning the temp file up on any failure.
async function handleContent (inputStream, cache, opts) {
  const tmp = await makeTmp(cache, opts)
  try {
    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
    await moveToDestination(
      tmp,
      cache,
      res.integrity,
      opts
    )
    return res
  } finally {
    if (!tmp.moved) {
      await fs.rm(tmp.target, { recursive: true, force: true })
    }
  }
}

// Stream input into `tmpTarget`, computing (or receiving) integrity and
// size along the way. Resolves to { integrity, size }.
async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
  const outStream = new fsm.WriteStream(tmpTarget, {
    flags: 'wx',
  })

  if (opts.integrityEmitter) {
    // we need to create these all simultaneously since they can fire in any order
    const [integrity, size] = await Promise.all([
      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
      new Pipeline(inputStream, outStream).promise(),
    ])
    return { integrity, size }
  }

  let integrity
  let size
  const hashStream = ssri.integrityStream({
    integrity: opts.integrity,
    algorithms: opts.algorithms,
    size: opts.size,
  })
  hashStream.on('integrity', i => {
    integrity = i
  })
  hashStream.on('size', s => {
    size = s
  })

  const pipeline = new Pipeline(inputStream, hashStream, outStream)
  await pipeline.promise()
  return { integrity, size }
}

// Allocate a unique temp file target under <cache>/tmp.
async function makeTmp (cache, opts) {
  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
  return {
    target: tmpTarget,
    moved: false,
  }
}

// Move a written temp file into its content-addressed destination,
// deduplicating concurrent moves targeting the same path via
// moveOperations.
async function moveToDestination (tmp, cache, sri, opts) {
  const destination = contentPath(cache, sri)
  const destDir = path.dirname(destination)
  if (moveOperations.has(destination)) {
    return moveOperations.get(destination)
  }
  moveOperations.set(
    destination,
    fs.mkdir(destDir, { recursive: true })
      .then(async () => {
        await moveFile(tmp.target, destination, { overwrite: false })
        tmp.moved = true
        return tmp.moved
      })
      .catch(err => {
        // NOTE(review): this intentionally swallows the benign
        // "destination exists" race and rethrows everything else —
        // though tagging those other errors EEXIST looks suspect;
        // preserved as-is to match existing caller expectations.
        if (!err.message.startsWith('The destination file exists')) {
          throw Object.assign(err, { code: 'EEXIST' })
        }
      }).finally(() => {
        moveOperations.delete(destination)
      })
  )
  return moveOperations.get(destination)
}
|
||||
|
||||
// Build an EBADSIZE error for a mismatch between the declared and
// actual byte counts of data being written.
function sizeError (expected, found) {
  /* eslint-disable-next-line max-len */
  return Object.assign(
    new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`),
    { expected, found, code: 'EBADSIZE' }
  )
}
|
||||
|
||||
// Build an EINTEGRITY error describing a mismatch between the integrity
// the caller supplied and the integrity computed from the data.
function checksumError (expected, found) {
  const err = new Error(`Integrity check failed:
  Wanted: ${expected}
   Found: ${found}`)
  err.code = 'EINTEGRITY'
  err.expected = expected
  err.found = found
  return err
}
|
330
my-app/node_modules/cacache/lib/entry-index.js
generated
vendored
Executable file
330
my-app/node_modules/cacache/lib/entry-index.js
generated
vendored
Executable file
|
@ -0,0 +1,330 @@
|
|||
'use strict'

const crypto = require('crypto')
const {
  appendFile,
  mkdir,
  readFile,
  readdir,
  rm,
  writeFile,
} = require('fs/promises')
const { Minipass } = require('minipass')
const path = require('path')
const ssri = require('ssri')
const uniqueFilename = require('unique-filename')

const contentPath = require('./content/path')
const hashToSegments = require('./util/hash-to-segments')
const indexV = require('../package.json')['cache-version'].index
const { moveFile } = require('@npmcli/fs')

// ENOENT-coded error thrown when a key has no usable index entry.
module.exports.NotFoundError = class NotFoundError extends Error {
  constructor (cache, key) {
    super(`No cache entry for ${key} found in ${cache}`)
    this.code = 'ENOENT'
    this.cache = cache
    this.key = key
  }
}

module.exports.compact = compact

// Rewrite the bucket for `key`, dropping entries considered duplicates
// by `matchFn` (and entries rejected by opts.validateEntry). Resolves to
// the surviving entries, newest first.
async function compact (cache, key, matchFn, opts = {}) {
  const bucket = bucketPath(cache, key)
  const entries = await bucketEntries(bucket)
  const newEntries = []
  // we loop backwards because the bottom-most result is the newest
  // since we add new entries with appendFile
  for (let i = entries.length - 1; i >= 0; --i) {
    const entry = entries[i]
    // a null integrity could mean either a delete was appended
    // or the user has simply stored an index that does not map
    // to any content. we determine if the user wants to keep the
    // null integrity based on the validateEntry function passed in options.
    // if the integrity is null and no validateEntry is provided, we break
    // as we consider the null integrity to be a deletion of everything
    // that came before it.
    if (entry.integrity === null && !opts.validateEntry) {
      break
    }

    // if this entry is valid, and it is either the first entry or
    // the newEntries array doesn't already include an entry that
    // matches this one based on the provided matchFn, then we add
    // it to the beginning of our list
    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
        (newEntries.length === 0 ||
          !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
      newEntries.unshift(entry)
    }
  }

  const newIndex = '\n' + newEntries.map((entry) => {
    const stringified = JSON.stringify(entry)
    const hash = hashEntry(stringified)
    return `${hash}\t${stringified}`
  }).join('\n')

  // Create a unique temp file to stage the rewritten bucket.
  const setup = async () => {
    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
    await mkdir(path.dirname(target), { recursive: true })
    return {
      target,
      moved: false,
    }
  }

  // Remove the temp file if it never made it into place.
  const teardown = async (tmp) => {
    if (!tmp.moved) {
      return rm(tmp.target, { recursive: true, force: true })
    }
  }

  const write = async (tmp) => {
    await writeFile(tmp.target, newIndex, { flag: 'wx' })
    await mkdir(path.dirname(bucket), { recursive: true })
    // we use @npmcli/move-file directly here because we
    // want to overwrite the existing file
    await moveFile(tmp.target, bucket)
    tmp.moved = true
  }

  // write the file atomically
  const tmp = await setup()
  try {
    await write(tmp)
  } finally {
    await teardown(tmp)
  }

  // we reverse the list we generated such that the newest
  // entries come first in order to make looping through them easier
  // the true passed to formatEntry tells it to keep null
  // integrity values, if they made it this far it's because
  // validateEntry returned true, and as such we should return it
  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
}

module.exports.insert = insert

// Append a new index entry for `key` to its bucket. A missing bucket
// directory (ENOENT on append) resolves to undefined rather than throwing.
async function insert (cache, key, integrity, opts = {}) {
  const { metadata, size, time } = opts
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: time || Date.now(),
    size,
    metadata,
  }
  try {
    await mkdir(path.dirname(bucket), { recursive: true })
    const stringified = JSON.stringify(entry)
    // NOTE - Cleverness ahoy!
    //
    // This works because it's tremendously unlikely for an entry to corrupt
    // another while still preserving the string length of the JSON in
    // question. So, we just slap the length in there and verify it on read.
    //
    // Thanks to @isaacs for the whiteboarding session that ended up with
    // this.
    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return undefined
    }

    throw err
  }
  return formatEntry(cache, entry)
}

module.exports.find = find

// Find the newest entry for `key`, or null when the bucket is missing or
// contains no matching entry.
async function find (cache, key) {
  const bucket = bucketPath(cache, key)
  try {
    const entries = await bucketEntries(bucket)
    return entries.reduce((latest, next) => {
      if (next && next.key === key) {
        return formatEntry(cache, next)
      } else {
        return latest
      }
    }, null)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return null
    } else {
      throw err
    }
  }
}

module.exports.delete = del

// Delete `key`: by default appends a null-integrity tombstone; with
// opts.removeFully the whole bucket file is removed.
function del (cache, key, opts = {}) {
  if (!opts.removeFully) {
    return insert(cache, key, null, opts)
  }

  const bucket = bucketPath(cache, key)
  return rm(bucket, { recursive: true, force: true })
}

module.exports.lsStream = lsStream

// Stream every live index entry (object mode), walking the two-level
// bucket directory layout.
function lsStream (cache) {
  const indexDir = bucketDir(cache)
  const stream = new Minipass({ objectMode: true })

  // Set all this up to run on the stream and then just return the stream
  Promise.resolve().then(async () => {
    const buckets = await readdirOrEmpty(indexDir)
    await Promise.all(buckets.map(async (bucket) => {
      const bucketPath = path.join(indexDir, bucket)
      const subbuckets = await readdirOrEmpty(bucketPath)
      await Promise.all(subbuckets.map(async (subbucket) => {
        const subbucketPath = path.join(bucketPath, subbucket)

        // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*"
        const subbucketEntries = await readdirOrEmpty(subbucketPath)
        await Promise.all(subbucketEntries.map(async (entry) => {
          const entryPath = path.join(subbucketPath, entry)
          try {
            const entries = await bucketEntries(entryPath)
            // using a Map here prevents duplicate keys from showing up
            // twice, I guess?
            const reduced = entries.reduce((acc, entry) => {
              acc.set(entry.key, entry)
              return acc
            }, new Map())
            // reduced is a map of key => entry
            for (const entry of reduced.values()) {
              const formatted = formatEntry(cache, entry)
              if (formatted) {
                stream.write(formatted)
              }
            }
          } catch (err) {
            if (err.code === 'ENOENT') {
              return undefined
            }
            throw err
          }
        }))
      }))
    }))
    stream.end()
    return stream
  }).catch(err => stream.emit('error', err))

  return stream
}

module.exports.ls = ls

// Collect lsStream() into a key -> entry object.
async function ls (cache) {
  const entries = await lsStream(cache).collect()
  return entries.reduce((acc, xs) => {
    acc[xs.key] = xs
    return acc
  }, {})
}

module.exports.bucketEntries = bucketEntries

// Read and parse every checksum-valid entry line in a bucket file.
async function bucketEntries (bucket, filter) {
  const data = await readFile(bucket, 'utf8')
  return _bucketEntries(data, filter)
}

// Parse raw bucket text: each line is "<sha1>\t<json>"; lines whose
// checksum or JSON fails are silently skipped.
function _bucketEntries (data, filter) {
  const entries = []
  data.split('\n').forEach((entry) => {
    if (!entry) {
      return
    }

    const pieces = entry.split('\t')
    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
      // Hash is no good! Corruption or malice? Doesn't matter!
      // EJECT EJECT
      return
    }
    let obj
    try {
      obj = JSON.parse(pieces[1])
    } catch (_) {
      // eslint-ignore-next-line no-empty-block
    }
    // coverage disabled here, no need to test with an entry that parses to something falsey
    // istanbul ignore else
    if (obj) {
      entries.push(obj)
    }
  })
  return entries
}

module.exports.bucketDir = bucketDir

// Root directory holding all index buckets for this cache version.
function bucketDir (cache) {
  return path.join(cache, `index-v${indexV}`)
}

module.exports.bucketPath = bucketPath

// Path of the bucket file for `key`, derived from its sha256 hash.
function bucketPath (cache, key) {
  const hashed = hashKey(key)
  return path.join(bucketDir(cache), ...hashToSegments(hashed))
}

module.exports.hashKey = hashKey

// Keys are hashed with sha256 to produce stable bucket locations.
function hashKey (key) {
  return hash(key, 'sha256')
}

module.exports.hashEntry = hashEntry
|
||||
|
||||
// Entry lines are checksummed with sha1 — this only guards against
// accidental corruption of an index line, not deliberate tampering.
function hashEntry (str) {
  return hash(str, 'sha1')
}

// Hex digest of `str` using the named algorithm.
function hash (str, digest) {
  const hasher = crypto.createHash(digest)
  hasher.update(str)
  return hasher.digest('hex')
}
|
||||
|
||||
// Convert a raw index entry into the public shape returned by the API.
// A falsy integrity marks a deletion and yields null, unless `keepAll`
// is set by a caller that has already validated the entry.
function formatEntry (cache, entry, keepAll) {
  // Treat null digests as deletions. They'll shadow any previous entries.
  if (!entry.integrity && !keepAll) {
    return null
  }

  return {
    key: entry.key,
    integrity: entry.integrity,
    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
    size: entry.size,
    time: entry.time,
    metadata: entry.metadata,
  }
}

// readdir that treats a missing path (or a non-directory) as empty.
function readdirOrEmpty (dir) {
  return readdir(dir).catch((err) => {
    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
      return []
    }

    throw err
  })
}
|
170
my-app/node_modules/cacache/lib/get.js
generated
vendored
Executable file
170
my-app/node_modules/cacache/lib/get.js
generated
vendored
Executable file
|
@ -0,0 +1,170 @@
|
|||
'use strict'

const Collect = require('minipass-collect')
const { Minipass } = require('minipass')
const Pipeline = require('minipass-pipeline')

const index = require('./entry-index')
const memo = require('./memoization')
const read = require('./content/read')

// Read data + metadata for `key`, consulting the memoization layer
// first. Throws NotFoundError when no index entry exists.
async function getData (cache, key, opts = {}) {
  const { integrity, memoize, size } = opts
  const memoized = memo.get(cache, key, opts)
  if (memoized && memoize !== false) {
    return {
      metadata: memoized.entry.metadata,
      data: memoized.data,
      integrity: memoized.entry.integrity,
      size: memoized.entry.size,
    }
  }

  const entry = await index.find(cache, key, opts)
  if (!entry) {
    throw new index.NotFoundError(cache, key)
  }
  const data = await read(cache, entry.integrity, { integrity, size })
  if (memoize) {
    memo.put(cache, entry, data, opts)
  }

  return {
    data,
    metadata: entry.metadata,
    size: entry.size,
    integrity: entry.integrity,
  }
}
module.exports = getData

// Like getData, but addressed directly by content digest — no index
// lookup, so the resolved value is the raw data only.
async function getDataByDigest (cache, key, opts = {}) {
  const { integrity, memoize, size } = opts
  const memoized = memo.get.byDigest(cache, key, opts)
  if (memoized && memoize !== false) {
    return memoized
  }

  const res = await read(cache, key, { integrity, size })
  if (memoize) {
    memo.put.byDigest(cache, key, res, opts)
  }
  return res
}
module.exports.byDigest = getDataByDigest

// Wrap a memoized result in a stream that replays metadata/integrity/
// size to listeners attached after the fact.
const getMemoizedStream = (memoized) => {
  const stream = new Minipass()
  stream.on('newListener', function (ev, cb) {
    ev === 'metadata' && cb(memoized.entry.metadata)
    ev === 'integrity' && cb(memoized.entry.integrity)
    ev === 'size' && cb(memoized.entry.size)
  })
  stream.end(memoized.data)
  return stream
}

// Streaming variant of getData. Metadata events are emitted on the
// returned stream; errors are emitted rather than thrown.
function getStream (cache, key, opts = {}) {
  const { memoize, size } = opts
  const memoized = memo.get(cache, key, opts)
  if (memoized && memoize !== false) {
    return getMemoizedStream(memoized)
  }

  const stream = new Pipeline()
  // Set all this up to run on the stream and then just return the stream
  Promise.resolve().then(async () => {
    const entry = await index.find(cache, key)
    if (!entry) {
      throw new index.NotFoundError(cache, key)
    }

    stream.emit('metadata', entry.metadata)
    stream.emit('integrity', entry.integrity)
    stream.emit('size', entry.size)
    stream.on('newListener', function (ev, cb) {
      ev === 'metadata' && cb(entry.metadata)
      ev === 'integrity' && cb(entry.integrity)
      ev === 'size' && cb(entry.size)
    })

    const src = read.readStream(
      cache,
      entry.integrity,
      { ...opts, size: typeof size !== 'number' ? entry.size : size }
    )

    if (memoize) {
      const memoStream = new Collect.PassThrough()
      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
      stream.unshift(memoStream)
    }
    stream.unshift(src)
    return stream
  }).catch((err) => stream.emit('error', err))

  return stream
}

module.exports.stream = getStream

// Streaming variant of getDataByDigest.
function getStreamDigest (cache, integrity, opts = {}) {
  const { memoize } = opts
  const memoized = memo.get.byDigest(cache, integrity, opts)
  if (memoized && memoize !== false) {
    const stream = new Minipass()
    stream.end(memoized)
    return stream
  } else {
    const stream = read.readStream(cache, integrity, opts)
    if (!memoize) {
      return stream
    }

    const memoStream = new Collect.PassThrough()
    memoStream.on('collect', data => memo.put.byDigest(
      cache,
      integrity,
      data,
      opts
    ))
    return new Pipeline(stream, memoStream)
  }
}

module.exports.stream.byDigest = getStreamDigest

// Look up the index entry for `key` (memoized if available) without
// touching content on disk.
function info (cache, key, opts = {}) {
  const { memoize } = opts
  const memoized = memo.get(cache, key, opts)
  if (memoized && memoize !== false) {
    return Promise.resolve(memoized.entry)
  } else {
    return index.find(cache, key)
  }
}
module.exports.info = info

// Copy the content for `key` to `dest`, resolving to its metadata.
async function copy (cache, key, dest, opts = {}) {
  const entry = await index.find(cache, key, opts)
  if (!entry) {
    throw new index.NotFoundError(cache, key)
  }
  await read.copy(cache, entry.integrity, dest, opts)
  return {
    metadata: entry.metadata,
    size: entry.size,
    integrity: entry.integrity,
  }
}

module.exports.copy = copy

// Digest-addressed copy; resolves to the digest that was copied.
async function copyByDigest (cache, key, dest, opts = {}) {
  await read.copy(cache, key, dest, opts)
  return key
}

module.exports.copy.byDigest = copyByDigest

module.exports.hasContent = read.hasContent
|
42
my-app/node_modules/cacache/lib/index.js
generated
vendored
Executable file
42
my-app/node_modules/cacache/lib/index.js
generated
vendored
Executable file
|
@ -0,0 +1,42 @@
|
|||
'use strict'

// Public entry point: stitches the sub-modules into the cacache API
// surface. Pure re-exports — no logic lives here.

const get = require('./get.js')
const put = require('./put.js')
const rm = require('./rm.js')
const verify = require('./verify.js')
const { clearMemoized } = require('./memoization.js')
const tmp = require('./util/tmp.js')
const index = require('./entry-index.js')

// Low-level index manipulation.
module.exports.index = {}
module.exports.index.compact = index.compact
module.exports.index.insert = index.insert

// Listing.
module.exports.ls = index.ls
module.exports.ls.stream = index.lsStream

// Reads.
module.exports.get = get
module.exports.get.byDigest = get.byDigest
module.exports.get.stream = get.stream
module.exports.get.stream.byDigest = get.stream.byDigest
module.exports.get.copy = get.copy
module.exports.get.copy.byDigest = get.copy.byDigest
module.exports.get.info = get.info
module.exports.get.hasContent = get.hasContent

// Writes.
module.exports.put = put
module.exports.put.stream = put.stream

// Removal. `rm` is an alias for `rm.entry`.
module.exports.rm = rm.entry
module.exports.rm.all = rm.all
module.exports.rm.entry = module.exports.rm
module.exports.rm.content = rm.content

module.exports.clearMemoized = clearMemoized

// Temp-dir helpers.
module.exports.tmp = {}
module.exports.tmp.mkdir = tmp.mkdir
module.exports.tmp.withTmp = tmp.withTmp

// Cache verification / garbage collection.
module.exports.verify = verify
module.exports.verify.lastRun = verify.lastRun
|
72
my-app/node_modules/cacache/lib/memoization.js
generated
vendored
Executable file
72
my-app/node_modules/cacache/lib/memoization.js
generated
vendored
Executable file
|
@ -0,0 +1,72 @@
|
|||
'use strict'
|
||||
|
||||
const { LRUCache } = require('lru-cache')
|
||||
|
||||
// Process-wide memoization store shared by all caches, bounded by entry
// count, total byte size, and a 3-minute TTL.  Keys are namespaced (see
// put/putDigest below): "key:<cache>:<key>" maps to { entry, data } and
// "digest:<cache>:<integrity>" maps to the raw data.
const MEMOIZED = new LRUCache({
  max: 500,
  maxSize: 50 * 1024 * 1024, // 50MB
  ttl: 3 * 60 * 1000, // 3 minutes
  // "key:" records store { entry, data }, so size by the data payload;
  // "digest:" records are the data itself.
  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
})
|
||||
|
||||
module.exports.clearMemoized = clearMemoized

// Empty the global memoization cache, returning a plain-object snapshot of
// everything that was in it, keyed by the namespaced cache key.
function clearMemoized () {
  const evicted = {}
  for (const [key, value] of MEMOIZED.entries()) {
    evicted[key] = value
  }
  MEMOIZED.clear()
  return evicted
}
|
||||
|
||||
module.exports.put = put

// Memoize a full index entry plus its content under the entry's key, and
// also memoize the content by its integrity digest.
function put (cache, entry, data, opts) {
  const store = pickMem(opts)
  store.set(`key:${cache}:${entry.key}`, { entry, data })
  putDigest(cache, entry.integrity, data, opts)
}
|
||||
|
||||
module.exports.put.byDigest = putDigest

// Memoize raw content data keyed by its integrity string.
function putDigest (cache, integrity, data, opts) {
  const store = pickMem(opts)
  store.set(`digest:${cache}:${integrity}`, data)
}
|
||||
|
||||
module.exports.get = get

// Fetch the memoized { entry, data } record stored under `key`, if any.
function get (cache, key, opts) {
  const store = pickMem(opts)
  return store.get(`key:${cache}:${key}`)
}
|
||||
|
||||
module.exports.get.byDigest = getDigest

// Fetch memoized raw content for an integrity string, if present.
function getDigest (cache, integrity, opts) {
  const store = pickMem(opts)
  return store.get(`digest:${cache}:${integrity}`)
}
|
||||
|
||||
// Minimal adapter that gives a plain object the get/set interface expected
// of a memoization store, so callers can pass `opts.memoize` as a bare
// object (see pickMem below).
class ObjProxy {
  constructor (obj) {
    // Backing object; reads and writes pass straight through to it.
    this.obj = obj
  }

  get (key) {
    return this.obj[key]
  }

  set (key, val) {
    this.obj[key] = val
  }
}
|
||||
|
||||
// Select the memoization store to use for an operation.  Callers may
// supply their own Map-like store via opts.memoize (anything with get and
// set), a plain object (wrapped in ObjProxy), or nothing at all — the
// latter two non-store cases fall back to the shared global cache.
function pickMem (opts) {
  const memoize = opts && opts.memoize
  if (!memoize) {
    return MEMOIZED
  }
  if (memoize.get && memoize.set) {
    return memoize
  }
  if (typeof memoize === 'object') {
    return new ObjProxy(memoize)
  }
  return MEMOIZED
}
|
80
my-app/node_modules/cacache/lib/put.js
generated
vendored
Executable file
80
my-app/node_modules/cacache/lib/put.js
generated
vendored
Executable file
|
@ -0,0 +1,80 @@
|
|||
'use strict'
|
||||
|
||||
const index = require('./entry-index')
|
||||
const memo = require('./memoization')
|
||||
const write = require('./content/write')
|
||||
const Flush = require('minipass-flush')
|
||||
const { PassThrough } = require('minipass-collect')
|
||||
const Pipeline = require('minipass-pipeline')
|
||||
|
||||
// Build the effective options for a put operation: default to sha512
// content hashing unless the caller overrides `algorithms`.
const putOpts = (opts) => {
  return {
    algorithms: ['sha512'],
    ...opts,
  }
}
|
||||
|
||||
module.exports = putData

// Write `data` into the content store, insert an index entry under `key`,
// optionally memoize the result, and resolve with the content's integrity.
async function putData (cache, key, data, opts = {}) {
  const memoize = opts.memoize
  const fullOpts = putOpts(opts)
  const written = await write(cache, data, fullOpts)
  const entry = await index.insert(cache, key, written.integrity, {
    ...fullOpts,
    size: written.size,
  })
  if (memoize) {
    memo.put(cache, entry, data, fullOpts)
  }
  return written.integrity
}
|
||||
|
||||
module.exports.stream = putStream

// Create a writable stream that stores its input in the cache under `key`.
// Emits 'integrity' and 'size' on the returned pipeline once the content
// has been written and indexed.  NOTE: opts.memoize buffers the entire
// payload in memory so it can be handed to the memoizer.
function putStream (cache, key, opts = {}) {
  const { memoize } = opts
  opts = putOpts(opts)
  let integrity
  let size
  let error

  let memoData
  const pipeline = new Pipeline()
  // first item in the pipeline is the memoizer, because we need
  // that to end first and get the collected data.
  if (memoize) {
    const memoizer = new PassThrough().on('collect', data => {
      memoData = data
    })
    pipeline.push(memoizer)
  }

  // contentStream is a write-only, not a passthrough
  // no data comes out of it.
  const contentStream = write.stream(cache, opts)
    .on('integrity', (int) => {
      integrity = int
    })
    .on('size', (s) => {
      size = s
    })
    .on('error', (err) => {
      error = err
    })

  pipeline.push(contentStream)

  // last but not least, we write the index and emit hash and size,
  // and memoize if we're doing that
  pipeline.push(new Flush({
    async flush () {
      // If the content write failed, skip indexing entirely; the error
      // has already been captured and will propagate through the pipeline.
      if (!error) {
        const entry = await index.insert(cache, key, integrity, { ...opts, size })
        if (memoize && memoData) {
          memo.put(cache, entry, memoData, opts)
        }
        pipeline.emit('integrity', integrity)
        pipeline.emit('size', size)
      }
    },
  }))

  return pipeline
}
|
31
my-app/node_modules/cacache/lib/rm.js
generated
vendored
Executable file
31
my-app/node_modules/cacache/lib/rm.js
generated
vendored
Executable file
|
@ -0,0 +1,31 @@
|
|||
'use strict'
|
||||
|
||||
const { rm } = require('fs/promises')
|
||||
const glob = require('./util/glob.js')
|
||||
const index = require('./entry-index')
|
||||
const memo = require('./memoization')
|
||||
const path = require('path')
|
||||
const rmContent = require('./content/rm')
|
||||
|
||||
module.exports = entry
module.exports.entry = entry

// Remove the index entry for `key`; the content itself is not removed by
// this call.  Clears the global memoization cache as a side effect so
// stale memoized entries cannot be served afterwards.
function entry (cache, key, opts) {
  memo.clearMemoized()
  return index.delete(cache, key, opts)
}
|
||||
|
||||
module.exports.content = content

// Remove the content stored at `integrity` from the content store.
// Clears the global memoization cache as a side effect.
function content (cache, integrity) {
  memo.clearMemoized()
  return rmContent(cache, integrity)
}
|
||||
|
||||
module.exports.all = all

// Wipe the entire cache: every content-* and index-* directory under the
// cache root is removed in parallel, and the memoization cache is cleared.
async function all (cache) {
  memo.clearMemoized()
  const pattern = path.join(cache, '*(content-*|index-*)')
  const targets = await glob(pattern, { silent: true, nosort: true })
  const removals = targets.map((p) => rm(p, { recursive: true, force: true }))
  return Promise.all(removals)
}
|
7
my-app/node_modules/cacache/lib/util/glob.js
generated
vendored
Executable file
7
my-app/node_modules/cacache/lib/util/glob.js
generated
vendored
Executable file
|
@ -0,0 +1,7 @@
|
|||
'use strict'

const { glob } = require('glob')
const path = require('path')

// Glob patterns always use forward slashes, even on Windows, so rewrite
// any Windows separators in the incoming pattern before matching.
function globify (pattern) {
  return pattern.split(path.win32.sep).join(path.posix.sep)
}

module.exports = (pattern, options) => glob(globify(pattern), options)
|
7
my-app/node_modules/cacache/lib/util/hash-to-segments.js
generated
vendored
Executable file
7
my-app/node_modules/cacache/lib/util/hash-to-segments.js
generated
vendored
Executable file
|
@ -0,0 +1,7 @@
|
|||
'use strict'
|
||||
|
||||
module.exports = hashToSegments
|
||||
|
||||
// Split a hex digest into path segments: two 2-character prefix
// directories plus the remainder, keeping on-disk directory fan-out small.
function hashToSegments (hash) {
  const first = hash.slice(0, 2)
  const second = hash.slice(2, 4)
  const rest = hash.slice(4)
  return [first, second, rest]
}
|
26
my-app/node_modules/cacache/lib/util/tmp.js
generated
vendored
Executable file
26
my-app/node_modules/cacache/lib/util/tmp.js
generated
vendored
Executable file
|
@ -0,0 +1,26 @@
|
|||
'use strict'
|
||||
|
||||
const { withTempDir } = require('@npmcli/fs')
|
||||
const fs = require('fs/promises')
|
||||
const path = require('path')
|
||||
|
||||
module.exports.mkdir = mktmpdir

// Create a unique temp directory under <cache>/tmp, optionally named with
// opts.tmpPrefix, and resolve with its path.  The caller owns cleanup.
async function mktmpdir (cache, opts = {}) {
  const prefix = opts.tmpPrefix || ''
  const tmpDir = path.join(cache, 'tmp')
  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
  // do not use path.join() here: it drops the trailing separator when the
  // prefix is empty, which would change where mkdtemp puts the directory.
  return fs.mkdtemp(`${tmpDir}${path.sep}${prefix}`, { owner: 'inherit' })
}
|
||||
|
||||
module.exports.withTmp = withTmp

// Run `cb` inside a freshly-created temp dir under <cache>/tmp, delegating
// creation and cleanup to @npmcli/fs withTempDir.  `opts` may be omitted,
// in which case the second argument is the callback.
function withTmp (cache, opts, cb) {
  if (!cb) {
    return withTempDir(path.join(cache, 'tmp'), opts, {})
  }
  return withTempDir(path.join(cache, 'tmp'), cb, opts)
}
|
257
my-app/node_modules/cacache/lib/verify.js
generated
vendored
Executable file
257
my-app/node_modules/cacache/lib/verify.js
generated
vendored
Executable file
|
@ -0,0 +1,257 @@
|
|||
'use strict'
|
||||
|
||||
const {
|
||||
mkdir,
|
||||
readFile,
|
||||
rm,
|
||||
stat,
|
||||
truncate,
|
||||
writeFile,
|
||||
} = require('fs/promises')
|
||||
const pMap = require('p-map')
|
||||
const contentPath = require('./content/path')
|
||||
const fsm = require('fs-minipass')
|
||||
const glob = require('./util/glob.js')
|
||||
const index = require('./entry-index')
|
||||
const path = require('path')
|
||||
const ssri = require('ssri')
|
||||
|
||||
// Own-property check that is safe even for objects with a null prototype
// or a shadowed hasOwnProperty.
const hasOwnProperty = (obj, key) => {
  return Object.prototype.hasOwnProperty.call(obj, key)
}
|
||||
|
||||
// Apply defaults for verify(): 20-way concurrency and a no-op logger.
const verifyOpts = (opts) => {
  return {
    concurrency: 20,
    log: { silly () {} },
    ...opts,
  }
}
|
||||
|
||||
module.exports = verify

// Run the full verification pipeline on `cache`: record start time, fix
// permissions, garbage-collect unreferenced/corrupt content, rebuild the
// index, clean tmp, stamp the verifile, and record end time.  Resolves
// with aggregated stats including per-step and total runtimes.
async function verify (cache, opts) {
  opts = verifyOpts(opts)
  opts.log.silly('verify', 'verifying cache at', cache)

  // Steps run strictly in order; each may return a partial stats object
  // that is merged into the aggregate.  Each step function's name doubles
  // as its runtime label, so these functions must keep their names.
  const steps = [
    markStartTime,
    fixPerms,
    garbageCollect,
    rebuildIndex,
    cleanTmp,
    writeVerifile,
    markEndTime,
  ]

  const stats = {}
  for (const step of steps) {
    const label = step.name
    const start = new Date()
    const s = await step(cache, opts)
    if (s) {
      Object.keys(s).forEach((k) => {
        stats[k] = s[k]
      })
    }
    const end = new Date()
    if (!stats.runTime) {
      stats.runTime = {}
    }
    stats.runTime[label] = end - start
  }
  // startTime/endTime were merged into stats by the first and last steps.
  stats.runTime.total = stats.endTime - stats.startTime
  opts.log.silly(
    'verify',
    'verification finished for',
    cache,
    'in',
    `${stats.runTime.total}ms`
  )
  return stats
}
|
||||
|
||||
// Pipeline step: record the wall-clock start of the verify run.
async function markStartTime (cache, opts) {
  const startTime = new Date()
  return { startTime }
}
|
||||
|
||||
// Pipeline step: record the wall-clock end of the verify run.
async function markEndTime (cache, opts) {
  const endTime = new Date()
  return { endTime }
}
|
||||
|
||||
// Pipeline step: ensure the cache root directory exists (mkdir -p).
// Contributes no stats of its own, hence the null return.
async function fixPerms (cache, opts) {
  opts.log.silly('verify', 'fixing cache permissions')
  await mkdir(cache, { recursive: true })
  return null
}
|
||||
|
||||
// Implements a naive mark-and-sweep tracing garbage collector.
//
// The algorithm is basically as follows:
// 1. Read (and filter) all index entries ("pointers")
// 2. Mark each integrity value as "live"
// 3. Read entire filesystem tree in `content-vX/` dir
// 4. If content is live, verify its checksum and delete it if it fails
// 5. If content is not marked as live, rm it.
//
async function garbageCollect (cache, opts) {
  opts.log.silly('verify', 'garbage collecting content')
  const indexStream = index.lsStream(cache)
  const liveContent = new Set()
  indexStream.on('data', (entry) => {
    // Entries rejected by the caller's filter are not marked live, which
    // makes their content eligible for reclamation below.
    if (opts.filter && !opts.filter(entry)) {
      return
    }

    // integrity is stringified, re-parse it so we can get each hash
    const integrity = ssri.parse(entry.integrity)
    for (const algo in integrity) {
      liveContent.add(integrity[algo].toString())
    }
  })
  // Wait until the entire index has been scanned before sweeping.
  await new Promise((resolve, reject) => {
    indexStream.on('end', resolve).on('error', reject)
  })
  const contentDir = contentPath.contentDir(cache)
  const files = await glob(path.join(contentDir, '**'), {
    follow: false,
    nodir: true,
    nosort: true,
  })
  const stats = {
    verifiedContent: 0,
    reclaimedCount: 0,
    reclaimedSize: 0,
    badContentCount: 0,
    keptSize: 0,
  }
  await pMap(
    files,
    async (f) => {
      // Reconstruct the integrity value from the content path layout:
      // <contentDir>/<algo>/<aa>/<bb>/<rest-of-hex-digest>
      const split = f.split(/[/\\]/)
      const digest = split.slice(split.length - 3).join('')
      const algo = split[split.length - 4]
      const integrity = ssri.fromHex(digest, algo)
      if (liveContent.has(integrity.toString())) {
        // Referenced content: verify its checksum.  verifyContent removes
        // the file itself when the checksum fails.
        const info = await verifyContent(f, integrity)
        if (!info.valid) {
          stats.reclaimedCount++
          stats.badContentCount++
          stats.reclaimedSize += info.size
        } else {
          stats.verifiedContent++
          stats.keptSize += info.size
        }
      } else {
        // No entries refer to this content. We can delete.
        stats.reclaimedCount++
        const s = await stat(f)
        await rm(f, { recursive: true, force: true })
        stats.reclaimedSize += s.size
      }
      return stats
    },
    { concurrency: opts.concurrency }
  )
  return stats
}
|
||||
|
||||
// Check a single content file against its expected integrity value.
// Returns { size, valid }.  On checksum mismatch the file is deleted and
// valid is false; a file that has vanished yields { size: 0, valid: false }.
async function verifyContent (filepath, sri) {
  const contentInfo = {}
  try {
    const { size } = await stat(filepath)
    contentInfo.size = size
    // Assume valid until the checksum proves otherwise below.
    contentInfo.valid = true
    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return { size: 0, valid: false }
    }
    // Only integrity failures are handled here; other I/O errors propagate.
    if (err.code !== 'EINTEGRITY') {
      throw err
    }

    // Corrupt content: remove it so the cache no longer serves it.
    await rm(filepath, { recursive: true, force: true })
    contentInfo.valid = false
  }
  return contentInfo
}
|
||||
|
||||
// Rewrite every index bucket from its parsed entries, dropping entries
// whose content is missing on disk and entries rejected by opts.filter.
// Returns { missingContent, rejectedEntries, totalEntries }.
async function rebuildIndex (cache, opts) {
  opts.log.silly('verify', 'rebuilding index')
  const entries = await index.ls(cache)
  const stats = {
    missingContent: 0,
    rejectedEntries: 0,
    totalEntries: 0,
  }
  // Group entries by hashed key so each bucket file is rewritten exactly
  // once.  A bucket containing only excluded entries is recorded as an
  // empty array (with its _path) so the file still gets truncated.
  const buckets = {}
  for (const k in entries) {
    /* istanbul ignore else */
    if (hasOwnProperty(entries, k)) {
      const hashed = index.hashKey(k)
      const entry = entries[k]
      const excluded = opts.filter && !opts.filter(entry)
      excluded && stats.rejectedEntries++
      if (buckets[hashed] && !excluded) {
        buckets[hashed].push(entry)
      } else if (buckets[hashed] && excluded) {
        // skip
      } else if (excluded) {
        buckets[hashed] = []
        buckets[hashed]._path = index.bucketPath(cache, k)
      } else {
        buckets[hashed] = [entry]
        buckets[hashed]._path = index.bucketPath(cache, k)
      }
    }
  }
  await pMap(
    Object.keys(buckets),
    (key) => {
      return rebuildBucket(cache, buckets[key], stats, opts)
    },
    { concurrency: opts.concurrency }
  )
  return stats
}
|
||||
|
||||
// Rewrite one bucket file: truncate it, then re-insert each surviving
// entry whose content still exists on disk, updating `stats` in place.
async function rebuildBucket (cache, bucket, stats, opts) {
  await truncate(bucket._path)
  // This needs to be serialized because cacache explicitly
  // lets very racy bucket conflicts clobber each other.
  for (const entry of bucket) {
    const content = contentPath(cache, entry.integrity)
    try {
      // stat() throws ENOENT when the content is gone; only in that case
      // is the entry dropped instead of re-inserted.
      await stat(content)
      await index.insert(cache, entry.key, entry.integrity, {
        metadata: entry.metadata,
        size: entry.size,
        time: entry.time,
      })
      stats.totalEntries++
    } catch (err) {
      if (err.code === 'ENOENT') {
        stats.rejectedEntries++
        stats.missingContent++
      } else {
        throw err
      }
    }
  }
}
|
||||
|
||||
// Pipeline step: delete the cache's tmp directory and everything in it.
function cleanTmp (cache, opts) {
  opts.log.silly('verify', 'cleaning tmp directory')
  const tmpDir = path.join(cache, 'tmp')
  return rm(tmpDir, { recursive: true, force: true })
}
|
||||
|
||||
// Pipeline step: stamp the cache with the current time (epoch ms) so
// lastRun() can report when verification last completed.
async function writeVerifile (cache, opts) {
  const verifile = path.join(cache, '_lastverified')
  opts.log.silly('verify', `writing verifile to ${verifile}`)
  return writeFile(verifile, String(Date.now()))
}
|
||||
|
||||
module.exports.lastRun = lastRun

// Read the verifile stamp written by writeVerifile and return the Date of
// the last completed verification.  Rejects (ENOENT) if the cache has
// never been verified.
async function lastRun (cache) {
  const stamp = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
  return new Date(Number(stamp))
}
|
Loading…
Add table
Add a link
Reference in a new issue