Updated the files.

Batuhan Berk Başoğlu 2024-02-08 19:38:41 -05:00
parent 1553e6b971
commit 753967d4f5
23418 changed files with 3784666 additions and 0 deletions

33
my-app/node_modules/webpack/lib/cache/AddBuildDependenciesPlugin.js generated vendored Executable file

@@ -0,0 +1,33 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../Compiler")} Compiler */
class AddBuildDependenciesPlugin {
/**
* @param {Iterable<string>} buildDependencies list of build dependencies
*/
constructor(buildDependencies) {
this.buildDependencies = new Set(buildDependencies);
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(
"AddBuildDependenciesPlugin",
compilation => {
compilation.buildDependencies.addAll(this.buildDependencies);
}
);
}
}
module.exports = AddBuildDependenciesPlugin;
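
A minimal configuration sketch of how this plugin's effect is normally reached from user land, via the documented cache.buildDependencies option rather than by instantiating the class directly (the paths are illustrative):

// webpack.config.js (sketch, assuming webpack 5's filesystem cache)
module.exports = {
	cache: {
		type: "filesystem",
		buildDependencies: {
			// these paths end up in compilation.buildDependencies,
			// invalidating the persistent cache when they change
			config: [__filename]
		}
	}
};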

35
my-app/node_modules/webpack/lib/cache/AddManagedPathsPlugin.js generated vendored Executable file

@@ -0,0 +1,35 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../Compiler")} Compiler */
class AddManagedPathsPlugin {
/**
* @param {Iterable<string | RegExp>} managedPaths list of managed paths
* @param {Iterable<string | RegExp>} immutablePaths list of immutable paths
*/
constructor(managedPaths, immutablePaths) {
this.managedPaths = new Set(managedPaths);
this.immutablePaths = new Set(immutablePaths);
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
for (const managedPath of this.managedPaths) {
compiler.managedPaths.add(managedPath);
}
for (const immutablePath of this.immutablePaths) {
compiler.immutablePaths.add(immutablePath);
}
}
}
module.exports = AddManagedPathsPlugin;
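
A configuration sketch for the documented snapshot options that populate these two sets (the concrete paths are illustrative):

// webpack.config.js (sketch)
const path = require("path");
module.exports = {
	snapshot: {
		// managed: assumed to be controlled by a package manager,
		// so only package metadata is checked instead of file contents
		managedPaths: [path.resolve(__dirname, "node_modules")],
		// immutable: assumed to never change once written
		immutablePaths: []
	}
};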

228
my-app/node_modules/webpack/lib/cache/IdleFileCachePlugin.js generated vendored Executable file

@@ -0,0 +1,228 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const Cache = require("../Cache");
const ProgressPlugin = require("../ProgressPlugin");
/** @typedef {import("../Compiler")} Compiler */
const BUILD_DEPENDENCIES_KEY = Symbol();
class IdleFileCachePlugin {
/**
* @param {TODO} strategy cache strategy
* @param {number} idleTimeout timeout
* @param {number} idleTimeoutForInitialStore initial timeout
* @param {number} idleTimeoutAfterLargeChanges timeout after changes
*/
constructor(
strategy,
idleTimeout,
idleTimeoutForInitialStore,
idleTimeoutAfterLargeChanges
) {
this.strategy = strategy;
this.idleTimeout = idleTimeout;
this.idleTimeoutForInitialStore = idleTimeoutForInitialStore;
this.idleTimeoutAfterLargeChanges = idleTimeoutAfterLargeChanges;
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
let strategy = this.strategy;
const idleTimeout = this.idleTimeout;
const idleTimeoutForInitialStore = Math.min(
idleTimeout,
this.idleTimeoutForInitialStore
);
const idleTimeoutAfterLargeChanges = this.idleTimeoutAfterLargeChanges;
const resolvedPromise = Promise.resolve();
let timeSpendInBuild = 0;
let timeSpendInStore = 0;
let avgTimeSpendInStore = 0;
/** @type {Map<string | typeof BUILD_DEPENDENCIES_KEY, () => Promise<void>>} */
const pendingIdleTasks = new Map();
compiler.cache.hooks.store.tap(
{ name: "IdleFileCachePlugin", stage: Cache.STAGE_DISK },
(identifier, etag, data) => {
pendingIdleTasks.set(identifier, () =>
strategy.store(identifier, etag, data)
);
}
);
compiler.cache.hooks.get.tapPromise(
{ name: "IdleFileCachePlugin", stage: Cache.STAGE_DISK },
(identifier, etag, gotHandlers) => {
const restore = () =>
strategy.restore(identifier, etag).then(cacheEntry => {
if (cacheEntry === undefined) {
gotHandlers.push((result, callback) => {
if (result !== undefined) {
pendingIdleTasks.set(identifier, () =>
strategy.store(identifier, etag, result)
);
}
callback();
});
} else {
return cacheEntry;
}
});
const pendingTask = pendingIdleTasks.get(identifier);
if (pendingTask !== undefined) {
pendingIdleTasks.delete(identifier);
return pendingTask().then(restore);
}
return restore();
}
);
compiler.cache.hooks.storeBuildDependencies.tap(
{ name: "IdleFileCachePlugin", stage: Cache.STAGE_DISK },
dependencies => {
pendingIdleTasks.set(BUILD_DEPENDENCIES_KEY, () =>
strategy.storeBuildDependencies(dependencies)
);
}
);
compiler.cache.hooks.shutdown.tapPromise(
{ name: "IdleFileCachePlugin", stage: Cache.STAGE_DISK },
() => {
if (idleTimer) {
clearTimeout(idleTimer);
idleTimer = undefined;
}
isIdle = false;
const reportProgress = ProgressPlugin.getReporter(compiler);
const jobs = Array.from(pendingIdleTasks.values());
if (reportProgress) reportProgress(0, "process pending cache items");
const promises = jobs.map(fn => fn());
pendingIdleTasks.clear();
promises.push(currentIdlePromise);
const promise = Promise.all(promises);
currentIdlePromise = promise.then(() => strategy.afterAllStored());
if (reportProgress) {
currentIdlePromise = currentIdlePromise.then(() => {
reportProgress(1, `stored`);
});
}
return currentIdlePromise.then(() => {
// Reset strategy
if (strategy.clear) strategy.clear();
});
}
);
/** @type {Promise<any>} */
let currentIdlePromise = resolvedPromise;
let isIdle = false;
let isInitialStore = true;
const processIdleTasks = () => {
if (isIdle) {
const startTime = Date.now();
if (pendingIdleTasks.size > 0) {
const promises = [currentIdlePromise];
const maxTime = startTime + 100;
let maxCount = 100;
for (const [filename, factory] of pendingIdleTasks) {
pendingIdleTasks.delete(filename);
promises.push(factory());
if (maxCount-- <= 0 || Date.now() > maxTime) break;
}
currentIdlePromise = Promise.all(promises);
currentIdlePromise.then(() => {
timeSpendInStore += Date.now() - startTime;
// Allow the process to exit between idle tasks
idleTimer = setTimeout(processIdleTasks, 0);
idleTimer.unref();
});
return;
}
currentIdlePromise = currentIdlePromise
.then(async () => {
await strategy.afterAllStored();
timeSpendInStore += Date.now() - startTime;
avgTimeSpendInStore =
Math.max(avgTimeSpendInStore, timeSpendInStore) * 0.9 +
timeSpendInStore * 0.1;
timeSpendInStore = 0;
timeSpendInBuild = 0;
})
.catch(err => {
const logger = compiler.getInfrastructureLogger(
"IdleFileCachePlugin"
);
logger.warn(`Background tasks during idle failed: ${err.message}`);
logger.debug(err.stack);
});
isInitialStore = false;
}
};
/** @type {ReturnType<typeof setTimeout> | undefined} */
let idleTimer = undefined;
compiler.cache.hooks.beginIdle.tap(
{ name: "IdleFileCachePlugin", stage: Cache.STAGE_DISK },
() => {
const isLargeChange = timeSpendInBuild > avgTimeSpendInStore * 2;
if (isInitialStore && idleTimeoutForInitialStore < idleTimeout) {
compiler
.getInfrastructureLogger("IdleFileCachePlugin")
.log(
`Initial cache was generated and cache will be persisted in ${
idleTimeoutForInitialStore / 1000
}s.`
);
} else if (
isLargeChange &&
idleTimeoutAfterLargeChanges < idleTimeout
) {
compiler
.getInfrastructureLogger("IdleFileCachePlugin")
.log(
`Spent ${Math.round(timeSpendInBuild) / 1000}s in build and ${
Math.round(avgTimeSpendInStore) / 1000
}s on average in cache store. This is considered a large change and the cache will be persisted in ${
idleTimeoutAfterLargeChanges / 1000
}s.`
);
}
idleTimer = setTimeout(() => {
idleTimer = undefined;
isIdle = true;
resolvedPromise.then(processIdleTasks);
}, Math.min(isInitialStore ? idleTimeoutForInitialStore : Infinity, isLargeChange ? idleTimeoutAfterLargeChanges : Infinity, idleTimeout));
idleTimer.unref();
}
);
compiler.cache.hooks.endIdle.tap(
{ name: "IdleFileCachePlugin", stage: Cache.STAGE_DISK },
() => {
if (idleTimer) {
clearTimeout(idleTimer);
idleTimer = undefined;
}
isIdle = false;
}
);
compiler.hooks.done.tap("IdleFileCachePlugin", stats => {
// 10% build overhead is ignored, as it's not cacheable
timeSpendInBuild *= 0.9;
timeSpendInBuild += stats.endTime - stats.startTime;
});
}
}
module.exports = IdleFileCachePlugin;
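
The three timeouts used above correspond to documented filesystem cache options; a sketch with illustrative values:

// webpack.config.js (sketch, assuming webpack 5's filesystem cache)
module.exports = {
	cache: {
		type: "filesystem",
		idleTimeout: 60000, // ms of idle time before pending cache items are flushed to disk
		idleTimeoutForInitialStore: 5000, // used instead while the cache is written for the first time
		idleTimeoutAfterLargeChanges: 1000 // used after builds whose duration dwarfs the average store time
	}
};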

57
my-app/node_modules/webpack/lib/cache/MemoryCachePlugin.js generated vendored Executable file

@@ -0,0 +1,57 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const Cache = require("../Cache");
/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("../Cache").Etag} Etag */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Module")} Module */
class MemoryCachePlugin {
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
/** @type {Map<string, { etag: Etag | null, data: any }>} */
const cache = new Map();
compiler.cache.hooks.store.tap(
{ name: "MemoryCachePlugin", stage: Cache.STAGE_MEMORY },
(identifier, etag, data) => {
cache.set(identifier, { etag, data });
}
);
compiler.cache.hooks.get.tap(
{ name: "MemoryCachePlugin", stage: Cache.STAGE_MEMORY },
(identifier, etag, gotHandlers) => {
const cacheEntry = cache.get(identifier);
if (cacheEntry === null) {
return null;
} else if (cacheEntry !== undefined) {
return cacheEntry.etag === etag ? cacheEntry.data : null;
}
gotHandlers.push((result, callback) => {
if (result === undefined) {
cache.set(identifier, null);
} else {
cache.set(identifier, { etag, data: result });
}
return callback();
});
}
);
compiler.cache.hooks.shutdown.tap(
{ name: "MemoryCachePlugin", stage: Cache.STAGE_MEMORY },
() => {
cache.clear();
}
);
}
}
module.exports = MemoryCachePlugin;
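
The per-identifier states used in the get hook above are easy to misread; a tiny standalone sketch of the convention (plain JavaScript, not webpack API; demoCache and lookup are hypothetical names):

// undefined      -> nothing known yet, fall through to slower cache stages (e.g. the disk cache)
// null           -> known miss, do not ask lower stages again
// { etag, data } -> hit, valid only while the etag still matches
const demoCache = new Map();
const lookup = (identifier, etag) => {
	const entry = demoCache.get(identifier);
	if (entry === null) return null; // cached miss
	if (entry !== undefined) return entry.etag === etag ? entry.data : null;
	return undefined; // unknown, let the next cache stage answer via gotHandlers
};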

131
my-app/node_modules/webpack/lib/cache/MemoryWithGcCachePlugin.js generated vendored Executable file

@@ -0,0 +1,131 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const Cache = require("../Cache");
/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("../Cache").Etag} Etag */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Module")} Module */
class MemoryWithGcCachePlugin {
constructor({ maxGenerations }) {
this._maxGenerations = maxGenerations;
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
const maxGenerations = this._maxGenerations;
/** @type {Map<string, { etag: Etag | null, data: any }>} */
const cache = new Map();
/** @type {Map<string, { entry: { etag: Etag | null, data: any }, until: number }>} */
const oldCache = new Map();
let generation = 0;
let cachePosition = 0;
const logger = compiler.getInfrastructureLogger("MemoryWithGcCachePlugin");
compiler.hooks.afterDone.tap("MemoryWithGcCachePlugin", () => {
generation++;
let clearedEntries = 0;
let lastClearedIdentifier;
// Avoid coverage problems due to indirect changes
/* istanbul ignore next */
for (const [identifier, entry] of oldCache) {
if (entry.until > generation) break;
oldCache.delete(identifier);
if (cache.get(identifier) === undefined) {
cache.delete(identifier);
clearedEntries++;
lastClearedIdentifier = identifier;
}
}
if (clearedEntries > 0 || oldCache.size > 0) {
logger.log(
`${cache.size - oldCache.size} active entries, ${
oldCache.size
} recently unused cached entries${
clearedEntries > 0
? `, ${clearedEntries} old unused cache entries removed e.g. ${lastClearedIdentifier}`
: ""
}`
);
}
let i = (cache.size / maxGenerations) | 0;
let j = cachePosition >= cache.size ? 0 : cachePosition;
cachePosition = j + i;
for (const [identifier, entry] of cache) {
if (j !== 0) {
j--;
continue;
}
if (entry !== undefined) {
// We don't delete the cache entry, but set it to undefined instead
// This reserves the location in the data table and avoids rehashing
// when constantly adding and removing entries.
// It will be deleted when removed from oldCache.
cache.set(identifier, undefined);
oldCache.delete(identifier);
oldCache.set(identifier, {
entry,
until: generation + maxGenerations
});
if (i-- === 0) break;
}
}
});
compiler.cache.hooks.store.tap(
{ name: "MemoryWithGcCachePlugin", stage: Cache.STAGE_MEMORY },
(identifier, etag, data) => {
cache.set(identifier, { etag, data });
}
);
compiler.cache.hooks.get.tap(
{ name: "MemoryWithGcCachePlugin", stage: Cache.STAGE_MEMORY },
(identifier, etag, gotHandlers) => {
const cacheEntry = cache.get(identifier);
if (cacheEntry === null) {
return null;
} else if (cacheEntry !== undefined) {
return cacheEntry.etag === etag ? cacheEntry.data : null;
}
const oldCacheEntry = oldCache.get(identifier);
if (oldCacheEntry !== undefined) {
const cacheEntry = oldCacheEntry.entry;
if (cacheEntry === null) {
oldCache.delete(identifier);
cache.set(identifier, cacheEntry);
return null;
} else {
if (cacheEntry.etag !== etag) return null;
oldCache.delete(identifier);
cache.set(identifier, cacheEntry);
return cacheEntry.data;
}
}
gotHandlers.push((result, callback) => {
if (result === undefined) {
cache.set(identifier, null);
} else {
cache.set(identifier, { etag, data: result });
}
return callback();
});
}
);
compiler.cache.hooks.shutdown.tap(
{ name: "MemoryWithGcCachePlugin", stage: Cache.STAGE_MEMORY },
() => {
cache.clear();
oldCache.clear();
}
);
}
}
module.exports = MemoryWithGcCachePlugin;
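
The generation based eviction above is driven by a single documented option for the memory cache; a configuration sketch (the value is illustrative):

// webpack.config.js (sketch)
module.exports = {
	cache: {
		type: "memory",
		// entries not read for this many compilations are moved to oldCache
		// and dropped once their `until` generation has passed
		maxGenerations: 5
	}
};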

1467
my-app/node_modules/webpack/lib/cache/PackFileCacheStrategy.js generated vendored Executable file

File diff suppressed because it is too large

357
my-app/node_modules/webpack/lib/cache/ResolverCachePlugin.js generated vendored Executable file

@@ -0,0 +1,357 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const LazySet = require("../util/LazySet");
const makeSerializable = require("../util/makeSerializable");
/** @typedef {import("enhanced-resolve/lib/Resolver")} Resolver */
/** @typedef {import("../CacheFacade").ItemCacheFacade} ItemCacheFacade */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../FileSystemInfo")} FileSystemInfo */
/** @typedef {import("../FileSystemInfo").Snapshot} Snapshot */
class CacheEntry {
constructor(result, snapshot) {
this.result = result;
this.snapshot = snapshot;
}
serialize({ write }) {
write(this.result);
write(this.snapshot);
}
deserialize({ read }) {
this.result = read();
this.snapshot = read();
}
}
makeSerializable(CacheEntry, "webpack/lib/cache/ResolverCachePlugin");
/**
* @template T
* @param {Set<T> | LazySet<T>} set set to add items to
* @param {Set<T> | LazySet<T>} otherSet set to add items from
* @returns {void}
*/
const addAllToSet = (set, otherSet) => {
if (set instanceof LazySet) {
set.addAll(otherSet);
} else {
for (const item of otherSet) {
set.add(item);
}
}
};
/**
* @param {Object} object an object
* @param {boolean} excludeContext if true, context is not included in string
* @returns {string} stringified version
*/
const objectToString = (object, excludeContext) => {
let str = "";
for (const key in object) {
if (excludeContext && key === "context") continue;
const value = object[key];
if (typeof value === "object" && value !== null) {
str += `|${key}=[${objectToString(value, false)}|]`;
} else {
str += `|${key}=|${value}`;
}
}
return str;
};
class ResolverCachePlugin {
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
const cache = compiler.getCache("ResolverCachePlugin");
/** @type {FileSystemInfo} */
let fileSystemInfo;
let snapshotOptions;
let realResolves = 0;
let cachedResolves = 0;
let cacheInvalidResolves = 0;
let concurrentResolves = 0;
compiler.hooks.thisCompilation.tap("ResolverCachePlugin", compilation => {
snapshotOptions = compilation.options.snapshot.resolve;
fileSystemInfo = compilation.fileSystemInfo;
compilation.hooks.finishModules.tap("ResolverCachePlugin", () => {
if (realResolves + cachedResolves > 0) {
const logger = compilation.getLogger("webpack.ResolverCachePlugin");
logger.log(
`${Math.round(
(100 * realResolves) / (realResolves + cachedResolves)
)}% really resolved (${realResolves} real resolves with ${cacheInvalidResolves} cached but invalid, ${cachedResolves} cached valid, ${concurrentResolves} concurrent)`
);
realResolves = 0;
cachedResolves = 0;
cacheInvalidResolves = 0;
concurrentResolves = 0;
}
});
});
/**
* @param {ItemCacheFacade} itemCache cache
* @param {Resolver} resolver the resolver
* @param {Object} resolveContext context for resolving meta info
* @param {Object} request the request info object
* @param {function((Error | null)=, Object=): void} callback callback function
* @returns {void}
*/
const doRealResolve = (
itemCache,
resolver,
resolveContext,
request,
callback
) => {
realResolves++;
const newRequest = {
_ResolverCachePluginCacheMiss: true,
...request
};
const newResolveContext = {
...resolveContext,
stack: new Set(),
/** @type {LazySet<string>} */
missingDependencies: new LazySet(),
/** @type {LazySet<string>} */
fileDependencies: new LazySet(),
/** @type {LazySet<string>} */
contextDependencies: new LazySet()
};
let yieldResult;
let withYield = false;
if (typeof newResolveContext.yield === "function") {
yieldResult = [];
withYield = true;
newResolveContext.yield = obj => yieldResult.push(obj);
}
const propagate = key => {
if (resolveContext[key]) {
addAllToSet(resolveContext[key], newResolveContext[key]);
}
};
const resolveTime = Date.now();
resolver.doResolve(
resolver.hooks.resolve,
newRequest,
"Cache miss",
newResolveContext,
(err, result) => {
propagate("fileDependencies");
propagate("contextDependencies");
propagate("missingDependencies");
if (err) return callback(err);
const fileDependencies = newResolveContext.fileDependencies;
const contextDependencies = newResolveContext.contextDependencies;
const missingDependencies = newResolveContext.missingDependencies;
fileSystemInfo.createSnapshot(
resolveTime,
fileDependencies,
contextDependencies,
missingDependencies,
snapshotOptions,
(err, snapshot) => {
if (err) return callback(err);
const resolveResult = withYield ? yieldResult : result;
// since we intercept the resolve hook
// we can still get a result in the callback
if (withYield && result) yieldResult.push(result);
if (!snapshot) {
if (resolveResult) return callback(null, resolveResult);
return callback();
}
itemCache.store(
new CacheEntry(resolveResult, snapshot),
storeErr => {
if (storeErr) return callback(storeErr);
if (resolveResult) return callback(null, resolveResult);
callback();
}
);
}
);
}
);
};
compiler.resolverFactory.hooks.resolver.intercept({
factory(type, hook) {
/** @type {Map<string, (function(Error=, Object=): void)[]>} */
const activeRequests = new Map();
/** @type {Map<string, [function(Error=, Object=): void, function(Error=, Object=): void][]>} */
const activeRequestsWithYield = new Map();
hook.tap(
"ResolverCachePlugin",
/**
* @param {Resolver} resolver the resolver
* @param {Object} options resolve options
* @param {Object} userOptions resolve options passed by the user
* @returns {void}
*/
(resolver, options, userOptions) => {
if (options.cache !== true) return;
const optionsIdent = objectToString(userOptions, false);
const cacheWithContext =
options.cacheWithContext !== undefined
? options.cacheWithContext
: false;
resolver.hooks.resolve.tapAsync(
{
name: "ResolverCachePlugin",
stage: -100
},
(request, resolveContext, callback) => {
if (request._ResolverCachePluginCacheMiss || !fileSystemInfo) {
return callback();
}
const withYield = typeof resolveContext.yield === "function";
const identifier = `${type}${
withYield ? "|yield" : "|default"
}${optionsIdent}${objectToString(request, !cacheWithContext)}`;
if (withYield) {
const activeRequest = activeRequestsWithYield.get(identifier);
if (activeRequest) {
activeRequest[0].push(callback);
activeRequest[1].push(resolveContext.yield);
return;
}
} else {
const activeRequest = activeRequests.get(identifier);
if (activeRequest) {
activeRequest.push(callback);
return;
}
}
const itemCache = cache.getItemCache(identifier, null);
let callbacks, yields;
const done = withYield
? (err, result) => {
if (callbacks === undefined) {
if (err) {
callback(err);
} else {
if (result)
for (const r of result) resolveContext.yield(r);
callback(null, null);
}
yields = undefined;
callbacks = false;
} else {
if (err) {
for (const cb of callbacks) cb(err);
} else {
for (let i = 0; i < callbacks.length; i++) {
const cb = callbacks[i];
const yield_ = yields[i];
if (result) for (const r of result) yield_(r);
cb(null, null);
}
}
activeRequestsWithYield.delete(identifier);
yields = undefined;
callbacks = false;
}
}
: (err, result) => {
if (callbacks === undefined) {
callback(err, result);
callbacks = false;
} else {
for (const callback of callbacks) {
callback(err, result);
}
activeRequests.delete(identifier);
callbacks = false;
}
};
/**
* @param {Error=} err error if any
* @param {CacheEntry=} cacheEntry cache entry
* @returns {void}
*/
const processCacheResult = (err, cacheEntry) => {
if (err) return done(err);
if (cacheEntry) {
const { snapshot, result } = cacheEntry;
fileSystemInfo.checkSnapshotValid(
snapshot,
(err, valid) => {
if (err || !valid) {
cacheInvalidResolves++;
return doRealResolve(
itemCache,
resolver,
resolveContext,
request,
done
);
}
cachedResolves++;
if (resolveContext.missingDependencies) {
addAllToSet(
resolveContext.missingDependencies,
snapshot.getMissingIterable()
);
}
if (resolveContext.fileDependencies) {
addAllToSet(
resolveContext.fileDependencies,
snapshot.getFileIterable()
);
}
if (resolveContext.contextDependencies) {
addAllToSet(
resolveContext.contextDependencies,
snapshot.getContextIterable()
);
}
done(null, result);
}
);
} else {
doRealResolve(
itemCache,
resolver,
resolveContext,
request,
done
);
}
};
itemCache.get(processCacheResult);
if (withYield && callbacks === undefined) {
callbacks = [callback];
yields = [resolveContext.yield];
activeRequestsWithYield.set(
identifier,
/** @type {[any, any]} */ ([callbacks, yields])
);
} else if (callbacks === undefined) {
callbacks = [callback];
activeRequests.set(identifier, callbacks);
}
}
);
}
);
return hook;
}
});
}
}
module.exports = ResolverCachePlugin;
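
The tap above only engages when the resolver options request caching; a sketch of the relevant documented resolve options (values illustrative):

// webpack.config.js (sketch)
module.exports = {
	resolve: {
		// options.cache === true is required for the ResolverCachePlugin tap above
		cache: true,
		// when false (the fallback used above), `context` is excluded from the cache identifier
		cacheWithContext: false
	}
};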

81
my-app/node_modules/webpack/lib/cache/getLazyHashedEtag.js generated vendored Executable file

@@ -0,0 +1,81 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const createHash = require("../util/createHash");
/** @typedef {import("../util/Hash")} Hash */
/** @typedef {typeof import("../util/Hash")} HashConstructor */
/**
* @typedef {Object} HashableObject
* @property {function(Hash): void} updateHash
*/
class LazyHashedEtag {
/**
* @param {HashableObject} obj object with updateHash method
* @param {string | HashConstructor} hashFunction the hash function to use
*/
constructor(obj, hashFunction = "md4") {
this._obj = obj;
this._hash = undefined;
this._hashFunction = hashFunction;
}
/**
* @returns {string} hash of object
*/
toString() {
if (this._hash === undefined) {
const hash = createHash(this._hashFunction);
this._obj.updateHash(hash);
this._hash = /** @type {string} */ (hash.digest("base64"));
}
return this._hash;
}
}
/** @type {Map<string | HashConstructor, WeakMap<HashableObject, LazyHashedEtag>>} */
const mapStrings = new Map();
/** @type {WeakMap<HashConstructor, WeakMap<HashableObject, LazyHashedEtag>>} */
const mapObjects = new WeakMap();
/**
* @param {HashableObject} obj object with updateHash method
* @param {string | HashConstructor} hashFunction the hash function to use
* @returns {LazyHashedEtag} etag
*/
const getter = (obj, hashFunction = "md4") => {
let innerMap;
if (typeof hashFunction === "string") {
innerMap = mapStrings.get(hashFunction);
if (innerMap === undefined) {
const newHash = new LazyHashedEtag(obj, hashFunction);
innerMap = new WeakMap();
innerMap.set(obj, newHash);
mapStrings.set(hashFunction, innerMap);
return newHash;
}
} else {
innerMap = mapObjects.get(hashFunction);
if (innerMap === undefined) {
const newHash = new LazyHashedEtag(obj, hashFunction);
innerMap = new WeakMap();
innerMap.set(obj, newHash);
mapObjects.set(hashFunction, innerMap);
return newHash;
}
}
const hash = innerMap.get(obj);
if (hash !== undefined) return hash;
const newHash = new LazyHashedEtag(obj, hashFunction);
innerMap.set(obj, newHash);
return newHash;
};
module.exports = getter;
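
A usage sketch derived from the code above (the require path assumes webpack's internal module layout; the hashable object is hypothetical):

const getLazyHashedEtag = require("webpack/lib/cache/getLazyHashedEtag");

// any object exposing updateHash(hash) qualifies; in webpack this is typically a Module
const obj = {
	updateHash(hash) {
		hash.update("some cacheable content");
	}
};

const a = getLazyHashedEtag(obj, "md4");
const b = getLazyHashedEtag(obj, "md4");
console.log(a === b); // true, memoized per (hashFunction, object)
console.log(String(a)); // base64 digest, computed lazily on the first toString()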

74
my-app/node_modules/webpack/lib/cache/mergeEtags.js generated vendored Executable file

@@ -0,0 +1,74 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../Cache").Etag} Etag */
class MergedEtag {
/**
* @param {Etag} a first
* @param {Etag} b second
*/
constructor(a, b) {
this.a = a;
this.b = b;
}
toString() {
return `${this.a.toString()}|${this.b.toString()}`;
}
}
const dualObjectMap = new WeakMap();
const objectStringMap = new WeakMap();
/**
* @param {Etag} a first
* @param {Etag} b second
* @returns {Etag} result
*/
const mergeEtags = (a, b) => {
if (typeof a === "string") {
if (typeof b === "string") {
return `${a}|${b}`;
} else {
const temp = b;
b = a;
a = temp;
}
} else {
if (typeof b !== "string") {
// both a and b are objects
let map = dualObjectMap.get(a);
if (map === undefined) {
dualObjectMap.set(a, (map = new WeakMap()));
}
const mergedEtag = map.get(b);
if (mergedEtag === undefined) {
const newMergedEtag = new MergedEtag(a, b);
map.set(b, newMergedEtag);
return newMergedEtag;
} else {
return mergedEtag;
}
}
}
// a is object, b is string
let map = objectStringMap.get(a);
if (map === undefined) {
objectStringMap.set(a, (map = new Map()));
}
const mergedEtag = map.get(b);
if (mergedEtag === undefined) {
const newMergedEtag = new MergedEtag(a, b);
map.set(b, newMergedEtag);
return newMergedEtag;
} else {
return mergedEtag;
}
};
module.exports = mergeEtags;
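
A usage sketch covering the three branches above (the require path assumes webpack's internal module layout; the etag objects are hypothetical):

const mergeEtags = require("webpack/lib/cache/mergeEtags");

const objA = { toString: () => "A" }; // any Etag-like object
const objB = { toString: () => "B" };

console.log(mergeEtags("x", "y")); // "x|y" (string + string)
console.log(String(mergeEtags(objA, "y"))); // "A|y" (object + string, memoized)
console.log(String(mergeEtags(objA, objB))); // "A|B" (object + object, memoized per pair)
console.log(mergeEtags(objA, objB) === mergeEtags(objA, objB)); // true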