Deployed the page to GitHub Pages.

This commit is contained in:
Batuhan Berk Başoğlu 2024-11-03 21:30:09 -05:00
parent 1d79754e93
commit 2c89899458
Signed by: batuhan-basoglu
SSH key fingerprint: SHA256:kEsnuHX+qbwhxSAXPUQ4ox535wFHu/hIRaa53FzxRpo
62797 changed files with 6551425 additions and 15279 deletions

252
node_modules/webpack/lib/util/LazyBucketSortedSet.js generated vendored Normal file
View file

@ -0,0 +1,252 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { first } = require("./SetHelpers");
const SortableSet = require("./SortableSet");
/**
* @template T
* @typedef {LazyBucketSortedSet<T, any> | SortableSet<T>} Entry
*/
/**
* @template T
* @typedef {(function(T): any) | (function(any, any): number)} Arg
*/
/**
* Multi layer bucket sorted set:
* Supports adding non-existing items (DO NOT ADD ITEM TWICE),
* Supports removing exiting items (DO NOT REMOVE ITEM NOT IN SET),
* Supports popping the first items according to defined order,
* Supports iterating all items without order,
* Supports updating an item in an efficient way,
* Supports size property, which is the number of items,
* Items are lazy partially sorted when needed
* @template T
* @template K
*/
class LazyBucketSortedSet {
	/**
	 * @param {function(T): K} getKey function to get key from item
	 * @param {function(K, K): number} comparator comparator to sort keys
	 * @param {...Arg<T>} args more pairs of getKey and comparator plus optional final comparator for the last layer
	 */
	constructor(getKey, comparator, ...args) {
		this._getKey = getKey;
		/** @type {Arg<T>[]} */
		this._innerArgs = args;
		// Leaf level: buckets are plain SortableSets; otherwise each bucket is
		// a nested LazyBucketSortedSet built from the remaining args.
		this._leaf = args.length <= 1;
		this._keys = new SortableSet(undefined, comparator);
		/** @type {Map<K, Entry<T>>} */
		this._map = new Map();
		// Items added but not yet distributed into buckets; they are only
		// bucketed lazily when popFirst needs an ordered view.
		this._unsortedItems = new Set();
		this.size = 0;
	}
	/**
	 * Adds an item (must not already be in the set, see class doc).
	 * The item is only buffered here; bucketing happens lazily.
	 * @param {T} item an item
	 * @returns {void}
	 */
	add(item) {
		this.size++;
		this._unsortedItems.add(item);
	}
	/**
	 * Places an item into the bucket for its key, creating the bucket on demand.
	 * @param {K} key key of item
	 * @param {T} item the item
	 * @returns {void}
	 */
	_addInternal(key, item) {
		let entry = this._map.get(key);
		if (entry === undefined) {
			entry =
				/** @type {Entry<T>} */
				(
					this._leaf
						? new SortableSet(undefined, this._innerArgs[0])
						: new /** @type {TODO} */ (LazyBucketSortedSet)(...this._innerArgs)
				);
			this._keys.add(key);
			this._map.set(key, entry);
		}
		/** @type {Entry<T>} */
		(entry).add(item);
	}
	/**
	 * Removes an item (must be in the set, see class doc).
	 * @param {T} item an item
	 * @returns {void}
	 */
	delete(item) {
		this.size--;
		// Cheap path: the item was never bucketed.
		if (this._unsortedItems.has(item)) {
			this._unsortedItems.delete(item);
			return;
		}
		const key = this._getKey(item);
		const entry = /** @type {Entry<T>} */ (this._map.get(key));
		entry.delete(item);
		if (entry.size === 0) {
			this._deleteKey(key);
		}
	}
	/**
	 * Drops an (empty) bucket and its key.
	 * @param {K} key key to be removed
	 * @returns {void}
	 */
	_deleteKey(key) {
		this._keys.delete(key);
		this._map.delete(key);
	}
	/**
	 * Removes and returns the first item according to the layered order.
	 * @returns {T | undefined} an item
	 */
	popFirst() {
		if (this.size === 0) return;
		this.size--;
		// Flush the lazy buffer so every item sits in its bucket before sorting.
		if (this._unsortedItems.size > 0) {
			for (const item of this._unsortedItems) {
				const key = this._getKey(item);
				this._addInternal(key, item);
			}
			this._unsortedItems.clear();
		}
		this._keys.sort();
		const key = /** @type {K} */ (first(this._keys));
		const entry = this._map.get(key);
		if (this._leaf) {
			const leafEntry = /** @type {SortableSet<T>} */ (entry);
			leafEntry.sort();
			const item = /** @type {T} */ (first(leafEntry));
			leafEntry.delete(item);
			if (leafEntry.size === 0) {
				this._deleteKey(key);
			}
			return item;
		}
		// Non-leaf: recurse into the first bucket.
		const nodeEntry = /** @type {LazyBucketSortedSet<T, any>} */ (entry);
		const item = nodeEntry.popFirst();
		if (nodeEntry.size === 0) {
			this._deleteKey(key);
		}
		return item;
	}
	/**
	 * Starts an update of an item's key: mutate the item, then call the
	 * returned function to commit (pass `true` to remove the item instead).
	 * @param {T} item to be updated item
	 * @returns {function(true=): void} finish update
	 */
	startUpdate(item) {
		if (this._unsortedItems.has(item)) {
			return remove => {
				if (remove) {
					this._unsortedItems.delete(item);
					this.size--;
				}
			};
		}
		// The old key is captured here, before the caller mutates the item.
		const key = this._getKey(item);
		if (this._leaf) {
			const oldEntry = /** @type {SortableSet<T>} */ (this._map.get(key));
			return remove => {
				if (remove) {
					this.size--;
					oldEntry.delete(item);
					if (oldEntry.size === 0) {
						this._deleteKey(key);
					}
					return;
				}
				const newKey = this._getKey(item);
				if (key === newKey) {
					// This flags the sortable set as unordered
					oldEntry.add(item);
				} else {
					// Key changed: move the item to its new bucket.
					oldEntry.delete(item);
					if (oldEntry.size === 0) {
						this._deleteKey(key);
					}
					this._addInternal(newKey, item);
				}
			};
		}
		const oldEntry = /** @type {LazyBucketSortedSet<T, any>} */ (
			this._map.get(key)
		);
		const finishUpdate = oldEntry.startUpdate(item);
		return remove => {
			if (remove) {
				this.size--;
				finishUpdate(true);
				if (oldEntry.size === 0) {
					this._deleteKey(key);
				}
				return;
			}
			const newKey = this._getKey(item);
			if (key === newKey) {
				finishUpdate();
			} else {
				// Remove from the old nested bucket, then re-add under the new key.
				finishUpdate(true);
				if (oldEntry.size === 0) {
					this._deleteKey(key);
				}
				this._addInternal(newKey, item);
			}
		};
	}
	/**
	 * Collects iterators over all items (unsorted buffer first, then buckets).
	 * @param {Iterator<T>[]} iterators list of iterators to append to
	 * @returns {void}
	 */
	_appendIterators(iterators) {
		if (this._unsortedItems.size > 0)
			iterators.push(this._unsortedItems[Symbol.iterator]());
		for (const key of this._keys) {
			const entry = this._map.get(key);
			if (this._leaf) {
				const leafEntry = /** @type {SortableSet<T>} */ (entry);
				const iterator = leafEntry[Symbol.iterator]();
				iterators.push(iterator);
			} else {
				const nodeEntry = /** @type {LazyBucketSortedSet<T, any>} */ (entry);
				nodeEntry._appendIterators(iterators);
			}
		}
	}
	/**
	 * Iterates all items without any ordering guarantee.
	 * @returns {Iterator<T>} the iterator
	 */
	[Symbol.iterator]() {
		/** @type {Iterator<T>[]} */
		const iterators = [];
		this._appendIterators(iterators);
		// Reversed so pop() consumes the collected iterators in order.
		iterators.reverse();
		let currentIterator =
			/** @type {Iterator<T>} */
			(iterators.pop());
		return {
			next: () => {
				const res = currentIterator.next();
				if (res.done) {
					if (iterators.length === 0) return res;
					currentIterator = /** @type {Iterator<T>} */ (iterators.pop());
					return currentIterator.next();
				}
				return res;
			}
		};
	}
}
module.exports = LazyBucketSortedSet;

57
node_modules/webpack/lib/util/Queue.js generated vendored Normal file
View file

@ -0,0 +1,57 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
 * @template T
 */
class Queue {
	/**
	 * @param {Iterable<T>=} items The initial elements.
	 */
	constructor(items) {
		/**
		 * The backing store. Insertion order is the queue order and
		 * duplicate elements are collapsed (Set semantics).
		 * @private
		 * @type {Set<T>}
		 */
		this._set = new Set(items);
		/**
		 * A live iterator over the backing set. Set iterators observe
		 * elements added after their creation, so this single iterator can
		 * serve every dequeue for the lifetime of the queue.
		 * @private
		 * @type {Iterator<T>}
		 */
		this._iterator = this._set[Symbol.iterator]();
	}
	/**
	 * Returns the number of elements currently in this queue.
	 * @returns {number} The number of elements in this queue.
	 */
	get length() {
		return this._set.size;
	}
	/**
	 * Appends the specified element to this queue. Adding an element that
	 * is already queued is a no-op.
	 * @param {T} item The element to add.
	 * @returns {void}
	 */
	enqueue(item) {
		this._set.add(item);
	}
	/**
	 * Retrieves and removes the head of this queue.
	 * @returns {T | undefined} The head of the queue, or `undefined` if this queue is empty.
	 */
	dequeue() {
		const { done, value } = this._iterator.next();
		if (done) return undefined;
		this._set.delete(value);
		return value;
	}
}
// Expose Queue as the module's sole export.
module.exports = Queue;

51
node_modules/webpack/lib/util/Semaphore.js generated vendored Normal file
View file

@ -0,0 +1,51 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
class Semaphore {
	/**
	 * Creates an instance of Semaphore.
	 * @param {number} available the amount available number of "tasks"
	 * in the Semaphore
	 */
	constructor(available) {
		// Number of free slots.
		this.available = available;
		/**
		 * Callbacks waiting for a free slot.
		 * @type {(function(): void)[]}
		 */
		this.waiters = [];
		/** @private bound once so release() can schedule it repeatedly */
		this._continue = this._continue.bind(this);
	}
	/**
	 * Runs `callback` immediately when a slot is free, otherwise queues it.
	 * @param {function(): void} callback function block to capture and run
	 * @returns {void}
	 */
	acquire(callback) {
		if (this.available <= 0) {
			this.waiters.push(callback);
			return;
		}
		this.available--;
		callback();
	}
	/**
	 * Returns a slot and, if anyone is waiting, schedules a waiter to be
	 * resumed on the next tick.
	 * @returns {void}
	 */
	release() {
		this.available++;
		if (this.waiters.length > 0) {
			process.nextTick(this._continue);
		}
	}
	/** @private hands a freed slot to the most recently queued waiter (LIFO) */
	_continue() {
		if (this.available > 0 && this.waiters.length > 0) {
			this.available--;
			const callback = /** @type {(function(): void)} */ (this.waiters.pop());
			callback();
		}
	}
}
// Expose Semaphore as the module's sole export.
module.exports = Semaphore;

94
node_modules/webpack/lib/util/SetHelpers.js generated vendored Normal file
View file

@ -0,0 +1,94 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
 * intersect creates a Set containing the intersection of elements between all sets
 * @template T
 * @param {Set<T>[]} sets an array of sets being checked for shared elements
 * @returns {Set<T>} returns a new Set containing the intersecting items
 */
const intersect = sets => {
	if (sets.length === 0) return new Set();
	if (sets.length === 1) return new Set(sets[0]);
	// Start from the smallest set: the intersection can never be larger than
	// it, which minimizes membership checks in the loop below.
	let smallestIndex = 0;
	for (let i = 1; i < sets.length; i++) {
		if (sets[i].size < sets[smallestIndex].size) {
			smallestIndex = i;
		}
	}
	const result = new Set(sets[smallestIndex]);
	for (let i = 0; i < sets.length; i++) {
		if (i === smallestIndex) continue;
		const other = sets[i];
		// Deleting during iteration is safe for Set.
		for (const item of result) {
			if (!other.has(item)) {
				result.delete(item);
			}
		}
	}
	return result;
};
/**
 * Checks if a set is a subset of another set.
 * Fix: the original JSDoc described the return value backwards — the code
 * returns true when `bigSet` contains every element of `smallSet`, not the
 * other way around.
 * @template T
 * @param {Set<T>} bigSet a Set which contains the original elements to compare against
 * @param {Set<T>} smallSet the set whose elements might be contained inside of bigSet
 * @returns {boolean} returns true if bigSet contains every element of smallSet
 */
const isSubset = (bigSet, smallSet) => {
	// A superset can never be smaller than the subset candidate.
	if (bigSet.size < smallSet.size) return false;
	for (const item of smallSet) {
		if (!bigSet.has(item)) return false;
	}
	return true;
};
/**
 * Returns the first element of the set matching a predicate.
 * @template T
 * @param {Set<T>} set a set
 * @param {function(T): boolean} fn selector function
 * @returns {T | undefined} the first matching item, or undefined when none matches
 */
const find = (set, fn) => {
	for (const element of set) {
		if (fn(element)) {
			return element;
		}
	}
	return undefined;
};
/**
 * Returns the first element of a set (in iteration order).
 * @template T
 * @param {Set<T>} set a set
 * @returns {T | undefined} first item, or undefined for an empty set
 */
const first = set => {
	for (const item of set) return item;
	return undefined;
};
/**
 * Union of two sets. When one side is empty the other set object itself is
 * returned, so the result may be identical (===) to a or b.
 * @template T
 * @param {Set<T>} a first
 * @param {Set<T>} b second
 * @returns {Set<T>} combined set, may be identical to a or b
 */
const combine = (a, b) => {
	if (b.size === 0) return a;
	if (a.size === 0) return b;
	const union = new Set(a);
	for (const item of b) {
		union.add(item);
	}
	return union;
};
// Public API of this helper module.
module.exports.intersect = intersect;
module.exports.isSubset = isSubset;
module.exports.find = find;
module.exports.first = first;
module.exports.combine = combine;

173
node_modules/webpack/lib/util/SortableSet.js generated vendored Normal file
View file

@ -0,0 +1,173 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
// Sentinel: marks that the set is not currently sorted by any function.
const NONE = Symbol("not sorted");
/**
 * A subset of Set that offers sorting functionality
 * @template T item type in set
 * @extends {Set<T>}
 */
class SortableSet extends Set {
	/**
	 * Create a new sortable set
	 * @template T
	 * @param {Iterable<T>=} initialIterable The initial iterable value
	 * @typedef {function(T, T): number} SortFunction
	 * @param {SortFunction<T>=} defaultSort Default sorting function
	 */
	constructor(initialIterable, defaultSort) {
		super(initialIterable);
		/**
		 * @private
		 * @type {undefined | SortFunction<T>}
		 */
		this._sortFn = defaultSort;
		/**
		 * The sort function last applied; reset to NONE by any mutation so
		 * sortWith knows the order is stale.
		 * @private
		 * @type {typeof NONE | undefined | function(T, T): number}}
		 */
		this._lastActiveSortFn = NONE;
		/**
		 * Cache for getFromCache results; invalidated on mutation AND on
		 * re-sorting, because those results may depend on iteration order.
		 * @private
		 * @type {Map<Function, any> | undefined}
		 */
		this._cache = undefined;
		/**
		 * Cache for getFromUnorderedCache results; invalidated on mutation
		 * only — re-sorting does not change order-independent results.
		 * @private
		 * @type {Map<Function, any> | undefined}
		 */
		this._cacheOrderIndependent = undefined;
	}
	/**
	 * @param {T} value value to add to set
	 * @returns {this} returns itself
	 */
	add(value) {
		// Any insertion may break the current order and both caches.
		this._lastActiveSortFn = NONE;
		this._invalidateCache();
		this._invalidateOrderedCache();
		super.add(value);
		return this;
	}
	/**
	 * @param {T} value value to delete
	 * @returns {boolean} true if value existed in set, false otherwise
	 */
	delete(value) {
		this._invalidateCache();
		this._invalidateOrderedCache();
		return super.delete(value);
	}
	/**
	 * Removes all elements and invalidates both caches.
	 * @returns {void}
	 */
	clear() {
		this._invalidateCache();
		this._invalidateOrderedCache();
		return super.clear();
	}
	/**
	 * Sort with a comparer function
	 * @param {SortFunction<T> | undefined} sortFn Sorting comparer function
	 * @returns {void}
	 */
	sortWith(sortFn) {
		if (this.size <= 1 || sortFn === this._lastActiveSortFn) {
			// already sorted - nothing to do
			return;
		}
		// Re-insert in sorted order: Set iteration follows insertion order.
		const sortedArray = Array.from(this).sort(sortFn);
		super.clear();
		for (let i = 0; i < sortedArray.length; i += 1) {
			super.add(sortedArray[i]);
		}
		this._lastActiveSortFn = sortFn;
		// Only the order-dependent cache is affected by re-sorting.
		this._invalidateCache();
	}
	/**
	 * Sorts with the default sort function given to the constructor.
	 * @returns {this} itself
	 */
	sort() {
		this.sortWith(this._sortFn);
		return this;
	}
	/**
	 * Get data from cache
	 * @template R
	 * @param {function(SortableSet<T>): R} fn function to calculate value
	 * @returns {R} returns result of fn(this), cached until set changes
	 */
	getFromCache(fn) {
		if (this._cache === undefined) {
			this._cache = new Map();
		} else {
			const result = this._cache.get(fn);
			const data = /** @type {R} */ (result);
			// NOTE: a fn that returns undefined is recomputed on every call.
			if (data !== undefined) {
				return data;
			}
		}
		const newData = fn(this);
		this._cache.set(fn, newData);
		return newData;
	}
	/**
	 * Get data from cache (ignoring sorting)
	 * @template R
	 * @param {function(SortableSet<T>): R} fn function to calculate value
	 * @returns {R} returns result of fn(this), cached until set changes
	 */
	getFromUnorderedCache(fn) {
		if (this._cacheOrderIndependent === undefined) {
			this._cacheOrderIndependent = new Map();
		} else {
			const result = this._cacheOrderIndependent.get(fn);
			const data = /** @type {R} */ (result);
			if (data !== undefined) {
				return data;
			}
		}
		const newData = fn(this);
		this._cacheOrderIndependent.set(fn, newData);
		return newData;
	}
	/**
	 * Clears the order-dependent cache.
	 * @private
	 * @returns {void}
	 */
	_invalidateCache() {
		if (this._cache !== undefined) {
			this._cache.clear();
		}
	}
	/**
	 * Clears the order-INdependent cache (the name is historical; it clears
	 * `_cacheOrderIndependent`, not an "ordered" cache).
	 * @private
	 * @returns {void}
	 */
	_invalidateOrderedCache() {
		if (this._cacheOrderIndependent !== undefined) {
			this._cacheOrderIndependent.clear();
		}
	}
	/**
	 * @returns {T[]} the raw array
	 */
	toJSON() {
		return Array.from(this);
	}
}
module.exports = SortableSet;

583
node_modules/webpack/lib/util/cleverMerge.js generated vendored Normal file
View file

@ -0,0 +1,583 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @type {WeakMap<object, WeakMap<object, object>>} */
const mergeCache = new WeakMap();
/** @type {WeakMap<object, Map<string, Map<string|number|boolean, object>>>} */
const setPropertyCache = new WeakMap();
// Sentinel: assigning DELETE to a property removes it while merging.
const DELETE = Symbol("DELETE");
// Key under which a wrapped dynamic ("byProperty") function stores its
// original function and the object merged over its result.
const DYNAMIC_INFO = Symbol("cleverMerge dynamic info");
/**
 * Merges two given objects and caches the result to avoid computation if same objects passed as arguments again.
 * @template T
 * @template O
 * @example
 * // performs cleverMerge(first, second), stores the result in WeakMap and returns result
 * cachedCleverMerge({a: 1}, {a: 2})
 * {a: 2}
 * // when same arguments passed, gets the result from WeakMap and returns it.
 * cachedCleverMerge({a: 1}, {a: 2})
 * {a: 2}
 * @param {T | null | undefined} first first object
 * @param {O | null | undefined} second second object
 * @returns {T & O | T | O} merged object of first and second object
 */
const cachedCleverMerge = (first, second) => {
	// Non-mergeable inputs short-circuit: undefined never wins, and a
	// non-object side is returned as-is.
	if (second === undefined) return /** @type {T} */ (first);
	if (first === undefined) return /** @type {O} */ (second);
	if (typeof second !== "object" || second === null) {
		return /** @type {O} */ (second);
	}
	if (typeof first !== "object" || first === null) {
		return /** @type {T} */ (first);
	}
	// Two-level WeakMap cache keyed by the identity of (first, second).
	let byFirst = mergeCache.get(first);
	if (byFirst === undefined) {
		byFirst = new WeakMap();
		mergeCache.set(first, byFirst);
	}
	const cached = /** @type {T & O} */ (byFirst.get(second));
	if (cached !== undefined) return cached;
	const merged = _cleverMerge(first, second, true);
	byFirst.set(second, merged);
	return /** @type {T & O} */ (merged);
};
/**
 * Returns a copy of `obj` with one property overridden, memoized so the same
 * (object, property, value) triple always yields the same result object.
 * @template T
 * @param {Partial<T>} obj object
 * @param {string} property property
 * @param {string|number|boolean} value assignment value
 * @returns {T} new object
 */
const cachedSetProperty = (obj, property, value) => {
	// Cache shape: obj -> property -> value -> result object.
	let byProperty = setPropertyCache.get(obj);
	if (byProperty === undefined) {
		byProperty = new Map();
		setPropertyCache.set(obj, byProperty);
	}
	let byValue = byProperty.get(property);
	if (byValue === undefined) {
		byValue = new Map();
		byProperty.set(property, byValue);
	}
	let result = byValue.get(value);
	if (result) return /** @type {T} */ (result);
	result = {
		...obj,
		[property]: value
	};
	byValue.set(value, result);
	return /** @type {T} */ (result);
};
/**
 * @typedef {object} ObjectParsedPropertyEntry
 * @property {any | undefined} base base value
 * @property {string | undefined} byProperty the name of the selector property
 * @property {Map<string, any>} byValues value depending on selector property, merged with base
 */
/**
 * @typedef {object} ParsedObject
 * @property {Map<string, ObjectParsedPropertyEntry>} static static properties (key is property name)
 * @property {{ byProperty: string, fn: Function } | undefined} dynamic dynamic part
 */
/** @type {WeakMap<object, ParsedObject>} */
const parseCache = new WeakMap();
/**
 * Parses an object, caching the result per object identity.
 * @param {object} obj the object
 * @returns {ParsedObject} parsed object
 */
const cachedParseObject = obj => {
	const entry = parseCache.get(obj);
	if (entry !== undefined) return entry;
	const result = parseObject(obj);
	parseCache.set(obj, result);
	return result;
};
/**
 * Splits an object into per-property static entries and (at most one)
 * dynamic "byProperty" function part.
 * @param {object} obj the object
 * @returns {ParsedObject} parsed object
 */
const parseObject = obj => {
	const info = new Map();
	let dynamicInfo;
	// Gets (or lazily creates) the entry for property p.
	const getInfo = p => {
		const entry = info.get(p);
		if (entry !== undefined) return entry;
		const newEntry = {
			base: undefined,
			byProperty: undefined,
			byValues: undefined
		};
		info.set(p, newEntry);
		return newEntry;
	};
	for (const key of Object.keys(obj)) {
		if (key.startsWith("by")) {
			const byProperty = key;
			const byObj = obj[byProperty];
			if (typeof byObj === "object") {
				// Object form: { byX: { selectorValue: { prop: value } } }.
				for (const byValue of Object.keys(byObj)) {
					const obj = byObj[byValue]; // NOTE: shadows the outer `obj`
					for (const key of Object.keys(obj)) {
						// NOTE: shadows the outer `key`
						const entry = getInfo(key);
						if (entry.byProperty === undefined) {
							entry.byProperty = byProperty;
							entry.byValues = new Map();
						} else if (entry.byProperty !== byProperty) {
							// A property may only depend on one selector.
							throw new Error(
								`${byProperty} and ${entry.byProperty} for a single property is not supported`
							);
						}
						entry.byValues.set(byValue, obj[key]);
						if (byValue === "default") {
							// "default" forces every other selector value to exist in
							// the map (as undefined) so fallback resolution sees them.
							for (const otherByValue of Object.keys(byObj)) {
								if (!entry.byValues.has(otherByValue))
									entry.byValues.set(otherByValue, undefined);
							}
						}
					}
				}
			} else if (typeof byObj === "function") {
				// Function form: only a single dynamic byProperty is allowed.
				if (dynamicInfo === undefined) {
					dynamicInfo = {
						byProperty: key,
						fn: byObj
					};
				} else {
					throw new Error(
						`${key} and ${dynamicInfo.byProperty} when both are functions is not supported`
					);
				}
			} else {
				// A "by*" key with a primitive value is a plain static property.
				const entry = getInfo(key);
				entry.base = obj[key];
			}
		} else {
			const entry = getInfo(key);
			entry.base = obj[key];
		}
	}
	return {
		static: info,
		dynamic: dynamicInfo
	};
};
/**
 * Reverses parseObject: writes static entries and the dynamic part back into
 * a plain object shape.
 * @param {Map<string, ObjectParsedPropertyEntry>} info static properties (key is property name)
 * @param {{ byProperty: string, fn: Function } | undefined} dynamicInfo dynamic part
 * @returns {object} the object
 */
const serializeObject = (info, dynamicInfo) => {
	const obj = {};
	// Setup byProperty structure
	// (first pass creates every selector bucket so the fill pass below can
	// enumerate the complete key set per selector)
	for (const entry of info.values()) {
		if (entry.byProperty !== undefined) {
			const byObj = (obj[entry.byProperty] = obj[entry.byProperty] || {});
			for (const byValue of entry.byValues.keys()) {
				byObj[byValue] = byObj[byValue] || {};
			}
		}
	}
	for (const [key, entry] of info) {
		if (entry.base !== undefined) {
			obj[key] = entry.base;
		}
		// Fill byProperty structure
		if (entry.byProperty !== undefined) {
			const byObj = (obj[entry.byProperty] = obj[entry.byProperty] || {});
			for (const byValue of Object.keys(byObj)) {
				const value = getFromByValues(entry.byValues, byValue);
				if (value !== undefined) byObj[byValue][key] = value;
			}
		}
	}
	if (dynamicInfo !== undefined) {
		obj[dynamicInfo.byProperty] = dynamicInfo.fn;
	}
	return obj;
};
// Classification of values for merge decisions:
const VALUE_TYPE_UNDEFINED = 0; // undefined — never overrides
const VALUE_TYPE_ATOM = 1; // replaces the previous value as a whole
const VALUE_TYPE_ARRAY_EXTEND = 2; // array containing "..." — extends previous value
const VALUE_TYPE_OBJECT = 3; // plain object — merged deeply
const VALUE_TYPE_DELETE = 4; // the DELETE symbol — removes the property
/**
 * @param {any} value a single value
 * @returns {VALUE_TYPE_UNDEFINED | VALUE_TYPE_ATOM | VALUE_TYPE_ARRAY_EXTEND | VALUE_TYPE_OBJECT | VALUE_TYPE_DELETE} value type
 */
const getValueType = value => {
	if (value === undefined) {
		return VALUE_TYPE_UNDEFINED;
	} else if (value === DELETE) {
		return VALUE_TYPE_DELETE;
	} else if (Array.isArray(value)) {
		// Arrays only "extend" when they contain the "..." placeholder.
		if (value.includes("...")) return VALUE_TYPE_ARRAY_EXTEND;
		return VALUE_TYPE_ATOM;
	} else if (
		typeof value === "object" &&
		value !== null &&
		(!value.constructor || value.constructor === Object)
	) {
		// Only plain objects (Object or null prototype) merge deeply;
		// class instances are treated as atoms.
		return VALUE_TYPE_OBJECT;
	}
	return VALUE_TYPE_ATOM;
};
/**
 * Merges two objects. Objects are deeply clever merged.
 * Arrays might reference the old value with "...".
 * Non-object values take preference over object values.
 * @template T
 * @template O
 * @param {T} first first object
 * @param {O} second second object
 * @returns {T & O | T | O} merged object of first and second object
 */
const cleverMerge = (first, second) => {
	// A missing side or a non-object side short-circuits the merge.
	if (second === undefined) return first;
	if (first === undefined) return second;
	const secondIsObject = typeof second === "object" && second !== null;
	if (!secondIsObject) return second;
	const firstIsObject = typeof first === "object" && first !== null;
	if (!firstIsObject) return first;
	return /** @type {T & O} */ (_cleverMerge(first, second, false));
};
/**
 * Merges two objects. Objects are deeply clever merged.
 * @param {object} first first object
 * @param {object} second second object
 * @param {boolean} internalCaching should parsing of objects and nested merges be cached
 * @returns {object} merged object of first and second object
 */
const _cleverMerge = (first, second, internalCaching = false) => {
	const firstObject = internalCaching
		? cachedParseObject(first)
		: parseObject(first);
	const { static: firstInfo, dynamic: firstDynamicInfo } = firstObject;
	// If the first argument has a dynamic part we modify the dynamic part to merge the second argument
	if (firstDynamicInfo !== undefined) {
		let { byProperty, fn } = firstDynamicInfo;
		const fnInfo = fn[DYNAMIC_INFO];
		if (fnInfo) {
			// fn already wraps an earlier merge: fold `second` into the object it
			// carries instead of stacking another wrapper (keeps the chain flat).
			second = internalCaching
				? cachedCleverMerge(fnInfo[1], second)
				: cleverMerge(fnInfo[1], second);
			fn = fnInfo[0];
		}
		// Wrap fn so its (future) result is merged with `second` on each call.
		const newFn = (...args) => {
			const fnResult = fn(...args);
			return internalCaching
				? cachedCleverMerge(fnResult, second)
				: cleverMerge(fnResult, second);
		};
		newFn[DYNAMIC_INFO] = [fn, second];
		return serializeObject(firstObject.static, { byProperty, fn: newFn });
	}
	// If the first part is static only, we merge the static parts and keep the dynamic part of the second argument
	const secondObject = internalCaching
		? cachedParseObject(second)
		: parseObject(second);
	const { static: secondInfo, dynamic: secondDynamicInfo } = secondObject;
	/** @type {Map<string, ObjectParsedPropertyEntry>} */
	const resultInfo = new Map();
	for (const [key, firstEntry] of firstInfo) {
		const secondEntry = secondInfo.get(key);
		const entry =
			secondEntry !== undefined
				? mergeEntries(firstEntry, secondEntry, internalCaching)
				: firstEntry;
		resultInfo.set(key, entry);
	}
	// Properties only present in `second` are taken over unchanged.
	for (const [key, secondEntry] of secondInfo) {
		if (!firstInfo.has(key)) {
			resultInfo.set(key, secondEntry);
		}
	}
	return serializeObject(resultInfo, secondDynamicInfo);
};
/**
 * Merges two parsed entries for the same property, dispatching on the value
 * type of second.base.
 * @param {ObjectParsedPropertyEntry} firstEntry a
 * @param {ObjectParsedPropertyEntry} secondEntry b
 * @param {boolean} internalCaching should parsing of objects and nested merges be cached
 * @returns {ObjectParsedPropertyEntry} new entry
 */
const mergeEntries = (firstEntry, secondEntry, internalCaching) => {
	switch (getValueType(secondEntry.base)) {
		case VALUE_TYPE_ATOM:
		case VALUE_TYPE_DELETE:
			// No need to consider firstEntry at all
			// second value override everything
			// = second.base + second.byProperty
			return secondEntry;
		case VALUE_TYPE_UNDEFINED:
			if (!firstEntry.byProperty) {
				// = first.base + second.byProperty
				return {
					base: firstEntry.base,
					byProperty: secondEntry.byProperty,
					byValues: secondEntry.byValues
				};
			} else if (firstEntry.byProperty !== secondEntry.byProperty) {
				throw new Error(
					`${firstEntry.byProperty} and ${secondEntry.byProperty} for a single property is not supported`
				);
			} else {
				// = first.base + (first.byProperty + second.byProperty)
				// need to merge first and second byValues
				const newByValues = new Map(firstEntry.byValues);
				for (const [key, value] of secondEntry.byValues) {
					const firstValue = getFromByValues(firstEntry.byValues, key);
					newByValues.set(
						key,
						mergeSingleValue(firstValue, value, internalCaching)
					);
				}
				return {
					base: firstEntry.base,
					byProperty: firstEntry.byProperty,
					byValues: newByValues
				};
			}
		default: {
			// second.base is an object or extending array
			if (!firstEntry.byProperty) {
				// The simple case
				// = (first.base + second.base) + second.byProperty
				return {
					base: mergeSingleValue(
						firstEntry.base,
						secondEntry.base,
						internalCaching
					),
					byProperty: secondEntry.byProperty,
					byValues: secondEntry.byValues
				};
			}
			let newBase;
			// Merge second.base into every existing byValue branch of first.
			const intermediateByValues = new Map(firstEntry.byValues);
			for (const [key, value] of intermediateByValues) {
				intermediateByValues.set(
					key,
					mergeSingleValue(value, secondEntry.base, internalCaching)
				);
			}
			if (
				Array.from(firstEntry.byValues.values()).every(value => {
					const type = getValueType(value);
					return type === VALUE_TYPE_ATOM || type === VALUE_TYPE_DELETE;
				})
			) {
				// = (first.base + second.base) + ((first.byProperty + second.base) + second.byProperty)
				newBase = mergeSingleValue(
					firstEntry.base,
					secondEntry.base,
					internalCaching
				);
			} else {
				// = first.base + ((first.byProperty (+default) + second.base) + second.byProperty)
				newBase = firstEntry.base;
				if (!intermediateByValues.has("default"))
					intermediateByValues.set("default", secondEntry.base);
			}
			if (!secondEntry.byProperty) {
				// = first.base + (first.byProperty + second.base)
				return {
					base: newBase,
					byProperty: firstEntry.byProperty,
					byValues: intermediateByValues
				};
			} else if (firstEntry.byProperty !== secondEntry.byProperty) {
				throw new Error(
					`${firstEntry.byProperty} and ${secondEntry.byProperty} for a single property is not supported`
				);
			}
			// Finally merge second's byValues over the intermediate result.
			const newByValues = new Map(intermediateByValues);
			for (const [key, value] of secondEntry.byValues) {
				const firstValue = getFromByValues(intermediateByValues, key);
				newByValues.set(
					key,
					mergeSingleValue(firstValue, value, internalCaching)
				);
			}
			return {
				base: newBase,
				byProperty: firstEntry.byProperty,
				byValues: newByValues
			};
		}
	}
};
/**
 * Looks up the value for a selector key, falling back to the "default" entry
 * when the key is absent or is itself "default".
 * @param {Map<string, any>} byValues all values
 * @param {string} key value of the selector
 * @returns {any | undefined} value
 */
const getFromByValues = (byValues, key) => {
	const hasExactMatch = key !== "default" && byValues.has(key);
	return hasExactMatch ? byValues.get(key) : byValues.get("default");
};
/**
 * Merges two property values according to their value types.
 * @param {any} a value
 * @param {any} b value
 * @param {boolean} internalCaching should parsing of objects and nested merges be cached
 * @returns {any} value
 */
const mergeSingleValue = (a, b, internalCaching) => {
	const bType = getValueType(b);
	const aType = getValueType(a);
	switch (bType) {
		case VALUE_TYPE_DELETE:
		case VALUE_TYPE_ATOM:
			// b replaces a entirely.
			return b;
		case VALUE_TYPE_OBJECT: {
			// Objects merge deeply only with other objects.
			return aType !== VALUE_TYPE_OBJECT
				? b
				: internalCaching
					? cachedCleverMerge(a, b)
					: cleverMerge(a, b);
		}
		case VALUE_TYPE_UNDEFINED:
			// b missing: keep a.
			return a;
		case VALUE_TYPE_ARRAY_EXTEND:
			// Re-classify a for array extension: a non-array atom behaves like
			// an object here (each "..." is replaced by a as a whole value).
			switch (
				aType !== VALUE_TYPE_ATOM
					? aType
					: Array.isArray(a)
						? VALUE_TYPE_ARRAY_EXTEND
						: VALUE_TYPE_OBJECT
			) {
				case VALUE_TYPE_UNDEFINED:
					return b;
				case VALUE_TYPE_DELETE:
					// Previous value was deleted: drop the "..." placeholders.
					return b.filter(item => item !== "...");
				case VALUE_TYPE_ARRAY_EXTEND: {
					const newArray = [];
					for (const item of b) {
						if (item === "...") {
							// Splice the previous array in place of "...".
							for (const item of a) {
								newArray.push(item);
							}
						} else {
							newArray.push(item);
						}
					}
					return newArray;
				}
				case VALUE_TYPE_OBJECT:
					return b.map(item => (item === "..." ? a : item));
				default:
					throw new Error("Not implemented");
			}
		default:
			throw new Error("Not implemented");
	}
};
/**
 * Strips merge operations from an object: drops DELETE/undefined entries,
 * removes "..." placeholders from arrays, and recurses into plain objects.
 * @template {object} T
 * @param {T} obj the object
 * @param {(keyof T)[]=} keysToKeepOriginalValue keys to keep original value
 * @returns {T} the object without operations like "..." or DELETE
 */
const removeOperations = (obj, keysToKeepOriginalValue = []) => {
	const newObj = /** @type {T} */ ({});
	for (const key of Object.keys(obj)) {
		const value = obj[/** @type {keyof T} */ (key)];
		const type = getValueType(value);
		// Selected object-valued keys are kept untouched (no recursion).
		if (
			type === VALUE_TYPE_OBJECT &&
			keysToKeepOriginalValue.includes(/** @type {keyof T} */ (key))
		) {
			newObj[/** @type {keyof T} */ (key)] = value;
			continue;
		}
		switch (type) {
			case VALUE_TYPE_UNDEFINED:
			case VALUE_TYPE_DELETE:
				// Dropped entirely from the result.
				break;
			case VALUE_TYPE_OBJECT:
				newObj[key] = removeOperations(
					/** @type {TODO} */ (value),
					keysToKeepOriginalValue
				);
				break;
			case VALUE_TYPE_ARRAY_EXTEND:
				newObj[key] =
					/** @type {any[]} */
					(value).filter(i => i !== "...");
				break;
			default:
				newObj[/** @type {keyof T} */ (key)] = value;
				break;
		}
	}
	return newObj;
};
/**
 * Resolves a "byProperty" selector on an object by merging the selected
 * branch (or the function result) over the remaining properties.
 * @template T
 * @template {string} P
 * @param {T} obj the object
 * @param {P} byProperty the by description
 * @param {...any} values values
 * @returns {Omit<T, P>} object with merged byProperty
 */
const resolveByProperty = (obj, byProperty, ...values) => {
	if (typeof obj !== "object" || obj === null || !(byProperty in obj)) {
		return obj;
	}
	// Split the selector off the object.
	const { [byProperty]: _byValue, ..._remaining } = obj;
	const remaining = /** @type {T} */ (_remaining);
	const byValue =
		/** @type {Record<string, T> | function(...any[]): T} */
		(_byValue);
	if (typeof byValue === "object") {
		// Object form: pick the branch for values[0], falling back to "default".
		const key = values[0];
		if (key in byValue) {
			return cachedCleverMerge(remaining, byValue[key]);
		} else if ("default" in byValue) {
			return cachedCleverMerge(remaining, byValue.default);
		}
		return remaining;
	} else if (typeof byValue === "function") {
		// Function form: call it, then resolve recursively — the result may
		// itself contain another byProperty selector.
		// eslint-disable-next-line prefer-spread
		const result = byValue.apply(null, values);
		return cachedCleverMerge(
			remaining,
			resolveByProperty(result, byProperty, ...values)
		);
	}
	// NOTE(review): a byValue that is neither object nor function falls
	// through and returns undefined implicitly — confirm this is intended.
};
module.exports.cachedSetProperty = cachedSetProperty;
module.exports.cachedCleverMerge = cachedCleverMerge;
module.exports.cleverMerge = cleverMerge;
module.exports.resolveByProperty = resolveByProperty;
module.exports.removeOperations = removeOperations;
module.exports.DELETE = DELETE;

194
node_modules/webpack/lib/util/createHash.js generated vendored Normal file
View file

@ -0,0 +1,194 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const Hash = require("./Hash");
const BULK_SIZE = 2000;
// We are using an object instead of a Map as this will stay static during the runtime
// so access to it can be optimized by v8
/** @type {{[key: string]: Map<string, string>}} */
const digestCaches = {};
/** @typedef {function(): Hash} HashFactory */
/**
 * Wraps a Hash and buffers short string updates, flushing them in one bulk
 * update call. When only short strings were hashed, the digest can be served
 * from a global per-(hashKey, encoding) cache without touching the real hash.
 */
class BulkUpdateDecorator extends Hash {
	/**
	 * @param {Hash | HashFactory} hashOrFactory function to create a hash
	 * @param {string=} hashKey key for caching
	 */
	constructor(hashOrFactory, hashKey) {
		super();
		this.hashKey = hashKey;
		// A factory delays creating the real hash until it is actually needed,
		// which keeps the cached-digest fast path allocation-free.
		if (typeof hashOrFactory === "function") {
			this.hashFactory = hashOrFactory;
			this.hash = undefined;
		} else {
			this.hashFactory = undefined;
			this.hash = hashOrFactory;
		}
		this.buffer = "";
	}
	/**
	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
	 * @param {string|Buffer} data data
	 * @param {string=} inputEncoding data encoding
	 * @returns {this} updated hash
	 */
	update(data, inputEncoding) {
		if (
			inputEncoding !== undefined ||
			typeof data !== "string" ||
			data.length > BULK_SIZE
		) {
			// Large or non-string data bypasses buffering: flush, then forward.
			if (this.hash === undefined)
				this.hash = /** @type {HashFactory} */ (this.hashFactory)();
			if (this.buffer.length > 0) {
				this.hash.update(this.buffer);
				this.buffer = "";
			}
			this.hash.update(data, inputEncoding);
		} else {
			this.buffer += data;
			if (this.buffer.length > BULK_SIZE) {
				if (this.hash === undefined)
					this.hash = /** @type {HashFactory} */ (this.hashFactory)();
				this.hash.update(this.buffer);
				this.buffer = "";
			}
		}
		return this;
	}
	/**
	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
	 * @param {string=} encoding encoding of the return value
	 * @returns {string|Buffer} digest
	 */
	digest(encoding) {
		let digestCache;
		const buffer = this.buffer;
		if (this.hash === undefined) {
			// short data for hash, we can use caching
			const cacheKey = `${this.hashKey}-${encoding}`;
			digestCache = digestCaches[cacheKey];
			if (digestCache === undefined) {
				digestCache = digestCaches[cacheKey] = new Map();
			}
			const cacheEntry = digestCache.get(buffer);
			if (cacheEntry !== undefined) return cacheEntry;
			this.hash = /** @type {HashFactory} */ (this.hashFactory)();
		}
		if (buffer.length > 0) {
			this.hash.update(buffer);
		}
		const digestResult = this.hash.digest(encoding);
		const result =
			typeof digestResult === "string" ? digestResult : digestResult.toString();
		if (digestCache !== undefined) {
			digestCache.set(buffer, result);
		}
		return result;
	}
}
/* istanbul ignore next */
/**
 * A non-cryptographic debugging "hash" that records every update together
 * with its call site (taken from the stack trace) in a readable log string.
 * The digest is that log hex-encoded behind a recognizable prefix, so a
 * DebugHash digest fed back into update() can be unwrapped and stays readable.
 */
class DebugHash extends Hash {
	constructor() {
		super();
		// readable log of all updates; becomes the digest
		this.string = "";
	}
	/**
	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
	 * @param {string|Buffer} data data
	 * @param {string=} inputEncoding data encoding
	 * @returns {this} updated hash
	 */
	update(data, inputEncoding) {
		if (typeof data !== "string") data = data.toString("utf-8");
		const prefix = Buffer.from("@webpack-debug-digest@").toString("hex");
		if (data.startsWith(prefix)) {
			// data is itself a DebugHash digest: decode it for readability
			data = Buffer.from(data.slice(prefix.length), "hex").toString();
		}
		// record the data plus the third stack line (the caller of update)
		this.string += `[${data}](${
			/** @type {string} */ (new Error().stack).split("\n", 3)[2]
		})\n`;
		return this;
	}
	/**
	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
	 * @param {string=} encoding encoding of the return value (not used here)
	 * @returns {string|Buffer} digest
	 */
	digest(encoding) {
		return Buffer.from(`@webpack-debug-digest@${this.string}`).toString("hex");
	}
}
/** @type {typeof import("crypto") | undefined} */
let crypto;
/** @type {typeof import("./hash/xxhash64") | undefined} */
let createXXHash64;
/** @type {typeof import("./hash/md4") | undefined} */
let createMd4;
/** @type {typeof import("./hash/BatchedHash") | undefined} */
let BatchedHash;
/** @typedef {string | typeof Hash} Algorithm */
/**
* Creates a hash by name or function
* @param {Algorithm} algorithm the algorithm name or a constructor creating a hash
* @returns {Hash} the hash
*/
module.exports = algorithm => {
	if (typeof algorithm === "function") {
		// a custom Hash constructor was passed in; wrap it for bulk updates
		// eslint-disable-next-line new-cap
		return new BulkUpdateDecorator(() => new algorithm());
	}
	switch (algorithm) {
		// TODO add non-cryptographic algorithm here
		case "debug":
			return new DebugHash();
		case "xxhash64":
			// implementations are required lazily and cached in module state
			// so unused algorithms never pay their load cost
			if (createXXHash64 === undefined) {
				createXXHash64 = require("./hash/xxhash64");
				if (BatchedHash === undefined) {
					BatchedHash = require("./hash/BatchedHash");
				}
			}
			return new /** @type {typeof import("./hash/BatchedHash")} */ (
				BatchedHash
			)(createXXHash64());
		case "md4":
			if (createMd4 === undefined) {
				createMd4 = require("./hash/md4");
				if (BatchedHash === undefined) {
					BatchedHash = require("./hash/BatchedHash");
				}
			}
			return new /** @type {typeof import("./hash/BatchedHash")} */ (
				BatchedHash
			)(createMd4());
		case "native-md4":
			// node's crypto module is also required lazily
			if (crypto === undefined) crypto = require("crypto");
			return new BulkUpdateDecorator(
				() => /** @type {typeof import("crypto")} */ (crypto).createHash("md4"),
				"md4"
			);
		default:
			// any other name is delegated to node's crypto.createHash
			if (crypto === undefined) crypto = require("crypto");
			return new BulkUpdateDecorator(
				() =>
					/** @type {typeof import("crypto")} */ (crypto).createHash(algorithm),
				algorithm
			);
	}
};

540
node_modules/webpack/lib/util/deterministicGrouping.js generated vendored Normal file
View file

@ -0,0 +1,540 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
// Simulations show these probabilities for a single change
// 93.1% that one group is invalidated
// 4.8% that two groups are invalidated
// 1.1% that 3 groups are invalidated
// 0.1% that 4 or more groups are invalidated
//
// And these for removing/adding 10 lexically adjacent files
// 64.5% that one group is invalidated
// 24.8% that two groups are invalidated
// 7.8% that 3 groups are invalidated
// 2.7% that 4 or more groups are invalidated
//
// And these for removing/adding 3 random files
// 0% that one group is invalidated
// 3.7% that two groups are invalidated
// 80.8% that 3 groups are invalidated
// 12.3% that 4 groups are invalidated
// 3.2% that 5 or more groups are invalidated
/**
 * Measures how lexically close two keys are by comparing their character
 * codes position by position up to the shorter key's length. Identical
 * characters contribute 10, characters further apart contribute less
 * (down to 0 for a distance of 10 or more).
 * @param {string} a key
 * @param {string} b key
 * @returns {number} the similarity as number
 */
const similarity = (a, b) => {
	const length = Math.min(a.length, b.length);
	let score = 0;
	for (let idx = 0; idx < length; idx++) {
		const delta = Math.abs(a.charCodeAt(idx) - b.charCodeAt(idx));
		score += Math.max(0, 10 - delta);
	}
	return score;
};
/**
 * Derives a short name for a range of keys: the shared prefix of a and b
 * plus one distinguishing character, extended further until the name is
 * unique (case-insensitively) among the already used names.
 * @param {string} a key
 * @param {string} b key
 * @param {Set<string>} usedNames set of already used names
 * @returns {string} the common part and a single char for the difference
 */
const getName = (a, b, usedNames) => {
	const limit = Math.min(a.length, b.length);
	// advance to one past the first differing character
	let prefixLength = 0;
	while (prefixLength < limit) {
		const differs = a.charCodeAt(prefixLength) !== b.charCodeAt(prefixLength);
		prefixLength++;
		if (differs) break;
	}
	// grow the prefix until it is unique among the used names
	while (prefixLength < limit) {
		const candidate = a.slice(0, prefixLength);
		const lowerCandidate = candidate.toLowerCase();
		if (!usedNames.has(lowerCandidate)) {
			usedNames.add(lowerCandidate);
			return candidate;
		}
		prefixLength++;
	}
	// names always contain a hash, so this is always unique
	// we don't need to check usedNames nor add it
	return a;
};
/**
 * Adds every size type of `size` onto `total` in place,
 * treating missing entries in `total` as 0.
 * @param {Record<string, number>} total total size
 * @param {Record<string, number>} size single size
 * @returns {void}
 */
const addSizeTo = (total, size) => {
	for (const [key, value] of Object.entries(size)) {
		total[key] = (total[key] || 0) + value;
	}
};
/**
 * Subtracts every size type of `size` from `total` in place.
 * @param {Record<string, number>} total total size
 * @param {Record<string, number>} size single size
 * @returns {void}
 */
const subtractSizeFrom = (total, size) => {
	for (const [key, value] of Object.entries(size)) {
		total[key] -= value;
	}
};
/**
 * Aggregates the sizes of all given nodes into a fresh
 * prototype-less record.
 * @template T
 * @param {Iterable<Node<T>>} nodes some nodes
 * @returns {Record<string, number>} total size
 */
const sumSize = nodes => {
	/** @type {Record<string, number>} */
	const total = Object.create(null);
	for (const { size } of nodes) {
		addSizeTo(total, size);
	}
	return total;
};
/**
 * Checks whether any non-zero size type exceeds its configured maximum.
 * Size types without a numeric maximum are unconstrained.
 * @param {Record<string, number>} size size
 * @param {Record<string, number>} maxSize maximum size
 * @returns {boolean} true, when size is too big
 */
const isTooBig = (size, maxSize) => {
	for (const [key, value] of Object.entries(size)) {
		if (value === 0) continue;
		const limit = maxSize[key];
		if (typeof limit === "number" && value > limit) return true;
	}
	return false;
};
/**
 * Checks whether any non-zero size type falls below its configured minimum.
 * Size types without a numeric minimum are unconstrained.
 * @param {Record<string, number>} size size
 * @param {Record<string, number>} minSize minimum size
 * @returns {boolean} true, when size is too small
 */
const isTooSmall = (size, minSize) => {
	for (const [key, value] of Object.entries(size)) {
		if (value === 0) continue;
		const limit = minSize[key];
		if (typeof limit === "number" && value < limit) return true;
	}
	return false;
};
/**
 * Collects the size types whose non-zero value falls below the
 * configured minimum.
 * @param {Record<string, number>} size size
 * @param {Record<string, number>} minSize minimum size
 * @returns {Set<string>} set of types that are too small
 */
const getTooSmallTypes = (size, minSize) => {
	/** @type {Set<string>} */
	const offenders = new Set();
	for (const [key, value] of Object.entries(size)) {
		if (value === 0) continue;
		const limit = minSize[key];
		if (typeof limit === "number" && value < limit) offenders.add(key);
	}
	return offenders;
};
/**
 * Counts how many of the given size types are present (non-zero) in `size`.
 * @param {Record<string, number>} size size
 * @param {Set<string>} types types
 * @returns {number} number of matching size types
 */
const getNumberOfMatchingSizeTypes = (size, types) =>
	Object.keys(size).reduce(
		(count, key) => (size[key] !== 0 && types.has(key) ? count + 1 : count),
		0
	);
/**
 * Sums only the size values whose type is in the given set,
 * skipping zero entries.
 * @param {Record<string, number>} size size
 * @param {Set<string>} types types
 * @returns {number} selective size sum
 */
const selectiveSizeSum = (size, types) => {
	let total = 0;
	for (const [key, value] of Object.entries(size)) {
		if (value !== 0 && types.has(key)) total += value;
	}
	return total;
};
/**
 * A single item together with the key used for lexical ordering /
 * similarity and its per-type size record.
 * @template T
 */
class Node {
	/**
	 * @param {T} item item
	 * @param {string} key key
	 * @param {Record<string, number>} size size
	 */
	constructor(item, key, size) {
		this.item = item;
		this.key = key;
		this.size = size;
	}
}
/**
 * A group of nodes (kept in lexical key order) together with the cached
 * similarities between adjacent nodes and the aggregated group size.
 * @template T
 */
class Group {
	/**
	 * @param {Node<T>[]} nodes nodes
	 * @param {number[] | null} similarities similarities between the nodes (length = nodes.length - 1)
	 * @param {Record<string, number>=} size size of the group
	 */
	constructor(nodes, similarities, size) {
		this.nodes = nodes;
		this.similarities = similarities;
		this.size = size || sumSize(nodes);
		/** @type {string | undefined} */
		this.key = undefined;
	}
	/**
	 * Removes all nodes matching the filter from this group (updating
	 * nodes, similarities and size in place) and returns the removed nodes.
	 * @param {function(Node<T>): boolean} filter filter function
	 * @returns {Node<T>[] | undefined} removed nodes, or undefined when every
	 * node matched (the group is left unchanged rather than emptied)
	 */
	popNodes(filter) {
		const newNodes = [];
		const newSimilarities = [];
		const resultNodes = [];
		let lastNode;
		for (let i = 0; i < this.nodes.length; i++) {
			const node = this.nodes[i];
			if (filter(node)) {
				resultNodes.push(node);
			} else {
				if (newNodes.length > 0) {
					// reuse the cached similarity when the previously kept node
					// was this node's direct neighbour, otherwise recompute it
					newSimilarities.push(
						lastNode === this.nodes[i - 1]
							? /** @type {number[]} */ (this.similarities)[i - 1]
							: similarity(/** @type {Node<T>} */ (lastNode).key, node.key)
					);
				}
				newNodes.push(node);
				lastNode = node;
			}
		}
		if (resultNodes.length === this.nodes.length) return;
		this.nodes = newNodes;
		this.similarities = newSimilarities;
		this.size = sumSize(newNodes);
		return resultNodes;
	}
}
/**
 * Computes the similarity between every pair of lexically adjacent nodes.
 * The result has one entry less than the input.
 * @template T
 * @param {Iterable<Node<T>>} nodes nodes
 * @returns {number[]} similarities
 */
const getSimilarities = nodes => {
	/** @type {number[]} */
	const result = [];
	let previous;
	for (const node of nodes) {
		if (previous !== undefined) {
			result.push(similarity(previous.key, node.key));
		}
		previous = node;
	}
	return result;
};
/**
* @template T
* @typedef {object} GroupedItems<T>
* @property {string} key
* @property {T[]} items
* @property {Record<string, number>} size
*/
/**
* @template T
* @typedef {object} Options
* @property {Record<string, number>} maxSize maximum size of a group
* @property {Record<string, number>} minSize minimum size of a group (preferred over maximum size)
* @property {Iterable<T>} items a list of items
* @property {function(T): Record<string, number>} getSize function to get size of an item
* @property {function(T): string} getKey function to get the key of an item
*/
/**
 * Groups the items into named groups whose per-type sizes stay between
 * minSize and maxSize where possible, keeping lexically similar keys
 * together. Oversized groups are split recursively at the point of lowest
 * key similarity; minSize is preferred over maxSize when both cannot hold.
 * @template T
 * @param {Options<T>} options options object
 * @returns {GroupedItems<T>[]} grouped items
 */
module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
	/** @type {Group<T>[]} */
	const result = [];
	const nodes = Array.from(
		items,
		item => new Node(item, getKey(item), getSize(item))
	);
	/** @type {Node<T>[]} */
	const initialNodes = [];
	// lexically ordering of keys
	nodes.sort((a, b) => {
		if (a.key < b.key) return -1;
		if (a.key > b.key) return 1;
		return 0;
	});
	// return nodes bigger than maxSize directly as group
	// But make sure that minSize is not violated
	for (const node of nodes) {
		if (isTooBig(node.size, maxSize) && !isTooSmall(node.size, minSize)) {
			result.push(new Group([node], []));
		} else {
			initialNodes.push(node);
		}
	}
	if (initialNodes.length > 0) {
		const initialGroup = new Group(initialNodes, getSimilarities(initialNodes));
		/**
		 * Moves nodes of too-small size types out of the group, merging them
		 * into the best matching already-finished group (or a new group).
		 * @param {Group<T>} group group
		 * @param {Record<string, number>} consideredSize size of the group to consider
		 * @returns {boolean} true, if the group was modified
		 */
		const removeProblematicNodes = (group, consideredSize = group.size) => {
			const problemTypes = getTooSmallTypes(consideredSize, minSize);
			if (problemTypes.size > 0) {
				// We hit an edge case where the working set is already smaller than minSize
				// We merge problematic nodes with the smallest result node to keep minSize intact
				const problemNodes = group.popNodes(
					n => getNumberOfMatchingSizeTypes(n.size, problemTypes) > 0
				);
				if (problemNodes === undefined) return false;
				// Only merge it with result nodes that have the problematic size type
				const possibleResultGroups = result.filter(
					n => getNumberOfMatchingSizeTypes(n.size, problemTypes) > 0
				);
				if (possibleResultGroups.length > 0) {
					// pick the group with the most matching types; ties are broken
					// by the smallest selective size sum
					const bestGroup = possibleResultGroups.reduce((min, group) => {
						const minMatches = getNumberOfMatchingSizeTypes(min, problemTypes);
						const groupMatches = getNumberOfMatchingSizeTypes(
							group,
							problemTypes
						);
						if (minMatches !== groupMatches)
							return minMatches < groupMatches ? group : min;
						if (
							selectiveSizeSum(min.size, problemTypes) >
							selectiveSizeSum(group.size, problemTypes)
						)
							return group;
						return min;
					});
					for (const node of problemNodes) bestGroup.nodes.push(node);
					bestGroup.nodes.sort((a, b) => {
						if (a.key < b.key) return -1;
						if (a.key > b.key) return 1;
						return 0;
					});
				} else {
					// There are no other nodes with the same size types
					// We create a new group and have to accept that it's smaller than minSize
					result.push(new Group(problemNodes, null));
				}
				return true;
			}
			return false;
		};
		if (initialGroup.nodes.length > 0) {
			const queue = [initialGroup];
			while (queue.length) {
				const group = /** @type {Group<T>} */ (queue.pop());
				// only groups bigger than maxSize need to be splitted
				if (!isTooBig(group.size, maxSize)) {
					result.push(group);
					continue;
				}
				// If the group is already too small
				// we try to work only with the unproblematic nodes
				if (removeProblematicNodes(group)) {
					// This changed something, so we try this group again
					queue.push(group);
					continue;
				}
				// find unsplittable area from left and right
				// going minSize from left and right
				// at least one node need to be included otherwise we get stuck
				let left = 1;
				const leftSize = Object.create(null);
				addSizeTo(leftSize, group.nodes[0].size);
				while (left < group.nodes.length && isTooSmall(leftSize, minSize)) {
					addSizeTo(leftSize, group.nodes[left].size);
					left++;
				}
				let right = group.nodes.length - 2;
				const rightSize = Object.create(null);
				addSizeTo(rightSize, group.nodes[group.nodes.length - 1].size);
				while (right >= 0 && isTooSmall(rightSize, minSize)) {
					addSizeTo(rightSize, group.nodes[right].size);
					right--;
				}
				// left v v right
				// [ O O O ] O O O [ O O O ]
				// ^^^^^^^^^ leftSize
				// rightSize ^^^^^^^^^
				// leftSize > minSize
				// rightSize > minSize
				// Perfect split: [ O O O ] [ O O O ]
				// right === left - 1
				if (left - 1 > right) {
					// left and right areas overlap, so no split point keeps minSize
					// We try to remove some problematic nodes to "fix" that
					let prevSize;
					if (right < group.nodes.length - left) {
						subtractSizeFrom(rightSize, group.nodes[right + 1].size);
						prevSize = rightSize;
					} else {
						subtractSizeFrom(leftSize, group.nodes[left - 1].size);
						prevSize = leftSize;
					}
					if (removeProblematicNodes(group, prevSize)) {
						// This changed something, so we try this group again
						queue.push(group);
						continue;
					}
					// can't split group while holding minSize
					// because minSize is preferred of maxSize we return
					// the problematic nodes as result here even while it's too big
					// To avoid this make sure maxSize > minSize * 3
					result.push(group);
					continue;
				}
				if (left <= right) {
					// when there is a area between left and right
					// we look for best split point
					// we split at the minimum similarity
					// here key space is separated the most
					// But we also need to make sure to not create too small groups
					let best = -1;
					let bestSimilarity = Infinity;
					let pos = left;
					const rightSize = sumSize(group.nodes.slice(pos));
					// pos v v right
					// [ O O O ] O O O [ O O O ]
					// ^^^^^^^^^ leftSize
					// rightSize ^^^^^^^^^^^^^^^
					while (pos <= right + 1) {
						const similarity = /** @type {number[]} */ (group.similarities)[
							pos - 1
						];
						if (
							similarity < bestSimilarity &&
							!isTooSmall(leftSize, minSize) &&
							!isTooSmall(rightSize, minSize)
						) {
							best = pos;
							bestSimilarity = similarity;
						}
						addSizeTo(leftSize, group.nodes[pos].size);
						subtractSizeFrom(rightSize, group.nodes[pos].size);
						pos++;
					}
					if (best < 0) {
						// This can't happen
						// but if that assumption is wrong
						// fallback to a big group
						result.push(group);
						continue;
					}
					left = best;
					right = best - 1;
				}
				// create two new groups for left and right area
				// and queue them up
				const rightNodes = [group.nodes[right + 1]];
				/** @type {number[]} */
				const rightSimilarities = [];
				for (let i = right + 2; i < group.nodes.length; i++) {
					rightSimilarities.push(
						/** @type {number[]} */ (group.similarities)[i - 1]
					);
					rightNodes.push(group.nodes[i]);
				}
				queue.push(new Group(rightNodes, rightSimilarities));
				const leftNodes = [group.nodes[0]];
				/** @type {number[]} */
				const leftSimilarities = [];
				for (let i = 1; i < left; i++) {
					leftSimilarities.push(
						/** @type {number[]} */ (group.similarities)[i - 1]
					);
					leftNodes.push(group.nodes[i]);
				}
				queue.push(new Group(leftNodes, leftSimilarities));
			}
		}
	}
	// lexically ordering
	result.sort((a, b) => {
		if (a.nodes[0].key < b.nodes[0].key) return -1;
		if (a.nodes[0].key > b.nodes[0].key) return 1;
		return 0;
	});
	// give every group a name
	const usedNames = new Set();
	for (let i = 0; i < result.length; i++) {
		const group = result[i];
		if (group.nodes.length === 1) {
			group.key = group.nodes[0].key;
		} else {
			const first = group.nodes[0];
			const last = group.nodes[group.nodes.length - 1];
			const name = getName(first.key, last.key, usedNames);
			group.key = name;
		}
	}
	// return the results
	return result.map(
		group =>
			/** @type {GroupedItems<T>} */
			({
				key: group.key,
				items: group.nodes.map(node => node.item),
				size: group.size
			})
	);
};

403
node_modules/webpack/lib/util/identifier.js generated vendored Normal file
View file

@ -0,0 +1,403 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
const path = require("path");
const WINDOWS_ABS_PATH_REGEXP = /^[a-zA-Z]:[\\/]/;
const SEGMENTS_SPLIT_REGEXP = /([|!])/;
const WINDOWS_PATH_SEPARATOR_REGEXP = /\\/g;
/**
* @typedef {object} MakeRelativePathsCache
* @property {Map<string, Map<string, string>>=} relativePaths
*/
/**
 * Turns a plain relative path into webpack request style:
 * empty and ".." get an explicit "/." suffix, paths not already
 * starting with "../" get a "./" prefix.
 * @param {string} relativePath relative path
 * @returns {string} request
 */
const relativePathToRequest = relativePath => {
	switch (relativePath) {
		case "":
			return "./.";
		case "..":
			return "../.";
		default:
			return relativePath.startsWith("../")
				? relativePath
				: `./${relativePath}`;
	}
};
/**
 * Converts an absolute path (posix or windows style) into a request-style
 * relative path based on context, keeping any query string (`?...`) intact.
 * Non-absolute inputs are returned unchanged.
 * @param {string} context context for relative path
 * @param {string} maybeAbsolutePath path to make relative
 * @returns {string} relative path in request style
 */
const absoluteToRequest = (context, maybeAbsolutePath) => {
	if (maybeAbsolutePath[0] === "/") {
		if (
			maybeAbsolutePath.length > 1 &&
			maybeAbsolutePath[maybeAbsolutePath.length - 1] === "/"
		) {
			// this 'path' is actually a regexp generated by dynamic requires.
			// Don't treat it as an absolute path.
			return maybeAbsolutePath;
		}
		// split off the query string before computing the relative path,
		// then re-append it unchanged
		const querySplitPos = maybeAbsolutePath.indexOf("?");
		let resource =
			querySplitPos === -1
				? maybeAbsolutePath
				: maybeAbsolutePath.slice(0, querySplitPos);
		resource = relativePathToRequest(path.posix.relative(context, resource));
		return querySplitPos === -1
			? resource
			: resource + maybeAbsolutePath.slice(querySplitPos);
	}
	if (WINDOWS_ABS_PATH_REGEXP.test(maybeAbsolutePath)) {
		const querySplitPos = maybeAbsolutePath.indexOf("?");
		let resource =
			querySplitPos === -1
				? maybeAbsolutePath
				: maybeAbsolutePath.slice(0, querySplitPos);
		resource = path.win32.relative(context, resource);
		if (!WINDOWS_ABS_PATH_REGEXP.test(resource)) {
			// result is no longer drive-absolute: normalize backslashes and
			// convert to request style
			resource = relativePathToRequest(
				resource.replace(WINDOWS_PATH_SEPARATOR_REGEXP, "/")
			);
		}
		return querySplitPos === -1
			? resource
			: resource + maybeAbsolutePath.slice(querySplitPos);
	}
	// not an absolute path
	return maybeAbsolutePath;
};
/**
 * Resolves an explicitly relative request ("./..." or "../...") against
 * the context; anything else is passed through untouched.
 * @param {string} context context for relative path
 * @param {string} relativePath path
 * @returns {string} absolute path
 */
const requestToAbsolute = (context, relativePath) => {
	const isExplicitRelative =
		relativePath.startsWith("./") || relativePath.startsWith("../");
	return isExplicitRelative ? path.join(context, relativePath) : relativePath;
};
/**
* @template T
* @typedef {function(string, object=): T} MakeCacheableResult
*/
/**
* @template T
* @typedef {function(string): T} BindCacheResultFn
*/
/**
* @template T
* @typedef {function(object): BindCacheResultFn<T>} BindCache
*/
/**
 * Wraps a single-string function with an optional per-object memoization:
 * results are cached in a Map attached (via WeakMap) to the caller-supplied
 * associated object. Without an associated object the function is called
 * directly. `bindCache` returns a one-argument variant bound to one cache.
 * @template T
 * @param {(function(string): T)} realFn real function
 * @returns {MakeCacheableResult<T> & { bindCache: BindCache<T> }} cacheable function
 */
const makeCacheable = realFn => {
	/** @type {WeakMap<object, Map<string, T>>} */
	const cache = new WeakMap();

	/**
	 * Returns (creating on demand) the string cache for an associated object.
	 * @param {object} associatedObjectForCache an object to which the cache will be attached
	 * @returns {Map<string, T>} cache map
	 */
	const getCache = associatedObjectForCache => {
		let map = cache.get(associatedObjectForCache);
		if (map === undefined) {
			map = new Map();
			cache.set(associatedObjectForCache, map);
		}
		return map;
	};

	/**
	 * Looks up str in map, computing and storing it via realFn on a miss.
	 * @param {Map<string, T>} map cache map
	 * @param {string} str input string
	 * @returns {T} cached or computed value
	 */
	const memoized = (map, str) => {
		let value = map.get(str);
		if (value === undefined) {
			value = realFn(str);
			map.set(str, value);
		}
		return value;
	};

	/** @type {MakeCacheableResult<T> & { bindCache: BindCache<T> }} */
	const fn = (str, associatedObjectForCache) =>
		associatedObjectForCache
			? memoized(getCache(associatedObjectForCache), str)
			: realFn(str);

	/** @type {BindCache<T>} */
	fn.bindCache = associatedObjectForCache => {
		const map = getCache(associatedObjectForCache);
		return str => memoized(map, str);
	};
	return fn;
};
/** @typedef {function(string, string, object=): string} MakeCacheableWithContextResult */
/** @typedef {function(string, string): string} BindCacheForContextResultFn */
/** @typedef {function(string): string} BindContextCacheForContextResultFn */
/** @typedef {function(object=): BindCacheForContextResultFn} BindCacheForContext */
/** @typedef {function(string, object=): BindContextCacheForContextResultFn} BindContextCacheForContext */
/**
 * Memoizes a two-argument (context, identifier) function: results are cached
 * per associated object, then per context, then per identifier. Also exposes
 * `bindCache` (binds the associated object) and `bindContextCache` (binds
 * both the associated object and the context).
 * @param {function(string, string): string} fn function
 * @returns {MakeCacheableWithContextResult & { bindCache: BindCacheForContext, bindContextCache: BindContextCacheForContext }} cacheable function with context
 */
const makeCacheableWithContext = fn => {
	/** @type {WeakMap<object, Map<string, Map<string, string>>>} */
	const cache = new WeakMap();
	/** @type {MakeCacheableWithContextResult & { bindCache: BindCacheForContext, bindContextCache: BindContextCacheForContext }} */
	const cachedFn = (context, identifier, associatedObjectForCache) => {
		// without an associated object there is nothing to attach the cache to
		if (!associatedObjectForCache) return fn(context, identifier);
		let innerCache = cache.get(associatedObjectForCache);
		if (innerCache === undefined) {
			innerCache = new Map();
			cache.set(associatedObjectForCache, innerCache);
		}
		let cachedResult;
		let innerSubCache = innerCache.get(context);
		if (innerSubCache === undefined) {
			innerCache.set(context, (innerSubCache = new Map()));
		} else {
			cachedResult = innerSubCache.get(identifier);
		}
		if (cachedResult !== undefined) {
			return cachedResult;
		}
		const result = fn(context, identifier);
		innerSubCache.set(identifier, result);
		return result;
	};
	/** @type {BindCacheForContext} */
	cachedFn.bindCache = associatedObjectForCache => {
		let innerCache;
		if (associatedObjectForCache) {
			innerCache = cache.get(associatedObjectForCache);
			if (innerCache === undefined) {
				innerCache = new Map();
				cache.set(associatedObjectForCache, innerCache);
			}
		} else {
			// no associated object: use a cache private to this bound function
			innerCache = new Map();
		}
		/**
		 * @param {string} context context used to create relative path
		 * @param {string} identifier identifier used to create relative path
		 * @returns {string} the returned relative path
		 */
		const boundFn = (context, identifier) => {
			let cachedResult;
			let innerSubCache = innerCache.get(context);
			if (innerSubCache === undefined) {
				innerCache.set(context, (innerSubCache = new Map()));
			} else {
				cachedResult = innerSubCache.get(identifier);
			}
			if (cachedResult !== undefined) {
				return cachedResult;
			}
			const result = fn(context, identifier);
			innerSubCache.set(identifier, result);
			return result;
		};
		return boundFn;
	};
	/** @type {BindContextCacheForContext} */
	cachedFn.bindContextCache = (context, associatedObjectForCache) => {
		let innerSubCache;
		if (associatedObjectForCache) {
			let innerCache = cache.get(associatedObjectForCache);
			if (innerCache === undefined) {
				innerCache = new Map();
				cache.set(associatedObjectForCache, innerCache);
			}
			innerSubCache = innerCache.get(context);
			if (innerSubCache === undefined) {
				innerCache.set(context, (innerSubCache = new Map()));
			}
		} else {
			// no associated object: use a cache private to this bound function
			innerSubCache = new Map();
		}
		/**
		 * @param {string} identifier identifier used to create relative path
		 * @returns {string} the returned relative path
		 */
		const boundFn = identifier => {
			const cachedResult = innerSubCache.get(identifier);
			if (cachedResult !== undefined) {
				return cachedResult;
			}
			const result = fn(context, identifier);
			innerSubCache.set(identifier, result);
			return result;
		};
		return boundFn;
	};
	return cachedFn;
};
/**
 * Splits the identifier at `|` and `!` separators (kept by the capturing
 * group) and converts every absolute segment to request style.
 * @param {string} context context for relative path
 * @param {string} identifier identifier for path
 * @returns {string} a converted relative path
 */
const _makePathsRelative = (context, identifier) => {
	let result = "";
	for (const segment of identifier.split(SEGMENTS_SPLIT_REGEXP)) {
		result += absoluteToRequest(context, segment);
	}
	return result;
};
module.exports.makePathsRelative = makeCacheableWithContext(_makePathsRelative);
/**
 * Splits the identifier at `|` and `!` separators (kept by the capturing
 * group) and resolves every explicitly relative segment against context.
 * @param {string} context context for relative path
 * @param {string} identifier identifier for path
 * @returns {string} a converted relative path
 */
const _makePathsAbsolute = (context, identifier) => {
	let result = "";
	for (const segment of identifier.split(SEGMENTS_SPLIT_REGEXP)) {
		result += requestToAbsolute(context, segment);
	}
	return result;
};
module.exports.makePathsAbsolute = makeCacheableWithContext(_makePathsAbsolute);
/**
 * @param {string} context absolute context path
 * @param {string} request any request string may containing absolute paths, query string, etc.
 * @returns {string} a new request string avoiding absolute paths when possible
 */
const _contextify = (context, request) => {
	// each "!"-separated part is a loader or resource path
	const parts = request.split("!");
	return parts.map(part => absoluteToRequest(context, part)).join("!");
};
const contextify = makeCacheableWithContext(_contextify);
module.exports.contextify = contextify;
/**
 * @param {string} context absolute context path
 * @param {string} request any request string
 * @returns {string} a new request string using absolute paths when possible
 */
const _absolutify = (context, request) => {
	// each "!"-separated part is a loader or resource path
	const parts = request.split("!");
	return parts.map(part => requestToAbsolute(context, part)).join("!");
};
const absolutify = makeCacheableWithContext(_absolutify);
module.exports.absolutify = absolutify;
const PATH_QUERY_FRAGMENT_REGEXP =
/^((?:\0.|[^?#\0])*)(\?(?:\0.|[^#\0])*)?(#.*)?$/;
const PATH_QUERY_REGEXP = /^((?:\0.|[^?\0])*)(\?.*)?$/;
/** @typedef {{ resource: string, path: string, query: string, fragment: string }} ParsedResource */
/** @typedef {{ resource: string, path: string, query: string }} ParsedResourceWithoutFragment */
/**
 * Splits a request string into path, query ("?...") and fragment ("#...")
 * parts, removing the \0 escaping from path and query.
 * @param {string} str the path with query and fragment
 * @returns {ParsedResource} parsed parts
 */
const _parseResource = str => {
	const match = /** @type {RegExpExecArray} */ (
		PATH_QUERY_FRAGMENT_REGEXP.exec(str)
	);
	// drop the \0 escape characters from a matched part
	const decode = part => part.replace(/\0(.)/g, "$1");
	return {
		resource: str,
		path: decode(match[1]),
		query: match[2] ? decode(match[2]) : "",
		fragment: match[3] || ""
	};
};
module.exports.parseResource = makeCacheable(_parseResource);
/**
 * Parse resource, skips fragment part: splits a request string into path
 * and query ("?...") parts, removing the \0 escaping from both.
 * @param {string} str the path with query and fragment
 * @returns {ParsedResourceWithoutFragment} parsed parts
 */
const _parseResourceWithoutFragment = str => {
	const match = /** @type {RegExpExecArray} */ (PATH_QUERY_REGEXP.exec(str));
	// drop the \0 escape characters from a matched part
	const decode = part => part.replace(/\0(.)/g, "$1");
	return {
		resource: str,
		path: decode(match[1]),
		query: match[2] ? decode(match[2]) : ""
	};
};
module.exports.parseResourceWithoutFragment = makeCacheable(
	_parseResourceWithoutFragment
);
/**
 * @param {string} filename the filename which should be undone
 * @param {string} outputPath the output path that is restored (only relevant when filename contains "..")
 * @param {boolean} enforceRelative true returns ./ for empty paths
 * @returns {string} repeated ../ to leave the directory of the provided filename to be back on output dir
 */
const getUndoPath = (filename, outputPath, enforceRelative) => {
	let depth = -1;
	let append = "";
	// drop a single trailing (back)slash so segment popping works uniformly
	outputPath = outputPath.replace(/[\\/]$/, "");
	for (const part of filename.split(/[/\\]+/)) {
		if (part === ".") continue;
		if (part !== "..") {
			depth++;
			continue;
		}
		if (depth > -1) {
			depth--;
			continue;
		}
		// ".." escapes the output dir: move its last segment over to append
		const slashPos = outputPath.lastIndexOf("/");
		const backslashPos = outputPath.lastIndexOf("\\");
		const pos =
			slashPos < 0
				? backslashPos
				: backslashPos < 0
					? slashPos
					: Math.max(slashPos, backslashPos);
		if (pos < 0) return `${outputPath}/`;
		append = `${outputPath.slice(pos + 1)}/${append}`;
		outputPath = outputPath.slice(0, pos);
	}
	if (depth > 0) return `${"../".repeat(depth)}${append}`;
	return enforceRelative ? `./${append}` : append;
};
module.exports.getUndoPath = getUndoPath;

14
node_modules/webpack/lib/util/objectToMap.js generated vendored Normal file
View file

@ -0,0 +1,14 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
/**
* Convert an object into an ES6 map
* @param {object} obj any object type that works with Object.entries()
* @returns {Map<string, any>} an ES6 Map of KV pairs
*/
module.exports = function objectToMap(obj) {
return new Map(Object.entries(obj));
};