Updated the files.
This commit is contained in:
parent 1553e6b971
commit 753967d4f5
23418 changed files with 3784666 additions and 0 deletions
my-app/node_modules/webpack/lib/optimize/AggressiveMergingPlugin.js (generated, vendored, executable file; 100 lines added)
@@ -0,0 +1,100 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { STAGE_ADVANCED } = require("../OptimizationStages");

/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Compiler")} Compiler */

/**
 * @typedef {Object} AggressiveMergingPluginOptions
 * @property {number=} minSizeReduce minimal size reduction to trigger merging
 */

class AggressiveMergingPlugin {
	/**
	 * @param {AggressiveMergingPluginOptions=} [options] options object
	 */
	constructor(options) {
		if (
			(options !== undefined && typeof options !== "object") ||
			Array.isArray(options)
		) {
			throw new Error(
				"Argument should be an options object. To use defaults, pass in nothing.\nFor more info on options, see https://webpack.js.org/plugins/"
			);
		}
		this.options = options || {};
	}

	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		const options = this.options;
		const minSizeReduce = options.minSizeReduce || 1.5;

		compiler.hooks.thisCompilation.tap(
			"AggressiveMergingPlugin",
			compilation => {
				compilation.hooks.optimizeChunks.tap(
					{
						name: "AggressiveMergingPlugin",
						stage: STAGE_ADVANCED
					},
					chunks => {
						const chunkGraph = compilation.chunkGraph;
						/** @type {{a: Chunk, b: Chunk, improvement: number}[]} */
						let combinations = [];
						for (const a of chunks) {
							if (a.canBeInitial()) continue;
							for (const b of chunks) {
								if (b.canBeInitial()) continue;
								if (b === a) break;
								if (!chunkGraph.canChunksBeIntegrated(a, b)) {
									continue;
								}
								const aSize = chunkGraph.getChunkSize(b, {
									chunkOverhead: 0
								});
								const bSize = chunkGraph.getChunkSize(a, {
									chunkOverhead: 0
								});
								const abSize = chunkGraph.getIntegratedChunksSize(b, a, {
									chunkOverhead: 0
								});
								const improvement = (aSize + bSize) / abSize;
								combinations.push({
									a,
									b,
									improvement
								});
							}
						}

						combinations.sort((a, b) => {
							return b.improvement - a.improvement;
						});

						const pair = combinations[0];

						if (!pair) return;
						if (pair.improvement < minSizeReduce) return;

						chunkGraph.integrateChunks(pair.b, pair.a);
						compilation.chunks.delete(pair.a);
						return true;
					}
				);
			}
		);
	}
}

module.exports = AggressiveMergingPlugin;
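For context, AggressiveMergingPlugin ships with webpack and is normally enabled from the build config rather than required from this vendored path. A minimal usage sketch, assuming webpack 5; the minSizeReduce value mirrors the default in the source above:

// webpack.config.js: merge async chunks whenever merging shrinks
// total size by a factor of at least 1.5
const webpack = require("webpack");

module.exports = {
	mode: "production",
	plugins: [
		new webpack.optimize.AggressiveMergingPlugin({ minSizeReduce: 1.5 })
	]
};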
my-app/node_modules/webpack/lib/optimize/AggressiveSplittingPlugin.js (generated, vendored, executable file; 336 lines added)
@@ -0,0 +1,336 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { STAGE_ADVANCED } = require("../OptimizationStages");
const { intersect } = require("../util/SetHelpers");
const {
	compareModulesByIdentifier,
	compareChunks
} = require("../util/comparators");
const createSchemaValidation = require("../util/create-schema-validation");
const identifierUtils = require("../util/identifier");

/** @typedef {import("../../declarations/plugins/optimize/AggressiveSplittingPlugin").AggressiveSplittingPluginOptions} AggressiveSplittingPluginOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../ChunkGraph")} ChunkGraph */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Module")} Module */

const validate = createSchemaValidation(
	require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.check.js"),
	() =>
		require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.json"),
	{
		name: "Aggressive Splitting Plugin",
		baseDataPath: "options"
	}
);

/**
 * @param {ChunkGraph} chunkGraph the chunk graph
 * @param {Chunk} oldChunk the old chunk
 * @param {Chunk} newChunk the new chunk
 * @returns {(module: Module) => void} function to move module between chunks
 */
const moveModuleBetween = (chunkGraph, oldChunk, newChunk) => {
	return module => {
		chunkGraph.disconnectChunkAndModule(oldChunk, module);
		chunkGraph.connectChunkAndModule(newChunk, module);
	};
};

/**
 * @param {ChunkGraph} chunkGraph the chunk graph
 * @param {Chunk} chunk the chunk
 * @returns {function(Module): boolean} filter for entry module
 */
const isNotAEntryModule = (chunkGraph, chunk) => {
	return module => {
		return !chunkGraph.isEntryModuleInChunk(module, chunk);
	};
};

/** @type {WeakSet<Chunk>} */
const recordedChunks = new WeakSet();

class AggressiveSplittingPlugin {
	/**
	 * @param {AggressiveSplittingPluginOptions=} options options object
	 */
	constructor(options = {}) {
		validate(options);

		this.options = options;
		if (typeof this.options.minSize !== "number") {
			this.options.minSize = 30 * 1024;
		}
		if (typeof this.options.maxSize !== "number") {
			this.options.maxSize = 50 * 1024;
		}
		if (typeof this.options.chunkOverhead !== "number") {
			this.options.chunkOverhead = 0;
		}
		if (typeof this.options.entryChunkMultiplicator !== "number") {
			this.options.entryChunkMultiplicator = 1;
		}
	}

	/**
	 * @param {Chunk} chunk the chunk to test
	 * @returns {boolean} true if the chunk was recorded
	 */
	static wasChunkRecorded(chunk) {
		return recordedChunks.has(chunk);
	}

	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.thisCompilation.tap(
			"AggressiveSplittingPlugin",
			compilation => {
				let needAdditionalSeal = false;
				let newSplits;
				/** @type {Set<Chunk>} */
				let fromAggressiveSplittingSet;
				let chunkSplitDataMap;
				compilation.hooks.optimize.tap("AggressiveSplittingPlugin", () => {
					newSplits = [];
					fromAggressiveSplittingSet = new Set();
					chunkSplitDataMap = new Map();
				});
				compilation.hooks.optimizeChunks.tap(
					{
						name: "AggressiveSplittingPlugin",
						stage: STAGE_ADVANCED
					},
					chunks => {
						const chunkGraph = compilation.chunkGraph;
						// Precompute stuff
						const nameToModuleMap = new Map();
						const moduleToNameMap = new Map();
						const makePathsRelative =
							identifierUtils.makePathsRelative.bindContextCache(
								compiler.context,
								compiler.root
							);
						for (const m of compilation.modules) {
							const name = makePathsRelative(m.identifier());
							nameToModuleMap.set(name, m);
							moduleToNameMap.set(m, name);
						}

						// Check used chunk ids
						const usedIds = new Set();
						for (const chunk of chunks) {
							usedIds.add(chunk.id);
						}

						const recordedSplits =
							(compilation.records && compilation.records.aggressiveSplits) ||
							[];
						const usedSplits = newSplits
							? recordedSplits.concat(newSplits)
							: recordedSplits;

						const minSize = /** @type {number} */ (this.options.minSize);
						const maxSize = /** @type {number} */ (this.options.maxSize);

						const applySplit = splitData => {
							// Cannot split if id is already taken
							if (splitData.id !== undefined && usedIds.has(splitData.id)) {
								return false;
							}

							// Get module objects from names
							const selectedModules = splitData.modules.map(name =>
								nameToModuleMap.get(name)
							);

							// Do the modules exist at all?
							if (!selectedModules.every(Boolean)) return false;

							// Check if size matches (faster than waiting for hash)
							let size = 0;
							for (const m of selectedModules) size += m.size();
							if (size !== splitData.size) return false;

							// get chunks with all modules
							const selectedChunks = intersect(
								selectedModules.map(
									m => new Set(chunkGraph.getModuleChunksIterable(m))
								)
							);

							// No relevant chunks found
							if (selectedChunks.size === 0) return false;

							// The found chunk is already the split or similar
							if (
								selectedChunks.size === 1 &&
								chunkGraph.getNumberOfChunkModules(
									Array.from(selectedChunks)[0]
								) === selectedModules.length
							) {
								const chunk = Array.from(selectedChunks)[0];
								if (fromAggressiveSplittingSet.has(chunk)) return false;
								fromAggressiveSplittingSet.add(chunk);
								chunkSplitDataMap.set(chunk, splitData);
								return true;
							}

							// split the chunk into two parts
							const newChunk = compilation.addChunk();
							newChunk.chunkReason = "aggressive splitted";
							for (const chunk of selectedChunks) {
								selectedModules.forEach(
									moveModuleBetween(chunkGraph, chunk, newChunk)
								);
								chunk.split(newChunk);
								chunk.name = null;
							}
							fromAggressiveSplittingSet.add(newChunk);
							chunkSplitDataMap.set(newChunk, splitData);

							if (splitData.id !== null && splitData.id !== undefined) {
								newChunk.id = splitData.id;
								newChunk.ids = [splitData.id];
							}
							return true;
						};

						// try to restore to recorded splitting
						let changed = false;
						for (let j = 0; j < usedSplits.length; j++) {
							const splitData = usedSplits[j];
							if (applySplit(splitData)) changed = true;
						}

						// for any chunk which isn't split yet, split it and create a new entry
						// start with the biggest chunk
						const cmpFn = compareChunks(chunkGraph);
						const sortedChunks = Array.from(chunks).sort((a, b) => {
							const diff1 =
								chunkGraph.getChunkModulesSize(b) -
								chunkGraph.getChunkModulesSize(a);
							if (diff1) return diff1;
							const diff2 =
								chunkGraph.getNumberOfChunkModules(a) -
								chunkGraph.getNumberOfChunkModules(b);
							if (diff2) return diff2;
							return cmpFn(a, b);
						});
						for (const chunk of sortedChunks) {
							if (fromAggressiveSplittingSet.has(chunk)) continue;
							const size = chunkGraph.getChunkModulesSize(chunk);
							if (
								size > maxSize &&
								chunkGraph.getNumberOfChunkModules(chunk) > 1
							) {
								const modules = chunkGraph
									.getOrderedChunkModules(chunk, compareModulesByIdentifier)
									.filter(isNotAEntryModule(chunkGraph, chunk));
								const selectedModules = [];
								let selectedModulesSize = 0;
								for (let k = 0; k < modules.length; k++) {
									const module = modules[k];
									const newSize = selectedModulesSize + module.size();
									if (newSize > maxSize && selectedModulesSize >= minSize) {
										break;
									}
									selectedModulesSize = newSize;
									selectedModules.push(module);
								}
								if (selectedModules.length === 0) continue;
								const splitData = {
									modules: selectedModules
										.map(m => moduleToNameMap.get(m))
										.sort(),
									size: selectedModulesSize
								};

								if (applySplit(splitData)) {
									newSplits = (newSplits || []).concat(splitData);
									changed = true;
								}
							}
						}
						if (changed) return true;
					}
				);
				compilation.hooks.recordHash.tap(
					"AggressiveSplittingPlugin",
					records => {
						// 4. save made splittings to records
						const allSplits = new Set();
						const invalidSplits = new Set();

						// Check if some splittings are invalid
						// We remove invalid splittings and try again
						for (const chunk of compilation.chunks) {
							const splitData = chunkSplitDataMap.get(chunk);
							if (splitData !== undefined) {
								if (splitData.hash && chunk.hash !== splitData.hash) {
									// Split was successful, but the hash doesn't match
									// We can throw away the split since it's useless now
									invalidSplits.add(splitData);
								}
							}
						}

						if (invalidSplits.size > 0) {
							records.aggressiveSplits = records.aggressiveSplits.filter(
								splitData => !invalidSplits.has(splitData)
							);
							needAdditionalSeal = true;
						} else {
							// set hash and id values on all (new) splittings
							for (const chunk of compilation.chunks) {
								const splitData = chunkSplitDataMap.get(chunk);
								if (splitData !== undefined) {
									splitData.hash = chunk.hash;
									splitData.id = chunk.id;
									allSplits.add(splitData);
									// set flag for stats
									recordedChunks.add(chunk);
								}
							}

							// Also add all unused historical splits (after the used ones)
							// They can still be used in some future compilation
							const recordedSplits =
								compilation.records && compilation.records.aggressiveSplits;
							if (recordedSplits) {
								for (const splitData of recordedSplits) {
									if (!invalidSplits.has(splitData)) allSplits.add(splitData);
								}
							}

							// record all splits
							records.aggressiveSplits = Array.from(allSplits);

							needAdditionalSeal = false;
						}
					}
				);
				compilation.hooks.needAdditionalSeal.tap(
					"AggressiveSplittingPlugin",
					() => {
						if (needAdditionalSeal) {
							needAdditionalSeal = false;
							return true;
						}
					}
				);
			}
		);
	}
}

module.exports = AggressiveSplittingPlugin;
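Since the plugin persists split data in compilation records, it is usually paired with a records file so splits stay stable across builds. A minimal sketch, assuming webpack 5; the sizes mirror the 30 KiB / 50 KiB defaults in the constructor above:

// webpack.config.js: HTTP/2-style aggressive splitting with persistent records
const path = require("path");
const webpack = require("webpack");

module.exports = {
	mode: "production",
	recordsPath: path.resolve(__dirname, "records.json"),
	plugins: [
		new webpack.optimize.AggressiveSplittingPlugin({
			minSize: 30 * 1024,
			maxSize: 50 * 1024
		})
	]
};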
my-app/node_modules/webpack/lib/optimize/ConcatenatedModule.js (generated, vendored, executable file; 1905 lines added)
File diff suppressed because it is too large
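ConcatenatedModule is the machinery behind scope hoisting (ModuleConcatenationPlugin). In user configs it is normally driven by a single flag rather than by touching this file; a minimal sketch, assuming webpack 5, where concatenateModules already defaults to true in production mode:

// webpack.config.js: force scope hoisting outside production mode
module.exports = {
	mode: "development",
	optimization: {
		concatenateModules: true
	}
};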
my-app/node_modules/webpack/lib/optimize/EnsureChunkConditionsPlugin.js (generated, vendored, executable file; 88 lines added)
@@ -0,0 +1,88 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { STAGE_BASIC } = require("../OptimizationStages");

/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../ChunkGroup")} ChunkGroup */
/** @typedef {import("../Compiler")} Compiler */

class EnsureChunkConditionsPlugin {
	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.compilation.tap(
			"EnsureChunkConditionsPlugin",
			compilation => {
				/**
				 * @param {Iterable<Chunk>} chunks the chunks
				 */
				const handler = chunks => {
					const chunkGraph = compilation.chunkGraph;
					// These sets are hoisted here to save memory
					// They are cleared at the end of every loop
					/** @type {Set<Chunk>} */
					const sourceChunks = new Set();
					/** @type {Set<ChunkGroup>} */
					const chunkGroups = new Set();
					for (const module of compilation.modules) {
						if (!module.hasChunkCondition()) continue;
						for (const chunk of chunkGraph.getModuleChunksIterable(module)) {
							if (!module.chunkCondition(chunk, compilation)) {
								sourceChunks.add(chunk);
								for (const group of chunk.groupsIterable) {
									chunkGroups.add(group);
								}
							}
						}
						if (sourceChunks.size === 0) continue;
						/** @type {Set<Chunk>} */
						const targetChunks = new Set();
						chunkGroupLoop: for (const chunkGroup of chunkGroups) {
							// Can module be placed in a chunk of this group?
							for (const chunk of chunkGroup.chunks) {
								if (module.chunkCondition(chunk, compilation)) {
									targetChunks.add(chunk);
									continue chunkGroupLoop;
								}
							}
							// We reached the entrypoint: fail
							if (chunkGroup.isInitial()) {
								throw new Error(
									"Cannot fullfil chunk condition of " + module.identifier()
								);
							}
							// Try placing in all parents
							for (const group of chunkGroup.parentsIterable) {
								chunkGroups.add(group);
							}
						}
						for (const sourceChunk of sourceChunks) {
							chunkGraph.disconnectChunkAndModule(sourceChunk, module);
						}
						for (const targetChunk of targetChunks) {
							chunkGraph.connectChunkAndModule(targetChunk, module);
						}
						sourceChunks.clear();
						chunkGroups.clear();
					}
				};
				compilation.hooks.optimizeChunks.tap(
					{
						name: "EnsureChunkConditionsPlugin",
						stage: STAGE_BASIC
					},
					handler
				);
			}
		);
	}
}
module.exports = EnsureChunkConditionsPlugin;
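EnsureChunkConditionsPlugin is an internal plugin that webpack applies automatically while processing options, so it normally needs no configuration at all. A sketch of applying it by hand from the vendored path, purely for illustration:

// Illustrative only: webpack already applies this plugin for you.
const EnsureChunkConditionsPlugin = require("webpack/lib/optimize/EnsureChunkConditionsPlugin");

module.exports = {
	plugins: [new EnsureChunkConditionsPlugin()]
};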
my-app/node_modules/webpack/lib/optimize/FlagIncludedChunksPlugin.js (generated, vendored, executable file; 124 lines added)
@@ -0,0 +1,124 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Chunk").ChunkId} ChunkId */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Module")} Module */

class FlagIncludedChunksPlugin {
	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.compilation.tap("FlagIncludedChunksPlugin", compilation => {
			compilation.hooks.optimizeChunkIds.tap(
				"FlagIncludedChunksPlugin",
				chunks => {
					const chunkGraph = compilation.chunkGraph;

					// prepare two bit integers for each module
					// 2^31 is the max number represented as SMI in v8
					// we want the bits distributed this way:
					// the bit 2^31 is pretty rare and only one module should get it
					// so it has a probability of 1 / modulesCount
					// the first bit (2^0) is the easiest and every module could get it
					// if it doesn't get a better bit
					// from bit 2^n to 2^(n+1) there is a probability of p
					// so 1 / modulesCount == p^31
					// <=> p = sqrt31(1 / modulesCount)
					// so we use a modulo of 1 / sqrt31(1 / modulesCount)
					/** @type {WeakMap<Module, number>} */
					const moduleBits = new WeakMap();
					const modulesCount = compilation.modules.size;

					// precalculate the modulo values for each bit
					const modulo = 1 / Math.pow(1 / modulesCount, 1 / 31);
					const modulos = Array.from(
						{ length: 31 },
						(x, i) => Math.pow(modulo, i) | 0
					);

					// iterate all modules to generate bit values
					let i = 0;
					for (const module of compilation.modules) {
						let bit = 30;
						while (i % modulos[bit] !== 0) {
							bit--;
						}
						moduleBits.set(module, 1 << bit);
						i++;
					}

					// iterate all chunks to generate bitmaps
					/** @type {WeakMap<Chunk, number>} */
					const chunkModulesHash = new WeakMap();
					for (const chunk of chunks) {
						let hash = 0;
						for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
							hash |= /** @type {number} */ (moduleBits.get(module));
						}
						chunkModulesHash.set(chunk, hash);
					}

					for (const chunkA of chunks) {
						const chunkAHash =
							/** @type {number} */
							(chunkModulesHash.get(chunkA));
						const chunkAModulesCount =
							chunkGraph.getNumberOfChunkModules(chunkA);
						if (chunkAModulesCount === 0) continue;
						let bestModule = undefined;
						for (const module of chunkGraph.getChunkModulesIterable(chunkA)) {
							if (
								bestModule === undefined ||
								chunkGraph.getNumberOfModuleChunks(bestModule) >
									chunkGraph.getNumberOfModuleChunks(module)
							)
								bestModule = module;
						}
						loopB: for (const chunkB of chunkGraph.getModuleChunksIterable(
							/** @type {Module} */ (bestModule)
						)) {
							// as we iterate the same iterables twice
							// skip if we find ourselves
							if (chunkA === chunkB) continue;

							const chunkBModulesCount =
								chunkGraph.getNumberOfChunkModules(chunkB);

							// ids for empty chunks are not included
							if (chunkBModulesCount === 0) continue;

							// instead of swapping A and B just bail
							// as we loop twice the current A will be B and B then A
							if (chunkAModulesCount > chunkBModulesCount) continue;

							// is chunkA in chunkB?

							// we do a cheap check for the hash value
							const chunkBHash =
								/** @type {number} */
								(chunkModulesHash.get(chunkB));
							if ((chunkBHash & chunkAHash) !== chunkAHash) continue;

							// compare all modules
							for (const m of chunkGraph.getChunkModulesIterable(chunkA)) {
								if (!chunkGraph.isModuleInChunk(m, chunkB)) continue loopB;
							}
							/** @type {ChunkId[]} */
							(chunkB.ids).push(/** @type {ChunkId} */ (chunkA.id));
						}
					}
				}
			);
		});
	}
}
module.exports = FlagIncludedChunksPlugin;
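FlagIncludedChunksPlugin is wired up through the optimization.flagIncludedChunks option, which defaults to true in production mode. A minimal sketch, assuming webpack 5:

// webpack.config.js: force subset flagging outside production mode so the
// runtime can skip loading chunks whose modules are already present
module.exports = {
	mode: "development",
	optimization: {
		flagIncludedChunks: true
	}
};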
my-app/node_modules/webpack/lib/optimize/InnerGraph.js (generated, vendored, executable file; 346 lines added)
@@ -0,0 +1,346 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Sergey Melyukov @smelukov
*/

"use strict";

const { UsageState } = require("../ExportsInfo");

/** @typedef {import("estree").Node} AnyNode */
/** @typedef {import("../Dependency")} Dependency */
/** @typedef {import("../ModuleGraph")} ModuleGraph */
/** @typedef {import("../ModuleGraphConnection")} ModuleGraphConnection */
/** @typedef {import("../ModuleGraphConnection").ConnectionState} ConnectionState */
/** @typedef {import("../Parser").ParserState} ParserState */
/** @typedef {import("../javascript/JavascriptParser")} JavascriptParser */
/** @typedef {import("../util/runtime").RuntimeSpec} RuntimeSpec */

/** @typedef {Map<TopLevelSymbol | null, Set<string | TopLevelSymbol> | true | undefined>} InnerGraph */
/** @typedef {function(boolean | Set<string> | undefined): void} UsageCallback */

/**
 * @typedef {Object} StateObject
 * @property {InnerGraph} innerGraph
 * @property {TopLevelSymbol=} currentTopLevelSymbol
 * @property {Map<TopLevelSymbol, Set<UsageCallback>>} usageCallbackMap
 */

/** @typedef {false|StateObject} State */

/** @type {WeakMap<ParserState, State>} */
const parserStateMap = new WeakMap();
const topLevelSymbolTag = Symbol("top level symbol");

/**
 * @param {ParserState} parserState parser state
 * @returns {State | undefined} state
 */
function getState(parserState) {
	return parserStateMap.get(parserState);
}

/**
 * @param {ParserState} parserState parser state
 * @returns {void}
 */
exports.bailout = parserState => {
	parserStateMap.set(parserState, false);
};

/**
 * @param {ParserState} parserState parser state
 * @returns {void}
 */
exports.enable = parserState => {
	const state = parserStateMap.get(parserState);
	if (state === false) {
		return;
	}
	parserStateMap.set(parserState, {
		innerGraph: new Map(),
		currentTopLevelSymbol: undefined,
		usageCallbackMap: new Map()
	});
};

/**
 * @param {ParserState} parserState parser state
 * @returns {boolean} true, when enabled
 */
exports.isEnabled = parserState => {
	const state = parserStateMap.get(parserState);
	return !!state;
};

/**
 * @param {ParserState} state parser state
 * @param {TopLevelSymbol | null} symbol the symbol, or null for all symbols
 * @param {string | TopLevelSymbol | true} usage usage data
 * @returns {void}
 */
exports.addUsage = (state, symbol, usage) => {
	const innerGraphState = getState(state);

	if (innerGraphState) {
		const { innerGraph } = innerGraphState;
		const info = innerGraph.get(symbol);
		if (usage === true) {
			innerGraph.set(symbol, true);
		} else if (info === undefined) {
			innerGraph.set(symbol, new Set([usage]));
		} else if (info !== true) {
			info.add(usage);
		}
	}
};

/**
 * @param {JavascriptParser} parser the parser
 * @param {string} name name of variable
 * @param {string | TopLevelSymbol | true} usage usage data
 * @returns {void}
 */
exports.addVariableUsage = (parser, name, usage) => {
	const symbol =
		/** @type {TopLevelSymbol} */ (
			parser.getTagData(name, topLevelSymbolTag)
		) || exports.tagTopLevelSymbol(parser, name);
	if (symbol) {
		exports.addUsage(parser.state, symbol, usage);
	}
};

/**
 * @param {ParserState} state parser state
 * @returns {void}
 */
exports.inferDependencyUsage = state => {
	const innerGraphState = getState(state);

	if (!innerGraphState) {
		return;
	}

	const { innerGraph, usageCallbackMap } = innerGraphState;
	const processed = new Map();
	// flatten graph to terminal nodes (string, undefined or true)
	const nonTerminal = new Set(innerGraph.keys());
	while (nonTerminal.size > 0) {
		for (const key of nonTerminal) {
			/** @type {Set<string|TopLevelSymbol> | true} */
			let newSet = new Set();
			let isTerminal = true;
			const value = innerGraph.get(key);
			let alreadyProcessed = processed.get(key);
			if (alreadyProcessed === undefined) {
				alreadyProcessed = new Set();
				processed.set(key, alreadyProcessed);
			}
			if (value !== true && value !== undefined) {
				for (const item of value) {
					alreadyProcessed.add(item);
				}
				for (const item of value) {
					if (typeof item === "string") {
						newSet.add(item);
					} else {
						const itemValue = innerGraph.get(item);
						if (itemValue === true) {
							newSet = true;
							break;
						}
						if (itemValue !== undefined) {
							for (const i of itemValue) {
								if (i === key) continue;
								if (alreadyProcessed.has(i)) continue;
								newSet.add(i);
								if (typeof i !== "string") {
									isTerminal = false;
								}
							}
						}
					}
				}
				if (newSet === true) {
					innerGraph.set(key, true);
				} else if (newSet.size === 0) {
					innerGraph.set(key, undefined);
				} else {
					innerGraph.set(key, newSet);
				}
			}
			if (isTerminal) {
				nonTerminal.delete(key);

				// For the global key, merge with all other keys
				if (key === null) {
					const globalValue = innerGraph.get(null);
					if (globalValue) {
						for (const [key, value] of innerGraph) {
							if (key !== null && value !== true) {
								if (globalValue === true) {
									innerGraph.set(key, true);
								} else {
									const newSet = new Set(value);
									for (const item of globalValue) {
										newSet.add(item);
									}
									innerGraph.set(key, newSet);
								}
							}
						}
					}
				}
			}
		}
	}

	/** @type {Map<Dependency, true | Set<string>>} */
	for (const [symbol, callbacks] of usageCallbackMap) {
		const usage = /** @type {true | Set<string> | undefined} */ (
			innerGraph.get(symbol)
		);
		for (const callback of callbacks) {
			callback(usage === undefined ? false : usage);
		}
	}
};

/**
 * @param {ParserState} state parser state
 * @param {UsageCallback} onUsageCallback on usage callback
 */
exports.onUsage = (state, onUsageCallback) => {
	const innerGraphState = getState(state);

	if (innerGraphState) {
		const { usageCallbackMap, currentTopLevelSymbol } = innerGraphState;
		if (currentTopLevelSymbol) {
			let callbacks = usageCallbackMap.get(currentTopLevelSymbol);

			if (callbacks === undefined) {
				callbacks = new Set();
				usageCallbackMap.set(currentTopLevelSymbol, callbacks);
			}

			callbacks.add(onUsageCallback);
		} else {
			onUsageCallback(true);
		}
	} else {
		onUsageCallback(undefined);
	}
};

/**
 * @param {ParserState} state parser state
 * @param {TopLevelSymbol | undefined} symbol the symbol
 */
exports.setTopLevelSymbol = (state, symbol) => {
	const innerGraphState = getState(state);

	if (innerGraphState) {
		innerGraphState.currentTopLevelSymbol = symbol;
	}
};

/**
 * @param {ParserState} state parser state
 * @returns {TopLevelSymbol|void} usage data
 */
exports.getTopLevelSymbol = state => {
	const innerGraphState = getState(state);

	if (innerGraphState) {
		return innerGraphState.currentTopLevelSymbol;
	}
};

/**
 * @param {JavascriptParser} parser parser
 * @param {string} name name of variable
 * @returns {TopLevelSymbol | undefined} symbol
 */
exports.tagTopLevelSymbol = (parser, name) => {
	const innerGraphState = getState(parser.state);
	if (!innerGraphState) return;

	parser.defineVariable(name);

	const existingTag = /** @type {TopLevelSymbol} */ (
		parser.getTagData(name, topLevelSymbolTag)
	);
	if (existingTag) {
		return existingTag;
	}

	const fn = new TopLevelSymbol(name);
	parser.tagVariable(name, topLevelSymbolTag, fn);
	return fn;
};

/**
 * @param {Dependency} dependency the dependency
 * @param {Set<string> | boolean} usedByExports usedByExports info
 * @param {ModuleGraph} moduleGraph moduleGraph
 * @param {RuntimeSpec} runtime runtime
 * @returns {boolean} false, when unused. Otherwise true
 */
exports.isDependencyUsedByExports = (
	dependency,
	usedByExports,
	moduleGraph,
	runtime
) => {
	if (usedByExports === false) return false;
	if (usedByExports !== true && usedByExports !== undefined) {
		const selfModule = moduleGraph.getParentModule(dependency);
		const exportsInfo = moduleGraph.getExportsInfo(selfModule);
		let used = false;
		for (const exportName of usedByExports) {
			if (exportsInfo.getUsed(exportName, runtime) !== UsageState.Unused)
				used = true;
		}
		if (!used) return false;
	}
	return true;
};

/**
 * @param {Dependency} dependency the dependency
 * @param {Set<string> | boolean | undefined} usedByExports usedByExports info
 * @param {ModuleGraph} moduleGraph moduleGraph
 * @returns {null | false | function(ModuleGraphConnection, RuntimeSpec): ConnectionState} function to determine if the connection is active
 */
exports.getDependencyUsedByExportsCondition = (
	dependency,
	usedByExports,
	moduleGraph
) => {
	if (usedByExports === false) return false;
	if (usedByExports !== true && usedByExports !== undefined) {
		const selfModule = moduleGraph.getParentModule(dependency);
		const exportsInfo = moduleGraph.getExportsInfo(selfModule);
		return (connections, runtime) => {
			for (const exportName of usedByExports) {
				if (exportsInfo.getUsed(exportName, runtime) !== UsageState.Unused)
					return true;
			}
			return false;
		};
	}
	return null;
};

class TopLevelSymbol {
	/**
	 * @param {string} name name of the variable
	 */
	constructor(name) {
		this.name = name;
	}
}

exports.TopLevelSymbol = TopLevelSymbol;
exports.topLevelSymbolTag = topLevelSymbolTag;
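InnerGraph is the analysis behind the optimization.innerGraph option, which is on by default in production mode. A minimal sketch of enabling it explicitly, plus an illustrative module showing the kind of dead code the inner graph exposes:

// webpack.config.js: innerGraph pays off together with usedExports tree shaking
module.exports = {
	mode: "production",
	optimization: {
		innerGraph: true,
		usedExports: true
	}
};

// Illustrative module: `big` is only reachable from the export `helper`, so
// when no surviving import uses `helper`, the inner graph lets webpack drop
// both `helper` and `big`:
//   function big() { /* heavy */ }
//   export function helper() { return big(); }
//   export function main() { return 42; }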
my-app/node_modules/webpack/lib/optimize/InnerGraphPlugin.js (generated, vendored, executable file; 377 lines added)
@@ -0,0 +1,377 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const {
	JAVASCRIPT_MODULE_TYPE_AUTO,
	JAVASCRIPT_MODULE_TYPE_ESM
} = require("../ModuleTypeConstants");
const PureExpressionDependency = require("../dependencies/PureExpressionDependency");
const InnerGraph = require("./InnerGraph");

/** @typedef {import("estree").ClassDeclaration} ClassDeclarationNode */
/** @typedef {import("estree").ClassExpression} ClassExpressionNode */
/** @typedef {import("estree").Node} Node */
/** @typedef {import("estree").VariableDeclarator} VariableDeclaratorNode */
/** @typedef {import("../../declarations/WebpackOptions").JavascriptParserOptions} JavascriptParserOptions */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Dependency")} Dependency */
/** @typedef {import("../dependencies/HarmonyImportSpecifierDependency")} HarmonyImportSpecifierDependency */
/** @typedef {import("../javascript/JavascriptParser")} JavascriptParser */
/** @typedef {import("./InnerGraph").InnerGraph} InnerGraph */
/** @typedef {import("./InnerGraph").TopLevelSymbol} TopLevelSymbol */

const { topLevelSymbolTag } = InnerGraph;

const PLUGIN_NAME = "InnerGraphPlugin";

class InnerGraphPlugin {
	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.compilation.tap(
			PLUGIN_NAME,
			(compilation, { normalModuleFactory }) => {
				const logger = compilation.getLogger("webpack.InnerGraphPlugin");

				compilation.dependencyTemplates.set(
					PureExpressionDependency,
					new PureExpressionDependency.Template()
				);

				/**
				 * @param {JavascriptParser} parser the parser
				 * @param {JavascriptParserOptions} parserOptions options
				 * @returns {void}
				 */
				const handler = (parser, parserOptions) => {
					const onUsageSuper = sup => {
						InnerGraph.onUsage(parser.state, usedByExports => {
							switch (usedByExports) {
								case undefined:
								case true:
									return;
								default: {
									const dep = new PureExpressionDependency(sup.range);
									dep.loc = sup.loc;
									dep.usedByExports = usedByExports;
									parser.state.module.addDependency(dep);
									break;
								}
							}
						});
					};

					parser.hooks.program.tap(PLUGIN_NAME, () => {
						InnerGraph.enable(parser.state);
					});

					parser.hooks.finish.tap(PLUGIN_NAME, () => {
						if (!InnerGraph.isEnabled(parser.state)) return;

						logger.time("infer dependency usage");
						InnerGraph.inferDependencyUsage(parser.state);
						logger.timeAggregate("infer dependency usage");
					});

					// During prewalking the following datastructures are filled with
					// nodes that have a TopLevelSymbol assigned and
					// variables are tagged with the assigned TopLevelSymbol

					// We distinguish 3 types of nodes:
					// 1. full statements (export default, function declaration)
					// 2. classes (class declaration, class expression)
					// 3. variable declarators (const x = ...)

					/** @type {WeakMap<Node, TopLevelSymbol>} */
					const statementWithTopLevelSymbol = new WeakMap();
					/** @type {WeakMap<Node, Node>} */
					const statementPurePart = new WeakMap();

					/** @type {WeakMap<ClassExpressionNode | ClassDeclarationNode, TopLevelSymbol>} */
					const classWithTopLevelSymbol = new WeakMap();

					/** @type {WeakMap<VariableDeclaratorNode, TopLevelSymbol>} */
					const declWithTopLevelSymbol = new WeakMap();
					/** @type {WeakSet<VariableDeclaratorNode>} */
					const pureDeclarators = new WeakSet();

					// The following hooks are used during prewalking:

					parser.hooks.preStatement.tap(PLUGIN_NAME, statement => {
						if (!InnerGraph.isEnabled(parser.state)) return;

						if (parser.scope.topLevelScope === true) {
							if (statement.type === "FunctionDeclaration") {
								const name = statement.id ? statement.id.name : "*default*";
								const fn = InnerGraph.tagTopLevelSymbol(parser, name);
								statementWithTopLevelSymbol.set(statement, fn);
								return true;
							}
						}
					});

					parser.hooks.blockPreStatement.tap(PLUGIN_NAME, statement => {
						if (!InnerGraph.isEnabled(parser.state)) return;

						if (parser.scope.topLevelScope === true) {
							if (
								statement.type === "ClassDeclaration" &&
								parser.isPure(statement, statement.range[0])
							) {
								const name = statement.id ? statement.id.name : "*default*";
								const fn = InnerGraph.tagTopLevelSymbol(parser, name);
								classWithTopLevelSymbol.set(statement, fn);
								return true;
							}
							if (statement.type === "ExportDefaultDeclaration") {
								const name = "*default*";
								const fn = InnerGraph.tagTopLevelSymbol(parser, name);
								const decl = statement.declaration;
								if (
									(decl.type === "ClassExpression" ||
										decl.type === "ClassDeclaration") &&
									parser.isPure(decl, decl.range[0])
								) {
									classWithTopLevelSymbol.set(decl, fn);
								} else if (parser.isPure(decl, statement.range[0])) {
									statementWithTopLevelSymbol.set(statement, fn);
									if (
										!decl.type.endsWith("FunctionExpression") &&
										!decl.type.endsWith("Declaration") &&
										decl.type !== "Literal"
									) {
										statementPurePart.set(statement, decl);
									}
								}
							}
						}
					});

					parser.hooks.preDeclarator.tap(PLUGIN_NAME, (decl, statement) => {
						if (!InnerGraph.isEnabled(parser.state)) return;
						if (
							parser.scope.topLevelScope === true &&
							decl.init &&
							decl.id.type === "Identifier"
						) {
							const name = decl.id.name;
							if (
								decl.init.type === "ClassExpression" &&
								parser.isPure(decl.init, decl.id.range[1])
							) {
								const fn = InnerGraph.tagTopLevelSymbol(parser, name);
								classWithTopLevelSymbol.set(decl.init, fn);
							} else if (parser.isPure(decl.init, decl.id.range[1])) {
								const fn = InnerGraph.tagTopLevelSymbol(parser, name);
								declWithTopLevelSymbol.set(decl, fn);
								if (
									!decl.init.type.endsWith("FunctionExpression") &&
									decl.init.type !== "Literal"
								) {
									pureDeclarators.add(decl);
								}
								return true;
							}
						}
					});

					// During real walking we set the TopLevelSymbol state to the assigned
					// TopLevelSymbol by using the filled datastructures.

					// In addition to tracking TopLevelSymbols, we sometimes need to
					// add a PureExpressionDependency. This is needed to skip execution
					// of pure expressions, even when they are not dropped due to
					// minimizing. Otherwise symbols used there might not exist anymore
					// as they are removed as unused by this optimization

					// When we find a reference to a TopLevelSymbol, we register a
					// TopLevelSymbol dependency from TopLevelSymbol in state to the
					// referenced TopLevelSymbol. This way we get a graph of all
					// TopLevelSymbols.

					// The following hooks are called during walking:

					parser.hooks.statement.tap(PLUGIN_NAME, statement => {
						if (!InnerGraph.isEnabled(parser.state)) return;
						if (parser.scope.topLevelScope === true) {
							InnerGraph.setTopLevelSymbol(parser.state, undefined);

							const fn = statementWithTopLevelSymbol.get(statement);
							if (fn) {
								InnerGraph.setTopLevelSymbol(parser.state, fn);
								const purePart = statementPurePart.get(statement);
								if (purePart) {
									InnerGraph.onUsage(parser.state, usedByExports => {
										switch (usedByExports) {
											case undefined:
											case true:
												return;
											default: {
												const dep = new PureExpressionDependency(
													purePart.range
												);
												dep.loc = statement.loc;
												dep.usedByExports = usedByExports;
												parser.state.module.addDependency(dep);
												break;
											}
										}
									});
								}
							}
						}
					});

					parser.hooks.classExtendsExpression.tap(
						PLUGIN_NAME,
						(expr, statement) => {
							if (!InnerGraph.isEnabled(parser.state)) return;
							if (parser.scope.topLevelScope === true) {
								const fn = classWithTopLevelSymbol.get(statement);
								if (
									fn &&
									parser.isPure(
										expr,
										statement.id ? statement.id.range[1] : statement.range[0]
									)
								) {
									InnerGraph.setTopLevelSymbol(parser.state, fn);
									onUsageSuper(expr);
								}
							}
						}
					);

					parser.hooks.classBodyElement.tap(
						PLUGIN_NAME,
						(element, classDefinition) => {
							if (!InnerGraph.isEnabled(parser.state)) return;
							if (parser.scope.topLevelScope === true) {
								const fn = classWithTopLevelSymbol.get(classDefinition);
								if (fn) {
									InnerGraph.setTopLevelSymbol(parser.state, undefined);
								}
							}
						}
					);

					parser.hooks.classBodyValue.tap(
						PLUGIN_NAME,
						(expression, element, classDefinition) => {
							if (!InnerGraph.isEnabled(parser.state)) return;
							if (parser.scope.topLevelScope === true) {
								const fn = classWithTopLevelSymbol.get(classDefinition);
								if (fn) {
									if (
										!element.static ||
										parser.isPure(
											expression,
											element.key ? element.key.range[1] : element.range[0]
										)
									) {
										InnerGraph.setTopLevelSymbol(parser.state, fn);
										if (element.type !== "MethodDefinition" && element.static) {
											InnerGraph.onUsage(parser.state, usedByExports => {
												switch (usedByExports) {
													case undefined:
													case true:
														return;
													default: {
														const dep = new PureExpressionDependency(
															expression.range
														);
														dep.loc = expression.loc;
														dep.usedByExports = usedByExports;
														parser.state.module.addDependency(dep);
														break;
													}
												}
											});
										}
									} else {
										InnerGraph.setTopLevelSymbol(parser.state, undefined);
									}
								}
							}
						}
					);

					parser.hooks.declarator.tap(PLUGIN_NAME, (decl, statement) => {
						if (!InnerGraph.isEnabled(parser.state)) return;
						const fn = declWithTopLevelSymbol.get(decl);

						if (fn) {
							InnerGraph.setTopLevelSymbol(parser.state, fn);
							if (pureDeclarators.has(decl)) {
								if (decl.init.type === "ClassExpression") {
									if (decl.init.superClass) {
										onUsageSuper(decl.init.superClass);
									}
								} else {
									InnerGraph.onUsage(parser.state, usedByExports => {
										switch (usedByExports) {
											case undefined:
											case true:
												return;
											default: {
												const dep = new PureExpressionDependency(
													decl.init.range
												);
												dep.loc = decl.loc;
												dep.usedByExports = usedByExports;
												parser.state.module.addDependency(dep);
												break;
											}
										}
									});
								}
							}
							parser.walkExpression(decl.init);
							InnerGraph.setTopLevelSymbol(parser.state, undefined);
							return true;
						}
					});

					parser.hooks.expression
						.for(topLevelSymbolTag)
						.tap(PLUGIN_NAME, () => {
							const topLevelSymbol = /** @type {TopLevelSymbol} */ (
								parser.currentTagData
							);
							const currentTopLevelSymbol = InnerGraph.getTopLevelSymbol(
								parser.state
							);
							InnerGraph.addUsage(
								parser.state,
								topLevelSymbol,
								currentTopLevelSymbol || true
							);
						});
					parser.hooks.assign.for(topLevelSymbolTag).tap(PLUGIN_NAME, expr => {
						if (!InnerGraph.isEnabled(parser.state)) return;
						if (expr.operator === "=") return true;
					});
				};
				normalModuleFactory.hooks.parser
					.for(JAVASCRIPT_MODULE_TYPE_AUTO)
					.tap(PLUGIN_NAME, handler);
				normalModuleFactory.hooks.parser
					.for(JAVASCRIPT_MODULE_TYPE_ESM)
					.tap(PLUGIN_NAME, handler);

				compilation.hooks.finishModules.tap(PLUGIN_NAME, () => {
					logger.timeAggregateEnd("infer dependency usage");
				});
			}
		);
	}
}

module.exports = InnerGraphPlugin;
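To make the prewalk hooks above concrete, here is an illustrative source module (makeStyles and ./styles are hypothetical names); the comments say which parser hook from the plugin records each top-level construct:

// Illustrative only; not part of webpack.
import { makeStyles } from "./styles";

export function renderHeader() {}            // preStatement: FunctionDeclaration
class Base {}                                // blockPreStatement: pure ClassDeclaration
const styles = /*#__PURE__*/ makeStyles();   // preDeclarator: pure variable declarator
export default class extends Base {}        // blockPreStatement + classExtendsExpression
export { styles };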
my-app/node_modules/webpack/lib/optimize/LimitChunkCountPlugin.js (generated, vendored, executable file; 281 lines added)
@@ -0,0 +1,281 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { STAGE_ADVANCED } = require("../OptimizationStages");
const LazyBucketSortedSet = require("../util/LazyBucketSortedSet");
const { compareChunks } = require("../util/comparators");
const createSchemaValidation = require("../util/create-schema-validation");

/** @typedef {import("../../declarations/plugins/optimize/LimitChunkCountPlugin").LimitChunkCountPluginOptions} LimitChunkCountPluginOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Compiler")} Compiler */

const validate = createSchemaValidation(
	require("../../schemas/plugins/optimize/LimitChunkCountPlugin.check.js"),
	() => require("../../schemas/plugins/optimize/LimitChunkCountPlugin.json"),
	{
		name: "Limit Chunk Count Plugin",
		baseDataPath: "options"
	}
);

/**
 * @typedef {Object} ChunkCombination
 * @property {boolean} deleted this is set to true when combination was removed
 * @property {number} sizeDiff
 * @property {number} integratedSize
 * @property {Chunk} a
 * @property {Chunk} b
 * @property {number} aIdx
 * @property {number} bIdx
 * @property {number} aSize
 * @property {number} bSize
 */

/**
 * @template K, V
 * @param {Map<K, Set<V>>} map map
 * @param {K} key key
 * @param {V} value value
 */
const addToSetMap = (map, key, value) => {
	const set = map.get(key);
	if (set === undefined) {
		map.set(key, new Set([value]));
	} else {
		set.add(value);
	}
};

class LimitChunkCountPlugin {
	/**
	 * @param {LimitChunkCountPluginOptions=} options options object
	 */
	constructor(options) {
		validate(options);
		this.options = options;
	}

	/**
	 * @param {Compiler} compiler the webpack compiler
	 * @returns {void}
	 */
	apply(compiler) {
		const options = this.options;
		compiler.hooks.compilation.tap("LimitChunkCountPlugin", compilation => {
			compilation.hooks.optimizeChunks.tap(
				{
					name: "LimitChunkCountPlugin",
					stage: STAGE_ADVANCED
				},
				chunks => {
					const chunkGraph = compilation.chunkGraph;
					const maxChunks =
						/** @type {LimitChunkCountPluginOptions} */
						(options).maxChunks;
					if (!maxChunks) return;
					if (maxChunks < 1) return;
					if (compilation.chunks.size <= maxChunks) return;

					let remainingChunksToMerge = compilation.chunks.size - maxChunks;

					// order chunks in a deterministic way
					const compareChunksWithGraph = compareChunks(chunkGraph);
					const orderedChunks = Array.from(chunks).sort(compareChunksWithGraph);

					// create a lazy sorted data structure to keep all combinations
					// this is large. Size = chunks * (chunks - 1) / 2
					// It uses a multi layer bucket sort plus normal sort in the last layer
					// It's also lazy so only accessed buckets are sorted
					const combinations = new LazyBucketSortedSet(
						// Layer 1: ordered by largest size benefit
						c => c.sizeDiff,
						(a, b) => b - a,
						// Layer 2: ordered by smallest combined size
						/**
						 * @param {ChunkCombination} c combination
						 * @returns {number} integrated size
						 */
						c => c.integratedSize,
						(a, b) => a - b,
						// Layer 3: ordered by position difference in orderedChunk (-> to be deterministic)
						/**
						 * @param {ChunkCombination} c combination
						 * @returns {number} position difference
						 */
						c => c.bIdx - c.aIdx,
						(a, b) => a - b,
						// Layer 4: ordered by position in orderedChunk (-> to be deterministic)
						(a, b) => a.bIdx - b.bIdx
					);

					// we keep a mapping from chunk to all combinations
					// but this mapping is not kept up-to-date with deletions
					// so the `deleted` flag needs to be considered when iterating this
					/** @type {Map<Chunk, Set<ChunkCombination>>} */
					const combinationsByChunk = new Map();

					orderedChunks.forEach((b, bIdx) => {
						// create combination pairs with size and integrated size
						for (let aIdx = 0; aIdx < bIdx; aIdx++) {
							const a = orderedChunks[aIdx];
							// filter pairs that can not be integrated!
							if (!chunkGraph.canChunksBeIntegrated(a, b)) continue;

							const integratedSize = chunkGraph.getIntegratedChunksSize(
								a,
								b,
								options
							);

							const aSize = chunkGraph.getChunkSize(a, options);
							const bSize = chunkGraph.getChunkSize(b, options);
							const c = {
								deleted: false,
								sizeDiff: aSize + bSize - integratedSize,
								integratedSize,
								a,
								b,
								aIdx,
								bIdx,
								aSize,
								bSize
							};
							combinations.add(c);
							addToSetMap(combinationsByChunk, a, c);
							addToSetMap(combinationsByChunk, b, c);
						}
						return combinations;
					});

					// list of modified chunks during this run
					// combinations affected by this change are skipped to allow
					// further optimizations
					/** @type {Set<Chunk>} */
					const modifiedChunks = new Set();

					let changed = false;
					// eslint-disable-next-line no-constant-condition
					loop: while (true) {
						const combination = combinations.popFirst();
						if (combination === undefined) break;

						combination.deleted = true;
						const { a, b, integratedSize } = combination;

						// skip over pair when
						// one of the already merged chunks is a parent of one of the chunks
						if (modifiedChunks.size > 0) {
							const queue = new Set(a.groupsIterable);
							for (const group of b.groupsIterable) {
								queue.add(group);
							}
							for (const group of queue) {
								for (const mChunk of modifiedChunks) {
									if (mChunk !== a && mChunk !== b && mChunk.isInGroup(group)) {
										// This is a potential pair which needs recalculation
										// We can't do that now, but it may merge before following
										// pairs, so we leave space for it and consider the chunks
										// as modified, just for the worst case
										remainingChunksToMerge--;
										if (remainingChunksToMerge <= 0) break loop;
										modifiedChunks.add(a);
										modifiedChunks.add(b);
										continue loop;
									}
								}
								for (const parent of group.parentsIterable) {
									queue.add(parent);
								}
							}
						}

						// merge the chunks
						if (chunkGraph.canChunksBeIntegrated(a, b)) {
							chunkGraph.integrateChunks(a, b);
							compilation.chunks.delete(b);

							// flag chunk a as modified as further optimizations are possible for all children here
							modifiedChunks.add(a);

							changed = true;
							remainingChunksToMerge--;
							if (remainingChunksToMerge <= 0) break;

							// Update all affected combinations
							// delete all combinations with the removed chunk
							// we will use combinations with the kept chunk instead
							for (const combination of /** @type {Set<ChunkCombination>} */ (
								combinationsByChunk.get(a)
							)) {
								if (combination.deleted) continue;
								combination.deleted = true;
								combinations.delete(combination);
							}

							// Update combinations with the kept chunk with new sizes
							for (const combination of /** @type {Set<ChunkCombination>} */ (
								combinationsByChunk.get(b)
							)) {
								if (combination.deleted) continue;
								if (combination.a === b) {
									if (!chunkGraph.canChunksBeIntegrated(a, combination.b)) {
										combination.deleted = true;
										combinations.delete(combination);
										continue;
									}
									// Update size
									const newIntegratedSize = chunkGraph.getIntegratedChunksSize(
										a,
										combination.b,
										options
									);
									const finishUpdate = combinations.startUpdate(combination);
									combination.a = a;
									combination.integratedSize = newIntegratedSize;
									combination.aSize = integratedSize;
									combination.sizeDiff =
										combination.bSize + integratedSize - newIntegratedSize;
									finishUpdate();
								} else if (combination.b === b) {
									if (!chunkGraph.canChunksBeIntegrated(combination.a, a)) {
										combination.deleted = true;
										combinations.delete(combination);
										continue;
									}
									// Update size
									const newIntegratedSize = chunkGraph.getIntegratedChunksSize(
										combination.a,
										a,
										options
									);

									const finishUpdate = combinations.startUpdate(combination);
									combination.b = a;
									combination.integratedSize = newIntegratedSize;
									combination.bSize = integratedSize;
									combination.sizeDiff =
										integratedSize + combination.aSize - newIntegratedSize;
									finishUpdate();
								}
							}
							combinationsByChunk.set(
								a,
								/** @type {Set<ChunkCombination>} */ (
									combinationsByChunk.get(b)
								)
							);
							combinationsByChunk.delete(b);
						}
					}
					if (changed) return true;
				}
			);
		});
	}
}
module.exports = LimitChunkCountPlugin;
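A minimal usage sketch, assuming webpack 5; maxChunks: 1 collapses the build into a single chunk, a common setting for targets that cannot load additional files at runtime:

// webpack.config.js: cap the total number of emitted chunks
const webpack = require("webpack");

module.exports = {
	plugins: [
		new webpack.optimize.LimitChunkCountPlugin({ maxChunks: 1 })
	]
};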
177
my-app/node_modules/webpack/lib/optimize/MangleExportsPlugin.js
generated
vendored
Executable file
177
my-app/node_modules/webpack/lib/optimize/MangleExportsPlugin.js
generated
vendored
Executable file
|
@ -0,0 +1,177 @@
|
|||
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { UsageState } = require("../ExportsInfo");
const {
	numberToIdentifier,
	NUMBER_OF_IDENTIFIER_START_CHARS,
	NUMBER_OF_IDENTIFIER_CONTINUATION_CHARS
} = require("../Template");
const { assignDeterministicIds } = require("../ids/IdHelpers");
const { compareSelect, compareStringsNumeric } = require("../util/comparators");

/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../ExportsInfo")} ExportsInfo */
/** @typedef {import("../ExportsInfo").ExportInfo} ExportInfo */

/**
 * @param {ExportsInfo} exportsInfo exports info
 * @returns {boolean} mangle is possible
 */
const canMangle = exportsInfo => {
	if (exportsInfo.otherExportsInfo.getUsed(undefined) !== UsageState.Unused)
		return false;
	let hasSomethingToMangle = false;
	for (const exportInfo of exportsInfo.exports) {
		if (exportInfo.canMangle === true) {
			hasSomethingToMangle = true;
		}
	}
	return hasSomethingToMangle;
};

// Sort by name
const comparator = compareSelect(e => e.name, compareStringsNumeric);
/**
 * @param {boolean} deterministic use deterministic names
 * @param {ExportsInfo} exportsInfo exports info
 * @param {boolean | undefined} isNamespace is namespace object
 * @returns {void}
 */
const mangleExportsInfo = (deterministic, exportsInfo, isNamespace) => {
	if (!canMangle(exportsInfo)) return;
	const usedNames = new Set();
	/** @type {ExportInfo[]} */
	const mangleableExports = [];

	// Avoid renaming exports that are not provided when
	// 1. it's not a namespace export: non-provided exports can be found in prototype chain
	// 2. there are other provided exports and deterministic mode is chosen:
	//    non-provided exports would break the determinism
	let avoidMangleNonProvided = !isNamespace;
	if (!avoidMangleNonProvided && deterministic) {
		for (const exportInfo of exportsInfo.ownedExports) {
			if (exportInfo.provided !== false) {
				avoidMangleNonProvided = true;
				break;
			}
		}
	}
	for (const exportInfo of exportsInfo.ownedExports) {
		const name = exportInfo.name;
		if (!exportInfo.hasUsedName()) {
			if (
				// Can the export be mangled?
				exportInfo.canMangle !== true ||
				// Never rename 1 char exports
				(name.length === 1 && /^[a-zA-Z0-9_$]/.test(name)) ||
				// Don't rename 2 char exports in deterministic mode
				(deterministic &&
					name.length === 2 &&
					/^[a-zA-Z_$][a-zA-Z0-9_$]|^[1-9][0-9]/.test(name)) ||
				// Don't rename exports that are not provided
				(avoidMangleNonProvided && exportInfo.provided !== true)
			) {
				exportInfo.setUsedName(name);
				usedNames.add(name);
			} else {
				mangleableExports.push(exportInfo);
			}
		}
		if (exportInfo.exportsInfoOwned) {
			const used = exportInfo.getUsed(undefined);
			if (
				used === UsageState.OnlyPropertiesUsed ||
				used === UsageState.Unused
			) {
				mangleExportsInfo(deterministic, exportInfo.exportsInfo, false);
			}
		}
	}
	if (deterministic) {
		assignDeterministicIds(
			mangleableExports,
			e => e.name,
			comparator,
			(e, id) => {
				const name = numberToIdentifier(id);
				const size = usedNames.size;
				usedNames.add(name);
				if (size === usedNames.size) return false;
				e.setUsedName(name);
				return true;
			},
			[
				NUMBER_OF_IDENTIFIER_START_CHARS,
				NUMBER_OF_IDENTIFIER_START_CHARS *
					NUMBER_OF_IDENTIFIER_CONTINUATION_CHARS
			],
			NUMBER_OF_IDENTIFIER_CONTINUATION_CHARS,
			usedNames.size
		);
	} else {
		const usedExports = [];
		const unusedExports = [];
		for (const exportInfo of mangleableExports) {
			if (exportInfo.getUsed(undefined) === UsageState.Unused) {
				unusedExports.push(exportInfo);
			} else {
				usedExports.push(exportInfo);
			}
		}
		usedExports.sort(comparator);
		unusedExports.sort(comparator);
		let i = 0;
		for (const list of [usedExports, unusedExports]) {
			for (const exportInfo of list) {
				let name;
				do {
					name = numberToIdentifier(i++);
				} while (usedNames.has(name));
				exportInfo.setUsedName(name);
			}
		}
	}
};

class MangleExportsPlugin {
	/**
	 * @param {boolean} deterministic use deterministic names
	 */
	constructor(deterministic) {
		this._deterministic = deterministic;
	}
	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		const { _deterministic: deterministic } = this;
		compiler.hooks.compilation.tap("MangleExportsPlugin", compilation => {
			const moduleGraph = compilation.moduleGraph;
			compilation.hooks.optimizeCodeGeneration.tap(
				"MangleExportsPlugin",
				modules => {
					if (compilation.moduleMemCaches) {
						throw new Error(
							"optimization.mangleExports can't be used with cacheUnaffected as export mangling is a global effect"
						);
					}
					for (const module of modules) {
						const isNamespace =
							module.buildMeta && module.buildMeta.exportsType === "namespace";
						const exportsInfo = moduleGraph.getExportsInfo(module);
						mangleExportsInfo(deterministic, exportsInfo, isNamespace);
					}
				}
			);
		});
	}
}

module.exports = MangleExportsPlugin;
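MangleExportsPlugin is not normally instantiated by hand; webpack applies it internally based on the optimization.mangleExports option. A minimal configuration sketch, assuming webpack 5's documented option values:

// webpack.config.js (illustrative sketch)
module.exports = {
	optimization: {
		// "deterministic" keeps mangled export names stable across builds
		// (names of up to 2 chars); true picks the shortest names instead
		mangleExports: "deterministic"
	}
};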
115
my-app/node_modules/webpack/lib/optimize/MergeDuplicateChunksPlugin.js
generated
vendored
Executable file
@@ -0,0 +1,115 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { STAGE_BASIC } = require("../OptimizationStages");
const { runtimeEqual } = require("../util/runtime");

/** @typedef {import("../Compiler")} Compiler */

class MergeDuplicateChunksPlugin {
	/**
	 * @param {Compiler} compiler the compiler
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.compilation.tap(
			"MergeDuplicateChunksPlugin",
			compilation => {
				compilation.hooks.optimizeChunks.tap(
					{
						name: "MergeDuplicateChunksPlugin",
						stage: STAGE_BASIC
					},
					chunks => {
						const { chunkGraph, moduleGraph } = compilation;

						// remember already tested chunks for performance
						const notDuplicates = new Set();

						// for each chunk
						for (const chunk of chunks) {
							// track a Set of all chunks that could be duplicates
							let possibleDuplicates;
							for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
								if (possibleDuplicates === undefined) {
									// when possibleDuplicates is not yet set,
									// create a new Set from chunks of the current module
									// including only chunks with the same number of modules
									for (const dup of chunkGraph.getModuleChunksIterable(
										module
									)) {
										if (
											dup !== chunk &&
											chunkGraph.getNumberOfChunkModules(chunk) ===
												chunkGraph.getNumberOfChunkModules(dup) &&
											!notDuplicates.has(dup)
										) {
											// delay allocating the new Set until here, reduce memory pressure
											if (possibleDuplicates === undefined) {
												possibleDuplicates = new Set();
											}
											possibleDuplicates.add(dup);
										}
									}
									// when no chunk is possible we can break here
									if (possibleDuplicates === undefined) break;
								} else {
									// validate existing possible duplicates
									for (const dup of possibleDuplicates) {
										// remove possible duplicate when module is not contained
										if (!chunkGraph.isModuleInChunk(module, dup)) {
											possibleDuplicates.delete(dup);
										}
									}
									// when all chunks have been removed we can break here
									if (possibleDuplicates.size === 0) break;
								}
							}

							// when we found duplicates
							if (
								possibleDuplicates !== undefined &&
								possibleDuplicates.size > 0
							) {
								outer: for (const otherChunk of possibleDuplicates) {
									if (otherChunk.hasRuntime() !== chunk.hasRuntime()) continue;
									if (chunkGraph.getNumberOfEntryModules(chunk) > 0) continue;
									if (chunkGraph.getNumberOfEntryModules(otherChunk) > 0)
										continue;
									if (!runtimeEqual(chunk.runtime, otherChunk.runtime)) {
										for (const module of chunkGraph.getChunkModulesIterable(
											chunk
										)) {
											const exportsInfo = moduleGraph.getExportsInfo(module);
											if (
												!exportsInfo.isEquallyUsed(
													chunk.runtime,
													otherChunk.runtime
												)
											) {
												continue outer;
											}
										}
									}
									// merge them
									if (chunkGraph.canChunksBeIntegrated(chunk, otherChunk)) {
										chunkGraph.integrateChunks(chunk, otherChunk);
										compilation.chunks.delete(otherChunk);
									}
								}
							}

							// don't check already processed chunks twice
							notDuplicates.add(chunk);
						}
					}
				);
			}
		);
	}
}
module.exports = MergeDuplicateChunksPlugin;
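This plugin is likewise driven by configuration rather than manual instantiation; a sketch assuming webpack 5's optimization.mergeDuplicateChunks flag (enabled by default):

// webpack.config.js (illustrative sketch)
module.exports = {
	optimization: {
		// set to false to skip the duplicate-chunk merge pass above
		mergeDuplicateChunks: true
	}
};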
113
my-app/node_modules/webpack/lib/optimize/MinChunkSizePlugin.js
generated
vendored
Executable file
@@ -0,0 +1,113 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { STAGE_ADVANCED } = require("../OptimizationStages");
const createSchemaValidation = require("../util/create-schema-validation");

/** @typedef {import("../../declarations/plugins/optimize/MinChunkSizePlugin").MinChunkSizePluginOptions} MinChunkSizePluginOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Compiler")} Compiler */

const validate = createSchemaValidation(
	require("../../schemas/plugins/optimize/MinChunkSizePlugin.check.js"),
	() => require("../../schemas/plugins/optimize/MinChunkSizePlugin.json"),
	{
		name: "Min Chunk Size Plugin",
		baseDataPath: "options"
	}
);

class MinChunkSizePlugin {
	/**
	 * @param {MinChunkSizePluginOptions} options options object
	 */
	constructor(options) {
		validate(options);
		this.options = options;
	}

	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		const options = this.options;
		const minChunkSize = options.minChunkSize;
		compiler.hooks.compilation.tap("MinChunkSizePlugin", compilation => {
			compilation.hooks.optimizeChunks.tap(
				{
					name: "MinChunkSizePlugin",
					stage: STAGE_ADVANCED
				},
				chunks => {
					const chunkGraph = compilation.chunkGraph;
					const equalOptions = {
						chunkOverhead: 1,
						entryChunkMultiplicator: 1
					};

					const chunkSizesMap = new Map();
					/** @type {[Chunk, Chunk][]} */
					const combinations = [];
					/** @type {Chunk[]} */
					const smallChunks = [];
					const visitedChunks = [];
					for (const a of chunks) {
						// check if one of the chunk sizes is smaller than the minChunkSize
						// and filter pairs that can NOT be integrated!
						if (chunkGraph.getChunkSize(a, equalOptions) < minChunkSize) {
							smallChunks.push(a);
							for (const b of visitedChunks) {
								if (chunkGraph.canChunksBeIntegrated(b, a))
									combinations.push([b, a]);
							}
						} else {
							for (const b of smallChunks) {
								if (chunkGraph.canChunksBeIntegrated(b, a))
									combinations.push([b, a]);
							}
						}
						chunkSizesMap.set(a, chunkGraph.getChunkSize(a, options));
						visitedChunks.push(a);
					}

					const sortedSizeFilteredExtendedPairCombinations = combinations
						.map(pair => {
							// extend combination pairs with size and integrated size
							const a = chunkSizesMap.get(pair[0]);
							const b = chunkSizesMap.get(pair[1]);
							const ab = chunkGraph.getIntegratedChunksSize(
								pair[0],
								pair[1],
								options
							);
							/** @type {[number, number, Chunk, Chunk]} */
							const extendedPair = [a + b - ab, ab, pair[0], pair[1]];
							return extendedPair;
						})
						.sort((a, b) => {
							// sadly javascript does an in place sort here
							// sort by size
							const diff = b[0] - a[0];
							if (diff !== 0) return diff;
							return a[1] - b[1];
						});

					if (sortedSizeFilteredExtendedPairCombinations.length === 0) return;

					const pair = sortedSizeFilteredExtendedPairCombinations[0];

					chunkGraph.integrateChunks(pair[2], pair[3]);
					compilation.chunks.delete(pair[3]);
					return true;
				}
			);
		});
	}
}
module.exports = MinChunkSizePlugin;
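A usage sketch for the plugin above, assuming the webpack.optimize.MinChunkSizePlugin export from webpack 5's public API; the threshold is an arbitrary illustration:

// webpack.config.js (illustrative sketch)
const webpack = require("webpack");

module.exports = {
	plugins: [
		// repeatedly merge the most beneficial pair until no chunk
		// is smaller than ~10 kB
		new webpack.optimize.MinChunkSizePlugin({ minChunkSize: 10000 })
	]
};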
35
my-app/node_modules/webpack/lib/optimize/MinMaxSizeWarning.js
generated
vendored
Executable file
@@ -0,0 +1,35 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const SizeFormatHelpers = require("../SizeFormatHelpers");
const WebpackError = require("../WebpackError");

class MinMaxSizeWarning extends WebpackError {
	/**
	 * @param {string[] | undefined} keys keys
	 * @param {number} minSize minimum size
	 * @param {number} maxSize maximum size
	 */
	constructor(keys, minSize, maxSize) {
		let keysMessage = "Fallback cache group";
		if (keys) {
			keysMessage =
				keys.length > 1
					? `Cache groups ${keys.sort().join(", ")}`
					: `Cache group ${keys[0]}`;
		}
		super(
			`SplitChunksPlugin\n` +
				`${keysMessage}\n` +
				`Configured minSize (${SizeFormatHelpers.formatSize(minSize)}) is ` +
				`bigger than maxSize (${SizeFormatHelpers.formatSize(maxSize)}).\n` +
				"This seems to be an invalid optimization.splitChunks configuration."
		);
	}
}

module.exports = MinMaxSizeWarning;
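For context, a sketch of the kind of optimization.splitChunks setup that this warning class reports (minSize configured larger than maxSize); the numbers are arbitrary illustrations:

// webpack.config.js (illustrative sketch of a misconfiguration)
module.exports = {
	optimization: {
		splitChunks: {
			minSize: 50000,
			maxSize: 20000 // smaller than minSize, so MinMaxSizeWarning is emitted
		}
	}
};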
942
my-app/node_modules/webpack/lib/optimize/ModuleConcatenationPlugin.js
generated
vendored
Executable file
@@ -0,0 +1,942 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const asyncLib = require("neo-async");
const ChunkGraph = require("../ChunkGraph");
const ModuleGraph = require("../ModuleGraph");
const { STAGE_DEFAULT } = require("../OptimizationStages");
const HarmonyImportDependency = require("../dependencies/HarmonyImportDependency");
const { compareModulesByIdentifier } = require("../util/comparators");
const {
	intersectRuntime,
	mergeRuntimeOwned,
	filterRuntime,
	runtimeToString,
	mergeRuntime
} = require("../util/runtime");
const ConcatenatedModule = require("./ConcatenatedModule");

/** @typedef {import("../Compilation")} Compilation */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Module")} Module */
/** @typedef {import("../RequestShortener")} RequestShortener */
/** @typedef {import("../util/runtime").RuntimeSpec} RuntimeSpec */

/**
 * @typedef {Object} Statistics
 * @property {number} cached
 * @property {number} alreadyInConfig
 * @property {number} invalidModule
 * @property {number} incorrectChunks
 * @property {number} incorrectDependency
 * @property {number} incorrectModuleDependency
 * @property {number} incorrectChunksOfImporter
 * @property {number} incorrectRuntimeCondition
 * @property {number} importerFailed
 * @property {number} added
 */

/**
 * @param {string} msg message
 * @returns {string} formatted message
 */
const formatBailoutReason = msg => {
	return "ModuleConcatenation bailout: " + msg;
};

class ModuleConcatenationPlugin {
	constructor(options) {
		if (typeof options !== "object") options = {};
		this.options = options;
	}

	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		const { _backCompat: backCompat } = compiler;
		compiler.hooks.compilation.tap("ModuleConcatenationPlugin", compilation => {
			if (compilation.moduleMemCaches) {
				throw new Error(
					"optimization.concatenateModules can't be used with cacheUnaffected as module concatenation is a global effect"
				);
			}
			const moduleGraph = compilation.moduleGraph;
			/** @type {Map<Module, string | ((requestShortener: RequestShortener) => string)>} */
			const bailoutReasonMap = new Map();

			/**
			 * @param {Module} module the module
			 * @param {string | ((requestShortener: RequestShortener) => string)} reason the reason
			 */
			const setBailoutReason = (module, reason) => {
				setInnerBailoutReason(module, reason);
				moduleGraph
					.getOptimizationBailout(module)
					.push(
						typeof reason === "function"
							? rs => formatBailoutReason(reason(rs))
							: formatBailoutReason(reason)
					);
			};

			/**
			 * @param {Module} module the module
			 * @param {string | ((requestShortener: RequestShortener) => string)} reason the reason
			 */
			const setInnerBailoutReason = (module, reason) => {
				bailoutReasonMap.set(module, reason);
			};

			/**
			 * @param {Module} module the module
			 * @param {RequestShortener} requestShortener the request shortener
			 * @returns {string | ((requestShortener: RequestShortener) => string) | undefined} the reason
			 */
			const getInnerBailoutReason = (module, requestShortener) => {
				const reason = bailoutReasonMap.get(module);
				if (typeof reason === "function") return reason(requestShortener);
				return reason;
			};

			/**
			 * @param {Module} module the module
			 * @param {Module | function(RequestShortener): string} problem the problem
			 * @returns {(requestShortener: RequestShortener) => string} the reason
			 */
			const formatBailoutWarning = (module, problem) => requestShortener => {
				if (typeof problem === "function") {
					return formatBailoutReason(
						`Cannot concat with ${module.readableIdentifier(
							requestShortener
						)}: ${problem(requestShortener)}`
					);
				}
				const reason = getInnerBailoutReason(module, requestShortener);
				const reasonWithPrefix = reason ? `: ${reason}` : "";
				if (module === problem) {
					return formatBailoutReason(
						`Cannot concat with ${module.readableIdentifier(
							requestShortener
						)}${reasonWithPrefix}`
					);
				} else {
					return formatBailoutReason(
						`Cannot concat with ${module.readableIdentifier(
							requestShortener
						)} because of ${problem.readableIdentifier(
							requestShortener
						)}${reasonWithPrefix}`
					);
				}
			};

			compilation.hooks.optimizeChunkModules.tapAsync(
				{
					name: "ModuleConcatenationPlugin",
					stage: STAGE_DEFAULT
				},
				(allChunks, modules, callback) => {
					const logger = compilation.getLogger(
						"webpack.ModuleConcatenationPlugin"
					);
					const { chunkGraph, moduleGraph } = compilation;
					const relevantModules = [];
					const possibleInners = new Set();
					const context = {
						chunkGraph,
						moduleGraph
					};
					logger.time("select relevant modules");
					for (const module of modules) {
						let canBeRoot = true;
						let canBeInner = true;

						const bailoutReason = module.getConcatenationBailoutReason(context);
						if (bailoutReason) {
							setBailoutReason(module, bailoutReason);
							continue;
						}

						// Must not be an async module
						if (moduleGraph.isAsync(module)) {
							setBailoutReason(module, `Module is async`);
							continue;
						}

						// Must be in strict mode
						if (!module.buildInfo.strict) {
							setBailoutReason(module, `Module is not in strict mode`);
							continue;
						}

						// Module must be in any chunk (we don't want to do useless work)
						if (chunkGraph.getNumberOfModuleChunks(module) === 0) {
							setBailoutReason(module, "Module is not in any chunk");
							continue;
						}

						// Exports must be known (and not dynamic)
						const exportsInfo = moduleGraph.getExportsInfo(module);
						const relevantExports = exportsInfo.getRelevantExports(undefined);
						const unknownReexports = relevantExports.filter(exportInfo => {
							return (
								exportInfo.isReexport() && !exportInfo.getTarget(moduleGraph)
							);
						});
						if (unknownReexports.length > 0) {
							setBailoutReason(
								module,
								`Reexports in this module do not have a static target (${Array.from(
									unknownReexports,
									exportInfo =>
										`${
											exportInfo.name || "other exports"
										}: ${exportInfo.getUsedInfo()}`
								).join(", ")})`
							);
							continue;
						}

						// Root modules must have a static list of exports
						const unknownProvidedExports = relevantExports.filter(
							exportInfo => {
								return exportInfo.provided !== true;
							}
						);
						if (unknownProvidedExports.length > 0) {
							setBailoutReason(
								module,
								`List of module exports is dynamic (${Array.from(
									unknownProvidedExports,
									exportInfo =>
										`${
											exportInfo.name || "other exports"
										}: ${exportInfo.getProvidedInfo()} and ${exportInfo.getUsedInfo()}`
								).join(", ")})`
							);
							canBeRoot = false;
						}

						// Module must not be an entry point
						if (chunkGraph.isEntryModule(module)) {
							setInnerBailoutReason(module, "Module is an entry point");
							canBeInner = false;
						}

						if (canBeRoot) relevantModules.push(module);
						if (canBeInner) possibleInners.add(module);
					}
					logger.timeEnd("select relevant modules");
					logger.debug(
						`${relevantModules.length} potential root modules, ${possibleInners.size} potential inner modules`
					);
					// sort by depth
					// modules with lower depth are more likely suited as roots
					// this improves performance, because modules already selected as inner are skipped
					logger.time("sort relevant modules");
					relevantModules.sort((a, b) => {
						return moduleGraph.getDepth(a) - moduleGraph.getDepth(b);
					});
					logger.timeEnd("sort relevant modules");

					/** @type {Statistics} */
					const stats = {
						cached: 0,
						alreadyInConfig: 0,
						invalidModule: 0,
						incorrectChunks: 0,
						incorrectDependency: 0,
						incorrectModuleDependency: 0,
						incorrectChunksOfImporter: 0,
						incorrectRuntimeCondition: 0,
						importerFailed: 0,
						added: 0
					};
					let statsCandidates = 0;
					let statsSizeSum = 0;
					let statsEmptyConfigurations = 0;

					logger.time("find modules to concatenate");
					const concatConfigurations = [];
					const usedAsInner = new Set();
					for (const currentRoot of relevantModules) {
						// when used by another configuration as inner:
						// the other configuration is better and we can skip this one
						// TODO reconsider that when it's only used in a different runtime
						if (usedAsInner.has(currentRoot)) continue;

						let chunkRuntime = undefined;
						for (const r of chunkGraph.getModuleRuntimes(currentRoot)) {
							chunkRuntime = mergeRuntimeOwned(chunkRuntime, r);
						}
						const exportsInfo = moduleGraph.getExportsInfo(currentRoot);
						const filteredRuntime = filterRuntime(chunkRuntime, r =>
							exportsInfo.isModuleUsed(r)
						);
						const activeRuntime =
							filteredRuntime === true
								? chunkRuntime
								: filteredRuntime === false
								? undefined
								: filteredRuntime;

						// create a configuration with the root
						const currentConfiguration = new ConcatConfiguration(
							currentRoot,
							activeRuntime
						);

						// cache failures to add modules
						const failureCache = new Map();

						// potential optional import candidates
						/** @type {Set<Module>} */
						const candidates = new Set();

						// try to add all imports
						for (const imp of this._getImports(
							compilation,
							currentRoot,
							activeRuntime
						)) {
							candidates.add(imp);
						}

						for (const imp of candidates) {
							const impCandidates = new Set();
							const problem = this._tryToAdd(
								compilation,
								currentConfiguration,
								imp,
								chunkRuntime,
								activeRuntime,
								possibleInners,
								impCandidates,
								failureCache,
								chunkGraph,
								true,
								stats
							);
							if (problem) {
								failureCache.set(imp, problem);
								currentConfiguration.addWarning(imp, problem);
							} else {
								for (const c of impCandidates) {
									candidates.add(c);
								}
							}
						}
						statsCandidates += candidates.size;
						if (!currentConfiguration.isEmpty()) {
							const modules = currentConfiguration.getModules();
							statsSizeSum += modules.size;
							concatConfigurations.push(currentConfiguration);
							for (const module of modules) {
								if (module !== currentConfiguration.rootModule) {
									usedAsInner.add(module);
								}
							}
						} else {
							statsEmptyConfigurations++;
							const optimizationBailouts =
								moduleGraph.getOptimizationBailout(currentRoot);
							for (const warning of currentConfiguration.getWarningsSorted()) {
								optimizationBailouts.push(
									formatBailoutWarning(warning[0], warning[1])
								);
							}
						}
					}
					logger.timeEnd("find modules to concatenate");
					logger.debug(
						`${
							concatConfigurations.length
						} successful concat configurations (avg size: ${
							statsSizeSum / concatConfigurations.length
						}), ${statsEmptyConfigurations} bailed out completely`
					);
					logger.debug(
						`${statsCandidates} candidates were considered for adding (${stats.cached} cached failure, ${stats.alreadyInConfig} already in config, ${stats.invalidModule} invalid module, ${stats.incorrectChunks} incorrect chunks, ${stats.incorrectDependency} incorrect dependency, ${stats.incorrectChunksOfImporter} incorrect chunks of importer, ${stats.incorrectModuleDependency} incorrect module dependency, ${stats.incorrectRuntimeCondition} incorrect runtime condition, ${stats.importerFailed} importer failed, ${stats.added} added)`
					);
					// HACK: Sort configurations by length and start with the longest one
					// to get the biggest groups possible. Used modules are marked with usedModules
					// TODO: Allow to reuse existing configuration while trying to add dependencies.
					// This would improve performance. O(n^2) -> O(n)
					logger.time(`sort concat configurations`);
					concatConfigurations.sort((a, b) => {
						return b.modules.size - a.modules.size;
					});
					logger.timeEnd(`sort concat configurations`);
					const usedModules = new Set();

					logger.time("create concatenated modules");
					asyncLib.each(
						concatConfigurations,
						(concatConfiguration, callback) => {
							const rootModule = concatConfiguration.rootModule;

							// Avoid overlapping configurations
							// TODO: remove this when todo above is fixed
							if (usedModules.has(rootModule)) return callback();
							const modules = concatConfiguration.getModules();
							for (const m of modules) {
								usedModules.add(m);
							}

							// Create a new ConcatenatedModule
							let newModule = ConcatenatedModule.create(
								rootModule,
								modules,
								concatConfiguration.runtime,
								compiler.root,
								compilation.outputOptions.hashFunction
							);

							const build = () => {
								newModule.build(
									compiler.options,
									compilation,
									null,
									null,
									err => {
										if (err) {
											if (!err.module) {
												err.module = newModule;
											}
											return callback(err);
										}
										integrate();
									}
								);
							};

							const integrate = () => {
								if (backCompat) {
									ChunkGraph.setChunkGraphForModule(newModule, chunkGraph);
									ModuleGraph.setModuleGraphForModule(newModule, moduleGraph);
								}

								for (const warning of concatConfiguration.getWarningsSorted()) {
									moduleGraph
										.getOptimizationBailout(newModule)
										.push(formatBailoutWarning(warning[0], warning[1]));
								}
								moduleGraph.cloneModuleAttributes(rootModule, newModule);
								for (const m of modules) {
									// add to builtModules when one of the included modules was built
									if (compilation.builtModules.has(m)) {
										compilation.builtModules.add(newModule);
									}
									if (m !== rootModule) {
										// attach external references to the concatenated module too
										moduleGraph.copyOutgoingModuleConnections(
											m,
											newModule,
											c => {
												return (
													c.originModule === m &&
													!(
														c.dependency instanceof HarmonyImportDependency &&
														modules.has(c.module)
													)
												);
											}
										);
										// remove module from chunk
										for (const chunk of chunkGraph.getModuleChunksIterable(
											rootModule
										)) {
											const sourceTypes = chunkGraph.getChunkModuleSourceTypes(
												chunk,
												m
											);
											if (sourceTypes.size === 1) {
												chunkGraph.disconnectChunkAndModule(chunk, m);
											} else {
												const newSourceTypes = new Set(sourceTypes);
												newSourceTypes.delete("javascript");
												chunkGraph.setChunkModuleSourceTypes(
													chunk,
													m,
													newSourceTypes
												);
											}
										}
									}
								}
								compilation.modules.delete(rootModule);
								ChunkGraph.clearChunkGraphForModule(rootModule);
								ModuleGraph.clearModuleGraphForModule(rootModule);

								// remove module from chunk
								chunkGraph.replaceModule(rootModule, newModule);
								// replace module references with the concatenated module
								moduleGraph.moveModuleConnections(rootModule, newModule, c => {
									const otherModule =
										c.module === rootModule ? c.originModule : c.module;
									const innerConnection =
										c.dependency instanceof HarmonyImportDependency &&
										modules.has(/** @type {Module} */ (otherModule));
									return !innerConnection;
								});
								// add concatenated module to the compilation
								compilation.modules.add(newModule);

								callback();
							};

							build();
						},
						err => {
							logger.timeEnd("create concatenated modules");
							process.nextTick(callback.bind(null, err));
						}
					);
				}
			);
		});
	}

	/**
	 * @param {Compilation} compilation the compilation
	 * @param {Module} module the module to be added
	 * @param {RuntimeSpec} runtime the runtime scope
	 * @returns {Set<Module>} the imported modules
	 */
	_getImports(compilation, module, runtime) {
		const moduleGraph = compilation.moduleGraph;
		const set = new Set();
		for (const dep of module.dependencies) {
			// Get reference info only for harmony Dependencies
			if (!(dep instanceof HarmonyImportDependency)) continue;

			const connection = moduleGraph.getConnection(dep);
			// Reference is valid and has a module
			if (
				!connection ||
				!connection.module ||
				!connection.isTargetActive(runtime)
			) {
				continue;
			}

			const importedNames = compilation.getDependencyReferencedExports(
				dep,
				undefined
			);

			if (
				importedNames.every(i =>
					Array.isArray(i) ? i.length > 0 : i.name.length > 0
				) ||
				Array.isArray(moduleGraph.getProvidedExports(module))
			) {
				set.add(connection.module);
			}
		}
		return set;
	}

	/**
	 * @param {Compilation} compilation webpack compilation
	 * @param {ConcatConfiguration} config concat configuration (will be modified when added)
	 * @param {Module} module the module to be added
	 * @param {RuntimeSpec} runtime the runtime scope of the generated code
	 * @param {RuntimeSpec} activeRuntime the runtime scope of the root module
	 * @param {Set<Module>} possibleModules modules that are candidates
	 * @param {Set<Module>} candidates list of potential candidates (will be added to)
	 * @param {Map<Module, Module | function(RequestShortener): string>} failureCache cache for problematic modules to be more performant
	 * @param {ChunkGraph} chunkGraph the chunk graph
	 * @param {boolean} avoidMutateOnFailure avoid mutating the config when adding fails
	 * @param {Statistics} statistics gathering metrics
	 * @returns {null | Module | function(RequestShortener): string} the problematic module
	 */
	_tryToAdd(
		compilation,
		config,
		module,
		runtime,
		activeRuntime,
		possibleModules,
		candidates,
		failureCache,
		chunkGraph,
		avoidMutateOnFailure,
		statistics
	) {
		const cacheEntry = failureCache.get(module);
		if (cacheEntry) {
			statistics.cached++;
			return cacheEntry;
		}

		// Already added?
		if (config.has(module)) {
			statistics.alreadyInConfig++;
			return null;
		}

		// Not possible to add?
		if (!possibleModules.has(module)) {
			statistics.invalidModule++;
			failureCache.set(module, module); // cache failures for performance
			return module;
		}

		// Module must be in the correct chunks
		const missingChunks = Array.from(
			chunkGraph.getModuleChunksIterable(config.rootModule)
		).filter(chunk => !chunkGraph.isModuleInChunk(module, chunk));
		if (missingChunks.length > 0) {
			/**
			 * @param {RequestShortener} requestShortener request shortener
			 * @returns {string} problem description
			 */
			const problem = requestShortener => {
				const missingChunksList = Array.from(
					new Set(missingChunks.map(chunk => chunk.name || "unnamed chunk(s)"))
				).sort();
				const chunks = Array.from(
					new Set(
						Array.from(chunkGraph.getModuleChunksIterable(module)).map(
							chunk => chunk.name || "unnamed chunk(s)"
						)
					)
				).sort();
				return `Module ${module.readableIdentifier(
					requestShortener
				)} is not in the same chunk(s) (expected in chunk(s) ${missingChunksList.join(
					", "
				)}, module is in chunk(s) ${chunks.join(", ")})`;
			};
			statistics.incorrectChunks++;
			failureCache.set(module, problem); // cache failures for performance
			return problem;
		}

		const moduleGraph = compilation.moduleGraph;

		const incomingConnections =
			moduleGraph.getIncomingConnectionsByOriginModule(module);

		const incomingConnectionsFromNonModules =
			incomingConnections.get(null) || incomingConnections.get(undefined);
		if (incomingConnectionsFromNonModules) {
			const activeNonModulesConnections =
				incomingConnectionsFromNonModules.filter(connection => {
					// We are not interested in inactive connections
					// or connections without dependency
					return connection.isActive(runtime);
				});
			if (activeNonModulesConnections.length > 0) {
				/**
				 * @param {RequestShortener} requestShortener request shortener
				 * @returns {string} problem description
				 */
				const problem = requestShortener => {
					const importingExplanations = new Set(
						activeNonModulesConnections.map(c => c.explanation).filter(Boolean)
					);
					const explanations = Array.from(importingExplanations).sort();
					return `Module ${module.readableIdentifier(
						requestShortener
					)} is referenced ${
						explanations.length > 0
							? `by: ${explanations.join(", ")}`
							: "in an unsupported way"
					}`;
				};
				statistics.incorrectDependency++;
				failureCache.set(module, problem); // cache failures for performance
				return problem;
			}
		}

		/** @type {Map<Module, readonly ModuleGraph.ModuleGraphConnection[]>} */
		const incomingConnectionsFromModules = new Map();
		for (const [originModule, connections] of incomingConnections) {
			if (originModule) {
				// Ignore connection from orphan modules
				if (chunkGraph.getNumberOfModuleChunks(originModule) === 0) continue;

				// We don't care for connections from other runtimes
				let originRuntime = undefined;
				for (const r of chunkGraph.getModuleRuntimes(originModule)) {
					originRuntime = mergeRuntimeOwned(originRuntime, r);
				}

				if (!intersectRuntime(runtime, originRuntime)) continue;

				// We are not interested in inactive connections
				const activeConnections = connections.filter(connection =>
					connection.isActive(runtime)
				);
				if (activeConnections.length > 0)
					incomingConnectionsFromModules.set(originModule, activeConnections);
			}
		}

		const incomingModules = Array.from(incomingConnectionsFromModules.keys());

		// Module must be in the same chunks like the referencing module
		const otherChunkModules = incomingModules.filter(originModule => {
			for (const chunk of chunkGraph.getModuleChunksIterable(
				config.rootModule
			)) {
				if (!chunkGraph.isModuleInChunk(originModule, chunk)) {
					return true;
				}
			}
			return false;
		});
		if (otherChunkModules.length > 0) {
			/**
			 * @param {RequestShortener} requestShortener request shortener
			 * @returns {string} problem description
			 */
			const problem = requestShortener => {
				const names = otherChunkModules
					.map(m => m.readableIdentifier(requestShortener))
					.sort();
				return `Module ${module.readableIdentifier(
					requestShortener
				)} is referenced from different chunks by these modules: ${names.join(
					", "
				)}`;
			};
			statistics.incorrectChunksOfImporter++;
			failureCache.set(module, problem); // cache failures for performance
			return problem;
		}

		/** @type {Map<Module, readonly ModuleGraph.ModuleGraphConnection[]>} */
		const nonHarmonyConnections = new Map();
		for (const [originModule, connections] of incomingConnectionsFromModules) {
			const selected = connections.filter(
				connection =>
					!connection.dependency ||
					!(connection.dependency instanceof HarmonyImportDependency)
			);
			if (selected.length > 0)
				nonHarmonyConnections.set(originModule, connections);
		}
		if (nonHarmonyConnections.size > 0) {
			/**
			 * @param {RequestShortener} requestShortener request shortener
			 * @returns {string} problem description
			 */
			const problem = requestShortener => {
				const names = Array.from(nonHarmonyConnections)
					.map(([originModule, connections]) => {
						return `${originModule.readableIdentifier(
							requestShortener
						)} (referenced with ${Array.from(
							new Set(
								connections
									.map(c => c.dependency && c.dependency.type)
									.filter(Boolean)
							)
						)
							.sort()
							.join(", ")})`;
					})
					.sort();
				return `Module ${module.readableIdentifier(
					requestShortener
				)} is referenced from these modules with unsupported syntax: ${names.join(
					", "
				)}`;
			};
			statistics.incorrectModuleDependency++;
			failureCache.set(module, problem); // cache failures for performance
			return problem;
		}

		if (runtime !== undefined && typeof runtime !== "string") {
			// Module must be consistently referenced in the same runtimes
			/** @type {{ originModule: Module, runtimeCondition: RuntimeSpec }[]} */
			const otherRuntimeConnections = [];
			outer: for (const [
				originModule,
				connections
			] of incomingConnectionsFromModules) {
				/** @type {false | RuntimeSpec} */
				let currentRuntimeCondition = false;
				for (const connection of connections) {
					const runtimeCondition = filterRuntime(runtime, runtime => {
						return connection.isTargetActive(runtime);
					});
					if (runtimeCondition === false) continue;
					if (runtimeCondition === true) continue outer;
					if (currentRuntimeCondition !== false) {
						currentRuntimeCondition = mergeRuntime(
							currentRuntimeCondition,
							runtimeCondition
						);
					} else {
						currentRuntimeCondition = runtimeCondition;
					}
				}
				if (currentRuntimeCondition !== false) {
					otherRuntimeConnections.push({
						originModule,
						runtimeCondition: currentRuntimeCondition
					});
				}
			}
			if (otherRuntimeConnections.length > 0) {
				/**
				 * @param {RequestShortener} requestShortener request shortener
				 * @returns {string} problem description
				 */
				const problem = requestShortener => {
					return `Module ${module.readableIdentifier(
						requestShortener
					)} is runtime-dependent referenced by these modules: ${Array.from(
						otherRuntimeConnections,
						({ originModule, runtimeCondition }) =>
							`${originModule.readableIdentifier(
								requestShortener
							)} (expected runtime ${runtimeToString(
								runtime
							)}, module is only referenced in ${runtimeToString(
								/** @type {RuntimeSpec} */ (runtimeCondition)
							)})`
					).join(", ")}`;
				};
				statistics.incorrectRuntimeCondition++;
				failureCache.set(module, problem); // cache failures for performance
				return problem;
			}
		}

		let backup;
		if (avoidMutateOnFailure) {
			backup = config.snapshot();
		}

		// Add the module
		config.add(module);

		incomingModules.sort(compareModulesByIdentifier);

		// Every module which depends on the added module must be in the configuration too.
		for (const originModule of incomingModules) {
			const problem = this._tryToAdd(
				compilation,
				config,
				originModule,
				runtime,
				activeRuntime,
				possibleModules,
				candidates,
				failureCache,
				chunkGraph,
				false,
				statistics
			);
			if (problem) {
				if (backup !== undefined) config.rollback(backup);
				statistics.importerFailed++;
				failureCache.set(module, problem); // cache failures for performance
				return problem;
			}
		}

		// Add imports to possible candidates list
		for (const imp of this._getImports(compilation, module, runtime)) {
			candidates.add(imp);
		}
		statistics.added++;
		return null;
	}
}

class ConcatConfiguration {
	/**
	 * @param {Module} rootModule the root module
	 * @param {RuntimeSpec} runtime the runtime
	 */
	constructor(rootModule, runtime) {
		this.rootModule = rootModule;
		this.runtime = runtime;
		/** @type {Set<Module>} */
		this.modules = new Set();
		this.modules.add(rootModule);
		/** @type {Map<Module, Module | function(RequestShortener): string>} */
		this.warnings = new Map();
	}

	/**
	 * @param {Module} module the module
	 */
	add(module) {
		this.modules.add(module);
	}

	/**
	 * @param {Module} module the module
	 * @returns {boolean} true, when the module is in the module set
	 */
	has(module) {
		return this.modules.has(module);
	}

	isEmpty() {
		return this.modules.size === 1;
	}

	/**
	 * @param {Module} module the module
	 * @param {Module | function(RequestShortener): string} problem the problem
	 */
	addWarning(module, problem) {
		this.warnings.set(module, problem);
	}

	/**
	 * @returns {Map<Module, Module | function(RequestShortener): string>} warnings
	 */
	getWarningsSorted() {
		return new Map(
			Array.from(this.warnings).sort((a, b) => {
				const ai = a[0].identifier();
				const bi = b[0].identifier();
				if (ai < bi) return -1;
				if (ai > bi) return 1;
				return 0;
			})
		);
	}

	/**
	 * @returns {Set<Module>} modules as set
	 */
	getModules() {
		return this.modules;
	}

	snapshot() {
		return this.modules.size;
	}

	rollback(snapshot) {
		const modules = this.modules;
		for (const m of modules) {
			if (snapshot === 0) {
				modules.delete(m);
			} else {
				snapshot--;
			}
		}
	}
}

module.exports = ModuleConcatenationPlugin;
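Module concatenation is controlled by configuration rather than direct instantiation; a minimal sketch assuming webpack 5's optimization.concatenateModules option (enabled by default in production mode):

// webpack.config.js (illustrative sketch)
module.exports = {
	optimization: {
		// enables the scope-hoisting pass implemented by the plugin above
		concatenateModules: true
	}
};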
467
my-app/node_modules/webpack/lib/optimize/RealContentHashPlugin.js
generated
vendored
Executable file
@@ -0,0 +1,467 @@
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Tobias Koppers @sokra
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
const { SyncBailHook } = require("tapable");
|
||||
const { RawSource, CachedSource, CompatSource } = require("webpack-sources");
|
||||
const Compilation = require("../Compilation");
|
||||
const WebpackError = require("../WebpackError");
|
||||
const { compareSelect, compareStrings } = require("../util/comparators");
|
||||
const createHash = require("../util/createHash");
|
||||
|
||||
/** @typedef {import("webpack-sources").Source} Source */
|
||||
/** @typedef {import("../Cache").Etag} Etag */
|
||||
/** @typedef {import("../Compilation").AssetInfo} AssetInfo */
|
||||
/** @typedef {import("../Compiler")} Compiler */
|
||||
/** @typedef {typeof import("../util/Hash")} Hash */
|
||||
|
||||
const EMPTY_SET = new Set();
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @param {T | T[]} itemOrItems item or items
|
||||
* @param {Set<T>} list list
|
||||
*/
|
||||
const addToList = (itemOrItems, list) => {
|
||||
if (Array.isArray(itemOrItems)) {
|
||||
for (const item of itemOrItems) {
|
||||
list.add(item);
|
||||
}
|
||||
} else if (itemOrItems) {
|
||||
list.add(itemOrItems);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @param {T[]} input list
|
||||
* @param {function(T): Buffer} fn map function
|
||||
* @returns {Buffer[]} buffers without duplicates
|
||||
*/
|
||||
const mapAndDeduplicateBuffers = (input, fn) => {
|
||||
// Buffer.equals compares size first so this should be efficient enough
|
||||
// If it becomes a performance problem we can use a map and group by size
|
||||
// instead of looping over all assets.
|
||||
const result = [];
|
||||
outer: for (const value of input) {
|
||||
const buf = fn(value);
|
||||
for (const other of result) {
|
||||
if (buf.equals(other)) continue outer;
|
||||
}
|
||||
result.push(buf);
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Escapes regular expression metacharacters
|
||||
* @param {string} str String to quote
|
||||
* @returns {string} Escaped string
|
||||
*/
|
||||
const quoteMeta = str => {
|
||||
return str.replace(/[-[\]\\/{}()*+?.^$|]/g, "\\$&");
|
||||
};
|
||||
|
||||
const cachedSourceMap = new WeakMap();
|
||||
|
||||
/**
|
||||
* @param {Source} source source
|
||||
* @returns {CachedSource} cached source
|
||||
*/
|
||||
const toCachedSource = source => {
|
||||
if (source instanceof CachedSource) {
|
||||
return source;
|
||||
}
|
||||
const entry = cachedSourceMap.get(source);
|
||||
if (entry !== undefined) return entry;
|
||||
const newSource = new CachedSource(CompatSource.from(source));
|
||||
cachedSourceMap.set(source, newSource);
|
||||
return newSource;
|
||||
};
|
||||
|
||||
/** @typedef {Set<string>} OwnHashes */
|
||||
/** @typedef {Set<string>} ReferencedHashes */
|
||||
/** @typedef {Set<string>} Hashes */
|
||||
|
||||
/**
|
||||
* @typedef {Object} AssetInfoForRealContentHash
|
||||
* @property {string} name
|
||||
* @property {AssetInfo} info
|
||||
* @property {Source} source
|
||||
* @property {RawSource | undefined} newSource
|
||||
* @property {RawSource | undefined} newSourceWithoutOwn
|
||||
* @property {string} content
|
||||
* @property {OwnHashes | undefined} ownHashes
|
||||
* @property {Promise<void> | undefined} contentComputePromise
|
||||
* @property {Promise<void> | undefined} contentComputeWithoutOwnPromise
|
||||
* @property {ReferencedHashes | undefined} referencedHashes
|
||||
* @property {Hashes} hashes
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} CompilationHooks
|
||||
* @property {SyncBailHook<[Buffer[], string], string>} updateHash
|
||||
*/
|
||||
|
||||
/** @type {WeakMap<Compilation, CompilationHooks>} */
|
||||
const compilationHooksMap = new WeakMap();
|
||||
|
||||
class RealContentHashPlugin {
|
||||
/**
|
||||
* @param {Compilation} compilation the compilation
|
||||
* @returns {CompilationHooks} the attached hooks
|
||||
*/
|
||||
static getCompilationHooks(compilation) {
|
||||
if (!(compilation instanceof Compilation)) {
|
||||
throw new TypeError(
|
||||
"The 'compilation' argument must be an instance of Compilation"
|
||||
);
|
||||
}
|
||||
let hooks = compilationHooksMap.get(compilation);
|
||||
if (hooks === undefined) {
|
||||
hooks = {
|
||||
updateHash: new SyncBailHook(["content", "oldHash"])
|
||||
};
|
||||
compilationHooksMap.set(compilation, hooks);
|
||||
}
|
||||
return hooks;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Object} options options object
|
||||
* @param {string | Hash} options.hashFunction the hash function to use
|
||||
* @param {string} options.hashDigest the hash digest to use
|
||||
*/
|
||||
constructor({ hashFunction, hashDigest }) {
|
||||
this._hashFunction = hashFunction;
|
||||
this._hashDigest = hashDigest;
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply the plugin
|
||||
* @param {Compiler} compiler the compiler instance
|
||||
* @returns {void}
|
||||
*/
|
||||
apply(compiler) {
|
||||
compiler.hooks.compilation.tap("RealContentHashPlugin", compilation => {
|
||||
const cacheAnalyse = compilation.getCache(
|
||||
"RealContentHashPlugin|analyse"
|
||||
);
|
||||
const cacheGenerate = compilation.getCache(
|
||||
"RealContentHashPlugin|generate"
|
||||
);
|
||||
const hooks = RealContentHashPlugin.getCompilationHooks(compilation);
|
||||
compilation.hooks.processAssets.tapPromise(
|
||||
{
|
||||
name: "RealContentHashPlugin",
|
||||
stage: Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_HASH
|
||||
},
|
||||
async () => {
|
||||
const assets = compilation.getAssets();
|
||||
/** @type {AssetInfoForRealContentHash[]} */
|
||||
const assetsWithInfo = [];
|
||||
/** @type {Map<string, [AssetInfoForRealContentHash]>} */
|
||||
const hashToAssets = new Map();
|
||||
for (const { source, info, name } of assets) {
|
||||
const cachedSource = toCachedSource(source);
|
||||
const content = /** @type {string} */ (cachedSource.source());
|
||||
/** @type {Hashes} */
|
||||
const hashes = new Set();
|
||||
addToList(info.contenthash, hashes);
|
||||
/** @type {AssetInfoForRealContentHash} */
|
||||
const data = {
|
||||
name,
|
||||
info,
|
||||
source: cachedSource,
|
||||
newSource: undefined,
|
||||
newSourceWithoutOwn: undefined,
|
||||
content,
|
||||
ownHashes: undefined,
|
||||
contentComputePromise: undefined,
|
||||
contentComputeWithoutOwnPromise: undefined,
|
||||
referencedHashes: undefined,
|
||||
hashes
|
||||
};
|
||||
assetsWithInfo.push(data);
|
||||
for (const hash of hashes) {
|
||||
							const list = hashToAssets.get(hash);
							if (list === undefined) {
								hashToAssets.set(hash, [data]);
							} else {
								list.push(data);
							}
						}
					}
					if (hashToAssets.size === 0) return;
					const hashRegExp = new RegExp(
						Array.from(hashToAssets.keys(), quoteMeta).join("|"),
						"g"
					);
					await Promise.all(
						assetsWithInfo.map(async asset => {
							const { name, source, content, hashes } = asset;
							if (Buffer.isBuffer(content)) {
								asset.referencedHashes = EMPTY_SET;
								asset.ownHashes = EMPTY_SET;
								return;
							}
							const etag = cacheAnalyse.mergeEtags(
								cacheAnalyse.getLazyHashedEtag(source),
								Array.from(hashes).join("|")
							);
							[asset.referencedHashes, asset.ownHashes] =
								await cacheAnalyse.providePromise(name, etag, () => {
									const referencedHashes = new Set();
									let ownHashes = new Set();
									const inContent = content.match(hashRegExp);
									if (inContent) {
										for (const hash of inContent) {
											if (hashes.has(hash)) {
												ownHashes.add(hash);
												continue;
											}
											referencedHashes.add(hash);
										}
									}
									return [referencedHashes, ownHashes];
								});
						})
					);
					/**
					 * @param {string} hash the hash
					 * @returns {undefined | ReferencedHashes} the referenced hashes
					 */
					const getDependencies = hash => {
						const assets = hashToAssets.get(hash);
						if (!assets) {
							const referencingAssets = assetsWithInfo.filter(asset =>
								/** @type {ReferencedHashes} */ (asset.referencedHashes).has(
									hash
								)
							);
							const err = new WebpackError(`RealContentHashPlugin
Some kind of unexpected caching problem occurred.
An asset was cached with a reference to another asset (${hash}) that's not in the compilation anymore.
Either the asset was incorrectly cached, or the referenced asset should also be restored from cache.
Referenced by:
${referencingAssets
	.map(a => {
		const match = new RegExp(`.{0,20}${quoteMeta(hash)}.{0,20}`).exec(
			a.content
		);
		return ` - ${a.name}: ...${match ? match[0] : "???"}...`;
	})
	.join("\n")}`);
							compilation.errors.push(err);
							return undefined;
						}
						const hashes = new Set();
						for (const { referencedHashes, ownHashes } of assets) {
							if (!(/** @type {OwnHashes} */ (ownHashes).has(hash))) {
								for (const hash of /** @type {OwnHashes} */ (ownHashes)) {
									hashes.add(hash);
								}
							}
							for (const hash of /** @type {ReferencedHashes} */ (
								referencedHashes
							)) {
								hashes.add(hash);
							}
						}
						return hashes;
					};
					/**
					 * @param {string} hash the hash
					 * @returns {string} the hash info
					 */
					const hashInfo = hash => {
						const assets = hashToAssets.get(hash);
						return `${hash} (${Array.from(
							/** @type {AssetInfoForRealContentHash[]} */ (assets),
							a => a.name
						)})`;
					};
					const hashesInOrder = new Set();
					for (const hash of hashToAssets.keys()) {
						/**
						 * @param {string} hash the hash
						 * @param {Set<string>} stack stack of hashes
						 */
						const add = (hash, stack) => {
							const deps = getDependencies(hash);
							if (!deps) return;
							stack.add(hash);
							for (const dep of deps) {
								if (hashesInOrder.has(dep)) continue;
								if (stack.has(dep)) {
									throw new Error(
										`Circular hash dependency ${Array.from(
											stack,
											hashInfo
										).join(" -> ")} -> ${hashInfo(dep)}`
									);
								}
								add(dep, stack);
							}
							hashesInOrder.add(hash);
							stack.delete(hash);
						};
						if (hashesInOrder.has(hash)) continue;
						add(hash, new Set());
					}
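					// Note: `add` above is a depth-first traversal with cycle
					// detection, so `hashesInOrder` now lists every hash after all
					// of the hashes it depends on. The loop further below relies on
					// this ordering when computing the replacement hashes.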
					const hashToNewHash = new Map();
					/**
					 * @param {AssetInfoForRealContentHash} asset asset info
					 * @returns {Etag} etag
					 */
					const getEtag = asset =>
						cacheGenerate.mergeEtags(
							cacheGenerate.getLazyHashedEtag(asset.source),
							Array.from(
								/** @type {ReferencedHashes} */ (asset.referencedHashes),
								hash => hashToNewHash.get(hash)
							).join("|")
						);
					/**
					 * @param {AssetInfoForRealContentHash} asset asset info
					 * @returns {Promise<void>}
					 */
					const computeNewContent = asset => {
						if (asset.contentComputePromise) return asset.contentComputePromise;
						return (asset.contentComputePromise = (async () => {
							if (
								/** @type {OwnHashes} */ (asset.ownHashes).size > 0 ||
								Array.from(
									/** @type {ReferencedHashes} */
									(asset.referencedHashes)
								).some(hash => hashToNewHash.get(hash) !== hash)
							) {
								const identifier = asset.name;
								const etag = getEtag(asset);
								asset.newSource = await cacheGenerate.providePromise(
									identifier,
									etag,
									() => {
										const newContent = asset.content.replace(hashRegExp, hash =>
											hashToNewHash.get(hash)
										);
										return new RawSource(newContent);
									}
								);
							}
						})());
					};
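					// Unlike computeNewContent above, the variant below blanks out
					// the asset's own hashes before hashing, so the new hash is
					// derived from content that does not depend on itself.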
					/**
					 * @param {AssetInfoForRealContentHash} asset asset info
					 * @returns {Promise<void>}
					 */
					const computeNewContentWithoutOwn = asset => {
						if (asset.contentComputeWithoutOwnPromise)
							return asset.contentComputeWithoutOwnPromise;
						return (asset.contentComputeWithoutOwnPromise = (async () => {
							if (
								/** @type {OwnHashes} */ (asset.ownHashes).size > 0 ||
								Array.from(
									/** @type {ReferencedHashes} */
									(asset.referencedHashes)
								).some(hash => hashToNewHash.get(hash) !== hash)
							) {
								const identifier = asset.name + "|without-own";
								const etag = getEtag(asset);
								asset.newSourceWithoutOwn = await cacheGenerate.providePromise(
									identifier,
									etag,
									() => {
										const newContent = asset.content.replace(
											hashRegExp,
											hash => {
												if (
													/** @type {OwnHashes} */ (asset.ownHashes).has(hash)
												) {
													return "";
												}
												return hashToNewHash.get(hash);
											}
										);
										return new RawSource(newContent);
									}
								);
							}
						})());
					};
					const comparator = compareSelect(a => a.name, compareStrings);
					for (const oldHash of hashesInOrder) {
						const assets =
							/** @type {AssetInfoForRealContentHash[]} */
							(hashToAssets.get(oldHash));
						assets.sort(comparator);
						await Promise.all(
							assets.map(asset =>
								/** @type {OwnHashes} */ (asset.ownHashes).has(oldHash)
									? computeNewContentWithoutOwn(asset)
									: computeNewContent(asset)
							)
						);
						const assetsContent = mapAndDeduplicateBuffers(assets, asset => {
							if (/** @type {OwnHashes} */ (asset.ownHashes).has(oldHash)) {
								return asset.newSourceWithoutOwn
									? asset.newSourceWithoutOwn.buffer()
									: asset.source.buffer();
							} else {
								return asset.newSource
									? asset.newSource.buffer()
									: asset.source.buffer();
							}
						});
						let newHash = hooks.updateHash.call(assetsContent, oldHash);
						if (!newHash) {
							const hash = createHash(this._hashFunction);
							if (compilation.outputOptions.hashSalt) {
								hash.update(compilation.outputOptions.hashSalt);
							}
							for (const content of assetsContent) {
								hash.update(content);
							}
							const digest = hash.digest(this._hashDigest);
							newHash = /** @type {string} */ (digest.slice(0, oldHash.length));
						}
						hashToNewHash.set(oldHash, newHash);
					}
					await Promise.all(
						assetsWithInfo.map(async asset => {
							await computeNewContent(asset);
							const newName = asset.name.replace(hashRegExp, hash =>
								hashToNewHash.get(hash)
							);

							const infoUpdate = {};
							const hash = asset.info.contenthash;
							infoUpdate.contenthash = Array.isArray(hash)
								? hash.map(hash => hashToNewHash.get(hash))
								: hashToNewHash.get(hash);

							if (asset.newSource !== undefined) {
								compilation.updateAsset(
									asset.name,
									asset.newSource,
									infoUpdate
								);
							} else {
								compilation.updateAsset(asset.name, asset.source, infoUpdate);
							}

							if (asset.name !== newName) {
								compilation.renameAsset(asset.name, newName);
							}
						})
					);
				}
			);
		});
	}
}

module.exports = RealContentHashPlugin;
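Usage note: RealContentHashPlugin is applied internally by webpack and is normally controlled through the optimization.realContentHash flag rather than instantiated by hand. A minimal sketch of a config that relies on it (entry and filename values are illustrative):

// webpack.config.js (sketch; entry/filename are placeholders)
module.exports = {
	mode: "production",
	entry: "./src/index.js",
	output: {
		filename: "[name].[contenthash].js"
	},
	optimization: {
		// re-hash emitted assets from their final bytes so that
		// [contenthash] reflects the real content of each file
		realContentHash: true
	}
};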
57
my-app/node_modules/webpack/lib/optimize/RemoveEmptyChunksPlugin.js
generated
vendored
Executable file
@@ -0,0 +1,57 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { STAGE_BASIC, STAGE_ADVANCED } = require("../OptimizationStages");

/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Compiler")} Compiler */

class RemoveEmptyChunksPlugin {
	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.compilation.tap("RemoveEmptyChunksPlugin", compilation => {
			/**
			 * @param {Iterable<Chunk>} chunks the chunks array
			 * @returns {void}
			 */
			const handler = chunks => {
				const chunkGraph = compilation.chunkGraph;
				for (const chunk of chunks) {
					if (
						chunkGraph.getNumberOfChunkModules(chunk) === 0 &&
						!chunk.hasRuntime() &&
						chunkGraph.getNumberOfEntryModules(chunk) === 0
					) {
						compilation.chunkGraph.disconnectChunk(chunk);
						compilation.chunks.delete(chunk);
					}
				}
			};

			// TODO do it once
			compilation.hooks.optimizeChunks.tap(
				{
					name: "RemoveEmptyChunksPlugin",
					stage: STAGE_BASIC
				},
				handler
			);
			compilation.hooks.optimizeChunks.tap(
				{
					name: "RemoveEmptyChunksPlugin",
					stage: STAGE_ADVANCED
				},
				handler
			);
		});
	}
}
module.exports = RemoveEmptyChunksPlugin;
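Usage note: this plugin is part of webpack's defaults and is toggled by optimization.removeEmptyChunks; as the handler above shows, a chunk is dropped only when it has no modules, no runtime, and no entry modules. Minimal sketch (flag shown with its default value):

// webpack.config.js (sketch)
module.exports = {
	optimization: {
		// drop chunks that end up with no modules, no runtime,
		// and no entry modules after other optimizations ran
		removeEmptyChunks: true
	}
};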
127
my-app/node_modules/webpack/lib/optimize/RemoveParentModulesPlugin.js
generated
vendored
Executable file
@@ -0,0 +1,127 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { STAGE_BASIC } = require("../OptimizationStages");
const Queue = require("../util/Queue");
const { intersect } = require("../util/SetHelpers");

/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../ChunkGroup")} ChunkGroup */
/** @typedef {import("../Compiler")} Compiler */

class RemoveParentModulesPlugin {
	/**
	 * @param {Compiler} compiler the compiler
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.compilation.tap("RemoveParentModulesPlugin", compilation => {
			/**
			 * @param {Iterable<Chunk>} chunks the chunks
			 * @param {ChunkGroup[]} chunkGroups the chunk groups
			 */
			const handler = (chunks, chunkGroups) => {
				const chunkGraph = compilation.chunkGraph;
				const queue = new Queue();
				const availableModulesMap = new WeakMap();

				for (const chunkGroup of compilation.entrypoints.values()) {
					// initialize available modules for chunks without parents
					availableModulesMap.set(chunkGroup, new Set());
					for (const child of chunkGroup.childrenIterable) {
						queue.enqueue(child);
					}
				}
				for (const chunkGroup of compilation.asyncEntrypoints) {
					// initialize available modules for chunks without parents
					availableModulesMap.set(chunkGroup, new Set());
					for (const child of chunkGroup.childrenIterable) {
						queue.enqueue(child);
					}
				}

				while (queue.length > 0) {
					const chunkGroup = queue.dequeue();
					let availableModules = availableModulesMap.get(chunkGroup);
					let changed = false;
					for (const parent of chunkGroup.parentsIterable) {
						const availableModulesInParent = availableModulesMap.get(parent);
						if (availableModulesInParent !== undefined) {
							// If we know the available modules in parent: process these
							if (availableModules === undefined) {
								// if we have no info of our own yet: create a new entry
								availableModules = new Set(availableModulesInParent);
								for (const chunk of parent.chunks) {
									for (const m of chunkGraph.getChunkModulesIterable(chunk)) {
										availableModules.add(m);
									}
								}
								availableModulesMap.set(chunkGroup, availableModules);
								changed = true;
							} else {
								for (const m of availableModules) {
									if (
										!chunkGraph.isModuleInChunkGroup(m, parent) &&
										!availableModulesInParent.has(m)
									) {
										availableModules.delete(m);
										changed = true;
									}
								}
							}
						}
					}
					if (changed) {
						// if something changed: enqueue our children
						for (const child of chunkGroup.childrenIterable) {
							queue.enqueue(child);
						}
					}
				}

				// now we have available modules for every chunk
				for (const chunk of chunks) {
					const availableModulesSets = Array.from(
						chunk.groupsIterable,
						chunkGroup => availableModulesMap.get(chunkGroup)
					);
					if (availableModulesSets.some(s => s === undefined)) continue; // No info about this chunk group
					const availableModules =
						availableModulesSets.length === 1
							? availableModulesSets[0]
							: intersect(availableModulesSets);
					const numberOfModules = chunkGraph.getNumberOfChunkModules(chunk);
					const toRemove = new Set();
					if (numberOfModules < availableModules.size) {
						for (const m of chunkGraph.getChunkModulesIterable(chunk)) {
							if (availableModules.has(m)) {
								toRemove.add(m);
							}
						}
					} else {
						for (const m of availableModules) {
							if (chunkGraph.isModuleInChunk(m, chunk)) {
								toRemove.add(m);
							}
						}
					}
					for (const module of toRemove) {
						chunkGraph.disconnectChunkAndModule(chunk, module);
					}
				}
			};
			compilation.hooks.optimizeChunks.tap(
				{
					name: "RemoveParentModulesPlugin",
					stage: STAGE_BASIC
				},
				handler
			);
		});
	}
}
module.exports = RemoveParentModulesPlugin;
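Usage note: RemoveParentModulesPlugin backs the optimization.removeAvailableModules flag; it deletes a module from a chunk when every chunk group reaching that chunk is already guaranteed to have the module available from a parent. Minimal sketch (assuming the flag name from webpack 5's options):

// webpack.config.js (sketch)
module.exports = {
	optimization: {
		// remove modules from child chunks when all parents
		// are guaranteed to include them already
		removeAvailableModules: true
	}
};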
52
my-app/node_modules/webpack/lib/optimize/RuntimeChunkPlugin.js
generated
vendored
Executable file
@@ -0,0 +1,52 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

/** @typedef {import("../Compilation").EntryData} EntryData */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Entrypoint")} Entrypoint */

class RuntimeChunkPlugin {
	constructor(options) {
		this.options = {
			/**
			 * @param {Entrypoint} entrypoint the entrypoint
			 * @returns {string} runtime chunk name
			 */
			name: entrypoint => `runtime~${entrypoint.name}`,
			...options
		};
	}

	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.thisCompilation.tap("RuntimeChunkPlugin", compilation => {
			compilation.hooks.addEntry.tap(
				"RuntimeChunkPlugin",
				(_, { name: entryName }) => {
					if (entryName === undefined) return;
					const data =
						/** @type {EntryData} */
						(compilation.entries.get(entryName));
					if (data.options.runtime === undefined && !data.options.dependOn) {
						// Determine runtime chunk name
						let name = this.options.name;
						if (typeof name === "function") {
							name = name({ name: entryName });
						}
						data.options.runtime = name;
					}
				}
			);
		});
	}
}

module.exports = RuntimeChunkPlugin;
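Usage note: RuntimeChunkPlugin implements the optimization.runtimeChunk option, which extracts the webpack runtime into its own chunk instead of inlining it into each entry chunk. Minimal sketch mirroring the plugin's default naming:

// webpack.config.js (sketch)
module.exports = {
	optimization: {
		runtimeChunk: {
			// same default as the plugin above: "runtime~<entry name>"
			name: entrypoint => `runtime~${entrypoint.name}`
		}
	}
};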
346
my-app/node_modules/webpack/lib/optimize/SideEffectsFlagPlugin.js
generated
vendored
Executable file
@@ -0,0 +1,346 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const glob2regexp = require("glob-to-regexp");
const {
	JAVASCRIPT_MODULE_TYPE_AUTO,
	JAVASCRIPT_MODULE_TYPE_ESM,
	JAVASCRIPT_MODULE_TYPE_DYNAMIC
} = require("../ModuleTypeConstants");
const { STAGE_DEFAULT } = require("../OptimizationStages");
const HarmonyExportImportedSpecifierDependency = require("../dependencies/HarmonyExportImportedSpecifierDependency");
const HarmonyImportSpecifierDependency = require("../dependencies/HarmonyImportSpecifierDependency");
const formatLocation = require("../formatLocation");

/** @typedef {import("estree").ModuleDeclaration} ModuleDeclaration */
/** @typedef {import("estree").Statement} Statement */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Dependency")} Dependency */
/** @typedef {import("../Module")} Module */
/** @typedef {import("../javascript/JavascriptParser")} JavascriptParser */

/**
 * @typedef {Object} ExportInModule
 * @property {Module} module the module
 * @property {string} exportName the name of the export
 * @property {boolean} checked if the export is conditional
 */

/**
 * @typedef {Object} ReexportInfo
 * @property {Map<string, ExportInModule[]>} static
 * @property {Map<Module, Set<string>>} dynamic
 */

/** @type {WeakMap<any, Map<string, RegExp>>} */
const globToRegexpCache = new WeakMap();

/**
 * @param {string} glob the pattern
 * @param {Map<string, RegExp>} cache the glob to RegExp cache
 * @returns {RegExp} a regular expression
 */
const globToRegexp = (glob, cache) => {
	const cacheEntry = cache.get(glob);
	if (cacheEntry !== undefined) return cacheEntry;
	if (!glob.includes("/")) {
		glob = `**/${glob}`;
	}
	const baseRegexp = glob2regexp(glob, { globstar: true, extended: true });
	const regexpSource = baseRegexp.source;
	const regexp = new RegExp("^(\\./)?" + regexpSource.slice(1));
	cache.set(glob, regexp);
	return regexp;
};
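// Illustration (not part of the original source): a package.json value such
// as "sideEffects": "*.css" contains no "/", so it is widened to "**/*.css"
// and therefore matches "./src/styles/app.css" as well as "app.css"; the
// "^(\./)?" prefix added above makes a leading "./" in the path optional.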
const PLUGIN_NAME = "SideEffectsFlagPlugin";

class SideEffectsFlagPlugin {
	/**
	 * @param {boolean} analyseSource analyse source code for side effects
	 */
	constructor(analyseSource = true) {
		this._analyseSource = analyseSource;
	}
	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		let cache = globToRegexpCache.get(compiler.root);
		if (cache === undefined) {
			cache = new Map();
			globToRegexpCache.set(compiler.root, cache);
		}
		compiler.hooks.compilation.tap(
			PLUGIN_NAME,
			(compilation, { normalModuleFactory }) => {
				const moduleGraph = compilation.moduleGraph;
				normalModuleFactory.hooks.module.tap(PLUGIN_NAME, (module, data) => {
					const resolveData = data.resourceResolveData;
					if (
						resolveData &&
						resolveData.descriptionFileData &&
						resolveData.relativePath
					) {
						const sideEffects = resolveData.descriptionFileData.sideEffects;
						if (sideEffects !== undefined) {
							if (module.factoryMeta === undefined) {
								module.factoryMeta = {};
							}
							const hasSideEffects = SideEffectsFlagPlugin.moduleHasSideEffects(
								resolveData.relativePath,
								sideEffects,
								cache
							);
							module.factoryMeta.sideEffectFree = !hasSideEffects;
						}
					}

					return module;
				});
				normalModuleFactory.hooks.module.tap(PLUGIN_NAME, (module, data) => {
					if (typeof data.settings.sideEffects === "boolean") {
						if (module.factoryMeta === undefined) {
							module.factoryMeta = {};
						}
						module.factoryMeta.sideEffectFree = !data.settings.sideEffects;
					}
					return module;
				});
				if (this._analyseSource) {
					/**
					 * @param {JavascriptParser} parser the parser
					 * @returns {void}
					 */
					const parserHandler = parser => {
						/** @type {undefined | Statement | ModuleDeclaration} */
						let sideEffectsStatement;
						parser.hooks.program.tap(PLUGIN_NAME, () => {
							sideEffectsStatement = undefined;
						});
						parser.hooks.statement.tap(
							{ name: PLUGIN_NAME, stage: -100 },
							statement => {
								if (sideEffectsStatement) return;
								if (parser.scope.topLevelScope !== true) return;
								switch (statement.type) {
									case "ExpressionStatement":
										if (
											!parser.isPure(statement.expression, statement.range[0])
										) {
											sideEffectsStatement = statement;
										}
										break;
									case "IfStatement":
									case "WhileStatement":
									case "DoWhileStatement":
										if (!parser.isPure(statement.test, statement.range[0])) {
											sideEffectsStatement = statement;
										}
										// statement hook will be called for child statements too
										break;
									case "ForStatement":
										if (
											!parser.isPure(statement.init, statement.range[0]) ||
											!parser.isPure(
												statement.test,
												statement.init
													? statement.init.range[1]
													: statement.range[0]
											) ||
											!parser.isPure(
												statement.update,
												statement.test
													? statement.test.range[1]
													: statement.init
													? statement.init.range[1]
													: statement.range[0]
											)
										) {
											sideEffectsStatement = statement;
										}
										// statement hook will be called for child statements too
										break;
									case "SwitchStatement":
										if (
											!parser.isPure(statement.discriminant, statement.range[0])
										) {
											sideEffectsStatement = statement;
										}
										// statement hook will be called for child statements too
										break;
									case "VariableDeclaration":
									case "ClassDeclaration":
									case "FunctionDeclaration":
										if (!parser.isPure(statement, statement.range[0])) {
											sideEffectsStatement = statement;
										}
										break;
									case "ExportNamedDeclaration":
									case "ExportDefaultDeclaration":
										if (
											!parser.isPure(statement.declaration, statement.range[0])
										) {
											sideEffectsStatement = statement;
										}
										break;
									case "LabeledStatement":
									case "BlockStatement":
										// statement hook will be called for child statements too
										break;
									case "EmptyStatement":
										break;
									case "ExportAllDeclaration":
									case "ImportDeclaration":
										// imports will be handled by the dependencies
										break;
									default:
										sideEffectsStatement = statement;
										break;
								}
							}
						);
						parser.hooks.finish.tap(PLUGIN_NAME, () => {
							if (sideEffectsStatement === undefined) {
								parser.state.module.buildMeta.sideEffectFree = true;
							} else {
								const { loc, type } = sideEffectsStatement;
								moduleGraph
									.getOptimizationBailout(parser.state.module)
									.push(
										() =>
											`Statement (${type}) with side effects in source code at ${formatLocation(
												loc
											)}`
									);
							}
						});
					};
					for (const key of [
						JAVASCRIPT_MODULE_TYPE_AUTO,
						JAVASCRIPT_MODULE_TYPE_ESM,
						JAVASCRIPT_MODULE_TYPE_DYNAMIC
					]) {
						normalModuleFactory.hooks.parser
							.for(key)
							.tap(PLUGIN_NAME, parserHandler);
					}
				}
				compilation.hooks.optimizeDependencies.tap(
					{
						name: PLUGIN_NAME,
						stage: STAGE_DEFAULT
					},
					modules => {
						const logger = compilation.getLogger(
							"webpack.SideEffectsFlagPlugin"
						);

						logger.time("update dependencies");
						for (const module of modules) {
							if (module.getSideEffectsConnectionState(moduleGraph) === false) {
								const exportsInfo = moduleGraph.getExportsInfo(module);
								for (const connection of moduleGraph.getIncomingConnections(
									module
								)) {
									const dep = connection.dependency;
									let isReexport;
									if (
										(isReexport =
											dep instanceof
											HarmonyExportImportedSpecifierDependency) ||
										(dep instanceof HarmonyImportSpecifierDependency &&
											!dep.namespaceObjectAsContext)
									) {
										// TODO improve for export *
										if (isReexport && dep.name) {
											const exportInfo = moduleGraph.getExportInfo(
												/** @type {Module} */ (connection.originModule),
												dep.name
											);
											exportInfo.moveTarget(
												moduleGraph,
												({ module }) =>
													module.getSideEffectsConnectionState(moduleGraph) ===
													false,
												({ module: newModule, export: exportName }) => {
													moduleGraph.updateModule(dep, newModule);
													moduleGraph.addExplanation(
														dep,
														"(skipped side-effect-free modules)"
													);
													const ids = dep.getIds(moduleGraph);
													dep.setIds(
														moduleGraph,
														exportName
															? [...exportName, ...ids.slice(1)]
															: ids.slice(1)
													);
													return moduleGraph.getConnection(dep);
												}
											);
											continue;
										}
										// TODO improve for nested imports
										const ids = dep.getIds(moduleGraph);
										if (ids.length > 0) {
											const exportInfo = exportsInfo.getExportInfo(ids[0]);
											const target = exportInfo.getTarget(
												moduleGraph,
												({ module }) =>
													module.getSideEffectsConnectionState(moduleGraph) ===
													false
											);
											if (!target) continue;

											moduleGraph.updateModule(dep, target.module);
											moduleGraph.addExplanation(
												dep,
												"(skipped side-effect-free modules)"
											);
											dep.setIds(
												moduleGraph,
												target.export
													? [...target.export, ...ids.slice(1)]
													: ids.slice(1)
											);
										}
									}
								}
							}
						}
						logger.timeEnd("update dependencies");
					}
				);
			}
		);
	}

	/**
	 * @param {string} moduleName the module name
	 * @param {undefined | boolean | string | string[]} flagValue the flag value
	 * @param {Map<string, RegExp>} cache cache for glob to regexp
	 * @returns {boolean | undefined} true, when the module has side effects, undefined or false when not
	 */
	static moduleHasSideEffects(moduleName, flagValue, cache) {
		switch (typeof flagValue) {
			case "undefined":
				return true;
			case "boolean":
				return flagValue;
			case "string":
				return globToRegexp(flagValue, cache).test(moduleName);
			case "object":
				return flagValue.some(glob =>
					SideEffectsFlagPlugin.moduleHasSideEffects(moduleName, glob, cache)
				);
		}
	}
}
module.exports = SideEffectsFlagPlugin;
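Usage note: the flag this plugin reads comes from the "sideEffects" field of a package's package.json, and the source analysis is controlled by optimization.sideEffects (true also analyses source as above, "flag" trusts package.json only). Minimal sketch (package name and paths are illustrative):

// package.json of a library (sketch)
// {
//   "name": "some-lib",
//   "sideEffects": ["*.css", "./src/polyfills.js"]
// }

// webpack.config.js of the consuming app (sketch)
module.exports = {
	optimization: {
		sideEffects: true
	}
};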
1763
my-app/node_modules/webpack/lib/optimize/SplitChunksPlugin.js
generated
vendored
Executable file
File diff suppressed because it is too large