0.2.0 - Mid migration

This commit is contained in:
Daniel Mason 2022-04-25 14:47:15 +12:00
parent 139e6a915e
commit 7e38fdbd7d
42393 changed files with 5358157 additions and 62 deletions


@@ -0,0 +1,87 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
class AggressiveMergingPlugin {
constructor(options) {
if (
(options !== undefined && typeof options !== "object") ||
Array.isArray(options)
) {
throw new Error(
"Argument should be an options object. To use defaults, pass in nothing.\nFor more info on options, see https://webpack.js.org/plugins/"
);
}
this.options = options || {};
}
apply(compiler) {
const options = this.options;
const minSizeReduce = options.minSizeReduce || 1.5;
compiler.hooks.thisCompilation.tap(
"AggressiveMergingPlugin",
compilation => {
compilation.hooks.optimizeChunksAdvanced.tap(
"AggressiveMergingPlugin",
chunks => {
let combinations = [];
chunks.forEach((a, idx) => {
if (a.canBeInitial()) return;
for (let i = 0; i < idx; i++) {
const b = chunks[i];
if (b.canBeInitial()) continue;
combinations.push({
a,
b,
improvement: undefined
});
}
});
for (const pair of combinations) {
const a = pair.b.size({
chunkOverhead: 0
});
const b = pair.a.size({
chunkOverhead: 0
});
const ab = pair.b.integratedSize(pair.a, {
chunkOverhead: 0
});
let newSize;
if (ab === false) {
// this pair cannot be integrated; flag it and move on to the next pair
pair.improvement = false;
continue;
} else {
newSize = ab;
}
pair.improvement = (a + b) / newSize;
}
combinations = combinations.filter(pair => {
return pair.improvement !== false;
});
combinations.sort((a, b) => {
return b.improvement - a.improvement;
});
const pair = combinations[0];
if (!pair) return;
if (pair.improvement < minSizeReduce) return;
if (pair.b.integrate(pair.a, "aggressive-merge")) {
chunks.splice(chunks.indexOf(pair.a), 1);
return true;
}
}
);
}
);
}
}
module.exports = AggressiveMergingPlugin;
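A minimal usage sketch for reference (assuming the webpack 4 API, where this plugin is re-exported as webpack.optimize.AggressiveMergingPlugin): non-initial chunks are merged whenever the combined size shrinks by at least the minSizeReduce factor.
// usage sketch, not part of the file above (assumes webpack 4)
const webpack = require("webpack");
module.exports = {
  // ...
  plugins: [
    // merge async chunks when (sizeA + sizeB) / mergedSize >= 1.5
    new webpack.optimize.AggressiveMergingPlugin({ minSizeReduce: 1.5 })
  ]
};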


@@ -0,0 +1,294 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const identifierUtils = require("../util/identifier");
const { intersect } = require("../util/SetHelpers");
const validateOptions = require("schema-utils");
const schema = require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.json");
/** @typedef {import("../../declarations/plugins/optimize/AggressiveSplittingPlugin").AggressiveSplittingPluginOptions} AggressiveSplittingPluginOptions */
const moveModuleBetween = (oldChunk, newChunk) => {
return module => {
oldChunk.moveModule(module, newChunk);
};
};
const isNotAEntryModule = entryModule => {
return module => {
return entryModule !== module;
};
};
class AggressiveSplittingPlugin {
/**
* @param {AggressiveSplittingPluginOptions=} options options object
*/
constructor(options) {
if (!options) options = {};
validateOptions(schema, options, "Aggressive Splitting Plugin");
this.options = options;
if (typeof this.options.minSize !== "number") {
this.options.minSize = 30 * 1024;
}
if (typeof this.options.maxSize !== "number") {
this.options.maxSize = 50 * 1024;
}
if (typeof this.options.chunkOverhead !== "number") {
this.options.chunkOverhead = 0;
}
if (typeof this.options.entryChunkMultiplicator !== "number") {
this.options.entryChunkMultiplicator = 1;
}
}
apply(compiler) {
compiler.hooks.thisCompilation.tap(
"AggressiveSplittingPlugin",
compilation => {
let needAdditionalSeal = false;
let newSplits;
let fromAggressiveSplittingSet;
let chunkSplitDataMap;
compilation.hooks.optimize.tap("AggressiveSplittingPlugin", () => {
newSplits = [];
fromAggressiveSplittingSet = new Set();
chunkSplitDataMap = new Map();
});
compilation.hooks.optimizeChunksAdvanced.tap(
"AggressiveSplittingPlugin",
chunks => {
// Precompute stuff
const nameToModuleMap = new Map();
const moduleToNameMap = new Map();
for (const m of compilation.modules) {
const name = identifierUtils.makePathsRelative(
compiler.context,
m.identifier(),
compilation.cache
);
nameToModuleMap.set(name, m);
moduleToNameMap.set(m, name);
}
// Check used chunk ids
const usedIds = new Set();
for (const chunk of chunks) {
usedIds.add(chunk.id);
}
const recordedSplits =
(compilation.records && compilation.records.aggressiveSplits) ||
[];
const usedSplits = newSplits
? recordedSplits.concat(newSplits)
: recordedSplits;
const minSize = this.options.minSize;
const maxSize = this.options.maxSize;
const applySplit = splitData => {
// Cannot split if id is already taken
if (splitData.id !== undefined && usedIds.has(splitData.id)) {
return false;
}
// Get module objects from names
const selectedModules = splitData.modules.map(name =>
nameToModuleMap.get(name)
);
// Do the modules all exist?
if (!selectedModules.every(Boolean)) return false;
// Check if size matches (faster than waiting for hash)
const size = selectedModules.reduce(
(sum, m) => sum + m.size(),
0
);
if (size !== splitData.size) return false;
// get chunks with all modules
const selectedChunks = intersect(
selectedModules.map(m => new Set(m.chunksIterable))
);
// No relevant chunks found
if (selectedChunks.size === 0) return false;
// The found chunk is already the split or similar
if (
selectedChunks.size === 1 &&
Array.from(selectedChunks)[0].getNumberOfModules() ===
selectedModules.length
) {
const chunk = Array.from(selectedChunks)[0];
if (fromAggressiveSplittingSet.has(chunk)) return false;
fromAggressiveSplittingSet.add(chunk);
chunkSplitDataMap.set(chunk, splitData);
return true;
}
// split the chunk into two parts
const newChunk = compilation.addChunk();
newChunk.chunkReason = "aggressive splitted";
for (const chunk of selectedChunks) {
selectedModules.forEach(moveModuleBetween(chunk, newChunk));
chunk.split(newChunk);
chunk.name = null;
}
fromAggressiveSplittingSet.add(newChunk);
chunkSplitDataMap.set(newChunk, splitData);
if (splitData.id !== null && splitData.id !== undefined) {
newChunk.id = splitData.id;
}
return true;
};
// try to restore to recorded splitting
let changed = false;
for (let j = 0; j < usedSplits.length; j++) {
const splitData = usedSplits[j];
if (applySplit(splitData)) changed = true;
}
// for any chunk which isn't split yet, split it and create a new entry
// start with the biggest chunk
const sortedChunks = chunks.slice().sort((a, b) => {
const diff1 = b.modulesSize() - a.modulesSize();
if (diff1) return diff1;
const diff2 = a.getNumberOfModules() - b.getNumberOfModules();
if (diff2) return diff2;
const modulesA = Array.from(a.modulesIterable);
const modulesB = Array.from(b.modulesIterable);
modulesA.sort();
modulesB.sort();
const aI = modulesA[Symbol.iterator]();
const bI = modulesB[Symbol.iterator]();
// eslint-disable-next-line no-constant-condition
while (true) {
const aItem = aI.next();
const bItem = bI.next();
if (aItem.done) return 0;
const aModuleIdentifier = aItem.value.identifier();
const bModuleIdentifier = bItem.value.identifier();
if (aModuleIdentifier > bModuleIdentifier) return -1;
if (aModuleIdentifier < bModuleIdentifier) return 1;
}
});
for (const chunk of sortedChunks) {
if (fromAggressiveSplittingSet.has(chunk)) continue;
const size = chunk.modulesSize();
if (size > maxSize && chunk.getNumberOfModules() > 1) {
const modules = chunk
.getModules()
.filter(isNotAEntryModule(chunk.entryModule))
.sort((a, b) => {
a = a.identifier();
b = b.identifier();
if (a > b) return 1;
if (a < b) return -1;
return 0;
});
const selectedModules = [];
let selectedModulesSize = 0;
for (let k = 0; k < modules.length; k++) {
const module = modules[k];
const newSize = selectedModulesSize + module.size();
if (newSize > maxSize && selectedModulesSize >= minSize) {
break;
}
selectedModulesSize = newSize;
selectedModules.push(module);
}
if (selectedModules.length === 0) continue;
const splitData = {
modules: selectedModules
.map(m => moduleToNameMap.get(m))
.sort(),
size: selectedModulesSize
};
if (applySplit(splitData)) {
newSplits = (newSplits || []).concat(splitData);
changed = true;
}
}
}
if (changed) return true;
}
);
compilation.hooks.recordHash.tap(
"AggressiveSplittingPlugin",
records => {
// 4. save the splits we made to the records
const allSplits = new Set();
const invalidSplits = new Set();
// Check if some splittings are invalid
// We remove invalid splittings and try again
for (const chunk of compilation.chunks) {
const splitData = chunkSplitDataMap.get(chunk);
if (splitData !== undefined) {
if (splitData.hash && chunk.hash !== splitData.hash) {
// The split was applied, but the chunk hash no longer matches,
// so the recorded split is useless now and can be thrown away
invalidSplits.add(splitData);
}
}
}
if (invalidSplits.size > 0) {
records.aggressiveSplits = records.aggressiveSplits.filter(
splitData => !invalidSplits.has(splitData)
);
needAdditionalSeal = true;
} else {
// set hash and id values on all (new) splittings
for (const chunk of compilation.chunks) {
const splitData = chunkSplitDataMap.get(chunk);
if (splitData !== undefined) {
splitData.hash = chunk.hash;
splitData.id = chunk.id;
allSplits.add(splitData);
// set flag for stats
chunk.recorded = true;
}
}
// Also add all unused historical splits (after the used ones)
// They can still be used in some future compilation
const recordedSplits =
compilation.records && compilation.records.aggressiveSplits;
if (recordedSplits) {
for (const splitData of recordedSplits) {
if (!invalidSplits.has(splitData)) allSplits.add(splitData);
}
}
// record all splits
records.aggressiveSplits = Array.from(allSplits);
needAdditionalSeal = false;
}
}
);
compilation.hooks.needAdditionalSeal.tap(
"AggressiveSplittingPlugin",
() => {
if (needAdditionalSeal) {
needAdditionalSeal = false;
return true;
}
}
);
}
);
}
}
module.exports = AggressiveSplittingPlugin;
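A usage sketch (assuming webpack 4): the plugin keeps chunks between minSize and maxSize, and persisting the records file is what keeps the generated splits, their ids and hashes stable across builds.
// usage sketch, not part of the file above (assumes webpack 4)
const path = require("path");
const webpack = require("webpack");
module.exports = {
  // ...
  recordsPath: path.join(__dirname, "records.json"),
  plugins: [
    new webpack.optimize.AggressiveSplittingPlugin({
      minSize: 30 * 1024, // keep splits of at least ~30 KiB (the default)
      maxSize: 50 * 1024 // split chunks larger than ~50 KiB (the default)
    })
  ]
};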


@@ -0,0 +1,66 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const sortByIndex = (a, b) => {
return a.index - b.index;
};
const sortByIndex2 = (a, b) => {
return a.index2 - b.index2;
};
class ChunkModuleIdRangePlugin {
constructor(options) {
this.options = options;
}
apply(compiler) {
const options = this.options;
compiler.hooks.compilation.tap("ChunkModuleIdRangePlugin", compilation => {
compilation.hooks.moduleIds.tap("ChunkModuleIdRangePlugin", modules => {
const chunk = compilation.chunks.find(
chunk => chunk.name === options.name
);
if (!chunk) {
throw new Error(
`ChunkModuleIdRangePlugin: Chunk with name '${options.name}' was not found`
);
}
let chunkModules;
if (options.order) {
chunkModules = Array.from(chunk.modulesIterable);
switch (options.order) {
case "index":
chunkModules.sort(sortByIndex);
break;
case "index2":
chunkModules.sort(sortByIndex2);
break;
default:
throw new Error(
"ChunkModuleIdRangePlugin: unexpected value of order"
);
}
} else {
chunkModules = modules.filter(m => {
return m.chunksIterable.has(chunk);
});
}
let currentId = options.start || 0;
for (let i = 0; i < chunkModules.length; i++) {
const m = chunkModules[i];
if (m.id === null) {
m.id = currentId++;
}
if (options.end && currentId > options.end) break;
}
});
});
}
}
module.exports = ChunkModuleIdRangePlugin;
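A usage sketch; the deep require path below is an assumption based on the webpack 4 source layout.
// usage sketch, not part of the file above
const ChunkModuleIdRangePlugin = require("webpack/lib/optimize/ChunkModuleIdRangePlugin");
module.exports = {
  // ...
  plugins: [
    new ChunkModuleIdRangePlugin({
      name: "vendor", // chunk whose modules get ids from the range
      order: "index", // or "index2"; omit to filter by chunk membership instead
      start: 100, // first id to assign
      end: 200 // stop assigning once an id would exceed this
    })
  ]
};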

File diff suppressed because it is too large


@@ -0,0 +1,70 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const GraphHelpers = require("../GraphHelpers");
class EnsureChunkConditionsPlugin {
apply(compiler) {
compiler.hooks.compilation.tap(
"EnsureChunkConditionsPlugin",
compilation => {
const handler = chunks => {
let changed = false;
for (const module of compilation.modules) {
if (!module.chunkCondition) continue;
const sourceChunks = new Set();
const chunkGroups = new Set();
for (const chunk of module.chunksIterable) {
if (!module.chunkCondition(chunk)) {
sourceChunks.add(chunk);
for (const group of chunk.groupsIterable) {
chunkGroups.add(group);
}
}
}
if (sourceChunks.size === 0) continue;
const targetChunks = new Set();
chunkGroupLoop: for (const chunkGroup of chunkGroups) {
// Can module be placed in a chunk of this group?
for (const chunk of chunkGroup.chunks) {
if (module.chunkCondition(chunk)) {
targetChunks.add(chunk);
continue chunkGroupLoop;
}
}
// We reached the entrypoint: fail
if (chunkGroup.isInitial()) {
throw new Error(
"Cannot fullfil chunk condition of " + module.identifier()
);
}
// Try placing in all parents
for (const group of chunkGroup.parentsIterable) {
chunkGroups.add(group);
}
}
for (const sourceChunk of sourceChunks) {
GraphHelpers.disconnectChunkAndModule(sourceChunk, module);
}
for (const targetChunk of targetChunks) {
GraphHelpers.connectChunkAndModule(targetChunk, module);
}
// the chunk graph changed: record it so the hook can trigger another pass
changed = true;
}
if (changed) return true;
};
compilation.hooks.optimizeChunksBasic.tap(
"EnsureChunkConditionsPlugin",
handler
);
compilation.hooks.optimizeExtractedChunksBasic.tap(
"EnsureChunkConditionsPlugin",
handler
);
}
);
}
}
module.exports = EnsureChunkConditionsPlugin;
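For illustration, the contract this plugin enforces: a module may define chunkCondition, and any chunk for which it returns false has the module lifted up into a satisfying parent. A sketch modeled on webpack 4's ExternalModule, which only allows itself in entry chunks:
// illustrative sketch, not part of the file above
class ExampleExternalModule /* extends Module */ {
  chunkCondition(chunk) {
    // false for non-entry chunks makes EnsureChunkConditionsPlugin
    // move this module into a parent chunk group that qualifies
    return chunk.hasEntryModule();
  }
}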


@@ -0,0 +1,99 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
class FlagIncludedChunksPlugin {
apply(compiler) {
compiler.hooks.compilation.tap("FlagIncludedChunksPlugin", compilation => {
compilation.hooks.optimizeChunkIds.tap(
"FlagIncludedChunksPlugin",
chunks => {
// prepare two bit integers for each module
// 2^31 is the max number represented as SMI in v8
// we want the bits distributed this way:
// the bit 2^31 is pretty rare and only one module should get it
// so it has a probability of 1 / modulesCount
// the first bit (2^0) is the easiest and every module could get it
// if it doesn't get a better bit
// from bit 2^n to 2^(n+1) there is a probability of p
// so 1 / modulesCount == p^31
// <=> p = sqrt31(1 / modulesCount)
// so we use a modulo of 1 / sqrt31(1 / modulesCount)
const moduleBits = new WeakMap();
const modulesCount = compilation.modules.length;
// precalculate the modulo values for each bit
const modulo = 1 / Math.pow(1 / modulesCount, 1 / 31);
const modulos = Array.from(
{ length: 31 },
(x, i) => Math.pow(modulo, i) | 0
);
// iterate all modules to generate bit values
let i = 0;
for (const module of compilation.modules) {
let bit = 30;
while (i % modulos[bit] !== 0) {
bit--;
}
moduleBits.set(module, 1 << bit);
i++;
}
// iterate all chunks to generate bitmaps
const chunkModulesHash = new WeakMap();
for (const chunk of chunks) {
let hash = 0;
for (const module of chunk.modulesIterable) {
hash |= moduleBits.get(module);
}
chunkModulesHash.set(chunk, hash);
}
for (const chunkA of chunks) {
const chunkAHash = chunkModulesHash.get(chunkA);
const chunkAModulesCount = chunkA.getNumberOfModules();
if (chunkAModulesCount === 0) continue;
let bestModule = undefined;
for (const module of chunkA.modulesIterable) {
if (
bestModule === undefined ||
bestModule.getNumberOfChunks() > module.getNumberOfChunks()
)
bestModule = module;
}
loopB: for (const chunkB of bestModule.chunksIterable) {
// as we iterate the same iterables twice
// skip if we find ourselves
if (chunkA === chunkB) continue;
const chunkBModulesCount = chunkB.getNumberOfModules();
// ids for empty chunks are not included
if (chunkBModulesCount === 0) continue;
// instead of swapping A and B just bail
// as we loop twice the current A will be B and B then A
if (chunkAModulesCount > chunkBModulesCount) continue;
// is chunkA in chunkB?
// we do a cheap check for the hash value
const chunkBHash = chunkModulesHash.get(chunkB);
if ((chunkBHash & chunkAHash) !== chunkAHash) continue;
// compare all modules
for (const m of chunkA.modulesIterable) {
if (!chunkB.containsModule(m)) continue loopB;
}
chunkB.ids.push(chunkA.id);
}
}
}
);
});
}
}
module.exports = FlagIncludedChunksPlugin;
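To make the probability comment above concrete, a standalone sketch of the bit assignment for 1000 modules: modulo = 1000^(1/31) ≈ 1.25, so modulos[0] = 1 (every module can fall back to the lowest bits) while modulos[30] ≈ 800 (roughly one module in a thousand gets bit 30).
// standalone sketch of the bit distribution used above
const modulesCount = 1000;
const modulo = 1 / Math.pow(1 / modulesCount, 1 / 31);
const modulos = Array.from({ length: 31 }, (x, i) => Math.pow(modulo, i) | 0);
for (let i = 0; i < 5; i++) {
  let bit = 30;
  while (i % modulos[bit] !== 0) bit--;
  console.log(`module ${i} -> bit ${bit}`); // e.g. module 0 -> bit 30, the rest land on low bits
}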


@@ -0,0 +1,231 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const validateOptions = require("schema-utils");
const schema = require("../../schemas/plugins/optimize/LimitChunkCountPlugin.json");
const LazyBucketSortedSet = require("../util/LazyBucketSortedSet");
/** @typedef {import("../../declarations/plugins/optimize/LimitChunkCountPlugin").LimitChunkCountPluginOptions} LimitChunkCountPluginOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Compiler")} Compiler */
/**
* @typedef {Object} ChunkCombination
* @property {boolean} deleted this is set to true when combination was removed
* @property {number} sizeDiff
* @property {number} integratedSize
* @property {Chunk} a
* @property {Chunk} b
* @property {number} aIdx
* @property {number} bIdx
* @property {number} aSize
* @property {number} bSize
*/
const addToSetMap = (map, key, value) => {
const set = map.get(key);
if (set === undefined) {
map.set(key, new Set([value]));
} else {
set.add(value);
}
};
class LimitChunkCountPlugin {
/**
* @param {LimitChunkCountPluginOptions=} options options object
*/
constructor(options) {
if (!options) options = {};
validateOptions(schema, options, "Limit Chunk Count Plugin");
this.options = options;
}
/**
* @param {Compiler} compiler the webpack compiler
* @returns {void}
*/
apply(compiler) {
const options = this.options;
compiler.hooks.compilation.tap("LimitChunkCountPlugin", compilation => {
compilation.hooks.optimizeChunksAdvanced.tap(
"LimitChunkCountPlugin",
chunks => {
const maxChunks = options.maxChunks;
if (!maxChunks) return;
if (maxChunks < 1) return;
if (chunks.length <= maxChunks) return;
let remainingChunksToMerge = chunks.length - maxChunks;
// order chunks in a deterministic way
const orderedChunks = chunks.slice().sort((a, b) => a.compareTo(b));
// create a lazy sorted data structure to keep all combinations
// this is large. Size = chunks * (chunks - 1) / 2
// It uses a multi layer bucket sort plus normal sort in the last layer
// It's also lazy so only accessed buckets are sorted
const combinations = new LazyBucketSortedSet(
// Layer 1: ordered by largest size benefit
c => c.sizeDiff,
(a, b) => b - a,
// Layer 2: ordered by smallest combined size
c => c.integratedSize,
(a, b) => a - b,
// Layer 3: ordered by position difference in orderedChunks (-> to be deterministic)
c => c.bIdx - c.aIdx,
(a, b) => a - b,
// Layer 4: ordered by position in orderedChunks (-> to be deterministic)
(a, b) => a.bIdx - b.bIdx
);
// we keep a mapping from chunk to all combinations
// but this mapping is not kept up-to-date with deletions
// so the `deleted` flag needs to be considered when iterating it
/** @type {Map<Chunk, Set<ChunkCombination>>} */
const combinationsByChunk = new Map();
orderedChunks.forEach((b, bIdx) => {
// create combination pairs with size and integrated size
for (let aIdx = 0; aIdx < bIdx; aIdx++) {
const a = orderedChunks[aIdx];
const integratedSize = a.integratedSize(b, options);
// filter pairs that do not have an integratedSize
// meaning they can NOT be integrated!
if (integratedSize === false) continue;
const aSize = a.size(options);
const bSize = b.size(options);
const c = {
deleted: false,
sizeDiff: aSize + bSize - integratedSize,
integratedSize,
a,
b,
aIdx,
bIdx,
aSize,
bSize
};
combinations.add(c);
addToSetMap(combinationsByChunk, a, c);
addToSetMap(combinationsByChunk, b, c);
}
});
// list of modified chunks during this run
// combinations affected by this change are skipped to allow
// further optimizations
/** @type {Set<Chunk>} */
const modifiedChunks = new Set();
let changed = false;
// eslint-disable-next-line no-constant-condition
loop: while (true) {
const combination = combinations.popFirst();
if (combination === undefined) break;
combination.deleted = true;
const { a, b, integratedSize } = combination;
// skip over pair when
// one of the already merged chunks is a parent of one of the chunks
if (modifiedChunks.size > 0) {
const queue = new Set(a.groupsIterable);
for (const group of b.groupsIterable) {
queue.add(group);
}
for (const group of queue) {
for (const mChunk of modifiedChunks) {
if (mChunk !== a && mChunk !== b && mChunk.isInGroup(group)) {
// This is a potential pair which needs recalculation
// We can't do that now, but it may merge before the following pairs,
// so we leave space for it and consider both chunks as modified,
// just for the worst case
remainingChunksToMerge--;
if (remainingChunksToMerge <= 0) break loop;
modifiedChunks.add(a);
modifiedChunks.add(b);
continue loop;
}
}
for (const parent of group.parentsIterable) {
queue.add(parent);
}
}
}
// merge the chunks
if (a.integrate(b, "limit")) {
chunks.splice(chunks.indexOf(b), 1);
// flag chunk a as modified as further optimizations are possible for all children here
modifiedChunks.add(a);
changed = true;
remainingChunksToMerge--;
if (remainingChunksToMerge <= 0) break;
// Update all affected combinations
// delete all combination with the removed chunk
// we will use combinations with the kept chunk instead
for (const combination of combinationsByChunk.get(b)) {
if (combination.deleted) continue;
combination.deleted = true;
combinations.delete(combination);
}
// Update combinations with the kept chunk with new sizes
for (const combination of combinationsByChunk.get(a)) {
if (combination.deleted) continue;
if (combination.a === a) {
// Update size
const newIntegratedSize = a.integratedSize(
combination.b,
options
);
if (newIntegratedSize === false) {
combination.deleted = true;
combinations.delete(combination);
continue;
}
const finishUpdate = combinations.startUpdate(combination);
combination.integratedSize = newIntegratedSize;
combination.aSize = integratedSize;
combination.sizeDiff =
combination.bSize + integratedSize - newIntegratedSize;
finishUpdate();
} else if (combination.b === a) {
// Update size
const newIntegratedSize = combination.a.integratedSize(
a,
options
);
if (newIntegratedSize === false) {
combination.deleted = true;
combinations.delete(combination);
continue;
}
const finishUpdate = combinations.startUpdate(combination);
combination.integratedSize = newIntegratedSize;
combination.bSize = integratedSize;
combination.sizeDiff =
integratedSize + combination.aSize - newIntegratedSize;
finishUpdate();
}
}
}
}
if (changed) return true;
}
);
});
}
}
module.exports = LimitChunkCountPlugin;
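A usage sketch (assuming webpack 4): the plugin keeps popping the most profitable pair off the lazy-sorted set and merging it until the chunk count fits.
// usage sketch, not part of the file above (assumes webpack 4)
const webpack = require("webpack");
module.exports = {
  // ...
  plugins: [
    // merge the most size-efficient chunk pairs until <= 5 chunks remain
    new webpack.optimize.LimitChunkCountPlugin({ maxChunks: 5 })
  ]
};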


@@ -0,0 +1,78 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
class MergeDuplicateChunksPlugin {
apply(compiler) {
compiler.hooks.compilation.tap(
"MergeDuplicateChunksPlugin",
compilation => {
compilation.hooks.optimizeChunksBasic.tap(
"MergeDuplicateChunksPlugin",
chunks => {
// remember already tested chunks for performance
const notDuplicates = new Set();
// for each chunk
for (const chunk of chunks) {
// track a Set of all chunks that could be duplicates
let possibleDuplicates;
for (const module of chunk.modulesIterable) {
if (possibleDuplicates === undefined) {
// when possibleDuplicates is not yet set,
// create a new Set from chunks of the current module
// including only chunks with the same number of modules
for (const dup of module.chunksIterable) {
if (
dup !== chunk &&
chunk.getNumberOfModules() === dup.getNumberOfModules() &&
!notDuplicates.has(dup)
) {
// delay allocating the new Set until here, reduce memory pressure
if (possibleDuplicates === undefined) {
possibleDuplicates = new Set();
}
possibleDuplicates.add(dup);
}
}
// when no candidate chunk was found we can break here
if (possibleDuplicates === undefined) break;
} else {
// validate existing possible duplicates
for (const dup of possibleDuplicates) {
// remove possible duplicate when module is not contained
if (!dup.containsModule(module)) {
possibleDuplicates.delete(dup);
}
}
// when all candidates have been removed we can break here
if (possibleDuplicates.size === 0) break;
}
}
// when we found duplicates
if (
possibleDuplicates !== undefined &&
possibleDuplicates.size > 0
) {
for (const otherChunk of possibleDuplicates) {
if (otherChunk.hasRuntime() !== chunk.hasRuntime()) continue;
// merge them
if (chunk.integrate(otherChunk, "duplicate")) {
chunks.splice(chunks.indexOf(otherChunk), 1);
}
}
}
// don't check already processed chunks twice
notDuplicates.add(chunk);
}
}
);
}
);
}
}
module.exports = MergeDuplicateChunksPlugin;
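This plugin is applied internally by webpack 4; it is toggled through configuration rather than instantiated directly, roughly as sketched here:
// configuration sketch, not part of the file above (assumes webpack 4)
module.exports = {
  // ...
  optimization: {
    mergeDuplicateChunks: true // the default; set to false to skip this pass
  }
};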


@@ -0,0 +1,82 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const validateOptions = require("schema-utils");
const schema = require("../../schemas/plugins/optimize/MinChunkSizePlugin.json");
/** @typedef {import("../../declarations/plugins/optimize/MinChunkSizePlugin").MinChunkSizePluginOptions} MinChunkSizePluginOptions */
class MinChunkSizePlugin {
/**
* @param {MinChunkSizePluginOptions} options options object
*/
constructor(options) {
validateOptions(schema, options, "Min Chunk Size Plugin");
this.options = options;
}
apply(compiler) {
const options = this.options;
const minChunkSize = options.minChunkSize;
compiler.hooks.compilation.tap("MinChunkSizePlugin", compilation => {
compilation.hooks.optimizeChunksAdvanced.tap(
"MinChunkSizePlugin",
chunks => {
const equalOptions = {
chunkOverhead: 1,
entryChunkMultiplicator: 1
};
const sortedSizeFilteredExtendedPairCombinations = chunks
.reduce((combinations, a, idx) => {
// create combination pairs
for (let i = 0; i < idx; i++) {
const b = chunks[i];
combinations.push([b, a]);
}
return combinations;
}, [])
.filter(pair => {
// check if one of the chunks sizes is smaller than the minChunkSize
const p0SmallerThanMinChunkSize =
pair[0].size(equalOptions) < minChunkSize;
const p1SmallerThanMinChunkSize =
pair[1].size(equalOptions) < minChunkSize;
return p0SmallerThanMinChunkSize || p1SmallerThanMinChunkSize;
})
.map(pair => {
// extend combination pairs with size and integrated size
const a = pair[0].size(options);
const b = pair[1].size(options);
const ab = pair[0].integratedSize(pair[1], options);
return [a + b - ab, ab, pair[0], pair[1]];
})
.filter(pair => {
// filter pairs that do not have an integratedSize
// meaning they can NOT be integrated!
return pair[1] !== false;
})
.sort((a, b) => {
// sadly JavaScript does an in-place sort here
// sort by size
const diff = b[0] - a[0];
if (diff !== 0) return diff;
return a[1] - b[1];
});
if (sortedSizeFilteredExtendedPairCombinations.length === 0) return;
const pair = sortedSizeFilteredExtendedPairCombinations[0];
pair[2].integrate(pair[3], "min-size");
chunks.splice(chunks.indexOf(pair[3]), 1);
return true;
}
);
});
}
}
module.exports = MinChunkSizePlugin;
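A usage sketch (assuming webpack 4; minChunkSize is the only, and required, option):
// usage sketch, not part of the file above (assumes webpack 4)
const webpack = require("webpack");
module.exports = {
  // ...
  plugins: [
    // keep merging the best pair until no chunk is smaller than 10 KiB
    new webpack.optimize.MinChunkSizePlugin({ minChunkSize: 10 * 1024 })
  ]
};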


@@ -0,0 +1,29 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const WebpackError = require("../WebpackError");
const SizeFormatHelpers = require("../SizeFormatHelpers");
class MinMaxSizeWarning extends WebpackError {
constructor(keys, minSize, maxSize) {
let keysMessage = "Fallback cache group";
if (keys) {
keysMessage =
keys.length > 1
? `Cache groups ${keys.sort().join(", ")}`
: `Cache group ${keys[0]}`;
}
super(
`SplitChunksPlugin\n` +
`${keysMessage}\n` +
`Configured minSize (${SizeFormatHelpers.formatSize(minSize)}) is ` +
`bigger than maxSize (${SizeFormatHelpers.formatSize(maxSize)}).\n` +
"This seem to be a invalid optimiziation.splitChunks configuration."
);
}
}
module.exports = MinMaxSizeWarning;
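For illustration, a configuration sketch that would trigger this warning (minSize configured larger than maxSize in optimization.splitChunks):
// configuration sketch, not part of the file above (assumes webpack 4)
module.exports = {
  // ...
  optimization: {
    splitChunks: {
      minSize: 50 * 1024,
      maxSize: 30 * 1024 // smaller than minSize -> emits MinMaxSizeWarning
    }
  }
};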


@@ -0,0 +1,485 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const HarmonyImportDependency = require("../dependencies/HarmonyImportDependency");
const ModuleHotAcceptDependency = require("../dependencies/ModuleHotAcceptDependency");
const ModuleHotDeclineDependency = require("../dependencies/ModuleHotDeclineDependency");
const ConcatenatedModule = require("./ConcatenatedModule");
const HarmonyCompatibilityDependency = require("../dependencies/HarmonyCompatibilityDependency");
const StackedSetMap = require("../util/StackedSetMap");
const formatBailoutReason = msg => {
return "ModuleConcatenation bailout: " + msg;
};
class ModuleConcatenationPlugin {
constructor(options) {
if (typeof options !== "object") options = {};
this.options = options;
}
apply(compiler) {
compiler.hooks.compilation.tap(
"ModuleConcatenationPlugin",
(compilation, { normalModuleFactory }) => {
const handler = (parser, parserOptions) => {
parser.hooks.call.for("eval").tap("ModuleConcatenationPlugin", () => {
// Because of variable renaming we can't use modules with eval.
parser.state.module.buildMeta.moduleConcatenationBailout = "eval()";
});
};
normalModuleFactory.hooks.parser
.for("javascript/auto")
.tap("ModuleConcatenationPlugin", handler);
normalModuleFactory.hooks.parser
.for("javascript/dynamic")
.tap("ModuleConcatenationPlugin", handler);
normalModuleFactory.hooks.parser
.for("javascript/esm")
.tap("ModuleConcatenationPlugin", handler);
const bailoutReasonMap = new Map();
const setBailoutReason = (module, reason) => {
bailoutReasonMap.set(module, reason);
module.optimizationBailout.push(
typeof reason === "function"
? rs => formatBailoutReason(reason(rs))
: formatBailoutReason(reason)
);
};
const getBailoutReason = (module, requestShortener) => {
const reason = bailoutReasonMap.get(module);
if (typeof reason === "function") return reason(requestShortener);
return reason;
};
compilation.hooks.optimizeChunkModules.tap(
"ModuleConcatenationPlugin",
(allChunks, modules) => {
const relevantModules = [];
const possibleInners = new Set();
for (const module of modules) {
// Only harmony modules are valid for optimization
if (
!module.buildMeta ||
module.buildMeta.exportsType !== "namespace" ||
!module.dependencies.some(
d => d instanceof HarmonyCompatibilityDependency
)
) {
setBailoutReason(module, "Module is not an ECMAScript module");
continue;
}
// Some expressions are not compatible with module concatenation
// because they may produce unexpected results. The plugin bails out
// if some were detected upfront.
if (
module.buildMeta &&
module.buildMeta.moduleConcatenationBailout
) {
setBailoutReason(
module,
`Module uses ${module.buildMeta.moduleConcatenationBailout}`
);
continue;
}
// Exports must be known (and not dynamic)
if (!Array.isArray(module.buildMeta.providedExports)) {
setBailoutReason(module, "Module exports are unknown");
continue;
}
// Using dependency variables is not possible as this wraps the code in a function
if (module.variables.length > 0) {
setBailoutReason(
module,
`Module uses injected variables (${module.variables
.map(v => v.name)
.join(", ")})`
);
continue;
}
// Hot Module Replacement needs its own module to work correctly
if (
module.dependencies.some(
dep =>
dep instanceof ModuleHotAcceptDependency ||
dep instanceof ModuleHotDeclineDependency
)
) {
setBailoutReason(module, "Module uses Hot Module Replacement");
continue;
}
relevantModules.push(module);
// Module must not be an entry point
if (module.isEntryModule()) {
setBailoutReason(module, "Module is an entry point");
continue;
}
// Module must be in any chunk (we don't want to do useless work)
if (module.getNumberOfChunks() === 0) {
setBailoutReason(module, "Module is not in any chunk");
continue;
}
// Module must only be used by Harmony Imports
const nonHarmonyReasons = module.reasons.filter(
reason =>
!reason.dependency ||
!(reason.dependency instanceof HarmonyImportDependency)
);
if (nonHarmonyReasons.length > 0) {
const importingModules = new Set(
nonHarmonyReasons.map(r => r.module).filter(Boolean)
);
const importingExplanations = new Set(
nonHarmonyReasons.map(r => r.explanation).filter(Boolean)
);
const importingModuleTypes = new Map(
Array.from(importingModules).map(
m => /** @type {[string, Set]} */ ([
m,
new Set(
nonHarmonyReasons
.filter(r => r.module === m)
.map(r => r.dependency.type)
.sort()
)
])
)
);
setBailoutReason(module, requestShortener => {
const names = Array.from(importingModules)
.map(
m =>
`${m.readableIdentifier(
requestShortener
)} (referenced with ${Array.from(
importingModuleTypes.get(m)
).join(", ")})`
)
.sort();
const explanations = Array.from(importingExplanations).sort();
if (names.length > 0 && explanations.length === 0) {
return `Module is referenced from these modules with unsupported syntax: ${names.join(
", "
)}`;
} else if (names.length === 0 && explanations.length > 0) {
return `Module is referenced by: ${explanations.join(
", "
)}`;
} else if (names.length > 0 && explanations.length > 0) {
return `Module is referenced from these modules with unsupported syntax: ${names.join(
", "
)} and by: ${explanations.join(", ")}`;
} else {
return "Module is referenced in a unsupported way";
}
});
continue;
}
possibleInners.add(module);
}
// sort by depth
// modules with lower depth are more likely suited as roots
// this improves performance, because modules already selected as inner are skipped
relevantModules.sort((a, b) => {
return a.depth - b.depth;
});
const concatConfigurations = [];
const usedAsInner = new Set();
for (const currentRoot of relevantModules) {
// when used by another configuration as inner:
// the other configuration is better and we can skip this one
if (usedAsInner.has(currentRoot)) continue;
// create a configuration with the root
const currentConfiguration = new ConcatConfiguration(currentRoot);
// cache failures to add modules
const failureCache = new Map();
// try to add all imports
for (const imp of this._getImports(compilation, currentRoot)) {
const problem = this._tryToAdd(
compilation,
currentConfiguration,
imp,
possibleInners,
failureCache
);
if (problem) {
failureCache.set(imp, problem);
currentConfiguration.addWarning(imp, problem);
}
}
if (!currentConfiguration.isEmpty()) {
concatConfigurations.push(currentConfiguration);
for (const module of currentConfiguration.getModules()) {
if (module !== currentConfiguration.rootModule) {
usedAsInner.add(module);
}
}
}
}
// HACK: Sort configurations by length and start with the longest one
// to get the biggest groups possible. Used modules are tracked in usedModules
// TODO: Allow to reuse existing configuration while trying to add dependencies.
// This would improve performance. O(n^2) -> O(n)
concatConfigurations.sort((a, b) => {
return b.modules.size - a.modules.size;
});
const usedModules = new Set();
for (const concatConfiguration of concatConfigurations) {
if (usedModules.has(concatConfiguration.rootModule)) continue;
const modules = concatConfiguration.getModules();
const rootModule = concatConfiguration.rootModule;
const newModule = new ConcatenatedModule(
rootModule,
Array.from(modules),
ConcatenatedModule.createConcatenationList(
rootModule,
modules,
compilation
)
);
for (const warning of concatConfiguration.getWarningsSorted()) {
newModule.optimizationBailout.push(requestShortener => {
const reason = getBailoutReason(warning[0], requestShortener);
const reasonWithPrefix = reason ? ` (<- ${reason})` : "";
if (warning[0] === warning[1]) {
return formatBailoutReason(
`Cannot concat with ${warning[0].readableIdentifier(
requestShortener
)}${reasonWithPrefix}`
);
} else {
return formatBailoutReason(
`Cannot concat with ${warning[0].readableIdentifier(
requestShortener
)} because of ${warning[1].readableIdentifier(
requestShortener
)}${reasonWithPrefix}`
);
}
});
}
const chunks = concatConfiguration.rootModule.getChunks();
for (const m of modules) {
usedModules.add(m);
for (const chunk of chunks) {
chunk.removeModule(m);
}
}
for (const chunk of chunks) {
chunk.addModule(newModule);
newModule.addChunk(chunk);
}
for (const chunk of allChunks) {
if (chunk.entryModule === concatConfiguration.rootModule) {
chunk.entryModule = newModule;
}
}
compilation.modules.push(newModule);
for (const reason of newModule.reasons) {
if (reason.dependency.module === concatConfiguration.rootModule)
reason.dependency.module = newModule;
if (
reason.dependency.redirectedModule ===
concatConfiguration.rootModule
)
reason.dependency.redirectedModule = newModule;
}
// TODO: remove when LTS node version contains fixed v8 version
// @see https://github.com/webpack/webpack/pull/6613
// Turbofan does not correctly inline for-of loops with polymorphic input arrays.
// Work around issue by using a standard for loop and assigning dep.module.reasons
for (let i = 0; i < newModule.dependencies.length; i++) {
let dep = newModule.dependencies[i];
if (dep.module) {
let reasons = dep.module.reasons;
for (let j = 0; j < reasons.length; j++) {
let reason = reasons[j];
if (reason.dependency === dep) {
reason.module = newModule;
}
}
}
}
}
compilation.modules = compilation.modules.filter(
m => !usedModules.has(m)
);
}
);
}
);
}
_getImports(compilation, module) {
return new Set(
module.dependencies
// Get reference info only for harmony Dependencies
.map(dep => {
if (!(dep instanceof HarmonyImportDependency)) return null;
if (!compilation) return dep.getReference();
return compilation.getDependencyReference(module, dep);
})
// Reference is valid and has a module
// Dependencies are simple enough to concat them
.filter(
ref =>
ref &&
ref.module &&
(Array.isArray(ref.importedNames) ||
Array.isArray(ref.module.buildMeta.providedExports))
)
// Take the imported module
.map(ref => ref.module)
);
}
_tryToAdd(compilation, config, module, possibleModules, failureCache) {
const cacheEntry = failureCache.get(module);
if (cacheEntry) {
return cacheEntry;
}
// Already added?
if (config.has(module)) {
return null;
}
// Not possible to add?
if (!possibleModules.has(module)) {
failureCache.set(module, module); // cache failures for performance
return module;
}
// module must be in the same chunks
if (!config.rootModule.hasEqualsChunks(module)) {
failureCache.set(module, module); // cache failures for performance
return module;
}
// Clone config to make experimental changes
const testConfig = config.clone();
// Add the module
testConfig.add(module);
// Every module which depends on the added module must be in the configuration too.
for (const reason of module.reasons) {
// Modules that are not used can be ignored
if (
reason.module.factoryMeta.sideEffectFree &&
reason.module.used === false
)
continue;
const problem = this._tryToAdd(
compilation,
testConfig,
reason.module,
possibleModules,
failureCache
);
if (problem) {
failureCache.set(module, problem); // cache failures for performance
return problem;
}
}
// Commit experimental changes
config.set(testConfig);
// Eagerly try to add imports too if possible
for (const imp of this._getImports(compilation, module)) {
const problem = this._tryToAdd(
compilation,
config,
imp,
possibleModules,
failureCache
);
if (problem) {
config.addWarning(imp, problem);
}
}
return null;
}
}
class ConcatConfiguration {
constructor(rootModule, cloneFrom) {
this.rootModule = rootModule;
if (cloneFrom) {
this.modules = cloneFrom.modules.createChild(5);
this.warnings = cloneFrom.warnings.createChild(5);
} else {
this.modules = new StackedSetMap();
this.modules.add(rootModule);
this.warnings = new StackedSetMap();
}
}
add(module) {
this.modules.add(module);
}
has(module) {
return this.modules.has(module);
}
isEmpty() {
return this.modules.size === 1;
}
addWarning(module, problem) {
this.warnings.set(module, problem);
}
getWarningsSorted() {
return new Map(
this.warnings.asPairArray().sort((a, b) => {
const ai = a[0].identifier();
const bi = b[0].identifier();
if (ai < bi) return -1;
if (ai > bi) return 1;
return 0;
})
);
}
getModules() {
return this.modules.asSet();
}
clone() {
return new ConcatConfiguration(this.rootModule, this);
}
set(config) {
this.rootModule = config.rootModule;
this.modules = config.modules;
this.warnings = config.warnings;
}
}
module.exports = ModuleConcatenationPlugin;
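A usage sketch (assuming webpack 4, where production mode enables this automatically via optimization.concatenateModules):
// usage sketch, not part of the file above (assumes webpack 4)
const webpack = require("webpack");
module.exports = {
  // ...
  optimization: { concatenateModules: true },
  // or, equivalently, apply the plugin explicitly:
  plugins: [new webpack.optimize.ModuleConcatenationPlugin()]
};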


@@ -0,0 +1,41 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../Compiler")} Compiler */
class NaturalChunkOrderPlugin {
/**
* @param {Compiler} compiler webpack compiler
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap("NaturalChunkOrderPlugin", compilation => {
compilation.hooks.optimizeChunkOrder.tap(
"NaturalChunkOrderPlugin",
chunks => {
chunks.sort((chunkA, chunkB) => {
const a = chunkA.modulesIterable[Symbol.iterator]();
const b = chunkB.modulesIterable[Symbol.iterator]();
// eslint-disable-next-line no-constant-condition
while (true) {
const aItem = a.next();
const bItem = b.next();
if (aItem.done && bItem.done) return 0;
if (aItem.done) return -1;
if (bItem.done) return 1;
const aModuleId = aItem.value.id;
const bModuleId = bItem.value.id;
if (aModuleId < bModuleId) return -1;
if (aModuleId > bModuleId) return 1;
}
});
}
);
});
}
}
module.exports = NaturalChunkOrderPlugin;


@@ -0,0 +1,66 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const validateOptions = require("schema-utils");
const schema = require("../../schemas/plugins/optimize/OccurrenceOrderChunkIdsPlugin.json");
/** @typedef {import("../../declarations/plugins/optimize/OccurrenceOrderChunkIdsPlugin").OccurrenceOrderChunkIdsPluginOptions} OccurrenceOrderChunkIdsPluginOptions */
class OccurrenceOrderChunkIdsPlugin {
/**
* @param {OccurrenceOrderChunkIdsPluginOptions=} options options object
*/
constructor(options = {}) {
validateOptions(schema, options, "Occurrence Order Chunk Ids Plugin");
this.options = options;
}
apply(compiler) {
const prioritiseInitial = this.options.prioritiseInitial;
compiler.hooks.compilation.tap(
"OccurrenceOrderChunkIdsPlugin",
compilation => {
compilation.hooks.optimizeChunkOrder.tap(
"OccurrenceOrderChunkIdsPlugin",
chunks => {
const occursInInitialChunksMap = new Map();
const originalOrder = new Map();
let i = 0;
for (const c of chunks) {
let occurs = 0;
for (const chunkGroup of c.groupsIterable) {
for (const parent of chunkGroup.parentsIterable) {
if (parent.isInitial()) occurs++;
}
}
occursInInitialChunksMap.set(c, occurs);
originalOrder.set(c, i++);
}
chunks.sort((a, b) => {
if (prioritiseInitial) {
const aEntryOccurs = occursInInitialChunksMap.get(a);
const bEntryOccurs = occursInInitialChunksMap.get(b);
if (aEntryOccurs > bEntryOccurs) return -1;
if (aEntryOccurs < bEntryOccurs) return 1;
}
const aOccurs = a.getNumberOfGroups();
const bOccurs = b.getNumberOfGroups();
if (aOccurs > bOccurs) return -1;
if (aOccurs < bOccurs) return 1;
const orgA = originalOrder.get(a);
const orgB = originalOrder.get(b);
return orgA - orgB;
});
}
);
}
);
}
}
module.exports = OccurrenceOrderChunkIdsPlugin;


@@ -0,0 +1,112 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const validateOptions = require("schema-utils");
const schema = require("../../schemas/plugins/optimize/OccurrenceOrderModuleIdsPlugin.json");
/** @typedef {import("../../declarations/plugins/optimize/OccurrenceOrderModuleIdsPlugin").OccurrenceOrderModuleIdsPluginOptions} OccurrenceOrderModuleIdsPluginOptions */
class OccurrenceOrderModuleIdsPlugin {
/**
* @param {OccurrenceOrderModuleIdsPluginOptions=} options options object
*/
constructor(options = {}) {
validateOptions(schema, options, "Occurrence Order Module Ids Plugin");
this.options = options;
}
apply(compiler) {
const prioritiseInitial = this.options.prioritiseInitial;
compiler.hooks.compilation.tap(
"OccurrenceOrderModuleIdsPlugin",
compilation => {
compilation.hooks.optimizeModuleOrder.tap(
"OccurrenceOrderModuleIdsPlugin",
modules => {
const occursInInitialChunksMap = new Map();
const occursInAllChunksMap = new Map();
const initialChunkChunkMap = new Map();
const entryCountMap = new Map();
for (const m of modules) {
let initial = 0;
let entry = 0;
for (const c of m.chunksIterable) {
if (c.canBeInitial()) initial++;
if (c.entryModule === m) entry++;
}
initialChunkChunkMap.set(m, initial);
entryCountMap.set(m, entry);
}
const countOccursInEntry = (sum, r) => {
if (!r.module) {
return sum;
}
const count = initialChunkChunkMap.get(r.module);
if (!count) {
return sum;
}
return sum + count;
};
const countOccurs = (sum, r) => {
if (!r.module) {
return sum;
}
let factor = 1;
if (typeof r.dependency.getNumberOfIdOccurrences === "function") {
factor = r.dependency.getNumberOfIdOccurrences();
}
if (factor === 0) {
return sum;
}
return sum + factor * r.module.getNumberOfChunks();
};
if (prioritiseInitial) {
for (const m of modules) {
const result =
m.reasons.reduce(countOccursInEntry, 0) +
initialChunkChunkMap.get(m) +
entryCountMap.get(m);
occursInInitialChunksMap.set(m, result);
}
}
const originalOrder = new Map();
let i = 0;
for (const m of modules) {
const result =
m.reasons.reduce(countOccurs, 0) +
m.getNumberOfChunks() +
entryCountMap.get(m);
occursInAllChunksMap.set(m, result);
originalOrder.set(m, i++);
}
modules.sort((a, b) => {
if (prioritiseInitial) {
const aEntryOccurs = occursInInitialChunksMap.get(a);
const bEntryOccurs = occursInInitialChunksMap.get(b);
if (aEntryOccurs > bEntryOccurs) return -1;
if (aEntryOccurs < bEntryOccurs) return 1;
}
const aOccurs = occursInAllChunksMap.get(a);
const bOccurs = occursInAllChunksMap.get(b);
if (aOccurs > bOccurs) return -1;
if (aOccurs < bOccurs) return 1;
const orgA = originalOrder.get(a);
const orgB = originalOrder.get(b);
return orgA - orgB;
});
}
);
}
);
}
}
module.exports = OccurrenceOrderModuleIdsPlugin;
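Both occurrence-order id plugins (this one and the chunk-ids variant above) are applied internally by webpack 4; as a sketch, they correspond to enabling:
// configuration sketch, not part of the file above (assumes webpack 4)
module.exports = {
  // ...
  optimization: {
    occurrenceOrder: true // frequently occurring modules/chunks get the shortest ids
  }
};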


@@ -0,0 +1,135 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
// TODO webpack 5 remove this plugin
// It has been split into separate plugins for modules and chunks
class OccurrenceOrderPlugin {
constructor(preferEntry) {
if (preferEntry !== undefined && typeof preferEntry !== "boolean") {
throw new Error(
"Argument should be a boolean.\nFor more info on this plugin, see https://webpack.js.org/plugins/"
);
}
this.preferEntry = preferEntry;
}
apply(compiler) {
const preferEntry = this.preferEntry;
compiler.hooks.compilation.tap("OccurrenceOrderPlugin", compilation => {
compilation.hooks.optimizeModuleOrder.tap(
"OccurrenceOrderPlugin",
modules => {
const occursInInitialChunksMap = new Map();
const occursInAllChunksMap = new Map();
const initialChunkChunkMap = new Map();
const entryCountMap = new Map();
for (const m of modules) {
let initial = 0;
let entry = 0;
for (const c of m.chunksIterable) {
if (c.canBeInitial()) initial++;
if (c.entryModule === m) entry++;
}
initialChunkChunkMap.set(m, initial);
entryCountMap.set(m, entry);
}
const countOccursInEntry = (sum, r) => {
if (!r.module) {
return sum;
}
return sum + initialChunkChunkMap.get(r.module);
};
const countOccurs = (sum, r) => {
if (!r.module) {
return sum;
}
let factor = 1;
if (typeof r.dependency.getNumberOfIdOccurrences === "function") {
factor = r.dependency.getNumberOfIdOccurrences();
}
if (factor === 0) {
return sum;
}
return sum + factor * r.module.getNumberOfChunks();
};
if (preferEntry) {
for (const m of modules) {
const result =
m.reasons.reduce(countOccursInEntry, 0) +
initialChunkChunkMap.get(m) +
entryCountMap.get(m);
occursInInitialChunksMap.set(m, result);
}
}
const originalOrder = new Map();
let i = 0;
for (const m of modules) {
const result =
m.reasons.reduce(countOccurs, 0) +
m.getNumberOfChunks() +
entryCountMap.get(m);
occursInAllChunksMap.set(m, result);
originalOrder.set(m, i++);
}
modules.sort((a, b) => {
if (preferEntry) {
const aEntryOccurs = occursInInitialChunksMap.get(a);
const bEntryOccurs = occursInInitialChunksMap.get(b);
if (aEntryOccurs > bEntryOccurs) return -1;
if (aEntryOccurs < bEntryOccurs) return 1;
}
const aOccurs = occursInAllChunksMap.get(a);
const bOccurs = occursInAllChunksMap.get(b);
if (aOccurs > bOccurs) return -1;
if (aOccurs < bOccurs) return 1;
const orgA = originalOrder.get(a);
const orgB = originalOrder.get(b);
return orgA - orgB;
});
}
);
compilation.hooks.optimizeChunkOrder.tap(
"OccurrenceOrderPlugin",
chunks => {
const occursInInitialChunksMap = new Map();
const originalOrder = new Map();
let i = 0;
for (const c of chunks) {
let occurs = 0;
for (const chunkGroup of c.groupsIterable) {
for (const parent of chunkGroup.parentsIterable) {
if (parent.isInitial()) occurs++;
}
}
occursInInitialChunksMap.set(c, occurs);
originalOrder.set(c, i++);
}
chunks.sort((a, b) => {
const aEntryOccurs = occursInInitialChunksMap.get(a);
const bEntryOccurs = occursInInitialChunksMap.get(b);
if (aEntryOccurs > bEntryOccurs) return -1;
if (aEntryOccurs < bEntryOccurs) return 1;
const aOccurs = a.getNumberOfGroups();
const bOccurs = b.getNumberOfGroups();
if (aOccurs > bOccurs) return -1;
if (aOccurs < bOccurs) return 1;
const orgA = originalOrder.get(a);
const orgB = originalOrder.get(b);
return orgA - orgB;
});
}
);
});
}
}
module.exports = OccurrenceOrderPlugin;
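A usage sketch for this deprecated combined plugin (the classic webpack 2/3-era form; the boolean argument is preferEntry):
// usage sketch, not part of the file above
const webpack = require("webpack");
module.exports = {
  // ...
  plugins: [
    // true: additionally weight occurrences in initial chunks
    new webpack.optimize.OccurrenceOrderPlugin(true)
  ]
};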


@@ -0,0 +1,42 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
class RemoveEmptyChunksPlugin {
apply(compiler) {
compiler.hooks.compilation.tap("RemoveEmptyChunksPlugin", compilation => {
const handler = chunks => {
for (let i = chunks.length - 1; i >= 0; i--) {
const chunk = chunks[i];
if (
chunk.isEmpty() &&
!chunk.hasRuntime() &&
!chunk.hasEntryModule()
) {
chunk.remove("empty");
chunks.splice(i, 1);
}
}
};
compilation.hooks.optimizeChunksBasic.tap(
"RemoveEmptyChunksPlugin",
handler
);
compilation.hooks.optimizeChunksAdvanced.tap(
"RemoveEmptyChunksPlugin",
handler
);
compilation.hooks.optimizeExtractedChunksBasic.tap(
"RemoveEmptyChunksPlugin",
handler
);
compilation.hooks.optimizeExtractedChunksAdvanced.tap(
"RemoveEmptyChunksPlugin",
handler
);
});
}
}
module.exports = RemoveEmptyChunksPlugin;
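Applied internally by webpack 4 and toggled through configuration, roughly:
// configuration sketch, not part of the file above (assumes webpack 4)
module.exports = {
  // ...
  optimization: {
    removeEmptyChunks: true // the default; drops empty non-runtime chunks
  }
};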


@@ -0,0 +1,127 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const Queue = require("../util/Queue");
const { intersect } = require("../util/SetHelpers");
const getParentChunksWithModule = (currentChunk, module) => {
const chunks = [];
const stack = new Set(currentChunk.parentsIterable);
for (const chunk of stack) {
if (chunk.containsModule(module)) {
chunks.push(chunk);
} else {
for (const parent of chunk.parentsIterable) {
stack.add(parent);
}
}
}
return chunks;
};
class RemoveParentModulesPlugin {
apply(compiler) {
compiler.hooks.compilation.tap("RemoveParentModulesPlugin", compilation => {
const handler = (chunks, chunkGroups) => {
const queue = new Queue();
const availableModulesMap = new WeakMap();
for (const chunkGroup of compilation.entrypoints.values()) {
// initialize available modules for chunks without parents
availableModulesMap.set(chunkGroup, new Set());
for (const child of chunkGroup.childrenIterable) {
queue.enqueue(child);
}
}
while (queue.length > 0) {
const chunkGroup = queue.dequeue();
let availableModules = availableModulesMap.get(chunkGroup);
let changed = false;
for (const parent of chunkGroup.parentsIterable) {
const availableModulesInParent = availableModulesMap.get(parent);
if (availableModulesInParent !== undefined) {
// If we know the available modules in parent: process these
if (availableModules === undefined) {
// if we don't have our own info yet: create a new entry
availableModules = new Set(availableModulesInParent);
for (const chunk of parent.chunks) {
for (const m of chunk.modulesIterable) {
availableModules.add(m);
}
}
availableModulesMap.set(chunkGroup, availableModules);
changed = true;
} else {
for (const m of availableModules) {
if (
!parent.containsModule(m) &&
!availableModulesInParent.has(m)
) {
availableModules.delete(m);
changed = true;
}
}
}
}
}
if (changed) {
// if something changed: enqueue our children
for (const child of chunkGroup.childrenIterable) {
queue.enqueue(child);
}
}
}
// now we have available modules for every chunk
for (const chunk of chunks) {
const availableModulesSets = Array.from(
chunk.groupsIterable,
chunkGroup => availableModulesMap.get(chunkGroup)
);
if (availableModulesSets.some(s => s === undefined)) continue; // No info about this chunk group
const availableModules =
availableModulesSets.length === 1
? availableModulesSets[0]
: intersect(availableModulesSets);
const numberOfModules = chunk.getNumberOfModules();
const toRemove = new Set();
if (numberOfModules < availableModules.size) {
for (const m of chunk.modulesIterable) {
if (availableModules.has(m)) {
toRemove.add(m);
}
}
} else {
for (const m of availableModules) {
if (chunk.containsModule(m)) {
toRemove.add(m);
}
}
}
for (const module of toRemove) {
module.rewriteChunkInReasons(
chunk,
getParentChunksWithModule(chunk, module)
);
chunk.removeModule(module);
}
}
};
compilation.hooks.optimizeChunksBasic.tap(
"RemoveParentModulesPlugin",
handler
);
compilation.hooks.optimizeExtractedChunksBasic.tap(
"RemoveParentModulesPlugin",
handler
);
});
}
}
module.exports = RemoveParentModulesPlugin;
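Applied internally by webpack 4; as a sketch, this pass corresponds to the removeAvailableModules optimization flag:
// configuration sketch, not part of the file above (assumes webpack 4)
module.exports = {
  // ...
  optimization: {
    removeAvailableModules: true // drop modules already guaranteed by all parents
  }
};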


@@ -0,0 +1,41 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
module.exports = class RuntimeChunkPlugin {
constructor(options) {
this.options = Object.assign(
{
name: entrypoint => `runtime~${entrypoint.name}`
},
options
);
}
apply(compiler) {
compiler.hooks.thisCompilation.tap("RuntimeChunkPlugin", compilation => {
compilation.hooks.optimizeChunksAdvanced.tap("RuntimeChunkPlugin", () => {
for (const entrypoint of compilation.entrypoints.values()) {
const chunk = entrypoint.getRuntimeChunk();
let name = this.options.name;
if (typeof name === "function") {
name = name(entrypoint);
}
if (
chunk.getNumberOfModules() > 0 ||
!chunk.preventIntegration ||
chunk.name !== name
) {
const newChunk = compilation.addChunk(name);
newChunk.preventIntegration = true;
entrypoint.unshiftChunk(newChunk);
newChunk.addGroup(entrypoint);
entrypoint.setRuntimeChunk(newChunk);
}
}
});
});
}
};
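webpack 4 wires this plugin up from optimization.runtimeChunk; a configuration sketch:
// configuration sketch, not part of the file above (assumes webpack 4)
module.exports = {
  // ...
  optimization: {
    // true / "multiple": one runtime chunk per entrypoint (the default
    // `runtime~${entrypoint.name}` naming above); "single": one shared
    // runtime chunk; or pass { name } with a string or function directly
    runtimeChunk: "single"
  }
};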


@@ -0,0 +1,352 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const mm = require("micromatch");
const HarmonyExportImportedSpecifierDependency = require("../dependencies/HarmonyExportImportedSpecifierDependency");
const HarmonyImportSideEffectDependency = require("../dependencies/HarmonyImportSideEffectDependency");
const HarmonyImportSpecifierDependency = require("../dependencies/HarmonyImportSpecifierDependency");
/** @typedef {import("../Module")} Module */
/** @typedef {import("../Dependency")} Dependency */
/**
* @typedef {Object} ExportInModule
* @property {Module} module the module
* @property {string} exportName the name of the export
* @property {boolean} checked if the export is conditional
*/
/**
* @typedef {Object} ReexportInfo
* @property {Map<string, ExportInModule[]>} static
* @property {Map<Module, Set<string>>} dynamic
*/
/**
* @param {ReexportInfo} info info object
* @param {string} exportName name of export
* @returns {ExportInModule | undefined} static export
*/
const getMappingFromInfo = (info, exportName) => {
const staticMappings = info.static.get(exportName);
if (staticMappings !== undefined) {
if (staticMappings.length === 1) return staticMappings[0];
return undefined;
}
const dynamicMappings = Array.from(info.dynamic).filter(
([_, ignored]) => !ignored.has(exportName)
);
if (dynamicMappings.length === 1) {
return {
module: dynamicMappings[0][0],
exportName,
checked: true
};
}
return undefined;
};
/**
* @param {ReexportInfo} info info object
* @param {string} exportName name of export of source module
* @param {Module} module the target module
* @param {string} innerExportName name of export of target module
* @param {boolean} checked true, if existence of target module is checked
*/
const addStaticReexport = (
info,
exportName,
module,
innerExportName,
checked
) => {
let mappings = info.static.get(exportName);
if (mappings !== undefined) {
for (const mapping of mappings) {
if (mapping.module === module && mapping.exportName === innerExportName) {
mapping.checked = mapping.checked && checked;
return;
}
}
} else {
mappings = [];
info.static.set(exportName, mappings);
}
mappings.push({
module,
exportName: innerExportName,
checked
});
};
/**
* @param {ReexportInfo} info info object
* @param {Module} module the reexport module
* @param {Set<string>} ignored ignore list
* @returns {void}
*/
const addDynamicReexport = (info, module, ignored) => {
const existingList = info.dynamic.get(module);
if (existingList !== undefined) {
for (const key of existingList) {
if (!ignored.has(key)) existingList.delete(key);
}
} else {
info.dynamic.set(module, new Set(ignored));
}
};
class SideEffectsFlagPlugin {
apply(compiler) {
compiler.hooks.normalModuleFactory.tap("SideEffectsFlagPlugin", nmf => {
nmf.hooks.module.tap("SideEffectsFlagPlugin", (module, data) => {
const resolveData = data.resourceResolveData;
if (
resolveData &&
resolveData.descriptionFileData &&
resolveData.relativePath
) {
const sideEffects = resolveData.descriptionFileData.sideEffects;
const hasSideEffects = SideEffectsFlagPlugin.moduleHasSideEffects(
resolveData.relativePath,
sideEffects
);
if (!hasSideEffects) {
module.factoryMeta.sideEffectFree = true;
}
}
return module;
});
nmf.hooks.module.tap("SideEffectsFlagPlugin", (module, data) => {
if (data.settings.sideEffects === false) {
module.factoryMeta.sideEffectFree = true;
} else if (data.settings.sideEffects === true) {
module.factoryMeta.sideEffectFree = false;
}
});
});
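// For context (sketch): the `sideEffects` value read above comes from
// the package's package.json, e.g.
//   { "sideEffects": false }                  // whole package is pure
//   { "sideEffects": ["./src/polyfills.js"] } // only the listed files are impure
// while `data.settings.sideEffects` comes from a module rule such as
//   module: { rules: [{ test: /\.js$/, sideEffects: false }] }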
compiler.hooks.compilation.tap("SideEffectsFlagPlugin", compilation => {
compilation.hooks.optimizeDependencies.tap(
"SideEffectsFlagPlugin",
modules => {
/** @type {Map<Module, ReexportInfo>} */
const reexportMaps = new Map();
// Capture reexports of sideEffectFree modules
for (const module of modules) {
/** @type {Dependency[]} */
const removeDependencies = [];
for (const dep of module.dependencies) {
if (dep instanceof HarmonyImportSideEffectDependency) {
if (dep.module && dep.module.factoryMeta.sideEffectFree) {
removeDependencies.push(dep);
}
} else if (
dep instanceof HarmonyExportImportedSpecifierDependency
) {
if (module.factoryMeta.sideEffectFree) {
const mode = dep.getMode(true);
if (
mode.type === "safe-reexport" ||
mode.type === "checked-reexport" ||
mode.type === "dynamic-reexport" ||
mode.type === "reexport-non-harmony-default" ||
mode.type === "reexport-non-harmony-default-strict" ||
mode.type === "reexport-named-default"
) {
let info = reexportMaps.get(module);
if (!info) {
reexportMaps.set(
module,
(info = {
static: new Map(),
dynamic: new Map()
})
);
}
const targetModule = dep._module;
switch (mode.type) {
case "safe-reexport":
for (const [key, id] of mode.map) {
if (id) {
addStaticReexport(
info,
key,
targetModule,
id,
false
);
}
}
break;
case "checked-reexport":
for (const [key, id] of mode.map) {
if (id) {
addStaticReexport(
info,
key,
targetModule,
id,
true
);
}
}
break;
case "dynamic-reexport":
addDynamicReexport(info, targetModule, mode.ignored);
break;
case "reexport-non-harmony-default":
case "reexport-non-harmony-default-strict":
case "reexport-named-default":
addStaticReexport(
info,
mode.name,
targetModule,
"default",
false
);
break;
}
}
}
}
}
}
// Flatten reexports: resolve chains of reexports down to their final target modules
for (const info of reexportMaps.values()) {
const dynamicReexports = info.dynamic;
info.dynamic = new Map();
for (const reexport of dynamicReexports) {
let [targetModule, ignored] = reexport;
for (;;) {
const innerInfo = reexportMaps.get(targetModule);
if (!innerInfo) break;
for (const [key, reexports] of innerInfo.static) {
if (ignored.has(key)) continue;
for (const { module, exportName, checked } of reexports) {
addStaticReexport(info, key, module, exportName, checked);
}
}
// Follow dynamic reexport if there is only one
if (innerInfo.dynamic.size !== 1) {
// When there is more than one, we don't know which one
break;
}
ignored = new Set(ignored);
for (const [innerModule, innerIgnored] of innerInfo.dynamic) {
for (const key of innerIgnored) {
if (ignored.has(key)) continue;
// This reexport chain ends here
addStaticReexport(info, key, targetModule, key, true);
ignored.add(key);
}
targetModule = innerModule;
}
}
// Update the dynamic reexport, as all other cases have been handled
addDynamicReexport(info, targetModule, ignored);
}
}
for (const info of reexportMaps.values()) {
const staticReexports = info.static;
info.static = new Map();
for (const [key, reexports] of staticReexports) {
for (let mapping of reexports) {
for (;;) {
const innerInfo = reexportMaps.get(mapping.module);
if (!innerInfo) break;
const newMapping = getMappingFromInfo(
innerInfo,
mapping.exportName
);
if (!newMapping) break;
mapping = newMapping;
}
addStaticReexport(
info,
key,
mapping.module,
mapping.exportName,
mapping.checked
);
}
}
}
// Update imports along the reexports from sideEffectFree modules
for (const pair of reexportMaps) {
const module = pair[0];
const info = pair[1];
let newReasons = undefined;
for (let i = 0; i < module.reasons.length; i++) {
const reason = module.reasons[i];
const dep = reason.dependency;
if (
(dep instanceof HarmonyExportImportedSpecifierDependency ||
(dep instanceof HarmonyImportSpecifierDependency &&
!dep.namespaceObjectAsContext)) &&
dep._id
) {
const mapping = getMappingFromInfo(info, dep._id);
if (mapping) {
dep.redirectedModule = mapping.module;
dep.redirectedId = mapping.exportName;
mapping.module.addReason(
reason.module,
dep,
reason.explanation
? reason.explanation +
" (skipped side-effect-free modules)"
: "(skipped side-effect-free modules)"
);
// remove the current reason by not adding it to the newReasons array
// (the newReasons array is created lazily)
if (newReasons === undefined) {
newReasons = i === 0 ? [] : module.reasons.slice(0, i);
}
continue;
}
}
if (newReasons !== undefined) newReasons.push(reason);
}
if (newReasons !== undefined) {
module.reasons = newReasons;
}
}
}
);
});
}
static moduleHasSideEffects(moduleName, flagValue) {
switch (typeof flagValue) {
case "undefined":
return true;
case "boolean":
return flagValue;
case "string":
if (process.platform === "win32") {
flagValue = flagValue.replace(/\\/g, "/");
}
return mm.isMatch(moduleName, flagValue, {
matchBase: true
});
case "object":
return flagValue.some(glob =>
SideEffectsFlagPlugin.moduleHasSideEffects(moduleName, glob)
);
}
}
}
module.exports = SideEffectsFlagPlugin;
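// Usage sketch (illustrative, not part of the original source): the static
// helper mirrors the package.json "sideEffects" field semantics via micromatch:
//
//   SideEffectsFlagPlugin.moduleHasSideEffects("./src/x.js", undefined); // true (no flag set)
//   SideEffectsFlagPlugin.moduleHasSideEffects("./src/x.js", false); // false (whole package is side-effect-free)
//   SideEffectsFlagPlugin.moduleHasSideEffects("./src/x.js", "*.css"); // false (basename does not match)
//   SideEffectsFlagPlugin.moduleHasSideEffects("./src/x.js", ["*.css", "./src/x.js"]); // true (listed explicitly)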

View file

@ -0,0 +1,968 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const crypto = require("crypto");
const SortableSet = require("../util/SortableSet");
const GraphHelpers = require("../GraphHelpers");
const { isSubset } = require("../util/SetHelpers");
const deterministicGrouping = require("../util/deterministicGrouping");
const MinMaxSizeWarning = require("./MinMaxSizeWarning");
const contextify = require("../util/identifier").contextify;
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Module")} Module */
/** @typedef {import("../util/deterministicGrouping").Options<Module>} DeterministicGroupingOptionsForModule */
/** @typedef {import("../util/deterministicGrouping").GroupedItems<Module>} DeterministicGroupingGroupedItemsForModule */
const deterministicGroupingForModules = /** @type {function(DeterministicGroupingOptionsForModule): DeterministicGroupingGroupedItemsForModule[]} */ (deterministicGrouping);
const hashFilename = name => {
return crypto
.createHash("md4")
.update(name)
.digest("hex")
.slice(0, 8);
};
const sortByIdentifier = (a, b) => {
if (a.identifier() > b.identifier()) return 1;
if (a.identifier() < b.identifier()) return -1;
return 0;
};
const getRequests = chunk => {
let requests = 0;
for (const chunkGroup of chunk.groupsIterable) {
requests = Math.max(requests, chunkGroup.chunks.length);
}
return requests;
};
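// Note (added for clarity): this is the maximal number of chunks (i.e.
// parallel requests) in any chunk group the given chunk belongs to.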
const getModulesSize = modules => {
let sum = 0;
for (const m of modules) {
sum += m.size();
}
return sum;
};
/**
* @template T
* @param {Set<T>} a set
* @param {Set<T>} b other set
* @returns {boolean} true if at least one item of a is in b
*/
const isOverlap = (a, b) => {
for (const item of a) {
if (b.has(item)) return true;
}
return false;
};
const compareEntries = (a, b) => {
// 1. by priority
const diffPriority = a.cacheGroup.priority - b.cacheGroup.priority;
if (diffPriority) return diffPriority;
// 2. by number of chunks
const diffCount = a.chunks.size - b.chunks.size;
if (diffCount) return diffCount;
// 3. by size reduction
const aSizeReduce = a.size * (a.chunks.size - 1);
const bSizeReduce = b.size * (b.chunks.size - 1);
const diffSizeReduce = aSizeReduce - bSizeReduce;
if (diffSizeReduce) return diffSizeReduce;
// 4. by cache group index
const indexDiff = a.cacheGroupIndex - b.cacheGroupIndex;
if (indexDiff) return indexDiff;
// 5. by number of modules (to be able to compare by identifier)
const modulesA = a.modules;
const modulesB = b.modules;
const diff = modulesA.size - modulesB.size;
if (diff) return diff;
// 6. by module identifiers
modulesA.sort();
modulesB.sort();
const aI = modulesA[Symbol.iterator]();
const bI = modulesB[Symbol.iterator]();
// eslint-disable-next-line no-constant-condition
while (true) {
const aItem = aI.next();
const bItem = bI.next();
if (aItem.done) return 0;
const aModuleIdentifier = aItem.value.identifier();
const bModuleIdentifier = bItem.value.identifier();
if (aModuleIdentifier > bModuleIdentifier) return -1;
if (aModuleIdentifier < bModuleIdentifier) return 1;
}
};
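// Note (added for clarity): a positive result means `a` ranks higher as a
// split candidate; the selection loop below keeps the highest-ranked entry.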
const compareNumbers = (a, b) => a - b;
const INITIAL_CHUNK_FILTER = chunk => chunk.canBeInitial();
const ASYNC_CHUNK_FILTER = chunk => !chunk.canBeInitial();
const ALL_CHUNK_FILTER = chunk => true;
module.exports = class SplitChunksPlugin {
constructor(options) {
this.options = SplitChunksPlugin.normalizeOptions(options);
}
static normalizeOptions(options = {}) {
return {
chunksFilter: SplitChunksPlugin.normalizeChunksFilter(
options.chunks || "all"
),
minSize: options.minSize || 0,
enforceSizeThreshold: options.enforceSizeThreshold || 0,
maxSize: options.maxSize || 0,
minChunks: options.minChunks || 1,
maxAsyncRequests: options.maxAsyncRequests || 1,
maxInitialRequests: options.maxInitialRequests || 1,
hidePathInfo: options.hidePathInfo || false,
filename: options.filename || undefined,
getCacheGroups: SplitChunksPlugin.normalizeCacheGroups({
cacheGroups: options.cacheGroups,
name: options.name,
automaticNameDelimiter: options.automaticNameDelimiter,
automaticNameMaxLength: options.automaticNameMaxLength
}),
automaticNameDelimiter: options.automaticNameDelimiter,
automaticNameMaxLength: options.automaticNameMaxLength || 109,
fallbackCacheGroup: SplitChunksPlugin.normalizeFallbackCacheGroup(
options.fallbackCacheGroup || {},
options
)
};
}
static normalizeName({
name,
automaticNameDelimiter,
automaticNamePrefix,
automaticNameMaxLength
}) {
if (name === true) {
/** @type {WeakMap<Chunk[], Record<string, string>>} */
const cache = new WeakMap();
const fn = (module, chunks, cacheGroup) => {
let cacheEntry = cache.get(chunks);
if (cacheEntry === undefined) {
cacheEntry = {};
cache.set(chunks, cacheEntry);
} else if (cacheGroup in cacheEntry) {
return cacheEntry[cacheGroup];
}
const names = chunks.map(c => c.name);
if (!names.every(Boolean)) {
cacheEntry[cacheGroup] = undefined;
return;
}
names.sort();
const prefix =
typeof automaticNamePrefix === "string"
? automaticNamePrefix
: cacheGroup;
const namePrefix = prefix ? prefix + automaticNameDelimiter : "";
let name = namePrefix + names.join(automaticNameDelimiter);
// Filenames and paths can't be too long otherwise an
// ENAMETOOLONG error is raised. If the generated name is too
// long, it is truncated and a hash is appended. The limit has
// been set to 109 to prevent `[name].[chunkhash].[ext]` from
// generating a 256+ character string.
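// For example (added for clarity), with the default automaticNameMaxLength
// of 109 and a "~" delimiter, a 150-character name is cut to its first 100
// characters plus "~" plus an 8-character hash: 100 + 1 + 8 = 109 total.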
if (name.length > automaticNameMaxLength) {
const hashedFilename = hashFilename(name);
const sliceLength =
automaticNameMaxLength -
(automaticNameDelimiter.length + hashedFilename.length);
name =
name.slice(0, sliceLength) +
automaticNameDelimiter +
hashedFilename;
}
cacheEntry[cacheGroup] = name;
return name;
};
return fn;
}
if (typeof name === "string") {
const fn = () => {
return name;
};
return fn;
}
if (typeof name === "function") return name;
}
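// Illustrative sketch (added for clarity, not part of the original source):
// with name === true, a cache group key "vendors", delimiter "~", and chunks
// named "app" and "admin", the generated function yields "vendors~admin~app"
// (names are sorted before joining); if any chunk is unnamed it yields
// undefined and no shared name is assigned.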
static normalizeChunksFilter(chunks) {
if (chunks === "initial") {
return INITIAL_CHUNK_FILTER;
}
if (chunks === "async") {
return ASYNC_CHUNK_FILTER;
}
if (chunks === "all") {
return ALL_CHUNK_FILTER;
}
if (typeof chunks === "function") return chunks;
}
static normalizeFallbackCacheGroup(
{
minSize = undefined,
maxSize = undefined,
automaticNameDelimiter = undefined
},
{
minSize: defaultMinSize = undefined,
maxSize: defaultMaxSize = undefined,
automaticNameDelimiter: defaultAutomaticNameDelimiter = undefined
}
) {
return {
minSize: typeof minSize === "number" ? minSize : defaultMinSize || 0,
maxSize: typeof maxSize === "number" ? maxSize : defaultMaxSize || 0,
automaticNameDelimiter:
automaticNameDelimiter || defaultAutomaticNameDelimiter || "~"
};
}
static normalizeCacheGroups({
cacheGroups,
name,
automaticNameDelimiter,
automaticNameMaxLength
}) {
if (typeof cacheGroups === "function") {
// TODO webpack 5 remove this
if (cacheGroups.length !== 1) {
return module => cacheGroups(module, module.getChunks());
}
return cacheGroups;
}
if (cacheGroups && typeof cacheGroups === "object") {
const fn = module => {
let results;
for (const key of Object.keys(cacheGroups)) {
let option = cacheGroups[key];
if (option === false) continue;
if (option instanceof RegExp || typeof option === "string") {
option = {
test: option
};
}
if (typeof option === "function") {
let result = option(module);
if (result) {
if (results === undefined) results = [];
for (const r of Array.isArray(result) ? result : [result]) {
const result = Object.assign({ key }, r);
if (result.name) result.getName = () => result.name;
if (result.chunks) {
result.chunksFilter = SplitChunksPlugin.normalizeChunksFilter(
result.chunks
);
}
results.push(result);
}
}
} else if (SplitChunksPlugin.checkTest(option.test, module)) {
if (results === undefined) results = [];
results.push({
key: key,
priority: option.priority,
getName:
SplitChunksPlugin.normalizeName({
name: option.name || name,
automaticNameDelimiter:
typeof option.automaticNameDelimiter === "string"
? option.automaticNameDelimiter
: automaticNameDelimiter,
automaticNamePrefix: option.automaticNamePrefix,
automaticNameMaxLength:
option.automaticNameMaxLength || automaticNameMaxLength
}) || (() => {}),
chunksFilter: SplitChunksPlugin.normalizeChunksFilter(
option.chunks
),
enforce: option.enforce,
minSize: option.minSize,
enforceSizeThreshold: option.enforceSizeThreshold,
maxSize: option.maxSize,
minChunks: option.minChunks,
maxAsyncRequests: option.maxAsyncRequests,
maxInitialRequests: option.maxInitialRequests,
filename: option.filename,
reuseExistingChunk: option.reuseExistingChunk
});
}
}
return results;
};
return fn;
}
const fn = () => {};
return fn;
}
static checkTest(test, module) {
if (test === undefined) return true;
if (typeof test === "function") {
if (test.length !== 1) {
return test(module, module.getChunks());
}
return test(module);
}
if (typeof test === "boolean") return test;
if (typeof test === "string") {
if (
module.nameForCondition &&
module.nameForCondition().startsWith(test)
) {
return true;
}
for (const chunk of module.chunksIterable) {
if (chunk.name && chunk.name.startsWith(test)) {
return true;
}
}
return false;
}
if (test instanceof RegExp) {
if (module.nameForCondition && test.test(module.nameForCondition())) {
return true;
}
for (const chunk of module.chunksIterable) {
if (chunk.name && test.test(chunk.name)) {
return true;
}
}
return false;
}
return false;
}
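// Illustrative sketch (added for clarity, not part of the original source):
// a typical vendor test matches on the module's resource path, e.g.
//
//   SplitChunksPlugin.checkTest(/[\\/]node_modules[\\/]/, module);
//
// returns true when module.nameForCondition() points into node_modules, or
// when any chunk containing the module has a matching name.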
/**
* @param {Compiler} compiler webpack compiler
* @returns {void}
*/
apply(compiler) {
compiler.hooks.thisCompilation.tap("SplitChunksPlugin", compilation => {
let alreadyOptimized = false;
compilation.hooks.unseal.tap("SplitChunksPlugin", () => {
alreadyOptimized = false;
});
compilation.hooks.optimizeChunksAdvanced.tap(
"SplitChunksPlugin",
chunks => {
if (alreadyOptimized) return;
alreadyOptimized = true;
// Give each selected chunk an index (to create strings from chunks)
const indexMap = new Map();
let index = 1;
for (const chunk of chunks) {
indexMap.set(chunk, index++);
}
const getKey = chunks => {
return Array.from(chunks, c => indexMap.get(c))
.sort(compareNumbers)
.join();
};
/** @type {Map<string, Set<Chunk>>} */
const chunkSetsInGraph = new Map();
for (const module of compilation.modules) {
const chunksKey = getKey(module.chunksIterable);
if (!chunkSetsInGraph.has(chunksKey)) {
chunkSetsInGraph.set(chunksKey, new Set(module.chunksIterable));
}
}
// group these sets of chunks by count
// to allow checking fewer sets via isSubset
// (only a smaller set can be a subset)
/** @type {Map<number, Array<Set<Chunk>>>} */
const chunkSetsByCount = new Map();
for (const chunksSet of chunkSetsInGraph.values()) {
const count = chunksSet.size;
let array = chunkSetsByCount.get(count);
if (array === undefined) {
array = [];
chunkSetsByCount.set(count, array);
}
array.push(chunksSet);
}
// Create a list of possible combinations
const combinationsCache = new Map(); // Map<string, Set<Chunk>[]>
const getCombinations = key => {
const chunksSet = chunkSetsInGraph.get(key);
const array = [chunksSet];
if (chunksSet.size > 1) {
for (const [count, setArray] of chunkSetsByCount) {
// "equal" is not needed because they would have been merge in the first step
if (count < chunksSet.size) {
for (const set of setArray) {
if (isSubset(chunksSet, set)) {
array.push(set);
}
}
}
}
}
return array;
};
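// Illustrative sketch (added for clarity, not part of the original source):
// if a module lives in chunks {a, b, c} and the graph also recorded the sets
// {a, b} and {c}, getCombinations returns [{a, b, c}, {a, b}, {c}]: the set
// itself plus every strictly smaller recorded subset, each one a candidate
// chunk selection for a cache group.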
/**
* @typedef {Object} SelectedChunksResult
* @property {Chunk[]} chunks the list of chunks
* @property {string} key a key of the list
*/
/**
* @typedef {function(Chunk): boolean} ChunkFilterFunction
*/
/** @type {WeakMap<Set<Chunk>, WeakMap<ChunkFilterFunction, SelectedChunksResult>>} */
const selectedChunksCacheByChunksSet = new WeakMap();
/**
* Get the filtered chunk list and its key by applying the filter function.
* The result is cached for performance reasons.
* @param {Set<Chunk>} chunks list of chunks
* @param {ChunkFilterFunction} chunkFilter filter function for chunks
* @returns {SelectedChunksResult} list and key
*/
const getSelectedChunks = (chunks, chunkFilter) => {
let entry = selectedChunksCacheByChunksSet.get(chunks);
if (entry === undefined) {
entry = new WeakMap();
selectedChunksCacheByChunksSet.set(chunks, entry);
}
/** @type {SelectedChunksResult} */
let entry2 = entry.get(chunkFilter);
if (entry2 === undefined) {
/** @type {Chunk[]} */
const selectedChunks = [];
for (const chunk of chunks) {
if (chunkFilter(chunk)) selectedChunks.push(chunk);
}
entry2 = {
chunks: selectedChunks,
key: getKey(selectedChunks)
};
entry.set(chunkFilter, entry2);
}
return entry2;
};
/**
* @typedef {Object} ChunksInfoItem
* @property {SortableSet} modules
* @property {TODO} cacheGroup
* @property {number} cacheGroupIndex
* @property {string} name
* @property {number} size
* @property {Set<Chunk>} chunks
* @property {Set<Chunk>} reuseableChunks
* @property {Set<string>} chunksKeys
*/
// Map a list of chunks to a list of modules
// The key is built from the chunk indexes; the value is a SortableSet of modules
/** @type {Map<string, ChunksInfoItem>} */
const chunksInfoMap = new Map();
/**
* @param {TODO} cacheGroup the current cache group
* @param {number} cacheGroupIndex the index of the cache group of ordering
* @param {Chunk[]} selectedChunks chunks selected for this module
* @param {string} selectedChunksKey a key of selectedChunks
* @param {Module} module the current module
* @returns {void}
*/
const addModuleToChunksInfoMap = (
cacheGroup,
cacheGroupIndex,
selectedChunks,
selectedChunksKey,
module
) => {
// Break if minimum number of chunks is not reached
if (selectedChunks.length < cacheGroup.minChunks) return;
// Determine name for split chunk
const name = cacheGroup.getName(
module,
selectedChunks,
cacheGroup.key
);
// Create key for maps
// When it has a name, we use the name as the key
// Otherwise we create the key from the chunks and the cache group key
// This automatically merges equal names
const key =
cacheGroup.key +
(name ? ` name:${name}` : ` chunks:${selectedChunksKey}`);
// Add module to maps
let info = chunksInfoMap.get(key);
if (info === undefined) {
chunksInfoMap.set(
key,
(info = {
modules: new SortableSet(undefined, sortByIdentifier),
cacheGroup,
cacheGroupIndex,
name,
size: 0,
chunks: new Set(),
reuseableChunks: new Set(),
chunksKeys: new Set()
})
);
}
info.modules.add(module);
info.size += module.size();
if (!info.chunksKeys.has(selectedChunksKey)) {
info.chunksKeys.add(selectedChunksKey);
for (const chunk of selectedChunks) {
info.chunks.add(chunk);
}
}
};
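// Illustrative sketch (added for clarity, not part of the original source):
// for a cache group keyed "vendors" the map key is "vendors name:vendors~app"
// when a name was derived, or "vendors chunks:1,3" (comma-joined chunk
// indexes) when it was not, so equal names automatically share one entry.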
// Walk through all modules
for (const module of compilation.modules) {
// Get cache group
let cacheGroups = this.options.getCacheGroups(module);
if (!Array.isArray(cacheGroups) || cacheGroups.length === 0) {
continue;
}
// Prepare some values
const chunksKey = getKey(module.chunksIterable);
let combs = combinationsCache.get(chunksKey);
if (combs === undefined) {
combs = getCombinations(chunksKey);
combinationsCache.set(chunksKey, combs);
}
let cacheGroupIndex = 0;
for (const cacheGroupSource of cacheGroups) {
const minSize =
cacheGroupSource.minSize !== undefined
? cacheGroupSource.minSize
: cacheGroupSource.enforce
? 0
: this.options.minSize;
const enforceSizeThreshold =
cacheGroupSource.enforceSizeThreshold !== undefined
? cacheGroupSource.enforceSizeThreshold
: cacheGroupSource.enforce
? 0
: this.options.enforceSizeThreshold;
const cacheGroup = {
key: cacheGroupSource.key,
priority: cacheGroupSource.priority || 0,
chunksFilter:
cacheGroupSource.chunksFilter || this.options.chunksFilter,
minSize,
minSizeForMaxSize:
cacheGroupSource.minSize !== undefined
? cacheGroupSource.minSize
: this.options.minSize,
enforceSizeThreshold,
maxSize:
cacheGroupSource.maxSize !== undefined
? cacheGroupSource.maxSize
: cacheGroupSource.enforce
? 0
: this.options.maxSize,
minChunks:
cacheGroupSource.minChunks !== undefined
? cacheGroupSource.minChunks
: cacheGroupSource.enforce
? 1
: this.options.minChunks,
maxAsyncRequests:
cacheGroupSource.maxAsyncRequests !== undefined
? cacheGroupSource.maxAsyncRequests
: cacheGroupSource.enforce
? Infinity
: this.options.maxAsyncRequests,
maxInitialRequests:
cacheGroupSource.maxInitialRequests !== undefined
? cacheGroupSource.maxInitialRequests
: cacheGroupSource.enforce
? Infinity
: this.options.maxInitialRequests,
getName:
cacheGroupSource.getName !== undefined
? cacheGroupSource.getName
: this.options.getName,
filename:
cacheGroupSource.filename !== undefined
? cacheGroupSource.filename
: this.options.filename,
automaticNameDelimiter:
cacheGroupSource.automaticNameDelimiter !== undefined
? cacheGroupSource.automaticNameDelimiter
: this.options.automaticNameDelimiter,
reuseExistingChunk: cacheGroupSource.reuseExistingChunk,
_validateSize: minSize > 0,
_conditionalEnforce: enforceSizeThreshold > 0
};
// For all combination of chunk selection
for (const chunkCombination of combs) {
// Break if minimum number of chunks is not reached
if (chunkCombination.size < cacheGroup.minChunks) continue;
// Select chunks by configuration
const {
chunks: selectedChunks,
key: selectedChunksKey
} = getSelectedChunks(
chunkCombination,
cacheGroup.chunksFilter
);
addModuleToChunksInfoMap(
cacheGroup,
cacheGroupIndex,
selectedChunks,
selectedChunksKey,
module
);
}
cacheGroupIndex++;
}
}
// Filter out items where size < minSize
for (const pair of chunksInfoMap) {
const info = pair[1];
if (
info.cacheGroup._validateSize &&
info.size < info.cacheGroup.minSize
) {
chunksInfoMap.delete(pair[0]);
}
}
/** @type {Map<Chunk, {minSize: number, maxSize: number, automaticNameDelimiter: string, keys: string[]}>} */
const maxSizeQueueMap = new Map();
while (chunksInfoMap.size > 0) {
// Find best matching entry
let bestEntryKey;
let bestEntry;
for (const pair of chunksInfoMap) {
const key = pair[0];
const info = pair[1];
if (bestEntry === undefined) {
bestEntry = info;
bestEntryKey = key;
} else if (compareEntries(bestEntry, info) < 0) {
bestEntry = info;
bestEntryKey = key;
}
}
const item = bestEntry;
chunksInfoMap.delete(bestEntryKey);
let chunkName = item.name;
// Variable for the new chunk (lazy created)
/** @type {Chunk} */
let newChunk;
// When reuseExistingChunk is enabled, check if an existing chunk can be reused instead of creating a new one
let isReused = false;
if (item.cacheGroup.reuseExistingChunk) {
outer: for (const chunk of item.chunks) {
if (chunk.getNumberOfModules() !== item.modules.size) continue;
if (chunk.hasEntryModule()) continue;
for (const module of item.modules) {
if (!chunk.containsModule(module)) continue outer;
}
if (!newChunk || !newChunk.name) {
newChunk = chunk;
} else if (
chunk.name &&
chunk.name.length < newChunk.name.length
) {
newChunk = chunk;
} else if (
chunk.name &&
chunk.name.length === newChunk.name.length &&
chunk.name < newChunk.name
) {
newChunk = chunk;
}
chunkName = undefined;
isReused = true;
}
}
// Check if maxRequests condition can be fulfilled
const selectedChunks = Array.from(item.chunks).filter(chunk => {
// skip if we would address ourselves
return (
(!chunkName || chunk.name !== chunkName) && chunk !== newChunk
);
});
const enforced =
item.cacheGroup._conditionalEnforce &&
item.size >= item.cacheGroup.enforceSizeThreshold;
// Skip when no chunk selected
if (selectedChunks.length === 0) continue;
const usedChunks = new Set(selectedChunks);
// Check if maxRequests condition can be fulfilled
if (
!enforced &&
(Number.isFinite(item.cacheGroup.maxInitialRequests) ||
Number.isFinite(item.cacheGroup.maxAsyncRequests))
) {
for (const chunk of usedChunks) {
// respect max requests
const maxRequests = chunk.isOnlyInitial()
? item.cacheGroup.maxInitialRequests
: chunk.canBeInitial()
? Math.min(
item.cacheGroup.maxInitialRequests,
item.cacheGroup.maxAsyncRequests
)
: item.cacheGroup.maxAsyncRequests;
if (
isFinite(maxRequests) &&
getRequests(chunk) >= maxRequests
) {
usedChunks.delete(chunk);
}
}
}
outer: for (const chunk of usedChunks) {
for (const module of item.modules) {
if (chunk.containsModule(module)) continue outer;
}
usedChunks.delete(chunk);
}
// Were some (invalid) chunks removed from usedChunks?
// => readd all modules to the queue, as things could have been changed
if (usedChunks.size < selectedChunks.length) {
if (usedChunks.size >= item.cacheGroup.minChunks) {
const chunksArr = Array.from(usedChunks);
for (const module of item.modules) {
addModuleToChunksInfoMap(
item.cacheGroup,
item.cacheGroupIndex,
chunksArr,
getKey(usedChunks),
module
);
}
}
continue;
}
// Create the new chunk if not reusing one
if (!isReused) {
newChunk = compilation.addChunk(chunkName);
}
// Walk through all chunks
for (const chunk of usedChunks) {
// Add graph connections for the split chunk
chunk.split(newChunk);
}
// Add a note to the chunk
newChunk.chunkReason = isReused
? "reused as split chunk"
: "split chunk";
if (item.cacheGroup.key) {
newChunk.chunkReason += ` (cache group: ${item.cacheGroup.key})`;
}
if (chunkName) {
newChunk.chunkReason += ` (name: ${chunkName})`;
// If the chosen name is already an entry point we remove the entry point
const entrypoint = compilation.entrypoints.get(chunkName);
if (entrypoint) {
compilation.entrypoints.delete(chunkName);
entrypoint.remove();
newChunk.entryModule = undefined;
}
}
if (item.cacheGroup.filename) {
if (!newChunk.isOnlyInitial()) {
throw new Error(
"SplitChunksPlugin: You are trying to set a filename for a chunk which is (also) loaded on demand. " +
"The runtime can only handle loading of chunks which match the chunkFilename schema. " +
"Using a custom filename would fail at runtime. " +
`(cache group: ${item.cacheGroup.key})`
);
}
newChunk.filenameTemplate = item.cacheGroup.filename;
}
if (!isReused) {
// Add all modules to the new chunk
for (const module of item.modules) {
if (typeof module.chunkCondition === "function") {
if (!module.chunkCondition(newChunk)) continue;
}
// Add module to new chunk
GraphHelpers.connectChunkAndModule(newChunk, module);
// Remove module from used chunks
for (const chunk of usedChunks) {
chunk.removeModule(module);
module.rewriteChunkInReasons(chunk, [newChunk]);
}
}
} else {
// Remove all modules from used chunks
for (const module of item.modules) {
for (const chunk of usedChunks) {
chunk.removeModule(module);
module.rewriteChunkInReasons(chunk, [newChunk]);
}
}
}
if (item.cacheGroup.maxSize > 0) {
const oldMaxSizeSettings = maxSizeQueueMap.get(newChunk);
maxSizeQueueMap.set(newChunk, {
minSize: Math.max(
oldMaxSizeSettings ? oldMaxSizeSettings.minSize : 0,
item.cacheGroup.minSizeForMaxSize
),
maxSize: Math.min(
oldMaxSizeSettings ? oldMaxSizeSettings.maxSize : Infinity,
item.cacheGroup.maxSize
),
automaticNameDelimiter: item.cacheGroup.automaticNameDelimiter,
keys: oldMaxSizeSettings
? oldMaxSizeSettings.keys.concat(item.cacheGroup.key)
: [item.cacheGroup.key]
});
}
// remove all modules from other entries and update size
for (const [key, info] of chunksInfoMap) {
if (isOverlap(info.chunks, usedChunks)) {
// update modules and total size
// may remove it from the map when < minSize
const oldSize = info.modules.size;
for (const module of item.modules) {
info.modules.delete(module);
}
if (info.modules.size !== oldSize) {
if (info.modules.size === 0) {
chunksInfoMap.delete(key);
continue;
}
info.size = getModulesSize(info.modules);
if (
info.cacheGroup._validateSize &&
info.size < info.cacheGroup.minSize
) {
chunksInfoMap.delete(key);
}
}
}
}
}
const incorrectMinMaxSizeSet = new Set();
// Make sure that maxSize is fulfilled
for (const chunk of compilation.chunks.slice()) {
const { minSize, maxSize, automaticNameDelimiter, keys } =
maxSizeQueueMap.get(chunk) || this.options.fallbackCacheGroup;
if (!maxSize) continue;
if (minSize > maxSize) {
const warningKey = `${keys && keys.join()} ${minSize} ${maxSize}`;
if (!incorrectMinMaxSizeSet.has(warningKey)) {
incorrectMinMaxSizeSet.add(warningKey);
compilation.warnings.push(
new MinMaxSizeWarning(keys, minSize, maxSize)
);
}
}
const results = deterministicGroupingForModules({
maxSize: Math.max(minSize, maxSize),
minSize,
items: chunk.modulesIterable,
getKey(module) {
const ident = contextify(
compilation.options.context,
module.identifier()
);
const name = module.nameForCondition
? contextify(
compilation.options.context,
module.nameForCondition()
)
: ident.replace(/^.*!|\?[^?!]*$/g, "");
const fullKey =
name + automaticNameDelimiter + hashFilename(ident);
return fullKey.replace(/[\\/?]/g, "_");
},
getSize(module) {
return module.size();
}
});
results.sort((a, b) => {
if (a.key < b.key) return -1;
if (a.key > b.key) return 1;
return 0;
});
for (let i = 0; i < results.length; i++) {
const group = results[i];
const key = this.options.hidePathInfo
? hashFilename(group.key)
: group.key;
let name = chunk.name
? chunk.name + automaticNameDelimiter + key
: null;
if (name && name.length > 100) {
name =
name.slice(0, 100) +
automaticNameDelimiter +
hashFilename(name);
}
let newPart;
if (i !== results.length - 1) {
newPart = compilation.addChunk(name);
chunk.split(newPart);
newPart.chunkReason = chunk.chunkReason;
// Add all modules to the new chunk
for (const module of group.items) {
if (typeof module.chunkCondition === "function") {
if (!module.chunkCondition(newPart)) continue;
}
// Add module to new chunk
GraphHelpers.connectChunkAndModule(newPart, module);
// Remove module from used chunks
chunk.removeModule(module);
module.rewriteChunkInReasons(chunk, [newPart]);
}
} else {
// reuse the original chunk as the last part
newPart = chunk;
chunk.name = name;
}
}
}
}
);
});
}
};
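// Usage sketch (illustrative, not part of the original source): webpack
// normally instantiates this plugin from the `optimization.splitChunks`
// config, but it can be applied directly with the same options shape:
//
//   const SplitChunksPlugin = require("webpack/lib/optimize/SplitChunksPlugin");
//   new SplitChunksPlugin({
//     chunks: "all",
//     minSize: 30 * 1024,
//     cacheGroups: {
//       vendors: { test: /[\\/]node_modules[\\/]/, priority: -10 }
//     }
//   }).apply(compiler); // `compiler` is an existing webpack Compiler instance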