Mirror of https://github.com/idanoo/GoScrobble (synced 2025-07-05 15:42:18 +00:00)
0.2.0 - Mid migration
This commit is contained in:
parent 139e6a915e
commit 7e38fdbd7d
42393 changed files with 5358157 additions and 62 deletions
web/node_modules/webpack/lib/util/LazyBucketSortedSet.js (235 lines, generated, vendored, normal file)
@@ -0,0 +1,235 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const SortableSet = require("./SortableSet");

/**
 * @template T
 * @template K
 * Multi layer bucket sorted set
 * Supports adding non-existing items (DO NOT ADD ITEM TWICE)
 * Supports removing existing items (DO NOT REMOVE ITEM NOT IN SET)
 * Supports popping the first items according to defined order
 * Supports iterating all items without order
 * Supports updating an item in an efficient way
 * Supports size property, which is the number of items
 * Items are lazy partially sorted when needed
 */
class LazyBucketSortedSet {
	/**
	 * @param {function(T): K} getKey function to get key from item
	 * @param {function(K, K): number} comparator comparator to sort keys
	 * @param {...((function(T): any) | (function(any, any): number))} args more pairs of getKey and comparator plus optional final comparator for the last layer
	 */
	constructor(getKey, comparator, ...args) {
		this._getKey = getKey;
		this._innerArgs = args;
		this._leaf = args.length <= 1;
		this._keys = new SortableSet(undefined, comparator);
		/** @type {Map<K, LazyBucketSortedSet<T, any> | SortableSet<T>>} */
		this._map = new Map();
		this._unsortedItems = new Set();
		this.size = 0;
	}

	/**
	 * @param {T} item an item
	 * @returns {void}
	 */
	add(item) {
		this.size++;
		this._unsortedItems.add(item);
	}

	/**
	 * @param {K} key key of item
	 * @param {T} item the item
	 * @returns {void}
	 */
	_addInternal(key, item) {
		let entry = this._map.get(key);
		if (entry === undefined) {
			entry = this._leaf
				? new SortableSet(undefined, this._innerArgs[0])
				: new /** @type {any} */ (LazyBucketSortedSet)(...this._innerArgs);
			this._keys.add(key);
			this._map.set(key, entry);
		}
		entry.add(item);
	}

	/**
	 * @param {T} item an item
	 * @returns {void}
	 */
	delete(item) {
		this.size--;
		if (this._unsortedItems.has(item)) {
			this._unsortedItems.delete(item);
			return;
		}
		const key = this._getKey(item);
		const entry = this._map.get(key);
		entry.delete(item);
		if (entry.size === 0) {
			this._deleteKey(key);
		}
	}

	/**
	 * @param {K} key key to be removed
	 * @returns {void}
	 */
	_deleteKey(key) {
		this._keys.delete(key);
		this._map.delete(key);
	}

	/**
	 * @returns {T | undefined} an item
	 */
	popFirst() {
		if (this.size === 0) return undefined;
		this.size--;
		if (this._unsortedItems.size > 0) {
			for (const item of this._unsortedItems) {
				const key = this._getKey(item);
				this._addInternal(key, item);
			}
			this._unsortedItems.clear();
		}
		this._keys.sort();
		const key = this._keys.values().next().value;
		const entry = this._map.get(key);
		if (this._leaf) {
			const leafEntry = /** @type {SortableSet<T>} */ (entry);
			leafEntry.sort();
			const item = leafEntry.values().next().value;
			leafEntry.delete(item);
			if (leafEntry.size === 0) {
				this._deleteKey(key);
			}
			return item;
		} else {
			const nodeEntry = /** @type {LazyBucketSortedSet<T, any>} */ (entry);
			const item = nodeEntry.popFirst();
			if (nodeEntry.size === 0) {
				this._deleteKey(key);
			}
			return item;
		}
	}

	/**
	 * @param {T} item to be updated item
	 * @returns {function(true=): void} finish update
	 */
	startUpdate(item) {
		if (this._unsortedItems.has(item)) {
			return remove => {
				if (remove) {
					this._unsortedItems.delete(item);
					this.size--;
					return;
				}
			};
		}
		const key = this._getKey(item);
		if (this._leaf) {
			const oldEntry = /** @type {SortableSet<T>} */ (this._map.get(key));
			return remove => {
				if (remove) {
					this.size--;
					oldEntry.delete(item);
					if (oldEntry.size === 0) {
						this._deleteKey(key);
					}
					return;
				}
				const newKey = this._getKey(item);
				if (key === newKey) {
					// This flags the sortable set as unordered
					oldEntry.add(item);
				} else {
					oldEntry.delete(item);
					if (oldEntry.size === 0) {
						this._deleteKey(key);
					}
					this._addInternal(newKey, item);
				}
			};
		} else {
			const oldEntry = /** @type {LazyBucketSortedSet<T, any>} */ (this._map.get(
				key
			));
			const finishUpdate = oldEntry.startUpdate(item);
			return remove => {
				if (remove) {
					this.size--;
					finishUpdate(true);
					if (oldEntry.size === 0) {
						this._deleteKey(key);
					}
					return;
				}
				const newKey = this._getKey(item);
				if (key === newKey) {
					finishUpdate();
				} else {
					finishUpdate(true);
					if (oldEntry.size === 0) {
						this._deleteKey(key);
					}
					this._addInternal(newKey, item);
				}
			};
		}
	}

	/**
	 * @param {Iterator<T>[]} iterators list of iterators to append to
	 * @returns {void}
	 */
	_appendIterators(iterators) {
		if (this._unsortedItems.size > 0)
			iterators.push(this._unsortedItems[Symbol.iterator]());
		for (const key of this._keys) {
			const entry = this._map.get(key);
			if (this._leaf) {
				const leafEntry = /** @type {SortableSet<T>} */ (entry);
				const iterator = leafEntry[Symbol.iterator]();
				iterators.push(iterator);
			} else {
				const nodeEntry = /** @type {LazyBucketSortedSet<T, any>} */ (entry);
				nodeEntry._appendIterators(iterators);
			}
		}
	}

	/**
	 * @returns {Iterator<T>} the iterator
	 */
	[Symbol.iterator]() {
		const iterators = [];
		this._appendIterators(iterators);
		iterators.reverse();
		let currentIterator = iterators.pop();
		return {
			next: () => {
				const res = currentIterator.next();
				if (res.done) {
					if (iterators.length === 0) return res;
					currentIterator = iterators.pop();
					return currentIterator.next();
				}
				return res;
			}
		};
	}
}

module.exports = LazyBucketSortedSet;
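A minimal usage sketch for LazyBucketSortedSet (illustrative only, not part of the diff; the record shape, depth key and name comparator are made-up assumptions, and the require path assumes webpack's lib/util directory):

const LazyBucketSortedSet = require("./LazyBucketSortedSet");

// Two layers: bucket by numeric depth, then sort items by name inside a bucket.
const set = new LazyBucketSortedSet(
	item => item.depth,
	(a, b) => a - b,
	(a, b) => a.name.localeCompare(b.name)
);
set.add({ name: "b", depth: 1 });
set.add({ name: "a", depth: 0 });
console.log(set.popFirst().name); // "a" - smallest depth bucket, then smallest name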
web/node_modules/webpack/lib/util/Queue.js (46 lines, generated, vendored, normal file)
@@ -0,0 +1,46 @@
"use strict";

/**
 * @template T
 */
class Queue {
	/**
	 * @param {Iterable<T>=} items The initial elements.
	 */
	constructor(items) {
		/** @private @type {Set<T>} */
		this.set = new Set(items);
		/** @private @type {Iterator<T>} */
		this.iterator = this.set[Symbol.iterator]();
	}

	/**
	 * Returns the number of elements in this queue.
	 * @returns {number} The number of elements in this queue.
	 */
	get length() {
		return this.set.size;
	}

	/**
	 * Appends the specified element to this queue.
	 * @param {T} item The element to add.
	 * @returns {void}
	 */
	enqueue(item) {
		this.set.add(item);
	}

	/**
	 * Retrieves and removes the head of this queue.
	 * @returns {T | undefined} The head of the queue, or `undefined` if this queue is empty.
	 */
	dequeue() {
		const result = this.iterator.next();
		if (result.done) return undefined;
		this.set.delete(result.value);
		return result.value;
	}
}

module.exports = Queue;
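A short usage sketch for Queue (illustrative values, not part of the diff). Because it is backed by a Set, enqueueing an element that is already present is a no-op:

const Queue = require("./Queue");

const queue = new Queue(["a", "b"]);
queue.enqueue("c");
queue.enqueue("a"); // already queued - Set-backed, so not duplicated
console.log(queue.dequeue()); // "a"
console.log(queue.length);    // 2 ("b" and "c" remain)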
web/node_modules/webpack/lib/util/Semaphore.js (53 lines, generated, vendored, normal file)
@@ -0,0 +1,53 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/
"use strict";

class Semaphore {
	/**
	 * Creates an instance of Semaphore.
	 *
	 * @param {number} available the number of "tasks" that may run
	 * concurrently in the Semaphore
	 */
	constructor(available) {
		this.available = available;
		/** @type {(function(): void)[]} */
		this.waiters = [];
		/** @private */
		this._continue = this._continue.bind(this);
	}

	/**
	 * @param {function(): void} callback function block to capture and run
	 * @returns {void}
	 */
	acquire(callback) {
		if (this.available > 0) {
			this.available--;
			callback();
		} else {
			this.waiters.push(callback);
		}
	}

	release() {
		this.available++;
		if (this.waiters.length > 0) {
			process.nextTick(this._continue);
		}
	}

	_continue() {
		if (this.available > 0) {
			if (this.waiters.length > 0) {
				this.available--;
				const callback = this.waiters.pop();
				callback();
			}
		}
	}
}

module.exports = Semaphore;
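A usage sketch for Semaphore (illustrative; the task bodies and timeout are made up). At most `available` callbacks run at once; the rest wait until release() frees a slot:

const Semaphore = require("./Semaphore");

const semaphore = new Semaphore(2); // two concurrent "tasks"
const runTask = id =>
	semaphore.acquire(() => {
		console.log("start", id);
		setTimeout(() => {
			console.log("done", id);
			semaphore.release(); // waiters are continued on process.nextTick
		}, 10);
	});
[1, 2, 3].forEach(runTask); // task 3 starts only after 1 or 2 releases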
web/node_modules/webpack/lib/util/SetHelpers.js (48 lines, generated, vendored, normal file)
@@ -0,0 +1,48 @@
"use strict";

/**
 * intersect creates a Set containing the intersection of elements between all sets
 * @param {Set[]} sets an array of sets being checked for shared elements
 * @returns {Set<TODO>} returns a new Set containing the intersecting items
 */
const intersect = sets => {
	if (sets.length === 0) return new Set();
	if (sets.length === 1) return new Set(sets[0]);
	let minSize = Infinity;
	let minIndex = -1;
	for (let i = 0; i < sets.length; i++) {
		const size = sets[i].size;
		if (size < minSize) {
			minIndex = i;
			minSize = size;
		}
	}
	const current = new Set(sets[minIndex]);
	for (let i = 0; i < sets.length; i++) {
		if (i === minIndex) continue;
		const set = sets[i];
		for (const item of current) {
			if (!set.has(item)) {
				current.delete(item);
			}
		}
	}
	return current;
};

/**
 * Checks if a set is the subset of another set
 * @param {Set<TODO>} bigSet a Set which contains the original elements to compare against
 * @param {Set<TODO>} smallSet the set whose elements might be contained inside of bigSet
 * @returns {boolean} returns true if bigSet contains all elements of smallSet
 */
const isSubset = (bigSet, smallSet) => {
	if (bigSet.size < smallSet.size) return false;
	for (const item of smallSet) {
		if (!bigSet.has(item)) return false;
	}
	return true;
};

exports.intersect = intersect;
exports.isSubset = isSubset;
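A quick sketch of the two helpers (illustrative values, not part of the diff):

const { intersect, isSubset } = require("./SetHelpers");

const a = new Set([1, 2, 3]);
const b = new Set([2, 3, 4]);
console.log(intersect([a, b]));            // Set { 2, 3 }
console.log(isSubset(a, new Set([2, 3]))); // true - every element of the small set is in a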
web/node_modules/webpack/lib/util/SortableSet.js (140 lines, generated, vendored, normal file)
@@ -0,0 +1,140 @@
"use strict";

/**
 * A subset of Set that offers sorting functionality
 * @template T item type in set
 * @extends {Set<T>}
 */
class SortableSet extends Set {
	/**
	 * Create a new sortable set
	 * @param {Iterable<T>=} initialIterable The initial iterable value
	 * @typedef {function(T, T): number} SortFunction
	 * @param {SortFunction=} defaultSort Default sorting function
	 */
	constructor(initialIterable, defaultSort) {
		super(initialIterable);
		/** @private @type {function(T, T): number} */
		this._sortFn = defaultSort;
		/** @private @type {(function(T, T): number) | null} */
		this._lastActiveSortFn = null;
		/** @private @type {Map<Function, T[]> | undefined} */
		this._cache = undefined;
		/** @private @type {Map<Function, T[]|string|number> | undefined} */
		this._cacheOrderIndependent = undefined;
	}

	/**
	 * @param {T} value value to add to set
	 * @returns {this} returns itself
	 */
	add(value) {
		this._lastActiveSortFn = null;
		this._invalidateCache();
		this._invalidateOrderedCache();
		super.add(value);
		return this;
	}

	/**
	 * @param {T} value value to delete
	 * @returns {boolean} true if value existed in set, false otherwise
	 */
	delete(value) {
		this._invalidateCache();
		this._invalidateOrderedCache();
		return super.delete(value);
	}

	/**
	 * @returns {void}
	 */
	clear() {
		this._invalidateCache();
		this._invalidateOrderedCache();
		return super.clear();
	}

	/**
	 * Sort with a comparer function
	 * @param {SortFunction} sortFn Sorting comparer function
	 * @returns {void}
	 */
	sortWith(sortFn) {
		if (this.size <= 1 || sortFn === this._lastActiveSortFn) {
			// already sorted - nothing to do
			return;
		}

		const sortedArray = Array.from(this).sort(sortFn);
		super.clear();
		for (let i = 0; i < sortedArray.length; i += 1) {
			super.add(sortedArray[i]);
		}
		this._lastActiveSortFn = sortFn;
		this._invalidateCache();
	}

	sort() {
		this.sortWith(this._sortFn);
	}

	/**
	 * Get data from cache
	 * @param {function(SortableSet<T>): T[]} fn function to calculate value
	 * @returns {T[]} returns result of fn(this), cached until set changes
	 */
	getFromCache(fn) {
		if (this._cache === undefined) {
			this._cache = new Map();
		} else {
			const data = this._cache.get(fn);
			if (data !== undefined) {
				return data;
			}
		}
		const newData = fn(this);
		this._cache.set(fn, newData);
		return newData;
	}

	/**
	 * @param {function(SortableSet<T>): string|number|T[]} fn function to calculate value
	 * @returns {any} returns result of fn(this), cached until set changes
	 */
	getFromUnorderedCache(fn) {
		if (this._cacheOrderIndependent === undefined) {
			this._cacheOrderIndependent = new Map();
		} else {
			const data = this._cacheOrderIndependent.get(fn);
			if (data !== undefined) {
				return data;
			}
		}
		const newData = fn(this);
		this._cacheOrderIndependent.set(fn, newData);
		return newData;
	}

	/**
	 * @private
	 * @returns {void}
	 */
	_invalidateCache() {
		if (this._cache !== undefined) {
			this._cache.clear();
		}
	}

	/**
	 * @private
	 * @returns {void}
	 */
	_invalidateOrderedCache() {
		if (this._cacheOrderIndependent !== undefined) {
			this._cacheOrderIndependent.clear();
		}
	}
}

module.exports = SortableSet;
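A usage sketch for SortableSet (illustrative comparator and cache function, not part of the diff):

const SortableSet = require("./SortableSet");

const set = new SortableSet([3, 1, 2], (a, b) => a - b);
set.sort();            // applies the default comparator passed to the constructor
console.log([...set]); // [1, 2, 3]

const toArray = s => Array.from(s);
const first = set.getFromCache(toArray);
console.log(first === set.getFromCache(toArray)); // true - memoized until the set changes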
web/node_modules/webpack/lib/util/StackedSetMap.js (142 lines, generated, vendored, normal file)
@@ -0,0 +1,142 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/
"use strict";

const util = require("util");

const TOMBSTONE = {};
const UNDEFINED_MARKER = {};

class StackedSetMap {
	constructor(parentStack) {
		this.stack = parentStack === undefined ? [] : parentStack.slice();
		this.map = new Map();
		this.stack.push(this.map);
	}

	add(item) {
		this.map.set(item, true);
	}

	set(item, value) {
		this.map.set(item, value === undefined ? UNDEFINED_MARKER : value);
	}

	delete(item) {
		if (this.stack.length > 1) {
			this.map.set(item, TOMBSTONE);
		} else {
			this.map.delete(item);
		}
	}

	has(item) {
		const topValue = this.map.get(item);
		if (topValue !== undefined) return topValue !== TOMBSTONE;
		if (this.stack.length > 1) {
			for (var i = this.stack.length - 2; i >= 0; i--) {
				const value = this.stack[i].get(item);
				if (value !== undefined) {
					this.map.set(item, value);
					return value !== TOMBSTONE;
				}
			}
			this.map.set(item, TOMBSTONE);
		}
		return false;
	}

	get(item) {
		const topValue = this.map.get(item);
		if (topValue !== undefined) {
			return topValue === TOMBSTONE || topValue === UNDEFINED_MARKER
				? undefined
				: topValue;
		}
		if (this.stack.length > 1) {
			for (var i = this.stack.length - 2; i >= 0; i--) {
				const value = this.stack[i].get(item);
				if (value !== undefined) {
					this.map.set(item, value);
					return value === TOMBSTONE || value === UNDEFINED_MARKER
						? undefined
						: value;
				}
			}
			this.map.set(item, TOMBSTONE);
		}
		return undefined;
	}

	_compress() {
		if (this.stack.length === 1) return;
		this.map = new Map();
		for (const data of this.stack) {
			for (const pair of data) {
				if (pair[1] === TOMBSTONE) {
					this.map.delete(pair[0]);
				} else {
					this.map.set(pair[0], pair[1]);
				}
			}
		}
		this.stack = [this.map];
	}

	asArray() {
		this._compress();
		return Array.from(this.map.entries(), pair => pair[0]);
	}

	asSet() {
		return new Set(this.asArray());
	}

	asPairArray() {
		this._compress();
		return Array.from(this.map.entries(), pair =>
			/** @type {[TODO, TODO]} */ (pair[1] === UNDEFINED_MARKER
				? [pair[0], undefined]
				: pair)
		);
	}

	asMap() {
		return new Map(this.asPairArray());
	}

	get size() {
		this._compress();
		return this.map.size;
	}

	createChild() {
		return new StackedSetMap(this.stack);
	}

	get length() {
		throw new Error("This is no longer an Array");
	}

	set length(value) {
		throw new Error("This is no longer an Array");
	}
}

// TODO remove in webpack 5
StackedSetMap.prototype.push = util.deprecate(
	/**
	 * @deprecated
	 * @this {StackedSetMap}
	 * @param {any} item Item to add
	 * @returns {void}
	 */
	function(item) {
		this.add(item);
	},
	"This is no longer an Array: Use add instead."
);

module.exports = StackedSetMap;
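A usage sketch for StackedSetMap (illustrative keys, not part of the diff). A child layer sees parent entries lazily and records deletions as tombstones without touching the parent:

const StackedSetMap = require("./StackedSetMap");

const parent = new StackedSetMap();
parent.set("a", 1);

const child = parent.createChild();
child.set("b", 2);
child.delete("a"); // tombstoned in the child layer only

console.log(child.get("a"), child.get("b")); // undefined 2
console.log(parent.get("a"));                // 1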
web/node_modules/webpack/lib/util/TrackingSet.js (35 lines, generated, vendored, normal file)
@@ -0,0 +1,35 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/
"use strict";

module.exports = class TrackingSet {
	constructor(set) {
		this.set = set;
		this.set2 = new Set();
		this.stack = set.stack;
	}

	add(item) {
		this.set2.add(item);
		return this.set.add(item);
	}

	delete(item) {
		this.set2.delete(item);
		return this.set.delete(item);
	}

	has(item) {
		return this.set.has(item);
	}

	createChild() {
		return this.set.createChild();
	}

	getAddedItems() {
		return this.set2;
	}
};
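A usage sketch for TrackingSet (illustrative, not part of the diff; it wraps a StackedSetMap here on the assumption that the wrapped object exposes add/delete/has/createChild and a stack property):

const TrackingSet = require("./TrackingSet");
const StackedSetMap = require("./StackedSetMap");

const base = new StackedSetMap();
const tracking = new TrackingSet(base);
tracking.add("x");
tracking.add("y");
console.log(tracking.getAddedItems()); // Set { 'x', 'y' } - only what went through this wrapper
console.log(base.has("x"));            // true - writes are forwarded to the wrapped set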
web/node_modules/webpack/lib/util/cachedMerge.js (35 lines, generated, vendored, normal file)
@@ -0,0 +1,35 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/
"use strict";

const mergeCache = new WeakMap();

/**
 * Merges two given objects and caches the result to avoid computation if same objects passed as arguments again.
 * @example
 * // performs Object.assign(first, second), stores the result in WeakMap and returns result
 * cachedMerge({a: 1}, {a: 2})
 * {a: 2}
 * // when same arguments passed, gets the result from WeakMap and returns it.
 * cachedMerge({a: 1}, {a: 2})
 * {a: 2}
 * @param {object} first first object
 * @param {object} second second object
 * @returns {object} merged object of first and second object
 */
const cachedMerge = (first, second) => {
	let innerCache = mergeCache.get(first);
	if (innerCache === undefined) {
		innerCache = new WeakMap();
		mergeCache.set(first, innerCache);
	}
	const prevMerge = innerCache.get(second);
	if (prevMerge !== undefined) return prevMerge;
	const newMerge = Object.assign({}, first, second);
	innerCache.set(second, newMerge);
	return newMerge;
};

module.exports = cachedMerge;
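A usage sketch for cachedMerge (illustrative objects, not part of the diff). The cache is keyed by object identity, so repeating the call with the same references returns the same merged object:

const cachedMerge = require("./cachedMerge");

const defaults = { mode: "none", devtool: false };
const overrides = { devtool: "source-map" };

const merged = cachedMerge(defaults, overrides);
console.log(merged);                                      // { mode: 'none', devtool: 'source-map' }
console.log(cachedMerge(defaults, overrides) === merged); // true - served from the WeakMap cache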
web/node_modules/webpack/lib/util/cleverMerge.js (77 lines, generated, vendored, normal file)
@@ -0,0 +1,77 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const mergeCache = new WeakMap();

/**
 * Merges two given objects and caches the result to avoid computation if same objects passed as arguments again.
 * @example
 * // performs cleverMerge(first, second), stores the result in WeakMap and returns result
 * cachedCleverMerge({a: 1}, {a: 2})
 * {a: 2}
 * // when same arguments passed, gets the result from WeakMap and returns it.
 * cachedCleverMerge({a: 1}, {a: 2})
 * {a: 2}
 * @param {object} first first object
 * @param {object} second second object
 * @returns {object} merged object of first and second object
 */
const cachedCleverMerge = (first, second) => {
	let innerCache = mergeCache.get(first);
	if (innerCache === undefined) {
		innerCache = new WeakMap();
		mergeCache.set(first, innerCache);
	}
	const prevMerge = innerCache.get(second);
	if (prevMerge !== undefined) return prevMerge;
	const newMerge = cleverMerge(first, second);
	innerCache.set(second, newMerge);
	return newMerge;
};

/**
 * Merges two objects. Objects are not deeply merged.
 * TODO webpack 5: merge objects deeply clever.
 * Arrays might reference the old value with "..."
 * @param {object} first first object
 * @param {object} second second object
 * @returns {object} merged object of first and second object
 */
const cleverMerge = (first, second) => {
	const newObject = Object.assign({}, first);
	for (const key of Object.keys(second)) {
		if (!(key in newObject)) {
			newObject[key] = second[key];
			continue;
		}
		const secondValue = second[key];
		if (!Array.isArray(secondValue)) {
			newObject[key] = secondValue;
			continue;
		}
		const firstValue = newObject[key];
		if (Array.isArray(firstValue)) {
			const newArray = [];
			for (const item of secondValue) {
				if (item === "...") {
					for (const item of firstValue) {
						newArray.push(item);
					}
				} else {
					newArray.push(item);
				}
			}
			newObject[key] = newArray;
		} else {
			newObject[key] = secondValue;
		}
	}
	return newObject;
};

exports.cachedCleverMerge = cachedCleverMerge;
exports.cleverMerge = cleverMerge;
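A usage sketch for cleverMerge and cachedCleverMerge (illustrative option objects, not part of the diff). The "..." entry splices the previous array value into the merged one:

const { cleverMerge, cachedCleverMerge } = require("./cleverMerge");

const base = { extensions: [".js", ".json"] };
const extra = { extensions: ["...", ".ts"] };

console.log(cleverMerge(base, extra).extensions);
// [ '.js', '.json', '.ts' ]
console.log(cachedCleverMerge(base, extra) === cachedCleverMerge(base, extra));
// true - same object references hit the cache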
web/node_modules/webpack/lib/util/createHash.js (137 lines, generated, vendored, normal file)
@@ -0,0 +1,137 @@
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/
"use strict";

const AbstractMethodError = require("../AbstractMethodError");

const BULK_SIZE = 1000;

class Hash {
	/**
	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
	 * @param {string|Buffer} data data
	 * @param {string=} inputEncoding data encoding
	 * @returns {this} updated hash
	 */
	update(data, inputEncoding) {
		throw new AbstractMethodError();
	}

	/**
	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
	 * @param {string=} encoding encoding of the return value
	 * @returns {string|Buffer} digest
	 */
	digest(encoding) {
		throw new AbstractMethodError();
	}
}

exports.Hash = Hash;
/** @typedef {typeof Hash} HashConstructor */

class BulkUpdateDecorator extends Hash {
	/**
	 * @param {Hash} hash hash
	 */
	constructor(hash) {
		super();
		this.hash = hash;
		this.buffer = "";
	}

	/**
	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
	 * @param {string|Buffer} data data
	 * @param {string=} inputEncoding data encoding
	 * @returns {this} updated hash
	 */
	update(data, inputEncoding) {
		if (
			inputEncoding !== undefined ||
			typeof data !== "string" ||
			data.length > BULK_SIZE
		) {
			if (this.buffer.length > 0) {
				this.hash.update(this.buffer);
				this.buffer = "";
			}
			this.hash.update(data, inputEncoding);
		} else {
			this.buffer += data;
			if (this.buffer.length > BULK_SIZE) {
				this.hash.update(this.buffer);
				this.buffer = "";
			}
		}
		return this;
	}

	/**
	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
	 * @param {string=} encoding encoding of the return value
	 * @returns {string|Buffer} digest
	 */
	digest(encoding) {
		if (this.buffer.length > 0) {
			this.hash.update(this.buffer);
		}
		var digestResult = this.hash.digest(encoding);
		return typeof digestResult === "string"
			? digestResult
			: digestResult.toString();
	}
}

/**
 * istanbul ignore next
 */
class DebugHash extends Hash {
	constructor() {
		super();
		this.string = "";
	}

	/**
	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
	 * @param {string|Buffer} data data
	 * @param {string=} inputEncoding data encoding
	 * @returns {this} updated hash
	 */
	update(data, inputEncoding) {
		if (typeof data !== "string") data = data.toString("utf-8");
		this.string += data;
		return this;
	}

	/**
	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
	 * @param {string=} encoding encoding of the return value
	 * @returns {string|Buffer} digest
	 */
	digest(encoding) {
		return this.string.replace(/[^a-z0-9]+/gi, m =>
			Buffer.from(m).toString("hex")
		);
	}
}

/**
 * Creates a hash by name or function
 * @param {string | HashConstructor} algorithm the algorithm name or a constructor creating a hash
 * @returns {Hash} the hash
 */
module.exports = algorithm => {
	if (typeof algorithm === "function") {
		return new BulkUpdateDecorator(new algorithm());
	}
	switch (algorithm) {
		// TODO add non-cryptographic algorithm here
		case "debug":
			return new DebugHash();
		default:
			return new BulkUpdateDecorator(require("crypto").createHash(algorithm));
	}
};
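A usage sketch for createHash (illustrative, not part of the diff; "md5" stands in for any algorithm accepted by Node's crypto.createHash). Small string updates are buffered and flushed in bulk before digesting:

const createHash = require("./createHash");

const hash = createHash("md5");
hash.update("hello ");
hash.update("world");
console.log(hash.digest("hex")); // hex digest of "hello world"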
web/node_modules/webpack/lib/util/deterministicGrouping.js (274 lines, generated, vendored, normal file)
@@ -0,0 +1,274 @@
"use strict";

// Simulations show these probabilities for a single change
// 93.1% that one group is invalidated
// 4.8% that two groups are invalidated
// 1.1% that 3 groups are invalidated
// 0.1% that 4 or more groups are invalidated
//
// And these for removing/adding 10 lexically adjacent files
// 64.5% that one group is invalidated
// 24.8% that two groups are invalidated
// 7.8% that 3 groups are invalidated
// 2.7% that 4 or more groups are invalidated
//
// And these for removing/adding 3 random files
// 0% that one group is invalidated
// 3.7% that two groups are invalidated
// 80.8% that 3 groups are invalidated
// 12.3% that 4 groups are invalidated
// 3.2% that 5 or more groups are invalidated

/**
 * @param {string} a key
 * @param {string} b key
 * @returns {number} the similarity as number
 */
const similarity = (a, b) => {
	const l = Math.min(a.length, b.length);
	let dist = 0;
	for (let i = 0; i < l; i++) {
		const ca = a.charCodeAt(i);
		const cb = b.charCodeAt(i);
		dist += Math.max(0, 10 - Math.abs(ca - cb));
	}
	return dist;
};

/**
 * @param {string} a key
 * @param {string} b key
 * @returns {string} the common part and a single char for the difference
 */
const getName = (a, b) => {
	const l = Math.min(a.length, b.length);
	let r = "";
	for (let i = 0; i < l; i++) {
		const ca = a.charAt(i);
		const cb = b.charAt(i);
		r += ca;
		if (ca === cb) {
			continue;
		}
		return r;
	}
	return a;
};

/**
 * @template T
 */
class Node {
	/**
	 * @param {T} item item
	 * @param {string} key key
	 * @param {number} size size
	 */
	constructor(item, key, size) {
		this.item = item;
		this.key = key;
		this.size = size;
	}
}

/**
 * @template T
 */
class Group {
	/**
	 * @param {Node<T>[]} nodes nodes
	 * @param {number[]} similarities similarities between the nodes (length = nodes.length - 1)
	 */
	constructor(nodes, similarities) {
		this.nodes = nodes;
		this.similarities = similarities;
		this.size = nodes.reduce((size, node) => size + node.size, 0);
		/** @type {string} */
		this.key = undefined;
	}
}

/**
 * @template T
 * @typedef {Object} GroupedItems<T>
 * @property {string} key
 * @property {T[]} items
 * @property {number} size
 */

/**
 * @template T
 * @typedef {Object} Options
 * @property {number} maxSize maximum size of a group
 * @property {number} minSize minimum size of a group (preferred over maximum size)
 * @property {Iterable<T>} items a list of items
 * @property {function(T): number} getSize function to get size of an item
 * @property {function(T): string} getKey function to get the key of an item
 */

/**
 * @template T
 * @param {Options<T>} options options object
 * @returns {GroupedItems<T>[]} grouped items
 */
module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
	/** @type {Group<T>[]} */
	const result = [];

	const nodes = Array.from(
		items,
		item => new Node(item, getKey(item), getSize(item))
	);

	/** @type {Node<T>[]} */
	const initialNodes = [];

	// lexical ordering of keys
	nodes.sort((a, b) => {
		if (a.key < b.key) return -1;
		if (a.key > b.key) return 1;
		return 0;
	});

	// return nodes bigger than maxSize directly as group
	for (const node of nodes) {
		if (node.size >= maxSize) {
			result.push(new Group([node], []));
		} else {
			initialNodes.push(node);
		}
	}

	if (initialNodes.length > 0) {
		// calculate similarities between lexically adjacent nodes
		/** @type {number[]} */
		const similarities = [];
		for (let i = 1; i < initialNodes.length; i++) {
			const a = initialNodes[i - 1];
			const b = initialNodes[i];
			similarities.push(similarity(a.key, b.key));
		}

		const initialGroup = new Group(initialNodes, similarities);

		if (initialGroup.size < minSize) {
			// We hit an edgecase where the working set is already smaller than minSize
			// We merge it with the smallest result node to keep minSize intact
			if (result.length > 0) {
				const smallestGroup = result.reduce((min, group) =>
					min.size > group.size ? group : min
				);
				for (const node of initialGroup.nodes) smallestGroup.nodes.push(node);
				smallestGroup.nodes.sort((a, b) => {
					if (a.key < b.key) return -1;
					if (a.key > b.key) return 1;
					return 0;
				});
			} else {
				// There are no other nodes
				// We use all nodes and have to accept that it's smaller than minSize
				result.push(initialGroup);
			}
		} else {
			const queue = [initialGroup];

			while (queue.length) {
				const group = queue.pop();
				// only groups bigger than maxSize need to be split
				if (group.size < maxSize) {
					result.push(group);
					continue;
				}

				// find unsplittable area from left and right
				// going minSize from left and right
				// at least one node needs to be included otherwise we get stuck
				let left = 0;
				let leftSize = 0;
				while (leftSize <= minSize) {
					leftSize += group.nodes[left].size;
					left++;
				}
				let right = group.nodes.length - 1;
				let rightSize = 0;
				while (rightSize <= minSize) {
					rightSize += group.nodes[right].size;
					right--;
				}

				if (left - 1 > right) {
					// can't split group while holding minSize
					// because minSize is preferred over maxSize we return
					// the group here even while it's too big
					// To avoid this make sure maxSize > minSize * 3
					result.push(group);
					continue;
				}
				if (left <= right) {
					// when there is an area between left and right
					// we look for the best split point
					// we split at the minimum similarity
					// here the key space is separated the most
					let best = left - 1;
					let bestSimilarity = group.similarities[best];
					for (let i = left; i <= right; i++) {
						const similarity = group.similarities[i];
						if (similarity < bestSimilarity) {
							best = i;
							bestSimilarity = similarity;
						}
					}
					left = best + 1;
					right = best;
				}

				// create two new groups for left and right area
				// and queue them up
				const rightNodes = [group.nodes[right + 1]];
				/** @type {number[]} */
				const rightSimilarities = [];
				for (let i = right + 2; i < group.nodes.length; i++) {
					rightSimilarities.push(group.similarities[i - 1]);
					rightNodes.push(group.nodes[i]);
				}
				queue.push(new Group(rightNodes, rightSimilarities));

				const leftNodes = [group.nodes[0]];
				/** @type {number[]} */
				const leftSimilarities = [];
				for (let i = 1; i < left; i++) {
					leftSimilarities.push(group.similarities[i - 1]);
					leftNodes.push(group.nodes[i]);
				}
				queue.push(new Group(leftNodes, leftSimilarities));
			}
		}
	}

	// lexical ordering
	result.sort((a, b) => {
		if (a.nodes[0].key < b.nodes[0].key) return -1;
		if (a.nodes[0].key > b.nodes[0].key) return 1;
		return 0;
	});

	// give every group a name
	for (let i = 0; i < result.length; i++) {
		const group = result[i];
		const first = group.nodes[0];
		const last = group.nodes[group.nodes.length - 1];
		const name = getName(first.key, last.key);
		group.key = name;
	}

	// return the results
	return result.map(group => {
		/** @type {GroupedItems} */
		return {
			key: group.key,
			items: group.nodes.map(node => node.item),
			size: group.size
		};
	});
};
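A usage sketch for deterministicGrouping (illustrative module names and sizes, not part of the diff; size units are arbitrary). Items are grouped by lexically adjacent keys into groups between minSize and maxSize where possible:

const deterministicGrouping = require("./deterministicGrouping");

const modules = [
	{ name: "lib/a.js", size: 10 },
	{ name: "lib/b.js", size: 15 },
	{ name: "vendor/x.js", size: 30 }
];

const groups = deterministicGrouping({
	maxSize: 40,
	minSize: 20,
	items: modules,
	getSize: m => m.size,
	getKey: m => m.name
});
for (const group of groups) {
	console.log(group.key, group.size, group.items.length);
}
// e.g. one group for the two lib/ modules (size 25) and one for vendor/x.js (size 30)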
web/node_modules/webpack/lib/util/identifier.js (127 lines, generated, vendored, normal file)
@@ -0,0 +1,127 @@
"use strict";
const path = require("path");

/**
 * @param {string} context context for relative path
 * @param {string} relativePath path
 * @returns {string} absolute path
 */
const requestToAbsolute = (context, relativePath) => {
	if (relativePath.startsWith("./") || relativePath.startsWith("../"))
		return path.join(context, relativePath);
	return relativePath;
};

/**
 * @typedef {Object} MakeRelativePathsCache
 * @property {Map<string, Map<string, string>>=} relativePaths
 */

/**
 * @param {string} maybeAbsolutePath path to check
 * @returns {boolean} returns true if path is "Absolute Path"-like
 */
const looksLikeAbsolutePath = maybeAbsolutePath => {
	if (/^\/.*\/$/.test(maybeAbsolutePath)) {
		// this 'path' is actually a regexp generated by dynamic requires.
		// Don't treat it as an absolute path.
		return false;
	}
	return /^(?:[a-z]:\\|\/)/i.test(maybeAbsolutePath);
};

/**
 * @param {string} p path to normalize
 * @returns {string} normalized version of path
 */
const normalizePathSeparator = p => p.replace(/\\/g, "/");

/**
 * @param {string} context context for relative path
 * @param {string} identifier identifier for path
 * @returns {string} a converted relative path
 */
const _makePathsRelative = (context, identifier) => {
	return identifier
		.split(/([|! ])/)
		.map(str =>
			looksLikeAbsolutePath(str)
				? normalizePathSeparator(path.relative(context, str))
				: str
		)
		.join("");
};

/**
 * @param {string} context context used to create relative path
 * @param {string} identifier identifier used to create relative path
 * @param {MakeRelativePathsCache=} cache the cache object being set
 * @returns {string} the returned relative path
 */
exports.makePathsRelative = (context, identifier, cache) => {
	if (!cache) return _makePathsRelative(context, identifier);

	const relativePaths =
		cache.relativePaths || (cache.relativePaths = new Map());

	let cachedResult;
	let contextCache = relativePaths.get(context);
	if (contextCache === undefined) {
		relativePaths.set(context, (contextCache = new Map()));
	} else {
		cachedResult = contextCache.get(identifier);
	}

	if (cachedResult !== undefined) {
		return cachedResult;
	} else {
		const relativePath = _makePathsRelative(context, identifier);
		contextCache.set(identifier, relativePath);
		return relativePath;
	}
};

/**
 * @param {string} context absolute context path
 * @param {string} request any request string that may contain absolute paths, a query string, etc.
 * @returns {string} a new request string avoiding absolute paths when possible
 */
exports.contextify = (context, request) => {
	return request
		.split("!")
		.map(r => {
			const splitPath = r.split("?", 2);
			if (/^[a-zA-Z]:\\/.test(splitPath[0])) {
				splitPath[0] = path.win32.relative(context, splitPath[0]);
				if (!/^[a-zA-Z]:\\/.test(splitPath[0])) {
					splitPath[0] = splitPath[0].replace(/\\/g, "/");
				}
			}
			if (/^\//.test(splitPath[0])) {
				splitPath[0] = path.posix.relative(context, splitPath[0]);
			}
			if (!/^(\.\.\/|\/|[a-zA-Z]:\\)/.test(splitPath[0])) {
				splitPath[0] = "./" + splitPath[0];
			}
			return splitPath.join("?");
		})
		.join("!");
};

/**
 * @param {string} context absolute context path
 * @param {string} request any request string
 * @returns {string} a new request string using absolute paths when possible
 */
const _absolutify = (context, request) => {
	return request
		.split("!")
		.map(r => requestToAbsolute(context, r))
		.join("!");
};

exports.absolutify = _absolutify;
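A usage sketch for makePathsRelative and contextify (illustrative POSIX paths, not part of the diff; on Windows the win32 branch applies instead):

const { makePathsRelative, contextify } = require("./identifier");

const context = "/home/user/project";
console.log(
	makePathsRelative(context, "/home/user/project/loader.js!/home/user/project/src/index.js")
); // "loader.js!src/index.js"
console.log(contextify(context, "/home/user/project/src/index.js?foo=bar"));
// "./src/index.js?foo=bar"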
web/node_modules/webpack/lib/util/objectToMap.js (16 lines, generated, vendored, normal file)
@@ -0,0 +1,16 @@
/**
 * convert an object into its 2D array equivalent to be turned
 * into an ES6 map
 *
 * @param {object} obj any object type that works with Object.keys()
 * @returns {Map<TODO, TODO>} an ES6 Map of KV pairs
 */
module.exports = function objectToMap(obj) {
	return new Map(
		Object.keys(obj).map(key => {
			/** @type {[string, string]} */
			const pair = [key, obj[key]];
			return pair;
		})
	);
};
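A usage sketch for objectToMap (illustrative object, not part of the diff):

const objectToMap = require("./objectToMap");

const map = objectToMap({ NODE_ENV: "production", DEBUG: "false" });
console.log(map.get("NODE_ENV")); // "production"
console.log(map instanceof Map);  // true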