first commit

Committed by monjack on 2025-06-20 18:01:48 +08:00
commit 6daa6d65c1
24611 changed files with 2512443 additions and 0 deletions

46
app_vue/node_modules/webpack/lib/util/ArrayHelpers.js generated vendored Normal file
View File

@ -0,0 +1,46 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* Compare two arrays or strings by performing a strict equality check for each value.
* @template T
* @param {ArrayLike<T>} a Array of values to be compared
* @param {ArrayLike<T>} b Array of values to be compared
* @returns {boolean} returns true if all the elements of passed arrays are strictly equal.
*/
module.exports.equals = (a, b) => {
if (a.length !== b.length) return false;
for (let i = 0; i < a.length; i++) {
if (a[i] !== b[i]) return false;
}
return true;
};
/**
* Partition an array by calling a predicate function on each value.
* @template T
* @param {Array<T>} arr Array of values to be partitioned
* @param {(value: T) => boolean} fn Partition function which partitions based on truthiness of result.
* @returns {[Array<T>, Array<T>]} returns the values of `arr` partitioned into two new arrays based on fn predicate.
*/
module.exports.groupBy = (
// eslint-disable-next-line default-param-last
arr = [],
fn
) =>
arr.reduce(
/**
* @param {[Array<T>, Array<T>]} groups An accumulator storing already partitioned values returned from previous call.
* @param {T} value The value of the current element
* @returns {[Array<T>, Array<T>]} returns an array of partitioned groups accumulator resulting from calling a predicate on the current value.
*/
(groups, value) => {
groups[fn(value) ? 0 : 1].push(value);
return groups;
},
[[], []]
);
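For orientation, a small usage sketch of the two helpers above (the relative require path and sample values are illustrative):
```js
const { equals, groupBy } = require("./ArrayHelpers");

equals([1, 2, 3], [1, 2, 3]); // true
equals("abc", "abd"); // false (works on any array-like, including strings)

// groupBy partitions by the truthiness of the predicate result:
// matching values land in the first array, the rest in the second.
const [evens, odds] = groupBy([1, 2, 3, 4], n => n % 2 === 0);
// evens === [2, 4], odds === [1, 3]
```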

104
app_vue/node_modules/webpack/lib/util/ArrayQueue.js generated vendored Normal file
View File

@ -0,0 +1,104 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @template T
*/
class ArrayQueue {
/**
* @param {Iterable<T>=} items The initial elements.
*/
constructor(items) {
/**
* @private
* @type {T[]}
*/
this._list = items ? Array.from(items) : [];
/**
* @private
* @type {T[]}
*/
this._listReversed = [];
}
/**
* Returns the number of elements in this queue.
* @returns {number} The number of elements in this queue.
*/
get length() {
return this._list.length + this._listReversed.length;
}
/**
* Empties the queue.
*/
clear() {
this._list.length = 0;
this._listReversed.length = 0;
}
/**
* Appends the specified element to this queue.
* @param {T} item The element to add.
* @returns {void}
*/
enqueue(item) {
this._list.push(item);
}
/**
* Retrieves and removes the head of this queue.
* @returns {T | undefined} The head of the queue, or `undefined` if this queue is empty.
*/
dequeue() {
if (this._listReversed.length === 0) {
if (this._list.length === 0) return;
if (this._list.length === 1) return this._list.pop();
if (this._list.length < 16) return this._list.shift();
const temp = this._listReversed;
this._listReversed = this._list;
this._listReversed.reverse();
this._list = temp;
}
return this._listReversed.pop();
}
/**
* Finds and removes an item
* @param {T} item the item
* @returns {void}
*/
delete(item) {
const i = this._list.indexOf(item);
if (i >= 0) {
this._list.splice(i, 1);
} else {
const i = this._listReversed.indexOf(item);
if (i >= 0) this._listReversed.splice(i, 1);
}
}
[Symbol.iterator]() {
return {
next: () => {
const item = this.dequeue();
if (item) {
return {
done: false,
value: item
};
}
return {
done: true,
value: undefined
};
}
};
}
}
module.exports = ArrayQueue;
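A hedged usage sketch of ArrayQueue (values are illustrative; dequeue switches from `shift` to a reversed second list once the queue grows past 16 entries):
```js
const ArrayQueue = require("./ArrayQueue");

const queue = new ArrayQueue(["a", "b"]);
queue.enqueue("c");
queue.dequeue(); // "a" (FIFO order)
queue.delete("b"); // removes a still-queued item
queue.length; // 1
```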

410
app_vue/node_modules/webpack/lib/util/AsyncQueue.js generated vendored Normal file
View File

@ -0,0 +1,410 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { SyncHook, AsyncSeriesHook } = require("tapable");
const { makeWebpackError } = require("../HookWebpackError");
const WebpackError = require("../WebpackError");
const ArrayQueue = require("./ArrayQueue");
const QUEUED_STATE = 0;
const PROCESSING_STATE = 1;
const DONE_STATE = 2;
let inHandleResult = 0;
/**
* @template T
* @callback Callback
* @param {(WebpackError | null)=} err
* @param {(T | null)=} result
*/
/**
* @template T
* @template K
* @template R
*/
class AsyncQueueEntry {
/**
* @param {T} item the item
* @param {Callback<R>} callback the callback
*/
constructor(item, callback) {
this.item = item;
/** @type {typeof QUEUED_STATE | typeof PROCESSING_STATE | typeof DONE_STATE} */
this.state = QUEUED_STATE;
/** @type {Callback<R> | undefined} */
this.callback = callback;
/** @type {Callback<R>[] | undefined} */
this.callbacks = undefined;
/** @type {R | null | undefined} */
this.result = undefined;
/** @type {WebpackError | null | undefined} */
this.error = undefined;
}
}
/**
* @template T, K
* @typedef {(item: T) => K} getKey
*/
/**
* @template T, R
* @typedef {(item: T, callback: Callback<R>) => void} Processor
*/
/**
* @template T
* @template K
* @template R
*/
class AsyncQueue {
/**
* @param {object} options options object
* @param {string=} options.name name of the queue
* @param {number=} options.parallelism how many items should be processed at once
* @param {string=} options.context context of execution
* @param {AsyncQueue<EXPECTED_ANY, EXPECTED_ANY, EXPECTED_ANY>=} options.parent parent queue, which will have priority over this queue and with shared parallelism
* @param {getKey<T, K>=} options.getKey extract key from item
* @param {Processor<T, R>} options.processor async function to process items
*/
constructor({ name, context, parallelism, parent, processor, getKey }) {
this._name = name;
this._context = context || "normal";
this._parallelism = parallelism || 1;
this._processor = processor;
this._getKey =
getKey ||
/** @type {getKey<T, K>} */ (item => /** @type {T & K} */ (item));
/** @type {Map<K, AsyncQueueEntry<T, K, R>>} */
this._entries = new Map();
/** @type {ArrayQueue<AsyncQueueEntry<T, K, R>>} */
this._queued = new ArrayQueue();
/** @type {AsyncQueue<T, K, R>[] | undefined} */
this._children = undefined;
this._activeTasks = 0;
this._willEnsureProcessing = false;
this._needProcessing = false;
this._stopped = false;
/** @type {AsyncQueue<T, K, R>} */
this._root = parent ? parent._root : this;
if (parent) {
if (this._root._children === undefined) {
this._root._children = [this];
} else {
this._root._children.push(this);
}
}
this.hooks = {
/** @type {AsyncSeriesHook<[T]>} */
beforeAdd: new AsyncSeriesHook(["item"]),
/** @type {SyncHook<[T]>} */
added: new SyncHook(["item"]),
/** @type {AsyncSeriesHook<[T]>} */
beforeStart: new AsyncSeriesHook(["item"]),
/** @type {SyncHook<[T]>} */
started: new SyncHook(["item"]),
/** @type {SyncHook<[T, WebpackError | null | undefined, R | null | undefined]>} */
result: new SyncHook(["item", "error", "result"])
};
this._ensureProcessing = this._ensureProcessing.bind(this);
}
/**
* @returns {string} context of execution
*/
getContext() {
return this._context;
}
/**
* @param {string} value context of execution
*/
setContext(value) {
this._context = value;
}
/**
* @param {T} item an item
* @param {Callback<R>} callback callback function
* @returns {void}
*/
add(item, callback) {
if (this._stopped) return callback(new WebpackError("Queue was stopped"));
this.hooks.beforeAdd.callAsync(item, err => {
if (err) {
callback(
makeWebpackError(err, `AsyncQueue(${this._name}).hooks.beforeAdd`)
);
return;
}
const key = this._getKey(item);
const entry = this._entries.get(key);
if (entry !== undefined) {
if (entry.state === DONE_STATE) {
if (inHandleResult++ > 3) {
process.nextTick(() => callback(entry.error, entry.result));
} else {
callback(entry.error, entry.result);
}
inHandleResult--;
} else if (entry.callbacks === undefined) {
entry.callbacks = [callback];
} else {
entry.callbacks.push(callback);
}
return;
}
const newEntry = new AsyncQueueEntry(item, callback);
if (this._stopped) {
this.hooks.added.call(item);
this._root._activeTasks++;
process.nextTick(() =>
this._handleResult(newEntry, new WebpackError("Queue was stopped"))
);
} else {
this._entries.set(key, newEntry);
this._queued.enqueue(newEntry);
const root = this._root;
root._needProcessing = true;
if (root._willEnsureProcessing === false) {
root._willEnsureProcessing = true;
setImmediate(root._ensureProcessing);
}
this.hooks.added.call(item);
}
});
}
/**
* @param {T} item an item
* @returns {void}
*/
invalidate(item) {
const key = this._getKey(item);
const entry =
/** @type {AsyncQueueEntry<T, K, R>} */
(this._entries.get(key));
this._entries.delete(key);
if (entry.state === QUEUED_STATE) {
this._queued.delete(entry);
}
}
/**
* Waits for an already started item
* @param {T} item an item
* @param {Callback<R>} callback callback function
* @returns {void}
*/
waitFor(item, callback) {
const key = this._getKey(item);
const entry = this._entries.get(key);
if (entry === undefined) {
return callback(
new WebpackError(
"waitFor can only be called for an already started item"
)
);
}
if (entry.state === DONE_STATE) {
process.nextTick(() => callback(entry.error, entry.result));
} else if (entry.callbacks === undefined) {
entry.callbacks = [callback];
} else {
entry.callbacks.push(callback);
}
}
/**
* @returns {void}
*/
stop() {
this._stopped = true;
const queue = this._queued;
this._queued = new ArrayQueue();
const root = this._root;
for (const entry of queue) {
this._entries.delete(
this._getKey(/** @type {AsyncQueueEntry<T, K, R>} */ (entry).item)
);
root._activeTasks++;
this._handleResult(
/** @type {AsyncQueueEntry<T, K, R>} */ (entry),
new WebpackError("Queue was stopped")
);
}
}
/**
* @returns {void}
*/
increaseParallelism() {
const root = this._root;
root._parallelism++;
/* istanbul ignore next */
if (root._willEnsureProcessing === false && root._needProcessing) {
root._willEnsureProcessing = true;
setImmediate(root._ensureProcessing);
}
}
/**
* @returns {void}
*/
decreaseParallelism() {
const root = this._root;
root._parallelism--;
}
/**
* @param {T} item an item
* @returns {boolean} true, if the item is currently being processed
*/
isProcessing(item) {
const key = this._getKey(item);
const entry = this._entries.get(key);
return entry !== undefined && entry.state === PROCESSING_STATE;
}
/**
* @param {T} item an item
* @returns {boolean} true, if the item is currently queued
*/
isQueued(item) {
const key = this._getKey(item);
const entry = this._entries.get(key);
return entry !== undefined && entry.state === QUEUED_STATE;
}
/**
* @param {T} item an item
* @returns {boolean} true, if the item has already been processed (is done)
*/
isDone(item) {
const key = this._getKey(item);
const entry = this._entries.get(key);
return entry !== undefined && entry.state === DONE_STATE;
}
/**
* @returns {void}
*/
_ensureProcessing() {
while (this._activeTasks < this._parallelism) {
const entry = this._queued.dequeue();
if (entry === undefined) break;
this._activeTasks++;
entry.state = PROCESSING_STATE;
this._startProcessing(entry);
}
this._willEnsureProcessing = false;
if (this._queued.length > 0) return;
if (this._children !== undefined) {
for (const child of this._children) {
while (this._activeTasks < this._parallelism) {
const entry = child._queued.dequeue();
if (entry === undefined) break;
this._activeTasks++;
entry.state = PROCESSING_STATE;
child._startProcessing(entry);
}
if (child._queued.length > 0) return;
}
}
if (!this._willEnsureProcessing) this._needProcessing = false;
}
/**
* @param {AsyncQueueEntry<T, K, R>} entry the entry
* @returns {void}
*/
_startProcessing(entry) {
this.hooks.beforeStart.callAsync(entry.item, err => {
if (err) {
this._handleResult(
entry,
makeWebpackError(err, `AsyncQueue(${this._name}).hooks.beforeStart`)
);
return;
}
let inCallback = false;
try {
this._processor(entry.item, (e, r) => {
inCallback = true;
this._handleResult(entry, e, r);
});
} catch (err) {
if (inCallback) throw err;
this._handleResult(entry, /** @type {WebpackError} */ (err), null);
}
this.hooks.started.call(entry.item);
});
}
/**
* @param {AsyncQueueEntry<T, K, R>} entry the entry
* @param {(WebpackError | null)=} err error, if any
* @param {(R | null)=} result result, if any
* @returns {void}
*/
_handleResult(entry, err, result) {
this.hooks.result.callAsync(entry.item, err, result, hookError => {
const error = hookError
? makeWebpackError(hookError, `AsyncQueue(${this._name}).hooks.result`)
: err;
const callback = /** @type {Callback<R>} */ (entry.callback);
const callbacks = entry.callbacks;
entry.state = DONE_STATE;
entry.callback = undefined;
entry.callbacks = undefined;
entry.result = result;
entry.error = error;
const root = this._root;
root._activeTasks--;
if (root._willEnsureProcessing === false && root._needProcessing) {
root._willEnsureProcessing = true;
setImmediate(root._ensureProcessing);
}
if (inHandleResult++ > 3) {
process.nextTick(() => {
callback(error, result);
if (callbacks !== undefined) {
for (const callback of callbacks) {
callback(error, result);
}
}
});
} else {
callback(error, result);
if (callbacks !== undefined) {
for (const callback of callbacks) {
callback(error, result);
}
}
}
inHandleResult--;
});
}
clear() {
this._entries.clear();
this._queued.clear();
this._activeTasks = 0;
this._willEnsureProcessing = false;
this._needProcessing = false;
this._stopped = false;
}
}
module.exports = AsyncQueue;
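A minimal sketch of how an AsyncQueue might be driven (names and timings are illustrative; the class relies on the vendored tapable package for its hooks):
```js
const AsyncQueue = require("./AsyncQueue");

// Process at most two items concurrently; the processor stands in for
// any async work (e.g. building a module) and reports via callback.
const queue = new AsyncQueue({
  name: "example",
  parallelism: 2,
  getKey: item => item.id,
  processor: (item, callback) => {
    setTimeout(() => callback(null, item.id.toUpperCase()), 10);
  }
});

queue.add({ id: "a" }, (err, result) => {
  if (err) return console.error(err);
  console.log(result); // "A"
});
```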

35
app_vue/node_modules/webpack/lib/util/Hash.js generated vendored Normal file
View File

@ -0,0 +1,35 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
class Hash {
/* istanbul ignore next */
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @abstract
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
*/
update(data, inputEncoding) {
const AbstractMethodError = require("../AbstractMethodError");
throw new AbstractMethodError();
}
/* istanbul ignore next */
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @abstract
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
*/
digest(encoding) {
const AbstractMethodError = require("../AbstractMethodError");
throw new AbstractMethodError();
}
}
module.exports = Hash;
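Hash is only an abstract contract; webpack ships its own concrete implementations elsewhere. As an illustration only, a hypothetical subclass could wrap node:crypto like this:
```js
const crypto = require("crypto");
const Hash = require("./Hash");

// Hypothetical subclass, not part of webpack itself.
class NodeCryptoHash extends Hash {
  constructor(algorithm) {
    super();
    this._hash = crypto.createHash(algorithm);
  }
  update(data, inputEncoding) {
    this._hash.update(data, inputEncoding);
    return this;
  }
  digest(encoding) {
    return this._hash.digest(encoding);
  }
}

new NodeCryptoHash("sha256").update("abc").digest("hex");
```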

45
app_vue/node_modules/webpack/lib/util/IterableHelpers.js generated vendored Normal file
View File

@ -0,0 +1,45 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @template T
* @param {Iterable<T>} set a set
* @returns {T | undefined} last item
*/
const last = set => {
let last;
for (const item of set) last = item;
return last;
};
/**
* @template T
* @param {Iterable<T>} iterable iterable
* @param {(value: T) => boolean | null | undefined} filter predicate
* @returns {boolean} true, if some items match the filter predicate
*/
const someInIterable = (iterable, filter) => {
for (const item of iterable) {
if (filter(item)) return true;
}
return false;
};
/**
* @template T
* @param {Iterable<T>} iterable an iterable
* @returns {number} count of items
*/
const countIterable = iterable => {
let i = 0;
for (const _ of iterable) i++;
return i;
};
module.exports.last = last;
module.exports.someInIterable = someInIterable;
module.exports.countIterable = countIterable;
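A quick usage sketch of the iterable helpers (sample values only):
```js
const { last, someInIterable, countIterable } = require("./IterableHelpers");

const set = new Set([1, 2, 3]);
last(set); // 3
someInIterable(set, n => n > 2); // true
countIterable(set); // 3
```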

270
app_vue/node_modules/webpack/lib/util/LazyBucketSortedSet.js generated vendored Normal file
View File

@ -0,0 +1,270 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { first } = require("./SetHelpers");
const SortableSet = require("./SortableSet");
/**
* @template T
* @template K
* @typedef {(item: T) => K} GetKey
*/
/**
* @template T
* @typedef {(a: T, b: T) => number} Comparator
*/
/**
* @template T
* @template K
* @typedef {LazyBucketSortedSet<T, K> | SortableSet<T>} Entry
*/
/**
* @template T
* @template K
* @typedef {GetKey<T, K> | Comparator<K> | Comparator<T>} Arg
*/
/**
* Multi layer bucket sorted set:
* Supports adding non-existing items (DO NOT ADD ITEM TWICE),
* Supports removing existing items (DO NOT REMOVE ITEM NOT IN SET),
* Supports popping the first items according to defined order,
* Supports iterating all items without order,
* Supports updating an item in an efficient way,
* Supports size property, which is the number of items,
* Items are lazily and partially sorted when needed
* @template T
* @template K
*/
class LazyBucketSortedSet {
/**
* @param {GetKey<T, K>} getKey function to get key from item
* @param {Comparator<K>=} comparator comparator to sort keys
* @param {...Arg<T, K>} args more pairs of getKey and comparator plus optional final comparator for the last layer
*/
constructor(getKey, comparator, ...args) {
this._getKey = getKey;
this._innerArgs = args;
this._leaf = args.length <= 1;
this._keys = new SortableSet(undefined, comparator);
/** @type {Map<K, Entry<T, K>>} */
this._map = new Map();
this._unsortedItems = new Set();
this.size = 0;
}
/**
* @param {T} item an item
* @returns {void}
*/
add(item) {
this.size++;
this._unsortedItems.add(item);
}
/**
* @param {K} key key of item
* @param {T} item the item
* @returns {void}
*/
_addInternal(key, item) {
let entry = this._map.get(key);
if (entry === undefined) {
entry = this._leaf
? new SortableSet(
undefined,
/** @type {Comparator<T>} */
(this._innerArgs[0])
)
: new LazyBucketSortedSet(
.../** @type {[GetKey<T, K>, Comparator<K>]} */
(this._innerArgs)
);
this._keys.add(key);
this._map.set(key, entry);
}
entry.add(item);
}
/**
* @param {T} item an item
* @returns {void}
*/
delete(item) {
this.size--;
if (this._unsortedItems.has(item)) {
this._unsortedItems.delete(item);
return;
}
const key = this._getKey(item);
const entry = /** @type {Entry<T, K>} */ (this._map.get(key));
entry.delete(item);
if (entry.size === 0) {
this._deleteKey(key);
}
}
/**
* @param {K} key key to be removed
* @returns {void}
*/
_deleteKey(key) {
this._keys.delete(key);
this._map.delete(key);
}
/**
* @returns {T | undefined} an item
*/
popFirst() {
if (this.size === 0) return;
this.size--;
if (this._unsortedItems.size > 0) {
for (const item of this._unsortedItems) {
const key = this._getKey(item);
this._addInternal(key, item);
}
this._unsortedItems.clear();
}
this._keys.sort();
const key = /** @type {K} */ (first(this._keys));
const entry = this._map.get(key);
if (this._leaf) {
const leafEntry = /** @type {SortableSet<T>} */ (entry);
leafEntry.sort();
const item = /** @type {T} */ (first(leafEntry));
leafEntry.delete(item);
if (leafEntry.size === 0) {
this._deleteKey(key);
}
return item;
}
const nodeEntry =
/** @type {LazyBucketSortedSet<T, K>} */
(entry);
const item = nodeEntry.popFirst();
if (nodeEntry.size === 0) {
this._deleteKey(key);
}
return item;
}
/**
* @param {T} item to be updated item
* @returns {(remove?: true) => void} finish update
*/
startUpdate(item) {
if (this._unsortedItems.has(item)) {
return remove => {
if (remove) {
this._unsortedItems.delete(item);
this.size--;
}
};
}
const key = this._getKey(item);
if (this._leaf) {
const oldEntry = /** @type {SortableSet<T>} */ (this._map.get(key));
return remove => {
if (remove) {
this.size--;
oldEntry.delete(item);
if (oldEntry.size === 0) {
this._deleteKey(key);
}
return;
}
const newKey = this._getKey(item);
if (key === newKey) {
// This flags the sortable set as unordered
oldEntry.add(item);
} else {
oldEntry.delete(item);
if (oldEntry.size === 0) {
this._deleteKey(key);
}
this._addInternal(newKey, item);
}
};
}
const oldEntry =
/** @type {LazyBucketSortedSet<T, K>} */
(this._map.get(key));
const finishUpdate = oldEntry.startUpdate(item);
return remove => {
if (remove) {
this.size--;
finishUpdate(true);
if (oldEntry.size === 0) {
this._deleteKey(key);
}
return;
}
const newKey = this._getKey(item);
if (key === newKey) {
finishUpdate();
} else {
finishUpdate(true);
if (oldEntry.size === 0) {
this._deleteKey(key);
}
this._addInternal(newKey, item);
}
};
}
/**
* @param {Iterator<T>[]} iterators list of iterators to append to
* @returns {void}
*/
_appendIterators(iterators) {
if (this._unsortedItems.size > 0)
iterators.push(this._unsortedItems[Symbol.iterator]());
for (const key of this._keys) {
const entry = this._map.get(key);
if (this._leaf) {
const leafEntry = /** @type {SortableSet<T>} */ (entry);
const iterator = leafEntry[Symbol.iterator]();
iterators.push(iterator);
} else {
const nodeEntry =
/** @type {LazyBucketSortedSet<T, K>} */
(entry);
nodeEntry._appendIterators(iterators);
}
}
}
/**
* @returns {Iterator<T>} the iterator
*/
[Symbol.iterator]() {
/** @type {Iterator<T>[]} */
const iterators = [];
this._appendIterators(iterators);
iterators.reverse();
let currentIterator =
/** @type {Iterator<T>} */
(iterators.pop());
return {
next: () => {
const res = currentIterator.next();
if (res.done) {
if (iterators.length === 0) return res;
currentIterator = /** @type {Iterator<T>} */ (iterators.pop());
return currentIterator.next();
}
return res;
}
};
}
}
module.exports = LazyBucketSortedSet;
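A hedged sketch of a single-layer LazyBucketSortedSet (field names are illustrative): items are bucketed by the key from the getKey function, buckets are ordered by the key comparator, and items inside a bucket by the final comparator.
```js
const LazyBucketSortedSet = require("./LazyBucketSortedSet");

const set = new LazyBucketSortedSet(
  item => item.priority, // getKey
  (a, b) => a - b, // comparator for bucket keys
  (a, b) => (a.name < b.name ? -1 : 1) // comparator for items within a bucket
);
set.add({ priority: 2, name: "b" });
set.add({ priority: 1, name: "c" });
set.add({ priority: 1, name: "a" });
set.popFirst(); // { priority: 1, name: "a" } (buckets are sorted lazily at this point)
set.size; // 2
```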

230
app_vue/node_modules/webpack/lib/util/LazySet.js generated vendored Normal file
View File

@ -0,0 +1,230 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const makeSerializable = require("./makeSerializable.js");
/**
* @template T
* @param {Set<T>} targetSet set where items should be added
* @param {Set<Iterable<T>>} toMerge iterables to be merged
* @returns {void}
*/
const merge = (targetSet, toMerge) => {
for (const set of toMerge) {
for (const item of set) {
targetSet.add(item);
}
}
};
/**
* @template T
* @param {Set<Iterable<T>>} targetSet set where iterables should be added
* @param {Array<LazySet<T>>} toDeepMerge lazy sets to be flattened
* @returns {void}
*/
const flatten = (targetSet, toDeepMerge) => {
for (const set of toDeepMerge) {
if (set._set.size > 0) targetSet.add(set._set);
if (set._needMerge) {
for (const mergedSet of set._toMerge) {
targetSet.add(mergedSet);
}
flatten(targetSet, set._toDeepMerge);
}
}
};
/**
* Like Set but with an addAll method to eventually add items from another iterable.
* Access methods make sure that all delayed operations are executed.
* Iteration methods deopt to normal Set performance until clear is called again (because of the chance of modifications during iteration).
* @template T
*/
class LazySet {
/**
* @param {Iterable<T>=} iterable init iterable
*/
constructor(iterable) {
/** @type {Set<T>} */
this._set = new Set(iterable);
/** @type {Set<Iterable<T>>} */
this._toMerge = new Set();
/** @type {Array<LazySet<T>>} */
this._toDeepMerge = [];
this._needMerge = false;
this._deopt = false;
}
_flatten() {
flatten(this._toMerge, this._toDeepMerge);
this._toDeepMerge.length = 0;
}
_merge() {
this._flatten();
merge(this._set, this._toMerge);
this._toMerge.clear();
this._needMerge = false;
}
_isEmpty() {
return (
this._set.size === 0 &&
this._toMerge.size === 0 &&
this._toDeepMerge.length === 0
);
}
get size() {
if (this._needMerge) this._merge();
return this._set.size;
}
/**
* @param {T} item an item
* @returns {LazySet<T>} itself
*/
add(item) {
this._set.add(item);
return this;
}
/**
* @param {Iterable<T> | LazySet<T>} iterable an immutable iterable or another immutable LazySet which will eventually be merged into the Set
* @returns {LazySet<T>} itself
*/
addAll(iterable) {
if (this._deopt) {
const _set = this._set;
for (const item of iterable) {
_set.add(item);
}
} else {
if (iterable instanceof LazySet) {
if (iterable._isEmpty()) return this;
this._toDeepMerge.push(iterable);
this._needMerge = true;
if (this._toDeepMerge.length > 100000) {
this._flatten();
}
} else {
this._toMerge.add(iterable);
this._needMerge = true;
}
if (this._toMerge.size > 100000) this._merge();
}
return this;
}
clear() {
this._set.clear();
this._toMerge.clear();
this._toDeepMerge.length = 0;
this._needMerge = false;
this._deopt = false;
}
/**
* @param {T} value an item
* @returns {boolean} true, if the value was in the Set before
*/
delete(value) {
if (this._needMerge) this._merge();
return this._set.delete(value);
}
/**
* @returns {IterableIterator<[T, T]>} entries
*/
entries() {
this._deopt = true;
if (this._needMerge) this._merge();
return this._set.entries();
}
/**
* @template K
* @param {(value: T, value2: T, set: Set<T>) => void} callbackFn function called for each entry
* @param {K} thisArg this argument for the callbackFn
* @returns {void}
*/
forEach(callbackFn, thisArg) {
this._deopt = true;
if (this._needMerge) this._merge();
// eslint-disable-next-line unicorn/no-array-for-each
this._set.forEach(callbackFn, thisArg);
}
/**
* @param {T} item an item
* @returns {boolean} true, when the item is in the Set
*/
has(item) {
if (this._needMerge) this._merge();
return this._set.has(item);
}
/**
* @returns {IterableIterator<T>} keys
*/
keys() {
this._deopt = true;
if (this._needMerge) this._merge();
return this._set.keys();
}
/**
* @returns {IterableIterator<T>} values
*/
values() {
this._deopt = true;
if (this._needMerge) this._merge();
return this._set.values();
}
/**
* @returns {IterableIterator<T>} iterable iterator
*/
[Symbol.iterator]() {
this._deopt = true;
if (this._needMerge) this._merge();
return this._set[Symbol.iterator]();
}
/* istanbul ignore next */
get [Symbol.toStringTag]() {
return "LazySet";
}
/**
* @param {import("../serialization/ObjectMiddleware").ObjectSerializerContext} context context
*/
serialize({ write }) {
if (this._needMerge) this._merge();
write(this._set.size);
for (const item of this._set) write(item);
}
/**
* @template T
* @param {import("../serialization/ObjectMiddleware").ObjectDeserializerContext} context context
* @returns {LazySet<T>} lazy set
*/
static deserialize({ read }) {
const count = read();
const items = [];
for (let i = 0; i < count; i++) {
items.push(read());
}
return new LazySet(items);
}
}
makeSerializable(LazySet, "webpack/lib/util/LazySet");
module.exports = LazySet;
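A short sketch of LazySet's deferred merging (file names are illustrative):
```js
const LazySet = require("./LazySet");

const files = new LazySet(["a.js"]);
// addAll only records the iterable; the merge is deferred until the set is read.
files.addAll(new Set(["b.js", "c.js"]));
files.add("d.js");
files.has("b.js"); // true (forces the pending merge)
files.size; // 4
```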

34
app_vue/node_modules/webpack/lib/util/MapHelpers.js generated vendored Normal file
View File

@ -0,0 +1,34 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* getOrInsert is a helper function for maps that allows you to get a value
* from a map if it exists, or insert a new value if it doesn't. If the value doesn't
* exist, it will be computed by the provided function.
* @template K
* @template V
* @param {Map<K, V>} map The map object to check
* @param {K} key The key to check
* @param {() => V} computer function which will compute the value if it doesn't exist
* @returns {V} The value from the map, or the computed value
* @example
* ```js
* const map = new Map();
* const value = getOrInsert(map, "key", () => "value");
* console.log(value); // "value"
* ```
*/
module.exports.getOrInsert = (map, key, computer) => {
// Grab key from map
const value = map.get(key);
// If the value already exists, return it
if (value !== undefined) return value;
// Otherwise compute the value, set it in the map, and return it
const newValue = computer();
map.set(key, newValue);
return newValue;
};

69
app_vue/node_modules/webpack/lib/util/ParallelismFactorCalculator.js generated vendored Normal file
View File

@ -0,0 +1,69 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const binarySearchBounds = require("./binarySearchBounds");
/** @typedef {(value: number) => void} Callback */
class ParallelismFactorCalculator {
constructor() {
/** @type {number[]} */
this._rangePoints = [];
/** @type {Callback[]} */
this._rangeCallbacks = [];
}
/**
* @param {number} start range start
* @param {number} end range end
* @param {Callback} callback callback
* @returns {void}
*/
range(start, end, callback) {
if (start === end) return callback(1);
this._rangePoints.push(start);
this._rangePoints.push(end);
this._rangeCallbacks.push(callback);
}
calculate() {
const segments = Array.from(new Set(this._rangePoints)).sort((a, b) =>
a < b ? -1 : 1
);
const parallelism = segments.map(() => 0);
const rangeStartIndices = [];
for (let i = 0; i < this._rangePoints.length; i += 2) {
const start = this._rangePoints[i];
const end = this._rangePoints[i + 1];
let idx = binarySearchBounds.eq(segments, start);
rangeStartIndices.push(idx);
do {
parallelism[idx]++;
idx++;
} while (segments[idx] < end);
}
for (let i = 0; i < this._rangeCallbacks.length; i++) {
const start = this._rangePoints[i * 2];
const end = this._rangePoints[i * 2 + 1];
let idx = rangeStartIndices[i];
let sum = 0;
let totalDuration = 0;
let current = start;
do {
const p = parallelism[idx];
idx++;
const duration = segments[idx] - current;
totalDuration += duration;
current = segments[idx];
sum += p * duration;
} while (current < end);
this._rangeCallbacks[i](sum / totalDuration);
}
}
}
module.exports = ParallelismFactorCalculator;
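A hedged sketch of how the calculator might be used: each registered range receives the average number of overlapping ranges (weighted by duration) once calculate() runs. The numbers below are illustrative.
```js
const ParallelismFactorCalculator = require("./ParallelismFactorCalculator");

const calc = new ParallelismFactorCalculator();
// Two overlapping time ranges: [0, 10) and [5, 15).
calc.range(0, 10, factor => console.log("first:", factor)); // first: 1.5
calc.range(5, 15, factor => console.log("second:", factor)); // second: 1.5
calc.calculate();
```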

52
app_vue/node_modules/webpack/lib/util/Queue.js generated vendored Normal file
View File

@ -0,0 +1,52 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @template T
*/
class Queue {
/**
* @param {Iterable<T>=} items The initial elements.
*/
constructor(items) {
/**
* @private
* @type {Set<T>}
*/
this._set = new Set(items);
}
/**
* Returns the number of elements in this queue.
* @returns {number} The number of elements in this queue.
*/
get length() {
return this._set.size;
}
/**
* Appends the specified element to this queue.
* @param {T} item The element to add.
* @returns {void}
*/
enqueue(item) {
this._set.add(item);
}
/**
* Retrieves and removes the head of this queue.
* @returns {T | undefined} The head of the queue, or `undefined` if this queue is empty.
*/
dequeue() {
const result = this._set[Symbol.iterator]().next();
if (result.done) return;
this._set.delete(result.value);
return result.value;
}
}
module.exports = Queue;
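A tiny usage sketch (Set semantics mean duplicates are kept only once):
```js
const Queue = require("./Queue");

const queue = new Queue(["a", "b"]);
queue.enqueue("a"); // already present, so the length stays 2
queue.dequeue(); // "a"
queue.length; // 1
```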

51
app_vue/node_modules/webpack/lib/util/Semaphore.js generated vendored Normal file
View File

@ -0,0 +1,51 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
class Semaphore {
/**
* Creates an instance of Semaphore.
* @param {number} available the initial number of available "tasks" in the Semaphore
*/
constructor(available) {
this.available = available;
/** @type {(() => void)[]} */
this.waiters = [];
/** @private */
this._continue = this._continue.bind(this);
}
/**
* @param {() => void} callback function block to capture and run
* @returns {void}
*/
acquire(callback) {
if (this.available > 0) {
this.available--;
callback();
} else {
this.waiters.push(callback);
}
}
release() {
this.available++;
if (this.waiters.length > 0) {
process.nextTick(this._continue);
}
}
_continue() {
if (this.available > 0 && this.waiters.length > 0) {
this.available--;
const callback = /** @type {(() => void)} */ (this.waiters.pop());
callback();
}
}
}
module.exports = Semaphore;
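A brief sketch of gating concurrency with Semaphore (the task body is illustrative):
```js
const Semaphore = require("./Semaphore");

const semaphore = new Semaphore(2); // at most two tasks run at once
const runTask = id => {
  semaphore.acquire(() => {
    console.log("start", id); // stand-in for real work
    setTimeout(() => semaphore.release(), 100);
  });
};
runTask(1);
runTask(2);
runTask(3); // waits until one of the first two calls release()
```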

94
app_vue/node_modules/webpack/lib/util/SetHelpers.js generated vendored Normal file
View File

@ -0,0 +1,94 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* intersect creates a Set containing the intersection of elements between all sets
* @template T
* @param {Set<T>[]} sets an array of sets being checked for shared elements
* @returns {Set<T>} returns a new Set containing the intersecting items
*/
const intersect = sets => {
if (sets.length === 0) return new Set();
if (sets.length === 1) return new Set(sets[0]);
let minSize = Infinity;
let minIndex = -1;
for (let i = 0; i < sets.length; i++) {
const size = sets[i].size;
if (size < minSize) {
minIndex = i;
minSize = size;
}
}
const current = new Set(sets[minIndex]);
for (let i = 0; i < sets.length; i++) {
if (i === minIndex) continue;
const set = sets[i];
for (const item of current) {
if (!set.has(item)) {
current.delete(item);
}
}
}
return current;
};
/**
* Checks if a set is the subset of another set
* @template T
* @param {Set<T>} bigSet a Set which contains the original elements to compare against
* @param {Set<T>} smallSet the set whose elements might be contained inside of bigSet
* @returns {boolean} returns true if all elements of smallSet are contained in bigSet
*/
const isSubset = (bigSet, smallSet) => {
if (bigSet.size < smallSet.size) return false;
for (const item of smallSet) {
if (!bigSet.has(item)) return false;
}
return true;
};
/**
* @template T
* @param {Set<T>} set a set
* @param {(set: T) => boolean} fn selector function
* @returns {T | undefined} found item
*/
const find = (set, fn) => {
for (const item of set) {
if (fn(item)) return item;
}
};
/**
* @template T
* @param {Set<T> | ReadonlySet<T>} set a set
* @returns {T | undefined} first item
*/
const first = set => {
const entry = set.values().next();
return entry.done ? undefined : entry.value;
};
/**
* @template T
* @param {Set<T>} a first
* @param {Set<T>} b second
* @returns {Set<T>} combined set, may be identical to a or b
*/
const combine = (a, b) => {
if (b.size === 0) return a;
if (a.size === 0) return b;
const set = new Set(a);
for (const item of b) set.add(item);
return set;
};
module.exports.intersect = intersect;
module.exports.isSubset = isSubset;
module.exports.find = find;
module.exports.first = first;
module.exports.combine = combine;
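A quick sketch of the set helpers (sample values only):
```js
const { intersect, isSubset, find, first, combine } = require("./SetHelpers");

intersect([new Set([1, 2, 3]), new Set([2, 3, 4])]); // Set { 2, 3 }
isSubset(new Set([1, 2, 3]), new Set([2, 3])); // true
find(new Set([1, 2, 3]), n => n > 1); // 2
first(new Set(["a", "b"])); // "a"
combine(new Set([1]), new Set([2])); // Set { 1, 2 }
```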

175
app_vue/node_modules/webpack/lib/util/SortableSet.js generated vendored Normal file
View File

@ -0,0 +1,175 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const NONE = Symbol("not sorted");
/**
* A subset of Set that offers sorting functionality
* @template T item type in set
* @extends {Set<T>}
*/
class SortableSet extends Set {
/**
* Create a new sortable set
* @template T
* @typedef {(a: T, b: T) => number} SortFunction
* @param {Iterable<T>=} initialIterable The initial iterable value
* @param {SortFunction<T>=} defaultSort Default sorting function
*/
constructor(initialIterable, defaultSort) {
super(initialIterable);
/**
* @private
* @type {undefined | SortFunction<T>}
*/
this._sortFn = defaultSort;
/**
* @private
* @type {typeof NONE | undefined | ((a: T, b: T) => number)}
*/
this._lastActiveSortFn = NONE;
/**
* @private
* @template R
* @type {Map<(set: SortableSet<T>) => EXPECTED_ANY, EXPECTED_ANY> | undefined}
*/
this._cache = undefined;
/**
* @private
* @template R
* @type {Map<(set: SortableSet<T>) => EXPECTED_ANY, EXPECTED_ANY> | undefined}
*/
this._cacheOrderIndependent = undefined;
}
/**
* @param {T} value value to add to set
* @returns {this} returns itself
*/
add(value) {
this._lastActiveSortFn = NONE;
this._invalidateCache();
this._invalidateOrderedCache();
super.add(value);
return this;
}
/**
* @param {T} value value to delete
* @returns {boolean} true if value existed in set, false otherwise
*/
delete(value) {
this._invalidateCache();
this._invalidateOrderedCache();
return super.delete(value);
}
/**
* @returns {void}
*/
clear() {
this._invalidateCache();
this._invalidateOrderedCache();
return super.clear();
}
/**
* Sort with a comparer function
* @param {SortFunction<T> | undefined} sortFn Sorting comparer function
* @returns {void}
*/
sortWith(sortFn) {
if (this.size <= 1 || sortFn === this._lastActiveSortFn) {
// already sorted - nothing to do
return;
}
const sortedArray = Array.from(this).sort(sortFn);
super.clear();
for (let i = 0; i < sortedArray.length; i += 1) {
super.add(sortedArray[i]);
}
this._lastActiveSortFn = sortFn;
this._invalidateCache();
}
sort() {
this.sortWith(this._sortFn);
return this;
}
/**
* Get data from cache
* @template {EXPECTED_ANY} R
* @param {(set: SortableSet<T>) => R} fn function to calculate value
* @returns {R} returns result of fn(this), cached until set changes
*/
getFromCache(fn) {
if (this._cache === undefined) {
this._cache = new Map();
} else {
const result = this._cache.get(fn);
const data = /** @type {R} */ (result);
if (data !== undefined) {
return data;
}
}
const newData = fn(this);
this._cache.set(fn, newData);
return newData;
}
/**
* Get data from cache (ignoring sorting)
* @template R
* @param {(set: SortableSet<T>) => R} fn function to calculate value
* @returns {R} returns result of fn(this), cached until set changes
*/
getFromUnorderedCache(fn) {
if (this._cacheOrderIndependent === undefined) {
this._cacheOrderIndependent = new Map();
} else {
const result = this._cacheOrderIndependent.get(fn);
const data = /** @type {R} */ (result);
if (data !== undefined) {
return data;
}
}
const newData = fn(this);
this._cacheOrderIndependent.set(fn, newData);
return newData;
}
/**
* @private
* @returns {void}
*/
_invalidateCache() {
if (this._cache !== undefined) {
this._cache.clear();
}
}
/**
* @private
* @returns {void}
*/
_invalidateOrderedCache() {
if (this._cacheOrderIndependent !== undefined) {
this._cacheOrderIndependent.clear();
}
}
/**
* @returns {T[]} the raw array
*/
toJSON() {
return Array.from(this);
}
}
module.exports = SortableSet;
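A short sketch of SortableSet's sorting and memoized derived data (values are illustrative):
```js
const SortableSet = require("./SortableSet");

const set = new SortableSet([3, 1, 2], (a, b) => a - b);
set.sort(); // applies the default comparator
[...set]; // [1, 2, 3]
set.sortWith((a, b) => b - a);
[...set]; // [3, 2, 1]

// Derived data is cached until the set changes.
const sum = set.getFromUnorderedCache(s => {
  let total = 0;
  for (const v of s) total += v;
  return total;
}); // 6
```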

140
app_vue/node_modules/webpack/lib/util/StackedCacheMap.js generated vendored Normal file
View File

@ -0,0 +1,140 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* The StackedCacheMap is a data structure designed as an alternative to a Map
* in situations where you need to handle multiple item additions and
* frequently access the largest map.
*
* It is particularly optimized for efficiently adding multiple items
* at once, which can be achieved using the `addAll` method.
*
* It has a fallback Map that is used when the map to be added is mutable.
*
* Note: `delete` and `has` are not supported for performance reasons.
* @example
* ```js
* const map = new StackedCacheMap();
* map.addAll(new Map([["a", 1], ["b", 2]]), true);
* map.addAll(new Map([["c", 3], ["d", 4]]), true);
* map.get("a"); // 1
* map.get("d"); // 4
* for (const [key, value] of map) {
* console.log(key, value);
* }
* ```
* @template K
* @template V
*/
class StackedCacheMap {
constructor() {
/** @type {Map<K, V>} */
this.map = new Map();
/** @type {ReadonlyMap<K, V>[]} */
this.stack = [];
}
/**
* If `immutable` is true, the map can be referenced by the StackedCacheMap
* and should not be changed afterwards. If the map is mutable, all items
* are copied into a fallback Map.
* @param {ReadonlyMap<K, V>} map map to add
* @param {boolean=} immutable if 'map' is immutable and StackedCacheMap can keep referencing it
*/
addAll(map, immutable) {
if (immutable) {
this.stack.push(map);
// largest map should go first
for (let i = this.stack.length - 1; i > 0; i--) {
const beforeLast = this.stack[i - 1];
if (beforeLast.size >= map.size) break;
this.stack[i] = beforeLast;
this.stack[i - 1] = map;
}
} else {
for (const [key, value] of map) {
this.map.set(key, value);
}
}
}
/**
* @param {K} item the key of the element to add
* @param {V} value the value of the element to add
* @returns {void}
*/
set(item, value) {
this.map.set(item, value);
}
/**
* @param {K} item the item to delete
* @returns {void}
*/
delete(item) {
throw new Error("Items can't be deleted from a StackedCacheMap");
}
/**
* @param {K} item the item to test
* @returns {boolean} true if the item exists in this set
*/
has(item) {
throw new Error(
"Checking StackedCacheMap.has before reading is inefficient, use StackedCacheMap.get and check for undefined"
);
}
/**
* @param {K} item the key of the element to return
* @returns {V | undefined} the value of the element
*/
get(item) {
for (const map of this.stack) {
const value = map.get(item);
if (value !== undefined) return value;
}
return this.map.get(item);
}
clear() {
this.stack.length = 0;
this.map.clear();
}
/**
* @returns {number} size of the map
*/
get size() {
let size = this.map.size;
for (const map of this.stack) {
size += map.size;
}
return size;
}
/**
* @returns {Iterator<[K, V]>} iterator
*/
[Symbol.iterator]() {
const iterators = this.stack.map(map => map[Symbol.iterator]());
let current = this.map[Symbol.iterator]();
return {
next() {
let result = current.next();
while (result.done && iterators.length > 0) {
current = /** @type {IterableIterator<[K, V]>} */ (iterators.pop());
result = current.next();
}
return result;
}
};
}
}
module.exports = StackedCacheMap;

164
app_vue/node_modules/webpack/lib/util/StackedMap.js generated vendored Normal file
View File

@ -0,0 +1,164 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const TOMBSTONE = Symbol("tombstone");
const UNDEFINED_MARKER = Symbol("undefined");
/**
* @template T
* @typedef {T | undefined} Cell<T>
*/
/**
* @template T
* @typedef {T | typeof TOMBSTONE | typeof UNDEFINED_MARKER} InternalCell<T>
*/
/**
* @template K
* @template V
* @param {[K, InternalCell<V>]} pair the internal cell
* @returns {[K, Cell<V>]} its “safe” representation
*/
const extractPair = pair => {
const key = pair[0];
const val = pair[1];
if (val === UNDEFINED_MARKER || val === TOMBSTONE) {
return [key, undefined];
}
return /** @type {[K, Cell<V>]} */ (pair);
};
/**
* @template K
* @template V
*/
class StackedMap {
/**
* @param {Map<K, InternalCell<V>>[]=} parentStack an optional parent
*/
constructor(parentStack) {
/** @type {Map<K, InternalCell<V>>} */
this.map = new Map();
/** @type {Map<K, InternalCell<V>>[]} */
this.stack = parentStack === undefined ? [] : parentStack.slice();
this.stack.push(this.map);
}
/**
* @param {K} item the key of the element to add
* @param {V} value the value of the element to add
* @returns {void}
*/
set(item, value) {
this.map.set(item, value === undefined ? UNDEFINED_MARKER : value);
}
/**
* @param {K} item the item to delete
* @returns {void}
*/
delete(item) {
if (this.stack.length > 1) {
this.map.set(item, TOMBSTONE);
} else {
this.map.delete(item);
}
}
/**
* @param {K} item the item to test
* @returns {boolean} true if the item exists in this set
*/
has(item) {
const topValue = this.map.get(item);
if (topValue !== undefined) {
return topValue !== TOMBSTONE;
}
if (this.stack.length > 1) {
for (let i = this.stack.length - 2; i >= 0; i--) {
const value = this.stack[i].get(item);
if (value !== undefined) {
this.map.set(item, value);
return value !== TOMBSTONE;
}
}
this.map.set(item, TOMBSTONE);
}
return false;
}
/**
* @param {K} item the key of the element to return
* @returns {Cell<V>} the value of the element
*/
get(item) {
const topValue = this.map.get(item);
if (topValue !== undefined) {
return topValue === TOMBSTONE || topValue === UNDEFINED_MARKER
? undefined
: topValue;
}
if (this.stack.length > 1) {
for (let i = this.stack.length - 2; i >= 0; i--) {
const value = this.stack[i].get(item);
if (value !== undefined) {
this.map.set(item, value);
return value === TOMBSTONE || value === UNDEFINED_MARKER
? undefined
: value;
}
}
this.map.set(item, TOMBSTONE);
}
}
_compress() {
if (this.stack.length === 1) return;
this.map = new Map();
for (const data of this.stack) {
for (const pair of data) {
if (pair[1] === TOMBSTONE) {
this.map.delete(pair[0]);
} else {
this.map.set(pair[0], pair[1]);
}
}
}
this.stack = [this.map];
}
asArray() {
this._compress();
return Array.from(this.map.keys());
}
asSet() {
this._compress();
return new Set(this.map.keys());
}
asPairArray() {
this._compress();
return Array.from(this.map.entries(), extractPair);
}
asMap() {
return new Map(this.asPairArray());
}
get size() {
this._compress();
return this.map.size;
}
createChild() {
return new StackedMap(this.stack);
}
}
module.exports = StackedMap;
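A hedged sketch of the copy-on-write layering (keys and values are illustrative):
```js
const StackedMap = require("./StackedMap");

const base = new StackedMap();
base.set("a", 1);
const child = base.createChild(); // shares the parent layer
child.set("b", 2);
child.get("a"); // 1 (found in the parent layer)
child.delete("a"); // records a tombstone in the child only
child.has("a"); // false
base.has("a"); // still true
```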

101
app_vue/node_modules/webpack/lib/util/StringXor.js generated vendored Normal file
View File

@ -0,0 +1,101 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../util/Hash")} Hash */
/**
* StringXor class provides methods for performing
* [XOR operations](https://en.wikipedia.org/wiki/Exclusive_or) on strings. In this context
* we are operating on the character codes of two strings, which are represented as
* [Buffer](https://nodejs.org/api/buffer.html) objects.
*
* We use [StringXor in webpack](https://github.com/webpack/webpack/commit/41a8e2ea483a544c4ccd3e6217bdfb80daffca39)
* to create a hash of the current state of the compilation. By XOR'ing the Module hashes, it
* doesn't matter if the Module hashes are sorted or not. This is useful because it allows us to avoid sorting the
* Module hashes.
* @example
* ```js
* const xor = new StringXor();
* xor.add('hello');
* xor.add('world');
* console.log(xor.toString());
* ```
* @example
* ```js
* const xor = new StringXor();
* xor.add('foo');
* xor.add('bar');
* const hash = createHash('sha256');
* hash.update(xor.toString());
* console.log(hash.digest('hex'));
* ```
*/
class StringXor {
constructor() {
/** @type {Buffer|undefined} */
this._value = undefined;
}
/**
* Adds a string to the current StringXor object.
* @param {string} str string
* @returns {void}
*/
add(str) {
const len = str.length;
const value = this._value;
if (value === undefined) {
/**
* We chose Buffer.allocUnsafe() because it is often faster than Buffer.alloc(): it
* allocates a new buffer of the specified size without initializing the memory.
*/
const newValue = (this._value = Buffer.allocUnsafe(len));
for (let i = 0; i < len; i++) {
newValue[i] = str.charCodeAt(i);
}
return;
}
const valueLen = value.length;
if (valueLen < len) {
const newValue = (this._value = Buffer.allocUnsafe(len));
let i;
for (i = 0; i < valueLen; i++) {
newValue[i] = value[i] ^ str.charCodeAt(i);
}
for (; i < len; i++) {
newValue[i] = str.charCodeAt(i);
}
} else {
for (let i = 0; i < len; i++) {
value[i] = value[i] ^ str.charCodeAt(i);
}
}
}
/**
* Returns a string that represents the current state of the StringXor object. We chose to use "latin1" encoding
* here because "latin1" encoding is a single-byte encoding that can represent all characters in the
* [ISO-8859-1 character set](https://en.wikipedia.org/wiki/ISO/IEC_8859-1). This is useful when working
* with binary data that needs to be represented as a string.
* @returns {string} Returns a string that represents the current state of the StringXor object.
*/
toString() {
const value = this._value;
return value === undefined ? "" : value.toString("latin1");
}
/**
* Updates the hash with the current state of the StringXor object.
* @param {Hash} hash Hash instance
*/
updateHash(hash) {
const value = this._value;
if (value !== undefined) hash.update(value);
}
}
module.exports = StringXor;

70
app_vue/node_modules/webpack/lib/util/TupleQueue.js generated vendored Normal file
View File

@ -0,0 +1,70 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const TupleSet = require("./TupleSet");
/**
* @template T
* @template V
*/
class TupleQueue {
/**
* @param {Iterable<[T, V, ...EXPECTED_ANY]>=} items The initial elements.
*/
constructor(items) {
/**
* @private
* @type {TupleSet<T, V>}
*/
this._set = new TupleSet(items);
/**
* @private
* @type {Iterator<[T, V, ...EXPECTED_ANY]>}
*/
this._iterator = this._set[Symbol.iterator]();
}
/**
* Returns the number of elements in this queue.
* @returns {number} The number of elements in this queue.
*/
get length() {
return this._set.size;
}
/**
* Appends the specified element to this queue.
* @param {[T, V, ...EXPECTED_ANY]} item The element to add.
* @returns {void}
*/
enqueue(...item) {
this._set.add(...item);
}
/**
* Retrieves and removes the head of this queue.
* @returns {[T, V, ...EXPECTED_ANY] | undefined} The head of the queue, or `undefined` if this queue is empty.
*/
dequeue() {
const result = this._iterator.next();
if (result.done) {
if (this._set.size > 0) {
this._iterator = this._set[Symbol.iterator]();
const value =
/** @type {[T, V, ...EXPECTED_ANY]} */
(this._iterator.next().value);
this._set.delete(...value);
return value;
}
return;
}
this._set.delete(.../** @type {[T, V, ...EXPECTED_ANY]} */ (result.value));
return result.value;
}
}
module.exports = TupleQueue;
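A small sketch of TupleQueue (tuple contents are illustrative; duplicates are ignored thanks to the underlying TupleSet):
```js
const TupleQueue = require("./TupleQueue");

const queue = new TupleQueue();
queue.enqueue("moduleA", 1);
queue.enqueue("moduleB", 2);
queue.enqueue("moduleA", 1); // duplicate tuple, ignored
queue.length; // 2
queue.dequeue(); // ["moduleA", 1]
queue.dequeue(); // ["moduleB", 2]
```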

179
app_vue/node_modules/webpack/lib/util/TupleSet.js generated vendored Normal file
View File

@ -0,0 +1,179 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @template K
* @template V
* @typedef {Map<K, InnerMap<K, V> | Set<V>>} InnerMap
*/
/**
* @template T
* @template V
*/
class TupleSet {
/**
* @param {Iterable<[T, V, ...EXPECTED_ANY]>=} init init
*/
constructor(init) {
/** @type {InnerMap<T, V>} */
this._map = new Map();
this.size = 0;
if (init) {
for (const tuple of init) {
this.add(...tuple);
}
}
}
/**
* @param {[T, V, ...EXPECTED_ANY]} args tuple
* @returns {void}
*/
add(...args) {
let map = this._map;
for (let i = 0; i < args.length - 2; i++) {
const arg = args[i];
const innerMap = map.get(arg);
if (innerMap === undefined) {
map.set(arg, (map = new Map()));
} else {
map = /** @type {InnerMap<T, V>} */ (innerMap);
}
}
const beforeLast = args[args.length - 2];
let set = /** @type {Set<V>} */ (map.get(beforeLast));
if (set === undefined) {
map.set(beforeLast, (set = new Set()));
}
const last = args[args.length - 1];
this.size -= set.size;
set.add(last);
this.size += set.size;
}
/**
* @param {[T, V, ...EXPECTED_ANY]} args tuple
* @returns {boolean} true, if the tuple is in the Set
*/
has(...args) {
let map = this._map;
for (let i = 0; i < args.length - 2; i++) {
const arg = args[i];
map = /** @type {InnerMap<T, V>} */ (map.get(arg));
if (map === undefined) {
return false;
}
}
const beforeLast = args[args.length - 2];
const set = map.get(beforeLast);
if (set === undefined) {
return false;
}
const last = args[args.length - 1];
return set.has(last);
}
/**
* @param {[T, V, ...EXPECTED_ANY]} args tuple
* @returns {void}
*/
delete(...args) {
let map = this._map;
for (let i = 0; i < args.length - 2; i++) {
const arg = args[i];
map = /** @type {InnerMap<T, V>} */ (map.get(arg));
if (map === undefined) {
return;
}
}
const beforeLast = args[args.length - 2];
const set = map.get(beforeLast);
if (set === undefined) {
return;
}
const last = args[args.length - 1];
this.size -= set.size;
set.delete(last);
this.size += set.size;
}
/**
* @returns {Iterator<[T, V, ...EXPECTED_ANY]>} iterator
*/
[Symbol.iterator]() {
// This is difficult to type because we can have a map inside a map inside a map, etc. where the end is a set (each key is an argument)
// But in basic use we only have 2 arguments in our methods, so we have `Map<K, Set<V>>`
/** @type {MapIterator<[T, InnerMap<T, V> | Set<V>]>[]} */
const iteratorStack = [];
/** @type {[T?, V?, ...EXPECTED_ANY]} */
const tuple = [];
/** @type {SetIterator<V> | undefined} */
let currentSetIterator;
/**
* @param {MapIterator<[T, InnerMap<T, V> | Set<V>]>} it iterator
* @returns {boolean} result
*/
const next = it => {
const result = it.next();
if (result.done) {
if (iteratorStack.length === 0) return false;
tuple.pop();
return next(
/** @type {MapIterator<[T, InnerMap<T, V> | Set<V>]>} */
(iteratorStack.pop())
);
}
const [key, value] = result.value;
iteratorStack.push(it);
tuple.push(key);
if (value instanceof Set) {
currentSetIterator = value[Symbol.iterator]();
return true;
}
return next(value[Symbol.iterator]());
};
next(this._map[Symbol.iterator]());
return {
next() {
while (currentSetIterator) {
const result = currentSetIterator.next();
if (result.done) {
tuple.pop();
if (
!next(
/** @type {MapIterator<[T, InnerMap<T, V> | Set<V>]>} */
(iteratorStack.pop())
)
) {
currentSetIterator = undefined;
}
} else {
return {
done: false,
value:
/** @type {[T, V, ...EXPECTED_ANY]} */
(tuple.concat(result.value))
};
}
}
return { done: true, value: undefined };
}
};
}
}
module.exports = TupleSet;
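A brief sketch of TupleSet with two-element tuples (names are illustrative):
```js
const TupleSet = require("./TupleSet");

const set = new TupleSet();
set.add("file.js", "loaderA");
set.add("file.js", "loaderB");
set.has("file.js", "loaderA"); // true
set.size; // 2
set.delete("file.js", "loaderA");
for (const [file, loader] of set) {
  console.log(file, loader); // "file.js" "loaderB"
}
```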

87
app_vue/node_modules/webpack/lib/util/URLAbsoluteSpecifier.js generated vendored Normal file
View File

@ -0,0 +1,87 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Ivan Kopeykin @vankop
*/
"use strict";
/** @typedef {import("./fs").InputFileSystem} InputFileSystem */
/** @typedef {(error: Error|null, result?: Buffer) => void} ErrorFirstCallback */
const backSlashCharCode = "\\".charCodeAt(0);
const slashCharCode = "/".charCodeAt(0);
const aLowerCaseCharCode = "a".charCodeAt(0);
const zLowerCaseCharCode = "z".charCodeAt(0);
const aUpperCaseCharCode = "A".charCodeAt(0);
const zUpperCaseCharCode = "Z".charCodeAt(0);
const _0CharCode = "0".charCodeAt(0);
const _9CharCode = "9".charCodeAt(0);
const plusCharCode = "+".charCodeAt(0);
const hyphenCharCode = "-".charCodeAt(0);
const colonCharCode = ":".charCodeAt(0);
const hashCharCode = "#".charCodeAt(0);
const queryCharCode = "?".charCodeAt(0);
/**
* Get scheme if specifier is an absolute URL specifier
* e.g. Absolute specifiers like 'file:///user/webpack/index.js'
* https://tools.ietf.org/html/rfc3986#section-3.1
* @param {string} specifier specifier
* @returns {string|undefined} scheme if absolute URL specifier provided
*/
function getScheme(specifier) {
const start = specifier.charCodeAt(0);
// First char may only be a letter
if (
(start < aLowerCaseCharCode || start > zLowerCaseCharCode) &&
(start < aUpperCaseCharCode || start > zUpperCaseCharCode)
) {
return;
}
let i = 1;
let ch = specifier.charCodeAt(i);
while (
(ch >= aLowerCaseCharCode && ch <= zLowerCaseCharCode) ||
(ch >= aUpperCaseCharCode && ch <= zUpperCaseCharCode) ||
(ch >= _0CharCode && ch <= _9CharCode) ||
ch === plusCharCode ||
ch === hyphenCharCode
) {
if (++i === specifier.length) return;
ch = specifier.charCodeAt(i);
}
// Scheme must end with colon
if (ch !== colonCharCode) return;
// Check for Windows absolute path
// https://url.spec.whatwg.org/#url-miscellaneous
if (i === 1) {
const nextChar = i + 1 < specifier.length ? specifier.charCodeAt(i + 1) : 0;
if (
nextChar === 0 ||
nextChar === backSlashCharCode ||
nextChar === slashCharCode ||
nextChar === hashCharCode ||
nextChar === queryCharCode
) {
return;
}
}
return specifier.slice(0, i).toLowerCase();
}
/**
* @param {string} specifier specifier
* @returns {string | null | undefined} protocol if absolute URL specifier provided
*/
function getProtocol(specifier) {
const scheme = getScheme(specifier);
return scheme === undefined ? undefined : `${scheme}:`;
}
module.exports.getScheme = getScheme;
module.exports.getProtocol = getProtocol;
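A few illustrative calls showing what counts as a scheme (Windows drive letters and relative paths are rejected; the require path assumes this file is util/URLAbsoluteSpecifier.js, as its exports suggest):
```js
const { getScheme, getProtocol } = require("./URLAbsoluteSpecifier");

getScheme("file:///user/webpack/index.js"); // "file"
getScheme("data:text/plain,Hello"); // "data"
getScheme("C:\\Users\\project\\index.js"); // undefined (drive letter, not a scheme)
getScheme("./relative/path.js"); // undefined
getProtocol("https://example.com/app.js"); // "https:"
```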

227
app_vue/node_modules/webpack/lib/util/WeakTupleMap.js generated vendored Normal file
View File

@ -0,0 +1,227 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @template {EXPECTED_ANY[]} T
* @template V
* @typedef {Map<EXPECTED_ANY, WeakTupleMap<T, V>>} M
*/
/**
* @template {EXPECTED_ANY[]} T
* @template V
* @typedef {WeakMap<EXPECTED_OBJECT, WeakTupleMap<T, V>>} W
*/
/**
* @param {EXPECTED_ANY} thing thing
* @returns {boolean} true if is weak
*/
const isWeakKey = thing => typeof thing === "object" && thing !== null;
/**
* @template {unknown[]} T
* @typedef {T extends readonly (infer ElementType)[] ? ElementType : never} ArrayElement
*/
/**
* @template {EXPECTED_ANY[]} K
* @template V
*/
class WeakTupleMap {
constructor() {
/** @private */
this.f = 0;
/**
* @private
* @type {V | undefined}
*/
this.v = undefined;
/**
* @private
* @type {M<K, V> | undefined}
*/
this.m = undefined;
/**
* @private
* @type {W<K, V> | undefined}
*/
this.w = undefined;
}
/**
* @param {[...K, V]} args tuple
* @returns {void}
*/
set(...args) {
/** @type {WeakTupleMap<K, V>} */
let node = this;
for (let i = 0; i < args.length - 1; i++) {
node = node._get(/** @type {ArrayElement<K>} */ (args[i]));
}
node._setValue(/** @type {V} */ (args[args.length - 1]));
}
/**
* @param {K} args tuple
* @returns {boolean} true, if the tuple is in the Set
*/
has(...args) {
/** @type {WeakTupleMap<K, V> | undefined} */
let node = this;
for (let i = 0; i < args.length; i++) {
node = node._peek(/** @type {ArrayElement<K>} */ (args[i]));
if (node === undefined) return false;
}
return node._hasValue();
}
/**
* @param {K} args tuple
* @returns {V | undefined} the value
*/
get(...args) {
/** @type {WeakTupleMap<K, V> | undefined} */
let node = this;
for (let i = 0; i < args.length; i++) {
node = node._peek(/** @type {ArrayElement<K>} */ (args[i]));
if (node === undefined) return;
}
return node._getValue();
}
/**
* @param {[...K, (...args: K) => V]} args tuple
* @returns {V} the value
*/
provide(...args) {
/** @type {WeakTupleMap<K, V>} */
let node = this;
for (let i = 0; i < args.length - 1; i++) {
node = node._get(/** @type {ArrayElement<K>} */ (args[i]));
}
if (node._hasValue()) return /** @type {V} */ (node._getValue());
const fn = /** @type {(...args: K) => V} */ (args[args.length - 1]);
const newValue = fn(.../** @type {K} */ (args.slice(0, -1)));
node._setValue(newValue);
return newValue;
}
/**
* @param {K} args tuple
* @returns {void}
*/
delete(...args) {
/** @type {WeakTupleMap<K, V> | undefined} */
let node = this;
for (let i = 0; i < args.length; i++) {
node = node._peek(/** @type {ArrayElement<K>} */ (args[i]));
if (node === undefined) return;
}
node._deleteValue();
}
/**
* @returns {void}
*/
clear() {
this.f = 0;
this.v = undefined;
this.w = undefined;
this.m = undefined;
}
_getValue() {
return this.v;
}
_hasValue() {
return (this.f & 1) === 1;
}
/**
* @param {V} v value
* @private
*/
_setValue(v) {
this.f |= 1;
this.v = v;
}
_deleteValue() {
this.f &= 6;
this.v = undefined;
}
/**
* @param {ArrayElement<K>} thing thing
* @returns {WeakTupleMap<K, V> | undefined} thing
* @private
*/
_peek(thing) {
if (isWeakKey(thing)) {
if ((this.f & 4) !== 4) return;
return /** @type {WeakMap<ArrayElement<K>, WeakTupleMap<K, V>>} */ (
this.w
).get(thing);
}
if ((this.f & 2) !== 2) return;
return /** @type {Map<ArrayElement<K>, WeakTupleMap<K, V>>} */ (this.m).get(
thing
);
}
/**
* @private
* @param {ArrayElement<K>} thing thing
* @returns {WeakTupleMap<K, V>} value
*/
_get(thing) {
if (isWeakKey(thing)) {
if ((this.f & 4) !== 4) {
/** @type {W<K, V>} */
const newMap = new WeakMap();
this.f |= 4;
/** @type {WeakTupleMap<K, V>} */
const newNode = new WeakTupleMap();
(this.w = newMap).set(thing, newNode);
return newNode;
}
const entry = /** @type {W<K, V>} */ (this.w).get(thing);
if (entry !== undefined) {
return entry;
}
/** @type {WeakTupleMap<K, V>} */
const newNode = new WeakTupleMap();
/** @type {W<K, V>} */
(this.w).set(thing, newNode);
return newNode;
}
if ((this.f & 2) !== 2) {
/** @type {M<K, V>} */
const newMap = new Map();
this.f |= 2;
/** @type {WeakTupleMap<K, V>} */
const newNode = new WeakTupleMap();
(this.m = newMap).set(thing, newNode);
return newNode;
}
const entry =
/** @type {M<K, V>} */
(this.m).get(thing);
if (entry !== undefined) {
return entry;
}
/** @type {WeakTupleMap<K, V>} */
const newNode = new WeakTupleMap();
/** @type {M<K, V>} */
(this.m).set(thing, newNode);
return newNode;
}
}
module.exports = WeakTupleMap;
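A minimal usage sketch of the class above; keys and values are made up. Object keys are held weakly (via the `w` WeakMap), primitive keys strongly (via the `m` Map):

```js
// Sketch: tuple keys may mix objects and primitives.
const cache = new WeakTupleMap();
const mod = { name: "a.js" }; // object key (held weakly)
const runtime = "main"; // primitive key

cache.set(mod, runtime, 42);
cache.get(mod, runtime); // 42
cache.has(mod, "other"); // false

// provide() only calls the factory when no value is stored for the tuple yet.
cache.provide(mod, runtime, () => 42); // 42 (returns the already stored value)
cache.delete(mod, runtime);
cache.has(mod, runtime); // false
```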

View File

@ -0,0 +1,129 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Mikola Lysenko @mikolalysenko
*/
"use strict";
/* cspell:disable-next-line */
// Refactor: Peter Somogyvari @petermetz
/** @typedef {">=" | "<=" | "<" | ">" | "-" } BinarySearchPredicate */
/** @typedef {"GE" | "GT" | "LT" | "LE" | "EQ" } SearchPredicateSuffix */
/**
* Helper function for compiling binary search functions.
*
* The generated code uses a while loop to repeatedly divide the search interval
* in half until the desired element is found, or the search interval is empty.
*
* The following is an example of a generated function for calling `compileSearch("P", "c(x,y)<=0", true, ["y", "c"], false)`:
*
* ```js
* function P(a,l,h,y,c){var i=l-1;while(l<=h){var m=(l+h)>>>1,x=a[m];if(c(x,y)<=0){i=m;l=m+1}else{h=m-1}}return i};
* ```
* @param {string} funcName The name of the function to be compiled.
* @param {string} predicate The predicate / comparison operator to be used in the binary search.
* @param {boolean} reversed Whether the search should be reversed.
* @param {string[]} extraArgs Extra arguments to be passed to the function.
* @param {boolean=} earlyOut Whether the search should return as soon as a match is found.
* @returns {string} The compiled binary search function.
*/
const compileSearch = (funcName, predicate, reversed, extraArgs, earlyOut) => {
const code = [
"function ",
funcName,
"(a,l,h,",
extraArgs.join(","),
"){",
earlyOut ? "" : "var i=",
reversed ? "l-1" : "h+1",
";while(l<=h){var m=(l+h)>>>1,x=a[m]"
];
if (earlyOut) {
if (!predicate.includes("c")) {
code.push(";if(x===y){return m}else if(x<=y){");
} else {
code.push(";var p=c(x,y);if(p===0){return m}else if(p<=0){");
}
} else {
code.push(";if(", predicate, "){i=m;");
}
if (reversed) {
code.push("l=m+1}else{h=m-1}");
} else {
code.push("h=m-1}else{l=m+1}");
}
code.push("}");
if (earlyOut) {
code.push("return -1};");
} else {
code.push("return i};");
}
return code.join("");
};
/**
* This helper function generates code for two binary search functions:
* A(): Performs a binary search on an array using the comparison operator specified.
* P(): Performs a binary search on an array using a _custom comparison function_
* `c(x,y)` **and** comparison operator specified by `predicate`.
* @template T
* @param {BinarySearchPredicate} predicate The predicate / comparison operator to be used in the binary search.
* @param {boolean} reversed Whether the search should be reversed.
* @param {SearchPredicateSuffix} suffix The suffix to be used in the function name.
* @param {boolean=} earlyOut Whether the search should return as soon as a match is found.
* @returns {(items: T[], start: number, compareFn?: number | ((item: T, needle: number) => number), l?: number, h?: number) => number} The compiled binary search function.
*/
const compileBoundsSearch = (predicate, reversed, suffix, earlyOut) => {
const arg1 = compileSearch("A", `x${predicate}y`, reversed, ["y"], earlyOut);
const arg2 = compileSearch(
"P",
`c(x,y)${predicate}0`,
reversed,
["y", "c"],
earlyOut
);
const fnHeader = "function dispatchBinarySearch";
const fnBody =
// eslint-disable-next-line no-multi-str
"(a,y,c,l,h){\
if(typeof(c)==='function'){\
return P(a,(l===void 0)?0:l|0,(h===void 0)?a.length-1:h|0,y,c)\
}else{\
return A(a,(c===void 0)?0:c|0,(l===void 0)?a.length-1:l|0,y)\
}}\
return dispatchBinarySearch";
const fnArgList = [arg1, arg2, fnHeader, suffix, fnBody, suffix];
const fnSource = fnArgList.join("");
// eslint-disable-next-line no-new-func
const result = new Function(fnSource);
return result();
};
/**
* These functions are used to perform binary searches on arrays.
* @example
* ```js
* const { gt, le } = require("./binarySearchBounds");
* const arr = [1, 2, 3, 4, 5, 6, 7, 8, 9];
*
* // Find the index of the first element greater than 5
* const index1 = gt(arr, 5); // index1 === 5
*
* // Find the index of the last element less than or equal to 5
* const index2 = le(arr, 5); // index2 === 4
* ```
*/
module.exports = {
ge: compileBoundsSearch(">=", false, "GE"),
gt: compileBoundsSearch(">", false, "GT"),
lt: compileBoundsSearch("<", true, "LT"),
le: compileBoundsSearch("<=", true, "LE"),
eq: compileBoundsSearch("-", true, "EQ", true)
};
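The dispatcher above also accepts a custom comparison function as the third argument (the `P` variant). A hedged sketch with made-up records:

```js
// Sketch: bounds search over objects using a custom comparator (element, needle) => number.
const { le, ge } = require("./binarySearchBounds");

const mappings = [{ offset: 0 }, { offset: 10 }, { offset: 25 }];
const byOffset = (m, needle) => m.offset - needle;

le(mappings, 12, byOffset); // 1 -> last index with offset <= 12
ge(mappings, 12, byOffset); // 2 -> first index with offset >= 12
```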

View File

@ -0,0 +1,97 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../Dependency")} Dependency */
/** @typedef {import("../Module")} Module */
/** @typedef {import("../ModuleGraph")} ModuleGraph */
/** @typedef {import("../javascript/JavascriptParser").Range} Range */
/**
* @summary Get the subset of ids and their corresponding range in an id chain that should be re-rendered by webpack.
* Only those in the chain that are actually referring to namespaces or imports should be re-rendered.
* Deeper member accessors on the imported object should not be re-rendered. If deeper member accessors are re-rendered,
* there is a potential loss of meaning with rendering a quoted accessor as an unquoted accessor, or vice versa,
* because minifiers treat quoted accessors differently. e.g. import { a } from "./module"; a["b"] vs a.b
* @param {string[]} untrimmedIds chained ids
* @param {Range} untrimmedRange range encompassing untrimmedIds
* @param {Range[] | undefined} ranges cumulative range of ids for each of untrimmedIds
* @param {ModuleGraph} moduleGraph moduleGraph
* @param {Dependency} dependency dependency
* @returns {{trimmedIds: string[], trimmedRange: Range}} computed trimmed ids and cumulative range of those ids
*/
module.exports.getTrimmedIdsAndRange = (
untrimmedIds,
untrimmedRange,
ranges,
moduleGraph,
dependency
) => {
let trimmedIds = trimIdsToThoseImported(
untrimmedIds,
moduleGraph,
dependency
);
let trimmedRange = untrimmedRange;
if (trimmedIds.length !== untrimmedIds.length) {
// The array returned from dep.idRanges is right-aligned with the array returned from dep.names.
// Meaning, the two arrays may not always have the same number of elements, but the last element of
// dep.idRanges corresponds to [the expression fragment to the left of] the last element of dep.names.
// Use this to find the correct replacement range based on the number of ids that were trimmed.
const idx =
ranges === undefined
? -1 /* trigger failure case below */
: ranges.length + (trimmedIds.length - untrimmedIds.length);
if (idx < 0 || idx >= /** @type {Range[]} */ (ranges).length) {
// cspell:ignore minifiers
// Should not happen but we can't throw an error here because of backward compatibility with
// external plugins in wp5. Instead, we just disable trimming for now. This may break some minifiers.
trimmedIds = untrimmedIds;
// TODO webpack 6 remove the "trimmedIds = ids" above and uncomment the following line instead.
// throw new Error("Missing range starts data for id replacement trimming.");
} else {
trimmedRange = /** @type {Range[]} */ (ranges)[idx];
}
}
return { trimmedIds, trimmedRange };
};
/**
* @summary Determine which IDs in the id chain are actually referring to namespaces or imports,
* and which are deeper member accessors on the imported object.
* @param {string[]} ids untrimmed ids
* @param {ModuleGraph} moduleGraph moduleGraph
* @param {Dependency} dependency dependency
* @returns {string[]} trimmed ids
*/
function trimIdsToThoseImported(ids, moduleGraph, dependency) {
/** @type {string[]} */
let trimmedIds = [];
let currentExportsInfo = moduleGraph.getExportsInfo(
/** @type {Module} */ (moduleGraph.getModule(dependency))
);
for (let i = 0; i < ids.length; i++) {
if (i === 0 && ids[i] === "default") {
continue; // ExportInfo for the next level under default is still at the root ExportsInfo, so don't advance currentExportsInfo
}
const exportInfo = currentExportsInfo.getExportInfo(ids[i]);
if (exportInfo.provided === false) {
// json imports have nested ExportInfo for elements that are not actually exported, so check .provided
trimmedIds = ids.slice(0, i);
break;
}
const nestedInfo = exportInfo.getNestedExportsInfo();
if (!nestedInfo) {
// once all nested exports are traversed, the next item is the actual import so stop there
trimmedIds = ids.slice(0, i + 1);
break;
}
currentExportsInfo = nestedInfo;
}
// Never trim to nothing. This can happen for invalid imports (e.g. import { notThere } from "./module", or import { anything } from "./missingModule")
return trimmedIds.length ? trimmedIds : ids;
}
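To make the right-aligned indexing described above concrete, a hedged worked example with hypothetical parser output (not an actual webpack API call):

```js
// Hypothetical values for an expression like `imported.a.b`:
const untrimmedIds = ["a", "b"]; // dep.names
const ranges = [[10, 20]]; // dep.idRanges, right-aligned: the last entry covers the fragment left of "b"
const trimmedIds = ["a"]; // "b" turned out to be a plain member access, not an export

// Same arithmetic as in getTrimmedIdsAndRange above:
const idx = ranges.length + (trimmedIds.length - untrimmedIds.length); // 1 + (1 - 2) === 0
const trimmedRange = ranges[idx]; // [10, 20] -> only the `imported.a` part gets re-rendered
```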

637
app_vue/node_modules/webpack/lib/util/cleverMerge.js generated vendored Normal file
View File

@ -0,0 +1,637 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @type {WeakMap<EXPECTED_OBJECT, WeakMap<EXPECTED_OBJECT, EXPECTED_OBJECT>>} */
const mergeCache = new WeakMap();
/** @type {WeakMap<EXPECTED_OBJECT, Map<string, Map<string | number | boolean, EXPECTED_OBJECT>>>} */
const setPropertyCache = new WeakMap();
const DELETE = Symbol("DELETE");
const DYNAMIC_INFO = Symbol("cleverMerge dynamic info");
/**
* Merges two given objects and caches the result to avoid computation if same objects passed as arguments again.
* @template T
* @template O
* @example
* // performs cleverMerge(first, second), stores the result in WeakMap and returns result
* cachedCleverMerge({a: 1}, {a: 2})
* {a: 2}
* // when same arguments passed, gets the result from WeakMap and returns it.
* cachedCleverMerge({a: 1}, {a: 2})
* {a: 2}
* @param {T | null | undefined} first first object
* @param {O | null | undefined} second second object
* @returns {T & O | T | O} merged object of first and second object
*/
const cachedCleverMerge = (first, second) => {
if (second === undefined) return /** @type {T} */ (first);
if (first === undefined) return /** @type {O} */ (second);
if (typeof second !== "object" || second === null)
return /** @type {O} */ (second);
if (typeof first !== "object" || first === null)
return /** @type {T} */ (first);
let innerCache = mergeCache.get(first);
if (innerCache === undefined) {
innerCache = new WeakMap();
mergeCache.set(first, innerCache);
}
const prevMerge = /** @type {T & O} */ (innerCache.get(second));
if (prevMerge !== undefined) return prevMerge;
const newMerge = _cleverMerge(first, second, true);
innerCache.set(second, newMerge);
return newMerge;
};
/**
* @template T
* @param {Partial<T>} obj object
* @param {string} property property
* @param {string | number | boolean} value assignment value
* @returns {T} new object
*/
const cachedSetProperty = (obj, property, value) => {
let mapByProperty = setPropertyCache.get(obj);
if (mapByProperty === undefined) {
mapByProperty = new Map();
setPropertyCache.set(obj, mapByProperty);
}
let mapByValue = mapByProperty.get(property);
if (mapByValue === undefined) {
mapByValue = new Map();
mapByProperty.set(property, mapByValue);
}
let result = mapByValue.get(value);
if (result) return /** @type {T} */ (result);
result = {
...obj,
[property]: value
};
mapByValue.set(value, result);
return /** @type {T} */ (result);
};
/**
* @template V
* @typedef {Map<string, V | undefined>} ByValues
*/
/**
* @typedef {object} ObjectParsedPropertyEntry
* @property {TODO | undefined} base base value
* @property {string | undefined} byProperty the name of the selector property
* @property {ByValues<TODO>} byValues value depending on selector property, merged with base
*/
/** @typedef {(function(...EXPECTED_ANY): object) & { [DYNAMIC_INFO]: [DynamicFunction, object] }} DynamicFunction */
/**
* @typedef {object} ParsedObject
* @property {Map<string, ObjectParsedPropertyEntry>} static static properties (key is property name)
* @property {{ byProperty: string, fn: DynamicFunction } | undefined} dynamic dynamic part
*/
/** @type {WeakMap<EXPECTED_OBJECT, ParsedObject>} */
const parseCache = new WeakMap();
/**
* @template {object} T
* @param {T} obj the object
* @returns {ParsedObject} parsed object
*/
const cachedParseObject = obj => {
const entry = parseCache.get(/** @type {EXPECTED_OBJECT} */ (obj));
if (entry !== undefined) return entry;
const result = parseObject(obj);
parseCache.set(/** @type {EXPECTED_OBJECT} */ (obj), result);
return result;
};
/**
* @template {object} T
* @template V
* @param {T} obj the object
* @returns {ParsedObject} parsed object
*/
const parseObject = obj => {
const info = new Map();
let dynamicInfo;
/**
* @param {string} p path
* @returns {Partial<ObjectParsedPropertyEntry>} object parsed property entry
*/
const getInfo = p => {
const entry = info.get(p);
if (entry !== undefined) return entry;
const newEntry = {
base: undefined,
byProperty: undefined,
byValues: undefined
};
info.set(p, newEntry);
return newEntry;
};
for (const key of Object.keys(obj)) {
if (key.startsWith("by")) {
const byProperty = /** @type {keyof T} */ (key);
const byObj = /** @type {TODO} */ (obj[byProperty]);
if (typeof byObj === "object") {
for (const byValue of Object.keys(byObj)) {
const obj = byObj[/** @type {keyof (keyof T)} */ (byValue)];
for (const key of Object.keys(obj)) {
const entry = getInfo(key);
if (entry.byProperty === undefined) {
entry.byProperty = /** @type {string} */ (byProperty);
entry.byValues = new Map();
} else if (entry.byProperty !== byProperty) {
throw new Error(
`${/** @type {string} */ (byProperty)} and ${entry.byProperty} for a single property is not supported`
);
}
/** @type {ByValues<V>} */
(entry.byValues).set(
byValue,
obj[/** @type {keyof (keyof T)} */ (key)]
);
if (byValue === "default") {
for (const otherByValue of Object.keys(byObj)) {
if (
!(
/** @type {ByValues<V>} */
(entry.byValues).has(otherByValue)
)
)
/** @type {ByValues<V>} */
(entry.byValues).set(otherByValue, undefined);
}
}
}
}
} else if (typeof byObj === "function") {
if (dynamicInfo === undefined) {
dynamicInfo = {
byProperty: key,
fn: byObj
};
} else {
throw new Error(
`${key} and ${dynamicInfo.byProperty} when both are functions is not supported`
);
}
} else {
const entry = getInfo(key);
entry.base = obj[/** @type {keyof T} */ (key)];
}
} else {
const entry = getInfo(key);
entry.base = obj[/** @type {keyof T} */ (key)];
}
}
return {
static: info,
dynamic: dynamicInfo
};
};
/**
* @template {object} T
* @param {Map<string, ObjectParsedPropertyEntry>} info static properties (key is property name)
* @param {{ byProperty: string, fn: (...args: EXPECTED_ANY[]) => T } | undefined} dynamicInfo dynamic part
* @returns {T} the object
*/
const serializeObject = (info, dynamicInfo) => {
const obj = /** @type {T} */ ({});
// Setup byProperty structure
for (const entry of info.values()) {
if (entry.byProperty !== undefined) {
const byProperty = /** @type {keyof T} */ (entry.byProperty);
const byObj = (obj[byProperty] =
obj[byProperty] || /** @type {TODO} */ ({}));
for (const byValue of entry.byValues.keys()) {
byObj[byValue] = byObj[byValue] || {};
}
}
}
for (const [key, entry] of info) {
if (entry.base !== undefined) {
obj[/** @type {keyof T} */ (key)] = entry.base;
}
// Fill byProperty structure
if (entry.byProperty !== undefined) {
const byProperty = /** @type {keyof T} */ (entry.byProperty);
const byObj = (obj[byProperty] =
obj[byProperty] || /** @type {TODO} */ ({}));
for (const byValue of Object.keys(byObj)) {
const value = getFromByValues(entry.byValues, byValue);
if (value !== undefined) byObj[byValue][key] = value;
}
}
}
if (dynamicInfo !== undefined) {
/** @type {TODO} */
(obj)[dynamicInfo.byProperty] = dynamicInfo.fn;
}
return obj;
};
const VALUE_TYPE_UNDEFINED = 0;
const VALUE_TYPE_ATOM = 1;
const VALUE_TYPE_ARRAY_EXTEND = 2;
const VALUE_TYPE_OBJECT = 3;
const VALUE_TYPE_DELETE = 4;
/**
* @template T
* @param {T} value a single value
* @returns {VALUE_TYPE_UNDEFINED | VALUE_TYPE_ATOM | VALUE_TYPE_ARRAY_EXTEND | VALUE_TYPE_OBJECT | VALUE_TYPE_DELETE} value type
*/
const getValueType = value => {
if (value === undefined) {
return VALUE_TYPE_UNDEFINED;
} else if (value === DELETE) {
return VALUE_TYPE_DELETE;
} else if (Array.isArray(value)) {
if (value.includes("...")) return VALUE_TYPE_ARRAY_EXTEND;
return VALUE_TYPE_ATOM;
} else if (
typeof value === "object" &&
value !== null &&
(!value.constructor || value.constructor === Object)
) {
return VALUE_TYPE_OBJECT;
}
return VALUE_TYPE_ATOM;
};
/**
* Merges two objects. Objects are deeply clever merged.
* Arrays might reference the old value with "...".
* Non-object values take preference over object values.
* @template T
* @template O
* @param {T} first first object
* @param {O} second second object
* @returns {T & O | T | O} merged object of first and second object
*/
const cleverMerge = (first, second) => {
if (second === undefined) return first;
if (first === undefined) return second;
if (typeof second !== "object" || second === null) return second;
if (typeof first !== "object" || first === null) return first;
return /** @type {T & O} */ (_cleverMerge(first, second, false));
};
/**
* @template {object} T
* @template {object} O
* Merges two objects. Objects are deeply clever merged.
* @param {T} first first
* @param {O} second second
* @param {boolean} internalCaching should parsing of objects and nested merges be cached
* @returns {T & O} merged object of first and second object
*/
const _cleverMerge = (first, second, internalCaching = false) => {
const firstObject = internalCaching
? cachedParseObject(first)
: parseObject(first);
const { static: firstInfo, dynamic: firstDynamicInfo } = firstObject;
// If the first argument has a dynamic part we modify the dynamic part to merge the second argument
if (firstDynamicInfo !== undefined) {
let { byProperty, fn } = firstDynamicInfo;
const fnInfo = fn[DYNAMIC_INFO];
if (fnInfo) {
second =
/** @type {TODO} */
(
internalCaching
? cachedCleverMerge(fnInfo[1], second)
: cleverMerge(fnInfo[1], second)
);
fn = fnInfo[0];
}
/** @type {DynamicFunction} */
const newFn = (...args) => {
const fnResult = fn(...args);
return internalCaching
? cachedCleverMerge(fnResult, second)
: cleverMerge(fnResult, second);
};
newFn[DYNAMIC_INFO] = [fn, second];
return /** @type {T & O} */ (
serializeObject(firstObject.static, { byProperty, fn: newFn })
);
}
// If the first part is static only, we merge the static parts and keep the dynamic part of the second argument
const secondObject = internalCaching
? cachedParseObject(second)
: parseObject(second);
const { static: secondInfo, dynamic: secondDynamicInfo } = secondObject;
/** @type {Map<string, ObjectParsedPropertyEntry>} */
const resultInfo = new Map();
for (const [key, firstEntry] of firstInfo) {
const secondEntry = secondInfo.get(key);
const entry =
secondEntry !== undefined
? mergeEntries(firstEntry, secondEntry, internalCaching)
: firstEntry;
resultInfo.set(key, entry);
}
for (const [key, secondEntry] of secondInfo) {
if (!firstInfo.has(key)) {
resultInfo.set(key, secondEntry);
}
}
return /** @type {T & O} */ (serializeObject(resultInfo, secondDynamicInfo));
};
/**
* @param {ObjectParsedPropertyEntry} firstEntry a
* @param {ObjectParsedPropertyEntry} secondEntry b
* @param {boolean} internalCaching should parsing of objects and nested merges be cached
* @returns {ObjectParsedPropertyEntry} new entry
*/
const mergeEntries = (firstEntry, secondEntry, internalCaching) => {
switch (getValueType(secondEntry.base)) {
case VALUE_TYPE_ATOM:
case VALUE_TYPE_DELETE:
// No need to consider firstEntry at all
// second value overrides everything
// = second.base + second.byProperty
return secondEntry;
case VALUE_TYPE_UNDEFINED:
if (!firstEntry.byProperty) {
// = first.base + second.byProperty
return {
base: firstEntry.base,
byProperty: secondEntry.byProperty,
byValues: secondEntry.byValues
};
} else if (firstEntry.byProperty !== secondEntry.byProperty) {
throw new Error(
`${firstEntry.byProperty} and ${secondEntry.byProperty} for a single property is not supported`
);
} else {
// = first.base + (first.byProperty + second.byProperty)
// need to merge first and second byValues
const newByValues = new Map(firstEntry.byValues);
for (const [key, value] of secondEntry.byValues) {
const firstValue = getFromByValues(firstEntry.byValues, key);
newByValues.set(
key,
mergeSingleValue(firstValue, value, internalCaching)
);
}
return {
base: firstEntry.base,
byProperty: firstEntry.byProperty,
byValues: newByValues
};
}
default: {
if (!firstEntry.byProperty) {
// The simple case
// = (first.base + second.base) + second.byProperty
return {
base: mergeSingleValue(
firstEntry.base,
secondEntry.base,
internalCaching
),
byProperty: secondEntry.byProperty,
byValues: secondEntry.byValues
};
}
let newBase;
const intermediateByValues = new Map(firstEntry.byValues);
for (const [key, value] of intermediateByValues) {
intermediateByValues.set(
key,
mergeSingleValue(value, secondEntry.base, internalCaching)
);
}
if (
Array.from(firstEntry.byValues.values()).every(value => {
const type = getValueType(value);
return type === VALUE_TYPE_ATOM || type === VALUE_TYPE_DELETE;
})
) {
// = (first.base + second.base) + ((first.byProperty + second.base) + second.byProperty)
newBase = mergeSingleValue(
firstEntry.base,
secondEntry.base,
internalCaching
);
} else {
// = first.base + ((first.byProperty (+default) + second.base) + second.byProperty)
newBase = firstEntry.base;
if (!intermediateByValues.has("default"))
intermediateByValues.set("default", secondEntry.base);
}
if (!secondEntry.byProperty) {
// = first.base + (first.byProperty + second.base)
return {
base: newBase,
byProperty: firstEntry.byProperty,
byValues: intermediateByValues
};
} else if (firstEntry.byProperty !== secondEntry.byProperty) {
throw new Error(
`${firstEntry.byProperty} and ${secondEntry.byProperty} for a single property is not supported`
);
}
const newByValues = new Map(intermediateByValues);
for (const [key, value] of secondEntry.byValues) {
const firstValue = getFromByValues(intermediateByValues, key);
newByValues.set(
key,
mergeSingleValue(firstValue, value, internalCaching)
);
}
return {
base: newBase,
byProperty: firstEntry.byProperty,
byValues: newByValues
};
}
}
};
/**
* @template V
* @param {ByValues<V>} byValues all values
* @param {string} key value of the selector
* @returns {V | undefined} value
*/
const getFromByValues = (byValues, key) => {
if (key !== "default" && byValues.has(key)) {
return byValues.get(key);
}
return byValues.get("default");
};
/**
* @template A
* @template B
* @param {A | A[]} a value
* @param {B | B[]} b value
* @param {boolean} internalCaching should parsing of objects and nested merges be cached
* @returns {A & B | (A | B)[] | A | A[] | B | B[]} value
*/
const mergeSingleValue = (a, b, internalCaching) => {
const bType = getValueType(b);
const aType = getValueType(a);
switch (bType) {
case VALUE_TYPE_DELETE:
case VALUE_TYPE_ATOM:
return b;
case VALUE_TYPE_OBJECT: {
return aType !== VALUE_TYPE_OBJECT
? b
: internalCaching
? cachedCleverMerge(a, b)
: cleverMerge(a, b);
}
case VALUE_TYPE_UNDEFINED:
return a;
case VALUE_TYPE_ARRAY_EXTEND:
switch (
aType !== VALUE_TYPE_ATOM
? aType
: Array.isArray(a)
? VALUE_TYPE_ARRAY_EXTEND
: VALUE_TYPE_OBJECT
) {
case VALUE_TYPE_UNDEFINED:
return b;
case VALUE_TYPE_DELETE:
return /** @type {B[]} */ (b).filter(item => item !== "...");
case VALUE_TYPE_ARRAY_EXTEND: {
/** @type {(A | B)[]} */
const newArray = [];
for (const item of /** @type {B[]} */ (b)) {
if (item === "...") {
for (const item of /** @type {A[]} */ (a)) {
newArray.push(item);
}
} else {
newArray.push(item);
}
}
return newArray;
}
case VALUE_TYPE_OBJECT:
return /** @type {(A | B)[]} */ (b).map(item =>
item === "..." ? /** @type {A} */ (a) : item
);
default:
throw new Error("Not implemented");
}
default:
throw new Error("Not implemented");
}
};
/**
* @template {object} T
* @param {T} obj the object
* @param {(keyof T)[]=} keysToKeepOriginalValue keys to keep original value
* @returns {T} the object without operations like "..." or DELETE
*/
const removeOperations = (obj, keysToKeepOriginalValue = []) => {
const newObj = /** @type {T} */ ({});
for (const _key of Object.keys(obj)) {
const key = /** @type {keyof T} */ (_key);
const value = obj[key];
const type = getValueType(value);
if (type === VALUE_TYPE_OBJECT && keysToKeepOriginalValue.includes(key)) {
newObj[key] = value;
continue;
}
switch (type) {
case VALUE_TYPE_UNDEFINED:
case VALUE_TYPE_DELETE:
break;
case VALUE_TYPE_OBJECT:
newObj[key] =
/** @type {T[keyof T]} */
(
removeOperations(
/** @type {T} */
(value),
keysToKeepOriginalValue
)
);
break;
case VALUE_TYPE_ARRAY_EXTEND:
newObj[key] =
/** @type {T[keyof T]} */
(
/** @type {EXPECTED_ANY[]} */
(value).filter(i => i !== "...")
);
break;
default:
newObj[key] = value;
break;
}
}
return newObj;
};
/**
* @template T
* @template {keyof T} P
* @template V
* @param {T} obj the object
* @param {P} byProperty the by description
* @param {...V} values values
* @returns {Omit<T, P>} object with merged byProperty
*/
const resolveByProperty = (obj, byProperty, ...values) => {
if (typeof obj !== "object" || obj === null || !(byProperty in obj)) {
return obj;
}
const { [byProperty]: _byValue, ..._remaining } = obj;
const remaining = /** @type {T} */ (_remaining);
const byValue =
/** @type {Record<string, T> | ((...args: V[]) => T)} */
(_byValue);
if (typeof byValue === "object") {
const key = /** @type {string} */ (values[0]);
if (key in byValue) {
return cachedCleverMerge(remaining, byValue[key]);
} else if ("default" in byValue) {
return cachedCleverMerge(remaining, byValue.default);
}
return remaining;
} else if (typeof byValue === "function") {
// eslint-disable-next-line prefer-spread
const result = byValue.apply(null, values);
return cachedCleverMerge(
remaining,
resolveByProperty(result, byProperty, ...values)
);
}
return obj;
};
module.exports.cachedSetProperty = cachedSetProperty;
module.exports.cachedCleverMerge = cachedCleverMerge;
module.exports.cleverMerge = cleverMerge;
module.exports.resolveByProperty = resolveByProperty;
module.exports.removeOperations = removeOperations;
module.exports.DELETE = DELETE;
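A few illustrative calls against the exports above. The option names (`extensions`, `alias`, the `byMode` selector) are placeholders, not a statement about webpack's actual configuration schema:

```js
const { cleverMerge, resolveByProperty, removeOperations, DELETE } = require("./cleverMerge");

// Arrays in the second object can splice in the previous value with "...".
cleverMerge({ extensions: [".js"] }, { extensions: ["...", ".ts"] });
// -> { extensions: [".js", ".ts"] }

// DELETE marks a property for removal; removeOperations() strips the marker.
removeOperations(cleverMerge({ alias: { a: "./a" } }, { alias: DELETE }));
// -> {}

// "by<Property>" keys hold values that depend on another value, resolved later.
resolveByProperty(
	{ mode: "production", byMode: { development: { devtool: "eval" } } },
	"byMode",
	"development"
);
// -> { mode: "production", devtool: "eval" }
```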

523
app_vue/node_modules/webpack/lib/util/comparators.js generated vendored Normal file
View File

@ -0,0 +1,523 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { compareRuntime } = require("./runtime");
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Chunk").ChunkId} ChunkId */
/** @typedef {import("../ChunkGraph")} ChunkGraph */
/** @typedef {import("../ChunkGraph").ModuleId} ModuleId */
/** @typedef {import("../ChunkGroup")} ChunkGroup */
/** @typedef {import("../Dependency").DependencyLocation} DependencyLocation */
/** @typedef {import("../Module")} Module */
/** @typedef {import("../ModuleGraph")} ModuleGraph */
/**
* @template T
* @typedef {(a: T, b: T) => -1 | 0 | 1} Comparator
*/
/**
* @template {object} TArg
* @template T
* @typedef {(tArg: TArg, a: T, b: T) => -1 | 0 | 1} RawParameterizedComparator
*/
/**
* @template {object} TArg
* @template T
* @typedef {(tArg: TArg) => Comparator<T>} ParameterizedComparator
*/
/**
* @template {object} TArg
* @template {object} T
* @param {RawParameterizedComparator<TArg, T>} fn comparator with argument
* @returns {ParameterizedComparator<TArg, T>} comparator
*/
const createCachedParameterizedComparator = fn => {
/** @type {WeakMap<EXPECTED_OBJECT, Comparator<T>>} */
const map = new WeakMap();
return arg => {
const cachedResult = map.get(/** @type {EXPECTED_OBJECT} */ (arg));
if (cachedResult !== undefined) return cachedResult;
/**
* @param {T} a first item
* @param {T} b second item
* @returns {-1|0|1} compare result
*/
const result = fn.bind(null, arg);
map.set(/** @type {EXPECTED_OBJECT} */ (arg), result);
return result;
};
};
/**
* @param {Chunk} a chunk
* @param {Chunk} b chunk
* @returns {-1|0|1} compare result
*/
module.exports.compareChunksById = (a, b) =>
compareIds(/** @type {ChunkId} */ (a.id), /** @type {ChunkId} */ (b.id));
/**
* @param {Module} a module
* @param {Module} b module
* @returns {-1|0|1} compare result
*/
module.exports.compareModulesByIdentifier = (a, b) =>
compareIds(a.identifier(), b.identifier());
/**
* @param {ChunkGraph} chunkGraph the chunk graph
* @param {Module} a module
* @param {Module} b module
* @returns {-1|0|1} compare result
*/
const compareModulesById = (chunkGraph, a, b) =>
compareIds(
/** @type {ModuleId} */ (chunkGraph.getModuleId(a)),
/** @type {ModuleId} */ (chunkGraph.getModuleId(b))
);
/** @type {ParameterizedComparator<ChunkGraph, Module>} */
module.exports.compareModulesById =
createCachedParameterizedComparator(compareModulesById);
/**
* @param {number} a number
* @param {number} b number
* @returns {-1|0|1} compare result
*/
const compareNumbers = (a, b) => {
if (typeof a !== typeof b) {
return typeof a < typeof b ? -1 : 1;
}
if (a < b) return -1;
if (a > b) return 1;
return 0;
};
module.exports.compareNumbers = compareNumbers;
/**
* @param {string} a string
* @param {string} b string
* @returns {-1|0|1} compare result
*/
const compareStringsNumeric = (a, b) => {
const aLength = a.length;
const bLength = b.length;
let aChar = 0;
let bChar = 0;
let aIsDigit = false;
let bIsDigit = false;
let i = 0;
let j = 0;
while (i < aLength && j < bLength) {
aChar = a.charCodeAt(i);
bChar = b.charCodeAt(j);
aIsDigit = aChar >= 48 && aChar <= 57;
bIsDigit = bChar >= 48 && bChar <= 57;
if (!aIsDigit && !bIsDigit) {
if (aChar < bChar) return -1;
if (aChar > bChar) return 1;
i++;
j++;
} else if (aIsDigit && !bIsDigit) {
// This segment of a is shorter than in b
return 1;
} else if (!aIsDigit && bIsDigit) {
// This segment of b is shorter than in a
return -1;
} else {
let aNumber = aChar - 48;
let bNumber = bChar - 48;
while (++i < aLength) {
aChar = a.charCodeAt(i);
if (aChar < 48 || aChar > 57) break;
aNumber = aNumber * 10 + aChar - 48;
}
while (++j < bLength) {
bChar = b.charCodeAt(j);
if (bChar < 48 || bChar > 57) break;
bNumber = bNumber * 10 + bChar - 48;
}
if (aNumber < bNumber) return -1;
if (aNumber > bNumber) return 1;
}
}
if (j < bLength) {
// a is shorter than b
bChar = b.charCodeAt(j);
bIsDigit = bChar >= 48 && bChar <= 57;
return bIsDigit ? -1 : 1;
}
if (i < aLength) {
// b is shorter than a
aChar = a.charCodeAt(i);
aIsDigit = aChar >= 48 && aChar <= 57;
return aIsDigit ? 1 : -1;
}
return 0;
};
module.exports.compareStringsNumeric = compareStringsNumeric;
/**
* @param {ModuleGraph} moduleGraph the module graph
* @param {Module} a module
* @param {Module} b module
* @returns {-1|0|1} compare result
*/
const compareModulesByPostOrderIndexOrIdentifier = (moduleGraph, a, b) => {
const cmp = compareNumbers(
/** @type {number} */ (moduleGraph.getPostOrderIndex(a)),
/** @type {number} */ (moduleGraph.getPostOrderIndex(b))
);
if (cmp !== 0) return cmp;
return compareIds(a.identifier(), b.identifier());
};
/** @type {ParameterizedComparator<ModuleGraph, Module>} */
module.exports.compareModulesByPostOrderIndexOrIdentifier =
createCachedParameterizedComparator(
compareModulesByPostOrderIndexOrIdentifier
);
/**
* @param {ModuleGraph} moduleGraph the module graph
* @param {Module} a module
* @param {Module} b module
* @returns {-1|0|1} compare result
*/
const compareModulesByPreOrderIndexOrIdentifier = (moduleGraph, a, b) => {
const cmp = compareNumbers(
/** @type {number} */ (moduleGraph.getPreOrderIndex(a)),
/** @type {number} */ (moduleGraph.getPreOrderIndex(b))
);
if (cmp !== 0) return cmp;
return compareIds(a.identifier(), b.identifier());
};
/** @type {ParameterizedComparator<ModuleGraph, Module>} */
module.exports.compareModulesByPreOrderIndexOrIdentifier =
createCachedParameterizedComparator(
compareModulesByPreOrderIndexOrIdentifier
);
/**
* @param {ChunkGraph} chunkGraph the chunk graph
* @param {Module} a module
* @param {Module} b module
* @returns {-1|0|1} compare result
*/
const compareModulesByIdOrIdentifier = (chunkGraph, a, b) => {
const cmp = compareIds(
/** @type {ModuleId} */ (chunkGraph.getModuleId(a)),
/** @type {ModuleId} */ (chunkGraph.getModuleId(b))
);
if (cmp !== 0) return cmp;
return compareIds(a.identifier(), b.identifier());
};
/** @type {ParameterizedComparator<ChunkGraph, Module>} */
module.exports.compareModulesByIdOrIdentifier =
createCachedParameterizedComparator(compareModulesByIdOrIdentifier);
/**
* @param {ChunkGraph} chunkGraph the chunk graph
* @param {Chunk} a chunk
* @param {Chunk} b chunk
* @returns {-1 | 0 | 1} compare result
*/
const compareChunks = (chunkGraph, a, b) => chunkGraph.compareChunks(a, b);
/** @type {ParameterizedComparator<ChunkGraph, Chunk>} */
module.exports.compareChunks =
createCachedParameterizedComparator(compareChunks);
/**
* @param {string | number} a first id
* @param {string | number} b second id
* @returns {-1 | 0 | 1} compare result
*/
const compareIds = (a, b) => {
if (typeof a !== typeof b) {
return typeof a < typeof b ? -1 : 1;
}
if (a < b) return -1;
if (a > b) return 1;
return 0;
};
module.exports.compareIds = compareIds;
/**
* @param {string} a first string
* @param {string} b second string
* @returns {-1|0|1} compare result
*/
const compareStrings = (a, b) => {
if (a < b) return -1;
if (a > b) return 1;
return 0;
};
module.exports.compareStrings = compareStrings;
/**
* @param {ChunkGroup} a first chunk group
* @param {ChunkGroup} b second chunk group
* @returns {-1 | 0 | 1} compare result
*/
const compareChunkGroupsByIndex = (a, b) =>
/** @type {number} */ (a.index) < /** @type {number} */ (b.index) ? -1 : 1;
module.exports.compareChunkGroupsByIndex = compareChunkGroupsByIndex;
/**
* @template {EXPECTED_OBJECT} K1
* @template {EXPECTED_OBJECT} K2
* @template T
*/
class TwoKeyWeakMap {
constructor() {
/**
* @private
* @type {WeakMap<K1, WeakMap<K2, T | undefined>>}
*/
this._map = new WeakMap();
}
/**
* @param {K1} key1 first key
* @param {K2} key2 second key
* @returns {T | undefined} value
*/
get(key1, key2) {
const childMap = this._map.get(key1);
if (childMap === undefined) {
return;
}
return childMap.get(key2);
}
/**
* @param {K1} key1 first key
* @param {K2} key2 second key
* @param {T | undefined} value new value
* @returns {void}
*/
set(key1, key2, value) {
let childMap = this._map.get(key1);
if (childMap === undefined) {
childMap = new WeakMap();
this._map.set(key1, childMap);
}
childMap.set(key2, value);
}
}
/** @type {TwoKeyWeakMap<Comparator<EXPECTED_ANY>, Comparator<EXPECTED_ANY>, Comparator<EXPECTED_ANY>>}} */
const concatComparatorsCache = new TwoKeyWeakMap();
/**
* @template T
* @param {Comparator<T>} c1 comparator
* @param {Comparator<T>} c2 comparator
* @param {Comparator<T>[]} cRest comparators
* @returns {Comparator<T>} comparator
*/
const concatComparators = (c1, c2, ...cRest) => {
if (cRest.length > 0) {
const [c3, ...cRest2] = cRest;
return concatComparators(c1, concatComparators(c2, c3, ...cRest2));
}
const cacheEntry = /** @type {Comparator<T>} */ (
concatComparatorsCache.get(c1, c2)
);
if (cacheEntry !== undefined) return cacheEntry;
/**
* @param {T} a first value
* @param {T} b second value
* @returns {-1|0|1} compare result
*/
const result = (a, b) => {
const res = c1(a, b);
if (res !== 0) return res;
return c2(a, b);
};
concatComparatorsCache.set(c1, c2, result);
return result;
};
module.exports.concatComparators = concatComparators;
/**
* @template A, B
* @typedef {(input: A) => B | undefined | null} Selector
*/
/** @type {TwoKeyWeakMap<Selector<EXPECTED_ANY, EXPECTED_ANY>, Comparator<EXPECTED_ANY>, Comparator<EXPECTED_ANY>>}} */
const compareSelectCache = new TwoKeyWeakMap();
/**
* @template T
* @template R
* @param {Selector<T, R>} getter getter for value
* @param {Comparator<R>} comparator comparator
* @returns {Comparator<T>} comparator
*/
const compareSelect = (getter, comparator) => {
const cacheEntry = compareSelectCache.get(getter, comparator);
if (cacheEntry !== undefined) return cacheEntry;
/**
* @param {T} a first value
* @param {T} b second value
* @returns {-1|0|1} compare result
*/
const result = (a, b) => {
const aValue = getter(a);
const bValue = getter(b);
if (aValue !== undefined && aValue !== null) {
if (bValue !== undefined && bValue !== null) {
return comparator(aValue, bValue);
}
return -1;
}
if (bValue !== undefined && bValue !== null) {
return 1;
}
return 0;
};
compareSelectCache.set(getter, comparator, result);
return result;
};
module.exports.compareSelect = compareSelect;
/** @type {WeakMap<Comparator<EXPECTED_ANY>, Comparator<Iterable<EXPECTED_ANY>>>} */
const compareIteratorsCache = new WeakMap();
/**
* @template T
* @param {Comparator<T>} elementComparator comparator for elements
* @returns {Comparator<Iterable<T>>} comparator for iterables of elements
*/
const compareIterables = elementComparator => {
const cacheEntry = compareIteratorsCache.get(elementComparator);
if (cacheEntry !== undefined) return cacheEntry;
/**
* @param {Iterable<T>} a first value
* @param {Iterable<T>} b second value
* @returns {-1|0|1} compare result
*/
const result = (a, b) => {
const aI = a[Symbol.iterator]();
const bI = b[Symbol.iterator]();
while (true) {
const aItem = aI.next();
const bItem = bI.next();
if (aItem.done) {
return bItem.done ? 0 : -1;
} else if (bItem.done) {
return 1;
}
const res = elementComparator(aItem.value, bItem.value);
if (res !== 0) return res;
}
};
compareIteratorsCache.set(elementComparator, result);
return result;
};
module.exports.compareIterables = compareIterables;
// TODO this is no longer needed when minimum node.js version is >= 12
// since these versions ship with a stable sort function
/**
* @template T
* @param {Iterable<T>} iterable original ordered list
* @returns {Comparator<T>} comparator
*/
module.exports.keepOriginalOrder = iterable => {
/** @type {Map<T, number>} */
const map = new Map();
let i = 0;
for (const item of iterable) {
map.set(item, i++);
}
return (a, b) =>
compareNumbers(
/** @type {number} */ (map.get(a)),
/** @type {number} */ (map.get(b))
);
};
/**
* @param {ChunkGraph} chunkGraph the chunk graph
* @returns {Comparator<Chunk>} comparator
*/
module.exports.compareChunksNatural = chunkGraph => {
const cmpFn = module.exports.compareModulesById(chunkGraph);
const cmpIterableFn = compareIterables(cmpFn);
return concatComparators(
compareSelect(
chunk => /** @type {string|number} */ (chunk.name),
compareIds
),
compareSelect(chunk => chunk.runtime, compareRuntime),
compareSelect(
/**
* @param {Chunk} chunk a chunk
* @returns {Iterable<Module>} modules
*/
chunk => chunkGraph.getOrderedChunkModulesIterable(chunk, cmpFn),
cmpIterableFn
)
);
};
/**
* Compare two locations
* @param {DependencyLocation} a A location node
* @param {DependencyLocation} b A location node
* @returns {-1|0|1} sorting comparator value
*/
module.exports.compareLocations = (a, b) => {
const isObjectA = typeof a === "object" && a !== null;
const isObjectB = typeof b === "object" && b !== null;
if (!isObjectA || !isObjectB) {
if (isObjectA) return 1;
if (isObjectB) return -1;
return 0;
}
if ("start" in a) {
if ("start" in b) {
const ap = a.start;
const bp = b.start;
if (ap.line < bp.line) return -1;
if (ap.line > bp.line) return 1;
if (/** @type {number} */ (ap.column) < /** @type {number} */ (bp.column))
return -1;
if (/** @type {number} */ (ap.column) > /** @type {number} */ (bp.column))
return 1;
} else return -1;
} else if ("start" in b) return 1;
if ("name" in a) {
if ("name" in b) {
if (a.name < b.name) return -1;
if (a.name > b.name) return 1;
} else return -1;
} else if ("name" in b) return 1;
if ("index" in a) {
if ("index" in b) {
if (/** @type {number} */ (a.index) < /** @type {number} */ (b.index))
return -1;
if (/** @type {number} */ (a.index) > /** @type {number} */ (b.index))
return 1;
} else return -1;
} else if ("index" in b) return 1;
return 0;
};
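A hedged sketch composing the helpers above; plain objects stand in for webpack's Module/Chunk types:

```js
const {
	compareSelect,
	compareIds,
	compareStringsNumeric,
	concatComparators
} = require("./comparators");

const chunks = [
	{ name: "chunk10", id: 2 },
	{ name: "chunk2", id: 1 },
	{ name: "chunk2", id: 0 }
];

// Sort by name with numeric-aware ordering, break ties by id.
const byName = compareSelect(chunk => chunk.name, compareStringsNumeric);
const byId = compareSelect(chunk => chunk.id, compareIds);
chunks.sort(concatComparators(byName, byId));
// -> chunk2 (id 0), chunk2 (id 1), chunk10 (id 2)
```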

View File

@ -0,0 +1,234 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @param {string} str string
* @returns {string} quoted meta
*/
const quoteMeta = str => str.replace(/[-[\]\\/{}()*+?.^$|]/g, "\\$&");
/**
* @param {string} str string
* @returns {string} string
*/
const toSimpleString = str => {
if (`${Number(str)}` === str) {
return str;
}
return JSON.stringify(str);
};
/**
* @param {Record<string | number, boolean>} map value map
* @returns {boolean | ((value: string) => string)} true/false, when unconditionally true/false, or a template function to determine the value at runtime
*/
const compileBooleanMatcher = map => {
const positiveItems = Object.keys(map).filter(i => map[i]);
const negativeItems = Object.keys(map).filter(i => !map[i]);
if (positiveItems.length === 0) return false;
if (negativeItems.length === 0) return true;
return compileBooleanMatcherFromLists(positiveItems, negativeItems);
};
/**
* @param {string[]} positiveItems positive items
* @param {string[]} negativeItems negative items
* @returns {(value: string) => string} a template function to determine the value at runtime
*/
const compileBooleanMatcherFromLists = (positiveItems, negativeItems) => {
if (positiveItems.length === 0) return () => "false";
if (negativeItems.length === 0) return () => "true";
if (positiveItems.length === 1)
return value => `${toSimpleString(positiveItems[0])} == ${value}`;
if (negativeItems.length === 1)
return value => `${toSimpleString(negativeItems[0])} != ${value}`;
const positiveRegexp = itemsToRegexp(positiveItems);
const negativeRegexp = itemsToRegexp(negativeItems);
if (positiveRegexp.length <= negativeRegexp.length) {
return value => `/^${positiveRegexp}$/.test(${value})`;
}
return value => `!/^${negativeRegexp}$/.test(${value})`;
};
/**
* @param {Set<string>} itemsSet items set
* @param {(str: string) => string | false} getKey get key function
* @param {(str: Array<string>) => boolean} condition condition
* @returns {Array<Array<string>>} list of common items
*/
const popCommonItems = (itemsSet, getKey, condition) => {
/** @type {Map<string, Array<string>>} */
const map = new Map();
for (const item of itemsSet) {
const key = getKey(item);
if (key) {
let list = map.get(key);
if (list === undefined) {
/** @type {Array<string>} */
list = [];
map.set(key, list);
}
list.push(item);
}
}
/** @type {Array<Array<string>>} */
const result = [];
for (const list of map.values()) {
if (condition(list)) {
for (const item of list) {
itemsSet.delete(item);
}
result.push(list);
}
}
return result;
};
/**
* @param {Array<string>} items items
* @returns {string} common prefix
*/
const getCommonPrefix = items => {
let prefix = items[0];
for (let i = 1; i < items.length; i++) {
const item = items[i];
for (let p = 0; p < prefix.length; p++) {
if (item[p] !== prefix[p]) {
prefix = prefix.slice(0, p);
break;
}
}
}
return prefix;
};
/**
* @param {Array<string>} items items
* @returns {string} common suffix
*/
const getCommonSuffix = items => {
let suffix = items[0];
for (let i = 1; i < items.length; i++) {
const item = items[i];
for (let p = item.length - 1, s = suffix.length - 1; s >= 0; p--, s--) {
if (item[p] !== suffix[s]) {
suffix = suffix.slice(s + 1);
break;
}
}
}
return suffix;
};
/**
* @param {Array<string>} itemsArr array of items
* @returns {string} regexp
*/
const itemsToRegexp = itemsArr => {
if (itemsArr.length === 1) {
return quoteMeta(itemsArr[0]);
}
/** @type {Array<string>} */
const finishedItems = [];
// merge single char items: (a|b|c|d|ef) => ([abcd]|ef)
let countOfSingleCharItems = 0;
for (const item of itemsArr) {
if (item.length === 1) {
countOfSingleCharItems++;
}
}
// special case for only single char items
if (countOfSingleCharItems === itemsArr.length) {
return `[${quoteMeta(itemsArr.sort().join(""))}]`;
}
const items = new Set(itemsArr.sort());
if (countOfSingleCharItems > 2) {
let singleCharItems = "";
for (const item of items) {
if (item.length === 1) {
singleCharItems += item;
items.delete(item);
}
}
finishedItems.push(`[${quoteMeta(singleCharItems)}]`);
}
// special case for 2 items with common prefix/suffix
if (finishedItems.length === 0 && items.size === 2) {
const prefix = getCommonPrefix(itemsArr);
const suffix = getCommonSuffix(
itemsArr.map(item => item.slice(prefix.length))
);
if (prefix.length > 0 || suffix.length > 0) {
return `${quoteMeta(prefix)}${itemsToRegexp(
itemsArr.map(i => i.slice(prefix.length, -suffix.length || undefined))
)}${quoteMeta(suffix)}`;
}
}
// special case for 2 items with common suffix
if (finishedItems.length === 0 && items.size === 2) {
/** @type {Iterator<string>} */
const it = items[Symbol.iterator]();
const a = it.next().value;
const b = it.next().value;
if (a.length > 0 && b.length > 0 && a.slice(-1) === b.slice(-1)) {
return `${itemsToRegexp([a.slice(0, -1), b.slice(0, -1)])}${quoteMeta(
a.slice(-1)
)}`;
}
}
// find common prefix: (a1|a2|a3|a4|b5) => (a(1|2|3|4)|b5)
const prefixed = popCommonItems(
items,
item => (item.length >= 1 ? item[0] : false),
list => {
if (list.length >= 3) return true;
if (list.length <= 1) return false;
return list[0][1] === list[1][1];
}
);
for (const prefixedItems of prefixed) {
const prefix = getCommonPrefix(prefixedItems);
finishedItems.push(
`${quoteMeta(prefix)}${itemsToRegexp(
prefixedItems.map(i => i.slice(prefix.length))
)}`
);
}
// find common suffix: (a1|b1|c1|d1|e2) => ((a|b|c|d)1|e2)
const suffixed = popCommonItems(
items,
item => (item.length >= 1 ? item.slice(-1) : false),
list => {
if (list.length >= 3) return true;
if (list.length <= 1) return false;
return list[0].slice(-2) === list[1].slice(-2);
}
);
for (const suffixedItems of suffixed) {
const suffix = getCommonSuffix(suffixedItems);
finishedItems.push(
`${itemsToRegexp(
suffixedItems.map(i => i.slice(0, -suffix.length))
)}${quoteMeta(suffix)}`
);
}
// TODO further optimize regexp, i. e.
// use ranges: (1|2|3|4|a) => [1-4a]
const conditional = finishedItems.concat(Array.from(items, quoteMeta));
if (conditional.length === 1) return conditional[0];
return `(${conditional.join("|")})`;
};
compileBooleanMatcher.fromLists = compileBooleanMatcherFromLists;
compileBooleanMatcher.itemsToRegexp = itemsToRegexp;
module.exports = compileBooleanMatcher;
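An illustrative call of the matcher defined above, e.g. for deciding per chunk id whether some runtime code applies (ids made up):

```js
// All-true / all-false maps short-circuit to booleans:
compileBooleanMatcher({ 1: true, 2: true }); // true
compileBooleanMatcher({ 1: false }); // false

// Mixed maps return a template function that produces a JS expression string
// for the runtime template, given the variable name holding the value:
const matcher = compileBooleanMatcher({ 1: true, 2: true, 10: false, 20: false });
matcher("chunkId"); // "/^[12]$/.test(chunkId)"
```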

231
app_vue/node_modules/webpack/lib/util/concatenate.js generated vendored Normal file
View File

@ -0,0 +1,231 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const Template = require("../Template");
/** @typedef {import("eslint-scope").Scope} Scope */
/** @typedef {import("eslint-scope").Reference} Reference */
/** @typedef {import("eslint-scope").Variable} Variable */
/** @typedef {import("estree").Node} Node */
/** @typedef {import("../javascript/JavascriptParser").Range} Range */
/** @typedef {import("../javascript/JavascriptParser").Program} Program */
/** @typedef {Set<string>} UsedNames */
const DEFAULT_EXPORT = "__WEBPACK_DEFAULT_EXPORT__";
const NAMESPACE_OBJECT_EXPORT = "__WEBPACK_NAMESPACE_OBJECT__";
/**
* @param {Variable} variable variable
* @returns {Reference[]} references
*/
const getAllReferences = variable => {
let set = variable.references;
// Look for inner scope variables too (like in class Foo { t() { Foo } })
const identifiers = new Set(variable.identifiers);
for (const scope of variable.scope.childScopes) {
for (const innerVar of scope.variables) {
if (innerVar.identifiers.some(id => identifiers.has(id))) {
set = set.concat(innerVar.references);
break;
}
}
}
return set;
};
/**
* @param {Node | Node[]} ast ast
* @param {Node} node node
* @returns {undefined | Node[]} result
*/
const getPathInAst = (ast, node) => {
if (ast === node) {
return [];
}
const nr = /** @type {Range} */ (node.range);
/**
* @param {Node} n node
* @returns {Node[] | undefined} result
*/
const enterNode = n => {
if (!n) return;
const r = n.range;
if (r && r[0] <= nr[0] && r[1] >= nr[1]) {
const path = getPathInAst(n, node);
if (path) {
path.push(n);
return path;
}
}
};
if (Array.isArray(ast)) {
for (let i = 0; i < ast.length; i++) {
const enterResult = enterNode(ast[i]);
if (enterResult !== undefined) return enterResult;
}
} else if (ast && typeof ast === "object") {
const keys =
/** @type {Array<keyof Node>} */
(Object.keys(ast));
for (let i = 0; i < keys.length; i++) {
// We are making the faster check in `enterNode` using `n.range`
const value =
ast[
/** @type {Exclude<keyof Node, "range" | "loc" | "leadingComments" | "trailingComments">} */
(keys[i])
];
if (Array.isArray(value)) {
const pathResult = getPathInAst(value, node);
if (pathResult !== undefined) return pathResult;
} else if (value && typeof value === "object") {
const enterResult = enterNode(value);
if (enterResult !== undefined) return enterResult;
}
}
}
};
/**
* @param {string} oldName old name
* @param {UsedNames} usedNamed1 used named 1
* @param {UsedNames} usedNamed2 used named 2
* @param {string} extraInfo extra info
* @returns {string} found new name
*/
function findNewName(oldName, usedNamed1, usedNamed2, extraInfo) {
let name = oldName;
if (name === DEFAULT_EXPORT) {
name = "";
}
if (name === NAMESPACE_OBJECT_EXPORT) {
name = "namespaceObject";
}
// Remove uncool stuff
extraInfo = extraInfo.replace(
/\.+\/|(\/index)?\.([a-zA-Z0-9]{1,4})($|\s|\?)|\s*\+\s*\d+\s*modules/g,
""
);
const splittedInfo = extraInfo.split("/");
while (splittedInfo.length) {
name = splittedInfo.pop() + (name ? `_${name}` : "");
const nameIdent = Template.toIdentifier(name);
if (
!usedNamed1.has(nameIdent) &&
(!usedNamed2 || !usedNamed2.has(nameIdent))
)
return nameIdent;
}
let i = 0;
let nameWithNumber = Template.toIdentifier(`${name}_${i}`);
while (
usedNamed1.has(nameWithNumber) ||
// eslint-disable-next-line no-unmodified-loop-condition
(usedNamed2 && usedNamed2.has(nameWithNumber))
) {
i++;
nameWithNumber = Template.toIdentifier(`${name}_${i}`);
}
return nameWithNumber;
}
/** @typedef {Set<Scope>} ScopeSet */
/**
* @param {Scope | null} s scope
* @param {UsedNames} nameSet name set
* @param {ScopeSet} scopeSet1 scope set 1
* @param {ScopeSet} scopeSet2 scope set 2
*/
const addScopeSymbols = (s, nameSet, scopeSet1, scopeSet2) => {
let scope = s;
while (scope) {
if (scopeSet1.has(scope)) break;
if (scopeSet2.has(scope)) break;
scopeSet1.add(scope);
for (const variable of scope.variables) {
nameSet.add(variable.name);
}
scope = scope.upper;
}
};
const RESERVED_NAMES = new Set(
[
// internal names (should always be renamed)
DEFAULT_EXPORT,
NAMESPACE_OBJECT_EXPORT,
// keywords
"abstract,arguments,async,await,boolean,break,byte,case,catch,char,class,const,continue",
"debugger,default,delete,do,double,else,enum,eval,export,extends,false,final,finally,float",
"for,function,goto,if,implements,import,in,instanceof,int,interface,let,long,native,new,null",
"package,private,protected,public,return,short,static,super,switch,synchronized,this,throw",
"throws,transient,true,try,typeof,var,void,volatile,while,with,yield",
// commonjs/amd
"module,__dirname,__filename,exports,require,define",
// js globals
"Array,Date,eval,function,hasOwnProperty,Infinity,isFinite,isNaN,isPrototypeOf,length,Math",
"NaN,name,Number,Object,prototype,String,Symbol,toString,undefined,valueOf",
// browser globals
"alert,all,anchor,anchors,area,assign,blur,button,checkbox,clearInterval,clearTimeout",
"clientInformation,close,closed,confirm,constructor,crypto,decodeURI,decodeURIComponent",
"defaultStatus,document,element,elements,embed,embeds,encodeURI,encodeURIComponent,escape",
"event,fileUpload,focus,form,forms,frame,innerHeight,innerWidth,layer,layers,link,location",
"mimeTypes,navigate,navigator,frames,frameRate,hidden,history,image,images,offscreenBuffering",
"open,opener,option,outerHeight,outerWidth,packages,pageXOffset,pageYOffset,parent,parseFloat",
"parseInt,password,pkcs11,plugin,prompt,propertyIsEnum,radio,reset,screenX,screenY,scroll",
"secure,select,self,setInterval,setTimeout,status,submit,taint,text,textarea,top,unescape",
"untaint,window",
// window events
"onblur,onclick,onerror,onfocus,onkeydown,onkeypress,onkeyup,onmouseover,onload,onmouseup,onmousedown,onsubmit"
]
.join(",")
.split(",")
);
/** @typedef {{ usedNames: UsedNames, alreadyCheckedScopes: ScopeSet }} ScopeInfo */
/**
* @param {Map<string, ScopeInfo>} usedNamesInScopeInfo used names in scope info
* @param {string} module module identifier
* @param {string} id export id
* @returns {ScopeInfo} info
*/
const getUsedNamesInScopeInfo = (usedNamesInScopeInfo, module, id) => {
const key = `${module}-${id}`;
let info = usedNamesInScopeInfo.get(key);
if (info === undefined) {
info = {
usedNames: new Set(),
alreadyCheckedScopes: new Set()
};
usedNamesInScopeInfo.set(key, info);
}
return info;
};
module.exports = {
getUsedNamesInScopeInfo,
findNewName,
getAllReferences,
getPathInAst,
NAMESPACE_OBJECT_EXPORT,
DEFAULT_EXPORT,
RESERVED_NAMES,
addScopeSymbols
};
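A small sketch of `findNewName` from the exports above; the module path and names are arbitrary examples:

```js
const { findNewName, DEFAULT_EXPORT } = require("./concatenate");

// Derive a readable identifier from the module path, e.g. when names collide.
findNewName("button", new Set(["button"]), new Set(), "./components/Button.js");
// -> "Button_button"

// Internal placeholder names are dropped and replaced by path-derived names.
findNewName(DEFAULT_EXPORT, new Set(), new Set(), "./components/Button.js");
// -> "Button"
```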

126
app_vue/node_modules/webpack/lib/util/conventions.js generated vendored Normal file
View File

@ -0,0 +1,126 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Gengkun He @ahabhgk
*/
"use strict";
/** @typedef {import("../../declarations/WebpackOptions").CssGeneratorExportsConvention} CssGeneratorExportsConvention */
/**
* @param {string} input input
* @param {CssGeneratorExportsConvention | undefined} convention convention
* @returns {string[]} results
*/
module.exports.cssExportConvention = (input, convention) => {
const set = new Set();
if (typeof convention === "function") {
set.add(convention(input));
} else {
switch (convention) {
case "camel-case": {
set.add(input);
set.add(module.exports.camelCase(input));
break;
}
case "camel-case-only": {
set.add(module.exports.camelCase(input));
break;
}
case "dashes": {
set.add(input);
set.add(module.exports.dashesCamelCase(input));
break;
}
case "dashes-only": {
set.add(module.exports.dashesCamelCase(input));
break;
}
case "as-is": {
set.add(input);
break;
}
}
}
return Array.from(set);
};
// Copy from css-loader
/**
* @param {string} input input
* @returns {string} result
*/
module.exports.dashesCamelCase = input =>
input.replace(/-+(\w)/g, (match, firstLetter) => firstLetter.toUpperCase());
// Copy from css-loader
/**
* @param {string} input input
* @returns {string} result
*/
module.exports.camelCase = input => {
let result = input.trim();
if (result.length === 0) {
return "";
}
if (result.length === 1) {
return result.toLowerCase();
}
const hasUpperCase = result !== result.toLowerCase();
if (hasUpperCase) {
result = preserveCamelCase(result);
}
return result
.replace(/^[_.\- ]+/, "")
.toLowerCase()
.replace(/[_.\- ]+([\p{Alpha}\p{N}_]|$)/gu, (_, p1) => p1.toUpperCase())
.replace(/\d+([\p{Alpha}\p{N}_]|$)/gu, m => m.toUpperCase());
};
// Copy from css-loader
/**
* @param {string} string string
* @returns {string} result
*/
const preserveCamelCase = string => {
let result = string;
let isLastCharLower = false;
let isLastCharUpper = false;
let isLastLastCharUpper = false;
for (let i = 0; i < result.length; i++) {
const character = result[i];
if (isLastCharLower && /[\p{Lu}]/u.test(character)) {
result = `${result.slice(0, i)}-${result.slice(i)}`;
isLastCharLower = false;
isLastLastCharUpper = isLastCharUpper;
isLastCharUpper = true;
i += 1;
} else if (
isLastCharUpper &&
isLastLastCharUpper &&
/[\p{Ll}]/u.test(character)
) {
result = `${result.slice(0, i - 1)}-${result.slice(i - 1)}`;
isLastLastCharUpper = isLastCharUpper;
isLastCharUpper = false;
isLastCharLower = true;
} else {
isLastCharLower =
character.toLowerCase() === character &&
character.toUpperCase() !== character;
isLastLastCharUpper = isLastCharUpper;
isLastCharUpper =
character.toUpperCase() === character &&
character.toLowerCase() !== character;
}
}
return result;
};
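Illustrative calls of the helpers above (the class name is made up):

```js
const { cssExportConvention, camelCase, dashesCamelCase } = require("./conventions");

camelCase("foo-bar"); // "fooBar"
dashesCamelCase("foo-bar"); // "fooBar"

// The convention controls which variants a CSS export name gets:
cssExportConvention("foo-bar", "camel-case"); // ["foo-bar", "fooBar"]
cssExportConvention("foo-bar", "camel-case-only"); // ["fooBar"]
cssExportConvention("foo-bar", "as-is"); // ["foo-bar"]
```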

View File

@ -0,0 +1,41 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const memoize = require("./memoize");
/** @typedef {import("schema-utils/declarations/validate").ValidationErrorConfiguration} ValidationErrorConfiguration */
/** @typedef {import("./fs").JsonObject} JsonObject */
const getValidate = memoize(() => require("schema-utils").validate);
/**
* @template {object | object[]} T
* @param {((value: T) => boolean) | undefined} check check
* @param {() => JsonObject} getSchema get schema fn
* @param {ValidationErrorConfiguration} options options
* @returns {(value?: T) => void} validate
*/
const createSchemaValidation = (check, getSchema, options) => {
getSchema = memoize(getSchema);
return value => {
if (check && value && !check(value)) {
getValidate()(
getSchema(),
/** @type {EXPECTED_OBJECT | EXPECTED_OBJECT[]} */
(value),
options
);
require("util").deprecate(
() => {},
"webpack bug: Pre-compiled schema reports error while real schema is happy. This has performance drawbacks.",
"DEP_WEBPACK_PRE_COMPILED_SCHEMA_INVALID"
)();
}
};
};
module.exports = createSchemaValidation;

194
app_vue/node_modules/webpack/lib/util/createHash.js generated vendored Normal file
View File

@ -0,0 +1,194 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const Hash = require("./Hash");
/** @typedef {import("../../declarations/WebpackOptions").HashFunction} HashFunction */
const BULK_SIZE = 2000;
// We are using an object instead of a Map as this will stay static during the runtime
// so access to it can be optimized by v8
/** @type {{[key: string]: Map<string, string>}} */
const digestCaches = {};
/** @typedef {() => Hash} HashFactory */
class BulkUpdateDecorator extends Hash {
/**
* @param {Hash | HashFactory} hashOrFactory function to create a hash
* @param {string=} hashKey key for caching
*/
constructor(hashOrFactory, hashKey) {
super();
this.hashKey = hashKey;
if (typeof hashOrFactory === "function") {
this.hashFactory = hashOrFactory;
this.hash = undefined;
} else {
this.hashFactory = undefined;
this.hash = hashOrFactory;
}
this.buffer = "";
}
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
*/
update(data, inputEncoding) {
if (
inputEncoding !== undefined ||
typeof data !== "string" ||
data.length > BULK_SIZE
) {
if (this.hash === undefined)
this.hash = /** @type {HashFactory} */ (this.hashFactory)();
if (this.buffer.length > 0) {
this.hash.update(this.buffer);
this.buffer = "";
}
this.hash.update(data, inputEncoding);
} else {
this.buffer += data;
if (this.buffer.length > BULK_SIZE) {
if (this.hash === undefined)
this.hash = /** @type {HashFactory} */ (this.hashFactory)();
this.hash.update(this.buffer);
this.buffer = "";
}
}
return this;
}
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
*/
digest(encoding) {
let digestCache;
const buffer = this.buffer;
if (this.hash === undefined) {
// short data for hash, we can use caching
const cacheKey = `${this.hashKey}-${encoding}`;
digestCache = digestCaches[cacheKey];
if (digestCache === undefined) {
digestCache = digestCaches[cacheKey] = new Map();
}
const cacheEntry = digestCache.get(buffer);
if (cacheEntry !== undefined) return cacheEntry;
this.hash = /** @type {HashFactory} */ (this.hashFactory)();
}
if (buffer.length > 0) {
this.hash.update(buffer);
}
const digestResult = this.hash.digest(encoding);
const result =
typeof digestResult === "string" ? digestResult : digestResult.toString();
if (digestCache !== undefined) {
digestCache.set(buffer, result);
}
return result;
}
}
/* istanbul ignore next */
class DebugHash extends Hash {
constructor() {
super();
this.string = "";
}
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
*/
update(data, inputEncoding) {
if (typeof data !== "string") data = data.toString("utf-8");
const prefix = Buffer.from("@webpack-debug-digest@").toString("hex");
if (data.startsWith(prefix)) {
data = Buffer.from(data.slice(prefix.length), "hex").toString();
}
this.string += `[${data}](${
/** @type {string} */ (new Error().stack).split("\n", 3)[2]
})\n`;
return this;
}
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
*/
digest(encoding) {
return Buffer.from(`@webpack-debug-digest@${this.string}`).toString("hex");
}
}
/** @type {typeof import("crypto") | undefined} */
let crypto;
/** @type {typeof import("./hash/xxhash64") | undefined} */
let createXXHash64;
/** @type {typeof import("./hash/md4") | undefined} */
let createMd4;
/** @type {typeof import("./hash/BatchedHash") | undefined} */
let BatchedHash;
/**
* Creates a hash by name or function
* @param {HashFunction} algorithm the algorithm name or a constructor creating a hash
* @returns {Hash} the hash
*/
module.exports = algorithm => {
if (typeof algorithm === "function") {
// eslint-disable-next-line new-cap
return new BulkUpdateDecorator(() => new algorithm());
}
switch (algorithm) {
// TODO add non-cryptographic algorithm here
case "debug":
return new DebugHash();
case "xxhash64":
if (createXXHash64 === undefined) {
createXXHash64 = require("./hash/xxhash64");
if (BatchedHash === undefined) {
BatchedHash = require("./hash/BatchedHash");
}
}
return new /** @type {typeof import("./hash/BatchedHash")} */ (
BatchedHash
)(createXXHash64());
case "md4":
if (createMd4 === undefined) {
createMd4 = require("./hash/md4");
if (BatchedHash === undefined) {
BatchedHash = require("./hash/BatchedHash");
}
}
return new /** @type {typeof import("./hash/BatchedHash")} */ (
BatchedHash
)(createMd4());
case "native-md4":
if (crypto === undefined) crypto = require("crypto");
return new BulkUpdateDecorator(
() => /** @type {typeof import("crypto")} */ (crypto).createHash("md4"),
"md4"
);
default:
if (crypto === undefined) crypto = require("crypto");
return new BulkUpdateDecorator(
() =>
/** @type {typeof import("crypto")} */ (crypto).createHash(algorithm),
algorithm
);
}
};
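// A minimal usage sketch for the factory above:
const exampleHash = module.exports("xxhash64"); // also accepts "md4", "native-md4", "debug", any crypto algorithm name, or a Hash constructor
exampleHash.update("module source text");
const exampleDigest = exampleHash.digest("hex"); // hex-encoded digest string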

349
app_vue/node_modules/webpack/lib/util/deprecation.js generated vendored Normal file
View File

@ -0,0 +1,349 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const util = require("util");
/** @type {Map<string, () => void>} */
const deprecationCache = new Map();
/**
* @typedef {object} FakeHookMarker
* @property {true} _fakeHook it's a fake hook
*/
/**
* @template T
* @typedef {T & FakeHookMarker} FakeHook<T>
*/
/**
* @param {string} message deprecation message
* @param {string} code deprecation code
* @returns {() => void} function to trigger deprecation
*/
const createDeprecation = (message, code) => {
const cached = deprecationCache.get(message);
if (cached !== undefined) return cached;
const fn = util.deprecate(
() => {},
message,
`DEP_WEBPACK_DEPRECATION_${code}`
);
deprecationCache.set(message, fn);
return fn;
};
/** @typedef {"concat" | "entry" | "filter" | "find" | "findIndex" | "includes" | "indexOf" | "join" | "lastIndexOf" | "map" | "reduce" | "reduceRight" | "slice" | "some"} COPY_METHODS_NAMES */
/** @type {COPY_METHODS_NAMES[]} */
const COPY_METHODS = [
"concat",
"entry",
"filter",
"find",
"findIndex",
"includes",
"indexOf",
"join",
"lastIndexOf",
"map",
"reduce",
"reduceRight",
"slice",
"some"
];
/** @typedef {"copyWithin" | "entries" | "fill" | "keys" | "pop" | "reverse" | "shift" | "splice" | "sort" | "unshift"} DISABLED_METHODS_NAMES */
/** @type {DISABLED_METHODS_NAMES[]} */
const DISABLED_METHODS = [
"copyWithin",
"entries",
"fill",
"keys",
"pop",
"reverse",
"shift",
"splice",
"sort",
"unshift"
];
/**
* @template T
* @typedef {Set<T> & {[Symbol.isConcatSpreadable]?: boolean} & { push?: (...items: T[]) => void } & { [P in DISABLED_METHODS_NAMES]?: () => void } & { [P in COPY_METHODS_NAMES]?: () => TODO }} SetWithDeprecatedArrayMethods
*/
/**
* @template T
* @param {SetWithDeprecatedArrayMethods<T>} set new set
* @param {string} name property name
* @returns {void}
*/
module.exports.arrayToSetDeprecation = (set, name) => {
for (const method of COPY_METHODS) {
if (set[method]) continue;
const d = createDeprecation(
`${name} was changed from Array to Set (using Array method '${method}' is deprecated)`,
"ARRAY_TO_SET"
);
/**
* @deprecated
* @this {Set<T>}
* @returns {number} count
*/
set[method] = function () {
d();
const array = Array.from(this);
return Array.prototype[/** @type {keyof COPY_METHODS} */ (method)].apply(
array,
// eslint-disable-next-line prefer-rest-params
arguments
);
};
}
const dPush = createDeprecation(
`${name} was changed from Array to Set (using Array method 'push' is deprecated)`,
"ARRAY_TO_SET_PUSH"
);
const dLength = createDeprecation(
`${name} was changed from Array to Set (using Array property 'length' is deprecated)`,
"ARRAY_TO_SET_LENGTH"
);
const dIndexer = createDeprecation(
`${name} was changed from Array to Set (indexing Array is deprecated)`,
"ARRAY_TO_SET_INDEXER"
);
/**
* @deprecated
* @this {Set<T>}
* @returns {number} count
*/
set.push = function () {
dPush();
// eslint-disable-next-line prefer-rest-params
for (const item of Array.from(arguments)) {
this.add(item);
}
return this.size;
};
for (const method of DISABLED_METHODS) {
if (set[method]) continue;
set[method] = () => {
throw new Error(
`${name} was changed from Array to Set (using Array method '${method}' is not possible)`
);
};
}
/**
* @param {number} index index
* @returns {() => T | undefined} value
*/
const createIndexGetter = index => {
/**
* @this {Set<T>} a Set
* @returns {T | undefined} the value at this location
*/
// eslint-disable-next-line func-style
const fn = function () {
dIndexer();
let i = 0;
for (const item of this) {
if (i++ === index) return item;
}
};
return fn;
};
/**
* @param {number} index index
*/
const defineIndexGetter = index => {
Object.defineProperty(set, index, {
get: createIndexGetter(index),
set(value) {
throw new Error(
`${name} was changed from Array to Set (indexing Array with write is not possible)`
);
}
});
};
defineIndexGetter(0);
let indexerDefined = 1;
Object.defineProperty(set, "length", {
get() {
dLength();
const length = this.size;
for (indexerDefined; indexerDefined < length + 1; indexerDefined++) {
defineIndexGetter(indexerDefined);
}
return length;
},
set(value) {
throw new Error(
`${name} was changed from Array to Set (writing to Array property 'length' is not possible)`
);
}
});
set[Symbol.isConcatSpreadable] = true;
};
/**
* @template T
* @param {string} name name
* @returns {{ new <T = any>(values?: readonly T[] | null): SetDeprecatedArray<T> }} SetDeprecatedArray
*/
module.exports.createArrayToSetDeprecationSet = name => {
let initialized = false;
/**
* @template T
*/
class SetDeprecatedArray extends Set {
/**
* @param {readonly T[] | null=} items items
*/
constructor(items) {
super(items);
if (!initialized) {
initialized = true;
module.exports.arrayToSetDeprecation(
SetDeprecatedArray.prototype,
name
);
}
}
}
return SetDeprecatedArray;
};
/**
* @template {object} T
* @param {T} obj object
* @param {string} name property name
* @param {string} code deprecation code
* @param {string} note additional note
* @returns {T} frozen object with deprecation when modifying
*/
module.exports.soonFrozenObjectDeprecation = (obj, name, code, note = "") => {
const message = `${name} will be frozen in future, all modifications are deprecated.${
note && `\n${note}`
}`;
return /** @type {T} */ (
new Proxy(obj, {
set: util.deprecate(
/**
* @param {object} target target
* @param {string | symbol} property property
* @param {EXPECTED_ANY} value value
* @param {EXPECTED_ANY} receiver receiver
* @returns {boolean} result
*/
(target, property, value, receiver) =>
Reflect.set(target, property, value, receiver),
message,
code
),
defineProperty: util.deprecate(
/**
* @param {object} target target
* @param {string | symbol} property property
* @param {PropertyDescriptor} descriptor descriptor
* @returns {boolean} result
*/
(target, property, descriptor) =>
Reflect.defineProperty(target, property, descriptor),
message,
code
),
deleteProperty: util.deprecate(
/**
* @param {object} target target
* @param {string | symbol} property property
* @returns {boolean} result
*/
(target, property) => Reflect.deleteProperty(target, property),
message,
code
),
setPrototypeOf: util.deprecate(
/**
* @param {object} target target
* @param {EXPECTED_OBJECT | null} proto proto
* @returns {boolean} result
*/
(target, proto) => Reflect.setPrototypeOf(target, proto),
message,
code
)
})
);
};
/**
* @template T
* @param {T} obj object
* @param {string} message deprecation message
* @param {string} code deprecation code
* @returns {T} object with property access deprecated
*/
const deprecateAllProperties = (obj, message, code) => {
const newObj = {};
const descriptors = Object.getOwnPropertyDescriptors(obj);
for (const name of Object.keys(descriptors)) {
const descriptor = descriptors[name];
if (typeof descriptor.value === "function") {
Object.defineProperty(newObj, name, {
...descriptor,
value: util.deprecate(descriptor.value, message, code)
});
} else if (descriptor.get || descriptor.set) {
Object.defineProperty(newObj, name, {
...descriptor,
get: descriptor.get && util.deprecate(descriptor.get, message, code),
set: descriptor.set && util.deprecate(descriptor.set, message, code)
});
} else {
let value = descriptor.value;
Object.defineProperty(newObj, name, {
configurable: descriptor.configurable,
enumerable: descriptor.enumerable,
get: util.deprecate(() => value, message, code),
set: descriptor.writable
? util.deprecate(
/**
* @template T
* @param {T} v value
* @returns {T} result
*/
v => (value = v),
message,
code
)
: undefined
});
}
}
return /** @type {T} */ (newObj);
};
module.exports.deprecateAllProperties = deprecateAllProperties;
/**
* @template {object} T
* @param {T} fakeHook fake hook implementation
* @param {string=} message deprecation message (not deprecated when unset)
* @param {string=} code deprecation code (not deprecated when unset)
* @returns {FakeHook<T>} fake hook which redirects
*/
module.exports.createFakeHook = (fakeHook, message, code) => {
if (message && code) {
fakeHook = deprecateAllProperties(fakeHook, message, code);
}
return Object.freeze(
Object.assign(fakeHook, { _fakeHook: /** @type {true} */ (true) })
);
};
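// A minimal usage sketch for arrayToSetDeprecation above (the property name is illustrative):
const chunks = new Set(["a", "b"]);
module.exports.arrayToSetDeprecation(chunks, "Compilation.chunks");
chunks.push("c"); // still works, but emits DEP_WEBPACK_DEPRECATION_ARRAY_TO_SET_PUSH
const chunkCount = chunks.length; // 3, emits DEP_WEBPACK_DEPRECATION_ARRAY_TO_SET_LENGTH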

View File

@ -0,0 +1,540 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
// Simulations show these probabilities for a single change
// 93.1% that one group is invalidated
// 4.8% that two groups are invalidated
// 1.1% that 3 groups are invalidated
// 0.1% that 4 or more groups are invalidated
//
// And these for removing/adding 10 lexically adjacent files
// 64.5% that one group is invalidated
// 24.8% that two groups are invalidated
// 7.8% that 3 groups are invalidated
// 2.7% that 4 or more groups are invalidated
//
// And these for removing/adding 3 random files
// 0% that one group is invalidated
// 3.7% that two groups are invalidated
// 80.8% that 3 groups are invalidated
// 12.3% that 4 groups are invalidated
// 3.2% that 5 or more groups are invalidated
/**
* @param {string} a key
* @param {string} b key
* @returns {number} the similarity as number
*/
const similarity = (a, b) => {
const l = Math.min(a.length, b.length);
let dist = 0;
for (let i = 0; i < l; i++) {
const ca = a.charCodeAt(i);
const cb = b.charCodeAt(i);
dist += Math.max(0, 10 - Math.abs(ca - cb));
}
return dist;
};
/**
* @param {string} a key
* @param {string} b key
* @param {Set<string>} usedNames set of already used names
* @returns {string} the common part and a single char for the difference
*/
const getName = (a, b, usedNames) => {
const l = Math.min(a.length, b.length);
let i = 0;
while (i < l) {
if (a.charCodeAt(i) !== b.charCodeAt(i)) {
i++;
break;
}
i++;
}
while (i < l) {
const name = a.slice(0, i);
const lowerName = name.toLowerCase();
if (!usedNames.has(lowerName)) {
usedNames.add(lowerName);
return name;
}
i++;
}
// names always contain a hash, so this is always unique
// we don't need to check usedNames nor add it
return a;
};
/**
* @param {Record<string, number>} total total size
* @param {Record<string, number>} size single size
* @returns {void}
*/
const addSizeTo = (total, size) => {
for (const key of Object.keys(size)) {
total[key] = (total[key] || 0) + size[key];
}
};
/**
* @param {Record<string, number>} total total size
* @param {Record<string, number>} size single size
* @returns {void}
*/
const subtractSizeFrom = (total, size) => {
for (const key of Object.keys(size)) {
total[key] -= size[key];
}
};
/**
* @template T
* @param {Iterable<Node<T>>} nodes some nodes
* @returns {Record<string, number>} total size
*/
const sumSize = nodes => {
const sum = Object.create(null);
for (const node of nodes) {
addSizeTo(sum, node.size);
}
return sum;
};
/**
* @param {Record<string, number>} size size
 * @param {Record<string, number>} maxSize maximum size
* @returns {boolean} true, when size is too big
*/
const isTooBig = (size, maxSize) => {
for (const key of Object.keys(size)) {
const s = size[key];
if (s === 0) continue;
const maxSizeValue = maxSize[key];
if (typeof maxSizeValue === "number" && s > maxSizeValue) return true;
}
return false;
};
/**
* @param {Record<string, number>} size size
* @param {Record<string, number>} minSize minimum size
* @returns {boolean} true, when size is too small
*/
const isTooSmall = (size, minSize) => {
for (const key of Object.keys(size)) {
const s = size[key];
if (s === 0) continue;
const minSizeValue = minSize[key];
if (typeof minSizeValue === "number" && s < minSizeValue) return true;
}
return false;
};
/**
* @param {Record<string, number>} size size
* @param {Record<string, number>} minSize minimum size
* @returns {Set<string>} set of types that are too small
*/
const getTooSmallTypes = (size, minSize) => {
const types = new Set();
for (const key of Object.keys(size)) {
const s = size[key];
if (s === 0) continue;
const minSizeValue = minSize[key];
if (typeof minSizeValue === "number" && s < minSizeValue) types.add(key);
}
return types;
};
/**
* @template {object} T
* @param {T} size size
* @param {Set<string>} types types
* @returns {number} number of matching size types
*/
const getNumberOfMatchingSizeTypes = (size, types) => {
let i = 0;
for (const key of Object.keys(size)) {
if (size[/** @type {keyof T} */ (key)] !== 0 && types.has(key)) i++;
}
return i;
};
/**
* @param {Record<string, number>} size size
* @param {Set<string>} types types
* @returns {number} selective size sum
*/
const selectiveSizeSum = (size, types) => {
let sum = 0;
for (const key of Object.keys(size)) {
if (size[key] !== 0 && types.has(key)) sum += size[key];
}
return sum;
};
/**
* @template T
*/
class Node {
/**
* @param {T} item item
* @param {string} key key
* @param {Record<string, number>} size size
*/
constructor(item, key, size) {
this.item = item;
this.key = key;
this.size = size;
}
}
/**
* @template T
*/
class Group {
/**
* @param {Node<T>[]} nodes nodes
* @param {number[] | null} similarities similarities between the nodes (length = nodes.length - 1)
* @param {Record<string, number>=} size size of the group
*/
constructor(nodes, similarities, size) {
this.nodes = nodes;
this.similarities = similarities;
this.size = size || sumSize(nodes);
/** @type {string | undefined} */
this.key = undefined;
}
/**
* @param {(node: Node<T>) => boolean} filter filter function
* @returns {Node<T>[] | undefined} removed nodes
*/
popNodes(filter) {
const newNodes = [];
const newSimilarities = [];
const resultNodes = [];
let lastNode;
for (let i = 0; i < this.nodes.length; i++) {
const node = this.nodes[i];
if (filter(node)) {
resultNodes.push(node);
} else {
if (newNodes.length > 0) {
newSimilarities.push(
lastNode === this.nodes[i - 1]
? /** @type {number[]} */ (this.similarities)[i - 1]
: similarity(/** @type {Node<T>} */ (lastNode).key, node.key)
);
}
newNodes.push(node);
lastNode = node;
}
}
if (resultNodes.length === this.nodes.length) return;
this.nodes = newNodes;
this.similarities = newSimilarities;
this.size = sumSize(newNodes);
return resultNodes;
}
}
/**
* @template T
* @param {Iterable<Node<T>>} nodes nodes
* @returns {number[]} similarities
*/
const getSimilarities = nodes => {
// calculate similarities between lexically adjacent nodes
/** @type {number[]} */
const similarities = [];
let last;
for (const node of nodes) {
if (last !== undefined) {
similarities.push(similarity(last.key, node.key));
}
last = node;
}
return similarities;
};
/**
* @template T
* @typedef {object} GroupedItems<T>
* @property {string} key
* @property {T[]} items
* @property {Record<string, number>} size
*/
/**
* @template T
* @typedef {object} Options
* @property {Record<string, number>} maxSize maximum size of a group
* @property {Record<string, number>} minSize minimum size of a group (preferred over maximum size)
* @property {Iterable<T>} items a list of items
* @property {(item: T) => Record<string, number>} getSize function to get size of an item
* @property {(item: T) => string} getKey function to get the key of an item
*/
/**
* @template T
* @param {Options<T>} options options object
* @returns {GroupedItems<T>[]} grouped items
*/
module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
/** @type {Group<T>[]} */
const result = [];
const nodes = Array.from(
items,
item => new Node(item, getKey(item), getSize(item))
);
/** @type {Node<T>[]} */
const initialNodes = [];
	// lexical ordering of keys
nodes.sort((a, b) => {
if (a.key < b.key) return -1;
if (a.key > b.key) return 1;
return 0;
});
// return nodes bigger than maxSize directly as group
// But make sure that minSize is not violated
for (const node of nodes) {
if (isTooBig(node.size, maxSize) && !isTooSmall(node.size, minSize)) {
result.push(new Group([node], []));
} else {
initialNodes.push(node);
}
}
if (initialNodes.length > 0) {
const initialGroup = new Group(initialNodes, getSimilarities(initialNodes));
/**
* @param {Group<T>} group group
* @param {Record<string, number>} consideredSize size of the group to consider
* @returns {boolean} true, if the group was modified
*/
const removeProblematicNodes = (group, consideredSize = group.size) => {
const problemTypes = getTooSmallTypes(consideredSize, minSize);
if (problemTypes.size > 0) {
// We hit an edge case where the working set is already smaller than minSize
// We merge problematic nodes with the smallest result node to keep minSize intact
const problemNodes = group.popNodes(
n => getNumberOfMatchingSizeTypes(n.size, problemTypes) > 0
);
if (problemNodes === undefined) return false;
// Only merge it with result nodes that have the problematic size type
const possibleResultGroups = result.filter(
n => getNumberOfMatchingSizeTypes(n.size, problemTypes) > 0
);
if (possibleResultGroups.length > 0) {
const bestGroup = possibleResultGroups.reduce((min, group) => {
const minMatches = getNumberOfMatchingSizeTypes(min, problemTypes);
const groupMatches = getNumberOfMatchingSizeTypes(
group,
problemTypes
);
if (minMatches !== groupMatches)
return minMatches < groupMatches ? group : min;
if (
selectiveSizeSum(min.size, problemTypes) >
selectiveSizeSum(group.size, problemTypes)
)
return group;
return min;
});
for (const node of problemNodes) bestGroup.nodes.push(node);
bestGroup.nodes.sort((a, b) => {
if (a.key < b.key) return -1;
if (a.key > b.key) return 1;
return 0;
});
} else {
// There are no other nodes with the same size types
// We create a new group and have to accept that it's smaller than minSize
result.push(new Group(problemNodes, null));
}
return true;
}
return false;
};
if (initialGroup.nodes.length > 0) {
const queue = [initialGroup];
while (queue.length) {
const group = /** @type {Group<T>} */ (queue.pop());
				// only groups bigger than maxSize need to be split
if (!isTooBig(group.size, maxSize)) {
result.push(group);
continue;
}
// If the group is already too small
// we try to work only with the unproblematic nodes
if (removeProblematicNodes(group)) {
// This changed something, so we try this group again
queue.push(group);
continue;
}
// find unsplittable area from left and right
// going minSize from left and right
				// at least one node needs to be included, otherwise we get stuck
let left = 1;
const leftSize = Object.create(null);
addSizeTo(leftSize, group.nodes[0].size);
while (left < group.nodes.length && isTooSmall(leftSize, minSize)) {
addSizeTo(leftSize, group.nodes[left].size);
left++;
}
let right = group.nodes.length - 2;
const rightSize = Object.create(null);
addSizeTo(rightSize, group.nodes[group.nodes.length - 1].size);
while (right >= 0 && isTooSmall(rightSize, minSize)) {
addSizeTo(rightSize, group.nodes[right].size);
right--;
}
// left v v right
// [ O O O ] O O O [ O O O ]
// ^^^^^^^^^ leftSize
// rightSize ^^^^^^^^^
// leftSize > minSize
// rightSize > minSize
// Perfect split: [ O O O ] [ O O O ]
// right === left - 1
if (left - 1 > right) {
// We try to remove some problematic nodes to "fix" that
let prevSize;
if (right < group.nodes.length - left) {
subtractSizeFrom(rightSize, group.nodes[right + 1].size);
prevSize = rightSize;
} else {
subtractSizeFrom(leftSize, group.nodes[left - 1].size);
prevSize = leftSize;
}
if (removeProblematicNodes(group, prevSize)) {
// This changed something, so we try this group again
queue.push(group);
continue;
}
// can't split group while holding minSize
					// because minSize is preferred over maxSize we return
					// the problematic nodes as the result here even though it's too big
// To avoid this make sure maxSize > minSize * 3
result.push(group);
continue;
}
if (left <= right) {
					// when there is an area between left and right
					// we look for the best split point
					// we split at the minimum similarity
					// where the key space is separated the most
// But we also need to make sure to not create too small groups
let best = -1;
let bestSimilarity = Infinity;
let pos = left;
const rightSize = sumSize(group.nodes.slice(pos));
// pos v v right
// [ O O O ] O O O [ O O O ]
// ^^^^^^^^^ leftSize
// rightSize ^^^^^^^^^^^^^^^
while (pos <= right + 1) {
const similarity = /** @type {number[]} */ (group.similarities)[
pos - 1
];
if (
similarity < bestSimilarity &&
!isTooSmall(leftSize, minSize) &&
!isTooSmall(rightSize, minSize)
) {
best = pos;
bestSimilarity = similarity;
}
addSizeTo(leftSize, group.nodes[pos].size);
subtractSizeFrom(rightSize, group.nodes[pos].size);
pos++;
}
if (best < 0) {
// This can't happen
// but if that assumption is wrong
						// fall back to a big group
result.push(group);
continue;
}
left = best;
right = best - 1;
}
// create two new groups for left and right area
// and queue them up
const rightNodes = [group.nodes[right + 1]];
/** @type {number[]} */
const rightSimilarities = [];
for (let i = right + 2; i < group.nodes.length; i++) {
rightSimilarities.push(
/** @type {number[]} */ (group.similarities)[i - 1]
);
rightNodes.push(group.nodes[i]);
}
queue.push(new Group(rightNodes, rightSimilarities));
const leftNodes = [group.nodes[0]];
/** @type {number[]} */
const leftSimilarities = [];
for (let i = 1; i < left; i++) {
leftSimilarities.push(
/** @type {number[]} */ (group.similarities)[i - 1]
);
leftNodes.push(group.nodes[i]);
}
queue.push(new Group(leftNodes, leftSimilarities));
}
}
}
	// lexical ordering
result.sort((a, b) => {
if (a.nodes[0].key < b.nodes[0].key) return -1;
if (a.nodes[0].key > b.nodes[0].key) return 1;
return 0;
});
// give every group a name
const usedNames = new Set();
for (let i = 0; i < result.length; i++) {
const group = result[i];
if (group.nodes.length === 1) {
group.key = group.nodes[0].key;
} else {
const first = group.nodes[0];
const last = group.nodes[group.nodes.length - 1];
const name = getName(first.key, last.key, usedNames);
group.key = name;
}
}
// return the results
return result.map(
group =>
/** @type {GroupedItems<T>} */
({
key: group.key,
items: group.nodes.map(node => node.item),
size: group.size
})
);
};
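// A minimal usage sketch for the grouping function above; the "javascript" size type and the items are illustrative:
const exampleGroups = module.exports({
	maxSize: { javascript: 100 },
	minSize: { javascript: 30 },
	items: [
		{ name: "a.js", bytes: 50 },
		{ name: "b.js", bytes: 70 },
		{ name: "c.js", bytes: 60 }
	],
	getKey: item => item.name,
	getSize: item => ({ javascript: item.bytes })
});
// each entry of exampleGroups has the shape { key, items, size }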

View File

@ -0,0 +1,18 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Sam Chen @chenxsan
*/
"use strict";
/**
* @param {string} urlAndGlobal the script request
* @returns {string[]} script url and its global variable
*/
module.exports = function extractUrlAndGlobal(urlAndGlobal) {
const index = urlAndGlobal.indexOf("@");
if (index <= 0 || index === urlAndGlobal.length - 1) {
throw new Error(`Invalid request "${urlAndGlobal}"`);
}
return [urlAndGlobal.substring(index + 1), urlAndGlobal.substring(0, index)];
};
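// A minimal usage sketch (the URL and global are illustrative):
const [exampleUrl, exampleGlobal] = module.exports(
	"jQuery@https://code.jquery.com/jquery-3.6.0.min.js"
);
// exampleUrl: "https://code.jquery.com/jquery-3.6.0.min.js", exampleGlobal: "jQuery"
// a request without the "global@" prefix throws `Invalid request "..."`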

231
app_vue/node_modules/webpack/lib/util/findGraphRoots.js generated vendored Normal file
View File

@ -0,0 +1,231 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const NO_MARKER = 0;
const IN_PROGRESS_MARKER = 1;
const DONE_MARKER = 2;
const DONE_MAYBE_ROOT_CYCLE_MARKER = 3;
const DONE_AND_ROOT_MARKER = 4;
/**
* @template T
*/
class Node {
/**
* @param {T} item the value of the node
*/
constructor(item) {
this.item = item;
/** @type {Set<Node<T>>} */
this.dependencies = new Set();
this.marker = NO_MARKER;
/** @type {Cycle<T> | undefined} */
this.cycle = undefined;
this.incoming = 0;
}
}
/**
* @template T
*/
class Cycle {
constructor() {
/** @type {Set<Node<T>>} */
this.nodes = new Set();
}
}
/**
* @template T
* @typedef {object} StackEntry
* @property {Node<T>} node
* @property {Node<T>[]} openEdges
*/
/**
* @template T
* @param {Iterable<T>} items list of items
* @param {(item: T) => Iterable<T>} getDependencies function to get dependencies of an item (items that are not in list are ignored)
* @returns {Iterable<T>} graph roots of the items
*/
module.exports = (items, getDependencies) => {
/** @type {Map<T, Node<T>>} */
const itemToNode = new Map();
for (const item of items) {
const node = new Node(item);
itemToNode.set(item, node);
}
// early exit when there is only a single item
if (itemToNode.size <= 1) return items;
// grab all the dependencies
for (const node of itemToNode.values()) {
for (const dep of getDependencies(node.item)) {
const depNode = itemToNode.get(dep);
if (depNode !== undefined) {
node.dependencies.add(depNode);
}
}
}
// Set of current root modules
	// items will be removed if a new reference to them has been found
/** @type {Set<Node<T>>} */
const roots = new Set();
// Set of current cycles without references to it
// cycles will be removed if a new reference to it has been found
// that is not part of the cycle
/** @type {Set<Cycle<T>>} */
const rootCycles = new Set();
// For all non-marked nodes
for (const selectedNode of itemToNode.values()) {
if (selectedNode.marker === NO_MARKER) {
// deep-walk all referenced modules
// in a non-recursive way
// start by entering the selected node
selectedNode.marker = IN_PROGRESS_MARKER;
// keep a stack to avoid recursive walk
/** @type {StackEntry<T>[]} */
const stack = [
{
node: selectedNode,
openEdges: Array.from(selectedNode.dependencies)
}
];
// process the top item until stack is empty
while (stack.length > 0) {
const topOfStack = stack[stack.length - 1];
// Are there still edges unprocessed in the current node?
if (topOfStack.openEdges.length > 0) {
// Process one dependency
const dependency =
/** @type {Node<T>} */
(topOfStack.openEdges.pop());
switch (dependency.marker) {
case NO_MARKER:
							// dependency has not been visited yet
// mark it as in-progress and recurse
stack.push({
node: dependency,
openEdges: Array.from(dependency.dependencies)
});
dependency.marker = IN_PROGRESS_MARKER;
break;
case IN_PROGRESS_MARKER: {
							// It's an in-progress cycle
let cycle = dependency.cycle;
if (!cycle) {
cycle = new Cycle();
cycle.nodes.add(dependency);
dependency.cycle = cycle;
}
// set cycle property for each node in the cycle
// if nodes are already part of a cycle
// we merge the cycles to a shared cycle
for (
let i = stack.length - 1;
stack[i].node !== dependency;
i--
) {
const node = stack[i].node;
if (node.cycle) {
if (node.cycle !== cycle) {
// merge cycles
for (const cycleNode of node.cycle.nodes) {
cycleNode.cycle = cycle;
cycle.nodes.add(cycleNode);
}
}
} else {
node.cycle = cycle;
cycle.nodes.add(node);
}
}
// don't recurse into dependencies
// these are already on the stack
break;
}
case DONE_AND_ROOT_MARKER:
							// This node has already been visited and is currently a root node
// But as this is a new reference to the node
// it's not really a root
// so we have to convert it to a normal node
dependency.marker = DONE_MARKER;
roots.delete(dependency);
break;
case DONE_MAYBE_ROOT_CYCLE_MARKER:
							// This node has already been visited and
// is maybe currently part of a completed root cycle
// we found a new reference to the cycle
// so it's not really a root cycle
// remove the cycle from the root cycles
// and convert it to a normal node
rootCycles.delete(/** @type {Cycle<T>} */ (dependency.cycle));
dependency.marker = DONE_MARKER;
break;
// DONE_MARKER: nothing to do, don't recurse into dependencies
}
} else {
					// All dependencies of the current node have been visited
// we leave the node
stack.pop();
topOfStack.node.marker = DONE_MARKER;
}
}
const cycle = selectedNode.cycle;
if (cycle) {
for (const node of cycle.nodes) {
node.marker = DONE_MAYBE_ROOT_CYCLE_MARKER;
}
rootCycles.add(cycle);
} else {
selectedNode.marker = DONE_AND_ROOT_MARKER;
roots.add(selectedNode);
}
}
}
// Extract roots from root cycles
	// We take the nodes with the most incoming edges
	// inside the cycle
for (const cycle of rootCycles) {
let max = 0;
/** @type {Set<Node<T>>} */
const cycleRoots = new Set();
const nodes = cycle.nodes;
for (const node of nodes) {
for (const dep of node.dependencies) {
if (nodes.has(dep)) {
dep.incoming++;
if (dep.incoming < max) continue;
if (dep.incoming > max) {
cycleRoots.clear();
max = dep.incoming;
}
cycleRoots.add(dep);
}
}
}
for (const cycleRoot of cycleRoots) {
roots.add(cycleRoot);
}
}
// When roots were found, return them
if (roots.size > 0) {
return Array.from(roots, r => r.item);
}
throw new Error("Implementation of findGraphRoots is broken");
};
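// A minimal usage sketch (the items and their edges are illustrative):
const exampleRoots = module.exports(["a", "b", "c"], item =>
	item === "a" ? ["b"] : item === "b" ? ["c"] : []
);
// exampleRoots: ["a"], since "a" is the only item nothing else depends on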

651
app_vue/node_modules/webpack/lib/util/fs.js generated vendored Normal file
View File

@ -0,0 +1,651 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const path = require("path");
/** @typedef {import("../../declarations/WebpackOptions").WatchOptions} WatchOptions */
/** @typedef {import("../FileSystemInfo").FileSystemInfoEntry} FileSystemInfoEntry */
/**
* @template T
* @typedef {object} IStatsBase
* @property {() => boolean} isFile
* @property {() => boolean} isDirectory
* @property {() => boolean} isBlockDevice
* @property {() => boolean} isCharacterDevice
* @property {() => boolean} isSymbolicLink
* @property {() => boolean} isFIFO
* @property {() => boolean} isSocket
* @property {T} dev
* @property {T} ino
* @property {T} mode
* @property {T} nlink
* @property {T} uid
* @property {T} gid
* @property {T} rdev
* @property {T} size
* @property {T} blksize
* @property {T} blocks
* @property {T} atimeMs
* @property {T} mtimeMs
* @property {T} ctimeMs
* @property {T} birthtimeMs
* @property {Date} atime
* @property {Date} mtime
* @property {Date} ctime
* @property {Date} birthtime
*/
/**
* @typedef {IStatsBase<number>} IStats
*/
/**
* @typedef {IStatsBase<bigint> & { atimeNs: bigint, mtimeNs: bigint, ctimeNs: bigint, birthtimeNs: bigint }} IBigIntStats
*/
/**
* @typedef {object} Dirent
* @property {() => boolean} isFile
* @property {() => boolean} isDirectory
* @property {() => boolean} isBlockDevice
* @property {() => boolean} isCharacterDevice
* @property {() => boolean} isSymbolicLink
* @property {() => boolean} isFIFO
* @property {() => boolean} isSocket
* @property {string} name
* @property {string} path
*/
/** @typedef {string | number | boolean | null} JsonPrimitive */
/** @typedef {JsonValue[]} JsonArray */
/** @typedef {{[Key in string]: JsonValue} & {[Key in string]?: JsonValue | undefined}} JsonObject */
/** @typedef {JsonPrimitive | JsonObject | JsonArray} JsonValue */
/** @typedef {(err: NodeJS.ErrnoException | null) => void} NoParamCallback */
/** @typedef {(err: NodeJS.ErrnoException | null, result?: string) => void} StringCallback */
/** @typedef {(err: NodeJS.ErrnoException | null, result?: Buffer) => void} BufferCallback */
/** @typedef {(err: NodeJS.ErrnoException | null, result?: string | Buffer) => void} StringOrBufferCallback */
/** @typedef {(err: NodeJS.ErrnoException | null, result?: string[]) => void} ReaddirStringCallback */
/** @typedef {(err: NodeJS.ErrnoException | null, result?: Buffer[]) => void} ReaddirBufferCallback */
/** @typedef {(err: NodeJS.ErrnoException | null, result?: string[] | Buffer[]) => void} ReaddirStringOrBufferCallback */
/** @typedef {(err: NodeJS.ErrnoException | null, result?: Dirent[]) => void} ReaddirDirentCallback */
/** @typedef {(err: NodeJS.ErrnoException | null, result?: IStats) => void} StatsCallback */
/** @typedef {(err: NodeJS.ErrnoException | null, result?: IBigIntStats) => void} BigIntStatsCallback */
/** @typedef {(err: NodeJS.ErrnoException | null, result?: IStats | IBigIntStats) => void} StatsOrBigIntStatsCallback */
/** @typedef {(err: NodeJS.ErrnoException | null, result?: number) => void} NumberCallback */
/** @typedef {(err: NodeJS.ErrnoException | Error | null, result?: JsonObject) => void} ReadJsonCallback */
/** @typedef {Map<string, FileSystemInfoEntry | "ignore">} TimeInfoEntries */
/**
* @typedef {object} WatcherInfo
 * @property {Set<string> | null} changes get current aggregated changes that have not yet been sent to the callback
 * @property {Set<string> | null} removals get current aggregated removals that have not yet been sent to the callback
* @property {TimeInfoEntries} fileTimeInfoEntries get info about files
* @property {TimeInfoEntries} contextTimeInfoEntries get info about directories
*/
/** @typedef {Set<string>} Changes */
/** @typedef {Set<string>} Removals */
// TODO webpack 6 deprecate missing getInfo
/**
* @typedef {object} Watcher
* @property {() => void} close closes the watcher and all underlying file watchers
* @property {() => void} pause closes the watcher, but keeps underlying file watchers alive until the next watch call
 * @property {(() => Changes | null)=} getAggregatedChanges get current aggregated changes that have not yet been sent to the callback
 * @property {(() => Removals | null)=} getAggregatedRemovals get current aggregated removals that have not yet been sent to the callback
* @property {() => TimeInfoEntries} getFileTimeInfoEntries get info about files
* @property {() => TimeInfoEntries} getContextTimeInfoEntries get info about directories
* @property {() => WatcherInfo=} getInfo get info about timestamps and changes
*/
/**
* @callback WatchMethod
* @param {Iterable<string>} files watched files
* @param {Iterable<string>} directories watched directories
* @param {Iterable<string>} missing watched existence entries
* @param {number} startTime timestamp of start time
* @param {WatchOptions} options options object
* @param {(err: Error | null, timeInfoEntries1?: TimeInfoEntries, timeInfoEntries2?: TimeInfoEntries, changes?: Changes, removals?: Removals) => void} callback aggregated callback
* @param {(value: string, num: number) => void} callbackUndelayed callback when the first change was detected
* @returns {Watcher} a watcher
*/
// TODO webpack 6 make optional methods required and avoid using non standard methods like `join`, `relative`, `dirname`, move IntermediateFileSystemExtras methods to InputFilesystem or OutputFilesystem
/**
* @typedef {string | Buffer | URL} PathLike
*/
/**
* @typedef {PathLike | number} PathOrFileDescriptor
*/
/**
* @typedef {object} ObjectEncodingOptions
* @property {BufferEncoding | null | undefined=} encoding
*/
/**
* @typedef {{
* (path: PathOrFileDescriptor, options: ({ encoding?: null | undefined, flag?: string | undefined } & import("events").Abortable) | undefined | null, callback: BufferCallback): void;
* (path: PathOrFileDescriptor, options: ({ encoding: BufferEncoding, flag?: string | undefined } & import("events").Abortable) | BufferEncoding, callback: StringCallback): void;
* (path: PathOrFileDescriptor, options: (ObjectEncodingOptions & { flag?: string | undefined } & import("events").Abortable) | BufferEncoding | undefined | null, callback: StringOrBufferCallback): void;
* (path: PathOrFileDescriptor, callback: BufferCallback): void;
* }} ReadFile
*/
/**
* @typedef {{
* (path: PathOrFileDescriptor, options?: { encoding?: null | undefined, flag?: string | undefined } | null): Buffer;
* (path: PathOrFileDescriptor, options: { encoding: BufferEncoding, flag?: string | undefined } | BufferEncoding): string;
* (path: PathOrFileDescriptor, options?: (ObjectEncodingOptions & { flag?: string | undefined }) | BufferEncoding | null): string | Buffer;
* }} ReadFileSync
*/
/**
* @typedef {ObjectEncodingOptions | BufferEncoding | undefined | null} EncodingOption
*/
/**
* @typedef {'buffer'| { encoding: 'buffer' }} BufferEncodingOption
*/
/**
* @typedef {object} StatOptions
* @property {(boolean | undefined)=} bigint
*/
/**
* @typedef {object} StatSyncOptions
* @property {(boolean | undefined)=} bigint
* @property {(boolean | undefined)=} throwIfNoEntry
*/
/**
* @typedef {{
* (path: PathLike, options: EncodingOption, callback: StringCallback): void;
* (path: PathLike, options: BufferEncodingOption, callback: BufferCallback): void;
* (path: PathLike, options: EncodingOption, callback: StringOrBufferCallback): void;
* (path: PathLike, callback: StringCallback): void;
* }} Readlink
*/
/**
* @typedef {{
* (path: PathLike, options?: EncodingOption): string;
* (path: PathLike, options: BufferEncodingOption): Buffer;
* (path: PathLike, options?: EncodingOption): string | Buffer;
* }} ReadlinkSync
*/
/**
* @typedef {{
* (path: PathLike, options: { encoding: BufferEncoding | null, withFileTypes?: false | undefined, recursive?: boolean | undefined } | BufferEncoding | undefined | null, callback: ReaddirStringCallback): void;
* (path: PathLike, options: { encoding: 'buffer', withFileTypes?: false | undefined, recursive?: boolean | undefined } | 'buffer', callback: ReaddirBufferCallback): void;
* (path: PathLike, callback: ReaddirStringCallback): void;
* (path: PathLike, options: (ObjectEncodingOptions & { withFileTypes?: false | undefined, recursive?: boolean | undefined }) | BufferEncoding | undefined | null, callback: ReaddirStringOrBufferCallback): void;
* (path: PathLike, options: ObjectEncodingOptions & { withFileTypes: true, recursive?: boolean | undefined }, callback: ReaddirDirentCallback): void;
* }} Readdir
*/
/**
* @typedef {{
* (path: PathLike, options?: { encoding: BufferEncoding | null, withFileTypes?: false | undefined, recursive?: boolean | undefined } | BufferEncoding | null): string[];
* (path: PathLike, options: { encoding: 'buffer', withFileTypes?: false | undefined, recursive?: boolean | undefined } | 'buffer'): Buffer[];
* (path: PathLike, options?: (ObjectEncodingOptions & { withFileTypes?: false | undefined, recursive?: boolean | undefined }) | BufferEncoding | null): string[] | Buffer[];
* (path: PathLike, options: ObjectEncodingOptions & { withFileTypes: true, recursive?: boolean | undefined }): Dirent[];
* }} ReaddirSync
*/
/**
* @typedef {{
* (path: PathLike, callback: StatsCallback): void;
* (path: PathLike, options: (StatOptions & { bigint?: false | undefined }) | undefined, callback: StatsCallback): void;
* (path: PathLike, options: StatOptions & { bigint: true }, callback: BigIntStatsCallback): void;
* (path: PathLike, options: StatOptions | undefined, callback: StatsOrBigIntStatsCallback): void;
* }} Stat
*/
/**
* @typedef {{
* (path: PathLike, options?: undefined): IStats;
* (path: PathLike, options?: StatSyncOptions & { bigint?: false | undefined, throwIfNoEntry: false }): IStats | undefined;
* (path: PathLike, options: StatSyncOptions & { bigint: true, throwIfNoEntry: false }): IBigIntStats | undefined;
* (path: PathLike, options?: StatSyncOptions & { bigint?: false | undefined }): IStats;
* (path: PathLike, options: StatSyncOptions & { bigint: true }): IBigIntStats;
* (path: PathLike, options: StatSyncOptions & { bigint: boolean, throwIfNoEntry?: false | undefined }): IStats | IBigIntStats;
* (path: PathLike, options?: StatSyncOptions): IStats | IBigIntStats | undefined;
* }} StatSync
*/
/**
* @typedef {{
* (path: PathLike, callback: StatsCallback): void;
* (path: PathLike, options: (StatOptions & { bigint?: false | undefined }) | undefined, callback: StatsCallback): void;
* (path: PathLike, options: StatOptions & { bigint: true }, callback: BigIntStatsCallback): void;
* (path: PathLike, options: StatOptions | undefined, callback: StatsOrBigIntStatsCallback): void;
* }} LStat
*/
/**
* @typedef {{
* (path: PathLike, options?: undefined): IStats;
* (path: PathLike, options?: StatSyncOptions & { bigint?: false | undefined, throwIfNoEntry: false }): IStats | undefined;
* (path: PathLike, options: StatSyncOptions & { bigint: true, throwIfNoEntry: false }): IBigIntStats | undefined;
* (path: PathLike, options?: StatSyncOptions & { bigint?: false | undefined }): IStats;
* (path: PathLike, options: StatSyncOptions & { bigint: true }): IBigIntStats;
* (path: PathLike, options: StatSyncOptions & { bigint: boolean, throwIfNoEntry?: false | undefined }): IStats | IBigIntStats;
* (path: PathLike, options?: StatSyncOptions): IStats | IBigIntStats | undefined;
* }} LStatSync
*/
/**
* @typedef {{
* (path: PathLike, options: EncodingOption, callback: StringCallback): void;
* (path: PathLike, options: BufferEncodingOption, callback: BufferCallback): void;
* (path: PathLike, options: EncodingOption, callback: StringOrBufferCallback): void;
* (path: PathLike, callback: StringCallback): void;
* }} RealPath
*/
/**
* @typedef {{
* (path: PathLike, options?: EncodingOption): string;
* (path: PathLike, options: BufferEncodingOption): Buffer;
* (path: PathLike, options?: EncodingOption): string | Buffer;
* }} RealPathSync
*/
/**
* @typedef {(pathOrFileDescriptor: PathOrFileDescriptor, callback: ReadJsonCallback) => void} ReadJson
*/
/**
* @typedef {(pathOrFileDescriptor: PathOrFileDescriptor) => JsonObject} ReadJsonSync
*/
/**
* @typedef {(value?: string | string[] | Set<string>) => void} Purge
*/
/**
* @typedef {object} InputFileSystem
* @property {ReadFile} readFile
* @property {ReadFileSync=} readFileSync
* @property {Readlink} readlink
* @property {ReadlinkSync=} readlinkSync
* @property {Readdir} readdir
* @property {ReaddirSync=} readdirSync
* @property {Stat} stat
* @property {StatSync=} statSync
* @property {LStat=} lstat
* @property {LStatSync=} lstatSync
* @property {RealPath=} realpath
* @property {RealPathSync=} realpathSync
* @property {ReadJson=} readJson
* @property {ReadJsonSync=} readJsonSync
* @property {Purge=} purge
* @property {((path1: string, path2: string) => string)=} join
* @property {((from: string, to: string) => string)=} relative
* @property {((dirname: string) => string)=} dirname
*/
/**
* @typedef {number | string} Mode
*/
/**
* @typedef {(ObjectEncodingOptions & import("events").Abortable & { mode?: Mode | undefined, flag?: string | undefined, flush?: boolean | undefined }) | BufferEncoding | null} WriteFileOptions
*/
/**
* @typedef {{
* (file: PathOrFileDescriptor, data: string | NodeJS.ArrayBufferView, options: WriteFileOptions, callback: NoParamCallback): void;
* (file: PathOrFileDescriptor, data: string | NodeJS.ArrayBufferView, callback: NoParamCallback): void;
* }} WriteFile
*/
/**
* @typedef {{ recursive?: boolean | undefined, mode?: Mode | undefined }} MakeDirectoryOptions
*/
/**
* @typedef {{
* (file: PathLike, options: MakeDirectoryOptions & { recursive: true }, callback: StringCallback): void;
* (file: PathLike, options: Mode | (MakeDirectoryOptions & { recursive?: false | undefined; }) | null | undefined, callback: NoParamCallback): void;
* (file: PathLike, options: Mode | MakeDirectoryOptions | null | undefined, callback: StringCallback): void;
* (file: PathLike, callback: NoParamCallback): void;
* }} Mkdir
*/
/**
* @typedef {{ maxRetries?: number | undefined, recursive?: boolean | undefined, retryDelay?: number | undefined }} RmDirOptions
*/
/**
* @typedef {{
* (file: PathLike, callback: NoParamCallback): void;
* (file: PathLike, options: RmDirOptions, callback: NoParamCallback): void;
* }} Rmdir
*/
/**
* @typedef {(pathLike: PathLike, callback: NoParamCallback) => void} Unlink
*/
/**
* @typedef {object} OutputFileSystem
* @property {WriteFile} writeFile
* @property {Mkdir} mkdir
* @property {Readdir=} readdir
* @property {Rmdir=} rmdir
* @property {Unlink=} unlink
* @property {Stat} stat
* @property {LStat=} lstat
* @property {ReadFile} readFile
* @property {((path1: string, path2: string) => string)=} join
* @property {((from: string, to: string) => string)=} relative
* @property {((dirname: string) => string)=} dirname
*/
/**
* @typedef {object} WatchFileSystem
* @property {WatchMethod} watch
*/
/**
* @typedef {{
* (path: PathLike, options: MakeDirectoryOptions & { recursive: true }): string | undefined;
* (path: PathLike, options?: Mode | (MakeDirectoryOptions & { recursive?: false | undefined }) | null): void;
* (path: PathLike, options?: Mode | MakeDirectoryOptions | null): string | undefined;
* }} MkdirSync
*/
/**
* @typedef {object} StreamOptions
* @property {(string | undefined)=} flags
* @property {(BufferEncoding | undefined)} encoding
* @property {(number | EXPECTED_ANY | undefined)=} fd
* @property {(number | undefined)=} mode
* @property {(boolean | undefined)=} autoClose
* @property {(boolean | undefined)=} emitClose
* @property {(number | undefined)=} start
* @property {(AbortSignal | null | undefined)=} signal
*/
/**
* @typedef {object} FSImplementation
* @property {((...args: EXPECTED_ANY[]) => EXPECTED_ANY)=} open
* @property {((...args: EXPECTED_ANY[]) => EXPECTED_ANY)=} close
*/
/**
* @typedef {FSImplementation & { write: (...args: EXPECTED_ANY[]) => EXPECTED_ANY; close?: (...args: EXPECTED_ANY[]) => EXPECTED_ANY }} CreateWriteStreamFSImplementation
*/
/**
* @typedef {StreamOptions & { fs?: CreateWriteStreamFSImplementation | null | undefined }} WriteStreamOptions
*/
/**
* @typedef {(pathLike: PathLike, result?: BufferEncoding | WriteStreamOptions) => NodeJS.WritableStream} CreateWriteStream
*/
/**
* @typedef {number | string} OpenMode
*/
/**
* @typedef {{
* (file: PathLike, flags: OpenMode | undefined, mode: Mode | undefined | null, callback: NumberCallback): void;
* (file: PathLike, flags: OpenMode | undefined, callback: NumberCallback): void;
* (file: PathLike, callback: NumberCallback): void;
* }} Open
*/
/**
* @typedef {number | bigint} ReadPosition
*/
/**
* @typedef {object} ReadSyncOptions
* @property {(number | undefined)=} offset
* @property {(number | undefined)=} length
* @property {(ReadPosition | null | undefined)=} position
*/
/**
* @template {NodeJS.ArrayBufferView} TBuffer
* @typedef {object} ReadAsyncOptions
* @property {(number | undefined)=} offset
* @property {(number | undefined)=} length
* @property {(ReadPosition | null | undefined)=} position
* @property {TBuffer=} buffer
*/
/**
* @template {NodeJS.ArrayBufferView} [TBuffer=NodeJS.ArrayBufferView]
* @typedef {{
* (fd: number, buffer: TBuffer, offset: number, length: number, position: ReadPosition | null, callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: TBuffer) => void): void;
* (fd: number, options: ReadAsyncOptions<TBuffer>, callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: TBuffer) => void): void;
* (fd: number, callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: NodeJS.ArrayBufferView) => void): void;
* }} Read
*/
/** @typedef {(df: number, callback: NoParamCallback) => void} Close */
/** @typedef {(a: PathLike, b: PathLike, callback: NoParamCallback) => void} Rename */
/**
* @typedef {object} IntermediateFileSystemExtras
* @property {MkdirSync} mkdirSync
* @property {CreateWriteStream} createWriteStream
* @property {Open} open
* @property {Read} read
* @property {Close} close
* @property {Rename} rename
*/
/** @typedef {InputFileSystem & OutputFileSystem & IntermediateFileSystemExtras} IntermediateFileSystem */
/**
* @param {InputFileSystem|OutputFileSystem|undefined} fs a file system
* @param {string} rootPath the root path
* @param {string} targetPath the target path
* @returns {string} location of targetPath relative to rootPath
*/
const relative = (fs, rootPath, targetPath) => {
if (fs && fs.relative) {
return fs.relative(rootPath, targetPath);
} else if (path.posix.isAbsolute(rootPath)) {
return path.posix.relative(rootPath, targetPath);
} else if (path.win32.isAbsolute(rootPath)) {
return path.win32.relative(rootPath, targetPath);
}
throw new Error(
`${rootPath} is neither a posix nor a windows path, and there is no 'relative' method defined in the file system`
);
};
module.exports.relative = relative;
/**
* @param {InputFileSystem|OutputFileSystem|undefined} fs a file system
* @param {string} rootPath a path
* @param {string} filename a filename
* @returns {string} the joined path
*/
const join = (fs, rootPath, filename) => {
if (fs && fs.join) {
return fs.join(rootPath, filename);
} else if (path.posix.isAbsolute(rootPath)) {
return path.posix.join(rootPath, filename);
} else if (path.win32.isAbsolute(rootPath)) {
return path.win32.join(rootPath, filename);
}
throw new Error(
`${rootPath} is neither a posix nor a windows path, and there is no 'join' method defined in the file system`
);
};
module.exports.join = join;
/**
* @param {InputFileSystem|OutputFileSystem|undefined} fs a file system
* @param {string} absPath an absolute path
* @returns {string} the parent directory of the absolute path
*/
const dirname = (fs, absPath) => {
if (fs && fs.dirname) {
return fs.dirname(absPath);
} else if (path.posix.isAbsolute(absPath)) {
return path.posix.dirname(absPath);
} else if (path.win32.isAbsolute(absPath)) {
return path.win32.dirname(absPath);
}
throw new Error(
`${absPath} is neither a posix nor a windows path, and there is no 'dirname' method defined in the file system`
);
};
module.exports.dirname = dirname;
/**
* @param {OutputFileSystem} fs a file system
* @param {string} p an absolute path
* @param {(err?: Error) => void} callback callback function for the error
* @returns {void}
*/
const mkdirp = (fs, p, callback) => {
fs.mkdir(p, err => {
if (err) {
if (err.code === "ENOENT") {
const dir = dirname(fs, p);
if (dir === p) {
callback(err);
return;
}
mkdirp(fs, dir, err => {
if (err) {
callback(err);
return;
}
fs.mkdir(p, err => {
if (err) {
if (err.code === "EEXIST") {
callback();
return;
}
callback(err);
return;
}
callback();
});
});
return;
} else if (err.code === "EEXIST") {
callback();
return;
}
callback(err);
return;
}
callback();
});
};
module.exports.mkdirp = mkdirp;
/**
* @param {IntermediateFileSystem} fs a file system
* @param {string} p an absolute path
* @returns {void}
*/
const mkdirpSync = (fs, p) => {
try {
fs.mkdirSync(p);
} catch (err) {
if (err) {
if (/** @type {NodeJS.ErrnoException} */ (err).code === "ENOENT") {
const dir = dirname(fs, p);
if (dir === p) {
throw err;
}
mkdirpSync(fs, dir);
fs.mkdirSync(p);
return;
} else if (/** @type {NodeJS.ErrnoException} */ (err).code === "EEXIST") {
return;
}
throw err;
}
}
};
module.exports.mkdirpSync = mkdirpSync;
/**
* @param {InputFileSystem} fs a file system
* @param {string} p an absolute path
* @param {ReadJsonCallback} callback callback
* @returns {void}
*/
const readJson = (fs, p, callback) => {
if ("readJson" in fs)
return /** @type {NonNullable<InputFileSystem["readJson"]>} */ (
fs.readJson
)(p, callback);
fs.readFile(p, (err, buf) => {
if (err) return callback(err);
let data;
try {
data = JSON.parse(/** @type {Buffer} */ (buf).toString("utf-8"));
} catch (err1) {
return callback(/** @type {Error} */ (err1));
}
return callback(null, data);
});
};
module.exports.readJson = readJson;
/**
* @param {InputFileSystem} fs a file system
* @param {string} p an absolute path
* @param {(err: NodeJS.ErrnoException | Error | null, stats?: IStats | string) => void} callback callback
* @returns {void}
*/
const lstatReadlinkAbsolute = (fs, p, callback) => {
let i = 3;
const doReadLink = () => {
fs.readlink(p, (err, target) => {
if (err && --i > 0) {
				// It might have just been changed from a symlink to a file
// we retry 2 times to catch this case before throwing the error
return doStat();
}
if (err) return callback(err);
const value = /** @type {string} */ (target).toString();
callback(null, join(fs, dirname(fs, p), value));
});
};
const doStat = () => {
if ("lstat" in fs) {
return /** @type {NonNullable<InputFileSystem["lstat"]>} */ (fs.lstat)(
p,
(err, stats) => {
if (err) return callback(err);
if (/** @type {IStats} */ (stats).isSymbolicLink()) {
return doReadLink();
}
callback(null, stats);
}
);
}
return fs.stat(p, callback);
};
if ("lstat" in fs) return doStat();
doReadLink();
};
module.exports.lstatReadlinkAbsolute = lstatReadlinkAbsolute;


@ -0,0 +1,33 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Alexander Akait @alexander-akait
*/
"use strict";
const createHash = require("./createHash");
/**
* @param {string | Buffer} content content
* @param {string} file file
* @returns {string} generated debug id
*/
module.exports = (content, file) => {
// We need a UUID, which is 128 bits, so we combine two 64-bit hashes.
// The first 64 bits are a hash of the source.
const sourceHash = createHash("xxhash64").update(content).digest("hex");
// The next 64 bits are a hash of the filename and the sourceHash.
const hash128 = `${sourceHash}${createHash("xxhash64")
.update(file)
.update(sourceHash)
.digest("hex")}`;
return [
hash128.slice(0, 8),
hash128.slice(8, 12),
`4${hash128.slice(12, 15)}`,
((Number.parseInt(hash128.slice(15, 16), 16) & 3) | 8).toString(16) +
hash128.slice(17, 20),
hash128.slice(20, 32)
].join("-");
};
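// --- Illustrative sketch (not part of the upstream file): the 32 hex chars are
// laid out like a UUID (8-4-4-4-12) with the version nibble forced to "4" and
// the variant nibble forced into the 8..b range. The hash value below is made
// up purely to show the slicing; real input comes from the xxhash64 calls above.
const exampleHash128 = "0123456789abcdef0123456789abcdef";
const exampleDebugId = [
	exampleHash128.slice(0, 8), // "01234567"
	exampleHash128.slice(8, 12), // "89ab"
	`4${exampleHash128.slice(12, 15)}`, // "4cde"
	((Number.parseInt(exampleHash128.slice(15, 16), 16) & 3) | 8).toString(16) +
		exampleHash128.slice(17, 20), // (0xf & 3) | 8 = 0xb -> "b123"
	exampleHash128.slice(20, 32) // "456789abcdef"
].join("-");
console.log(exampleDebugId); // "01234567-89ab-4cde-b123-456789abcdef"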


@ -0,0 +1,71 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const Hash = require("../Hash");
const MAX_SHORT_STRING = require("./wasm-hash").MAX_SHORT_STRING;
class BatchedHash extends Hash {
/**
* @param {Hash} hash hash
*/
constructor(hash) {
super();
this.string = undefined;
this.encoding = undefined;
this.hash = hash;
}
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
*/
update(data, inputEncoding) {
if (this.string !== undefined) {
if (
typeof data === "string" &&
inputEncoding === this.encoding &&
this.string.length + data.length < MAX_SHORT_STRING
) {
this.string += data;
return this;
}
this.hash.update(this.string, this.encoding);
this.string = undefined;
}
if (typeof data === "string") {
if (
data.length < MAX_SHORT_STRING &&
// base64 encoding is not valid since it may contain padding chars
(!inputEncoding || !inputEncoding.startsWith("ba"))
) {
this.string = data;
this.encoding = inputEncoding;
} else {
this.hash.update(data, inputEncoding);
}
} else {
this.hash.update(data);
}
return this;
}
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
*/
digest(encoding) {
if (this.string !== undefined) {
this.hash.update(this.string, this.encoding);
}
return this.hash.digest(encoding);
}
}
module.exports = BatchedHash;
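// --- Illustrative usage sketch (not part of the upstream file): BatchedHash
// buffers consecutive short string updates with the same encoding and flushes
// them into the wrapped hash as a single update, saving per-call overhead.
// Wrapping a Node crypto hash here is only an example.
const exampleCrypto = require("crypto");
const batched = new BatchedHash(exampleCrypto.createHash("sha256"));
batched.update("hello ").update("world"); // both land in one underlying update
console.log(batched.digest("hex"));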

20
app_vue/node_modules/webpack/lib/util/hash/md4.js generated vendored Normal file

@ -0,0 +1,20 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const create = require("./wasm-hash");
// #region wasm code: md4 (../../../assembly/hash/md4.asm.ts) --initialMemory 1
const md4 = new WebAssembly.Module(
Buffer.from(
// 2154 bytes
"AGFzbQEAAAABCAJgAX8AYAAAAwUEAQAAAAUDAQABBhoFfwFBAAt/AUEAC38BQQALfwFBAAt/AUEACwciBARpbml0AAAGdXBkYXRlAAIFZmluYWwAAwZtZW1vcnkCAAqJEAQmAEGBxpS6BiQBQYnXtv5+JAJB/rnrxXkkA0H2qMmBASQEQQAkAAvQCgEZfyMBIQUjAiECIwMhAyMEIQQDQCAAIAFLBEAgASgCBCIOIAQgAyABKAIAIg8gBSAEIAIgAyAEc3FzampBA3ciCCACIANzcXNqakEHdyEJIAEoAgwiBiACIAggASgCCCIQIAMgAiAJIAIgCHNxc2pqQQt3IgogCCAJc3FzampBE3chCyABKAIUIgcgCSAKIAEoAhAiESAIIAkgCyAJIApzcXNqakEDdyIMIAogC3Nxc2pqQQd3IQ0gASgCHCIJIAsgDCABKAIYIgggCiALIA0gCyAMc3FzampBC3ciEiAMIA1zcXNqakETdyETIAEoAiQiFCANIBIgASgCICIVIAwgDSATIA0gEnNxc2pqQQN3IgwgEiATc3FzampBB3chDSABKAIsIgsgEyAMIAEoAigiCiASIBMgDSAMIBNzcXNqakELdyISIAwgDXNxc2pqQRN3IRMgASgCNCIWIA0gEiABKAIwIhcgDCANIBMgDSASc3FzampBA3ciGCASIBNzcXNqakEHdyEZIBggASgCPCINIBMgGCABKAI4IgwgEiATIBkgEyAYc3FzampBC3ciEiAYIBlzcXNqakETdyITIBIgGXJxIBIgGXFyaiAPakGZ84nUBWpBA3ciGCATIBIgGSAYIBIgE3JxIBIgE3FyaiARakGZ84nUBWpBBXciEiATIBhycSATIBhxcmogFWpBmfOJ1AVqQQl3IhMgEiAYcnEgEiAYcXJqIBdqQZnzidQFakENdyIYIBIgE3JxIBIgE3FyaiAOakGZ84nUBWpBA3ciGSAYIBMgEiAZIBMgGHJxIBMgGHFyaiAHakGZ84nUBWpBBXciEiAYIBlycSAYIBlxcmogFGpBmfOJ1AVqQQl3IhMgEiAZcnEgEiAZcXJqIBZqQZnzidQFakENdyIYIBIgE3JxIBIgE3FyaiAQakGZ84nUBWpBA3ciGSAYIBMgEiAZIBMgGHJxIBMgGHFyaiAIakGZ84nUBWpBBXciEiAYIBlycSAYIBlxcmogCmpBmfOJ1AVqQQl3IhMgEiAZcnEgEiAZcXJqIAxqQZnzidQFakENdyIYIBIgE3JxIBIgE3FyaiAGakGZ84nUBWpBA3ciGSAYIBMgEiAZIBMgGHJxIBMgGHFyaiAJakGZ84nUBWpBBXciEiAYIBlycSAYIBlxcmogC2pBmfOJ1AVqQQl3IhMgEiAZcnEgEiAZcXJqIA1qQZnzidQFakENdyIYIBNzIBJzaiAPakGh1+f2BmpBA3ciDyAYIBMgEiAPIBhzIBNzaiAVakGh1+f2BmpBCXciEiAPcyAYc2ogEWpBodfn9gZqQQt3IhEgEnMgD3NqIBdqQaHX5/YGakEPdyIPIBFzIBJzaiAQakGh1+f2BmpBA3ciECAPIBEgEiAPIBBzIBFzaiAKakGh1+f2BmpBCXciCiAQcyAPc2ogCGpBodfn9gZqQQt3IgggCnMgEHNqIAxqQaHX5/YGakEPdyIMIAhzIApzaiAOakGh1+f2BmpBA3ciDiAMIAggCiAMIA5zIAhzaiAUakGh1+f2BmpBCXciCCAOcyAMc2ogB2pBodfn9gZqQQt3IgcgCHMgDnNqIBZqQaHX5/YGakEPdyIKIAdzIAhzaiAGakGh1+f2BmpBA3ciBiAFaiEFIAIgCiAHIAggBiAKcyAHc2ogC2pBodfn9gZqQQl3IgcgBnMgCnNqIAlqQaHX5/YGakELdyIIIAdzIAZzaiANakGh1+f2BmpBD3dqIQIgAyAIaiEDIAQgB2ohBCABQUBrIQEMAQsLIAUkASACJAIgAyQDIAQkBAsNACAAEAEjACAAaiQAC/8EAgN/AX4jACAAaq1CA4YhBCAAQcgAakFAcSICQQhrIQMgACIBQQFqIQAgAUGAAToAAANAIAAgAklBACAAQQdxGwRAIABBADoAACAAQQFqIQAMAQsLA0AgACACSQRAIABCADcDACAAQQhqIQAMAQsLIAMgBDcDACACEAFBACMBrSIEQv//A4MgBEKAgPz/D4NCEIaEIgRC/4GAgPAfgyAEQoD+g4CA4D+DQgiGhCIEQo+AvIDwgcAHg0IIhiAEQvCBwIeAnoD4AINCBIiEIgRChoyYsODAgYMGfEIEiEKBgoSIkKDAgAGDQid+IARCsODAgYOGjJgwhHw3AwBBCCMCrSIEQv//A4MgBEKAgPz/D4NCEIaEIgRC/4GAgPAfgyAEQoD+g4CA4D+DQgiGhCIEQo+AvIDwgcAHg0IIhiAEQvCBwIeAnoD4AINCBIiEIgRChoyYsODAgYMGfEIEiEKBgoSIkKDAgAGDQid+IARCsODAgYOGjJgwhHw3AwBBECMDrSIEQv//A4MgBEKAgPz/D4NCEIaEIgRC/4GAgPAfgyAEQoD+g4CA4D+DQgiGhCIEQo+AvIDwgcAHg0IIhiAEQvCBwIeAnoD4AINCBIiEIgRChoyYsODAgYMGfEIEiEKBgoSIkKDAgAGDQid+IARCsODAgYOGjJgwhHw3AwBBGCMErSIEQv//A4MgBEKAgPz/D4NCEIaEIgRC/4GAgPAfgyAEQoD+g4CA4D+DQgiGhCIEQo+AvIDwgcAHg0IIhiAEQvCBwIeAnoD4AINCBIiEIgRChoyYsODAgYMGfEIEiEKBgoSIkKDAgAGDQid+IARCsODAgYOGjJgwhHw3AwAL",
"base64"
)
);
// #endregion
module.exports = create.bind(null, md4, [], 64, 32);

174
app_vue/node_modules/webpack/lib/util/hash/wasm-hash.js generated vendored Normal file

@ -0,0 +1,174 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
// 65536 is the size of a wasm memory page
// 64 is the maximum chunk size for every possible wasm hash implementation
// 4 is the maximum number of bytes per char for string encoding (max is utf-8)
// "& ~3" rounds down to a multiple of 4 chars, so base64 input never ends on a partially encoded byte
const MAX_SHORT_STRING = Math.floor((65536 - 64) / 4) & ~3;
class WasmHash {
/**
* @param {WebAssembly.Instance} instance wasm instance
* @param {WebAssembly.Instance[]} instancesPool pool of instances
* @param {number} chunkSize size of data chunks passed to wasm
* @param {number} digestSize size of digest returned by wasm
*/
constructor(instance, instancesPool, chunkSize, digestSize) {
const exports = /** @type {EXPECTED_ANY} */ (instance.exports);
exports.init();
this.exports = exports;
this.mem = Buffer.from(exports.memory.buffer, 0, 65536);
this.buffered = 0;
this.instancesPool = instancesPool;
this.chunkSize = chunkSize;
this.digestSize = digestSize;
}
reset() {
this.buffered = 0;
this.exports.init();
}
/**
* @param {Buffer | string} data data
* @param {BufferEncoding=} encoding encoding
* @returns {this} itself
*/
update(data, encoding) {
if (typeof data === "string") {
while (data.length > MAX_SHORT_STRING) {
this._updateWithShortString(data.slice(0, MAX_SHORT_STRING), encoding);
data = data.slice(MAX_SHORT_STRING);
}
this._updateWithShortString(data, encoding);
return this;
}
this._updateWithBuffer(data);
return this;
}
/**
* @param {string} data data
* @param {BufferEncoding=} encoding encoding
* @returns {void}
*/
_updateWithShortString(data, encoding) {
const { exports, buffered, mem, chunkSize } = this;
let endPos;
if (data.length < 70) {
if (!encoding || encoding === "utf-8" || encoding === "utf8") {
endPos = buffered;
for (let i = 0; i < data.length; i++) {
const cc = data.charCodeAt(i);
if (cc < 0x80) mem[endPos++] = cc;
else if (cc < 0x800) {
mem[endPos] = (cc >> 6) | 0xc0;
mem[endPos + 1] = (cc & 0x3f) | 0x80;
endPos += 2;
} else {
// bail-out for weird chars
endPos += mem.write(data.slice(i), endPos, encoding);
break;
}
}
} else if (encoding === "latin1") {
endPos = buffered;
for (let i = 0; i < data.length; i++) {
const cc = data.charCodeAt(i);
mem[endPos++] = cc;
}
} else {
endPos = buffered + mem.write(data, buffered, encoding);
}
} else {
endPos = buffered + mem.write(data, buffered, encoding);
}
if (endPos < chunkSize) {
this.buffered = endPos;
} else {
const l = endPos & ~(this.chunkSize - 1);
exports.update(l);
const newBuffered = endPos - l;
this.buffered = newBuffered;
if (newBuffered > 0) mem.copyWithin(0, l, endPos);
}
}
/**
* @param {Buffer} data data
* @returns {void}
*/
_updateWithBuffer(data) {
const { exports, buffered, mem } = this;
const length = data.length;
if (buffered + length < this.chunkSize) {
data.copy(mem, buffered, 0, length);
this.buffered += length;
} else {
const l = (buffered + length) & ~(this.chunkSize - 1);
if (l > 65536) {
let i = 65536 - buffered;
data.copy(mem, buffered, 0, i);
exports.update(65536);
const stop = l - buffered - 65536;
while (i < stop) {
data.copy(mem, 0, i, i + 65536);
exports.update(65536);
i += 65536;
}
data.copy(mem, 0, i, l - buffered);
exports.update(l - buffered - i);
} else {
data.copy(mem, buffered, 0, l - buffered);
exports.update(l);
}
const newBuffered = length + buffered - l;
this.buffered = newBuffered;
if (newBuffered > 0) data.copy(mem, 0, length - newBuffered, length);
}
}
/**
* @param {BufferEncoding} type type
* @returns {Buffer | string} digest
*/
digest(type) {
const { exports, buffered, mem, digestSize } = this;
exports.final(buffered);
this.instancesPool.push(this);
const hex = mem.toString("latin1", 0, digestSize);
if (type === "hex") return hex;
if (type === "binary" || !type) return Buffer.from(hex, "hex");
return Buffer.from(hex, "hex").toString(type);
}
}
/**
* @param {WebAssembly.Module} wasmModule wasm module
* @param {WasmHash[]} instancesPool pool of instances
* @param {number} chunkSize size of data chunks passed to wasm
* @param {number} digestSize size of digest returned by wasm
* @returns {WasmHash} wasm hash
*/
const create = (wasmModule, instancesPool, chunkSize, digestSize) => {
if (instancesPool.length > 0) {
const old = /** @type {WasmHash} */ (instancesPool.pop());
old.reset();
return old;
}
return new WasmHash(
new WebAssembly.Instance(wasmModule),
instancesPool,
chunkSize,
digestSize
);
};
module.exports = create;
module.exports.MAX_SHORT_STRING = MAX_SHORT_STRING;
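// --- Illustrative usage sketch (not part of the upstream file): md4.js binds
// this factory to its compiled wasm module and a shared instance pool; digest()
// pushes the instance back into the pool so the next create() call can reuse it
// instead of re-instantiating the wasm.
const createMd4Example = require("./md4"); // bound as create(md4, pool, 64, 32)
const h1 = createMd4Example();
h1.update("some data");
console.log(h1.digest("hex")); // 32 hex chars (128-bit md4 digest)
const h2 = createMd4Example(); // likely a reset instance taken from the pool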

20
app_vue/node_modules/webpack/lib/util/hash/xxhash64.js generated vendored Normal file

@ -0,0 +1,20 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const create = require("./wasm-hash");
// #region wasm code: xxhash64 (../../../assembly/hash/xxhash64.asm.ts) --initialMemory 1
const xxhash64 = new WebAssembly.Module(
Buffer.from(
// 1160 bytes
"AGFzbQEAAAABCAJgAX8AYAAAAwQDAQAABQMBAAEGGgV+AUIAC34BQgALfgFCAAt+AUIAC34BQgALByIEBGluaXQAAAZ1cGRhdGUAAQVmaW5hbAACBm1lbW9yeQIACqgIAzAAQtbrgu7q/Yn14AAkAELP1tO+0ser2UIkAUIAJAJC+erQ0OfJoeThACQDQgAkBAvUAQIBfwR+IABFBEAPCyMEIACtfCQEIwAhAiMBIQMjAiEEIwMhBQNAIAIgASkDAELP1tO+0ser2UJ+fEIfiUKHla+vmLbem55/fiECIAMgASkDCELP1tO+0ser2UJ+fEIfiUKHla+vmLbem55/fiEDIAQgASkDEELP1tO+0ser2UJ+fEIfiUKHla+vmLbem55/fiEEIAUgASkDGELP1tO+0ser2UJ+fEIfiUKHla+vmLbem55/fiEFIAFBIGoiASAASQ0ACyACJAAgAyQBIAQkAiAFJAMLngYCAn8CfiMEQgBSBH4jAEIBiSMBQgeJfCMCQgyJfCMDQhKJfCMAQs/W077Sx6vZQn5CH4lCh5Wvr5i23puef36FQoeVr6+Ytt6bnn9+Qp2jteqDsY2K+gB9IwFCz9bTvtLHq9lCfkIfiUKHla+vmLbem55/foVCh5Wvr5i23puef35CnaO16oOxjYr6AH0jAkLP1tO+0ser2UJ+Qh+JQoeVr6+Ytt6bnn9+hUKHla+vmLbem55/fkKdo7Xqg7GNivoAfSMDQs/W077Sx6vZQn5CH4lCh5Wvr5i23puef36FQoeVr6+Ytt6bnn9+Qp2jteqDsY2K+gB9BULFz9my8eW66icLIwQgAK18fCEDA0AgAUEIaiICIABNBEAgAyABKQMAQs/W077Sx6vZQn5CH4lCh5Wvr5i23puef36FQhuJQoeVr6+Ytt6bnn9+Qp2jteqDsY2K+gB9IQMgAiEBDAELCyABQQRqIgIgAE0EQCADIAE1AgBCh5Wvr5i23puef36FQheJQs/W077Sx6vZQn5C+fPd8Zn2masWfCEDIAIhAQsDQCAAIAFHBEAgAyABMQAAQsXP2bLx5brqJ36FQguJQoeVr6+Ytt6bnn9+IQMgAUEBaiEBDAELC0EAIAMgA0IhiIVCz9bTvtLHq9lCfiIDQh2IIAOFQvnz3fGZ9pmrFn4iA0IgiCADhSIDQiCIIgRC//8Dg0IghiAEQoCA/P8Pg0IQiIQiBEL/gYCA8B+DQhCGIARCgP6DgIDgP4NCCIiEIgRCj4C8gPCBwAeDQgiGIARC8IHAh4CegPgAg0IEiIQiBEKGjJiw4MCBgwZ8QgSIQoGChIiQoMCAAYNCJ34gBEKw4MCBg4aMmDCEfDcDAEEIIANC/////w+DIgNC//8Dg0IghiADQoCA/P8Pg0IQiIQiA0L/gYCA8B+DQhCGIANCgP6DgIDgP4NCCIiEIgNCj4C8gPCBwAeDQgiGIANC8IHAh4CegPgAg0IEiIQiA0KGjJiw4MCBgwZ8QgSIQoGChIiQoMCAAYNCJ34gA0Kw4MCBg4aMmDCEfDcDAAs=",
"base64"
)
);
// #endregion
module.exports = create.bind(null, xxhash64, [], 32, 16);

400
app_vue/node_modules/webpack/lib/util/identifier.js generated vendored Normal file

@ -0,0 +1,400 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
const path = require("path");
const WINDOWS_ABS_PATH_REGEXP = /^[a-zA-Z]:[\\/]/;
const SEGMENTS_SPLIT_REGEXP = /([|!])/;
const WINDOWS_PATH_SEPARATOR_REGEXP = /\\/g;
/**
* @param {string} relativePath relative path
* @returns {string} request
*/
const relativePathToRequest = relativePath => {
if (relativePath === "") return "./.";
if (relativePath === "..") return "../.";
if (relativePath.startsWith("../")) return relativePath;
return `./${relativePath}`;
};
/**
* @param {string} context context for relative path
* @param {string} maybeAbsolutePath path to make relative
* @returns {string} relative path in request style
*/
const absoluteToRequest = (context, maybeAbsolutePath) => {
if (maybeAbsolutePath[0] === "/") {
if (
maybeAbsolutePath.length > 1 &&
maybeAbsolutePath[maybeAbsolutePath.length - 1] === "/"
) {
// this 'path' is actually a regexp generated by dynamic requires.
// Don't treat it as an absolute path.
return maybeAbsolutePath;
}
const querySplitPos = maybeAbsolutePath.indexOf("?");
let resource =
querySplitPos === -1
? maybeAbsolutePath
: maybeAbsolutePath.slice(0, querySplitPos);
resource = relativePathToRequest(path.posix.relative(context, resource));
return querySplitPos === -1
? resource
: resource + maybeAbsolutePath.slice(querySplitPos);
}
if (WINDOWS_ABS_PATH_REGEXP.test(maybeAbsolutePath)) {
const querySplitPos = maybeAbsolutePath.indexOf("?");
let resource =
querySplitPos === -1
? maybeAbsolutePath
: maybeAbsolutePath.slice(0, querySplitPos);
resource = path.win32.relative(context, resource);
if (!WINDOWS_ABS_PATH_REGEXP.test(resource)) {
resource = relativePathToRequest(
resource.replace(WINDOWS_PATH_SEPARATOR_REGEXP, "/")
);
}
return querySplitPos === -1
? resource
: resource + maybeAbsolutePath.slice(querySplitPos);
}
// not an absolute path
return maybeAbsolutePath;
};
/**
* @param {string} context context for relative path
* @param {string} relativePath path
* @returns {string} absolute path
*/
const requestToAbsolute = (context, relativePath) => {
if (relativePath.startsWith("./") || relativePath.startsWith("../"))
return path.join(context, relativePath);
return relativePath;
};
/** @typedef {EXPECTED_OBJECT} AssociatedObjectForCache */
/**
* @template T
* @typedef {(value: string, cache?: AssociatedObjectForCache) => T} MakeCacheableResult
*/
/**
* @template T
* @typedef {(value: string) => T} BindCacheResultFn
*/
/**
* @template T
* @typedef {(cache: AssociatedObjectForCache) => BindCacheResultFn<T>} BindCache
*/
/**
* @template T
* @param {((value: string) => T)} realFn real function
* @returns {MakeCacheableResult<T> & { bindCache: BindCache<T> }} cacheable function
*/
const makeCacheable = realFn => {
/**
* @template T
* @typedef {Map<string, T>} CacheItem
*/
/** @type {WeakMap<AssociatedObjectForCache, CacheItem<T>>} */
const cache = new WeakMap();
/**
* @param {AssociatedObjectForCache} associatedObjectForCache an object to which the cache will be attached
* @returns {CacheItem<T>} cache item
*/
const getCache = associatedObjectForCache => {
const entry = cache.get(associatedObjectForCache);
if (entry !== undefined) return entry;
/** @type {Map<string, T>} */
const map = new Map();
cache.set(associatedObjectForCache, map);
return map;
};
/** @type {MakeCacheableResult<T> & { bindCache: BindCache<T> }} */
const fn = (str, associatedObjectForCache) => {
if (!associatedObjectForCache) return realFn(str);
const cache = getCache(associatedObjectForCache);
const entry = cache.get(str);
if (entry !== undefined) return entry;
const result = realFn(str);
cache.set(str, result);
return result;
};
/** @type {BindCache<T>} */
fn.bindCache = associatedObjectForCache => {
const cache = getCache(associatedObjectForCache);
/**
* @param {string} str string
* @returns {T} value
*/
return str => {
const entry = cache.get(str);
if (entry !== undefined) return entry;
const result = realFn(str);
cache.set(str, result);
return result;
};
};
return fn;
};
/** @typedef {(context: string, value: string, associatedObjectForCache?: AssociatedObjectForCache) => string} MakeCacheableWithContextResult */
/** @typedef {(context: string, value: string) => string} BindCacheForContextResultFn */
/** @typedef {(value: string) => string} BindContextCacheForContextResultFn */
/** @typedef {(associatedObjectForCache?: AssociatedObjectForCache) => BindCacheForContextResultFn} BindCacheForContext */
/** @typedef {(value: string, associatedObjectForCache?: AssociatedObjectForCache) => BindContextCacheForContextResultFn} BindContextCacheForContext */
/**
* @param {(context: string, identifier: string) => string} fn function
* @returns {MakeCacheableWithContextResult & { bindCache: BindCacheForContext, bindContextCache: BindContextCacheForContext }} cacheable function with context
*/
const makeCacheableWithContext = fn => {
/** @type {WeakMap<AssociatedObjectForCache, Map<string, Map<string, string>>>} */
const cache = new WeakMap();
/** @type {MakeCacheableWithContextResult & { bindCache: BindCacheForContext, bindContextCache: BindContextCacheForContext }} */
const cachedFn = (context, identifier, associatedObjectForCache) => {
if (!associatedObjectForCache) return fn(context, identifier);
let innerCache = cache.get(associatedObjectForCache);
if (innerCache === undefined) {
innerCache = new Map();
cache.set(associatedObjectForCache, innerCache);
}
let cachedResult;
let innerSubCache = innerCache.get(context);
if (innerSubCache === undefined) {
innerCache.set(context, (innerSubCache = new Map()));
} else {
cachedResult = innerSubCache.get(identifier);
}
if (cachedResult !== undefined) {
return cachedResult;
}
const result = fn(context, identifier);
innerSubCache.set(identifier, result);
return result;
};
/** @type {BindCacheForContext} */
cachedFn.bindCache = associatedObjectForCache => {
let innerCache;
if (associatedObjectForCache) {
innerCache = cache.get(associatedObjectForCache);
if (innerCache === undefined) {
innerCache = new Map();
cache.set(associatedObjectForCache, innerCache);
}
} else {
innerCache = new Map();
}
/**
* @param {string} context context used to create relative path
* @param {string} identifier identifier used to create relative path
* @returns {string} the returned relative path
*/
const boundFn = (context, identifier) => {
let cachedResult;
let innerSubCache = innerCache.get(context);
if (innerSubCache === undefined) {
innerCache.set(context, (innerSubCache = new Map()));
} else {
cachedResult = innerSubCache.get(identifier);
}
if (cachedResult !== undefined) {
return cachedResult;
}
const result = fn(context, identifier);
innerSubCache.set(identifier, result);
return result;
};
return boundFn;
};
/** @type {BindContextCacheForContext} */
cachedFn.bindContextCache = (context, associatedObjectForCache) => {
let innerSubCache;
if (associatedObjectForCache) {
let innerCache = cache.get(associatedObjectForCache);
if (innerCache === undefined) {
innerCache = new Map();
cache.set(associatedObjectForCache, innerCache);
}
innerSubCache = innerCache.get(context);
if (innerSubCache === undefined) {
innerCache.set(context, (innerSubCache = new Map()));
}
} else {
innerSubCache = new Map();
}
/**
* @param {string} identifier identifier used to create relative path
* @returns {string} the returned relative path
*/
const boundFn = identifier => {
const cachedResult = innerSubCache.get(identifier);
if (cachedResult !== undefined) {
return cachedResult;
}
const result = fn(context, identifier);
innerSubCache.set(identifier, result);
return result;
};
return boundFn;
};
return cachedFn;
};
/**
* @param {string} context context for relative path
* @param {string} identifier identifier for path
* @returns {string} a converted relative path
*/
const _makePathsRelative = (context, identifier) =>
identifier
.split(SEGMENTS_SPLIT_REGEXP)
.map(str => absoluteToRequest(context, str))
.join("");
module.exports.makePathsRelative = makeCacheableWithContext(_makePathsRelative);
/**
* @param {string} context context for relative path
* @param {string} identifier identifier for path
* @returns {string} a converted relative path
*/
const _makePathsAbsolute = (context, identifier) =>
identifier
.split(SEGMENTS_SPLIT_REGEXP)
.map(str => requestToAbsolute(context, str))
.join("");
module.exports.makePathsAbsolute = makeCacheableWithContext(_makePathsAbsolute);
/**
* @param {string} context absolute context path
* @param {string} request any request string, possibly containing absolute paths, a query string, etc.
* @returns {string} a new request string avoiding absolute paths when possible
*/
const _contextify = (context, request) =>
request
.split("!")
.map(r => absoluteToRequest(context, r))
.join("!");
const contextify = makeCacheableWithContext(_contextify);
module.exports.contextify = contextify;
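// --- Illustrative sketch (not part of the upstream file): contextify rewrites
// every absolute segment of a (possibly loader-prefixed) request into a
// context-relative request. The paths below are made up.
console.log(
	contextify(
		"/project",
		"/project/node_modules/css-loader/index.js!/project/src/style.css"
	)
); // -> "./node_modules/css-loader/index.js!./src/style.css"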
/**
* @param {string} context absolute context path
* @param {string} request any request string
* @returns {string} a new request string using absolute paths when possible
*/
const _absolutify = (context, request) =>
request
.split("!")
.map(r => requestToAbsolute(context, r))
.join("!");
const absolutify = makeCacheableWithContext(_absolutify);
module.exports.absolutify = absolutify;
const PATH_QUERY_FRAGMENT_REGEXP =
/^((?:\0.|[^?#\0])*)(\?(?:\0.|[^#\0])*)?(#.*)?$/;
const PATH_QUERY_REGEXP = /^((?:\0.|[^?\0])*)(\?.*)?$/;
/** @typedef {{ resource: string, path: string, query: string, fragment: string }} ParsedResource */
/** @typedef {{ resource: string, path: string, query: string }} ParsedResourceWithoutFragment */
/**
* @param {string} str the path with query and fragment
* @returns {ParsedResource} parsed parts
*/
const _parseResource = str => {
const match =
/** @type {[string, string, string | undefined, string | undefined]} */
(/** @type {unknown} */ (PATH_QUERY_FRAGMENT_REGEXP.exec(str)));
return {
resource: str,
path: match[1].replace(/\0(.)/g, "$1"),
query: match[2] ? match[2].replace(/\0(.)/g, "$1") : "",
fragment: match[3] || ""
};
};
module.exports.parseResource = makeCacheable(_parseResource);
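// --- Illustrative sketch (not part of the upstream file): the regexp splits a
// request into path, query and fragment; "\0"-escaped characters are unescaped
// in the parts but kept as-is in `resource`.
console.log(_parseResource("./style.css?modules=true#header"));
// -> { resource: "./style.css?modules=true#header",
//      path: "./style.css", query: "?modules=true", fragment: "#header" }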
/**
* Parse resource, skips fragment part
* @param {string} str the path with query and fragment
* @returns {ParsedResourceWithoutFragment} parsed parts
*/
const _parseResourceWithoutFragment = str => {
const match =
/** @type {[string, string, string | undefined]} */
(/** @type {unknown} */ (PATH_QUERY_REGEXP.exec(str)));
return {
resource: str,
path: match[1].replace(/\0(.)/g, "$1"),
query: match[2] ? match[2].replace(/\0(.)/g, "$1") : ""
};
};
module.exports.parseResourceWithoutFragment = makeCacheable(
_parseResourceWithoutFragment
);
/**
* @param {string} filename the filename which should be undone
* @param {string} outputPath the output path that is restored (only relevant when filename contains "..")
* @param {boolean} enforceRelative when true, "./" is returned for empty paths
* @returns {string} repeated ../ to leave the directory of the provided filename to be back on output dir
*/
module.exports.getUndoPath = (filename, outputPath, enforceRelative) => {
let depth = -1;
let append = "";
outputPath = outputPath.replace(/[\\/]$/, "");
for (const part of filename.split(/[/\\]+/)) {
if (part === "..") {
if (depth > -1) {
depth--;
} else {
const i = outputPath.lastIndexOf("/");
const j = outputPath.lastIndexOf("\\");
const pos = i < 0 ? j : j < 0 ? i : Math.max(i, j);
if (pos < 0) return `${outputPath}/`;
append = `${outputPath.slice(pos + 1)}/${append}`;
outputPath = outputPath.slice(0, pos);
}
} else if (part !== ".") {
depth++;
}
}
return depth > 0
? `${"../".repeat(depth)}${append}`
: enforceRelative
? `./${append}`
: append;
};
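// --- Illustrative sketch (not part of the upstream file): for a filename two
// directories deep the undo path climbs back up to the output directory; for a
// top-level file it is "./" or "" depending on enforceRelative.
console.log(module.exports.getUndoPath("assets/js/app.js", "/dist", false)); // "../../"
console.log(module.exports.getUndoPath("main.js", "/dist", true)); // "./"
console.log(module.exports.getUndoPath("main.js", "/dist", false)); // ""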


@ -0,0 +1,224 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
// We need to include a list of requires here
// to allow webpack to be bundled with only static requires
// We could use a dynamic require(`../${request}`) but this
// would include too many modules and not every tool is able
// to process this
module.exports = {
AsyncDependenciesBlock: () => require("../AsyncDependenciesBlock"),
CommentCompilationWarning: () => require("../CommentCompilationWarning"),
ContextModule: () => require("../ContextModule"),
"cache/PackFileCacheStrategy": () =>
require("../cache/PackFileCacheStrategy"),
"cache/ResolverCachePlugin": () => require("../cache/ResolverCachePlugin"),
"container/ContainerEntryDependency": () =>
require("../container/ContainerEntryDependency"),
"container/ContainerEntryModule": () =>
require("../container/ContainerEntryModule"),
"container/ContainerExposedDependency": () =>
require("../container/ContainerExposedDependency"),
"container/FallbackDependency": () =>
require("../container/FallbackDependency"),
"container/FallbackItemDependency": () =>
require("../container/FallbackItemDependency"),
"container/FallbackModule": () => require("../container/FallbackModule"),
"container/RemoteModule": () => require("../container/RemoteModule"),
"container/RemoteToExternalDependency": () =>
require("../container/RemoteToExternalDependency"),
"dependencies/AMDDefineDependency": () =>
require("../dependencies/AMDDefineDependency"),
"dependencies/AMDRequireArrayDependency": () =>
require("../dependencies/AMDRequireArrayDependency"),
"dependencies/AMDRequireContextDependency": () =>
require("../dependencies/AMDRequireContextDependency"),
"dependencies/AMDRequireDependenciesBlock": () =>
require("../dependencies/AMDRequireDependenciesBlock"),
"dependencies/AMDRequireDependency": () =>
require("../dependencies/AMDRequireDependency"),
"dependencies/AMDRequireItemDependency": () =>
require("../dependencies/AMDRequireItemDependency"),
"dependencies/CachedConstDependency": () =>
require("../dependencies/CachedConstDependency"),
"dependencies/ExternalModuleDependency": () =>
require("../dependencies/ExternalModuleDependency"),
"dependencies/ExternalModuleInitFragment": () =>
require("../dependencies/ExternalModuleInitFragment"),
"dependencies/CreateScriptUrlDependency": () =>
require("../dependencies/CreateScriptUrlDependency"),
"dependencies/CommonJsRequireContextDependency": () =>
require("../dependencies/CommonJsRequireContextDependency"),
"dependencies/CommonJsExportRequireDependency": () =>
require("../dependencies/CommonJsExportRequireDependency"),
"dependencies/CommonJsExportsDependency": () =>
require("../dependencies/CommonJsExportsDependency"),
"dependencies/CommonJsFullRequireDependency": () =>
require("../dependencies/CommonJsFullRequireDependency"),
"dependencies/CommonJsRequireDependency": () =>
require("../dependencies/CommonJsRequireDependency"),
"dependencies/CommonJsSelfReferenceDependency": () =>
require("../dependencies/CommonJsSelfReferenceDependency"),
"dependencies/ConstDependency": () =>
require("../dependencies/ConstDependency"),
"dependencies/ContextDependency": () =>
require("../dependencies/ContextDependency"),
"dependencies/ContextElementDependency": () =>
require("../dependencies/ContextElementDependency"),
"dependencies/CriticalDependencyWarning": () =>
require("../dependencies/CriticalDependencyWarning"),
"dependencies/CssImportDependency": () =>
require("../dependencies/CssImportDependency"),
"dependencies/CssLocalIdentifierDependency": () =>
require("../dependencies/CssLocalIdentifierDependency"),
"dependencies/CssSelfLocalIdentifierDependency": () =>
require("../dependencies/CssSelfLocalIdentifierDependency"),
"dependencies/CssIcssImportDependency": () =>
require("../dependencies/CssIcssImportDependency"),
"dependencies/CssIcssExportDependency": () =>
require("../dependencies/CssIcssExportDependency"),
"dependencies/CssUrlDependency": () =>
require("../dependencies/CssUrlDependency"),
"dependencies/CssIcssSymbolDependency": () =>
require("../dependencies/CssIcssSymbolDependency"),
"dependencies/DelegatedSourceDependency": () =>
require("../dependencies/DelegatedSourceDependency"),
"dependencies/DllEntryDependency": () =>
require("../dependencies/DllEntryDependency"),
"dependencies/EntryDependency": () =>
require("../dependencies/EntryDependency"),
"dependencies/ExportsInfoDependency": () =>
require("../dependencies/ExportsInfoDependency"),
"dependencies/HarmonyAcceptDependency": () =>
require("../dependencies/HarmonyAcceptDependency"),
"dependencies/HarmonyAcceptImportDependency": () =>
require("../dependencies/HarmonyAcceptImportDependency"),
"dependencies/HarmonyCompatibilityDependency": () =>
require("../dependencies/HarmonyCompatibilityDependency"),
"dependencies/HarmonyExportExpressionDependency": () =>
require("../dependencies/HarmonyExportExpressionDependency"),
"dependencies/HarmonyExportHeaderDependency": () =>
require("../dependencies/HarmonyExportHeaderDependency"),
"dependencies/HarmonyExportImportedSpecifierDependency": () =>
require("../dependencies/HarmonyExportImportedSpecifierDependency"),
"dependencies/HarmonyExportSpecifierDependency": () =>
require("../dependencies/HarmonyExportSpecifierDependency"),
"dependencies/HarmonyImportSideEffectDependency": () =>
require("../dependencies/HarmonyImportSideEffectDependency"),
"dependencies/HarmonyImportSpecifierDependency": () =>
require("../dependencies/HarmonyImportSpecifierDependency"),
"dependencies/HarmonyEvaluatedImportSpecifierDependency": () =>
require("../dependencies/HarmonyEvaluatedImportSpecifierDependency"),
"dependencies/ImportContextDependency": () =>
require("../dependencies/ImportContextDependency"),
"dependencies/ImportDependency": () =>
require("../dependencies/ImportDependency"),
"dependencies/ImportEagerDependency": () =>
require("../dependencies/ImportEagerDependency"),
"dependencies/ImportWeakDependency": () =>
require("../dependencies/ImportWeakDependency"),
"dependencies/JsonExportsDependency": () =>
require("../dependencies/JsonExportsDependency"),
"dependencies/LocalModule": () => require("../dependencies/LocalModule"),
"dependencies/LocalModuleDependency": () =>
require("../dependencies/LocalModuleDependency"),
"dependencies/ModuleDecoratorDependency": () =>
require("../dependencies/ModuleDecoratorDependency"),
"dependencies/ModuleHotAcceptDependency": () =>
require("../dependencies/ModuleHotAcceptDependency"),
"dependencies/ModuleHotDeclineDependency": () =>
require("../dependencies/ModuleHotDeclineDependency"),
"dependencies/ImportMetaHotAcceptDependency": () =>
require("../dependencies/ImportMetaHotAcceptDependency"),
"dependencies/ImportMetaHotDeclineDependency": () =>
require("../dependencies/ImportMetaHotDeclineDependency"),
"dependencies/ImportMetaContextDependency": () =>
require("../dependencies/ImportMetaContextDependency"),
"dependencies/ProvidedDependency": () =>
require("../dependencies/ProvidedDependency"),
"dependencies/PureExpressionDependency": () =>
require("../dependencies/PureExpressionDependency"),
"dependencies/RequireContextDependency": () =>
require("../dependencies/RequireContextDependency"),
"dependencies/RequireEnsureDependenciesBlock": () =>
require("../dependencies/RequireEnsureDependenciesBlock"),
"dependencies/RequireEnsureDependency": () =>
require("../dependencies/RequireEnsureDependency"),
"dependencies/RequireEnsureItemDependency": () =>
require("../dependencies/RequireEnsureItemDependency"),
"dependencies/RequireHeaderDependency": () =>
require("../dependencies/RequireHeaderDependency"),
"dependencies/RequireIncludeDependency": () =>
require("../dependencies/RequireIncludeDependency"),
"dependencies/RequireIncludeDependencyParserPlugin": () =>
require("../dependencies/RequireIncludeDependencyParserPlugin"),
"dependencies/RequireResolveContextDependency": () =>
require("../dependencies/RequireResolveContextDependency"),
"dependencies/RequireResolveDependency": () =>
require("../dependencies/RequireResolveDependency"),
"dependencies/RequireResolveHeaderDependency": () =>
require("../dependencies/RequireResolveHeaderDependency"),
"dependencies/RuntimeRequirementsDependency": () =>
require("../dependencies/RuntimeRequirementsDependency"),
"dependencies/StaticExportsDependency": () =>
require("../dependencies/StaticExportsDependency"),
"dependencies/SystemPlugin": () => require("../dependencies/SystemPlugin"),
"dependencies/UnsupportedDependency": () =>
require("../dependencies/UnsupportedDependency"),
"dependencies/URLDependency": () => require("../dependencies/URLDependency"),
"dependencies/WebAssemblyExportImportedDependency": () =>
require("../dependencies/WebAssemblyExportImportedDependency"),
"dependencies/WebAssemblyImportDependency": () =>
require("../dependencies/WebAssemblyImportDependency"),
"dependencies/WebpackIsIncludedDependency": () =>
require("../dependencies/WebpackIsIncludedDependency"),
"dependencies/WorkerDependency": () =>
require("../dependencies/WorkerDependency"),
"json/JsonData": () => require("../json/JsonData"),
"optimize/ConcatenatedModule": () =>
require("../optimize/ConcatenatedModule"),
DelegatedModule: () => require("../DelegatedModule"),
DependenciesBlock: () => require("../DependenciesBlock"),
DllModule: () => require("../DllModule"),
ExternalModule: () => require("../ExternalModule"),
FileSystemInfo: () => require("../FileSystemInfo"),
InitFragment: () => require("../InitFragment"),
InvalidDependenciesModuleWarning: () =>
require("../InvalidDependenciesModuleWarning"),
Module: () => require("../Module"),
ModuleBuildError: () => require("../ModuleBuildError"),
ModuleDependencyWarning: () => require("../ModuleDependencyWarning"),
ModuleError: () => require("../ModuleError"),
ModuleGraph: () => require("../ModuleGraph"),
ModuleParseError: () => require("../ModuleParseError"),
ModuleWarning: () => require("../ModuleWarning"),
NormalModule: () => require("../NormalModule"),
CssModule: () => require("../CssModule"),
RawDataUrlModule: () => require("../asset/RawDataUrlModule"),
RawModule: () => require("../RawModule"),
"sharing/ConsumeSharedModule": () =>
require("../sharing/ConsumeSharedModule"),
"sharing/ConsumeSharedFallbackDependency": () =>
require("../sharing/ConsumeSharedFallbackDependency"),
"sharing/ProvideSharedModule": () =>
require("../sharing/ProvideSharedModule"),
"sharing/ProvideSharedDependency": () =>
require("../sharing/ProvideSharedDependency"),
"sharing/ProvideForSharedDependency": () =>
require("../sharing/ProvideForSharedDependency"),
UnsupportedFeatureWarning: () => require("../UnsupportedFeatureWarning"),
"util/LazySet": () => require("../util/LazySet"),
UnhandledSchemeError: () => require("../UnhandledSchemeError"),
NodeStuffInWebError: () => require("../NodeStuffInWebError"),
EnvironmentNotSupportAsyncWarning: () =>
require("../EnvironmentNotSupportAsyncWarning"),
WebpackError: () => require("../WebpackError"),
"util/registerExternalSerializer": () => {
// already registered
}
};

21
app_vue/node_modules/webpack/lib/util/magicComment.js generated vendored Normal file

@ -0,0 +1,21 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Alexander Akait @alexander-akait
*/
"use strict";
// regexp to match at least one "magic comment"
module.exports.webpackCommentRegExp = new RegExp(
/(^|\W)webpack[A-Z]{1,}[A-Za-z]{1,}:/
);
// vm context in which magic comment expressions are evaluated (dynamic code generation disabled)
/**
* @returns {import("vm").Context} magic comment context
*/
module.exports.createMagicCommentContext = () =>
require("vm").createContext(undefined, {
name: "Webpack Magic Comment Parser",
codeGeneration: { strings: false, wasm: false }
});
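// --- Illustrative sketch (not part of the upstream file): the regexp only
// detects that something shaped like a magic comment is present; the comment
// body is later evaluated inside the restricted vm context created above.
const { webpackCommentRegExp } = module.exports;
console.log(webpackCommentRegExp.test('/* webpackChunkName: "about" */')); // true
console.log(webpackCommentRegExp.test("/* plain comment */")); // false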


@ -0,0 +1,60 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
const { register } = require("./serialization");
/** @typedef {import("../serialization/ObjectMiddleware").Constructor} Constructor */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectDeserializerContext} ObjectDeserializerContext */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectSerializerContext} ObjectSerializerContext */
/** @typedef {{ serialize: (context: ObjectSerializerContext) => void, deserialize: (context: ObjectDeserializerContext) => void }} SerializableClass */
/**
* @template {SerializableClass} T
* @typedef {(new (...params: EXPECTED_ANY[]) => T) & { deserialize?: (context: ObjectDeserializerContext) => T }} SerializableClassConstructor
*/
/**
* @template {SerializableClass} T
*/
class ClassSerializer {
/**
* @param {SerializableClassConstructor<T>} Constructor constructor
*/
constructor(Constructor) {
this.Constructor = Constructor;
}
/**
* @param {T} obj obj
* @param {ObjectSerializerContext} context context
*/
serialize(obj, context) {
obj.serialize(context);
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {T} obj
*/
deserialize(context) {
if (typeof this.Constructor.deserialize === "function") {
return this.Constructor.deserialize(context);
}
const obj = new this.Constructor();
obj.deserialize(context);
return obj;
}
}
/**
* @template {Constructor} T
* @param {T} Constructor the constructor
* @param {string} request the request which will be required when deserializing
* @param {string | null=} name the name to make multiple serializers unique when sharing a request
*/
module.exports = (Constructor, request, name = null) => {
register(Constructor, request, name, new ClassSerializer(Constructor));
};
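// --- Illustrative usage sketch (not part of the upstream file): a class opts
// into webpack's persistent cache by providing serialize/deserialize and
// registering itself. The class and the request path are hypothetical; a real
// request must be resolvable again when deserializing.
class ExampleCacheEntry {
	constructor(key, value) {
		this.key = key;
		this.value = value;
	}
	serialize({ write }) {
		write(this.key);
		write(this.value);
	}
	deserialize({ read }) {
		this.key = read();
		this.value = read();
	}
}
module.exports(ExampleCacheEntry, "my-plugin/ExampleCacheEntry");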

36
app_vue/node_modules/webpack/lib/util/memoize.js generated vendored Normal file

@ -0,0 +1,36 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
/**
* @template T
* @typedef {() => T} FunctionReturning
*/
/**
* @template T
* @param {FunctionReturning<T>} fn function to memoize
* @returns {FunctionReturning<T>} new function
*/
const memoize = fn => {
let cache = false;
/** @type {T | undefined} */
let result;
return () => {
if (cache) {
return /** @type {T} */ (result);
}
result = fn();
cache = true;
// Allow the memory held by fn and all of its captured resources
// to be garbage collected
/** @type {FunctionReturning<T> | undefined} */
(fn) = undefined;
return /** @type {T} */ (result);
};
};
module.exports = memoize;
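// --- Illustrative usage sketch (not part of the upstream file): the wrapped
// function runs once; later calls return the cached result and the original fn
// reference is dropped so it can be garbage collected.
const getExampleParser = memoize(() => {
	console.log("building parser"); // printed only once
	return { parse: source => source.trim() };
});
getExampleParser().parse(" a ");
getExampleParser().parse(" b "); // reuses the same parser object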


@ -0,0 +1,22 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Ivan Kopeykin @vankop
*/
"use strict";
const A_CODE = "a".charCodeAt(0);
/**
* @param {string} hash hash
* @param {number} hashLength hash length
* @returns {string} returns hash that has at least one non numeric char
*/
module.exports = (hash, hashLength) => {
if (hashLength < 1) return "";
const slice = hash.slice(0, hashLength);
if (/[^\d]/.test(slice)) return slice;
return `${String.fromCharCode(
A_CODE + (Number.parseInt(hash[0], 10) % 6)
)}${slice.slice(1)}`;
};
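// --- Illustrative sketch (not part of the upstream file): an all-digit prefix
// gets its first character remapped into "a".."f" so the result always contains
// a non-numeric character.
console.log(module.exports("12345678", 4)); // "b234" (1 % 6 = 1 -> "b")
console.log(module.exports("a1234567", 4)); // "a123" (already non-numeric)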

95
app_vue/node_modules/webpack/lib/util/numberHash.js generated vendored Normal file

@ -0,0 +1,95 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* Threshold for switching from 32-bit to 64-bit hashing. This is selected to ensure that the bias towards lower modulo results when using 32-bit hashing is <0.5%.
* @type {number}
*/
const FNV_64_THRESHOLD = 1 << 24;
/**
* The FNV-1a offset basis for 32-bit hash values.
* @type {number}
*/
const FNV_OFFSET_32 = 2166136261;
/**
* The FNV-1a prime for 32-bit hash values.
* @type {number}
*/
const FNV_PRIME_32 = 16777619;
/**
* The mask for a positive 32-bit signed integer.
* @type {number}
*/
const MASK_31 = 0x7fffffff;
/**
* The FNV-1a offset basis for 64-bit hash values.
* @type {bigint}
*/
const FNV_OFFSET_64 = BigInt("0xCBF29CE484222325");
/**
* The FNV-1a prime for 64-bit hash values.
* @type {bigint}
*/
const FNV_PRIME_64 = BigInt("0x100000001B3");
/**
* Computes a 32-bit FNV-1a hash value for the given string.
* See https://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function
* @param {string} str The input string to hash
* @returns {number} - The computed hash value.
*/
function fnv1a32(str) {
let hash = FNV_OFFSET_32;
for (let i = 0, len = str.length; i < len; i++) {
hash ^= str.charCodeAt(i);
// Use Math.imul to do c-style 32-bit multiplication and keep only the 32 least significant bits
hash = Math.imul(hash, FNV_PRIME_32);
}
// Force the result to be positive
return hash & MASK_31;
}
/**
* Computes a 64-bit FNV-1a hash value for the given string.
* See https://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function
* @param {string} str The input string to hash
* @returns {bigint} - The computed hash value.
*/
function fnv1a64(str) {
let hash = FNV_OFFSET_64;
for (let i = 0, len = str.length; i < len; i++) {
hash ^= BigInt(str.charCodeAt(i));
hash = BigInt.asUintN(64, hash * FNV_PRIME_64);
}
return hash;
}
/**
* Computes a hash value for the given string and range. This hashing algorithm is a modified
* version of the [FNV-1a algorithm](https://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function).
* It is optimized for speed and does **not** generate a cryptographic hash value.
*
* We use `numberHash` in `lib/ids/IdHelpers.js` to generate hash values for the module identifier. The generated
* hash is used as a prefix for the module id's to avoid collisions with other modules.
* @param {string} str The input string to hash.
* @param {number} range The range of the hash value (0 to range-1).
* @returns {number} - The computed hash value.
* @example
* ```js
* const numberHash = require("webpack/lib/util/numberHash");
* numberHash("hello", 1000); // 73
* numberHash("hello world"); // 72
* ```
*/
module.exports = (str, range) => {
if (range < FNV_64_THRESHOLD) {
return fnv1a32(str) % range;
}
return Number(fnv1a64(str) % BigInt(range));
};

15
app_vue/node_modules/webpack/lib/util/objectToMap.js generated vendored Normal file

@ -0,0 +1,15 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
/**
* Convert an object into an ES6 map
* @template {object} T
* @param {T} obj any object type that works with Object.entries()
* @returns {Map<string, T[keyof T]>} an ES6 Map of KV pairs
*/
module.exports = function objectToMap(obj) {
return new Map(Object.entries(obj));
};
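// --- Illustrative sketch (not part of the upstream file): typical input is a
// plain options object; the values below are made up.
console.log(module.exports({ react: "preact/compat", "react-dom": "preact/compat" }));
// -> Map(2) { "react" => "preact/compat", "react-dom" => "preact/compat" }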


@ -0,0 +1,68 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @template T
* @template {Error} E
* @param {Iterable<T>} items initial items
* @param {number} concurrency number of items running in parallel
* @param {(item: T, push: (item: T) => void, callback: (err?: E) => void) => void} processor worker which pushes more items
* @param {(err?: E) => void} callback all items processed
* @returns {void}
*/
const processAsyncTree = (items, concurrency, processor, callback) => {
const queue = Array.from(items);
if (queue.length === 0) return callback();
let processing = 0;
let finished = false;
let processScheduled = true;
/**
* @param {T} item item
*/
const push = item => {
queue.push(item);
if (!processScheduled && processing < concurrency) {
processScheduled = true;
process.nextTick(processQueue);
}
};
/**
* @param {E | null | undefined} err error
*/
const processorCallback = err => {
processing--;
if (err && !finished) {
finished = true;
callback(err);
return;
}
if (!processScheduled) {
processScheduled = true;
process.nextTick(processQueue);
}
};
const processQueue = () => {
if (finished) return;
while (processing < concurrency && queue.length > 0) {
processing++;
const item = /** @type {T} */ (queue.pop());
processor(item, push, processorCallback);
}
processScheduled = false;
if (queue.length === 0 && processing === 0 && !finished) {
finished = true;
callback();
}
};
processQueue();
};
module.exports = processAsyncTree;
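// --- Illustrative usage sketch (not part of the upstream file): walking a
// directory tree with at most 4 parallel readdir calls; the processor pushes
// child directories back into the queue. Paths and the fs usage are made up.
const exampleFs = require("fs");
const examplePath = require("path");
processAsyncTree(["/tmp/webpack-example"], 4, (dir, push, callback) => {
	exampleFs.readdir(dir, { withFileTypes: true }, (err, entries) => {
		if (err) return callback(err);
		for (const entry of entries) {
			if (entry.isDirectory()) push(examplePath.join(dir, entry.name));
		}
		callback();
	});
}, err => {
	if (err) return console.error(err);
	console.log("tree walked");
});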


@ -0,0 +1,30 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { SAFE_IDENTIFIER, RESERVED_IDENTIFIER } = require("./propertyName");
/**
* @param {ArrayLike<string>} properties properties
* @param {number} start start index
* @returns {string} chain of property accesses
*/
const propertyAccess = (properties, start = 0) => {
let str = "";
for (let i = start; i < properties.length; i++) {
const p = properties[i];
if (`${Number(p)}` === p) {
str += `[${p}]`;
} else if (SAFE_IDENTIFIER.test(p) && !RESERVED_IDENTIFIER.has(p)) {
str += `.${p}`;
} else {
str += `[${JSON.stringify(p)}]`;
}
}
return str;
};
module.exports = propertyAccess;
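// --- Illustrative sketch (not part of the upstream file): numeric keys become
// index access, safe identifiers become dot access, everything else (including
// reserved words) becomes quoted bracket access.
console.log(propertyAccess(["exports", "0", "foo-bar", "default"]));
// -> '.exports[0]["foo-bar"]["default"]'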

76
app_vue/node_modules/webpack/lib/util/propertyName.js generated vendored Normal file

@ -0,0 +1,76 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const SAFE_IDENTIFIER = /^[_a-zA-Z$][_a-zA-Z$0-9]*$/;
const RESERVED_IDENTIFIER = new Set([
"break",
"case",
"catch",
"class",
"const",
"continue",
"debugger",
"default",
"delete",
"do",
"else",
"export",
"extends",
"finally",
"for",
"function",
"if",
"import",
"in",
"instanceof",
"new",
"return",
"super",
"switch",
"this",
"throw",
"try",
"typeof",
"var",
"void",
"while",
"with",
"enum",
// strict mode
"implements",
"interface",
"let",
"package",
"private",
"protected",
"public",
"static",
"yield",
// module code
"await",
// skip future reserved keywords defined under ES1 till ES3
// additional
"null",
"true",
"false"
]);
/**
* @summary Returns a valid JS property name for the given property.
* Certain strings like "default", "null", and names with whitespace are not
* valid JS property names, so they are returned as quoted string literals.
* @param {string} prop property name to analyze
* @returns {string} valid JS property name
*/
const propertyName = prop => {
if (SAFE_IDENTIFIER.test(prop) && !RESERVED_IDENTIFIER.has(prop)) {
return prop;
}
return JSON.stringify(prop);
};
module.exports = { SAFE_IDENTIFIER, RESERVED_IDENTIFIER, propertyName };
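// --- Illustrative sketch (not part of the upstream file):
console.log(propertyName("foo")); // foo         (safe identifier, kept as-is)
console.log(propertyName("default")); // "default"   (reserved word, quoted)
console.log(propertyName("my key")); // "my key"    (whitespace, quoted)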


@ -0,0 +1,334 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { register } = require("./serialization");
const Position = require("acorn").Position;
const SourceLocation = require("acorn").SourceLocation;
const ValidationError = require("schema-utils").ValidationError;
const {
CachedSource,
ConcatSource,
OriginalSource,
PrefixSource,
RawSource,
ReplaceSource,
SourceMapSource
} = require("webpack-sources");
/** @typedef {import("acorn").Position} Position */
/** @typedef {import("../Dependency").RealDependencyLocation} RealDependencyLocation */
/** @typedef {import("../Dependency").SourcePosition} SourcePosition */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectDeserializerContext} ObjectDeserializerContext */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectSerializerContext} ObjectSerializerContext */
const CURRENT_MODULE = "webpack/lib/util/registerExternalSerializer";
register(
CachedSource,
CURRENT_MODULE,
"webpack-sources/CachedSource",
new (class CachedSourceSerializer {
/**
* @param {CachedSource} source the cached source to be serialized
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write, writeLazy }) {
if (writeLazy) {
writeLazy(source.originalLazy());
} else {
write(source.original());
}
write(source.getCachedData());
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {CachedSource} cached source
*/
deserialize({ read }) {
const source = read();
const cachedData = read();
return new CachedSource(source, cachedData);
}
})()
);
register(
RawSource,
CURRENT_MODULE,
"webpack-sources/RawSource",
new (class RawSourceSerializer {
/**
* @param {RawSource} source the raw source to be serialized
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
write(source.buffer());
write(!source.isBuffer());
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {RawSource} raw source
*/
deserialize({ read }) {
const source = read();
const convertToString = read();
return new RawSource(source, convertToString);
}
})()
);
register(
ConcatSource,
CURRENT_MODULE,
"webpack-sources/ConcatSource",
new (class ConcatSourceSerializer {
/**
* @param {ConcatSource} source the concat source to be serialized
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
write(source.getChildren());
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {ConcatSource} concat source
*/
deserialize({ read }) {
const source = new ConcatSource();
source.addAllSkipOptimizing(read());
return source;
}
})()
);
register(
PrefixSource,
CURRENT_MODULE,
"webpack-sources/PrefixSource",
new (class PrefixSourceSerializer {
/**
* @param {PrefixSource} source the prefix source to be serialized
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
write(source.getPrefix());
write(source.original());
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {PrefixSource} prefix source
*/
deserialize({ read }) {
return new PrefixSource(read(), read());
}
})()
);
register(
ReplaceSource,
CURRENT_MODULE,
"webpack-sources/ReplaceSource",
new (class ReplaceSourceSerializer {
/**
* @param {ReplaceSource} source the replace source to be serialized
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
write(source.original());
write(source.getName());
const replacements = source.getReplacements();
write(replacements.length);
for (const repl of replacements) {
write(repl.start);
write(repl.end);
}
for (const repl of replacements) {
write(repl.content);
write(repl.name);
}
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {ReplaceSource} replace source
*/
deserialize({ read }) {
const source = new ReplaceSource(read(), read());
const len = read();
const startEndBuffer = [];
for (let i = 0; i < len; i++) {
startEndBuffer.push(read(), read());
}
let j = 0;
for (let i = 0; i < len; i++) {
source.replace(
startEndBuffer[j++],
startEndBuffer[j++],
read(),
read()
);
}
return source;
}
})()
);
register(
OriginalSource,
CURRENT_MODULE,
"webpack-sources/OriginalSource",
new (class OriginalSourceSerializer {
/**
* @param {OriginalSource} source the original source to be serialized
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
write(source.buffer());
write(source.getName());
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {OriginalSource} original source
*/
deserialize({ read }) {
const buffer = read();
const name = read();
return new OriginalSource(buffer, name);
}
})()
);
register(
SourceLocation,
CURRENT_MODULE,
"acorn/SourceLocation",
new (class SourceLocationSerializer {
/**
* @param {SourceLocation} loc the location to be serialized
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(loc, { write }) {
write(loc.start.line);
write(loc.start.column);
write(loc.end.line);
write(loc.end.column);
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {RealDependencyLocation} location
*/
deserialize({ read }) {
return {
start: {
line: read(),
column: read()
},
end: {
line: read(),
column: read()
}
};
}
})()
);
register(
Position,
CURRENT_MODULE,
"acorn/Position",
new (class PositionSerializer {
/**
* @param {Position} pos the position to be serialized
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(pos, { write }) {
write(pos.line);
write(pos.column);
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {SourcePosition} position
*/
deserialize({ read }) {
return {
line: read(),
column: read()
};
}
})()
);
register(
SourceMapSource,
CURRENT_MODULE,
"webpack-sources/SourceMapSource",
new (class SourceMapSourceSerializer {
/**
* @param {SourceMapSource} source the source map source to be serialized
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
write(source.getArgsAsBuffers());
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {SourceMapSource} source map source
*/
deserialize({ read }) {
// @ts-expect-error
return new SourceMapSource(...read());
}
})()
);
register(
ValidationError,
CURRENT_MODULE,
"schema-utils/ValidationError",
new (class ValidationErrorSerializer {
/**
* @param {ValidationError} error the validation error to be serialized
* @param {ObjectSerializerContext} context context
* @returns {void}
*/
serialize(error, { write }) {
write(error.errors);
write(error.schema);
write({
name: error.headerName,
baseDataPath: error.baseDataPath,
postFormatter: error.postFormatter
});
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {ValidationError} error
*/
deserialize({ read }) {
return new ValidationError(read(), read(), read());
}
})()
);

25
app_vue/node_modules/webpack/lib/util/removeBOM.js generated vendored Normal file

@ -0,0 +1,25 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Alexander Akait @alexander-akait
*/
"use strict";
/**
* @param {string | Buffer} strOrBuffer string or buffer
* @returns {string | Buffer} result without BOM
*/
module.exports = strOrBuffer => {
if (typeof strOrBuffer === "string" && strOrBuffer.charCodeAt(0) === 0xfeff) {
return strOrBuffer.substr(1);
} else if (
Buffer.isBuffer(strOrBuffer) &&
strOrBuffer[0] === 0xef &&
strOrBuffer[1] === 0xbb &&
strOrBuffer[2] === 0xbf
) {
return strOrBuffer.subarray(3);
}
return strOrBuffer;
};
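// --- Illustrative sketch (not part of the upstream file): a leading U+FEFF in
// a string and the UTF-8 BOM bytes EF BB BF in a buffer are both stripped.
console.log(module.exports("\uFEFFhello")); // "hello"
console.log(module.exports(Buffer.from([0xef, 0xbb, 0xbf, 0x68, 0x69]))); // <Buffer 68 69>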

695
app_vue/node_modules/webpack/lib/util/runtime.js generated vendored Normal file

@ -0,0 +1,695 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const SortableSet = require("./SortableSet");
/** @typedef {import("../Compilation")} Compilation */
/** @typedef {import("../Entrypoint").EntryOptions} EntryOptions */
/** @typedef {string | SortableSet<string> | undefined} RuntimeSpec */
/** @typedef {RuntimeSpec | boolean} RuntimeCondition */
/**
* @param {Compilation} compilation the compilation
* @param {string} name name of the entry
* @param {EntryOptions=} options optionally already received entry options
* @returns {RuntimeSpec} runtime
*/
module.exports.getEntryRuntime = (compilation, name, options) => {
let dependOn;
let runtime;
if (options) {
({ dependOn, runtime } = options);
} else {
const entry = compilation.entries.get(name);
if (!entry) return name;
({ dependOn, runtime } = entry.options);
}
if (dependOn) {
/** @type {RuntimeSpec} */
let result;
const queue = new Set(dependOn);
for (const name of queue) {
const dep = compilation.entries.get(name);
if (!dep) continue;
const { dependOn, runtime } = dep.options;
if (dependOn) {
for (const name of dependOn) {
queue.add(name);
}
} else {
result = mergeRuntimeOwned(result, runtime || name);
}
}
return result || name;
}
return runtime || name;
};
/**
* @param {RuntimeSpec} runtime runtime
* @param {(runtime: string | undefined) => void} fn functor
* @param {boolean} deterministicOrder enforce a deterministic order
* @returns {void}
*/
const forEachRuntime = (runtime, fn, deterministicOrder = false) => {
if (runtime === undefined) {
fn(undefined);
} else if (typeof runtime === "string") {
fn(runtime);
} else {
if (deterministicOrder) runtime.sort();
for (const r of runtime) {
fn(r);
}
}
};
module.exports.forEachRuntime = forEachRuntime;
/**
* @template T
* @param {SortableSet<T>} set set
* @returns {string} runtime key
*/
const getRuntimesKey = set => {
set.sort();
return Array.from(set).join("\n");
};
/**
* @param {RuntimeSpec} runtime runtime(s)
* @returns {string} key of runtimes
*/
const getRuntimeKey = runtime => {
if (runtime === undefined) return "*";
if (typeof runtime === "string") return runtime;
return runtime.getFromUnorderedCache(getRuntimesKey);
};
module.exports.getRuntimeKey = getRuntimeKey;
/**
* @param {string} key key of runtimes
* @returns {RuntimeSpec} runtime(s)
*/
const keyToRuntime = key => {
if (key === "*") return;
const items = key.split("\n");
if (items.length === 1) return items[0];
return new SortableSet(items);
};
module.exports.keyToRuntime = keyToRuntime;
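// --- Illustrative sketch (not part of the upstream file): a RuntimeSpec is
// undefined (any runtime), a single runtime name, or a sorted set of names; the
// key form is stable and usable as a Map key.
console.log(getRuntimeKey("main")); // "main"
console.log(getRuntimeKey(undefined)); // "*"
const exampleSpec = keyToRuntime("admin\nmain"); // SortableSet { "admin", "main" }
console.log(getRuntimeKey(exampleSpec)); // "admin\nmain"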
/**
* @template T
* @param {SortableSet<T>} set set
* @returns {string} runtime string
*/
const getRuntimesString = set => {
set.sort();
return Array.from(set).join("+");
};
/**
* @param {RuntimeSpec} runtime runtime(s)
* @returns {string} readable version
*/
const runtimeToString = runtime => {
if (runtime === undefined) return "*";
if (typeof runtime === "string") return runtime;
return runtime.getFromUnorderedCache(getRuntimesString);
};
module.exports.runtimeToString = runtimeToString;
/**
* @param {RuntimeCondition} runtimeCondition runtime condition
* @returns {string} readable version
*/
module.exports.runtimeConditionToString = runtimeCondition => {
if (runtimeCondition === true) return "true";
if (runtimeCondition === false) return "false";
return runtimeToString(runtimeCondition);
};
/**
* @param {RuntimeSpec} a first
* @param {RuntimeSpec} b second
* @returns {boolean} true, when they are equal
*/
const runtimeEqual = (a, b) => {
if (a === b) {
return true;
} else if (
a === undefined ||
b === undefined ||
typeof a === "string" ||
typeof b === "string"
) {
return false;
} else if (a.size !== b.size) {
return false;
}
a.sort();
b.sort();
const aIt = a[Symbol.iterator]();
const bIt = b[Symbol.iterator]();
for (;;) {
const aV = aIt.next();
if (aV.done) return true;
const bV = bIt.next();
if (aV.value !== bV.value) return false;
}
};
module.exports.runtimeEqual = runtimeEqual;
/**
* @param {RuntimeSpec} a first
* @param {RuntimeSpec} b second
* @returns {-1|0|1} compare
*/
module.exports.compareRuntime = (a, b) => {
if (a === b) {
return 0;
} else if (a === undefined) {
return -1;
} else if (b === undefined) {
return 1;
}
const aKey = getRuntimeKey(a);
const bKey = getRuntimeKey(b);
if (aKey < bKey) return -1;
if (aKey > bKey) return 1;
return 0;
};
/**
* @param {RuntimeSpec} a first
* @param {RuntimeSpec} b second
* @returns {RuntimeSpec} merged
*/
const mergeRuntime = (a, b) => {
if (a === undefined) {
return b;
} else if (b === undefined) {
return a;
} else if (a === b) {
return a;
} else if (typeof a === "string") {
if (typeof b === "string") {
const set = new SortableSet();
set.add(a);
set.add(b);
return set;
} else if (b.has(a)) {
return b;
}
const set = new SortableSet(b);
set.add(a);
return set;
}
if (typeof b === "string") {
if (a.has(b)) return a;
const set = new SortableSet(a);
set.add(b);
return set;
}
const set = new SortableSet(a);
for (const item of b) set.add(item);
if (set.size === a.size) return a;
return set;
};
module.exports.mergeRuntime = mergeRuntime;
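// Illustrative behaviour (editorial sketch, not part of the original module):
//   mergeRuntime("a", "a")                          // -> "a"
//   mergeRuntime("a", "b")                          // -> SortableSet { "a", "b" }
//   mergeRuntime("a", new SortableSet(["a", "b"]))  // -> the existing set (it already contains "a")
//   mergeRuntime(undefined, "a")                    // -> "a"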
/**
* @param {RuntimeCondition} a first
* @param {RuntimeCondition} b second
* @param {RuntimeSpec} runtime full runtime
* @returns {RuntimeCondition} result
*/
module.exports.mergeRuntimeCondition = (a, b, runtime) => {
if (a === false) return b;
if (b === false) return a;
if (a === true || b === true) return true;
const merged = mergeRuntime(a, b);
if (merged === undefined) return;
if (typeof merged === "string") {
if (typeof runtime === "string" && merged === runtime) return true;
return merged;
}
if (typeof runtime === "string" || runtime === undefined) return merged;
if (merged.size === runtime.size) return true;
return merged;
};
/**
* @param {RuntimeSpec | true} a first
* @param {RuntimeSpec | true} b second
* @param {RuntimeSpec} runtime full runtime
* @returns {RuntimeSpec | true} result
*/
module.exports.mergeRuntimeConditionNonFalse = (a, b, runtime) => {
if (a === true || b === true) return true;
const merged = mergeRuntime(a, b);
if (merged === undefined) return;
if (typeof merged === "string") {
if (typeof runtime === "string" && merged === runtime) return true;
return merged;
}
if (typeof runtime === "string" || runtime === undefined) return merged;
if (merged.size === runtime.size) return true;
return merged;
};
/**
* @param {RuntimeSpec} a first (may be modified)
* @param {RuntimeSpec} b second
* @returns {RuntimeSpec} merged
*/
const mergeRuntimeOwned = (a, b) => {
if (b === undefined) {
return a;
} else if (a === b) {
return a;
} else if (a === undefined) {
if (typeof b === "string") {
return b;
}
return new SortableSet(b);
} else if (typeof a === "string") {
if (typeof b === "string") {
const set = new SortableSet();
set.add(a);
set.add(b);
return set;
}
const set = new SortableSet(b);
set.add(a);
return set;
}
if (typeof b === "string") {
a.add(b);
return a;
}
for (const item of b) a.add(item);
return a;
};
module.exports.mergeRuntimeOwned = mergeRuntimeOwned;
/**
* @param {RuntimeSpec} a first
* @param {RuntimeSpec} b second
* @returns {RuntimeSpec} merged
*/
module.exports.intersectRuntime = (a, b) => {
if (a === undefined) {
return b;
} else if (b === undefined) {
return a;
} else if (a === b) {
return a;
} else if (typeof a === "string") {
if (typeof b === "string") {
return;
} else if (b.has(a)) {
return a;
}
return;
}
if (typeof b === "string") {
if (a.has(b)) return b;
return;
}
const set = new SortableSet();
for (const item of b) {
if (a.has(item)) set.add(item);
}
if (set.size === 0) return;
if (set.size === 1) {
const [item] = set;
return item;
}
return set;
};
/**
* @param {RuntimeSpec} a first
* @param {RuntimeSpec} b second
* @returns {RuntimeSpec} result
*/
const subtractRuntime = (a, b) => {
if (a === undefined) {
return;
} else if (b === undefined) {
return a;
} else if (a === b) {
return;
} else if (typeof a === "string") {
if (typeof b === "string") {
return a;
} else if (b.has(a)) {
return;
}
return a;
}
if (typeof b === "string") {
if (!a.has(b)) return a;
if (a.size === 2) {
for (const item of a) {
if (item !== b) return item;
}
}
const set = new SortableSet(a);
set.delete(b);
return set;
}
const set = new SortableSet();
for (const item of a) {
if (!b.has(item)) set.add(item);
}
if (set.size === 0) return;
if (set.size === 1) {
const [item] = set;
return item;
}
return set;
};
module.exports.subtractRuntime = subtractRuntime;
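// Illustrative behaviour of intersectRuntime/subtractRuntime (editorial sketch, not part of the original module):
//   intersectRuntime(new SortableSet(["a", "b"]), new SortableSet(["b", "c"]))  // -> "b"
//   subtractRuntime(new SortableSet(["a", "b"]), "a")                           // -> "b"
//   subtractRuntime("a", "a")                                                   // -> undefined
//   subtractRuntime("a", new SortableSet(["b"]))                                // -> "a"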
/**
* @param {RuntimeCondition} a first
* @param {RuntimeCondition} b second
* @param {RuntimeSpec} runtime runtime
* @returns {RuntimeCondition} result
*/
module.exports.subtractRuntimeCondition = (a, b, runtime) => {
if (b === true) return false;
if (b === false) return a;
if (a === false) return false;
const result = subtractRuntime(a === true ? runtime : a, b);
return result === undefined ? false : result;
};
/**
* @param {RuntimeSpec} runtime runtime
* @param {(runtime?: RuntimeSpec) => boolean} filter filter function
* @returns {boolean | RuntimeSpec} true/false if filter is constant for all runtimes, otherwise runtimes that are active
*/
module.exports.filterRuntime = (runtime, filter) => {
if (runtime === undefined) return filter();
if (typeof runtime === "string") return filter(runtime);
let some = false;
let every = true;
let result;
for (const r of runtime) {
const v = filter(r);
if (v) {
some = true;
result = mergeRuntimeOwned(result, r);
} else {
every = false;
}
}
if (!some) return false;
if (every) return true;
return result;
};
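// Illustrative behaviour (editorial sketch, not part of the original module):
//   filterRuntime(new SortableSet(["a", "b"]), r => r === "a")  // -> "a"   (only some runtimes match)
//   filterRuntime(new SortableSet(["a", "b"]), () => true)      // -> true  (constant for all runtimes)
//   filterRuntime(new SortableSet(["a", "b"]), () => false)     // -> false
//   filterRuntime("main", r => r === "main")                    // -> true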
/**
* @template T
* @typedef {Map<string, T>} RuntimeSpecMapInnerMap
*/
/**
* @template T
* @template [R=T]
*/
class RuntimeSpecMap {
/**
* @param {RuntimeSpecMap<T, R>=} clone copy from this
*/
constructor(clone) {
/** @type {0 | 1 | 2} */
this._mode = clone ? clone._mode : 0; // 0 = empty, 1 = single entry, 2 = map
/** @type {RuntimeSpec} */
this._singleRuntime = clone ? clone._singleRuntime : undefined;
/** @type {R | undefined} */
this._singleValue = clone ? clone._singleValue : undefined;
/** @type {RuntimeSpecMapInnerMap<R> | undefined} */
this._map = clone && clone._map ? new Map(clone._map) : undefined;
}
/**
* @param {RuntimeSpec} runtime the runtimes
* @returns {R | undefined} value
*/
get(runtime) {
switch (this._mode) {
case 0:
return;
case 1:
return runtimeEqual(this._singleRuntime, runtime)
? this._singleValue
: undefined;
default:
return /** @type {RuntimeSpecMapInnerMap<R>} */ (this._map).get(
getRuntimeKey(runtime)
);
}
}
/**
* @param {RuntimeSpec} runtime the runtimes
* @returns {boolean} true, when the runtime is stored
*/
has(runtime) {
switch (this._mode) {
case 0:
return false;
case 1:
return runtimeEqual(this._singleRuntime, runtime);
default:
return /** @type {RuntimeSpecMapInnerMap<R>} */ (this._map).has(
getRuntimeKey(runtime)
);
}
}
/**
* @param {RuntimeSpec} runtime the runtimes
* @param {R} value the value
*/
set(runtime, value) {
switch (this._mode) {
case 0:
this._mode = 1;
this._singleRuntime = runtime;
this._singleValue = value;
break;
case 1:
if (runtimeEqual(this._singleRuntime, runtime)) {
this._singleValue = value;
break;
}
this._mode = 2;
this._map = new Map();
this._map.set(
getRuntimeKey(this._singleRuntime),
/** @type {R} */ (this._singleValue)
);
this._singleRuntime = undefined;
this._singleValue = undefined;
/* falls through */
default:
/** @type {RuntimeSpecMapInnerMap<R>} */
(this._map).set(getRuntimeKey(runtime), value);
}
}
/**
* @param {RuntimeSpec} runtime the runtimes
* @param {() => R} computer function to compute the value
* @returns {R} the new value
*/
provide(runtime, computer) {
switch (this._mode) {
case 0:
this._mode = 1;
this._singleRuntime = runtime;
return (this._singleValue = computer());
case 1: {
if (runtimeEqual(this._singleRuntime, runtime)) {
return /** @type {R} */ (this._singleValue);
}
this._mode = 2;
this._map = new Map();
this._map.set(
getRuntimeKey(this._singleRuntime),
/** @type {R} */
(this._singleValue)
);
this._singleRuntime = undefined;
this._singleValue = undefined;
const newValue = computer();
this._map.set(getRuntimeKey(runtime), newValue);
return newValue;
}
default: {
const key = getRuntimeKey(runtime);
const value =
/** @type {RuntimeSpecMapInnerMap<R>} */
(this._map).get(key);
if (value !== undefined) return value;
const newValue = computer();
/** @type {RuntimeSpecMapInnerMap<R>} */
(this._map).set(key, newValue);
return newValue;
}
}
}
/**
* @param {RuntimeSpec} runtime the runtimes
*/
delete(runtime) {
switch (this._mode) {
case 0:
return;
case 1:
if (runtimeEqual(this._singleRuntime, runtime)) {
this._mode = 0;
this._singleRuntime = undefined;
this._singleValue = undefined;
}
return;
default:
/** @type {RuntimeSpecMapInnerMap<R>} */
(this._map).delete(getRuntimeKey(runtime));
}
}
/**
* @param {RuntimeSpec} runtime the runtimes
* @param {(value: R | undefined) => R} fn function to update the value
*/
update(runtime, fn) {
switch (this._mode) {
case 0:
throw new Error("runtime passed to update must exist");
case 1: {
if (runtimeEqual(this._singleRuntime, runtime)) {
this._singleValue = fn(this._singleValue);
break;
}
const newValue = fn(undefined);
if (newValue !== undefined) {
this._mode = 2;
this._map = new Map();
this._map.set(
getRuntimeKey(this._singleRuntime),
/** @type {R} */
(this._singleValue)
);
this._singleRuntime = undefined;
this._singleValue = undefined;
this._map.set(getRuntimeKey(runtime), newValue);
}
break;
}
default: {
const key = getRuntimeKey(runtime);
const oldValue =
/** @type {RuntimeSpecMapInnerMap<R>} */
(this._map).get(key);
const newValue = fn(oldValue);
if (newValue !== oldValue)
/** @type {RuntimeSpecMapInnerMap<R>} */
(this._map).set(key, newValue);
}
}
}
keys() {
switch (this._mode) {
case 0:
return [];
case 1:
return [this._singleRuntime];
default:
return Array.from(
/** @type {RuntimeSpecMapInnerMap<R>} */
(this._map).keys(),
keyToRuntime
);
}
}
/**
* @returns {IterableIterator<R>} values
*/
values() {
switch (this._mode) {
case 0:
return [][Symbol.iterator]();
case 1:
return [/** @type {R} */ (this._singleValue)][Symbol.iterator]();
default:
return /** @type {RuntimeSpecMapInnerMap<R>} */ (this._map).values();
}
}
get size() {
if (/** @type {number} */ (this._mode) <= 1) {
return /** @type {number} */ (this._mode);
}
return /** @type {RuntimeSpecMapInnerMap<R>} */ (this._map).size;
}
}
module.exports.RuntimeSpecMap = RuntimeSpecMap;
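// Illustrative usage (editorial sketch, not part of the original module):
//   const map = new RuntimeSpecMap();
//   map.set("main", 1);                        // stored as a single entry (mode 1)
//   map.set(new SortableSet(["a", "b"]), 2);   // upgrades to an internal Map keyed by runtime key (mode 2)
//   map.get("main");                           // -> 1
//   map.provide("other", () => 3);             // -> 3 (computed once and stored)
//   map.size;                                  // -> 3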
class RuntimeSpecSet {
/**
* @param {Iterable<RuntimeSpec>=} iterable iterable
*/
constructor(iterable) {
/** @type {Map<string, RuntimeSpec>} */
this._map = new Map();
if (iterable) {
for (const item of iterable) {
this.add(item);
}
}
}
/**
* @param {RuntimeSpec} runtime runtime
*/
add(runtime) {
this._map.set(getRuntimeKey(runtime), runtime);
}
/**
* @param {RuntimeSpec} runtime runtime
* @returns {boolean} true, when the runtime exists
*/
has(runtime) {
return this._map.has(getRuntimeKey(runtime));
}
/**
* @returns {IterableIterator<RuntimeSpec>} iterable iterator
*/
[Symbol.iterator]() {
return this._map.values();
}
get size() {
return this._map.size;
}
}
module.exports.RuntimeSpecSet = RuntimeSpecSet;
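// Illustrative usage (editorial sketch, not part of the original module):
//   const set = new RuntimeSpecSet();
//   set.add("main");
//   set.add("main");                           // deduplicated via getRuntimeKey
//   set.add(new SortableSet(["a", "b"]));
//   set.size;                                  // -> 2
//   set.has(new SortableSet(["b", "a"]));      // -> true (same key "a\nb")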

602
app_vue/node_modules/webpack/lib/util/semver.js generated vendored Normal file
View File

@ -0,0 +1,602 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../RuntimeTemplate")} RuntimeTemplate */
/** @typedef {string | number | undefined} SemVerRangeItem */
/** @typedef {(SemVerRangeItem | SemVerRangeItem[])[]} SemVerRange */
/**
* @param {string} str version string
* @returns {SemVerRange} parsed version
*/
const parseVersion = str => {
/**
* @param {string} str str
* @returns {(string | number)[]} result
*/
var splitAndConvert = function (str) {
return str.split(".").map(function (item) {
// eslint-disable-next-line eqeqeq
return +item == /** @type {EXPECTED_ANY} */ (item) ? +item : item;
});
};
var match =
/** @type {RegExpExecArray} */
(/^([^-+]+)?(?:-([^+]+))?(?:\+(.+))?$/.exec(str));
/** @type {(string | number | undefined | [])[]} */
var ver = match[1] ? splitAndConvert(match[1]) : [];
if (match[2]) {
ver.length++;
ver.push.apply(ver, splitAndConvert(match[2]));
}
if (match[3]) {
ver.push([]);
ver.push.apply(ver, splitAndConvert(match[3]));
}
return ver;
};
module.exports.parseVersion = parseVersion;
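// Illustrative parses (editorial sketch, not part of the original module):
//   parseVersion("1.2.3")          // -> [1, 2, 3]
//   parseVersion("1.2.3-beta.1")   // -> [1, 2, 3, <1 empty item>, "beta", 1]  (hole marks the prerelease)
//   parseVersion("1.2.3+build.5")  // -> [1, 2, 3, [], "build", 5]             ([] marks the build metadata)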
/* eslint-disable eqeqeq */
/**
* @param {string} a version
* @param {string} b version
* @returns {boolean} true, iff a < b
*/
const versionLt = (a, b) => {
// @ts-expect-error
a = parseVersion(a);
// @ts-expect-error
b = parseVersion(b);
var i = 0;
for (;;) {
// a \ b       EOA      object   undefined  number   string
// EOA         a == b   a < b    b < a      a < b    a < b
// object      b < a    (0)      b < a      a < b    a < b
// undefined   a < b    a < b    (0)        a < b    a < b
// number      b < a    b < a    b < a      (1)      a < b
// string      b < a    b < a    b < a      b < a    (1)
// EOA end of array
// (0) continue on
// (1) compare them via "<"
// Handles first row in table
if (i >= a.length) return i < b.length && (typeof b[i])[0] != "u";
var aValue = a[i];
var aType = (typeof aValue)[0];
// Handles first column in table
if (i >= b.length) return aType == "u";
var bValue = b[i];
var bType = (typeof bValue)[0];
if (aType == bType) {
if (aType != "o" && aType != "u" && aValue != bValue) {
return aValue < bValue;
}
i++;
} else {
// Handles remaining cases
if (aType == "o" && bType == "n") return true;
return bType == "s" || aType == "u";
}
}
};
/* eslint-enable eqeqeq */
module.exports.versionLt = versionLt;
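// Illustrative comparisons (editorial sketch, not part of the original module):
//   versionLt("1.2.3", "1.2.4")       // -> true
//   versionLt("1.2.3-beta", "1.2.3")  // -> true  (a prerelease sorts before the release)
//   versionLt("1.2.3", "1.2.3")       // -> false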
/**
* @param {string} str range string
* @returns {SemVerRange} parsed range
*/
module.exports.parseRange = str => {
/**
* @param {string} str str
* @returns {(string | number)[]} result
*/
const splitAndConvert = str => {
return str
.split(".")
.map(item => (item !== "NaN" && `${+item}` === item ? +item : item));
};
// see https://docs.npmjs.com/misc/semver#range-grammar for grammar
/**
* @param {string} str str
* @returns {SemVerRangeItem[]} parsed partial version
*/
const parsePartial = str => {
const match =
/** @type {RegExpExecArray} */
(/^([^-+]+)?(?:-([^+]+))?(?:\+(.+))?$/.exec(str));
/** @type {SemVerRangeItem[]} */
const ver = match[1] ? [0, ...splitAndConvert(match[1])] : [0];
if (match[2]) {
ver.length++;
ver.push.apply(ver, splitAndConvert(match[2]));
}
// remove trailing any matchers
let last = ver[ver.length - 1];
while (
ver.length &&
(last === undefined || /^[*xX]$/.test(/** @type {string} */ (last)))
) {
ver.pop();
last = ver[ver.length - 1];
}
return ver;
};
/**
 * @param {SemVerRangeItem[]} range range
 * @returns {SemVerRangeItem[]} fixed range
 */
const toFixed = range => {
if (range.length === 1) {
// Special case for "*" is "x.x.x" instead of "="
return [0];
} else if (range.length === 2) {
// Special case for "1" is "1.x.x" instead of "=1"
return [1, ...range.slice(1)];
} else if (range.length === 3) {
// Special case for "1.2" is "1.2.x" instead of "=1.2"
return [2, ...range.slice(1)];
}
return [range.length, ...range.slice(1)];
};
/**
 * @param {SemVerRangeItem[]} range range
 * @returns {SemVerRangeItem[]} negated range
 */
const negate = range => {
return [-(/** @type { [number]} */ (range)[0]) - 1, ...range.slice(1)];
};
/**
* @param {string} str str
* @returns {SemVerRange} parsed simple range
*/
const parseSimple = str => {
// simple ::= primitive | partial | tilde | caret
// primitive ::= ( '<' | '>' | '>=' | '<=' | '=' | '!' ) ( ' ' ) * partial
// tilde ::= '~' ( ' ' ) * partial
// caret ::= '^' ( ' ' ) * partial
const match = /^(\^|~|<=|<|>=|>|=|v|!)/.exec(str);
const start = match ? match[0] : "";
const remainder = parsePartial(
start.length ? str.slice(start.length).trim() : str.trim()
);
switch (start) {
case "^":
if (remainder.length > 1 && remainder[1] === 0) {
if (remainder.length > 2 && remainder[2] === 0) {
return [3, ...remainder.slice(1)];
}
return [2, ...remainder.slice(1)];
}
return [1, ...remainder.slice(1)];
case "~":
if (remainder.length === 2 && remainder[0] === 0) {
return [1, ...remainder.slice(1)];
}
return [2, ...remainder.slice(1)];
case ">=":
return remainder;
case "=":
case "v":
case "":
return toFixed(remainder);
case "<":
return negate(remainder);
case ">": {
// and( >=, not( = ) ) => >=, =, not, and
const fixed = toFixed(remainder);
// eslint-disable-next-line no-sparse-arrays
return [, fixed, 0, remainder, 2];
}
case "<=":
// or( <, = ) => <, =, or
// eslint-disable-next-line no-sparse-arrays
return [, toFixed(remainder), negate(remainder), 1];
case "!": {
// not =
const fixed = toFixed(remainder);
// eslint-disable-next-line no-sparse-arrays
return [, fixed, 0];
}
default:
throw new Error("Unexpected start value");
}
};
/**
 * @param {SemVerRangeItem[][]} items items
 * @param {number} fn combining operator (1 = or, 2 = and)
 * @returns {SemVerRange} result
 */
const combine = (items, fn) => {
if (items.length === 1) return items[0];
const arr = [];
for (const item of items.slice().reverse()) {
if (0 in item) {
arr.push(item);
} else {
arr.push(...item.slice(1));
}
}
// eslint-disable-next-line no-sparse-arrays
return [, ...arr, ...items.slice(1).map(() => fn)];
};
/**
* @param {string} str str
* @returns {SemVerRange} parsed range
*/
const parseRange = str => {
// range ::= hyphen | simple ( ' ' ( ' ' ) * simple ) * | ''
// hyphen ::= partial ( ' ' ) * ' - ' ( ' ' ) * partial
const items = str.split(/\s+-\s+/);
if (items.length === 1) {
str = str.trim();
/** @type {SemVerRangeItem[][]} */
const items = [];
const r = /[-0-9A-Za-z]\s+/g;
var start = 0;
var match;
while ((match = r.exec(str))) {
const end = match.index + 1;
items.push(
/** @type {SemVerRangeItem[]} */
(parseSimple(str.slice(start, end).trim()))
);
start = end;
}
items.push(
/** @type {SemVerRangeItem[]} */
(parseSimple(str.slice(start).trim()))
);
return combine(items, 2);
}
const a = parsePartial(items[0]);
const b = parsePartial(items[1]);
// >=a <=b => and( >=a, or( <b, =b ) ) => >=a, <b, =b, or, and
// eslint-disable-next-line no-sparse-arrays
return [, toFixed(b), negate(b), 1, a, 2];
};
/**
* @param {string} str str
* @returns {SemVerRange} parsed range set
*/
const parseLogicalOr = str => {
// range-set ::= range ( logical-or range ) *
// logical-or ::= ( ' ' ) * '||' ( ' ' ) *
const items =
/** @type {SemVerRangeItem[][]} */
(str.split(/\s*\|\|\s*/).map(parseRange));
return combine(items, 1);
};
return parseLogicalOr(str);
};
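// Illustrative parses (editorial sketch, not part of the original module); the leading number
// encodes the operator (>=, <, ^, ~, = / !=) or, for exact matches, how many positions are fixed:
//   parseRange("^1.2.3")  // -> [1, 1, 2, 3]
//   parseRange("1.2")     // -> [2, 1, 2]   ("1.2" means "1.2.x")
//   parseRange("*")       // -> [0]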
/* eslint-disable eqeqeq */
/**
* @param {SemVerRange} range range
* @returns {string} readable version of the range
*/
const rangeToString = range => {
var fixCount = /** @type {number} */ (range[0]);
var str = "";
if (range.length === 1) {
return "*";
} else if (fixCount + 0.5) {
str +=
fixCount == 0
? ">="
: fixCount == -1
? "<"
: fixCount == 1
? "^"
: fixCount == 2
? "~"
: fixCount > 0
? "="
: "!=";
var needDot = 1;
for (var i = 1; i < range.length; i++) {
var item = range[i];
var t = (typeof item)[0];
needDot--;
str +=
t == "u"
? // undefined: prerelease marker, add an "-"
"-"
: // number or string: add the item, set flag to add an "." between two of them
(needDot > 0 ? "." : "") + ((needDot = 2), item);
}
return str;
}
/** @type {string[]} */
var stack = [];
// eslint-disable-next-line no-redeclare
for (var i = 1; i < range.length; i++) {
// eslint-disable-next-line no-redeclare
var item = range[i];
stack.push(
item === 0
? "not(" + pop() + ")"
: item === 1
? "(" + pop() + " || " + pop() + ")"
: item === 2
? stack.pop() + " " + stack.pop()
: rangeToString(/** @type {SemVerRange} */ (item))
);
}
return pop();
function pop() {
return /** @type {string} */ (stack.pop()).replace(/^\((.+)\)$/, "$1");
}
};
module.exports.rangeToString = rangeToString;
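// Illustrative round-trips (editorial sketch, not part of the original module):
//   rangeToString(parseRange("^1.2.3"))  // -> "^1.2.3"
//   rangeToString(parseRange(">=1.2"))   // -> ">=1.2"
//   rangeToString(parseRange("*"))       // -> "*"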
/**
* @param {SemVerRange} range version range
* @param {string} version the version
* @returns {boolean} if version satisfy the range
*/
const satisfy = (range, version) => {
if (0 in range) {
// @ts-expect-error
version = parseVersion(version);
var fixCount = /** @type {number} */ (range[0]);
// when negated is set it will check for < instead of >=
var negated = fixCount < 0;
if (negated) fixCount = -fixCount - 1;
for (var i = 0, j = 1, isEqual = true; ; j++, i++) {
// cspell:word nequal nequ
// when isEqual = true:
// range \ version  EOA/object  undefined  number     string
// EOA              equal       block      big-ver    big-ver
// undefined        bigger      next       big-ver    big-ver
// number           smaller     block      cmp        big-cmp
// fixed number     smaller     block      cmp-fix    differ
// string           smaller     block      differ     cmp
// fixed string     smaller     block      small-cmp  cmp-fix
// when isEqual = false:
// range \ version  EOA/object  undefined  number     string
// EOA              nequal      block      next-ver   next-ver
// undefined        nequal      block      next-ver   next-ver
// number           nequal      block      next       next
// fixed number     nequal      block      next       next      (this never happens)
// string           nequal      block      next       next
// fixed string     nequal      block      next       next      (this never happens)
// EOA end of array
// equal (version is equal range):
// when !negated: return true,
// when negated: return false
// bigger (version is bigger as range):
// when fixed: return false,
// when !negated: return true,
// when negated: return false,
// smaller (version is smaller as range):
// when !negated: return false,
// when negated: return true
// nequal (version is not equal range (> resp <)): return true
// block (version is in different prerelease area): return false
// differ (version is different from fixed range (string vs. number)): return false
// next: continues to the next items
// next-ver: when fixed: return false, continues to the next item only for the version, sets isEqual=false
// big-ver: when fixed || negated: return false, continues to the next item only for the version, sets isEqual=false
// next-nequ: continues to the next items, sets isEqual=false
// cmp (negated === false): version < range => return false, version > range => next-nequ, else => next
// cmp (negated === true): version > range => return false, version < range => next-nequ, else => next
// cmp-fix: version == range => next, else => return false
// big-cmp: when negated => return false, else => next-nequ
// small-cmp: when negated => next-nequ, else => return false
var rangeType =
/** @type {"s" | "n" | "u" | ""} */
(j < range.length ? (typeof range[j])[0] : "");
/** @type {number | string | undefined} */
var versionValue;
/** @type {"n" | "s" | "u" | "o" | undefined} */
var versionType;
// Handles first column in both tables (end of version or object)
if (
i >= version.length ||
((versionValue = version[i]),
(versionType = /** @type {"n" | "s" | "u" | "o"} */ (
(typeof versionValue)[0]
)) == "o")
) {
// Handles nequal
if (!isEqual) return true;
// Handles bigger
if (rangeType == "u") return j > fixCount && !negated;
// Handles equal and smaller: (range === EOA) XOR negated
return (rangeType == "") != negated; // equal + smaller
}
// Handles second column in both tables (version = undefined)
if (versionType == "u") {
if (!isEqual || rangeType != "u") {
return false;
}
}
// switch between first and second table
else if (isEqual) {
// Handle diagonal
if (rangeType == versionType) {
if (j <= fixCount) {
// Handles "cmp-fix" cases
if (versionValue != range[j]) {
return false;
}
} else {
// Handles "cmp" cases
if (
negated
? versionValue > /** @type {(number | string)[]} */ (range)[j]
: versionValue < /** @type {(number | string)[]} */ (range)[j]
) {
return false;
}
if (versionValue != range[j]) isEqual = false;
}
}
// Handle big-ver
else if (rangeType != "s" && rangeType != "n") {
if (negated || j <= fixCount) return false;
isEqual = false;
j--;
}
// Handle differ, big-cmp and small-cmp
else if (j <= fixCount || versionType < rangeType != negated) {
return false;
} else {
isEqual = false;
}
} else {
// Handles all "next-ver" cases in the second table
// eslint-disable-next-line no-lonely-if
if (rangeType != "s" && rangeType != "n") {
isEqual = false;
j--;
}
// next is applied by default
}
}
}
/** @type {(boolean | number)[]} */
var stack = [];
var p = stack.pop.bind(stack);
// eslint-disable-next-line no-redeclare
for (var i = 1; i < range.length; i++) {
var item = /** @type {SemVerRangeItem[] | 0 | 1 | 2} */ (range[i]);
stack.push(
item == 1
? /** @type {() => number} */ (p)() | /** @type {() => number} */ (p)()
: item == 2
? /** @type {() => number} */ (p)() &
/** @type {() => number} */ (p)()
: item
? satisfy(item, version)
: !p()
);
}
return !!p();
};
/* eslint-enable eqeqeq */
module.exports.satisfy = satisfy;
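// Illustrative checks (editorial sketch, not part of the original module):
//   satisfy(parseRange("^1.2.3"), "1.4.0")  // -> true
//   satisfy(parseRange("^1.2.3"), "2.0.0")  // -> false
//   satisfy(parseRange("~1.2.3"), "1.2.9")  // -> true
//   satisfy(parseRange("~1.2.3"), "1.3.0")  // -> false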
/**
* @param {SemVerRange | string | number | false | undefined} json json value (possibly a holey array)
* @returns {string} stringified json, preserving holes
*/
module.exports.stringifyHoley = json => {
switch (typeof json) {
case "undefined":
return "";
case "object":
if (Array.isArray(json)) {
let str = "[";
for (let i = 0; i < json.length; i++) {
if (i !== 0) str += ",";
str += this.stringifyHoley(json[i]);
}
str += "]";
return str;
}
return JSON.stringify(json);
default:
return JSON.stringify(json);
}
};
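// Illustrative output (editorial sketch, not part of the original module); holes from
// parseVersion survive as empty slots in the JSON-like string:
//   stringifyHoley(parseVersion("1.2.3-beta.1"))  // -> '[1,2,3,,"beta",1]'
//   stringifyHoley(undefined)                     // -> ""
//   stringifyHoley("1.2.3")                       // -> '"1.2.3"'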
//#region runtime code: parseVersion
/**
* @param {RuntimeTemplate} runtimeTemplate the runtime template
* @returns {string} runtime code
*/
exports.parseVersionRuntimeCode = runtimeTemplate =>
`var parseVersion = ${runtimeTemplate.basicFunction("str", [
"// see webpack/lib/util/semver.js for original code",
`var p=${runtimeTemplate.supportsArrowFunction() ? "p=>" : "function(p)"}{return p.split(".").map((${runtimeTemplate.supportsArrowFunction() ? "p=>" : "function(p)"}{return+p==p?+p:p}))},n=/^([^-+]+)?(?:-([^+]+))?(?:\\+(.+))?$/.exec(str),r=n[1]?p(n[1]):[];return n[2]&&(r.length++,r.push.apply(r,p(n[2]))),n[3]&&(r.push([]),r.push.apply(r,p(n[3]))),r;`
])}`;
//#endregion
//#region runtime code: versionLt
/**
* @param {RuntimeTemplate} runtimeTemplate the runtime template
* @returns {string} runtime code
*/
exports.versionLtRuntimeCode = runtimeTemplate =>
`var versionLt = ${runtimeTemplate.basicFunction("a, b", [
"// see webpack/lib/util/semver.js for original code",
'a=parseVersion(a),b=parseVersion(b);for(var r=0;;){if(r>=a.length)return r<b.length&&"u"!=(typeof b[r])[0];var e=a[r],n=(typeof e)[0];if(r>=b.length)return"u"==n;var t=b[r],f=(typeof t)[0];if(n!=f)return"o"==n&&"n"==f||("s"==f||"u"==n);if("o"!=n&&"u"!=n&&e!=t)return e<t;r++}'
])}`;
//#endregion
//#region runtime code: rangeToString
/**
* @param {RuntimeTemplate} runtimeTemplate the runtime template
* @returns {string} runtime code
*/
exports.rangeToStringRuntimeCode = runtimeTemplate =>
`var rangeToString = ${runtimeTemplate.basicFunction("range", [
"// see webpack/lib/util/semver.js for original code",
'var r=range[0],n="";if(1===range.length)return"*";if(r+.5){n+=0==r?">=":-1==r?"<":1==r?"^":2==r?"~":r>0?"=":"!=";for(var e=1,a=1;a<range.length;a++){e--,n+="u"==(typeof(t=range[a]))[0]?"-":(e>0?".":"")+(e=2,t)}return n}var g=[];for(a=1;a<range.length;a++){var t=range[a];g.push(0===t?"not("+o()+")":1===t?"("+o()+" || "+o()+")":2===t?g.pop()+" "+g.pop():rangeToString(t))}return o();function o(){return g.pop().replace(/^\\((.+)\\)$/,"$1")}'
])}`;
//#endregion
//#region runtime code: satisfy
/**
* @param {RuntimeTemplate} runtimeTemplate the runtime template
* @returns {string} runtime code
*/
exports.satisfyRuntimeCode = runtimeTemplate =>
`var satisfy = ${runtimeTemplate.basicFunction("range, version", [
"// see webpack/lib/util/semver.js for original code",
'if(0 in range){version=parseVersion(version);var e=range[0],r=e<0;r&&(e=-e-1);for(var n=0,i=1,a=!0;;i++,n++){var f,s,g=i<range.length?(typeof range[i])[0]:"";if(n>=version.length||"o"==(s=(typeof(f=version[n]))[0]))return!a||("u"==g?i>e&&!r:""==g!=r);if("u"==s){if(!a||"u"!=g)return!1}else if(a)if(g==s)if(i<=e){if(f!=range[i])return!1}else{if(r?f>range[i]:f<range[i])return!1;f!=range[i]&&(a=!1)}else if("s"!=g&&"n"!=g){if(r||i<=e)return!1;a=!1,i--}else{if(i<=e||s<g!=r)return!1;a=!1}else"s"!=g&&"n"!=g&&(a=!1,i--)}}var t=[],o=t.pop.bind(t);for(n=1;n<range.length;n++){var u=range[n];t.push(1==u?o()|o():2==u?o()&o():u?satisfy(u,version):!o())}return!!o();'
])}`;
//#endregion

150
app_vue/node_modules/webpack/lib/util/serialization.js generated vendored Normal file
View File

@ -0,0 +1,150 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
const { DEFAULTS } = require("../config/defaults");
const memoize = require("./memoize");
/** @typedef {import("../serialization/BinaryMiddleware").MEASURE_END_OPERATION_TYPE} MEASURE_END_OPERATION */
/** @typedef {import("../serialization/BinaryMiddleware").MEASURE_START_OPERATION_TYPE} MEASURE_START_OPERATION */
/** @typedef {typeof import("../util/Hash")} Hash */
/** @typedef {import("../util/fs").IntermediateFileSystem} IntermediateFileSystem */
/**
* @template D, S, C
* @typedef {import("../serialization/Serializer")<D, S, C>} Serializer
*/
const getBinaryMiddleware = memoize(() =>
require("../serialization/BinaryMiddleware")
);
const getObjectMiddleware = memoize(() =>
require("../serialization/ObjectMiddleware")
);
const getSingleItemMiddleware = memoize(() =>
require("../serialization/SingleItemMiddleware")
);
const getSerializer = memoize(() => require("../serialization/Serializer"));
const getSerializerMiddleware = memoize(() =>
require("../serialization/SerializerMiddleware")
);
const getBinaryMiddlewareInstance = memoize(
() => new (getBinaryMiddleware())()
);
const registerSerializers = memoize(() => {
require("./registerExternalSerializer");
// Load internal paths with a relative require
// This allows bundling all internal serializers
const internalSerializables = require("./internalSerializables");
getObjectMiddleware().registerLoader(/^webpack\/lib\//, req => {
const loader =
internalSerializables[
/** @type {keyof import("./internalSerializables")} */
(req.slice("webpack/lib/".length))
];
if (loader) {
loader();
} else {
// eslint-disable-next-line no-console
console.warn(`${req} not found in internalSerializables`);
}
return true;
});
});
/**
* @type {Serializer<EXPECTED_ANY, EXPECTED_ANY, EXPECTED_ANY>}
*/
let buffersSerializer;
// Expose serialization API
module.exports = {
get register() {
return getObjectMiddleware().register;
},
get registerLoader() {
return getObjectMiddleware().registerLoader;
},
get registerNotSerializable() {
return getObjectMiddleware().registerNotSerializable;
},
get NOT_SERIALIZABLE() {
return getObjectMiddleware().NOT_SERIALIZABLE;
},
/** @type {MEASURE_START_OPERATION} */
get MEASURE_START_OPERATION() {
return getBinaryMiddleware().MEASURE_START_OPERATION;
},
/** @type {MEASURE_END_OPERATION} */
get MEASURE_END_OPERATION() {
return getBinaryMiddleware().MEASURE_END_OPERATION;
},
get buffersSerializer() {
if (buffersSerializer !== undefined) return buffersSerializer;
registerSerializers();
const Serializer = getSerializer();
const binaryMiddleware = getBinaryMiddlewareInstance();
const SerializerMiddleware = getSerializerMiddleware();
const SingleItemMiddleware = getSingleItemMiddleware();
return /** @type {Serializer<EXPECTED_ANY, EXPECTED_ANY, EXPECTED_ANY>} */ (
buffersSerializer = new Serializer([
new SingleItemMiddleware(),
new (getObjectMiddleware())(context => {
if ("write" in context) {
context.writeLazy = value => {
context.write(
SerializerMiddleware.createLazy(value, binaryMiddleware)
);
};
}
}, DEFAULTS.HASH_FUNCTION),
binaryMiddleware
])
);
},
/**
* @template D, S, C
* @param {IntermediateFileSystem} fs filesystem
* @param {string | Hash} hashFunction hash function to use
* @returns {Serializer<D, S, C>} file serializer
*/
createFileSerializer: (fs, hashFunction) => {
registerSerializers();
const Serializer = getSerializer();
const FileMiddleware = require("../serialization/FileMiddleware");
const fileMiddleware = new FileMiddleware(fs, hashFunction);
const binaryMiddleware = getBinaryMiddlewareInstance();
const SerializerMiddleware = getSerializerMiddleware();
const SingleItemMiddleware = getSingleItemMiddleware();
return /** @type {Serializer<D, S, C>} */ (
new Serializer([
new SingleItemMiddleware(),
new (getObjectMiddleware())(context => {
if ("write" in context) {
context.writeLazy = value => {
context.write(
SerializerMiddleware.createLazy(value, binaryMiddleware)
);
};
context.writeSeparate = (value, options) => {
const lazy = SerializerMiddleware.createLazy(
value,
fileMiddleware,
options
);
context.write(lazy);
return lazy;
};
}
}, hashFunction),
binaryMiddleware,
fileMiddleware
])
);
}
};
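// Illustrative usage sketch (hypothetical compiler/fs values, not part of the original module):
//   const serialization = require("./serialization");
//   const serializer = serialization.createFileSerializer(
//     compiler.intermediateFileSystem,  // an IntermediateFileSystem
//     "md4"                             // hash function name
//   );
//   // the returned Serializer's serialize/deserialize methods are Promise-based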

206
app_vue/node_modules/webpack/lib/util/smartGrouping.js generated vendored Normal file
View File

@ -0,0 +1,206 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @typedef {object} GroupOptions
* @property {boolean=} groupChildren
* @property {boolean=} force
* @property {number=} targetGroupCount
*/
/**
* @template T
* @template R
* @typedef {object} GroupConfig
* @property {(item: T) => string[] | undefined} getKeys
* @property {(key: string, children: (R | T)[], items: T[]) => R} createGroup
* @property {(name: string, items: T[]) => GroupOptions=} getOptions
*/
/**
* @template T
* @template R
* @typedef {object} ItemWithGroups
* @property {T} item
* @property {Set<Group<T, R>>} groups
*/
/**
* @template T
* @template R
* @typedef {{ config: GroupConfig<T, R>, name: string, alreadyGrouped: boolean, items: Set<ItemWithGroups<T, R>> | undefined }} Group
*/
/**
* @template T
* @template R
* @param {T[]} items the list of items
* @param {GroupConfig<T, R>[]} groupConfigs configuration
* @returns {(R | T)[]} grouped items
*/
const smartGrouping = (items, groupConfigs) => {
/** @type {Set<ItemWithGroups<T, R>>} */
const itemsWithGroups = new Set();
/** @type {Map<string, Group<T, R>>} */
const allGroups = new Map();
for (const item of items) {
/** @type {Set<Group<T, R>>} */
const groups = new Set();
for (let i = 0; i < groupConfigs.length; i++) {
const groupConfig = groupConfigs[i];
const keys = groupConfig.getKeys(item);
if (keys) {
for (const name of keys) {
const key = `${i}:${name}`;
let group = allGroups.get(key);
if (group === undefined) {
allGroups.set(
key,
(group = {
config: groupConfig,
name,
alreadyGrouped: false,
items: undefined
})
);
}
groups.add(group);
}
}
}
itemsWithGroups.add({
item,
groups
});
}
/**
* @param {Set<ItemWithGroups<T, R>>} itemsWithGroups input items with groups
* @returns {(T | R)[]} grouped items
*/
const runGrouping = itemsWithGroups => {
const totalSize = itemsWithGroups.size;
for (const entry of itemsWithGroups) {
for (const group of entry.groups) {
if (group.alreadyGrouped) continue;
const items = group.items;
if (items === undefined) {
group.items = new Set([entry]);
} else {
items.add(entry);
}
}
}
/** @type {Map<Group<T, R>, { items: Set<ItemWithGroups<T, R>>, options: GroupOptions | false | undefined, used: boolean }>} */
const groupMap = new Map();
for (const group of allGroups.values()) {
if (group.items) {
const items = group.items;
group.items = undefined;
groupMap.set(group, {
items,
options: undefined,
used: false
});
}
}
/** @type {(T | R)[]} */
const results = [];
for (;;) {
/** @type {Group<T, R> | undefined} */
let bestGroup;
let bestGroupSize = -1;
let bestGroupItems;
let bestGroupOptions;
for (const [group, state] of groupMap) {
const { items, used } = state;
let options = state.options;
if (options === undefined) {
const groupConfig = group.config;
state.options = options =
(groupConfig.getOptions &&
groupConfig.getOptions(
group.name,
Array.from(items, ({ item }) => item)
)) ||
false;
}
const force = options && options.force;
if (!force) {
if (bestGroupOptions && bestGroupOptions.force) continue;
if (used) continue;
if (items.size <= 1 || totalSize - items.size <= 1) {
continue;
}
}
const targetGroupCount = (options && options.targetGroupCount) || 4;
const sizeValue = force
? items.size
: Math.min(
items.size,
(totalSize * 2) / targetGroupCount +
itemsWithGroups.size -
items.size
);
if (
sizeValue > bestGroupSize ||
(force && (!bestGroupOptions || !bestGroupOptions.force))
) {
bestGroup = group;
bestGroupSize = sizeValue;
bestGroupItems = items;
bestGroupOptions = options;
}
}
if (bestGroup === undefined) {
break;
}
const items = new Set(bestGroupItems);
const options = bestGroupOptions;
const groupChildren = !options || options.groupChildren !== false;
for (const item of items) {
itemsWithGroups.delete(item);
// Remove all groups that items have from the map to not select them again
for (const group of item.groups) {
const state = groupMap.get(group);
if (state !== undefined) {
state.items.delete(item);
if (state.items.size === 0) {
groupMap.delete(group);
} else {
state.options = undefined;
if (groupChildren) {
state.used = true;
}
}
}
}
}
groupMap.delete(bestGroup);
const key = bestGroup.name;
const groupConfig = bestGroup.config;
const allItems = Array.from(items, ({ item }) => item);
bestGroup.alreadyGrouped = true;
const children = groupChildren ? runGrouping(items) : allItems;
bestGroup.alreadyGrouped = false;
results.push(groupConfig.createGroup(key, children, allItems));
}
for (const { item } of itemsWithGroups) {
results.push(item);
}
return results;
};
return runGrouping(itemsWithGroups);
};
module.exports = smartGrouping;
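// Illustrative usage (editorial sketch; the item shape and group result below are made up):
//   const items = [
//     { name: "a.js", dir: "src" }, { name: "b.js", dir: "src" },
//     { name: "c.js", dir: "lib" }, { name: "d.js", dir: "lib" }
//   ];
//   smartGrouping(items, [{
//     getKeys: item => [item.dir],
//     createGroup: (key, children, items) => ({ group: key, children }),
//     getOptions: () => ({ groupChildren: false })
//   }]);
//   // -> [{ group: "src", children: [2 items] }, { group: "lib", children: [2 items] }]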

61
app_vue/node_modules/webpack/lib/util/source.js generated vendored Normal file
View File

@ -0,0 +1,61 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("webpack-sources").Source} Source */
/** @type {WeakMap<Source, WeakMap<Source, boolean>>} */
const equalityCache = new WeakMap();
/**
* @param {Source} a a source
* @param {Source} b another source
* @returns {boolean} true, when both sources are equal
*/
const _isSourceEqual = (a, b) => {
// prefer .buffer(), it's called anyway during emit
/** @type {Buffer|string} */
let aSource = typeof a.buffer === "function" ? a.buffer() : a.source();
/** @type {Buffer|string} */
let bSource = typeof b.buffer === "function" ? b.buffer() : b.source();
if (aSource === bSource) return true;
if (typeof aSource === "string" && typeof bSource === "string") return false;
if (!Buffer.isBuffer(aSource)) aSource = Buffer.from(aSource, "utf-8");
if (!Buffer.isBuffer(bSource)) bSource = Buffer.from(bSource, "utf-8");
return aSource.equals(bSource);
};
/**
* @param {Source} a a source
* @param {Source} b another source
* @returns {boolean} true, when both sources are equal
*/
const isSourceEqual = (a, b) => {
if (a === b) return true;
const cache1 = equalityCache.get(a);
if (cache1 !== undefined) {
const result = cache1.get(b);
if (result !== undefined) return result;
}
const result = _isSourceEqual(a, b);
if (cache1 !== undefined) {
cache1.set(b, result);
} else {
const map = new WeakMap();
map.set(b, result);
equalityCache.set(a, map);
}
const cache2 = equalityCache.get(b);
if (cache2 !== undefined) {
cache2.set(a, result);
} else {
const map = new WeakMap();
map.set(a, result);
equalityCache.set(b, map);
}
return result;
};
module.exports.isSourceEqual = isSourceEqual;
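// Illustrative usage (editorial sketch; assumes webpack-sources, which webpack already depends on):
//   const { RawSource } = require("webpack-sources");
//   isSourceEqual(new RawSource("abc"), new RawSource("abc"));  // -> true (cached in both directions)
//   isSourceEqual(new RawSource("abc"), new RawSource("abd"));  // -> false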