2025-09-19 14:25:20 +08:00
parent 269893a435
commit fbf3f77229
24949 changed files with 2839404 additions and 0 deletions

node_modules/webpack/lib/cache/AddBuildDependenciesPlugin.js

@@ -0,0 +1,32 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../Compiler")} Compiler */
const PLUGIN_NAME = "AddBuildDependenciesPlugin";
class AddBuildDependenciesPlugin {
/**
* @param {Iterable<string>} buildDependencies list of build dependencies
*/
constructor(buildDependencies) {
this.buildDependencies = new Set(buildDependencies);
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) => {
compilation.buildDependencies.addAll(this.buildDependencies);
});
}
}
module.exports = AddBuildDependenciesPlugin;
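
One plausible way to exercise this plugin directly, as a rough sketch (the config path below is purely illustrative, and applying internal plugins by hand is not part of webpack's documented API):

const webpack = require("webpack");
const AddBuildDependenciesPlugin = require("webpack/lib/cache/AddBuildDependenciesPlugin");

const compiler = webpack({ mode: "production", entry: "./src/index.js" });
// Every compilation will now report these files as build dependencies,
// so changing them invalidates the persistent cache.
new AddBuildDependenciesPlugin(["./build.config.js"]).apply(compiler);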

node_modules/webpack/lib/cache/AddManagedPathsPlugin.js

@@ -0,0 +1,40 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../Compiler")} Compiler */
class AddManagedPathsPlugin {
/**
* @param {Iterable<string | RegExp>} managedPaths list of managed paths
* @param {Iterable<string | RegExp>} immutablePaths list of immutable paths
* @param {Iterable<string | RegExp>} unmanagedPaths list of unmanaged paths
*/
constructor(managedPaths, immutablePaths, unmanagedPaths) {
this.managedPaths = new Set(managedPaths);
this.immutablePaths = new Set(immutablePaths);
this.unmanagedPaths = new Set(unmanagedPaths);
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
for (const managedPath of this.managedPaths) {
compiler.managedPaths.add(managedPath);
}
for (const immutablePath of this.immutablePaths) {
compiler.immutablePaths.add(immutablePath);
}
for (const unmanagedPath of this.unmanagedPaths) {
compiler.unmanagedPaths.add(unmanagedPath);
}
}
}
module.exports = AddManagedPathsPlugin;
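
The three path sets appear to correspond to the user-facing snapshot.managedPaths / immutablePaths / unmanagedPaths options; a hedged configuration sketch (the concrete paths are illustrative only):

// webpack.config.js (sketch)
const path = require("path");
module.exports = {
  // ...
  snapshot: {
    // treated as managed by a package manager: only package metadata is checked
    managedPaths: [path.resolve(__dirname, "node_modules")],
    // assumed to never change once written
    immutablePaths: [/\.yarn[\\/]cache/],
    // carved back out of the managed paths, e.g. a locally linked package
    unmanagedPaths: [path.resolve(__dirname, "node_modules/my-linked-lib")]
  }
};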

node_modules/webpack/lib/cache/IdleFileCachePlugin.js

@@ -0,0 +1,242 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const Cache = require("../Cache");
const ProgressPlugin = require("../ProgressPlugin");
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("./PackFileCacheStrategy")} PackFileCacheStrategy */
const BUILD_DEPENDENCIES_KEY = Symbol("build dependencies key");
const PLUGIN_NAME = "IdleFileCachePlugin";
class IdleFileCachePlugin {
/**
* @param {PackFileCacheStrategy} strategy cache strategy
* @param {number} idleTimeout timeout
* @param {number} idleTimeoutForInitialStore initial timeout
* @param {number} idleTimeoutAfterLargeChanges timeout after changes
*/
constructor(
strategy,
idleTimeout,
idleTimeoutForInitialStore,
idleTimeoutAfterLargeChanges
) {
this.strategy = strategy;
this.idleTimeout = idleTimeout;
this.idleTimeoutForInitialStore = idleTimeoutForInitialStore;
this.idleTimeoutAfterLargeChanges = idleTimeoutAfterLargeChanges;
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
const strategy = this.strategy;
const idleTimeout = this.idleTimeout;
const idleTimeoutForInitialStore = Math.min(
idleTimeout,
this.idleTimeoutForInitialStore
);
const idleTimeoutAfterLargeChanges = this.idleTimeoutAfterLargeChanges;
const resolvedPromise = Promise.resolve();
let timeSpendInBuild = 0;
let timeSpendInStore = 0;
let avgTimeSpendInStore = 0;
/** @type {Map<string | typeof BUILD_DEPENDENCIES_KEY, () => Promise<void | void[]>>} */
const pendingIdleTasks = new Map();
compiler.cache.hooks.store.tap(
{ name: PLUGIN_NAME, stage: Cache.STAGE_DISK },
(identifier, etag, data) => {
pendingIdleTasks.set(identifier, () =>
strategy.store(identifier, etag, data)
);
}
);
compiler.cache.hooks.get.tapPromise(
{ name: PLUGIN_NAME, stage: Cache.STAGE_DISK },
(identifier, etag, gotHandlers) => {
const restore = () =>
strategy.restore(identifier, etag).then((cacheEntry) => {
if (cacheEntry === undefined) {
gotHandlers.push((result, callback) => {
if (result !== undefined) {
pendingIdleTasks.set(identifier, () =>
strategy.store(identifier, etag, result)
);
}
callback();
});
} else {
return cacheEntry;
}
});
const pendingTask = pendingIdleTasks.get(identifier);
if (pendingTask !== undefined) {
pendingIdleTasks.delete(identifier);
return pendingTask().then(restore);
}
return restore();
}
);
compiler.cache.hooks.storeBuildDependencies.tap(
{ name: PLUGIN_NAME, stage: Cache.STAGE_DISK },
(dependencies) => {
pendingIdleTasks.set(BUILD_DEPENDENCIES_KEY, () =>
Promise.resolve().then(() =>
strategy.storeBuildDependencies(dependencies)
)
);
}
);
compiler.cache.hooks.shutdown.tapPromise(
{ name: PLUGIN_NAME, stage: Cache.STAGE_DISK },
() => {
if (idleTimer) {
clearTimeout(idleTimer);
idleTimer = undefined;
}
isIdle = false;
const reportProgress = ProgressPlugin.getReporter(compiler);
const jobs = [...pendingIdleTasks.values()];
if (reportProgress) reportProgress(0, "process pending cache items");
const promises = jobs.map((fn) => fn());
pendingIdleTasks.clear();
promises.push(currentIdlePromise);
const promise = Promise.all(promises);
currentIdlePromise = promise.then(() => strategy.afterAllStored());
if (reportProgress) {
currentIdlePromise = currentIdlePromise.then(() => {
reportProgress(1, "stored");
});
}
return currentIdlePromise.then(() => {
// Reset strategy
if (strategy.clear) strategy.clear();
});
}
);
/** @type {Promise<void | void[]>} */
let currentIdlePromise = resolvedPromise;
let isIdle = false;
let isInitialStore = true;
const processIdleTasks = () => {
if (isIdle) {
const startTime = Date.now();
if (pendingIdleTasks.size > 0) {
const promises = [currentIdlePromise];
const maxTime = startTime + 100;
let maxCount = 100;
for (const [filename, factory] of pendingIdleTasks) {
pendingIdleTasks.delete(filename);
promises.push(factory());
if (maxCount-- <= 0 || Date.now() > maxTime) break;
}
currentIdlePromise = Promise.all(
/** @type {Promise<void>[]} */
(promises)
);
currentIdlePromise.then(() => {
timeSpendInStore += Date.now() - startTime;
// Allow the process to exit in between batches
idleTimer = setTimeout(processIdleTasks, 0);
idleTimer.unref();
});
return;
}
currentIdlePromise = currentIdlePromise
.then(async () => {
await strategy.afterAllStored();
timeSpendInStore += Date.now() - startTime;
avgTimeSpendInStore =
Math.max(avgTimeSpendInStore, timeSpendInStore) * 0.9 +
timeSpendInStore * 0.1;
timeSpendInStore = 0;
timeSpendInBuild = 0;
})
.catch((err) => {
const logger = compiler.getInfrastructureLogger(PLUGIN_NAME);
logger.warn(`Background tasks during idle failed: ${err.message}`);
logger.debug(err.stack);
});
isInitialStore = false;
}
};
/** @type {ReturnType<typeof setTimeout> | undefined} */
let idleTimer;
compiler.cache.hooks.beginIdle.tap(
{ name: PLUGIN_NAME, stage: Cache.STAGE_DISK },
() => {
const isLargeChange = timeSpendInBuild > avgTimeSpendInStore * 2;
if (isInitialStore && idleTimeoutForInitialStore < idleTimeout) {
compiler
.getInfrastructureLogger(PLUGIN_NAME)
.log(
`Initial cache was generated and cache will be persisted in ${
idleTimeoutForInitialStore / 1000
}s.`
);
} else if (
isLargeChange &&
idleTimeoutAfterLargeChanges < idleTimeout
) {
compiler
.getInfrastructureLogger(PLUGIN_NAME)
.log(
`Spend ${Math.round(timeSpendInBuild) / 1000}s in build and ${
Math.round(avgTimeSpendInStore) / 1000
}s in average in cache store. This is considered as large change and cache will be persisted in ${
idleTimeoutAfterLargeChanges / 1000
}s.`
);
}
idleTimer = setTimeout(
() => {
idleTimer = undefined;
isIdle = true;
resolvedPromise.then(processIdleTasks);
},
Math.min(
isInitialStore ? idleTimeoutForInitialStore : Infinity,
isLargeChange ? idleTimeoutAfterLargeChanges : Infinity,
idleTimeout
)
);
idleTimer.unref();
}
);
compiler.cache.hooks.endIdle.tap(
{ name: PLUGIN_NAME, stage: Cache.STAGE_DISK },
() => {
if (idleTimer) {
clearTimeout(idleTimer);
idleTimer = undefined;
}
isIdle = false;
}
);
compiler.hooks.done.tap(PLUGIN_NAME, (stats) => {
// 10% build overhead is ignored, as it's not cacheable
timeSpendInBuild *= 0.9;
timeSpendInBuild +=
/** @type {number} */ (stats.endTime) -
/** @type {number} */ (stats.startTime);
});
}
}
module.exports = IdleFileCachePlugin;
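
The three constructor timeouts correspond to the filesystem cache options of the same names; a hedged configuration sketch (values are illustrative):

// webpack.config.js (sketch)
module.exports = {
  // ...
  cache: {
    type: "filesystem",
    idleTimeout: 60000,                // ms of idle time before pending cache items are written
    idleTimeoutForInitialStore: 5000,  // shorter delay for the very first store
    idleTimeoutAfterLargeChanges: 1000 // shorter delay after a build classified as a large change
  }
};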

node_modules/webpack/lib/cache/MemoryCachePlugin.js

@@ -0,0 +1,59 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const Cache = require("../Cache");
/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("../Cache").Data} Data */
/** @typedef {import("../Cache").Etag} Etag */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Module")} Module */
class MemoryCachePlugin {
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
/** @type {Map<string, { etag: Etag | null, data: Data } | null>} */
const cache = new Map();
compiler.cache.hooks.store.tap(
{ name: "MemoryCachePlugin", stage: Cache.STAGE_MEMORY },
(identifier, etag, data) => {
cache.set(identifier, { etag, data });
}
);
compiler.cache.hooks.get.tap(
{ name: "MemoryCachePlugin", stage: Cache.STAGE_MEMORY },
(identifier, etag, gotHandlers) => {
const cacheEntry = cache.get(identifier);
if (cacheEntry === null) {
return null;
} else if (cacheEntry !== undefined) {
return cacheEntry.etag === etag ? cacheEntry.data : null;
}
gotHandlers.push((result, callback) => {
if (result === undefined) {
cache.set(identifier, null);
} else {
cache.set(identifier, { etag, data: result });
}
return callback();
});
}
);
compiler.cache.hooks.shutdown.tap(
{ name: "MemoryCachePlugin", stage: Cache.STAGE_MEMORY },
() => {
cache.clear();
}
);
}
}
module.exports = MemoryCachePlugin;
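
This in-memory cache backs the `type: "memory"` setting; a minimal configuration sketch:

// webpack.config.js (sketch)
module.exports = {
  // ...
  cache: { type: "memory" } // in-memory only, cleared on shutdown; equivalent to `cache: true`
};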

node_modules/webpack/lib/cache/MemoryWithGcCachePlugin.js

@@ -0,0 +1,143 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const Cache = require("../Cache");
/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("../Cache").Data} Data */
/** @typedef {import("../Cache").Etag} Etag */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Module")} Module */
/**
* @typedef {object} MemoryWithGcCachePluginOptions
* @property {number} maxGenerations max generations
*/
const PLUGIN_NAME = "MemoryWithGcCachePlugin";
class MemoryWithGcCachePlugin {
/**
* @param {MemoryWithGcCachePluginOptions} options options
*/
constructor({ maxGenerations }) {
this._maxGenerations = maxGenerations;
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
const maxGenerations = this._maxGenerations;
/** @type {Map<string, { etag: Etag | null, data: Data } | undefined | null>} */
const cache = new Map();
/** @type {Map<string, { entry: { etag: Etag | null, data: Data } | null, until: number }>} */
const oldCache = new Map();
let generation = 0;
let cachePosition = 0;
const logger = compiler.getInfrastructureLogger(PLUGIN_NAME);
compiler.hooks.afterDone.tap(PLUGIN_NAME, () => {
generation++;
let clearedEntries = 0;
let lastClearedIdentifier;
// Avoid coverage problems due to indirect changes
/* istanbul ignore next */
for (const [identifier, entry] of oldCache) {
if (entry.until > generation) break;
oldCache.delete(identifier);
if (cache.get(identifier) === undefined) {
cache.delete(identifier);
clearedEntries++;
lastClearedIdentifier = identifier;
}
}
if (clearedEntries > 0 || oldCache.size > 0) {
logger.log(
`${cache.size - oldCache.size} active entries, ${
oldCache.size
} recently unused cached entries${
clearedEntries > 0
? `, ${clearedEntries} old unused cache entries removed e. g. ${lastClearedIdentifier}`
: ""
}`
);
}
let i = (cache.size / maxGenerations) | 0;
let j = cachePosition >= cache.size ? 0 : cachePosition;
cachePosition = j + i;
for (const [identifier, entry] of cache) {
if (j !== 0) {
j--;
continue;
}
if (entry !== undefined) {
// We don't delete the cache entry, but set it to undefined instead
// This reserves the location in the data table and avoids rehashing
// when constantly adding and removing entries.
// It will be deleted when removed from oldCache.
cache.set(identifier, undefined);
oldCache.delete(identifier);
oldCache.set(identifier, {
entry,
until: generation + maxGenerations
});
if (i-- === 0) break;
}
}
});
compiler.cache.hooks.store.tap(
{ name: PLUGIN_NAME, stage: Cache.STAGE_MEMORY },
(identifier, etag, data) => {
cache.set(identifier, { etag, data });
}
);
compiler.cache.hooks.get.tap(
{ name: PLUGIN_NAME, stage: Cache.STAGE_MEMORY },
(identifier, etag, gotHandlers) => {
const cacheEntry = cache.get(identifier);
if (cacheEntry === null) {
return null;
} else if (cacheEntry !== undefined) {
return cacheEntry.etag === etag ? cacheEntry.data : null;
}
const oldCacheEntry = oldCache.get(identifier);
if (oldCacheEntry !== undefined) {
const cacheEntry = oldCacheEntry.entry;
if (cacheEntry === null) {
oldCache.delete(identifier);
cache.set(identifier, cacheEntry);
return null;
}
if (cacheEntry.etag !== etag) return null;
oldCache.delete(identifier);
cache.set(identifier, cacheEntry);
return cacheEntry.data;
}
gotHandlers.push((result, callback) => {
if (result === undefined) {
cache.set(identifier, null);
} else {
cache.set(identifier, { etag, data: result });
}
return callback();
});
}
);
compiler.cache.hooks.shutdown.tap(
{ name: PLUGIN_NAME, stage: Cache.STAGE_MEMORY },
() => {
cache.clear();
oldCache.clear();
}
);
}
}
module.exports = MemoryWithGcCachePlugin;
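
Compared to MemoryCachePlugin, this variant evicts entries that stay unused for maxGenerations compilations; a hedged configuration sketch:

// webpack.config.js (sketch)
module.exports = {
  // ...
  cache: {
    type: "memory",
    maxGenerations: 5 // entries unused for 5 compilations become eligible for removal
  }
};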

node_modules/webpack/lib/cache/PackFileCacheStrategy.js
File diff suppressed because it is too large

node_modules/webpack/lib/cache/ResolverCachePlugin.js

@@ -0,0 +1,456 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const LazySet = require("../util/LazySet");
const makeSerializable = require("../util/makeSerializable");
/** @typedef {import("enhanced-resolve").ResolveContext} ResolveContext */
/** @typedef {import("enhanced-resolve").ResolveOptions} ResolveOptions */
/** @typedef {import("enhanced-resolve").ResolveRequest} ResolveRequest */
/** @typedef {import("enhanced-resolve").Resolver} Resolver */
/** @typedef {import("../CacheFacade").ItemCacheFacade} ItemCacheFacade */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../FileSystemInfo")} FileSystemInfo */
/** @typedef {import("../FileSystemInfo").Snapshot} Snapshot */
/** @typedef {import("../FileSystemInfo").SnapshotOptions} SnapshotOptions */
/** @typedef {import("../ResolverFactory").ResolveOptionsWithDependencyType} ResolveOptionsWithDependencyType */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectDeserializerContext} ObjectDeserializerContext */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectSerializerContext} ObjectSerializerContext */
/**
* @template T
* @typedef {import("tapable").SyncHook<T>} SyncHook
*/
/**
* @template H
* @typedef {import("tapable").HookMapInterceptor<H>} HookMapInterceptor
*/
class CacheEntry {
/**
* @param {ResolveRequest} result result
* @param {Snapshot} snapshot snapshot
*/
constructor(result, snapshot) {
this.result = result;
this.snapshot = snapshot;
}
/**
* @param {ObjectSerializerContext} context context
*/
serialize({ write }) {
write(this.result);
write(this.snapshot);
}
/**
* @param {ObjectDeserializerContext} context context
*/
deserialize({ read }) {
this.result = read();
this.snapshot = read();
}
}
makeSerializable(CacheEntry, "webpack/lib/cache/ResolverCachePlugin");
/**
* @template T
* @param {Set<T> | LazySet<T>} set set to add items to
* @param {Set<T> | LazySet<T> | Iterable<T>} otherSet set to add items from
* @returns {void}
*/
const addAllToSet = (set, otherSet) => {
if (set instanceof LazySet) {
set.addAll(otherSet);
} else {
for (const item of otherSet) {
set.add(item);
}
}
};
/**
* @template {object} T
* @param {T} object an object
* @param {boolean} excludeContext if true, context is not included in string
* @returns {string} stringified version
*/
const objectToString = (object, excludeContext) => {
let str = "";
for (const key in object) {
if (excludeContext && key === "context") continue;
const value = object[key];
str +=
typeof value === "object" && value !== null
? `|${key}=[${objectToString(value, false)}|]`
: `|${key}=|${value}`;
}
return str;
};
/** @typedef {NonNullable<ResolveContext["yield"]>} Yield */
const PLUGIN_NAME = "ResolverCachePlugin";
class ResolverCachePlugin {
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
const cache = compiler.getCache(PLUGIN_NAME);
/** @type {FileSystemInfo} */
let fileSystemInfo;
/** @type {SnapshotOptions | undefined} */
let snapshotOptions;
let realResolves = 0;
let cachedResolves = 0;
let cacheInvalidResolves = 0;
let concurrentResolves = 0;
compiler.hooks.thisCompilation.tap(PLUGIN_NAME, (compilation) => {
snapshotOptions = compilation.options.snapshot.resolve;
fileSystemInfo = compilation.fileSystemInfo;
compilation.hooks.finishModules.tap(PLUGIN_NAME, () => {
if (realResolves + cachedResolves > 0) {
const logger = compilation.getLogger(`webpack.${PLUGIN_NAME}`);
logger.log(
`${Math.round(
(100 * realResolves) / (realResolves + cachedResolves)
)}% really resolved (${realResolves} real resolves with ${cacheInvalidResolves} cached but invalid, ${cachedResolves} cached valid, ${concurrentResolves} concurrent)`
);
realResolves = 0;
cachedResolves = 0;
cacheInvalidResolves = 0;
concurrentResolves = 0;
}
});
});
/** @typedef {(err?: Error | null, resolveRequest?: ResolveRequest | null) => void} Callback */
/** @typedef {ResolveRequest & { _ResolverCachePluginCacheMiss: true }} ResolveRequestWithCacheMiss */
/**
* @param {ItemCacheFacade} itemCache cache
* @param {Resolver} resolver the resolver
* @param {ResolveContext} resolveContext context for resolving meta info
* @param {ResolveRequest} request the request info object
* @param {Callback} callback callback function
* @returns {void}
*/
const doRealResolve = (
itemCache,
resolver,
resolveContext,
request,
callback
) => {
realResolves++;
const newRequest =
/** @type {ResolveRequestWithCacheMiss} */
({
_ResolverCachePluginCacheMiss: true,
...request
});
/** @type {ResolveContext} */
const newResolveContext = {
...resolveContext,
stack: new Set(),
/** @type {LazySet<string>} */
missingDependencies: new LazySet(),
/** @type {LazySet<string>} */
fileDependencies: new LazySet(),
/** @type {LazySet<string>} */
contextDependencies: new LazySet()
};
/** @type {ResolveRequest[] | undefined} */
let yieldResult;
let withYield = false;
if (typeof newResolveContext.yield === "function") {
yieldResult = [];
withYield = true;
newResolveContext.yield = (obj) =>
/** @type {ResolveRequest[]} */
(yieldResult).push(obj);
}
/**
* @param {"fileDependencies" | "contextDependencies" | "missingDependencies"} key key
*/
const propagate = (key) => {
if (resolveContext[key]) {
addAllToSet(
/** @type {Set<string>} */ (resolveContext[key]),
/** @type {Set<string>} */ (newResolveContext[key])
);
}
};
const resolveTime = Date.now();
resolver.doResolve(
resolver.hooks.resolve,
newRequest,
"Cache miss",
newResolveContext,
(err, result) => {
propagate("fileDependencies");
propagate("contextDependencies");
propagate("missingDependencies");
if (err) return callback(err);
const fileDependencies = newResolveContext.fileDependencies;
const contextDependencies = newResolveContext.contextDependencies;
const missingDependencies = newResolveContext.missingDependencies;
fileSystemInfo.createSnapshot(
resolveTime,
/** @type {Set<string>} */
(fileDependencies),
/** @type {Set<string>} */
(contextDependencies),
/** @type {Set<string>} */
(missingDependencies),
snapshotOptions,
(err, snapshot) => {
if (err) return callback(err);
const resolveResult = withYield ? yieldResult : result;
// since we intercept the resolve hook,
// we can still get a result in the callback
if (withYield && result) {
/** @type {ResolveRequest[]} */
(yieldResult).push(result);
}
if (!snapshot) {
if (resolveResult) {
return callback(
null,
/** @type {ResolveRequest} */
(resolveResult)
);
}
return callback();
}
itemCache.store(
new CacheEntry(
/** @type {ResolveRequest} */
(resolveResult),
snapshot
),
(storeErr) => {
if (storeErr) return callback(storeErr);
if (resolveResult) {
return callback(
null,
/** @type {ResolveRequest} */
(resolveResult)
);
}
callback();
}
);
}
);
}
);
};
compiler.resolverFactory.hooks.resolver.intercept({
factory(type, _hook) {
/** @typedef {(err?: Error, resolveRequest?: ResolveRequest) => void} ActiveRequest */
/** @type {Map<string, ActiveRequest[]>} */
const activeRequests = new Map();
/** @type {Map<string, [ActiveRequest[], Yield[]]>} */
const activeRequestsWithYield = new Map();
const hook =
/** @type {SyncHook<[Resolver, ResolveOptions, ResolveOptionsWithDependencyType]>} */
(_hook);
hook.tap(PLUGIN_NAME, (resolver, options, userOptions) => {
if (
/** @type {ResolveOptions & { cache: boolean }} */
(options).cache !== true
) {
return;
}
const optionsIdent = objectToString(userOptions, false);
const cacheWithContext =
options.cacheWithContext !== undefined
? options.cacheWithContext
: false;
resolver.hooks.resolve.tapAsync(
{
name: PLUGIN_NAME,
stage: -100
},
(request, resolveContext, callback) => {
if (
/** @type {ResolveRequestWithCacheMiss} */
(request)._ResolverCachePluginCacheMiss ||
!fileSystemInfo
) {
return callback();
}
const withYield = typeof resolveContext.yield === "function";
const identifier = `${type}${
withYield ? "|yield" : "|default"
}${optionsIdent}${objectToString(request, !cacheWithContext)}`;
if (withYield) {
const activeRequest = activeRequestsWithYield.get(identifier);
if (activeRequest) {
activeRequest[0].push(callback);
activeRequest[1].push(
/** @type {Yield} */
(resolveContext.yield)
);
return;
}
} else {
const activeRequest = activeRequests.get(identifier);
if (activeRequest) {
activeRequest.push(callback);
return;
}
}
const itemCache = cache.getItemCache(identifier, null);
/** @type {Callback[] | false | undefined} */
let callbacks;
/** @type {Yield[] | undefined} */
let yields;
/**
* @type {(err?: Error | null, result?: ResolveRequest | ResolveRequest[] | null) => void}
*/
const done = withYield
? (err, result) => {
if (callbacks === undefined) {
if (err) {
callback(err);
} else {
if (result) {
for (const r of /** @type {ResolveRequest[]} */ (
result
)) {
/** @type {Yield} */
(resolveContext.yield)(r);
}
}
callback(null, null);
}
yields = undefined;
callbacks = false;
} else {
const definedCallbacks =
/** @type {Callback[]} */
(callbacks);
if (err) {
for (const cb of definedCallbacks) cb(err);
} else {
for (let i = 0; i < definedCallbacks.length; i++) {
const cb = definedCallbacks[i];
const yield_ = /** @type {Yield[]} */ (yields)[i];
if (result) {
for (const r of /** @type {ResolveRequest[]} */ (
result
)) {
yield_(r);
}
}
cb(null, null);
}
}
activeRequestsWithYield.delete(identifier);
yields = undefined;
callbacks = false;
}
}
: (err, result) => {
if (callbacks === undefined) {
callback(err, /** @type {ResolveRequest} */ (result));
callbacks = false;
} else {
for (const callback of /** @type {Callback[]} */ (
callbacks
)) {
callback(err, /** @type {ResolveRequest} */ (result));
}
activeRequests.delete(identifier);
callbacks = false;
}
};
/**
* @param {(Error | null)=} err error if any
* @param {(CacheEntry | null)=} cacheEntry cache entry
* @returns {void}
*/
const processCacheResult = (err, cacheEntry) => {
if (err) return done(err);
if (cacheEntry) {
const { snapshot, result } = cacheEntry;
fileSystemInfo.checkSnapshotValid(snapshot, (err, valid) => {
if (err || !valid) {
cacheInvalidResolves++;
return doRealResolve(
itemCache,
resolver,
resolveContext,
request,
done
);
}
cachedResolves++;
if (resolveContext.missingDependencies) {
addAllToSet(
/** @type {Set<string>} */
(resolveContext.missingDependencies),
snapshot.getMissingIterable()
);
}
if (resolveContext.fileDependencies) {
addAllToSet(
/** @type {Set<string>} */
(resolveContext.fileDependencies),
snapshot.getFileIterable()
);
}
if (resolveContext.contextDependencies) {
addAllToSet(
/** @type {Set<string>} */
(resolveContext.contextDependencies),
snapshot.getContextIterable()
);
}
done(null, result);
});
} else {
doRealResolve(
itemCache,
resolver,
resolveContext,
request,
done
);
}
};
itemCache.get(processCacheResult);
if (withYield && callbacks === undefined) {
callbacks = [callback];
yields = [/** @type {Yield} */ (resolveContext.yield)];
activeRequestsWithYield.set(identifier, [callbacks, yields]);
} else if (callbacks === undefined) {
callbacks = [callback];
activeRequests.set(identifier, callbacks);
}
}
);
});
return hook;
}
});
}
}
module.exports = ResolverCachePlugin;
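
The snapshot options read in thisCompilation come from the snapshot.resolve configuration; a hedged sketch of the knobs that control how cached resolve results are validated:

// webpack.config.js (sketch)
module.exports = {
  // ...
  snapshot: {
    resolve: {
      timestamp: true, // compare file timestamps when checking a cached resolution
      hash: false      // skip content hashing for snapshot validation
    }
  }
};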

node_modules/webpack/lib/cache/getLazyHashedEtag.js

@@ -0,0 +1,82 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { DEFAULTS } = require("../config/defaults");
const createHash = require("../util/createHash");
/** @typedef {import("../util/Hash")} Hash */
/** @typedef {typeof import("../util/Hash")} HashConstructor */
/**
* @typedef {object} HashableObject
* @property {(hash: Hash) => void} updateHash
*/
class LazyHashedEtag {
/**
* @param {HashableObject} obj object with updateHash method
* @param {string | HashConstructor} hashFunction the hash function to use
*/
constructor(obj, hashFunction = DEFAULTS.HASH_FUNCTION) {
this._obj = obj;
this._hash = undefined;
this._hashFunction = hashFunction;
}
/**
* @returns {string} hash of object
*/
toString() {
if (this._hash === undefined) {
const hash = createHash(this._hashFunction);
this._obj.updateHash(hash);
this._hash = /** @type {string} */ (hash.digest("base64"));
}
return this._hash;
}
}
/** @type {Map<string | HashConstructor, WeakMap<HashableObject, LazyHashedEtag>>} */
const mapStrings = new Map();
/** @type {WeakMap<HashConstructor, WeakMap<HashableObject, LazyHashedEtag>>} */
const mapObjects = new WeakMap();
/**
* @param {HashableObject} obj object with updateHash method
* @param {(string | HashConstructor)=} hashFunction the hash function to use
* @returns {LazyHashedEtag} etag
*/
const getter = (obj, hashFunction = DEFAULTS.HASH_FUNCTION) => {
let innerMap;
if (typeof hashFunction === "string") {
innerMap = mapStrings.get(hashFunction);
if (innerMap === undefined) {
const newHash = new LazyHashedEtag(obj, hashFunction);
innerMap = new WeakMap();
innerMap.set(obj, newHash);
mapStrings.set(hashFunction, innerMap);
return newHash;
}
} else {
innerMap = mapObjects.get(hashFunction);
if (innerMap === undefined) {
const newHash = new LazyHashedEtag(obj, hashFunction);
innerMap = new WeakMap();
innerMap.set(obj, newHash);
mapObjects.set(hashFunction, innerMap);
return newHash;
}
}
const hash = innerMap.get(obj);
if (hash !== undefined) return hash;
const newHash = new LazyHashedEtag(obj, hashFunction);
innerMap.set(obj, newHash);
return newHash;
};
module.exports = getter;
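
A usage sketch showing the two properties this module relies on: hashing is deferred until toString(), and the etag instance is memoized per object and hash function:

const getLazyHashedEtag = require("webpack/lib/cache/getLazyHashedEtag");

const obj = { updateHash: (hash) => hash.update("some module state") };
const a = getLazyHashedEtag(obj); // no hash computed yet
const b = getLazyHashedEtag(obj); // same instance, served from the WeakMap
console.log(a === b);             // true
console.log(a.toString());        // base64 digest, computed on this first call and cached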

node_modules/webpack/lib/cache/mergeEtags.js

@@ -0,0 +1,69 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../Cache").Etag} Etag */
class MergedEtag {
/**
* @param {Etag} a first
* @param {Etag} b second
*/
constructor(a, b) {
this.a = a;
this.b = b;
}
toString() {
return `${this.a.toString()}|${this.b.toString()}`;
}
}
const dualObjectMap = new WeakMap();
const objectStringMap = new WeakMap();
/**
* @param {Etag} a first
* @param {Etag} b second
* @returns {Etag} result
*/
const mergeEtags = (a, b) => {
if (typeof a === "string") {
if (typeof b === "string") {
return `${a}|${b}`;
}
const temp = b;
b = a;
a = temp;
} else if (typeof b !== "string") {
// both a and b are objects
let map = dualObjectMap.get(a);
if (map === undefined) {
dualObjectMap.set(a, (map = new WeakMap()));
}
const mergedEtag = map.get(b);
if (mergedEtag === undefined) {
const newMergedEtag = new MergedEtag(a, b);
map.set(b, newMergedEtag);
return newMergedEtag;
}
return mergedEtag;
}
// a is object, b is string
let map = objectStringMap.get(a);
if (map === undefined) {
objectStringMap.set(a, (map = new Map()));
}
const mergedEtag = map.get(b);
if (mergedEtag === undefined) {
const newMergedEtag = new MergedEtag(a, b);
map.set(b, newMergedEtag);
return newMergedEtag;
}
return mergedEtag;
};
module.exports = mergeEtags;
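
A usage sketch of the three merge paths (string/string, object/object, object/string), reusing getLazyHashedEtag from the previous file for object etags:

const mergeEtags = require("webpack/lib/cache/mergeEtags");
const getLazyHashedEtag = require("webpack/lib/cache/getLazyHashedEtag");

const x = getLazyHashedEtag({ updateHash: (h) => h.update("x") });
const y = getLazyHashedEtag({ updateHash: (h) => h.update("y") });

console.log(mergeEtags("a", "b"));                  // "a|b" (plain string concatenation)
console.log(mergeEtags(x, y) === mergeEtags(x, y)); // true, the MergedEtag is WeakMap-cached
console.log(mergeEtags(x, "b").toString());         // "<digest of x>|b"
console.log(mergeEtags("b", x).toString());         // also "<digest of x>|b": the string is swapped to second position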