2025-09-19 14:25:20 +08:00
parent 269893a435
commit fbf3f77229
24949 changed files with 2839404 additions and 0 deletions

19
node_modules/@jridgewell/gen-mapping/LICENSE generated vendored Normal file

@@ -0,0 +1,19 @@
Copyright 2024 Justin Ridgewell <justin@ridgewell.name>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

227
node_modules/@jridgewell/gen-mapping/README.md generated vendored Normal file

@@ -0,0 +1,227 @@
# @jridgewell/gen-mapping
> Generate source maps
`gen-mapping` allows you to generate a source map during transpilation or minification.
With a source map, you're able to trace the original location in the source file, either in Chrome's
DevTools or using a library like [`@jridgewell/trace-mapping`][trace-mapping].
You may already be familiar with the [`source-map`][source-map] package's `SourceMapGenerator`;
`gen-mapping` provides the same `addMapping` and `setSourceContent` API.
## Installation
```sh
npm install @jridgewell/gen-mapping
```
## Usage
```typescript
import assert from 'assert';
import { GenMapping, addMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping';
const map = new GenMapping({
file: 'output.js',
sourceRoot: 'https://example.com/',
});
setSourceContent(map, 'input.js', `function foo() {}`);
addMapping(map, {
// Lines start at line 1, columns at column 0.
generated: { line: 1, column: 0 },
source: 'input.js',
original: { line: 1, column: 0 },
});
addMapping(map, {
generated: { line: 1, column: 9 },
source: 'input.js',
original: { line: 1, column: 9 },
name: 'foo',
});
assert.deepEqual(toDecodedMap(map), {
version: 3,
file: 'output.js',
names: ['foo'],
sourceRoot: 'https://example.com/',
sources: ['input.js'],
sourcesContent: ['function foo() {}'],
mappings: [
[ [0, 0, 0, 0], [9, 0, 0, 9, 0] ]
],
});
assert.deepEqual(toEncodedMap(map), {
version: 3,
file: 'output.js',
names: ['foo'],
sourceRoot: 'https://example.com/',
sources: ['input.js'],
sourcesContent: ['function foo() {}'],
mappings: 'AAAA,SAASA',
});
```
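For comparison, here is a rough sketch of the example above written against `source-map`'s `SourceMapGenerator`. This is an illustration only and assumes the `source-map` package is installed; it is not a dependency of `gen-mapping`:
```typescript
import assert from 'assert';
// Comparison sketch only: `source-map` is a separate package.
import { SourceMapGenerator } from 'source-map';
const generator = new SourceMapGenerator({
  file: 'output.js',
  sourceRoot: 'https://example.com/',
});
generator.setSourceContent('input.js', `function foo() {}`);
generator.addMapping({
  generated: { line: 1, column: 0 },
  source: 'input.js',
  original: { line: 1, column: 0 },
});
generator.addMapping({
  generated: { line: 1, column: 9 },
  source: 'input.js',
  original: { line: 1, column: 9 },
  name: 'foo',
});
// toJSON() returns a plain sourcemap object; its mappings string matches the
// toEncodedMap() output above.
assert.equal(generator.toJSON().mappings, 'AAAA,SAASA');
```
The calls line up one-to-one with the functions above, which is what makes moving to `gen-mapping`'s free-function API largely mechanical.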
### Smaller Sourcemaps
Not everything needs to be added to a sourcemap, and needless markings can cause significantly
larger file sizes. `gen-mapping` exposes `maybeAddSegment`/`maybeAddMapping` APIs that will
intelligently determine whether a marking adds useful information. If not, the marking is
skipped.
```typescript
import assert from 'assert';
import { GenMapping, maybeAddMapping, toEncodedMap } from '@jridgewell/gen-mapping';
const map = new GenMapping();
// Adding a sourceless marking at the beginning of a line isn't useful.
maybeAddMapping(map, {
generated: { line: 1, column: 0 },
});
// Adding a new source marking is useful.
maybeAddMapping(map, {
generated: { line: 1, column: 0 },
source: 'input.js',
original: { line: 1, column: 0 },
});
// But adding another marking pointing to the exact same original location isn't, even if the
// generated column changed.
maybeAddMapping(map, {
generated: { line: 1, column: 9 },
source: 'input.js',
original: { line: 1, column: 0 },
});
assert.deepEqual(toEncodedMap(map), {
version: 3,
names: [],
sources: ['input.js'],
sourcesContent: [null],
mappings: 'AAAA',
});
```
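The same skipping logic is exposed at the segment level through `maybeAddSegment`, which takes 0-based generated and original line/column numbers directly. A minimal sketch of the example above, rewritten with the low-level API:
```typescript
import assert from 'assert';
import { GenMapping, maybeAddSegment, toEncodedMap } from '@jridgewell/gen-mapping';
const map = new GenMapping();
// A sourceless segment at the start of a line adds nothing, so it is skipped.
maybeAddSegment(map, 0, 0);
// The first segment pointing into input.js is useful, so it is kept.
maybeAddSegment(map, 0, 0, 'input.js', 0, 0);
// This segment points at the exact same original position as the previous one,
// so it is skipped even though the generated column changed.
maybeAddSegment(map, 0, 9, 'input.js', 0, 0);
assert.equal(toEncodedMap(map).mappings, 'AAAA');
```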
## Benchmarks
```
node v18.0.0
amp.js.map
Memory Usage:
gen-mapping: addSegment 5852872 bytes
gen-mapping: addMapping 7716042 bytes
source-map-js 6143250 bytes
source-map-0.6.1 6124102 bytes
source-map-0.8.0 6121173 bytes
Smallest memory usage is gen-mapping: addSegment
Adding speed:
gen-mapping: addSegment x 441 ops/sec ±2.07% (90 runs sampled)
gen-mapping: addMapping x 350 ops/sec ±2.40% (86 runs sampled)
source-map-js: addMapping x 169 ops/sec ±2.42% (80 runs sampled)
source-map-0.6.1: addMapping x 167 ops/sec ±2.56% (80 runs sampled)
source-map-0.8.0: addMapping x 168 ops/sec ±2.52% (80 runs sampled)
Fastest is gen-mapping: addSegment
Generate speed:
gen-mapping: decoded output x 150,824,370 ops/sec ±0.07% (102 runs sampled)
gen-mapping: encoded output x 663 ops/sec ±0.22% (98 runs sampled)
source-map-js: encoded output x 197 ops/sec ±0.45% (84 runs sampled)
source-map-0.6.1: encoded output x 198 ops/sec ±0.33% (85 runs sampled)
source-map-0.8.0: encoded output x 197 ops/sec ±0.06% (93 runs sampled)
Fastest is gen-mapping: decoded output
***
babel.min.js.map
Memory Usage:
gen-mapping: addSegment 37578063 bytes
gen-mapping: addMapping 37212897 bytes
source-map-js 47638527 bytes
source-map-0.6.1 47690503 bytes
source-map-0.8.0 47470188 bytes
Smallest memory usage is gen-mapping: addMapping
Adding speed:
gen-mapping: addSegment x 31.05 ops/sec ±8.31% (43 runs sampled)
gen-mapping: addMapping x 29.83 ops/sec ±7.36% (51 runs sampled)
source-map-js: addMapping x 20.73 ops/sec ±6.22% (38 runs sampled)
source-map-0.6.1: addMapping x 20.03 ops/sec ±10.51% (38 runs sampled)
source-map-0.8.0: addMapping x 19.30 ops/sec ±8.27% (37 runs sampled)
Fastest is gen-mapping: addSegment
Generate speed:
gen-mapping: decoded output x 381,379,234 ops/sec ±0.29% (96 runs sampled)
gen-mapping: encoded output x 95.15 ops/sec ±2.98% (72 runs sampled)
source-map-js: encoded output x 15.20 ops/sec ±7.41% (33 runs sampled)
source-map-0.6.1: encoded output x 16.36 ops/sec ±10.46% (31 runs sampled)
source-map-0.8.0: encoded output x 16.06 ops/sec ±6.45% (31 runs sampled)
Fastest is gen-mapping: decoded output
***
preact.js.map
Memory Usage:
gen-mapping: addSegment 416247 bytes
gen-mapping: addMapping 419824 bytes
source-map-js 1024619 bytes
source-map-0.6.1 1146004 bytes
source-map-0.8.0 1113250 bytes
Smallest memory usage is gen-mapping: addSegment
Adding speed:
gen-mapping: addSegment x 13,755 ops/sec ±0.15% (98 runs sampled)
gen-mapping: addMapping x 13,013 ops/sec ±0.11% (101 runs sampled)
source-map-js: addMapping x 4,564 ops/sec ±0.21% (98 runs sampled)
source-map-0.6.1: addMapping x 4,562 ops/sec ±0.11% (99 runs sampled)
source-map-0.8.0: addMapping x 4,593 ops/sec ±0.11% (100 runs sampled)
Fastest is gen-mapping: addSegment
Generate speed:
gen-mapping: decoded output x 379,864,020 ops/sec ±0.23% (93 runs sampled)
gen-mapping: encoded output x 14,368 ops/sec ±4.07% (82 runs sampled)
source-map-js: encoded output x 5,261 ops/sec ±0.21% (99 runs sampled)
source-map-0.6.1: encoded output x 5,124 ops/sec ±0.58% (99 runs sampled)
source-map-0.8.0: encoded output x 5,434 ops/sec ±0.33% (96 runs sampled)
Fastest is gen-mapping: decoded output
***
react.js.map
Memory Usage:
gen-mapping: addSegment 975096 bytes
gen-mapping: addMapping 1102981 bytes
source-map-js 2918836 bytes
source-map-0.6.1 2885435 bytes
source-map-0.8.0 2874336 bytes
Smallest memory usage is gen-mapping: addSegment
Adding speed:
gen-mapping: addSegment x 4,772 ops/sec ±0.15% (100 runs sampled)
gen-mapping: addMapping x 4,456 ops/sec ±0.13% (97 runs sampled)
source-map-js: addMapping x 1,618 ops/sec ±0.24% (97 runs sampled)
source-map-0.6.1: addMapping x 1,622 ops/sec ±0.12% (99 runs sampled)
source-map-0.8.0: addMapping x 1,631 ops/sec ±0.12% (100 runs sampled)
Fastest is gen-mapping: addSegment
Generate speed:
gen-mapping: decoded output x 379,107,695 ops/sec ±0.07% (99 runs sampled)
gen-mapping: encoded output x 5,421 ops/sec ±1.60% (89 runs sampled)
source-map-js: encoded output x 2,113 ops/sec ±1.81% (98 runs sampled)
source-map-0.6.1: encoded output x 2,126 ops/sec ±0.10% (100 runs sampled)
source-map-0.8.0: encoded output x 2,176 ops/sec ±0.39% (98 runs sampled)
Fastest is gen-mapping: decoded output
```
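These figures come from the package's own benchmark harness. Assuming a checkout of the `jridgewell/sourcemaps` monorepo (see the repository field in `package.json`), its scripts suggest they can be reproduced with something like:
```sh
# from a clone of https://github.com/jridgewell/sourcemaps
cd packages/gen-mapping
npm run benchmark   # run-s build:code benchmark:install benchmark:only
```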
[source-map]: https://www.npmjs.com/package/source-map
[trace-mapping]: https://github.com/jridgewell/sourcemaps/tree/main/packages/trace-mapping


@@ -0,0 +1,292 @@
// src/set-array.ts
var SetArray = class {
constructor() {
this._indexes = { __proto__: null };
this.array = [];
}
};
function cast(set) {
return set;
}
function get(setarr, key) {
return cast(setarr)._indexes[key];
}
function put(setarr, key) {
const index = get(setarr, key);
if (index !== void 0) return index;
const { array, _indexes: indexes } = cast(setarr);
const length = array.push(key);
return indexes[key] = length - 1;
}
function remove(setarr, key) {
const index = get(setarr, key);
if (index === void 0) return;
const { array, _indexes: indexes } = cast(setarr);
for (let i = index + 1; i < array.length; i++) {
const k = array[i];
array[i - 1] = k;
indexes[k]--;
}
indexes[key] = void 0;
array.pop();
}
// src/gen-mapping.ts
import {
encode
} from "@jridgewell/sourcemap-codec";
import { TraceMap, decodedMappings } from "@jridgewell/trace-mapping";
// src/sourcemap-segment.ts
var COLUMN = 0;
var SOURCES_INDEX = 1;
var SOURCE_LINE = 2;
var SOURCE_COLUMN = 3;
var NAMES_INDEX = 4;
// src/gen-mapping.ts
var NO_NAME = -1;
var GenMapping = class {
constructor({ file, sourceRoot } = {}) {
this._names = new SetArray();
this._sources = new SetArray();
this._sourcesContent = [];
this._mappings = [];
this.file = file;
this.sourceRoot = sourceRoot;
this._ignoreList = new SetArray();
}
};
function cast2(map) {
return map;
}
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
return addSegmentInternal(
false,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content
);
}
function addMapping(map, mapping) {
return addMappingInternal(false, map, mapping);
}
var maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
return addSegmentInternal(
true,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content
);
};
var maybeAddMapping = (map, mapping) => {
return addMappingInternal(true, map, mapping);
};
function setSourceContent(map, source, content) {
const {
_sources: sources,
_sourcesContent: sourcesContent
// _originalScopes: originalScopes,
} = cast2(map);
const index = put(sources, source);
sourcesContent[index] = content;
}
function setIgnore(map, source, ignore = true) {
const {
_sources: sources,
_sourcesContent: sourcesContent,
_ignoreList: ignoreList
// _originalScopes: originalScopes,
} = cast2(map);
const index = put(sources, source);
if (index === sourcesContent.length) sourcesContent[index] = null;
if (ignore) put(ignoreList, index);
else remove(ignoreList, index);
}
function toDecodedMap(map) {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names,
_ignoreList: ignoreList
// _originalScopes: originalScopes,
// _generatedRanges: generatedRanges,
} = cast2(map);
removeEmptyFinalLines(mappings);
return {
version: 3,
file: map.file || void 0,
names: names.array,
sourceRoot: map.sourceRoot || void 0,
sources: sources.array,
sourcesContent,
mappings,
// originalScopes,
// generatedRanges,
ignoreList: ignoreList.array
};
}
function toEncodedMap(map) {
const decoded = toDecodedMap(map);
return Object.assign({}, decoded, {
// originalScopes: decoded.originalScopes.map((os) => encodeOriginalScopes(os)),
// generatedRanges: encodeGeneratedRanges(decoded.generatedRanges as GeneratedRange[]),
mappings: encode(decoded.mappings)
});
}
function fromMap(input) {
const map = new TraceMap(input);
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
putAll(cast2(gen)._names, map.names);
putAll(cast2(gen)._sources, map.sources);
cast2(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
cast2(gen)._mappings = decodedMappings(map);
if (map.ignoreList) putAll(cast2(gen)._ignoreList, map.ignoreList);
return gen;
}
function allMappings(map) {
const out = [];
const { _mappings: mappings, _sources: sources, _names: names } = cast2(map);
for (let i = 0; i < mappings.length; i++) {
const line = mappings[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generated = { line: i + 1, column: seg[COLUMN] };
let source = void 0;
let original = void 0;
let name = void 0;
if (seg.length !== 1) {
source = sources.array[seg[SOURCES_INDEX]];
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
}
out.push({ generated, source, original, name });
}
}
return out;
}
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names
// _originalScopes: originalScopes,
} = cast2(map);
const line = getIndex(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipable && skipSourceless(line, index)) return;
return insert(line, index, [genColumn]);
}
assert(sourceLine);
assert(sourceColumn);
const sourcesIndex = put(sources, source);
const namesIndex = name ? put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content != null ? content : null;
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
return;
}
return insert(
line,
index,
name ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] : [genColumn, sourcesIndex, sourceLine, sourceColumn]
);
}
function assert(_val) {
}
function getIndex(arr, index) {
for (let i = arr.length; i <= index; i++) {
arr[i] = [];
}
return arr[index];
}
function getColumnIndex(line, genColumn) {
let index = line.length;
for (let i = index - 1; i >= 0; index = i--) {
const current = line[i];
if (genColumn >= current[COLUMN]) break;
}
return index;
}
function insert(array, index, value) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
function removeEmptyFinalLines(mappings) {
const { length } = mappings;
let len = length;
for (let i = len - 1; i >= 0; len = i, i--) {
if (mappings[i].length > 0) break;
}
if (len < length) mappings.length = len;
}
function putAll(setarr, array) {
for (let i = 0; i < array.length; i++) put(setarr, array[i]);
}
function skipSourceless(line, index) {
if (index === 0) return true;
const prev = line[index - 1];
return prev.length === 1;
}
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
if (index === 0) return false;
const prev = line[index - 1];
if (prev.length === 1) return false;
return sourcesIndex === prev[SOURCES_INDEX] && sourceLine === prev[SOURCE_LINE] && sourceColumn === prev[SOURCE_COLUMN] && namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME);
}
function addMappingInternal(skipable, map, mapping) {
const { generated, source, original, name, content } = mapping;
if (!source) {
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
null,
null,
null,
null,
null
);
}
assert(original);
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
source,
original.line - 1,
original.column,
name,
content
);
}
export {
GenMapping,
addMapping,
addSegment,
allMappings,
fromMap,
maybeAddMapping,
maybeAddSegment,
setIgnore,
setSourceContent,
toDecodedMap,
toEncodedMap
};
//# sourceMappingURL=gen-mapping.mjs.map

File diff suppressed because one or more lines are too long


@@ -0,0 +1,358 @@
(function (global, factory) {
if (typeof exports === 'object' && typeof module !== 'undefined') {
factory(module, require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping'));
module.exports = def(module);
} else if (typeof define === 'function' && define.amd) {
define(['module', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], function(mod) {
factory.apply(this, arguments);
mod.exports = def(mod);
});
} else {
const mod = { exports: {} };
factory(mod, global.sourcemapCodec, global.traceMapping);
global = typeof globalThis !== 'undefined' ? globalThis : global || self;
global.genMapping = def(mod);
}
function def(m) { return 'default' in m.exports ? m.exports.default : m.exports; }
})(this, (function (module, require_sourcemapCodec, require_traceMapping) {
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __commonJS = (cb, mod) => function __require() {
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// umd:@jridgewell/sourcemap-codec
var require_sourcemap_codec = __commonJS({
"umd:@jridgewell/sourcemap-codec"(exports, module2) {
module2.exports = require_sourcemapCodec;
}
});
// umd:@jridgewell/trace-mapping
var require_trace_mapping = __commonJS({
"umd:@jridgewell/trace-mapping"(exports, module2) {
module2.exports = require_traceMapping;
}
});
// src/gen-mapping.ts
var gen_mapping_exports = {};
__export(gen_mapping_exports, {
GenMapping: () => GenMapping,
addMapping: () => addMapping,
addSegment: () => addSegment,
allMappings: () => allMappings,
fromMap: () => fromMap,
maybeAddMapping: () => maybeAddMapping,
maybeAddSegment: () => maybeAddSegment,
setIgnore: () => setIgnore,
setSourceContent: () => setSourceContent,
toDecodedMap: () => toDecodedMap,
toEncodedMap: () => toEncodedMap
});
module.exports = __toCommonJS(gen_mapping_exports);
// src/set-array.ts
var SetArray = class {
constructor() {
this._indexes = { __proto__: null };
this.array = [];
}
};
function cast(set) {
return set;
}
function get(setarr, key) {
return cast(setarr)._indexes[key];
}
function put(setarr, key) {
const index = get(setarr, key);
if (index !== void 0) return index;
const { array, _indexes: indexes } = cast(setarr);
const length = array.push(key);
return indexes[key] = length - 1;
}
function remove(setarr, key) {
const index = get(setarr, key);
if (index === void 0) return;
const { array, _indexes: indexes } = cast(setarr);
for (let i = index + 1; i < array.length; i++) {
const k = array[i];
array[i - 1] = k;
indexes[k]--;
}
indexes[key] = void 0;
array.pop();
}
// src/gen-mapping.ts
var import_sourcemap_codec = __toESM(require_sourcemap_codec());
var import_trace_mapping = __toESM(require_trace_mapping());
// src/sourcemap-segment.ts
var COLUMN = 0;
var SOURCES_INDEX = 1;
var SOURCE_LINE = 2;
var SOURCE_COLUMN = 3;
var NAMES_INDEX = 4;
// src/gen-mapping.ts
var NO_NAME = -1;
var GenMapping = class {
constructor({ file, sourceRoot } = {}) {
this._names = new SetArray();
this._sources = new SetArray();
this._sourcesContent = [];
this._mappings = [];
this.file = file;
this.sourceRoot = sourceRoot;
this._ignoreList = new SetArray();
}
};
function cast2(map) {
return map;
}
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
return addSegmentInternal(
false,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content
);
}
function addMapping(map, mapping) {
return addMappingInternal(false, map, mapping);
}
var maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
return addSegmentInternal(
true,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content
);
};
var maybeAddMapping = (map, mapping) => {
return addMappingInternal(true, map, mapping);
};
function setSourceContent(map, source, content) {
const {
_sources: sources,
_sourcesContent: sourcesContent
// _originalScopes: originalScopes,
} = cast2(map);
const index = put(sources, source);
sourcesContent[index] = content;
}
function setIgnore(map, source, ignore = true) {
const {
_sources: sources,
_sourcesContent: sourcesContent,
_ignoreList: ignoreList
// _originalScopes: originalScopes,
} = cast2(map);
const index = put(sources, source);
if (index === sourcesContent.length) sourcesContent[index] = null;
if (ignore) put(ignoreList, index);
else remove(ignoreList, index);
}
function toDecodedMap(map) {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names,
_ignoreList: ignoreList
// _originalScopes: originalScopes,
// _generatedRanges: generatedRanges,
} = cast2(map);
removeEmptyFinalLines(mappings);
return {
version: 3,
file: map.file || void 0,
names: names.array,
sourceRoot: map.sourceRoot || void 0,
sources: sources.array,
sourcesContent,
mappings,
// originalScopes,
// generatedRanges,
ignoreList: ignoreList.array
};
}
function toEncodedMap(map) {
const decoded = toDecodedMap(map);
return Object.assign({}, decoded, {
// originalScopes: decoded.originalScopes.map((os) => encodeOriginalScopes(os)),
// generatedRanges: encodeGeneratedRanges(decoded.generatedRanges as GeneratedRange[]),
mappings: (0, import_sourcemap_codec.encode)(decoded.mappings)
});
}
function fromMap(input) {
const map = new import_trace_mapping.TraceMap(input);
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
putAll(cast2(gen)._names, map.names);
putAll(cast2(gen)._sources, map.sources);
cast2(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
cast2(gen)._mappings = (0, import_trace_mapping.decodedMappings)(map);
if (map.ignoreList) putAll(cast2(gen)._ignoreList, map.ignoreList);
return gen;
}
function allMappings(map) {
const out = [];
const { _mappings: mappings, _sources: sources, _names: names } = cast2(map);
for (let i = 0; i < mappings.length; i++) {
const line = mappings[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generated = { line: i + 1, column: seg[COLUMN] };
let source = void 0;
let original = void 0;
let name = void 0;
if (seg.length !== 1) {
source = sources.array[seg[SOURCES_INDEX]];
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
}
out.push({ generated, source, original, name });
}
}
return out;
}
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names
// _originalScopes: originalScopes,
} = cast2(map);
const line = getIndex(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipable && skipSourceless(line, index)) return;
return insert(line, index, [genColumn]);
}
assert(sourceLine);
assert(sourceColumn);
const sourcesIndex = put(sources, source);
const namesIndex = name ? put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content != null ? content : null;
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
return;
}
return insert(
line,
index,
name ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] : [genColumn, sourcesIndex, sourceLine, sourceColumn]
);
}
function assert(_val) {
}
function getIndex(arr, index) {
for (let i = arr.length; i <= index; i++) {
arr[i] = [];
}
return arr[index];
}
function getColumnIndex(line, genColumn) {
let index = line.length;
for (let i = index - 1; i >= 0; index = i--) {
const current = line[i];
if (genColumn >= current[COLUMN]) break;
}
return index;
}
function insert(array, index, value) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
function removeEmptyFinalLines(mappings) {
const { length } = mappings;
let len = length;
for (let i = len - 1; i >= 0; len = i, i--) {
if (mappings[i].length > 0) break;
}
if (len < length) mappings.length = len;
}
function putAll(setarr, array) {
for (let i = 0; i < array.length; i++) put(setarr, array[i]);
}
function skipSourceless(line, index) {
if (index === 0) return true;
const prev = line[index - 1];
return prev.length === 1;
}
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
if (index === 0) return false;
const prev = line[index - 1];
if (prev.length === 1) return false;
return sourcesIndex === prev[SOURCES_INDEX] && sourceLine === prev[SOURCE_LINE] && sourceColumn === prev[SOURCE_COLUMN] && namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME);
}
function addMappingInternal(skipable, map, mapping) {
const { generated, source, original, name, content } = mapping;
if (!source) {
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
null,
null,
null,
null,
null
);
}
assert(original);
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
source,
original.line - 1,
original.column,
name,
content
);
}
}));
//# sourceMappingURL=gen-mapping.umd.js.map

File diff suppressed because one or more lines are too long


@@ -0,0 +1,88 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
export type Options = {
file?: string | null;
sourceRoot?: string | null;
};
/**
* Provides the state to generate a sourcemap.
*/
export declare class GenMapping {
private _names;
private _sources;
private _sourcesContent;
private _mappings;
private _ignoreList;
file: string | null | undefined;
sourceRoot: string | null | undefined;
constructor({ file, sourceRoot }?: Options);
}
/**
* A low-level API to associate a generated position with an original source position. Line and
* column here are 0-based, unlike `addMapping`.
*/
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void;
/**
* A high-level API to associate a generated position with an original source position. Line is
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
*/
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source?: null;
original?: null;
name?: null;
content?: null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source: string;
original: Pos;
name?: null;
content?: string | null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source: string;
original: Pos;
name: string;
content?: string | null;
}): void;
/**
* Same as `addSegment`, but will only add the segment if it generates useful information in the
* resulting map. This only works correctly if segments are added **in order**, meaning you should
* not add a segment with a lower generated line/column than one that came before.
*/
export declare const maybeAddSegment: typeof addSegment;
/**
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
* not add a mapping with a lower generated line/column than one that came before.
*/
export declare const maybeAddMapping: typeof addMapping;
/**
* Adds/removes the content of the source file to the source map.
*/
export declare function setSourceContent(map: GenMapping, source: string, content: string | null): void;
export declare function setIgnore(map: GenMapping, source: string, ignore?: boolean): void;
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export declare function toDecodedMap(map: GenMapping): DecodedSourceMap;
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export declare function toEncodedMap(map: GenMapping): EncodedSourceMap;
/**
* Constructs a new GenMapping, using the already present mappings of the input.
*/
export declare function fromMap(input: SourceMapInput): GenMapping;
/**
* Returns an array of high-level mapping objects for every recorded segment, which could then be
* passed to the `source-map` library.
*/
export declare function allMappings(map: GenMapping): Mapping[];


@@ -0,0 +1,32 @@
type Key = string | number | symbol;
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
*
* This is designed to allow synchronizing a second array with the contents of the backing array,
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
* and there are never duplicates.
*/
export declare class SetArray<T extends Key = Key> {
private _indexes;
array: readonly T[];
constructor();
}
/**
* Gets the index associated with `key` in the backing array, if it is already present.
*/
export declare function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined;
/**
* Puts `key` into the backing array, if it is not already present. Returns
* the index of the `key` in the backing array.
*/
export declare function put<T extends Key>(setarr: SetArray<T>, key: T): number;
/**
* Pops the last added item out of the SetArray.
*/
export declare function pop<T extends Key>(setarr: SetArray<T>): void;
/**
* Removes the key, if it exists in the set.
*/
export declare function remove<T extends Key>(setarr: SetArray<T>, key: T): void;
export {};


@@ -0,0 +1,12 @@
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export {};


@@ -0,0 +1,43 @@
import type { SourceMapSegment } from './sourcemap-segment';
export interface SourceMapV3 {
file?: string | null;
names: readonly string[];
sourceRoot?: string;
sources: readonly (string | null)[];
sourcesContent?: readonly (string | null)[];
version: 3;
ignoreList?: readonly number[];
}
export interface EncodedSourceMap extends SourceMapV3 {
mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
mappings: readonly SourceMapSegment[][];
}
export interface Pos {
line: number;
column: number;
}
export interface OriginalPos extends Pos {
source: string;
}
export interface BindingExpressionRange {
start: Pos;
expression: string;
}
export type Mapping = {
generated: Pos;
source: undefined;
original: undefined;
name: undefined;
} | {
generated: Pos;
source: string;
original: Pos;
name: string;
} | {
generated: Pos;
source: string;
original: Pos;
name: undefined;
};

67
node_modules/@jridgewell/gen-mapping/package.json generated vendored Normal file

@@ -0,0 +1,67 @@
{
"name": "@jridgewell/gen-mapping",
"version": "0.3.13",
"description": "Generate source maps",
"keywords": [
"source",
"map"
],
"main": "dist/gen-mapping.umd.js",
"module": "dist/gen-mapping.mjs",
"types": "types/gen-mapping.d.cts",
"files": [
"dist",
"src",
"types"
],
"exports": {
".": [
{
"import": {
"types": "./types/gen-mapping.d.mts",
"default": "./dist/gen-mapping.mjs"
},
"default": {
"types": "./types/gen-mapping.d.cts",
"default": "./dist/gen-mapping.umd.js"
}
},
"./dist/gen-mapping.umd.js"
],
"./package.json": "./package.json"
},
"scripts": {
"benchmark": "run-s build:code benchmark:*",
"benchmark:install": "cd benchmark && npm install",
"benchmark:only": "node --expose-gc benchmark/index.js",
"build": "run-s -n build:code build:types",
"build:code": "node ../../esbuild.mjs gen-mapping.ts",
"build:types": "run-s build:types:force build:types:emit build:types:mts",
"build:types:force": "rimraf tsconfig.build.tsbuildinfo",
"build:types:emit": "tsc --project tsconfig.build.json",
"build:types:mts": "node ../../mts-types.mjs",
"clean": "run-s -n clean:code clean:types",
"clean:code": "tsc --build --clean tsconfig.build.json",
"clean:types": "rimraf dist types",
"test": "run-s -n test:types test:only test:format",
"test:format": "prettier --check '{src,test}/**/*.ts'",
"test:only": "mocha",
"test:types": "eslint '{src,test}/**/*.ts'",
"lint": "run-s -n lint:types lint:format",
"lint:format": "npm run test:format -- --write",
"lint:types": "npm run test:types -- --fix",
"prepublishOnly": "npm run-s -n build test"
},
"homepage": "https://github.com/jridgewell/sourcemaps/tree/main/packages/gen-mapping",
"repository": {
"type": "git",
"url": "git+https://github.com/jridgewell/sourcemaps.git",
"directory": "packages/gen-mapping"
},
"author": "Justin Ridgewell <justin@ridgewell.name>",
"license": "MIT",
"dependencies": {
"@jridgewell/sourcemap-codec": "^1.5.0",
"@jridgewell/trace-mapping": "^0.3.24"
}
}

614
node_modules/@jridgewell/gen-mapping/src/gen-mapping.ts generated vendored Normal file

@@ -0,0 +1,614 @@
import { SetArray, put, remove } from './set-array';
import {
encode,
// encodeGeneratedRanges,
// encodeOriginalScopes
} from '@jridgewell/sourcemap-codec';
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';
import {
COLUMN,
SOURCES_INDEX,
SOURCE_LINE,
SOURCE_COLUMN,
NAMES_INDEX,
} from './sourcemap-segment';
import type { SourceMapInput } from '@jridgewell/trace-mapping';
// import type { OriginalScope, GeneratedRange } from '@jridgewell/sourcemap-codec';
import type { SourceMapSegment } from './sourcemap-segment';
import type {
DecodedSourceMap,
EncodedSourceMap,
Pos,
Mapping,
// BindingExpressionRange,
// OriginalPos,
// OriginalScopeInfo,
// GeneratedRangeInfo,
} from './types';
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
export type Options = {
file?: string | null;
sourceRoot?: string | null;
};
const NO_NAME = -1;
/**
* Provides the state to generate a sourcemap.
*/
export class GenMapping {
declare private _names: SetArray<string>;
declare private _sources: SetArray<string>;
declare private _sourcesContent: (string | null)[];
declare private _mappings: SourceMapSegment[][];
// private declare _originalScopes: OriginalScope[][];
// private declare _generatedRanges: GeneratedRange[];
declare private _ignoreList: SetArray<number>;
declare file: string | null | undefined;
declare sourceRoot: string | null | undefined;
constructor({ file, sourceRoot }: Options = {}) {
this._names = new SetArray();
this._sources = new SetArray();
this._sourcesContent = [];
this._mappings = [];
// this._originalScopes = [];
// this._generatedRanges = [];
this.file = file;
this.sourceRoot = sourceRoot;
this._ignoreList = new SetArray();
}
}
interface PublicMap {
_names: GenMapping['_names'];
_sources: GenMapping['_sources'];
_sourcesContent: GenMapping['_sourcesContent'];
_mappings: GenMapping['_mappings'];
// _originalScopes: GenMapping['_originalScopes'];
// _generatedRanges: GenMapping['_generatedRanges'];
_ignoreList: GenMapping['_ignoreList'];
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
* with public access modifiers.
*/
function cast(map: unknown): PublicMap {
return map as any;
}
/**
* A low-level API to associate a generated position with an original source position. Line and
* column here are 0-based, unlike `addMapping`.
*/
export function addSegment(
map: GenMapping,
genLine: number,
genColumn: number,
source?: null,
sourceLine?: null,
sourceColumn?: null,
name?: null,
content?: null,
): void;
export function addSegment(
map: GenMapping,
genLine: number,
genColumn: number,
source: string,
sourceLine: number,
sourceColumn: number,
name?: null,
content?: string | null,
): void;
export function addSegment(
map: GenMapping,
genLine: number,
genColumn: number,
source: string,
sourceLine: number,
sourceColumn: number,
name: string,
content?: string | null,
): void;
export function addSegment(
map: GenMapping,
genLine: number,
genColumn: number,
source?: string | null,
sourceLine?: number | null,
sourceColumn?: number | null,
name?: string | null,
content?: string | null,
): void {
return addSegmentInternal(
false,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content,
);
}
/**
* A high-level API to associate a generated position with an original source position. Line is
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
*/
export function addMapping(
map: GenMapping,
mapping: {
generated: Pos;
source?: null;
original?: null;
name?: null;
content?: null;
},
): void;
export function addMapping(
map: GenMapping,
mapping: {
generated: Pos;
source: string;
original: Pos;
name?: null;
content?: string | null;
},
): void;
export function addMapping(
map: GenMapping,
mapping: {
generated: Pos;
source: string;
original: Pos;
name: string;
content?: string | null;
},
): void;
export function addMapping(
map: GenMapping,
mapping: {
generated: Pos;
source?: string | null;
original?: Pos | null;
name?: string | null;
content?: string | null;
},
): void {
return addMappingInternal(false, map, mapping as Parameters<typeof addMappingInternal>[2]);
}
/**
* Same as `addSegment`, but will only add the segment if it generates useful information in the
* resulting map. This only works correctly if segments are added **in order**, meaning you should
* not add a segment with a lower generated line/column than one that came before.
*/
export const maybeAddSegment: typeof addSegment = (
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content,
) => {
return addSegmentInternal(
true,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content,
);
};
/**
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
* not add a mapping with a lower generated line/column than one that came before.
*/
export const maybeAddMapping: typeof addMapping = (map, mapping) => {
return addMappingInternal(true, map, mapping as Parameters<typeof addMappingInternal>[2]);
};
/**
* Adds/removes the content of the source file to the source map.
*/
export function setSourceContent(map: GenMapping, source: string, content: string | null): void {
const {
_sources: sources,
_sourcesContent: sourcesContent,
// _originalScopes: originalScopes,
} = cast(map);
const index = put(sources, source);
sourcesContent[index] = content;
// if (index === originalScopes.length) originalScopes[index] = [];
}
export function setIgnore(map: GenMapping, source: string, ignore = true) {
const {
_sources: sources,
_sourcesContent: sourcesContent,
_ignoreList: ignoreList,
// _originalScopes: originalScopes,
} = cast(map);
const index = put(sources, source);
if (index === sourcesContent.length) sourcesContent[index] = null;
// if (index === originalScopes.length) originalScopes[index] = [];
if (ignore) put(ignoreList, index);
else remove(ignoreList, index);
}
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export function toDecodedMap(map: GenMapping): DecodedSourceMap {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names,
_ignoreList: ignoreList,
// _originalScopes: originalScopes,
// _generatedRanges: generatedRanges,
} = cast(map);
removeEmptyFinalLines(mappings);
return {
version: 3,
file: map.file || undefined,
names: names.array,
sourceRoot: map.sourceRoot || undefined,
sources: sources.array,
sourcesContent,
mappings,
// originalScopes,
// generatedRanges,
ignoreList: ignoreList.array,
};
}
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export function toEncodedMap(map: GenMapping): EncodedSourceMap {
const decoded = toDecodedMap(map);
return Object.assign({}, decoded, {
// originalScopes: decoded.originalScopes.map((os) => encodeOriginalScopes(os)),
// generatedRanges: encodeGeneratedRanges(decoded.generatedRanges as GeneratedRange[]),
mappings: encode(decoded.mappings as SourceMapSegment[][]),
});
}
/**
* Constructs a new GenMapping, using the already present mappings of the input.
*/
export function fromMap(input: SourceMapInput): GenMapping {
const map = new TraceMap(input);
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
putAll(cast(gen)._names, map.names);
putAll(cast(gen)._sources, map.sources as string[]);
cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
cast(gen)._mappings = decodedMappings(map) as GenMapping['_mappings'];
// TODO: implement originalScopes/generatedRanges
if (map.ignoreList) putAll(cast(gen)._ignoreList, map.ignoreList);
return gen;
}
/**
* Returns an array of high-level mapping objects for every recorded segment, which could then be
* passed to the `source-map` library.
*/
export function allMappings(map: GenMapping): Mapping[] {
const out: Mapping[] = [];
const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
for (let i = 0; i < mappings.length; i++) {
const line = mappings[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generated = { line: i + 1, column: seg[COLUMN] };
let source: string | undefined = undefined;
let original: Pos | undefined = undefined;
let name: string | undefined = undefined;
if (seg.length !== 1) {
source = sources.array[seg[SOURCES_INDEX]];
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
}
out.push({ generated, source, original, name } as Mapping);
}
}
return out;
}
// This split declaration is only so that terser can eliminate the static initialization block.
function addSegmentInternal<S extends string | null | undefined>(
skipable: boolean,
map: GenMapping,
genLine: number,
genColumn: number,
source: S,
sourceLine: S extends string ? number : null | undefined,
sourceColumn: S extends string ? number : null | undefined,
name: S extends string ? string | null | undefined : null | undefined,
content: S extends string ? string | null | undefined : null | undefined,
): void {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names,
// _originalScopes: originalScopes,
} = cast(map);
const line = getIndex(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipable && skipSourceless(line, index)) return;
return insert(line, index, [genColumn]);
}
// Sigh, TypeScript can't figure out sourceLine and sourceColumn aren't nullish if source
// isn't nullish.
assert<number>(sourceLine);
assert<number>(sourceColumn);
const sourcesIndex = put(sources, source);
const namesIndex = name ? put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content ?? null;
// if (sourcesIndex === originalScopes.length) originalScopes[sourcesIndex] = [];
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
return;
}
return insert(
line,
index,
name
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
: [genColumn, sourcesIndex, sourceLine, sourceColumn],
);
}
function assert<T>(_val: unknown): asserts _val is T {
// noop.
}
function getIndex<T>(arr: T[][], index: number): T[] {
for (let i = arr.length; i <= index; i++) {
arr[i] = [];
}
return arr[index];
}
function getColumnIndex(line: SourceMapSegment[], genColumn: number): number {
let index = line.length;
for (let i = index - 1; i >= 0; index = i--) {
const current = line[i];
if (genColumn >= current[COLUMN]) break;
}
return index;
}
function insert<T>(array: T[], index: number, value: T) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
function removeEmptyFinalLines(mappings: SourceMapSegment[][]) {
const { length } = mappings;
let len = length;
for (let i = len - 1; i >= 0; len = i, i--) {
if (mappings[i].length > 0) break;
}
if (len < length) mappings.length = len;
}
function putAll<T extends string | number>(setarr: SetArray<T>, array: T[]) {
for (let i = 0; i < array.length; i++) put(setarr, array[i]);
}
function skipSourceless(line: SourceMapSegment[], index: number): boolean {
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
// doesn't generate any useful information.
if (index === 0) return true;
const prev = line[index - 1];
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
// generate any new information. Else, this segment will end the source/named segment and point to
// a sourceless position, which is useful.
return prev.length === 1;
}
function skipSource(
line: SourceMapSegment[],
index: number,
sourcesIndex: number,
sourceLine: number,
sourceColumn: number,
namesIndex: number,
): boolean {
// A source/named segment at the start of a line gives position at that genColumn
if (index === 0) return false;
const prev = line[index - 1];
// If the previous segment is sourceless, then we're transitioning to a source.
if (prev.length === 1) return false;
// If the previous segment maps to the exact same source position, then this segment doesn't
// provide any new position information.
return (
sourcesIndex === prev[SOURCES_INDEX] &&
sourceLine === prev[SOURCE_LINE] &&
sourceColumn === prev[SOURCE_COLUMN] &&
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME)
);
}
function addMappingInternal<S extends string | null | undefined>(
skipable: boolean,
map: GenMapping,
mapping: {
generated: Pos;
source: S;
original: S extends string ? Pos : null | undefined;
name: S extends string ? string | null | undefined : null | undefined;
content: S extends string ? string | null | undefined : null | undefined;
},
) {
const { generated, source, original, name, content } = mapping;
if (!source) {
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
null,
null,
null,
null,
null,
);
}
assert<Pos>(original);
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
source as string,
original.line - 1,
original.column,
name,
content,
);
}
/*
export function addOriginalScope(
map: GenMapping,
data: {
start: Pos;
end: Pos;
source: string;
kind: string;
name?: string;
variables?: string[];
},
): OriginalScopeInfo {
const { start, end, source, kind, name, variables } = data;
const {
_sources: sources,
_sourcesContent: sourcesContent,
_originalScopes: originalScopes,
_names: names,
} = cast(map);
const index = put(sources, source);
if (index === sourcesContent.length) sourcesContent[index] = null;
if (index === originalScopes.length) originalScopes[index] = [];
const kindIndex = put(names, kind);
const scope: OriginalScope = name
? [start.line - 1, start.column, end.line - 1, end.column, kindIndex, put(names, name)]
: [start.line - 1, start.column, end.line - 1, end.column, kindIndex];
if (variables) {
scope.vars = variables.map((v) => put(names, v));
}
const len = originalScopes[index].push(scope);
return [index, len - 1, variables];
}
*/
// Generated Ranges
/*
export function addGeneratedRange(
map: GenMapping,
data: {
start: Pos;
isScope: boolean;
originalScope?: OriginalScopeInfo;
callsite?: OriginalPos;
},
): GeneratedRangeInfo {
const { start, isScope, originalScope, callsite } = data;
const {
_originalScopes: originalScopes,
_sources: sources,
_sourcesContent: sourcesContent,
_generatedRanges: generatedRanges,
} = cast(map);
const range: GeneratedRange = [
start.line - 1,
start.column,
0,
0,
originalScope ? originalScope[0] : -1,
originalScope ? originalScope[1] : -1,
];
if (originalScope?.[2]) {
range.bindings = originalScope[2].map(() => [[-1]]);
}
if (callsite) {
const index = put(sources, callsite.source);
if (index === sourcesContent.length) sourcesContent[index] = null;
if (index === originalScopes.length) originalScopes[index] = [];
range.callsite = [index, callsite.line - 1, callsite.column];
}
if (isScope) range.isScope = true;
generatedRanges.push(range);
return [range, originalScope?.[2]];
}
export function setEndPosition(range: GeneratedRangeInfo, pos: Pos) {
range[0][2] = pos.line - 1;
range[0][3] = pos.column;
}
export function addBinding(
map: GenMapping,
range: GeneratedRangeInfo,
variable: string,
expression: string | BindingExpressionRange,
) {
const { _names: names } = cast(map);
const bindings = (range[0].bindings ||= []);
const vars = range[1];
const index = vars!.indexOf(variable);
const binding = getIndex(bindings, index);
if (typeof expression === 'string') binding[0] = [put(names, expression)];
else {
const { start } = expression;
binding.push([put(names, expression.expression), start.line - 1, start.column]);
}
}
*/

82
node_modules/@jridgewell/gen-mapping/src/set-array.ts generated vendored Normal file

@@ -0,0 +1,82 @@
type Key = string | number | symbol;
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
*
* This is designed to allow synchronizing a second array with the contents of the backing array,
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
* and there are never duplicates.
*/
export class SetArray<T extends Key = Key> {
declare private _indexes: Record<T, number | undefined>;
declare array: readonly T[];
constructor() {
this._indexes = { __proto__: null } as any;
this.array = [];
}
}
interface PublicSet<T extends Key> {
array: T[];
_indexes: SetArray<T>['_indexes'];
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the set into a type
* with public access modifiers.
*/
function cast<T extends Key>(set: SetArray<T>): PublicSet<T> {
return set as any;
}
/**
* Gets the index associated with `key` in the backing array, if it is already present.
*/
export function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {
return cast(setarr)._indexes[key];
}
/**
* Puts `key` into the backing array, if it is not already present. Returns
* the index of the `key` in the backing array.
*/
export function put<T extends Key>(setarr: SetArray<T>, key: T): number {
// The key may or may not be present. If it is present, it's a number.
const index = get(setarr, key);
if (index !== undefined) return index;
const { array, _indexes: indexes } = cast(setarr);
const length = array.push(key);
return (indexes[key] = length - 1);
}
/**
* Pops the last added item out of the SetArray.
*/
export function pop<T extends Key>(setarr: SetArray<T>): void {
const { array, _indexes: indexes } = cast(setarr);
if (array.length === 0) return;
const last = array.pop()!;
indexes[last] = undefined;
}
/**
* Removes the key, if it exists in the set.
*/
export function remove<T extends Key>(setarr: SetArray<T>, key: T): void {
const index = get(setarr, key);
if (index === undefined) return;
const { array, _indexes: indexes } = cast(setarr);
for (let i = index + 1; i < array.length; i++) {
const k = array[i];
array[i - 1] = k;
indexes[k]!--;
}
indexes[key] = undefined;
array.pop();
}


@@ -0,0 +1,16 @@
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
export type SourceMapSegment =
| [GeneratedColumn]
| [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn]
| [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export const COLUMN = 0;
export const SOURCES_INDEX = 1;
export const SOURCE_LINE = 2;
export const SOURCE_COLUMN = 3;
export const NAMES_INDEX = 4;

61
node_modules/@jridgewell/gen-mapping/src/types.ts generated vendored Normal file

@@ -0,0 +1,61 @@
// import type { GeneratedRange, OriginalScope } from '@jridgewell/sourcemap-codec';
import type { SourceMapSegment } from './sourcemap-segment';
export interface SourceMapV3 {
file?: string | null;
names: readonly string[];
sourceRoot?: string;
sources: readonly (string | null)[];
sourcesContent?: readonly (string | null)[];
version: 3;
ignoreList?: readonly number[];
}
export interface EncodedSourceMap extends SourceMapV3 {
mappings: string;
// originalScopes: string[];
// generatedRanges: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
mappings: readonly SourceMapSegment[][];
// originalScopes: readonly OriginalScope[][];
// generatedRanges: readonly GeneratedRange[];
}
export interface Pos {
line: number; // 1-based
column: number; // 0-based
}
export interface OriginalPos extends Pos {
source: string;
}
export interface BindingExpressionRange {
start: Pos;
expression: string;
}
// export type OriginalScopeInfo = [number, number, string[] | undefined];
// export type GeneratedRangeInfo = [GeneratedRange, string[] | undefined];
export type Mapping =
| {
generated: Pos;
source: undefined;
original: undefined;
name: undefined;
}
| {
generated: Pos;
source: string;
original: Pos;
name: string;
}
| {
generated: Pos;
source: string;
original: Pos;
name: undefined;
};


@@ -0,0 +1,89 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types.cts';
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
export type Options = {
file?: string | null;
sourceRoot?: string | null;
};
/**
* Provides the state to generate a sourcemap.
*/
export declare class GenMapping {
private _names;
private _sources;
private _sourcesContent;
private _mappings;
private _ignoreList;
file: string | null | undefined;
sourceRoot: string | null | undefined;
constructor({ file, sourceRoot }?: Options);
}
/**
* A low-level API to associate a generated position with an original source position. Line and
* column here are 0-based, unlike `addMapping`.
*/
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void;
/**
* A high-level API to associate a generated position with an original source position. Line is
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
*/
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source?: null;
original?: null;
name?: null;
content?: null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source: string;
original: Pos;
name?: null;
content?: string | null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source: string;
original: Pos;
name: string;
content?: string | null;
}): void;
/**
* Same as `addSegment`, but will only add the segment if it generates useful information in the
* resulting map. This only works correctly if segments are added **in order**, meaning you should
* not add a segment with a lower generated line/column than one that came before.
*/
export declare const maybeAddSegment: typeof addSegment;
/**
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
* not add a mapping with a lower generated line/column than one that came before.
*/
export declare const maybeAddMapping: typeof addMapping;
/**
* Adds/removes the content of the source file to the source map.
*/
export declare function setSourceContent(map: GenMapping, source: string, content: string | null): void;
export declare function setIgnore(map: GenMapping, source: string, ignore?: boolean): void;
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export declare function toDecodedMap(map: GenMapping): DecodedSourceMap;
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export declare function toEncodedMap(map: GenMapping): EncodedSourceMap;
/**
* Constructs a new GenMapping, using the already present mappings of the input.
*/
export declare function fromMap(input: SourceMapInput): GenMapping;
/**
* Returns an array of high-level mapping objects for every recorded segment, which could then be
* passed to the `source-map` library.
*/
export declare function allMappings(map: GenMapping): Mapping[];
//# sourceMappingURL=gen-mapping.d.ts.map
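A minimal sketch (not from the package docs) exercising the lower-level declarations above: `addSegment` is fully 0-based, `setIgnore` flags a source for the map's `ignoreList`, and `allMappings` reports every recorded segment in the high-level (1-based line) form. The file name `input.ts`, its content, and the name `bar` are made up for illustration:

```typescript
import {
  GenMapping,
  addSegment,
  setIgnore,
  allMappings,
  toEncodedMap,
} from '@jridgewell/gen-mapping';

const map = new GenMapping({ file: 'out.js' });

// 0-based generated line 0, column 4 maps to 0-based line 0, column 6 of
// input.ts, recording the name "bar" and the source's content in one call.
addSegment(map, 0, 4, 'input.ts', 0, 6, 'bar', 'const bar = 1;');

// Mark input.ts so it lands in the generated map's ignoreList.
setIgnore(map, 'input.ts');

console.log(allMappings(map));
// expected: [ { generated: { line: 1, column: 4 }, source: 'input.ts',
//               original: { line: 1, column: 6 }, name: 'bar' } ]

console.log(toEncodedMap(map).ignoreList);
// expected: [0], the index of input.ts in sources
```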


@@ -0,0 +1 @@
{"version":3,"file":"gen-mapping.d.ts","sourceRoot":"","sources":["../src/gen-mapping.ts"],"names":[],"mappings":"AAgBA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAGhE,OAAO,KAAK,EACV,gBAAgB,EAChB,gBAAgB,EAChB,GAAG,EACH,OAAO,EAKR,MAAM,SAAS,CAAC;AAEjB,YAAY,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,OAAO,EAAE,CAAC;AAE5D,MAAM,MAAM,OAAO,GAAG;IACpB,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5B,CAAC;AAIF;;GAEG;AACH,qBAAa,UAAU;IACrB,QAAgB,MAAM,CAAmB;IACzC,QAAgB,QAAQ,CAAmB;IAC3C,QAAgB,eAAe,CAAoB;IACnD,QAAgB,SAAS,CAAuB;IAGhD,QAAgB,WAAW,CAAmB;IACtC,IAAI,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IAChC,UAAU,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;gBAElC,EAAE,IAAI,EAAE,UAAU,EAAE,GAAE,OAAY;CAW/C;AAoBD;;;GAGG;AACH,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,EACjB,MAAM,CAAC,EAAE,IAAI,EACb,UAAU,CAAC,EAAE,IAAI,EACjB,YAAY,CAAC,EAAE,IAAI,EACnB,IAAI,CAAC,EAAE,IAAI,EACX,OAAO,CAAC,EAAE,IAAI,GACb,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,MAAM,EACd,UAAU,EAAE,MAAM,EAClB,YAAY,EAAE,MAAM,EACpB,IAAI,CAAC,EAAE,IAAI,EACX,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,GACtB,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,MAAM,EACd,UAAU,EAAE,MAAM,EAClB,YAAY,EAAE,MAAM,EACpB,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,GACtB,IAAI,CAAC;AAwBR;;;GAGG;AACH,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE;IACP,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,CAAC,EAAE,IAAI,CAAC;IACd,QAAQ,CAAC,EAAE,IAAI,CAAC;IAChB,IAAI,CAAC,EAAE,IAAI,CAAC;IACZ,OAAO,CAAC,EAAE,IAAI,CAAC;CAChB,GACA,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE;IACP,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,IAAI,CAAC;IACZ,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACzB,GACA,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE;IACP,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACzB,GACA,IAAI,CAAC;AAcR;;;;GAIG;AACH,eAAO,MAAM,eAAe,EAAE,OAAO,UAqBpC,CAAC;AAEF;;;;GAIG;AACH,eAAO,MAAM,eAAe,EAAE,OAAO,UAEpC,CAAC;AAEF;;GAEG;AACH,wBAAgB,gBAAgB,CAAC,GAAG,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,GAAG,IAAI,CAS9F;AAED,wBAAgB,SAAS,CAAC,GAAG,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,UAAO,QAYvE;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,GAAG,EAAE,UAAU,GAAG,gBAAgB,CAwB9D;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,GAAG,EAAE,UAAU,GAAG,gBAAgB,CAO9D;AAED;;GAEG;AACH,wBAAgB,OAAO,CAAC,KAAK,EAAE,cAAc,GAAG,UAAU,CAYzD;AAED;;;GAGG;AACH,wBAAgB,WAAW,CAAC,GAAG,EAAE,UAAU,GAAG,OAAO,EAAE,CA0BtD"}


@@ -0,0 +1,89 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types.mts';
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
export type Options = {
file?: string | null;
sourceRoot?: string | null;
};
/**
* Provides the state to generate a sourcemap.
*/
export declare class GenMapping {
private _names;
private _sources;
private _sourcesContent;
private _mappings;
private _ignoreList;
file: string | null | undefined;
sourceRoot: string | null | undefined;
constructor({ file, sourceRoot }?: Options);
}
/**
* A low-level API to associate a generated position with an original source position. Line and
* column here are 0-based, unlike `addMapping`.
*/
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void;
/**
* A high-level API to associate a generated position with an original source position. Line is
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
*/
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source?: null;
original?: null;
name?: null;
content?: null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source: string;
original: Pos;
name?: null;
content?: string | null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source: string;
original: Pos;
name: string;
content?: string | null;
}): void;
/**
* Same as `addSegment`, but will only add the segment if it generates useful information in the
* resulting map. This only works correctly if segments are added **in order**, meaning you should
* not add a segment with a lower generated line/column than one that came before.
*/
export declare const maybeAddSegment: typeof addSegment;
/**
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
* not add a mapping with a lower generated line/column than one that came before.
*/
export declare const maybeAddMapping: typeof addMapping;
/**
* Adds/removes the content of the source file to the source map.
*/
export declare function setSourceContent(map: GenMapping, source: string, content: string | null): void;
export declare function setIgnore(map: GenMapping, source: string, ignore?: boolean): void;
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export declare function toDecodedMap(map: GenMapping): DecodedSourceMap;
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export declare function toEncodedMap(map: GenMapping): EncodedSourceMap;
/**
* Constructs a new GenMapping, using the already present mappings of the input.
*/
export declare function fromMap(input: SourceMapInput): GenMapping;
/**
* Returns an array of high-level mapping objects for every recorded segment, which could then be
* passed to the `source-map` library.
*/
export declare function allMappings(map: GenMapping): Mapping[];
//# sourceMappingURL=gen-mapping.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"gen-mapping.d.ts","sourceRoot":"","sources":["../src/gen-mapping.ts"],"names":[],"mappings":"AAgBA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAGhE,OAAO,KAAK,EACV,gBAAgB,EAChB,gBAAgB,EAChB,GAAG,EACH,OAAO,EAKR,MAAM,SAAS,CAAC;AAEjB,YAAY,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,OAAO,EAAE,CAAC;AAE5D,MAAM,MAAM,OAAO,GAAG;IACpB,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5B,CAAC;AAIF;;GAEG;AACH,qBAAa,UAAU;IACrB,QAAgB,MAAM,CAAmB;IACzC,QAAgB,QAAQ,CAAmB;IAC3C,QAAgB,eAAe,CAAoB;IACnD,QAAgB,SAAS,CAAuB;IAGhD,QAAgB,WAAW,CAAmB;IACtC,IAAI,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IAChC,UAAU,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;gBAElC,EAAE,IAAI,EAAE,UAAU,EAAE,GAAE,OAAY;CAW/C;AAoBD;;;GAGG;AACH,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,EACjB,MAAM,CAAC,EAAE,IAAI,EACb,UAAU,CAAC,EAAE,IAAI,EACjB,YAAY,CAAC,EAAE,IAAI,EACnB,IAAI,CAAC,EAAE,IAAI,EACX,OAAO,CAAC,EAAE,IAAI,GACb,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,MAAM,EACd,UAAU,EAAE,MAAM,EAClB,YAAY,EAAE,MAAM,EACpB,IAAI,CAAC,EAAE,IAAI,EACX,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,GACtB,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,MAAM,EACd,UAAU,EAAE,MAAM,EAClB,YAAY,EAAE,MAAM,EACpB,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,GACtB,IAAI,CAAC;AAwBR;;;GAGG;AACH,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE;IACP,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,CAAC,EAAE,IAAI,CAAC;IACd,QAAQ,CAAC,EAAE,IAAI,CAAC;IAChB,IAAI,CAAC,EAAE,IAAI,CAAC;IACZ,OAAO,CAAC,EAAE,IAAI,CAAC;CAChB,GACA,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE;IACP,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,IAAI,CAAC;IACZ,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACzB,GACA,IAAI,CAAC;AACR,wBAAgB,UAAU,CACxB,GAAG,EAAE,UAAU,EACf,OAAO,EAAE;IACP,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACzB,GACA,IAAI,CAAC;AAcR;;;;GAIG;AACH,eAAO,MAAM,eAAe,EAAE,OAAO,UAqBpC,CAAC;AAEF;;;;GAIG;AACH,eAAO,MAAM,eAAe,EAAE,OAAO,UAEpC,CAAC;AAEF;;GAEG;AACH,wBAAgB,gBAAgB,CAAC,GAAG,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,GAAG,IAAI,CAS9F;AAED,wBAAgB,SAAS,CAAC,GAAG,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,UAAO,QAYvE;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,GAAG,EAAE,UAAU,GAAG,gBAAgB,CAwB9D;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,GAAG,EAAE,UAAU,GAAG,gBAAgB,CAO9D;AAED;;GAEG;AACH,wBAAgB,OAAO,CAAC,KAAK,EAAE,cAAc,GAAG,UAAU,CAYzD;AAED;;;GAGG;AACH,wBAAgB,WAAW,CAAC,GAAG,EAAE,UAAU,GAAG,OAAO,EAAE,CA0BtD"}


@@ -0,0 +1,33 @@
type Key = string | number | symbol;
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
*
* This is designed to allow synchronizing a second array with the contents of the backing array,
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
* and there are never duplicates.
*/
export declare class SetArray<T extends Key = Key> {
private _indexes;
array: readonly T[];
constructor();
}
/**
* Gets the index associated with `key` in the backing array, if it is already present.
*/
export declare function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined;
/**
* Puts `key` into the backing array, if it is not already present. Returns
* the index of the `key` in the backing array.
*/
export declare function put<T extends Key>(setarr: SetArray<T>, key: T): number;
/**
* Pops the last added item out of the SetArray.
*/
export declare function pop<T extends Key>(setarr: SetArray<T>): void;
/**
* Removes the key, if it exists in the set.
*/
export declare function remove<T extends Key>(setarr: SetArray<T>, key: T): void;
export {};
//# sourceMappingURL=set-array.d.ts.map
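A small usage sketch for the declarations above. It assumes this copy resolves as the standalone `@jridgewell/set-array` package; if it is only an internal module here, the import specifier would differ:

```typescript
import { SetArray, put, get, pop } from '@jridgewell/set-array';

const sources = new SetArray<string>();
const sourcesContent: (string | null)[] = [];

// put() returns a stable index and only inserts on first sight, which keeps a
// parallel array (here, sourcesContent) in sync with the backing array.
const i = put(sources, 'input.ts');
sourcesContent[i] = 'export const x = 1;';

console.log(put(sources, 'input.ts') === i); // true: no duplicate entry
console.log(get(sources, 'missing.ts'));     // undefined: never added
console.log(sources.array);                  // ['input.ts']

pop(sources); // drops the most recently added key
```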


@@ -0,0 +1 @@
{"version":3,"file":"set-array.d.ts","sourceRoot":"","sources":["../src/set-array.ts"],"names":[],"mappings":"AAAA,KAAK,GAAG,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC;AAEpC;;;;;;;GAOG;AACH,qBAAa,QAAQ,CAAC,CAAC,SAAS,GAAG,GAAG,GAAG;IACvC,QAAgB,QAAQ,CAAgC;IAChD,KAAK,EAAE,SAAS,CAAC,EAAE,CAAC;;CAM7B;AAeD;;GAEG;AACH,wBAAgB,GAAG,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,MAAM,GAAG,SAAS,CAElF;AAED;;;GAGG;AACH,wBAAgB,GAAG,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,MAAM,CAStE;AAED;;GAEG;AACH,wBAAgB,GAAG,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,GAAG,IAAI,CAM5D;AAED;;GAEG;AACH,wBAAgB,MAAM,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,IAAI,CAYvE"}


@@ -0,0 +1,33 @@
type Key = string | number | symbol;
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
*
* This is designed to allow synchronizing a second array with the contents of the backing array,
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
* and there are never duplicates.
*/
export declare class SetArray<T extends Key = Key> {
private _indexes;
array: readonly T[];
constructor();
}
/**
* Gets the index associated with `key` in the backing array, if it is already present.
*/
export declare function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined;
/**
* Puts `key` into the backing array, if it is not already present. Returns
* the index of the `key` in the backing array.
*/
export declare function put<T extends Key>(setarr: SetArray<T>, key: T): number;
/**
* Pops the last added item out of the SetArray.
*/
export declare function pop<T extends Key>(setarr: SetArray<T>): void;
/**
* Removes the key, if it exists in the set.
*/
export declare function remove<T extends Key>(setarr: SetArray<T>, key: T): void;
export {};
//# sourceMappingURL=set-array.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"set-array.d.ts","sourceRoot":"","sources":["../src/set-array.ts"],"names":[],"mappings":"AAAA,KAAK,GAAG,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC;AAEpC;;;;;;;GAOG;AACH,qBAAa,QAAQ,CAAC,CAAC,SAAS,GAAG,GAAG,GAAG;IACvC,QAAgB,QAAQ,CAAgC;IAChD,KAAK,EAAE,SAAS,CAAC,EAAE,CAAC;;CAM7B;AAeD;;GAEG;AACH,wBAAgB,GAAG,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,MAAM,GAAG,SAAS,CAElF;AAED;;;GAGG;AACH,wBAAgB,GAAG,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,MAAM,CAStE;AAED;;GAEG;AACH,wBAAgB,GAAG,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,GAAG,IAAI,CAM5D;AAED;;GAEG;AACH,wBAAgB,MAAM,CAAC,CAAC,SAAS,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,IAAI,CAYvE"}


@@ -0,0 +1,13 @@
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export {};
//# sourceMappingURL=sourcemap-segment.d.ts.map
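For reference (not part of the file), a sketch of how the tuple shape and index constants above are read; the relative import path is an assumption about the internal module layout:

```typescript
import {
  COLUMN,
  SOURCES_INDEX,
  SOURCE_LINE,
  SOURCE_COLUMN,
  NAMES_INDEX,
  type SourceMapSegment,
} from './sourcemap-segment';

// A 1-tuple only advances the generated column; 4- and 5-tuples add the
// original position (and optionally a names index). Everything is 0-based.
const segment: SourceMapSegment = [9, 0, 0, 9, 0];

if (segment.length === 5) {
  console.log(segment[COLUMN], segment[SOURCES_INDEX]);      // 9 0
  console.log(segment[SOURCE_LINE], segment[SOURCE_COLUMN]); // 0 9
  console.log(segment[NAMES_INDEX]);                         // 0
} else if (segment.length === 4) {
  console.log(segment[COLUMN], segment[SOURCE_LINE]);        // no names index
} else {
  console.log(segment[COLUMN]); // sourceless segment: only a generated column
}
```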


@@ -0,0 +1 @@
{"version":3,"file":"sourcemap-segment.d.ts","sourceRoot":"","sources":["../src/sourcemap-segment.ts"],"names":[],"mappings":"AAAA,KAAK,eAAe,GAAG,MAAM,CAAC;AAC9B,KAAK,YAAY,GAAG,MAAM,CAAC;AAC3B,KAAK,UAAU,GAAG,MAAM,CAAC;AACzB,KAAK,YAAY,GAAG,MAAM,CAAC;AAC3B,KAAK,UAAU,GAAG,MAAM,CAAC;AAEzB,MAAM,MAAM,gBAAgB,GACxB,CAAC,eAAe,CAAC,GACjB,CAAC,eAAe,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,CAAC,GACzD,CAAC,eAAe,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;AAE1E,eAAO,MAAM,MAAM,IAAI,CAAC;AACxB,eAAO,MAAM,aAAa,IAAI,CAAC;AAC/B,eAAO,MAAM,WAAW,IAAI,CAAC;AAC7B,eAAO,MAAM,aAAa,IAAI,CAAC;AAC/B,eAAO,MAAM,WAAW,IAAI,CAAC"}


@@ -0,0 +1,13 @@
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export {};
//# sourceMappingURL=sourcemap-segment.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"sourcemap-segment.d.ts","sourceRoot":"","sources":["../src/sourcemap-segment.ts"],"names":[],"mappings":"AAAA,KAAK,eAAe,GAAG,MAAM,CAAC;AAC9B,KAAK,YAAY,GAAG,MAAM,CAAC;AAC3B,KAAK,UAAU,GAAG,MAAM,CAAC;AACzB,KAAK,YAAY,GAAG,MAAM,CAAC;AAC3B,KAAK,UAAU,GAAG,MAAM,CAAC;AAEzB,MAAM,MAAM,gBAAgB,GACxB,CAAC,eAAe,CAAC,GACjB,CAAC,eAAe,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,CAAC,GACzD,CAAC,eAAe,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;AAE1E,eAAO,MAAM,MAAM,IAAI,CAAC;AACxB,eAAO,MAAM,aAAa,IAAI,CAAC;AAC/B,eAAO,MAAM,WAAW,IAAI,CAAC;AAC7B,eAAO,MAAM,aAAa,IAAI,CAAC;AAC/B,eAAO,MAAM,WAAW,IAAI,CAAC"}

node_modules/@jridgewell/gen-mapping/types/types.d.cts generated vendored Normal file

@@ -0,0 +1,44 @@
import type { SourceMapSegment } from './sourcemap-segment.cts';
export interface SourceMapV3 {
file?: string | null;
names: readonly string[];
sourceRoot?: string;
sources: readonly (string | null)[];
sourcesContent?: readonly (string | null)[];
version: 3;
ignoreList?: readonly number[];
}
export interface EncodedSourceMap extends SourceMapV3 {
mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
mappings: readonly SourceMapSegment[][];
}
export interface Pos {
line: number;
column: number;
}
export interface OriginalPos extends Pos {
source: string;
}
export interface BindingExpressionRange {
start: Pos;
expression: string;
}
export type Mapping = {
generated: Pos;
source: undefined;
original: undefined;
name: undefined;
} | {
generated: Pos;
source: string;
original: Pos;
name: string;
} | {
generated: Pos;
source: string;
original: Pos;
name: undefined;
};
//# sourceMappingURL=types.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAE5D,MAAM,WAAW,WAAW;IAC1B,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,KAAK,EAAE,SAAS,MAAM,EAAE,CAAC;IACzB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,SAAS,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,CAAC;IACpC,cAAc,CAAC,EAAE,SAAS,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,CAAC;IAC5C,OAAO,EAAE,CAAC,CAAC;IACX,UAAU,CAAC,EAAE,SAAS,MAAM,EAAE,CAAC;CAChC;AAED,MAAM,WAAW,gBAAiB,SAAQ,WAAW;IACnD,QAAQ,EAAE,MAAM,CAAC;CAGlB;AAED,MAAM,WAAW,gBAAiB,SAAQ,WAAW;IACnD,QAAQ,EAAE,SAAS,gBAAgB,EAAE,EAAE,CAAC;CAGzC;AAED,MAAM,WAAW,GAAG;IAClB,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,WAAY,SAAQ,GAAG;IACtC,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,sBAAsB;IACrC,KAAK,EAAE,GAAG,CAAC;IACX,UAAU,EAAE,MAAM,CAAC;CACpB;AAKD,MAAM,MAAM,OAAO,GACf;IACE,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,SAAS,CAAC;IAClB,QAAQ,EAAE,SAAS,CAAC;IACpB,IAAI,EAAE,SAAS,CAAC;CACjB,GACD;IACE,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;CACd,GACD;IACE,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,EAAE,SAAS,CAAC;CACjB,CAAC"}

node_modules/@jridgewell/gen-mapping/types/types.d.mts generated vendored Normal file

@@ -0,0 +1,44 @@
import type { SourceMapSegment } from './sourcemap-segment.mts';
export interface SourceMapV3 {
file?: string | null;
names: readonly string[];
sourceRoot?: string;
sources: readonly (string | null)[];
sourcesContent?: readonly (string | null)[];
version: 3;
ignoreList?: readonly number[];
}
export interface EncodedSourceMap extends SourceMapV3 {
mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
mappings: readonly SourceMapSegment[][];
}
export interface Pos {
line: number;
column: number;
}
export interface OriginalPos extends Pos {
source: string;
}
export interface BindingExpressionRange {
start: Pos;
expression: string;
}
export type Mapping = {
generated: Pos;
source: undefined;
original: undefined;
name: undefined;
} | {
generated: Pos;
source: string;
original: Pos;
name: string;
} | {
generated: Pos;
source: string;
original: Pos;
name: undefined;
};
//# sourceMappingURL=types.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAE5D,MAAM,WAAW,WAAW;IAC1B,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,KAAK,EAAE,SAAS,MAAM,EAAE,CAAC;IACzB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,SAAS,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,CAAC;IACpC,cAAc,CAAC,EAAE,SAAS,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,CAAC;IAC5C,OAAO,EAAE,CAAC,CAAC;IACX,UAAU,CAAC,EAAE,SAAS,MAAM,EAAE,CAAC;CAChC;AAED,MAAM,WAAW,gBAAiB,SAAQ,WAAW;IACnD,QAAQ,EAAE,MAAM,CAAC;CAGlB;AAED,MAAM,WAAW,gBAAiB,SAAQ,WAAW;IACnD,QAAQ,EAAE,SAAS,gBAAgB,EAAE,EAAE,CAAC;CAGzC;AAED,MAAM,WAAW,GAAG;IAClB,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,WAAY,SAAQ,GAAG;IACtC,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,sBAAsB;IACrC,KAAK,EAAE,GAAG,CAAC;IACX,UAAU,EAAE,MAAM,CAAC;CACpB;AAKD,MAAM,MAAM,OAAO,GACf;IACE,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,SAAS,CAAC;IAClB,QAAQ,EAAE,SAAS,CAAC;IACpB,IAAI,EAAE,SAAS,CAAC;CACjB,GACD;IACE,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;CACd,GACD;IACE,SAAS,EAAE,GAAG,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,EAAE,SAAS,CAAC;CACjB,CAAC"}

node_modules/@jridgewell/remapping/LICENSE generated vendored Normal file

@@ -0,0 +1,19 @@
Copyright 2024 Justin Ridgewell <justin@ridgewell.name>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

node_modules/@jridgewell/remapping/README.md generated vendored Normal file

@@ -0,0 +1,218 @@
# @jridgewell/remapping
> Remap sequential sourcemaps through transformations to point at the original source code
Remapping allows you to take the sourcemaps generated through transforming your code and "remap"
them to the original source locations. Think "my minified code, transformed with babel and bundled
with webpack", all pointing to the correct location in your original source code.
With remapping, none of your source code transformations need to be aware of the input's sourcemap,
they only need to generate an output sourcemap. This greatly simplifies building custom
transformations (think a find-and-replace).
## Installation
```sh
npm install @jridgewell/remapping
```
## Usage
```typescript
function remapping(
map: SourceMap | SourceMap[],
loader: (file: string, ctx: LoaderContext) => (SourceMap | null | undefined),
options?: { excludeContent: boolean, decodedMappings: boolean }
): SourceMap;
// LoaderContext gives the loader the importing sourcemap, tree depth, the ability to override the
// "source" location (where child sources are resolved relative to, or the location of original
// source), and the ability to override the "content" of an original source for inclusion in the
// output sourcemap.
type LoaderContext = {
readonly importer: string;
readonly depth: number;
source: string;
content: string | null | undefined;
}
```
`remapping` takes the final output sourcemap, and a `loader` function. For every source file pointer
in the sourcemap, the `loader` will be called with the resolved path. If the path itself represents
a transformed file (it has a sourcemap associated with it), then the `loader` should return that
sourcemap. If not, the path will be treated as original, untransformed source code.
```js
// Babel transformed "helloworld.js" into "transformed.js"
const transformedMap = JSON.stringify({
file: 'transformed.js',
// 1st column of 2nd line of output file translates into the 1st source
// file, line 3, column 2
mappings: ';CAEE',
sources: ['helloworld.js'],
version: 3,
});
// Uglify minified "transformed.js" into "transformed.min.js"
const minifiedTransformedMap = JSON.stringify({
file: 'transformed.min.js',
// 0th column of 1st line of output file translates into the 1st source
// file, line 2, column 1.
mappings: 'AACC',
names: [],
sources: ['transformed.js'],
version: 3,
});
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
// The "transformed.js" file is an transformed file.
if (file === 'transformed.js') {
// The root importer is empty.
console.assert(ctx.importer === '');
// The depth in the sourcemap tree we're currently loading.
// The root `minifiedTransformedMap` is depth 0, and its source children are depth 1, etc.
console.assert(ctx.depth === 1);
return transformedMap;
}
// Loader will be called to load transformedMap's source file pointers as well.
console.assert(file === 'helloworld.js');
// `transformed.js`'s sourcemap points into `helloworld.js`.
console.assert(ctx.importer === 'transformed.js');
// This is a source child of `transformed`, which is a source child of `minifiedTransformedMap`.
console.assert(ctx.depth === 2);
return null;
}
);
console.log(remapped);
// {
// file: 'transformed.min.js',
// mappings: 'AAEE',
// sources: ['helloworld.js'],
// version: 3,
// };
```
In this example, `loader` will be called twice:
1. `"transformed.js"`, the first source file pointer in the `minifiedTransformedMap`. We return the
associated sourcemap for it (it's a transformed file, after all) so that sourcemap locations can
be traced through it into the source files it represents.
2. `"helloworld.js"`, our original, unmodified source code. This file does not have a sourcemap, so
we return `null`.
The `remapped` sourcemap now points from `transformed.min.js` into locations in `helloworld.js`. If
you were to read the `mappings`, it says "the 0th column of the 1st line of the output file points to
line 3, column 2 of the file `helloworld.js`".
### Multiple transformations of a file
As a convenience, if you have multiple single-source transformations of a file, you may pass an
array of sourcemap files in the order of most-recent transformation sourcemap first. Note that this
changes the `importer` and `depth` of each call to our loader. So our above example could have been
written as:
```js
const remapped = remapping(
[minifiedTransformedMap, transformedMap],
() => null
);
console.log(remapped);
// {
// file: 'transformed.min.js',
// mappings: 'AAEE',
// sources: ['helloworld.js'],
// version: 3,
// };
```
### Advanced control of the loading graph
#### `source`
The `source` property can be overridden to any value to change the location of the current load. Eg,
for an original source file, it allows us to change the location to the original source regardless
of what the sourcemap source entry says. And for transformed files, it allows us to change the
relative resolving location for child sources of the loaded sourcemap.
```js
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
if (file === 'transformed.js') {
// We pretend the transformed.js file actually exists in the 'src/' directory. When the nested
// source files are loaded, they will now be relative to `src/`.
ctx.source = 'src/transformed.js';
return transformedMap;
}
console.assert(file === 'src/helloworld.js');
// We could further change the source of this original file, eg, to be inside a nested directory
// itself. This will be reflected in the remapped sourcemap.
ctx.source = 'src/nested/helloworld.js';
return null;
}
);
console.log(remapped);
// {
// …,
// sources: ['src/nested/helloworld.js'],
// };
```
#### `content`
The `content` property can be overridden when we encounter an original source file. Eg, this allows
you to manually provide the source content of the original file regardless of whether the
`sourcesContent` field is present in the parent sourcemap. It can also be set to `null` to remove
the source content.
```js
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
if (file === 'transformed.js') {
// transformedMap does not include a `sourcesContent` field, so usually the remapped sourcemap
// would not include any `sourcesContent` values.
return transformedMap;
}
console.assert(file === 'helloworld.js');
// We can read the file to provide the source content.
ctx.content = fs.readFileSync(file, 'utf8');
return null;
}
);
console.log(remapped);
// {
// …,
// sourcesContent: [
// 'console.log("Hello world!")',
// ],
// };
```
### Options
#### excludeContent
By default, `excludeContent` is `false`. Passing `{ excludeContent: true }` will exclude the
`sourcesContent` field from the returned sourcemap. This is mainly useful when you want to reduce
the size of the sourcemap.
#### decodedMappings
By default, `decodedMappings` is `false`. Passing `{ decodedMappings: true }` will leave the
`mappings` field in a [decoded state](https://github.com/rich-harris/sourcemap-codec) instead of
encoding into a VLQ string.
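As an illustrative (unofficial) combination of both options, reusing `minifiedTransformedMap` and `transformedMap` from the examples above:

```typescript
import remapping from '@jridgewell/remapping';

const remapped = remapping(
  minifiedTransformedMap,
  (file) => (file === 'transformed.js' ? transformedMap : null),
  { excludeContent: true, decodedMappings: true },
);

console.log(remapped);
// expected output (abridged):
// {
//   …,
//   mappings: [[[0, 0, 2, 2]]], // decoded segments instead of a VLQ string
//   sources: ['helloworld.js'],
//   // no sourcesContent field
// };
```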

node_modules/@jridgewell/remapping/dist/remapping.mjs generated vendored Normal file

@@ -0,0 +1,144 @@
// src/build-source-map-tree.ts
import { TraceMap } from "@jridgewell/trace-mapping";
// src/source-map-tree.ts
import { GenMapping, maybeAddSegment, setIgnore, setSourceContent } from "@jridgewell/gen-mapping";
import { traceSegment, decodedMappings } from "@jridgewell/trace-mapping";
var SOURCELESS_MAPPING = /* @__PURE__ */ SegmentObject("", -1, -1, "", null, false);
var EMPTY_SOURCES = [];
function SegmentObject(source, line, column, name, content, ignore) {
return { source, line, column, name, content, ignore };
}
function Source(map, sources, source, content, ignore) {
return {
map,
sources,
source,
content,
ignore
};
}
function MapSource(map, sources) {
return Source(map, sources, "", null, false);
}
function OriginalSource(source, content, ignore) {
return Source(null, EMPTY_SOURCES, source, content, ignore);
}
function traceMappings(tree) {
const gen = new GenMapping({ file: tree.map.file });
const { sources: rootSources, map } = tree;
const rootNames = map.names;
const rootMappings = decodedMappings(map);
for (let i = 0; i < rootMappings.length; i++) {
const segments = rootMappings[i];
for (let j = 0; j < segments.length; j++) {
const segment = segments[j];
const genCol = segment[0];
let traced = SOURCELESS_MAPPING;
if (segment.length !== 1) {
const source2 = rootSources[segment[1]];
traced = originalPositionFor(
source2,
segment[2],
segment[3],
segment.length === 5 ? rootNames[segment[4]] : ""
);
if (traced == null) continue;
}
const { column, line, name, content, source, ignore } = traced;
maybeAddSegment(gen, i, genCol, source, line, column, name);
if (source && content != null) setSourceContent(gen, source, content);
if (ignore) setIgnore(gen, source, true);
}
}
return gen;
}
function originalPositionFor(source, line, column, name) {
if (!source.map) {
return SegmentObject(source.source, line, column, name, source.content, source.ignore);
}
const segment = traceSegment(source.map, line, column);
if (segment == null) return null;
if (segment.length === 1) return SOURCELESS_MAPPING;
return originalPositionFor(
source.sources[segment[1]],
segment[2],
segment[3],
segment.length === 5 ? source.map.names[segment[4]] : name
);
}
// src/build-source-map-tree.ts
function asArray(value) {
if (Array.isArray(value)) return value;
return [value];
}
function buildSourceMapTree(input, loader) {
const maps = asArray(input).map((m) => new TraceMap(m, ""));
const map = maps.pop();
for (let i = 0; i < maps.length; i++) {
if (maps[i].sources.length > 1) {
throw new Error(
`Transformation map ${i} must have exactly one source file.
Did you specify these with the most recent transformation maps first?`
);
}
}
let tree = build(map, loader, "", 0);
for (let i = maps.length - 1; i >= 0; i--) {
tree = MapSource(maps[i], [tree]);
}
return tree;
}
function build(map, loader, importer, importerDepth) {
const { resolvedSources, sourcesContent, ignoreList } = map;
const depth = importerDepth + 1;
const children = resolvedSources.map((sourceFile, i) => {
const ctx = {
importer,
depth,
source: sourceFile || "",
content: void 0,
ignore: void 0
};
const sourceMap = loader(ctx.source, ctx);
const { source, content, ignore } = ctx;
if (sourceMap) return build(new TraceMap(sourceMap, source), loader, source, depth);
const sourceContent = content !== void 0 ? content : sourcesContent ? sourcesContent[i] : null;
const ignored = ignore !== void 0 ? ignore : ignoreList ? ignoreList.includes(i) : false;
return OriginalSource(source, sourceContent, ignored);
});
return MapSource(map, children);
}
// src/source-map.ts
import { toDecodedMap, toEncodedMap } from "@jridgewell/gen-mapping";
var SourceMap = class {
constructor(map, options) {
const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);
this.version = out.version;
this.file = out.file;
this.mappings = out.mappings;
this.names = out.names;
this.ignoreList = out.ignoreList;
this.sourceRoot = out.sourceRoot;
this.sources = out.sources;
if (!options.excludeContent) {
this.sourcesContent = out.sourcesContent;
}
}
toString() {
return JSON.stringify(this);
}
};
// src/remapping.ts
function remapping(input, loader, options) {
const opts = typeof options === "object" ? options : { excludeContent: !!options, decodedMappings: false };
const tree = buildSourceMapTree(input, loader);
return new SourceMap(traceMappings(tree), opts);
}
export {
remapping as default
};
//# sourceMappingURL=remapping.mjs.map


@@ -0,0 +1,6 @@
{
"version": 3,
"sources": ["../src/build-source-map-tree.ts", "../src/source-map-tree.ts", "../src/source-map.ts", "../src/remapping.ts"],
"mappings": ";AAAA,SAAS,gBAAgB;;;ACAzB,SAAS,YAAY,iBAAiB,WAAW,wBAAwB;AACzE,SAAS,cAAc,uBAAuB;AA+B9C,IAAM,qBAAqC,8BAAc,IAAI,IAAI,IAAI,IAAI,MAAM,KAAK;AACpF,IAAM,gBAA2B,CAAC;AAElC,SAAS,cACP,QACA,MACA,QACA,MACA,SACA,QACwB;AACxB,SAAO,EAAE,QAAQ,MAAM,QAAQ,MAAM,SAAS,OAAO;AACvD;AAgBA,SAAS,OACP,KACA,SACA,QACA,SACA,QACS;AACT,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAMO,SAAS,UAAU,KAAe,SAA+B;AACtE,SAAO,OAAO,KAAK,SAAS,IAAI,MAAM,KAAK;AAC7C;AAMO,SAAS,eACd,QACA,SACA,QACgB;AAChB,SAAO,OAAO,MAAM,eAAe,QAAQ,SAAS,MAAM;AAC5D;AAMO,SAAS,cAAc,MAA6B;AAGzD,QAAM,MAAM,IAAI,WAAW,EAAE,MAAM,KAAK,IAAI,KAAK,CAAC;AAClD,QAAM,EAAE,SAAS,aAAa,IAAI,IAAI;AACtC,QAAM,YAAY,IAAI;AACtB,QAAM,eAAe,gBAAgB,GAAG;AAExC,WAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;AAC5C,UAAM,WAAW,aAAa,CAAC;AAE/B,aAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,YAAM,UAAU,SAAS,CAAC;AAC1B,YAAM,SAAS,QAAQ,CAAC;AACxB,UAAI,SAAwC;AAI5C,UAAI,QAAQ,WAAW,GAAG;AACxB,cAAMA,UAAS,YAAY,QAAQ,CAAC,CAAC;AACrC,iBAAS;AAAA,UACPA;AAAA,UACA,QAAQ,CAAC;AAAA,UACT,QAAQ,CAAC;AAAA,UACT,QAAQ,WAAW,IAAI,UAAU,QAAQ,CAAC,CAAC,IAAI;AAAA,QACjD;AAIA,YAAI,UAAU,KAAM;AAAA,MACtB;AAEA,YAAM,EAAE,QAAQ,MAAM,MAAM,SAAS,QAAQ,OAAO,IAAI;AAExD,sBAAgB,KAAK,GAAG,QAAQ,QAAQ,MAAM,QAAQ,IAAI;AAC1D,UAAI,UAAU,WAAW,KAAM,kBAAiB,KAAK,QAAQ,OAAO;AACpE,UAAI,OAAQ,WAAU,KAAK,QAAQ,IAAI;AAAA,IACzC;AAAA,EACF;AAEA,SAAO;AACT;AAMO,SAAS,oBACd,QACA,MACA,QACA,MAC+B;AAC/B,MAAI,CAAC,OAAO,KAAK;AACf,WAAO,cAAc,OAAO,QAAQ,MAAM,QAAQ,MAAM,OAAO,SAAS,OAAO,MAAM;AAAA,EACvF;AAEA,QAAM,UAAU,aAAa,OAAO,KAAK,MAAM,MAAM;AAGrD,MAAI,WAAW,KAAM,QAAO;AAG5B,MAAI,QAAQ,WAAW,EAAG,QAAO;AAEjC,SAAO;AAAA,IACL,OAAO,QAAQ,QAAQ,CAAC,CAAC;AAAA,IACzB,QAAQ,CAAC;AAAA,IACT,QAAQ,CAAC;AAAA,IACT,QAAQ,WAAW,IAAI,OAAO,IAAI,MAAM,QAAQ,CAAC,CAAC,IAAI;AAAA,EACxD;AACF;;;ADpKA,SAAS,QAAW,OAAqB;AACvC,MAAI,MAAM,QAAQ,KAAK,EAAG,QAAO;AACjC,SAAO,CAAC,KAAK;AACf;AAae,SAAR,mBACL,OACA,QACe;AACf,QAAM,OAAO,QAAQ,KAAK,EAAE,IAAI,CAAC,MAAM,IAAI,SAAS,GAAG,EAAE,CAAC;AAC1D,QAAM,MAAM,KAAK,IAAI;AAErB,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,QAAI,KAAK,CAAC,EAAE,QAAQ,SAAS,GAAG;AAC9B,YAAM,IAAI;AAAA,QACR,sBAAsB,CAAC;AAAA;AAAA,MAEzB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,CAAC;AACnC,WAAS,IAAI,KAAK,SAAS,GAAG,KAAK,GAAG,KAAK;AACzC,WAAO,UAAU,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC;AAAA,EAClC;AACA,SAAO;AACT;AAEA,SAAS,MACP,KACA,QACA,UACA,eACe;AACf,QAAM,EAAE,iBAAiB,gBAAgB,WAAW,IAAI;AAExD,QAAM,QAAQ,gBAAgB;AAC9B,QAAM,WAAW,gBAAgB,IAAI,CAAC,YAA2B,MAAuB;AAKtF,UAAM,MAAqB;AAAA,MACzB;AAAA,MACA;AAAA,MACA,QAAQ,cAAc;AAAA,MACtB,SAAS;AAAA,MACT,QAAQ;AAAA,IACV;AAIA,UAAM,YAAY,OAAO,IAAI,QAAQ,GAAG;AAExC,UAAM,EAAE,QAAQ,SAAS,OAAO,IAAI;AAGpC,QAAI,UAAW,QAAO,MAAM,IAAI,SAAS,WAAW,MAAM,GAAG,QAAQ,QAAQ,KAAK;AAMlF,UAAM,gBACJ,YAAY,SAAY,UAAU,iBAAiB,eAAe,CAAC,IAAI;AACzE,UAAM,UAAU,WAAW,SAAY,SAAS,aAAa,WAAW,SAAS,CAAC,IAAI;AACtF,WAAO,eAAe,QAAQ,eAAe,OAAO;AAAA,EACtD,CAAC;AAED,SAAO,UAAU,KAAK,QAAQ;AAChC;;;AExFA,SAAS,cAAc,oBAAoB;AAS3C,IAAqB,YAArB,MAA+B;AAAA,EAU7B,YAAY,KAAiB,SAAkB;AAC7C,UAAM,MAAM,QAAQ,kBAAkB,aAAa,GAAG,IAAI,aAAa,GAAG;AAC1E,SAAK,UAAU,IAAI;AACnB,SAAK,OAAO,IAAI;AAChB,SAAK,WAAW,IAAI;AACpB,SAAK,QAAQ,IAAI;AACjB,SAAK,aAAa,IAAI;AACtB,SAAK,aAAa,IAAI;AAEtB,SAAK,UAAU,IAAI;AACnB,QAAI,CAAC,QAAQ,gBAAgB;AAC3B,WAAK,iBAAiB,IAAI;AAAA,IAC5B;AAAA,EACF;AAAA,EAEA,WAAmB;AACjB,WAAO,KAAK,UAAU,IAAI;AAAA,EAC5B;AACF;;;ACLe,SAAR,UACL,OACA,QACA,SACW;AACX,QAAM,OACJ,OAAO,YAAY,WAAW,UAAU,EAAE,gBAAgB,CAAC,CAAC,SAAS,iBAAiB,MAAM;AAC9F,QAAM,OAAO,mBAAmB,OAAO,MAAM;AAC7C,SAAO,IAAI,UAAU,cAAc,IAAI,GAAG,IAAI;AAChD;",
"names": ["source"]
}


@@ -0,0 +1,212 @@
(function (global, factory) {
if (typeof exports === 'object' && typeof module !== 'undefined') {
factory(module, require('@jridgewell/gen-mapping'), require('@jridgewell/trace-mapping'));
module.exports = def(module);
} else if (typeof define === 'function' && define.amd) {
define(['module', '@jridgewell/gen-mapping', '@jridgewell/trace-mapping'], function(mod) {
factory.apply(this, arguments);
mod.exports = def(mod);
});
} else {
const mod = { exports: {} };
factory(mod, global.genMapping, global.traceMapping);
global = typeof globalThis !== 'undefined' ? globalThis : global || self;
global.remapping = def(mod);
}
function def(m) { return 'default' in m.exports ? m.exports.default : m.exports; }
})(this, (function (module, require_genMapping, require_traceMapping) {
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __commonJS = (cb, mod) => function __require() {
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// umd:@jridgewell/trace-mapping
var require_trace_mapping = __commonJS({
"umd:@jridgewell/trace-mapping"(exports, module2) {
module2.exports = require_traceMapping;
}
});
// umd:@jridgewell/gen-mapping
var require_gen_mapping = __commonJS({
"umd:@jridgewell/gen-mapping"(exports, module2) {
module2.exports = require_genMapping;
}
});
// src/remapping.ts
var remapping_exports = {};
__export(remapping_exports, {
default: () => remapping
});
module.exports = __toCommonJS(remapping_exports);
// src/build-source-map-tree.ts
var import_trace_mapping2 = __toESM(require_trace_mapping());
// src/source-map-tree.ts
var import_gen_mapping = __toESM(require_gen_mapping());
var import_trace_mapping = __toESM(require_trace_mapping());
var SOURCELESS_MAPPING = /* @__PURE__ */ SegmentObject("", -1, -1, "", null, false);
var EMPTY_SOURCES = [];
function SegmentObject(source, line, column, name, content, ignore) {
return { source, line, column, name, content, ignore };
}
function Source(map, sources, source, content, ignore) {
return {
map,
sources,
source,
content,
ignore
};
}
function MapSource(map, sources) {
return Source(map, sources, "", null, false);
}
function OriginalSource(source, content, ignore) {
return Source(null, EMPTY_SOURCES, source, content, ignore);
}
function traceMappings(tree) {
const gen = new import_gen_mapping.GenMapping({ file: tree.map.file });
const { sources: rootSources, map } = tree;
const rootNames = map.names;
const rootMappings = (0, import_trace_mapping.decodedMappings)(map);
for (let i = 0; i < rootMappings.length; i++) {
const segments = rootMappings[i];
for (let j = 0; j < segments.length; j++) {
const segment = segments[j];
const genCol = segment[0];
let traced = SOURCELESS_MAPPING;
if (segment.length !== 1) {
const source2 = rootSources[segment[1]];
traced = originalPositionFor(
source2,
segment[2],
segment[3],
segment.length === 5 ? rootNames[segment[4]] : ""
);
if (traced == null) continue;
}
const { column, line, name, content, source, ignore } = traced;
(0, import_gen_mapping.maybeAddSegment)(gen, i, genCol, source, line, column, name);
if (source && content != null) (0, import_gen_mapping.setSourceContent)(gen, source, content);
if (ignore) (0, import_gen_mapping.setIgnore)(gen, source, true);
}
}
return gen;
}
function originalPositionFor(source, line, column, name) {
if (!source.map) {
return SegmentObject(source.source, line, column, name, source.content, source.ignore);
}
const segment = (0, import_trace_mapping.traceSegment)(source.map, line, column);
if (segment == null) return null;
if (segment.length === 1) return SOURCELESS_MAPPING;
return originalPositionFor(
source.sources[segment[1]],
segment[2],
segment[3],
segment.length === 5 ? source.map.names[segment[4]] : name
);
}
// src/build-source-map-tree.ts
function asArray(value) {
if (Array.isArray(value)) return value;
return [value];
}
function buildSourceMapTree(input, loader) {
const maps = asArray(input).map((m) => new import_trace_mapping2.TraceMap(m, ""));
const map = maps.pop();
for (let i = 0; i < maps.length; i++) {
if (maps[i].sources.length > 1) {
throw new Error(
`Transformation map ${i} must have exactly one source file.
Did you specify these with the most recent transformation maps first?`
);
}
}
let tree = build(map, loader, "", 0);
for (let i = maps.length - 1; i >= 0; i--) {
tree = MapSource(maps[i], [tree]);
}
return tree;
}
function build(map, loader, importer, importerDepth) {
const { resolvedSources, sourcesContent, ignoreList } = map;
const depth = importerDepth + 1;
const children = resolvedSources.map((sourceFile, i) => {
const ctx = {
importer,
depth,
source: sourceFile || "",
content: void 0,
ignore: void 0
};
const sourceMap = loader(ctx.source, ctx);
const { source, content, ignore } = ctx;
if (sourceMap) return build(new import_trace_mapping2.TraceMap(sourceMap, source), loader, source, depth);
const sourceContent = content !== void 0 ? content : sourcesContent ? sourcesContent[i] : null;
const ignored = ignore !== void 0 ? ignore : ignoreList ? ignoreList.includes(i) : false;
return OriginalSource(source, sourceContent, ignored);
});
return MapSource(map, children);
}
// src/source-map.ts
var import_gen_mapping2 = __toESM(require_gen_mapping());
var SourceMap = class {
constructor(map, options) {
const out = options.decodedMappings ? (0, import_gen_mapping2.toDecodedMap)(map) : (0, import_gen_mapping2.toEncodedMap)(map);
this.version = out.version;
this.file = out.file;
this.mappings = out.mappings;
this.names = out.names;
this.ignoreList = out.ignoreList;
this.sourceRoot = out.sourceRoot;
this.sources = out.sources;
if (!options.excludeContent) {
this.sourcesContent = out.sourcesContent;
}
}
toString() {
return JSON.stringify(this);
}
};
// src/remapping.ts
function remapping(input, loader, options) {
const opts = typeof options === "object" ? options : { excludeContent: !!options, decodedMappings: false };
const tree = buildSourceMapTree(input, loader);
return new SourceMap(traceMappings(tree), opts);
}
}));
//# sourceMappingURL=remapping.umd.js.map


@@ -0,0 +1,6 @@
{
"version": 3,
"sources": ["umd:@jridgewell/trace-mapping", "umd:@jridgewell/gen-mapping", "../src/remapping.ts", "../src/build-source-map-tree.ts", "../src/source-map-tree.ts", "../src/source-map.ts"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA,2CAAAA,SAAA;AAAA,IAAAA,QAAO,UAAU;AAAA;AAAA;;;ACAjB;AAAA,yCAAAC,SAAA;AAAA,IAAAA,QAAO,UAAU;AAAA;AAAA;;;ACAjB;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,wBAAyB;;;ACAzB,yBAAyE;AACzE,2BAA8C;AA+B9C,IAAM,qBAAqC,8BAAc,IAAI,IAAI,IAAI,IAAI,MAAM,KAAK;AACpF,IAAM,gBAA2B,CAAC;AAElC,SAAS,cACP,QACA,MACA,QACA,MACA,SACA,QACwB;AACxB,SAAO,EAAE,QAAQ,MAAM,QAAQ,MAAM,SAAS,OAAO;AACvD;AAgBA,SAAS,OACP,KACA,SACA,QACA,SACA,QACS;AACT,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAMO,SAAS,UAAU,KAAe,SAA+B;AACtE,SAAO,OAAO,KAAK,SAAS,IAAI,MAAM,KAAK;AAC7C;AAMO,SAAS,eACd,QACA,SACA,QACgB;AAChB,SAAO,OAAO,MAAM,eAAe,QAAQ,SAAS,MAAM;AAC5D;AAMO,SAAS,cAAc,MAA6B;AAGzD,QAAM,MAAM,IAAI,8BAAW,EAAE,MAAM,KAAK,IAAI,KAAK,CAAC;AAClD,QAAM,EAAE,SAAS,aAAa,IAAI,IAAI;AACtC,QAAM,YAAY,IAAI;AACtB,QAAM,mBAAe,sCAAgB,GAAG;AAExC,WAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;AAC5C,UAAM,WAAW,aAAa,CAAC;AAE/B,aAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,YAAM,UAAU,SAAS,CAAC;AAC1B,YAAM,SAAS,QAAQ,CAAC;AACxB,UAAI,SAAwC;AAI5C,UAAI,QAAQ,WAAW,GAAG;AACxB,cAAMC,UAAS,YAAY,QAAQ,CAAC,CAAC;AACrC,iBAAS;AAAA,UACPA;AAAA,UACA,QAAQ,CAAC;AAAA,UACT,QAAQ,CAAC;AAAA,UACT,QAAQ,WAAW,IAAI,UAAU,QAAQ,CAAC,CAAC,IAAI;AAAA,QACjD;AAIA,YAAI,UAAU,KAAM;AAAA,MACtB;AAEA,YAAM,EAAE,QAAQ,MAAM,MAAM,SAAS,QAAQ,OAAO,IAAI;AAExD,8CAAgB,KAAK,GAAG,QAAQ,QAAQ,MAAM,QAAQ,IAAI;AAC1D,UAAI,UAAU,WAAW,KAAM,0CAAiB,KAAK,QAAQ,OAAO;AACpE,UAAI,OAAQ,mCAAU,KAAK,QAAQ,IAAI;AAAA,IACzC;AAAA,EACF;AAEA,SAAO;AACT;AAMO,SAAS,oBACd,QACA,MACA,QACA,MAC+B;AAC/B,MAAI,CAAC,OAAO,KAAK;AACf,WAAO,cAAc,OAAO,QAAQ,MAAM,QAAQ,MAAM,OAAO,SAAS,OAAO,MAAM;AAAA,EACvF;AAEA,QAAM,cAAU,mCAAa,OAAO,KAAK,MAAM,MAAM;AAGrD,MAAI,WAAW,KAAM,QAAO;AAG5B,MAAI,QAAQ,WAAW,EAAG,QAAO;AAEjC,SAAO;AAAA,IACL,OAAO,QAAQ,QAAQ,CAAC,CAAC;AAAA,IACzB,QAAQ,CAAC;AAAA,IACT,QAAQ,CAAC;AAAA,IACT,QAAQ,WAAW,IAAI,OAAO,IAAI,MAAM,QAAQ,CAAC,CAAC,IAAI;AAAA,EACxD;AACF;;;ADpKA,SAAS,QAAW,OAAqB;AACvC,MAAI,MAAM,QAAQ,KAAK,EAAG,QAAO;AACjC,SAAO,CAAC,KAAK;AACf;AAae,SAAR,mBACL,OACA,QACe;AACf,QAAM,OAAO,QAAQ,KAAK,EAAE,IAAI,CAAC,MAAM,IAAI,+BAAS,GAAG,EAAE,CAAC;AAC1D,QAAM,MAAM,KAAK,IAAI;AAErB,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,QAAI,KAAK,CAAC,EAAE,QAAQ,SAAS,GAAG;AAC9B,YAAM,IAAI;AAAA,QACR,sBAAsB,CAAC;AAAA;AAAA,MAEzB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,CAAC;AACnC,WAAS,IAAI,KAAK,SAAS,GAAG,KAAK,GAAG,KAAK;AACzC,WAAO,UAAU,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC;AAAA,EAClC;AACA,SAAO;AACT;AAEA,SAAS,MACP,KACA,QACA,UACA,eACe;AACf,QAAM,EAAE,iBAAiB,gBAAgB,WAAW,IAAI;AAExD,QAAM,QAAQ,gBAAgB;AAC9B,QAAM,WAAW,gBAAgB,IAAI,CAAC,YAA2B,MAAuB;AAKtF,UAAM,MAAqB;AAAA,MACzB;AAAA,MACA;AAAA,MACA,QAAQ,cAAc;AAAA,MACtB,SAAS;AAAA,MACT,QAAQ;AAAA,IACV;AAIA,UAAM,YAAY,OAAO,IAAI,QAAQ,GAAG;AAExC,UAAM,EAAE,QAAQ,SAAS,OAAO,IAAI;AAGpC,QAAI,UAAW,QAAO,MAAM,IAAI,+BAAS,WAAW,MAAM,GAAG,QAAQ,QAAQ,KAAK;AAMlF,UAAM,gBACJ,YAAY,SAAY,UAAU,iBAAiB,eAAe,CAAC,IAAI;AACzE,UAAM,UAAU,WAAW,SAAY,SAAS,aAAa,WAAW,SAAS,CAAC,IAAI;AACtF,WAAO,eAAe,QAAQ,eAAe,OAAO;AAAA,EACtD,CAAC;AAED,SAAO,UAAU,KAAK,QAAQ;AAChC;;;AExFA,IAAAC,sBAA2C;AAS3C,IAAqB,YAArB,MAA+B;AAAA,EAU7B,YAAY,KAAiB,SAAkB;AAC7C,UAAM,MAAM,QAAQ,sBAAkB,kCAAa,GAAG,QAAI,kCAAa,GAAG;AAC1E,SAAK,UAAU,IAAI;AACnB,SAAK,OAAO,IAAI;AAChB,SAAK,WAAW,IAAI;AACpB,SAAK,QAAQ,IAAI;AACjB,SAAK,aAAa,IAAI;AACtB,SAAK,aAAa,IAAI;AAEtB,SAAK,UAAU,IAAI;AACnB,QAAI,CAAC,QAAQ,gBAAgB;AAC3B,WAAK,iBAAiB,IAAI;AAAA,IAC5B;AAAA,EACF;AAAA,EAEA,WAAmB;AACjB,WAAO,KAAK,UAAU,IAAI;AAAA,EAC5B;AACF;;;AHLe,SAAR,UACL,OACA,QACA,SACW;AACX,QAAM,OACJ,OAAO,YAAY,WAAW,UAAU,EAAE,gBAAgB,CAAC,CAAC,SAAS,iBAAiB,MAAM;AAC9F,QAAM,OAAO,mBAAmB,OAAO,MAAM;AAC7C,SAAO,IAAI,UAAU,cAAc,IAAI,GAAG,IAAI;AAChD;",
"names": ["module", "module", "import_trace_mapping", "source", "import_gen_mapping"]
}

node_modules/@jridgewell/remapping/package.json generated vendored Normal file

@@ -0,0 +1,71 @@
{
"name": "@jridgewell/remapping",
"version": "2.3.5",
"description": "Remap sequential sourcemaps through transformations to point at the original source code",
"keywords": [
"source",
"map",
"remap"
],
"main": "dist/remapping.umd.js",
"module": "dist/remapping.mjs",
"types": "types/remapping.d.cts",
"files": [
"dist",
"src",
"types"
],
"exports": {
".": [
{
"import": {
"types": "./types/remapping.d.mts",
"default": "./dist/remapping.mjs"
},
"default": {
"types": "./types/remapping.d.cts",
"default": "./dist/remapping.umd.js"
}
},
"./dist/remapping.umd.js"
],
"./package.json": "./package.json"
},
"scripts": {
"benchmark": "run-s build:code benchmark:*",
"benchmark:install": "cd benchmark && npm install",
"benchmark:only": "node --expose-gc benchmark/index.js",
"build": "run-s -n build:code build:types",
"build:code": "node ../../esbuild.mjs remapping.ts",
"build:types": "run-s build:types:force build:types:emit build:types:mts",
"build:types:force": "rimraf tsconfig.build.tsbuildinfo",
"build:types:emit": "tsc --project tsconfig.build.json",
"build:types:mts": "node ../../mts-types.mjs",
"clean": "run-s -n clean:code clean:types",
"clean:code": "tsc --build --clean tsconfig.build.json",
"clean:types": "rimraf dist types",
"test": "run-s -n test:types test:only test:format",
"test:format": "prettier --check '{src,test}/**/*.ts'",
"test:only": "mocha",
"test:types": "eslint '{src,test}/**/*.ts'",
"lint": "run-s -n lint:types lint:format",
"lint:format": "npm run test:format -- --write",
"lint:types": "npm run test:types -- --fix",
"prepublishOnly": "npm run-s -n build test"
},
"homepage": "https://github.com/jridgewell/sourcemaps/tree/main/packages/remapping",
"repository": {
"type": "git",
"url": "git+https://github.com/jridgewell/sourcemaps.git",
"directory": "packages/remapping"
},
"author": "Justin Ridgewell <justin@ridgewell.name>",
"license": "MIT",
"dependencies": {
"@jridgewell/gen-mapping": "^0.3.5",
"@jridgewell/trace-mapping": "^0.3.24"
},
"devDependencies": {
"source-map": "0.6.1"
}
}
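A brief sketch of how the conditional `exports` above are intended to resolve for consumers; the comments restate the fields from this package.json, and the identity loader is just an illustration:

```typescript
// "import" condition  -> types/remapping.d.mts + dist/remapping.mjs (ESM)
// "default" condition -> types/remapping.d.cts + dist/remapping.umd.js (CJS/UMD)
import remapping from '@jridgewell/remapping';
import type { SourceMapInput, SourceMapLoader } from '@jridgewell/remapping';

const identityLoader: SourceMapLoader = () => null;

export function reencode(map: SourceMapInput) {
  // With a loader that never returns a child map, remapping simply re-traces
  // and re-encodes the input map's own mappings.
  return remapping(map, identityLoader);
}
```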


@@ -0,0 +1,89 @@
import { TraceMap } from '@jridgewell/trace-mapping';
import { OriginalSource, MapSource } from './source-map-tree';
import type { Sources, MapSource as MapSourceType } from './source-map-tree';
import type { SourceMapInput, SourceMapLoader, LoaderContext } from './types';
function asArray<T>(value: T | T[]): T[] {
if (Array.isArray(value)) return value;
return [value];
}
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
export default function buildSourceMapTree(
input: SourceMapInput | SourceMapInput[],
loader: SourceMapLoader,
): MapSourceType {
const maps = asArray(input).map((m) => new TraceMap(m, ''));
const map = maps.pop()!;
for (let i = 0; i < maps.length; i++) {
if (maps[i].sources.length > 1) {
throw new Error(
`Transformation map ${i} must have exactly one source file.\n` +
'Did you specify these with the most recent transformation maps first?',
);
}
}
let tree = build(map, loader, '', 0);
for (let i = maps.length - 1; i >= 0; i--) {
tree = MapSource(maps[i], [tree]);
}
return tree;
}
function build(
map: TraceMap,
loader: SourceMapLoader,
importer: string,
importerDepth: number,
): MapSourceType {
const { resolvedSources, sourcesContent, ignoreList } = map;
const depth = importerDepth + 1;
const children = resolvedSources.map((sourceFile: string | null, i: number): Sources => {
// The loading context gives the loader more information about why this file is being loaded
// (eg, from which importer). It also allows the loader to override the location of the loaded
// sourcemap/original source, or to override the content in the sourcesContent field if it's
// an unmodified source file.
const ctx: LoaderContext = {
importer,
depth,
source: sourceFile || '',
content: undefined,
ignore: undefined,
};
// Use the provided loader callback to retrieve the file's sourcemap.
// TODO: We should eventually support async loading of sourcemap files.
const sourceMap = loader(ctx.source, ctx);
const { source, content, ignore } = ctx;
// If there is a sourcemap, then we need to recurse into it to load its source files.
if (sourceMap) return build(new TraceMap(sourceMap, source), loader, source, depth);
// Else, it's an unmodified source file.
// The contents of this unmodified source file can be overridden via the loader context,
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
// the importing sourcemap's `sourcesContent` field.
const sourceContent =
content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
return OriginalSource(source, sourceContent, ignored);
});
return MapSource(map, children);
}
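Not part of the package source — a hedged sketch of the kind of loader this tree builder is designed to drive, reading a sibling `<file>.map` from disk when one exists (the file layout is assumed):

```typescript
import fs from 'node:fs';
import remapping from '@jridgewell/remapping';
import type { SourceMapLoader } from '@jridgewell/remapping';

// Returning a sourcemap makes build() recurse into it (a MapSource node);
// returning null marks the file as an OriginalSource leaf.
const diskLoader: SourceMapLoader = (file, ctx) => {
  const mapFile = `${file}.map`;
  if (fs.existsSync(mapFile)) return fs.readFileSync(mapFile, 'utf8');

  // Leaf: optionally override the content so sourcesContent gets populated.
  if (fs.existsSync(file)) ctx.content = fs.readFileSync(file, 'utf8');
  return null;
};

// Usage (path is illustrative): remap the final build output's map.
export function remapFromDisk(finalMapPath: string) {
  return remapping(fs.readFileSync(finalMapPath, 'utf8'), diskLoader);
}
```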

node_modules/@jridgewell/remapping/src/remapping.ts generated vendored Normal file

@@ -0,0 +1,42 @@
import buildSourceMapTree from './build-source-map-tree';
import { traceMappings } from './source-map-tree';
import SourceMap from './source-map';
import type { SourceMapInput, SourceMapLoader, Options } from './types';
export type {
SourceMapSegment,
EncodedSourceMap,
EncodedSourceMap as RawSourceMap,
DecodedSourceMap,
SourceMapInput,
SourceMapLoader,
LoaderContext,
Options,
} from './types';
export type { SourceMap };
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
export default function remapping(
input: SourceMapInput | SourceMapInput[],
loader: SourceMapLoader,
options?: boolean | Options,
): SourceMap {
const opts =
typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
const tree = buildSourceMapTree(input, loader);
return new SourceMap(traceMappings(tree), opts);
}
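A small sketch (not an official example) of the boolean-to-object normalization performed above: passing `true` as `options` behaves like `{ excludeContent: true, decodedMappings: false }`. The tiny map and loader are made up for illustration:

```typescript
import remapping from '@jridgewell/remapping';

const tinyMap = {
  version: 3 as const,
  file: 'out.js',
  names: [],
  sources: ['in.js'],
  mappings: 'AAAA',
};
const leafLoader = () => null;

const viaBoolean = remapping(tinyMap, leafLoader, true);
const viaOptions = remapping(tinyMap, leafLoader, { excludeContent: true, decodedMappings: false });

// Both normalize to the same options, so the serialized maps should match.
console.log(viaBoolean.toString() === viaOptions.toString()); // true
```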


@@ -0,0 +1,172 @@
import { GenMapping, maybeAddSegment, setIgnore, setSourceContent } from '@jridgewell/gen-mapping';
import { traceSegment, decodedMappings } from '@jridgewell/trace-mapping';
import type { TraceMap } from '@jridgewell/trace-mapping';
export type SourceMapSegmentObject = {
column: number;
line: number;
name: string;
source: string;
content: string | null;
ignore: boolean;
};
export type OriginalSource = {
map: null;
sources: Sources[];
source: string;
content: string | null;
ignore: boolean;
};
export type MapSource = {
map: TraceMap;
sources: Sources[];
source: string;
content: null;
ignore: false;
};
export type Sources = OriginalSource | MapSource;
const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
const EMPTY_SOURCES: Sources[] = [];
function SegmentObject(
source: string,
line: number,
column: number,
name: string,
content: string | null,
ignore: boolean,
): SourceMapSegmentObject {
return { source, line, column, name, content, ignore };
}
function Source(
map: TraceMap,
sources: Sources[],
source: '',
content: null,
ignore: false,
): MapSource;
function Source(
map: null,
sources: Sources[],
source: string,
content: string | null,
ignore: boolean,
): OriginalSource;
function Source(
map: TraceMap | null,
sources: Sources[],
source: string | '',
content: string | null,
ignore: boolean,
): Sources {
return {
map,
sources,
source,
content,
ignore,
} as any;
}
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
export function MapSource(map: TraceMap, sources: Sources[]): MapSource {
return Source(map, sources, '', null, false);
}
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
export function OriginalSource(
source: string,
content: string | null,
ignore: boolean,
): OriginalSource {
return Source(null, EMPTY_SOURCES, source, content, ignore);
}
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
export function traceMappings(tree: MapSource): GenMapping {
// TODO: Eventually support sourceRoot, which has to be removed because the sources are already
// fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
const gen = new GenMapping({ file: tree.map.file });
const { sources: rootSources, map } = tree;
const rootNames = map.names;
const rootMappings = decodedMappings(map);
for (let i = 0; i < rootMappings.length; i++) {
const segments = rootMappings[i];
for (let j = 0; j < segments.length; j++) {
const segment = segments[j];
const genCol = segment[0];
let traced: SourceMapSegmentObject | null = SOURCELESS_MAPPING;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length !== 1) {
const source = rootSources[segment[1]];
traced = originalPositionFor(
source,
segment[2],
segment[3],
segment.length === 5 ? rootNames[segment[4]] : '',
);
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
// respective segment into an original source.
if (traced == null) continue;
}
const { column, line, name, content, source, ignore } = traced;
maybeAddSegment(gen, i, genCol, source, line, column, name);
if (source && content != null) setSourceContent(gen, source, content);
if (ignore) setIgnore(gen, source, true);
}
}
return gen;
}
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
export function originalPositionFor(
source: Sources,
line: number,
column: number,
name: string,
): SourceMapSegmentObject | null {
if (!source.map) {
return SegmentObject(source.source, line, column, name, source.content, source.ignore);
}
const segment = traceSegment(source.map, line, column);
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
if (segment == null) return null;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length === 1) return SOURCELESS_MAPPING;
return originalPositionFor(
source.sources[segment[1]],
segment[2],
segment[3],
segment.length === 5 ? source.map.names[segment[4]] : name,
);
}
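
These helpers are internal to the package, but a small hand-built tree shows the recursion; this is a hypothetical snippet living next to `source-map-tree.ts`, using the 0-based line/column numbering in effect at this level:

```typescript
import { TraceMap } from '@jridgewell/trace-mapping';
import { MapSource, OriginalSource, originalPositionFor } from './source-map-tree';

// A one-line map whose generated 0:0 points at input.js 0:0 ("AAAA").
const child = new TraceMap({
  version: 3,
  names: [],
  sources: ['input.js'],
  mappings: 'AAAA',
});

// sources[i] of the tree node lines up with the child map's own sources array.
const tree = MapSource(child, [OriginalSource('input.js', 'let x;', false)]);

// Tracing 0:0 steps through the child map and bottoms out at the leaf.
originalPositionFor(tree, 0, 0, '');
// => { source: 'input.js', line: 0, column: 0, name: '', content: 'let x;', ignore: false }
```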

38
node_modules/@jridgewell/remapping/src/source-map.ts generated vendored Normal file
View File

@@ -0,0 +1,38 @@
import { toDecodedMap, toEncodedMap } from '@jridgewell/gen-mapping';
import type { GenMapping } from '@jridgewell/gen-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Options } from './types';
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
export default class SourceMap {
declare file?: string | null;
declare mappings: EncodedSourceMap['mappings'] | DecodedSourceMap['mappings'];
declare sourceRoot?: string;
declare names: string[];
declare sources: (string | null)[];
declare sourcesContent?: (string | null)[];
declare version: 3;
declare ignoreList: number[] | undefined;
constructor(map: GenMapping, options: Options) {
const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);
this.version = out.version; // SourceMap spec says this should be first.
this.file = out.file;
this.mappings = out.mappings as SourceMap['mappings'];
this.names = out.names as SourceMap['names'];
this.ignoreList = out.ignoreList as SourceMap['ignoreList'];
this.sourceRoot = out.sourceRoot;
this.sources = out.sources as SourceMap['sources'];
if (!options.excludeContent) {
this.sourcesContent = out.sourcesContent as SourceMap['sourcesContent'];
}
}
toString(): string {
return JSON.stringify(this);
}
}
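
A sketch of how the options above shape the returned object (the input map is hypothetical):

```typescript
import remapping from '@jridgewell/remapping';
import type { SourceMapInput } from '@jridgewell/remapping';

declare const transformedMap: SourceMapInput; // hypothetical build output

// excludeContent skips the sourcesContent assignment, so the field is simply absent.
const out = remapping(transformedMap, () => null, { excludeContent: true });
console.log('sourcesContent' in out); // false
// toString() is JSON.stringify(this), i.e. the text you would write to a .map file.
const json = out.toString();
```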

27
node_modules/@jridgewell/remapping/src/types.ts generated vendored Normal file
View File

@@ -0,0 +1,27 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
export type {
SourceMapSegment,
DecodedSourceMap,
EncodedSourceMap,
} from '@jridgewell/trace-mapping';
export type { SourceMapInput };
export type LoaderContext = {
readonly importer: string;
readonly depth: number;
source: string;
content: string | null | undefined;
ignore: boolean | undefined;
};
export type SourceMapLoader = (
file: string,
ctx: LoaderContext,
) => SourceMapInput | null | undefined | void;
export type Options = {
excludeContent?: boolean;
decodedMappings?: boolean;
};

View File

@@ -0,0 +1,15 @@
import type { MapSource as MapSourceType } from './source-map-tree.cts';
import type { SourceMapInput, SourceMapLoader } from './types.cts';
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
export = function buildSourceMapTree(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader): MapSourceType;
//# sourceMappingURL=build-source-map-tree.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"build-source-map-tree.d.ts","sourceRoot":"","sources":["../src/build-source-map-tree.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAW,SAAS,IAAI,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAC7E,OAAO,KAAK,EAAE,cAAc,EAAE,eAAe,EAAiB,MAAM,SAAS,CAAC;AAO9E;;;;;;;;;;GAUG;AACH,MAAM,CAAC,OAAO,UAAU,kBAAkB,CACxC,KAAK,EAAE,cAAc,GAAG,cAAc,EAAE,EACxC,MAAM,EAAE,eAAe,GACtB,aAAa,CAkBf"}

View File

@@ -0,0 +1,15 @@
import type { MapSource as MapSourceType } from './source-map-tree.mts';
import type { SourceMapInput, SourceMapLoader } from './types.mts';
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
export default function buildSourceMapTree(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader): MapSourceType;
//# sourceMappingURL=build-source-map-tree.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"build-source-map-tree.d.ts","sourceRoot":"","sources":["../src/build-source-map-tree.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAW,SAAS,IAAI,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAC7E,OAAO,KAAK,EAAE,cAAc,EAAE,eAAe,EAAiB,MAAM,SAAS,CAAC;AAO9E;;;;;;;;;;GAUG;AACH,MAAM,CAAC,OAAO,UAAU,kBAAkB,CACxC,KAAK,EAAE,cAAc,GAAG,cAAc,EAAE,EACxC,MAAM,EAAE,eAAe,GACtB,aAAa,CAkBf"}

View File

@@ -0,0 +1,21 @@
import SourceMap from './source-map.cts';
import type { SourceMapInput, SourceMapLoader, Options } from './types.cts';
export type { SourceMapSegment, EncodedSourceMap, EncodedSourceMap as RawSourceMap, DecodedSourceMap, SourceMapInput, SourceMapLoader, LoaderContext, Options, } from './types.cts';
export type { SourceMap };
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
export = function remapping(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader, options?: boolean | Options): SourceMap;
//# sourceMappingURL=remapping.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"remapping.d.ts","sourceRoot":"","sources":["../src/remapping.ts"],"names":[],"mappings":"AAEA,OAAO,SAAS,MAAM,cAAc,CAAC;AAErC,OAAO,KAAK,EAAE,cAAc,EAAE,eAAe,EAAE,OAAO,EAAE,MAAM,SAAS,CAAC;AACxE,YAAY,EACV,gBAAgB,EAChB,gBAAgB,EAChB,gBAAgB,IAAI,YAAY,EAChC,gBAAgB,EAChB,cAAc,EACd,eAAe,EACf,aAAa,EACb,OAAO,GACR,MAAM,SAAS,CAAC;AACjB,YAAY,EAAE,SAAS,EAAE,CAAC;AAE1B;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAC,OAAO,UAAU,SAAS,CAC/B,KAAK,EAAE,cAAc,GAAG,cAAc,EAAE,EACxC,MAAM,EAAE,eAAe,EACvB,OAAO,CAAC,EAAE,OAAO,GAAG,OAAO,GAC1B,SAAS,CAKX"}

View File

@@ -0,0 +1,21 @@
import SourceMap from './source-map.mts';
import type { SourceMapInput, SourceMapLoader, Options } from './types.mts';
export type { SourceMapSegment, EncodedSourceMap, EncodedSourceMap as RawSourceMap, DecodedSourceMap, SourceMapInput, SourceMapLoader, LoaderContext, Options, } from './types.mts';
export type { SourceMap };
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
export default function remapping(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader, options?: boolean | Options): SourceMap;
//# sourceMappingURL=remapping.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"remapping.d.ts","sourceRoot":"","sources":["../src/remapping.ts"],"names":[],"mappings":"AAEA,OAAO,SAAS,MAAM,cAAc,CAAC;AAErC,OAAO,KAAK,EAAE,cAAc,EAAE,eAAe,EAAE,OAAO,EAAE,MAAM,SAAS,CAAC;AACxE,YAAY,EACV,gBAAgB,EAChB,gBAAgB,EAChB,gBAAgB,IAAI,YAAY,EAChC,gBAAgB,EAChB,cAAc,EACd,eAAe,EACf,aAAa,EACb,OAAO,GACR,MAAM,SAAS,CAAC;AACjB,YAAY,EAAE,SAAS,EAAE,CAAC;AAE1B;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAC,OAAO,UAAU,SAAS,CAC/B,KAAK,EAAE,cAAc,GAAG,cAAc,EAAE,EACxC,MAAM,EAAE,eAAe,EACvB,OAAO,CAAC,EAAE,OAAO,GAAG,OAAO,GAC1B,SAAS,CAKX"}

View File

@@ -0,0 +1,46 @@
import { GenMapping } from '@jridgewell/gen-mapping';
import type { TraceMap } from '@jridgewell/trace-mapping';
export type SourceMapSegmentObject = {
column: number;
line: number;
name: string;
source: string;
content: string | null;
ignore: boolean;
};
export type OriginalSource = {
map: null;
sources: Sources[];
source: string;
content: string | null;
ignore: boolean;
};
export type MapSource = {
map: TraceMap;
sources: Sources[];
source: string;
content: null;
ignore: false;
};
export type Sources = OriginalSource | MapSource;
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
export declare function MapSource(map: TraceMap, sources: Sources[]): MapSource;
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
export declare function OriginalSource(source: string, content: string | null, ignore: boolean): OriginalSource;
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
export declare function traceMappings(tree: MapSource): GenMapping;
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
export declare function originalPositionFor(source: Sources, line: number, column: number, name: string): SourceMapSegmentObject | null;
//# sourceMappingURL=source-map-tree.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"source-map-tree.d.ts","sourceRoot":"","sources":["../src/source-map-tree.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAgD,MAAM,yBAAyB,CAAC;AAGnG,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,2BAA2B,CAAC;AAE1D,MAAM,MAAM,sBAAsB,GAAG;IACnC,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,MAAM,EAAE,OAAO,CAAC;CACjB,CAAC;AAEF,MAAM,MAAM,cAAc,GAAG;IAC3B,GAAG,EAAE,IAAI,CAAC;IACV,OAAO,EAAE,OAAO,EAAE,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,MAAM,EAAE,OAAO,CAAC;CACjB,CAAC;AAEF,MAAM,MAAM,SAAS,GAAG;IACtB,GAAG,EAAE,QAAQ,CAAC;IACd,OAAO,EAAE,OAAO,EAAE,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,IAAI,CAAC;IACd,MAAM,EAAE,KAAK,CAAC;CACf,CAAC;AAEF,MAAM,MAAM,OAAO,GAAG,cAAc,GAAG,SAAS,CAAC;AA8CjD;;;GAGG;AACH,wBAAgB,SAAS,CAAC,GAAG,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,SAAS,CAEtE;AAED;;;GAGG;AACH,wBAAgB,cAAc,CAC5B,MAAM,EAAE,MAAM,EACd,OAAO,EAAE,MAAM,GAAG,IAAI,EACtB,MAAM,EAAE,OAAO,GACd,cAAc,CAEhB;AAED;;;GAGG;AACH,wBAAgB,aAAa,CAAC,IAAI,EAAE,SAAS,GAAG,UAAU,CAyCzD;AAED;;;GAGG;AACH,wBAAgB,mBAAmB,CACjC,MAAM,EAAE,OAAO,EACf,IAAI,EAAE,MAAM,EACZ,MAAM,EAAE,MAAM,EACd,IAAI,EAAE,MAAM,GACX,sBAAsB,GAAG,IAAI,CAmB/B"}

View File

@@ -0,0 +1,46 @@
import { GenMapping } from '@jridgewell/gen-mapping';
import type { TraceMap } from '@jridgewell/trace-mapping';
export type SourceMapSegmentObject = {
column: number;
line: number;
name: string;
source: string;
content: string | null;
ignore: boolean;
};
export type OriginalSource = {
map: null;
sources: Sources[];
source: string;
content: string | null;
ignore: boolean;
};
export type MapSource = {
map: TraceMap;
sources: Sources[];
source: string;
content: null;
ignore: false;
};
export type Sources = OriginalSource | MapSource;
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
export declare function MapSource(map: TraceMap, sources: Sources[]): MapSource;
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
export declare function OriginalSource(source: string, content: string | null, ignore: boolean): OriginalSource;
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
export declare function traceMappings(tree: MapSource): GenMapping;
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
export declare function originalPositionFor(source: Sources, line: number, column: number, name: string): SourceMapSegmentObject | null;
//# sourceMappingURL=source-map-tree.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"source-map-tree.d.ts","sourceRoot":"","sources":["../src/source-map-tree.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAgD,MAAM,yBAAyB,CAAC;AAGnG,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,2BAA2B,CAAC;AAE1D,MAAM,MAAM,sBAAsB,GAAG;IACnC,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,MAAM,EAAE,OAAO,CAAC;CACjB,CAAC;AAEF,MAAM,MAAM,cAAc,GAAG;IAC3B,GAAG,EAAE,IAAI,CAAC;IACV,OAAO,EAAE,OAAO,EAAE,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,MAAM,EAAE,OAAO,CAAC;CACjB,CAAC;AAEF,MAAM,MAAM,SAAS,GAAG;IACtB,GAAG,EAAE,QAAQ,CAAC;IACd,OAAO,EAAE,OAAO,EAAE,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,IAAI,CAAC;IACd,MAAM,EAAE,KAAK,CAAC;CACf,CAAC;AAEF,MAAM,MAAM,OAAO,GAAG,cAAc,GAAG,SAAS,CAAC;AA8CjD;;;GAGG;AACH,wBAAgB,SAAS,CAAC,GAAG,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,SAAS,CAEtE;AAED;;;GAGG;AACH,wBAAgB,cAAc,CAC5B,MAAM,EAAE,MAAM,EACd,OAAO,EAAE,MAAM,GAAG,IAAI,EACtB,MAAM,EAAE,OAAO,GACd,cAAc,CAEhB;AAED;;;GAGG;AACH,wBAAgB,aAAa,CAAC,IAAI,EAAE,SAAS,GAAG,UAAU,CAyCzD;AAED;;;GAGG;AACH,wBAAgB,mBAAmB,CACjC,MAAM,EAAE,OAAO,EACf,IAAI,EAAE,MAAM,EACZ,MAAM,EAAE,MAAM,EACd,IAAI,EAAE,MAAM,GACX,sBAAsB,GAAG,IAAI,CAmB/B"}

View File

@@ -0,0 +1,19 @@
import type { GenMapping } from '@jridgewell/gen-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Options } from './types.cts';
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
export = class SourceMap {
file?: string | null;
mappings: EncodedSourceMap['mappings'] | DecodedSourceMap['mappings'];
sourceRoot?: string;
names: string[];
sources: (string | null)[];
sourcesContent?: (string | null)[];
version: 3;
ignoreList: number[] | undefined;
constructor(map: GenMapping, options: Options);
toString(): string;
}
//# sourceMappingURL=source-map.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"source-map.d.ts","sourceRoot":"","sources":["../src/source-map.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,KAAK,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,OAAO,EAAE,MAAM,SAAS,CAAC;AAE3E;;;GAGG;AACH,MAAM,CAAC,OAAO,OAAO,SAAS;IACpB,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,QAAQ,EAAE,gBAAgB,CAAC,UAAU,CAAC,GAAG,gBAAgB,CAAC,UAAU,CAAC,CAAC;IACtE,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,OAAO,EAAE,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,CAAC;IAC3B,cAAc,CAAC,EAAE,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,CAAC;IACnC,OAAO,EAAE,CAAC,CAAC;IACX,UAAU,EAAE,MAAM,EAAE,GAAG,SAAS,CAAC;gBAE7B,GAAG,EAAE,UAAU,EAAE,OAAO,EAAE,OAAO;IAe7C,QAAQ,IAAI,MAAM;CAGnB"}

View File

@@ -0,0 +1,19 @@
import type { GenMapping } from '@jridgewell/gen-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Options } from './types.mts';
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
export default class SourceMap {
file?: string | null;
mappings: EncodedSourceMap['mappings'] | DecodedSourceMap['mappings'];
sourceRoot?: string;
names: string[];
sources: (string | null)[];
sourcesContent?: (string | null)[];
version: 3;
ignoreList: number[] | undefined;
constructor(map: GenMapping, options: Options);
toString(): string;
}
//# sourceMappingURL=source-map.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"source-map.d.ts","sourceRoot":"","sources":["../src/source-map.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,KAAK,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,OAAO,EAAE,MAAM,SAAS,CAAC;AAE3E;;;GAGG;AACH,MAAM,CAAC,OAAO,OAAO,SAAS;IACpB,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,QAAQ,EAAE,gBAAgB,CAAC,UAAU,CAAC,GAAG,gBAAgB,CAAC,UAAU,CAAC,CAAC;IACtE,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,OAAO,EAAE,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,CAAC;IAC3B,cAAc,CAAC,EAAE,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,CAAC;IACnC,OAAO,EAAE,CAAC,CAAC;IACX,UAAU,EAAE,MAAM,EAAE,GAAG,SAAS,CAAC;gBAE7B,GAAG,EAAE,UAAU,EAAE,OAAO,EAAE,OAAO;IAe7C,QAAQ,IAAI,MAAM;CAGnB"}

16
node_modules/@jridgewell/remapping/types/types.d.cts generated vendored Normal file
View File

@@ -0,0 +1,16 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
export type { SourceMapSegment, DecodedSourceMap, EncodedSourceMap, } from '@jridgewell/trace-mapping';
export type { SourceMapInput };
export type LoaderContext = {
readonly importer: string;
readonly depth: number;
source: string;
content: string | null | undefined;
ignore: boolean | undefined;
};
export type SourceMapLoader = (file: string, ctx: LoaderContext) => SourceMapInput | null | undefined | void;
export type Options = {
excludeContent?: boolean;
decodedMappings?: boolean;
};
//# sourceMappingURL=types.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAEhE,YAAY,EACV,gBAAgB,EAChB,gBAAgB,EAChB,gBAAgB,GACjB,MAAM,2BAA2B,CAAC;AAEnC,YAAY,EAAE,cAAc,EAAE,CAAC;AAE/B,MAAM,MAAM,aAAa,GAAG;IAC1B,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;IACvB,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IACnC,MAAM,EAAE,OAAO,GAAG,SAAS,CAAC;CAC7B,CAAC;AAEF,MAAM,MAAM,eAAe,GAAG,CAC5B,IAAI,EAAE,MAAM,EACZ,GAAG,EAAE,aAAa,KACf,cAAc,GAAG,IAAI,GAAG,SAAS,GAAG,IAAI,CAAC;AAE9C,MAAM,MAAM,OAAO,GAAG;IACpB,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,eAAe,CAAC,EAAE,OAAO,CAAC;CAC3B,CAAC"}

16
node_modules/@jridgewell/remapping/types/types.d.mts generated vendored Normal file
View File

@@ -0,0 +1,16 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
export type { SourceMapSegment, DecodedSourceMap, EncodedSourceMap, } from '@jridgewell/trace-mapping';
export type { SourceMapInput };
export type LoaderContext = {
readonly importer: string;
readonly depth: number;
source: string;
content: string | null | undefined;
ignore: boolean | undefined;
};
export type SourceMapLoader = (file: string, ctx: LoaderContext) => SourceMapInput | null | undefined | void;
export type Options = {
excludeContent?: boolean;
decodedMappings?: boolean;
};
//# sourceMappingURL=types.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAEhE,YAAY,EACV,gBAAgB,EAChB,gBAAgB,EAChB,gBAAgB,GACjB,MAAM,2BAA2B,CAAC;AAEnC,YAAY,EAAE,cAAc,EAAE,CAAC;AAE/B,MAAM,MAAM,aAAa,GAAG;IAC1B,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;IACvB,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IACnC,MAAM,EAAE,OAAO,GAAG,SAAS,CAAC;CAC7B,CAAC;AAEF,MAAM,MAAM,eAAe,GAAG,CAC5B,IAAI,EAAE,MAAM,EACZ,GAAG,EAAE,aAAa,KACf,cAAc,GAAG,IAAI,GAAG,SAAS,GAAG,IAAI,CAAC;AAE9C,MAAM,MAAM,OAAO,GAAG;IACpB,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,eAAe,CAAC,EAAE,OAAO,CAAC;CAC3B,CAAC"}

19
node_modules/@jridgewell/resolve-uri/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,19 @@
Copyright 2019 Justin Ridgewell <jridgewell@google.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

40
node_modules/@jridgewell/resolve-uri/README.md generated vendored Normal file
View File

@@ -0,0 +1,40 @@
# @jridgewell/resolve-uri
> Resolve a URI relative to an optional base URI
Resolve any combination of absolute URIs, protocol-relative URIs, absolute paths, or relative paths.
## Installation
```sh
npm install @jridgewell/resolve-uri
```
## Usage
```typescript
function resolve(input: string, base?: string): string;
```
```js
import resolve from '@jridgewell/resolve-uri';
resolve('foo', 'https://example.com'); // => 'https://example.com/foo'
```
| Input | Base | Resolution | Explanation |
|-----------------------|-------------------------|--------------------------------|--------------------------------------------------------------|
| `https://example.com` | _any_ | `https://example.com/` | Input is normalized only |
| `//example.com` | `https://base.com/` | `https://example.com/` | Input inherits the base's protocol |
| `//example.com` | _rest_ | `//example.com/` | Input is normalized only |
| `/example` | `https://base.com/` | `https://base.com/example` | Input inherits the base's origin |
| `/example` | `//base.com/` | `//base.com/example` | Input inherits the base's host and remains protocol relative |
| `/example` | _rest_ | `/example` | Input is normalized only |
| `example` | `https://base.com/dir/` | `https://base.com/dir/example` | Input is joined with the base |
| `example` | `https://base.com/file` | `https://base.com/example` | Input is joined with the base without its file |
| `example` | `//base.com/dir/` | `//base.com/dir/example` | Input is joined with the base's last directory |
| `example` | `//base.com/file` | `//base.com/example` | Input is joined with the base without its file |
| `example` | `/base/dir/` | `/base/dir/example` | Input is joined with the base's last directory |
| `example` | `/base/file` | `/base/example` | Input is joined with the base without its file |
| `example` | `base/dir/` | `base/dir/example` | Input is joined with the base's last directory |
| `example` | `base/file` | `base/example` | Input is joined with the base without its file |
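
A few of the rows above, written as calls with the expected results from the table in comments:

```typescript
import resolve from '@jridgewell/resolve-uri';

resolve('//example.com', 'https://base.com/'); // => 'https://example.com/'
resolve('/example', '//base.com/');            // => '//base.com/example'
resolve('example', 'https://base.com/dir/');   // => 'https://base.com/dir/example'
resolve('example', 'https://base.com/file');   // => 'https://base.com/example'
```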

View File

@@ -0,0 +1,232 @@
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
* Matches the parts of a URL:
* 1. Scheme, including ":", guaranteed.
* 2. User/password, including "@", optional.
* 3. Host, guaranteed.
* 4. Port, including ":", optional.
* 5. Path, including "/", optional.
* 6. Query, including "?", optional.
* 7. Hash, including "#", optional.
*/
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
* File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
*
* 1. Host, optional.
* 2. Path, which may include "/", guaranteed.
* 3. Query, including "?", optional.
* 4. Hash, including "#", optional.
*/
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
function isAbsoluteUrl(input) {
return schemeRegex.test(input);
}
function isSchemeRelativeUrl(input) {
return input.startsWith('//');
}
function isAbsolutePath(input) {
return input.startsWith('/');
}
function isFileUrl(input) {
return input.startsWith('file:');
}
function isRelative(input) {
return /^[.?#]/.test(input);
}
function parseAbsoluteUrl(input) {
const match = urlRegex.exec(input);
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
}
function parseFileUrl(input) {
const match = fileRegex.exec(input);
const path = match[2];
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
}
function makeUrl(scheme, user, host, port, path, query, hash) {
return {
scheme,
user,
host,
port,
path,
query,
hash,
type: 7 /* Absolute */,
};
}
function parseUrl(input) {
if (isSchemeRelativeUrl(input)) {
const url = parseAbsoluteUrl('http:' + input);
url.scheme = '';
url.type = 6 /* SchemeRelative */;
return url;
}
if (isAbsolutePath(input)) {
const url = parseAbsoluteUrl('http://foo.com' + input);
url.scheme = '';
url.host = '';
url.type = 5 /* AbsolutePath */;
return url;
}
if (isFileUrl(input))
return parseFileUrl(input);
if (isAbsoluteUrl(input))
return parseAbsoluteUrl(input);
const url = parseAbsoluteUrl('http://foo.com/' + input);
url.scheme = '';
url.host = '';
url.type = input
? input.startsWith('?')
? 3 /* Query */
: input.startsWith('#')
? 2 /* Hash */
: 4 /* RelativePath */
: 1 /* Empty */;
return url;
}
function stripPathFilename(path) {
// If a path ends with a parent directory "..", then it's a relative path with excess parent
// paths. It's not a file, so we can't strip it.
if (path.endsWith('/..'))
return path;
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
function mergePaths(url, base) {
normalizePath(base, base.type);
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
// path).
if (url.path === '/') {
url.path = base.path;
}
else {
// Resolution happens relative to the base path's directory, not the file.
url.path = stripPathFilename(base.path) + url.path;
}
}
/**
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
* "foo/.". We need to normalize to a standard representation.
*/
function normalizePath(url, type) {
const rel = type <= 4 /* RelativePath */;
const pieces = url.path.split('/');
// We need to preserve the first piece always, so that we output a leading slash. The item at
// pieces[0] is an empty string.
let pointer = 1;
// Positive is the number of real directories we've output, used for popping a parent directory.
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
let positive = 0;
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
// real directory, we won't need to append, unless the other conditions happen again.
let addTrailingSlash = false;
for (let i = 1; i < pieces.length; i++) {
const piece = pieces[i];
// An empty directory, could be a trailing slash, or just a double "//" in the path.
if (!piece) {
addTrailingSlash = true;
continue;
}
// If we encounter a real directory, then we don't need to append anymore.
addTrailingSlash = false;
// A current directory, which we can always drop.
if (piece === '.')
continue;
// A parent directory, we need to see if there are any real directories we can pop. Else, we
// have an excess of parents, and we'll need to keep the "..".
if (piece === '..') {
if (positive) {
addTrailingSlash = true;
positive--;
pointer--;
}
else if (rel) {
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
pieces[pointer++] = piece;
}
continue;
}
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
// any popped or dropped directories.
pieces[pointer++] = piece;
positive++;
}
let path = '';
for (let i = 1; i < pointer; i++) {
path += '/' + pieces[i];
}
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
path += '/';
}
url.path = path;
}
/**
* Attempts to resolve `input` URL/path relative to `base`.
*/
function resolve(input, base) {
if (!input && !base)
return '';
const url = parseUrl(input);
let inputType = url.type;
if (base && inputType !== 7 /* Absolute */) {
const baseUrl = parseUrl(base);
const baseType = baseUrl.type;
switch (inputType) {
case 1 /* Empty */:
url.hash = baseUrl.hash;
// fall through
case 2 /* Hash */:
url.query = baseUrl.query;
// fall through
case 3 /* Query */:
case 4 /* RelativePath */:
mergePaths(url, baseUrl);
// fall through
case 5 /* AbsolutePath */:
// The host, user, and port are joined, you can't copy one without the others.
url.user = baseUrl.user;
url.host = baseUrl.host;
url.port = baseUrl.port;
// fall through
case 6 /* SchemeRelative */:
// The input doesn't even have a scheme, so we need to copy at least that over.
url.scheme = baseUrl.scheme;
}
if (baseType > inputType)
inputType = baseType;
}
normalizePath(url, inputType);
const queryHash = url.query + url.hash;
switch (inputType) {
// This is impossible, because of the empty checks at the start of the function.
// case UrlType.Empty:
case 2 /* Hash */:
case 3 /* Query */:
return queryHash;
case 4 /* RelativePath */: {
// The first char is always a "/", and we need it to be relative.
const path = url.path.slice(1);
if (!path)
return queryHash || '.';
if (isRelative(base || input) && !isRelative(path)) {
// If base started with a leading ".", or there is no base and input started with a ".",
// then we need to ensure that the relative path starts with a ".". We don't know if
// relative starts with a "..", though, so check before prepending.
return './' + path + queryHash;
}
return path + queryHash;
}
case 5 /* AbsolutePath */:
return url.path + queryHash;
default:
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
}
}
export { resolve as default };
//# sourceMappingURL=resolve-uri.mjs.map
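
Worked by hand from the parsing and normalization code above (paths are hypothetical): dotted segments collapse, and because both input and base are relative the result keeps a leading `./`.

```typescript
import resolve from '@jridgewell/resolve-uri';

resolve('./foo/./bar/../baz', './dir/file.js'); // => './dir/foo/baz'
```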

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,240 @@
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.resolveURI = factory());
})(this, (function () { 'use strict';
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
* Matches the parts of a URL:
* 1. Scheme, including ":", guaranteed.
* 2. User/password, including "@", optional.
* 3. Host, guaranteed.
* 4. Port, including ":", optional.
* 5. Path, including "/", optional.
* 6. Query, including "?", optional.
* 7. Hash, including "#", optional.
*/
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
* File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
*
* 1. Host, optional.
* 2. Path, which may include "/", guaranteed.
* 3. Query, including "?", optional.
* 4. Hash, including "#", optional.
*/
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
function isAbsoluteUrl(input) {
return schemeRegex.test(input);
}
function isSchemeRelativeUrl(input) {
return input.startsWith('//');
}
function isAbsolutePath(input) {
return input.startsWith('/');
}
function isFileUrl(input) {
return input.startsWith('file:');
}
function isRelative(input) {
return /^[.?#]/.test(input);
}
function parseAbsoluteUrl(input) {
const match = urlRegex.exec(input);
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
}
function parseFileUrl(input) {
const match = fileRegex.exec(input);
const path = match[2];
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
}
function makeUrl(scheme, user, host, port, path, query, hash) {
return {
scheme,
user,
host,
port,
path,
query,
hash,
type: 7 /* Absolute */,
};
}
function parseUrl(input) {
if (isSchemeRelativeUrl(input)) {
const url = parseAbsoluteUrl('http:' + input);
url.scheme = '';
url.type = 6 /* SchemeRelative */;
return url;
}
if (isAbsolutePath(input)) {
const url = parseAbsoluteUrl('http://foo.com' + input);
url.scheme = '';
url.host = '';
url.type = 5 /* AbsolutePath */;
return url;
}
if (isFileUrl(input))
return parseFileUrl(input);
if (isAbsoluteUrl(input))
return parseAbsoluteUrl(input);
const url = parseAbsoluteUrl('http://foo.com/' + input);
url.scheme = '';
url.host = '';
url.type = input
? input.startsWith('?')
? 3 /* Query */
: input.startsWith('#')
? 2 /* Hash */
: 4 /* RelativePath */
: 1 /* Empty */;
return url;
}
function stripPathFilename(path) {
// If a path ends with a parent directory "..", then it's a relative path with excess parent
// paths. It's not a file, so we can't strip it.
if (path.endsWith('/..'))
return path;
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
function mergePaths(url, base) {
normalizePath(base, base.type);
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
// path).
if (url.path === '/') {
url.path = base.path;
}
else {
// Resolution happens relative to the base path's directory, not the file.
url.path = stripPathFilename(base.path) + url.path;
}
}
/**
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
* "foo/.". We need to normalize to a standard representation.
*/
function normalizePath(url, type) {
const rel = type <= 4 /* RelativePath */;
const pieces = url.path.split('/');
// We need to preserve the first piece always, so that we output a leading slash. The item at
// pieces[0] is an empty string.
let pointer = 1;
// Positive is the number of real directories we've output, used for popping a parent directory.
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
let positive = 0;
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
// real directory, we won't need to append, unless the other conditions happen again.
let addTrailingSlash = false;
for (let i = 1; i < pieces.length; i++) {
const piece = pieces[i];
// An empty directory, could be a trailing slash, or just a double "//" in the path.
if (!piece) {
addTrailingSlash = true;
continue;
}
// If we encounter a real directory, then we don't need to append anymore.
addTrailingSlash = false;
// A current directory, which we can always drop.
if (piece === '.')
continue;
// A parent directory, we need to see if there are any real directories we can pop. Else, we
// have an excess of parents, and we'll need to keep the "..".
if (piece === '..') {
if (positive) {
addTrailingSlash = true;
positive--;
pointer--;
}
else if (rel) {
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
pieces[pointer++] = piece;
}
continue;
}
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
// any popped or dropped directories.
pieces[pointer++] = piece;
positive++;
}
let path = '';
for (let i = 1; i < pointer; i++) {
path += '/' + pieces[i];
}
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
path += '/';
}
url.path = path;
}
/**
* Attempts to resolve `input` URL/path relative to `base`.
*/
function resolve(input, base) {
if (!input && !base)
return '';
const url = parseUrl(input);
let inputType = url.type;
if (base && inputType !== 7 /* Absolute */) {
const baseUrl = parseUrl(base);
const baseType = baseUrl.type;
switch (inputType) {
case 1 /* Empty */:
url.hash = baseUrl.hash;
// fall through
case 2 /* Hash */:
url.query = baseUrl.query;
// fall through
case 3 /* Query */:
case 4 /* RelativePath */:
mergePaths(url, baseUrl);
// fall through
case 5 /* AbsolutePath */:
// The host, user, and port are joined, you can't copy one without the others.
url.user = baseUrl.user;
url.host = baseUrl.host;
url.port = baseUrl.port;
// fall through
case 6 /* SchemeRelative */:
// The input doesn't even have a scheme, so we need to copy at least that over.
url.scheme = baseUrl.scheme;
}
if (baseType > inputType)
inputType = baseType;
}
normalizePath(url, inputType);
const queryHash = url.query + url.hash;
switch (inputType) {
// This is impossible, because of the empty checks at the start of the function.
// case UrlType.Empty:
case 2 /* Hash */:
case 3 /* Query */:
return queryHash;
case 4 /* RelativePath */: {
// The first char is always a "/", and we need it to be relative.
const path = url.path.slice(1);
if (!path)
return queryHash || '.';
if (isRelative(base || input) && !isRelative(path)) {
// If base started with a leading ".", or there is no base and input started with a ".",
// then we need to ensure that the relative path starts with a ".". We don't know if
// relative starts with a "..", though, so check before prepending.
return './' + path + queryHash;
}
return path + queryHash;
}
case 5 /* AbsolutePath */:
return url.path + queryHash;
default:
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
}
}
return resolve;
}));
//# sourceMappingURL=resolve-uri.umd.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,4 @@
/**
* Attempts to resolve `input` URL/path relative to `base`.
*/
export default function resolve(input: string, base: string | undefined): string;

69
node_modules/@jridgewell/resolve-uri/package.json generated vendored Normal file
View File

@@ -0,0 +1,69 @@
{
"name": "@jridgewell/resolve-uri",
"version": "3.1.2",
"description": "Resolve a URI relative to an optional base URI",
"keywords": [
"resolve",
"uri",
"url",
"path"
],
"author": "Justin Ridgewell <justin@ridgewell.name>",
"license": "MIT",
"repository": "https://github.com/jridgewell/resolve-uri",
"main": "dist/resolve-uri.umd.js",
"module": "dist/resolve-uri.mjs",
"types": "dist/types/resolve-uri.d.ts",
"exports": {
".": [
{
"types": "./dist/types/resolve-uri.d.ts",
"browser": "./dist/resolve-uri.umd.js",
"require": "./dist/resolve-uri.umd.js",
"import": "./dist/resolve-uri.mjs"
},
"./dist/resolve-uri.umd.js"
],
"./package.json": "./package.json"
},
"files": [
"dist"
],
"engines": {
"node": ">=6.0.0"
},
"scripts": {
"prebuild": "rm -rf dist",
"build": "run-s -n build:*",
"build:rollup": "rollup -c rollup.config.js",
"build:ts": "tsc --project tsconfig.build.json",
"lint": "run-s -n lint:*",
"lint:prettier": "npm run test:lint:prettier -- --write",
"lint:ts": "npm run test:lint:ts -- --fix",
"pretest": "run-s build:rollup",
"test": "run-s -n test:lint test:only",
"test:debug": "mocha --inspect-brk",
"test:lint": "run-s -n test:lint:*",
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
"test:only": "mocha",
"test:coverage": "c8 mocha",
"test:watch": "mocha --watch",
"prepublishOnly": "npm run preversion",
"preversion": "run-s test build"
},
"devDependencies": {
"@jridgewell/resolve-uri-latest": "npm:@jridgewell/resolve-uri@*",
"@rollup/plugin-typescript": "8.3.0",
"@typescript-eslint/eslint-plugin": "5.10.0",
"@typescript-eslint/parser": "5.10.0",
"c8": "7.11.0",
"eslint": "8.7.0",
"eslint-config-prettier": "8.3.0",
"mocha": "9.2.0",
"npm-run-all": "4.1.5",
"prettier": "2.5.1",
"rollup": "2.66.0",
"typescript": "4.5.5"
}
}

19
node_modules/@jridgewell/source-map/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,19 @@
Copyright 2024 Justin Ridgewell <justin@ridgewell.name>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

184
node_modules/@jridgewell/source-map/README.md generated vendored Normal file
View File

@@ -0,0 +1,184 @@
# @jridgewell/source-map
> Packages `@jridgewell/trace-mapping` and `@jridgewell/gen-mapping` into the familiar source-map API
This isn't the full API, but it's the core functionality. This wraps
[@jridgewell/trace-mapping][trace-mapping] and [@jridgewell/gen-mapping][gen-mapping]
implementations.
## Installation
```sh
npm install @jridgewell/source-map
```
## Usage
TODO
### SourceMapConsumer
```typescript
import { SourceMapConsumer } from '@jridgewell/source-map';
const smc = new SourceMapConsumer({
version: 3,
names: ['foo'],
sources: ['input.js'],
mappings: 'AAAAA',
});
```
#### SourceMapConsumer.fromSourceMap(mapGenerator[, mapUrl])
Transforms a `SourceMapGenerator` into a `SourceMapConsumer`.
```typescript
const smg = new SourceMapGenerator();
const smc = SourceMapConsumer.fromSourceMap(smg);
smc.originalPositionFor({ line: 1, column: 0 });
```
#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
```typescript
const smc = new SourceMapConsumer(map);
smc.originalPositionFor({ line: 1, column: 0 });
```
#### SourceMapConsumer.prototype.mappings
```typescript
const smc = new SourceMapConsumer(map);
smc.mappings; // AAAA
```
#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)
```typescript
const smc = new SourceMapConsumer(map);
smc.allGeneratedPositionsFor({ line: 1, column: 5, source: "baz.ts" });
// [
// { line: 2, column: 8 }
// ]
```
#### SourceMapConsumer.prototype.eachMapping(callback[, context[, order]])
> This implementation currently does not support the "order" parameter.
> This function can only iterate in Generated order.
```typescript
const smc = new SourceMapConsumer(map);
smc.eachMapping((mapping) => {
// { source: 'baz.ts',
// generatedLine: 4,
// generatedColumn: 5,
// originalLine: 4,
// originalColumn: 5,
// name: null }
});
```
#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
```typescript
const smc = new SourceMapConsumer(map);
smc.generatedPositionFor({ line: 1, column: 5, source: "baz.ts" });
// { line: 2, column: 8 }
```
#### SourceMapConsumer.prototype.hasContentsOfAllSources()
```typescript
const smc = new SourceMapConsumer(map);
smc.hasContentsOfAllSources();
// true
```
#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])
```typescript
const smc = new SourceMapConsumer(map);
smc.sourceContentFor("baz.ts");
// "export default ..."
```
#### SourceMapConsumer.prototype.version
Returns the source map's version
### SourceMapGenerator
```typescript
import { SourceMapGenerator } from '@jridgewell/source-map';
const smg = new SourceMapGenerator({
file: 'output.js',
sourceRoot: 'https://example.com/',
});
```
#### SourceMapGenerator.fromSourceMap(map)
Transform a `SourceMapConsumer` into a `SourceMapGenerator`.
```typescript
const smc = new SourceMapConsumer();
const smg = SourceMapGenerator.fromSourceMap(smc);
```
#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])
> This method is not implemented yet
#### SourceMapGenerator.prototype.addMapping(mapping)
```typescript
const smg = new SourceMapGenerator();
smg.addMapping({
generated: { line: 1, column: 0 },
source: 'input.js',
original: { line: 1, column: 0 },
name: 'foo',
});
```
#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)
```typescript
const smg = new SourceMapGenerator();
smg.setSourceContent('input.js', 'foobar');
```
#### SourceMapGenerator.prototype.toJSON()
```typescript
const smg = new SourceMapGenerator();
smg.toJSON(); // { version: 3, names: [], sources: [], mappings: '' }
```
#### SourceMapGenerator.prototype.toString()
```typescript
const smg = new SourceMapGenerator();
smg.toString(); // "{version:3,names:[],sources:[],mappings:''}"
```
#### SourceMapGenerator.prototype.toDecodedMap()
```typescript
const smg = new SourceMapGenerator();
smg.toDecodedMap(); // { version: 3, names: [], sources: [], mappings: [] }
```
## Known differences with other implementations
This implementation has some differences with `source-map` and `source-map-js`.
- `SourceMapConsumer.prototype.eachMapping()`
- Does not support the `order` argument
- `SourceMapGenerator.prototype.applySourceMap()`
- Not implemented
[trace-mapping]: https://github.com/jridgewell/sourcemaps/tree/main/packages/trace-mapping/
[gen-mapping]: https://github.com/jridgewell/sourcemaps/tree/main/packages/gen-mapping/

101
node_modules/@jridgewell/source-map/dist/source-map.mjs generated vendored Normal file
View File

@@ -0,0 +1,101 @@
// src/source-map.ts
import {
AnyMap,
originalPositionFor,
generatedPositionFor,
allGeneratedPositionsFor,
eachMapping,
encodedMappings,
sourceContentFor
} from "@jridgewell/trace-mapping";
import {
GenMapping,
maybeAddMapping,
toDecodedMap,
toEncodedMap,
setSourceContent,
fromMap
} from "@jridgewell/gen-mapping";
var SourceMapConsumer = class _SourceMapConsumer {
constructor(map, mapUrl) {
const trace = this._map = new AnyMap(map, mapUrl);
this.file = trace.file;
this.names = trace.names;
this.sourceRoot = trace.sourceRoot;
this.sources = trace.resolvedSources;
this.sourcesContent = trace.sourcesContent;
this.version = trace.version;
}
static fromSourceMap(map, mapUrl) {
if (map.toDecodedMap) {
return new _SourceMapConsumer(map.toDecodedMap(), mapUrl);
}
return new _SourceMapConsumer(map.toJSON(), mapUrl);
}
get mappings() {
return encodedMappings(this._map);
}
originalPositionFor(needle) {
return originalPositionFor(this._map, needle);
}
generatedPositionFor(originalPosition) {
return generatedPositionFor(this._map, originalPosition);
}
allGeneratedPositionsFor(originalPosition) {
return allGeneratedPositionsFor(this._map, originalPosition);
}
hasContentsOfAllSources() {
if (!this.sourcesContent || this.sourcesContent.length !== this.sources.length) {
return false;
}
for (const content of this.sourcesContent) {
if (content == null) {
return false;
}
}
return true;
}
sourceContentFor(source, nullOnMissing) {
const sourceContent = sourceContentFor(this._map, source);
if (sourceContent != null) {
return sourceContent;
}
if (nullOnMissing) {
return null;
}
throw new Error(`"${source}" is not in the SourceMap.`);
}
eachMapping(callback, context) {
eachMapping(this._map, context ? callback.bind(context) : callback);
}
destroy() {
}
};
var SourceMapGenerator = class _SourceMapGenerator {
constructor(opts) {
this._map = opts instanceof GenMapping ? opts : new GenMapping(opts);
}
static fromSourceMap(consumer) {
return new _SourceMapGenerator(fromMap(consumer));
}
addMapping(mapping) {
maybeAddMapping(this._map, mapping);
}
setSourceContent(source, content) {
setSourceContent(this._map, source, content);
}
toJSON() {
return toEncodedMap(this._map);
}
toString() {
return JSON.stringify(this.toJSON());
}
toDecodedMap() {
return toDecodedMap(this._map);
}
};
export {
SourceMapConsumer,
SourceMapGenerator
};
//# sourceMappingURL=source-map.mjs.map

View File

@@ -0,0 +1,6 @@
{
"version": 3,
"sources": ["../src/source-map.ts"],
"mappings": ";AAAA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAaA,IAAM,oBAAN,MAAM,mBAAkB;AAAA,EAS7B,YACE,KACA,QACA;AACA,UAAM,QAAS,KAAK,OAAO,IAAI,OAAO,KAAK,MAAM;AAEjD,SAAK,OAAO,MAAM;AAClB,SAAK,QAAQ,MAAM;AACnB,SAAK,aAAa,MAAM;AACxB,SAAK,UAAU,MAAM;AACrB,SAAK,iBAAiB,MAAM;AAC5B,SAAK,UAAU,MAAM;AAAA,EACvB;AAAA,EAEA,OAAO,cAAc,KAAyB,QAAuC;AAGnF,QAAI,IAAI,cAAc;AACpB,aAAO,IAAI,mBAAkB,IAAI,aAAa,GAA8B,MAAM;AAAA,IACpF;AAGA,WAAO,IAAI,mBAAkB,IAAI,OAAO,GAA8B,MAAM;AAAA,EAC9E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,gBAAgB,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,oBACE,QACwC;AACxC,WAAO,oBAAoB,KAAK,MAAM,MAAM;AAAA,EAC9C;AAAA,EAEA,qBACE,kBACyC;AACzC,WAAO,qBAAqB,KAAK,MAAM,gBAAgB;AAAA,EACzD;AAAA,EAEA,yBACE,kBAC2C;AAC3C,WAAO,yBAAyB,KAAK,MAAM,gBAAgB;AAAA,EAC7D;AAAA,EAEA,0BAAmC;AACjC,QAAI,CAAC,KAAK,kBAAkB,KAAK,eAAe,WAAW,KAAK,QAAQ,QAAQ;AAC9E,aAAO;AAAA,IACT;AAEA,eAAW,WAAW,KAAK,gBAAgB;AACzC,UAAI,WAAW,MAAM;AACnB,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,iBAAiB,QAAgB,eAAwC;AACvE,UAAM,gBAAgB,iBAAiB,KAAK,MAAM,MAAM;AACxD,QAAI,iBAAiB,MAAM;AACzB,aAAO;AAAA,IACT;AAEA,QAAI,eAAe;AACjB,aAAO;AAAA,IACT;AACA,UAAM,IAAI,MAAM,IAAI,MAAM,4BAA4B;AAAA,EACxD;AAAA,EAEA,YACE,UACA,SACM;AAEN,gBAAY,KAAK,MAAM,UAAU,SAAS,KAAK,OAAO,IAAI,QAAQ;AAAA,EACpE;AAAA,EAEA,UAAU;AAAA,EAEV;AACF;AAEO,IAAM,qBAAN,MAAM,oBAAmB;AAAA,EAG9B,YAAY,MAAgE;AAE1E,SAAK,OAAO,gBAAgB,aAAa,OAAO,IAAI,WAAW,IAAI;AAAA,EACrE;AAAA,EAEA,OAAO,cAAc,UAA6B;AAChD,WAAO,IAAI,oBAAmB,QAAQ,QAAQ,CAAC;AAAA,EACjD;AAAA,EAEA,WAAW,SAAoF;AAC7F,oBAAgB,KAAK,MAAM,OAAO;AAAA,EACpC;AAAA,EAEA,iBACE,QACA,SACqC;AACrC,qBAAiB,KAAK,MAAM,QAAQ,OAAO;AAAA,EAC7C;AAAA,EAEA,SAA0C;AACxC,WAAO,aAAa,KAAK,IAAI;AAAA,EAC/B;AAAA,EAEA,WAAmB;AACjB,WAAO,KAAK,UAAU,KAAK,OAAO,CAAC;AAAA,EACrC;AAAA,EAEA,eAAgD;AAC9C,WAAO,aAAa,KAAK,IAAI;AAAA,EAC/B;AACF;",
"names": []
}

View File

@@ -0,0 +1,152 @@
(function (global, factory) {
if (typeof exports === 'object' && typeof module !== 'undefined') {
factory(module, require('@jridgewell/gen-mapping'), require('@jridgewell/trace-mapping'));
module.exports = def(module);
} else if (typeof define === 'function' && define.amd) {
define(['module', '@jridgewell/gen-mapping', '@jridgewell/trace-mapping'], function(mod) {
factory.apply(this, arguments);
mod.exports = def(mod);
});
} else {
const mod = { exports: {} };
factory(mod, global.genMapping, global.traceMapping);
global = typeof globalThis !== 'undefined' ? globalThis : global || self;
global.sourceMap = def(mod);
}
function def(m) { return 'default' in m.exports ? m.exports.default : m.exports; }
})(this, (function (module, require_genMapping, require_traceMapping) {
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __commonJS = (cb, mod) => function __require() {
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// umd:@jridgewell/trace-mapping
var require_trace_mapping = __commonJS({
"umd:@jridgewell/trace-mapping"(exports, module2) {
module2.exports = require_traceMapping;
}
});
// umd:@jridgewell/gen-mapping
var require_gen_mapping = __commonJS({
"umd:@jridgewell/gen-mapping"(exports, module2) {
module2.exports = require_genMapping;
}
});
// src/source-map.ts
var source_map_exports = {};
__export(source_map_exports, {
SourceMapConsumer: () => SourceMapConsumer,
SourceMapGenerator: () => SourceMapGenerator
});
module.exports = __toCommonJS(source_map_exports);
var import_trace_mapping = __toESM(require_trace_mapping());
var import_gen_mapping = __toESM(require_gen_mapping());
var SourceMapConsumer = class _SourceMapConsumer {
constructor(map, mapUrl) {
const trace = this._map = new import_trace_mapping.AnyMap(map, mapUrl);
this.file = trace.file;
this.names = trace.names;
this.sourceRoot = trace.sourceRoot;
this.sources = trace.resolvedSources;
this.sourcesContent = trace.sourcesContent;
this.version = trace.version;
}
static fromSourceMap(map, mapUrl) {
if (map.toDecodedMap) {
return new _SourceMapConsumer(map.toDecodedMap(), mapUrl);
}
return new _SourceMapConsumer(map.toJSON(), mapUrl);
}
get mappings() {
return (0, import_trace_mapping.encodedMappings)(this._map);
}
originalPositionFor(needle) {
return (0, import_trace_mapping.originalPositionFor)(this._map, needle);
}
generatedPositionFor(originalPosition) {
return (0, import_trace_mapping.generatedPositionFor)(this._map, originalPosition);
}
allGeneratedPositionsFor(originalPosition) {
return (0, import_trace_mapping.allGeneratedPositionsFor)(this._map, originalPosition);
}
hasContentsOfAllSources() {
if (!this.sourcesContent || this.sourcesContent.length !== this.sources.length) {
return false;
}
for (const content of this.sourcesContent) {
if (content == null) {
return false;
}
}
return true;
}
sourceContentFor(source, nullOnMissing) {
const sourceContent = (0, import_trace_mapping.sourceContentFor)(this._map, source);
if (sourceContent != null) {
return sourceContent;
}
if (nullOnMissing) {
return null;
}
throw new Error(`"${source}" is not in the SourceMap.`);
}
eachMapping(callback, context) {
(0, import_trace_mapping.eachMapping)(this._map, context ? callback.bind(context) : callback);
}
destroy() {
}
};
var SourceMapGenerator = class _SourceMapGenerator {
constructor(opts) {
this._map = opts instanceof import_gen_mapping.GenMapping ? opts : new import_gen_mapping.GenMapping(opts);
}
static fromSourceMap(consumer) {
return new _SourceMapGenerator((0, import_gen_mapping.fromMap)(consumer));
}
addMapping(mapping) {
(0, import_gen_mapping.maybeAddMapping)(this._map, mapping);
}
setSourceContent(source, content) {
(0, import_gen_mapping.setSourceContent)(this._map, source, content);
}
toJSON() {
return (0, import_gen_mapping.toEncodedMap)(this._map);
}
toString() {
return JSON.stringify(this.toJSON());
}
toDecodedMap() {
return (0, import_gen_mapping.toDecodedMap)(this._map);
}
};
}));
//# sourceMappingURL=source-map.umd.js.map


@@ -0,0 +1,6 @@
{
"version": 3,
"sources": ["umd:@jridgewell/trace-mapping", "umd:@jridgewell/gen-mapping", "../src/source-map.ts"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA,2CAAAA,SAAA;AAAA,IAAAA,QAAO,UAAU;AAAA;AAAA;;;ACAjB;AAAA,yCAAAC,SAAA;AAAA,IAAAA,QAAO,UAAU;AAAA;AAAA;;;ACAjB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAQO;AACP,yBAOO;AAaA,IAAM,oBAAN,MAAM,mBAAkB;AAAA,EAS7B,YACE,KACA,QACA;AACA,UAAM,QAAS,KAAK,OAAO,IAAI,4BAAO,KAAK,MAAM;AAEjD,SAAK,OAAO,MAAM;AAClB,SAAK,QAAQ,MAAM;AACnB,SAAK,aAAa,MAAM;AACxB,SAAK,UAAU,MAAM;AACrB,SAAK,iBAAiB,MAAM;AAC5B,SAAK,UAAU,MAAM;AAAA,EACvB;AAAA,EAEA,OAAO,cAAc,KAAyB,QAAuC;AAGnF,QAAI,IAAI,cAAc;AACpB,aAAO,IAAI,mBAAkB,IAAI,aAAa,GAA8B,MAAM;AAAA,IACpF;AAGA,WAAO,IAAI,mBAAkB,IAAI,OAAO,GAA8B,MAAM;AAAA,EAC9E;AAAA,EAEA,IAAI,WAAmB;AACrB,eAAO,sCAAgB,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,oBACE,QACwC;AACxC,eAAO,0CAAoB,KAAK,MAAM,MAAM;AAAA,EAC9C;AAAA,EAEA,qBACE,kBACyC;AACzC,eAAO,2CAAqB,KAAK,MAAM,gBAAgB;AAAA,EACzD;AAAA,EAEA,yBACE,kBAC2C;AAC3C,eAAO,+CAAyB,KAAK,MAAM,gBAAgB;AAAA,EAC7D;AAAA,EAEA,0BAAmC;AACjC,QAAI,CAAC,KAAK,kBAAkB,KAAK,eAAe,WAAW,KAAK,QAAQ,QAAQ;AAC9E,aAAO;AAAA,IACT;AAEA,eAAW,WAAW,KAAK,gBAAgB;AACzC,UAAI,WAAW,MAAM;AACnB,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,iBAAiB,QAAgB,eAAwC;AACvE,UAAM,oBAAgB,uCAAiB,KAAK,MAAM,MAAM;AACxD,QAAI,iBAAiB,MAAM;AACzB,aAAO;AAAA,IACT;AAEA,QAAI,eAAe;AACjB,aAAO;AAAA,IACT;AACA,UAAM,IAAI,MAAM,IAAI,MAAM,4BAA4B;AAAA,EACxD;AAAA,EAEA,YACE,UACA,SACM;AAEN,0CAAY,KAAK,MAAM,UAAU,SAAS,KAAK,OAAO,IAAI,QAAQ;AAAA,EACpE;AAAA,EAEA,UAAU;AAAA,EAEV;AACF;AAEO,IAAM,qBAAN,MAAM,oBAAmB;AAAA,EAG9B,YAAY,MAAgE;AAE1E,SAAK,OAAO,gBAAgB,gCAAa,OAAO,IAAI,8BAAW,IAAI;AAAA,EACrE;AAAA,EAEA,OAAO,cAAc,UAA6B;AAChD,WAAO,IAAI,wBAAmB,4BAAQ,QAAQ,CAAC;AAAA,EACjD;AAAA,EAEA,WAAW,SAAoF;AAC7F,4CAAgB,KAAK,MAAM,OAAO;AAAA,EACpC;AAAA,EAEA,iBACE,QACA,SACqC;AACrC,6CAAiB,KAAK,MAAM,QAAQ,OAAO;AAAA,EAC7C;AAAA,EAEA,SAA0C;AACxC,eAAO,iCAAa,KAAK,IAAI;AAAA,EAC/B;AAAA,EAEA,WAAmB;AACjB,WAAO,KAAK,UAAU,KAAK,OAAO,CAAC;AAAA,EACrC;AAAA,EAEA,eAAgD;AAC9C,eAAO,iCAAa,KAAK,IAAI;AAAA,EAC/B;AACF;",
"names": ["module", "module"]
}

68
node_modules/@jridgewell/source-map/package.json generated vendored Normal file

@@ -0,0 +1,68 @@
{
"name": "@jridgewell/source-map",
"version": "0.3.11",
"description": "Packages @jridgewell/trace-mapping and @jridgewell/gen-mapping into the familiar source-map API",
"keywords": [
"sourcemap",
"source",
"map"
],
"main": "dist/source-map.umd.js",
"module": "dist/source-map.mjs",
"types": "types/source-map.d.cts",
"files": [
"dist",
"src",
"types"
],
"exports": {
".": [
{
"import": {
"types": "./types/source-map.d.mts",
"default": "./dist/source-map.mjs"
},
"default": {
"types": "./types/source-map.d.cts",
"default": "./dist/source-map.umd.js"
}
},
"./dist/source-map.umd.js"
],
"./package.json": "./package.json"
},
"scripts": {
"benchmark": "run-s build:code benchmark:*",
"benchmark:install": "cd benchmark && npm install",
"benchmark:only": "node --expose-gc benchmark/index.js",
"build": "run-s -n build:code build:types",
"build:code": "node ../../esbuild.mjs source-map.ts",
"build:types": "run-s build:types:force build:types:emit build:types:mts",
"build:types:force": "rimraf tsconfig.build.tsbuildinfo",
"build:types:emit": "tsc --project tsconfig.build.json",
"build:types:mts": "node ../../mts-types.mjs",
"clean": "run-s -n clean:code clean:types",
"clean:code": "tsc --build --clean tsconfig.build.json",
"clean:types": "rimraf dist types",
"test": "run-s -n test:types test:only test:format",
"test:format": "prettier --check '{src,test}/**/*.ts'",
"test:only": "mocha",
"test:types": "eslint '{src,test}/**/*.ts'",
"lint": "run-s -n lint:types lint:format",
"lint:format": "npm run test:format -- --write",
"lint:types": "npm run test:types -- --fix",
"prepublishOnly": "npm run-s -n build test"
},
"homepage": "https://github.com/jridgewell/sourcemaps/tree/main/packages/source-map",
"repository": {
"type": "git",
"url": "git+https://github.com/jridgewell/sourcemaps.git",
"directory": "packages/source-map"
},
"author": "Justin Ridgewell <justin@ridgewell.name>",
"license": "MIT",
"dependencies": {
"@jridgewell/gen-mapping": "^0.3.5",
"@jridgewell/trace-mapping": "^0.3.25"
}
}

159
node_modules/@jridgewell/source-map/src/source-map.ts generated vendored Normal file

@@ -0,0 +1,159 @@
import {
AnyMap,
originalPositionFor,
generatedPositionFor,
allGeneratedPositionsFor,
eachMapping,
encodedMappings,
sourceContentFor,
} from '@jridgewell/trace-mapping';
import {
GenMapping,
maybeAddMapping,
toDecodedMap,
toEncodedMap,
setSourceContent,
fromMap,
} from '@jridgewell/gen-mapping';
import type {
TraceMap,
SourceMapInput,
SectionedSourceMapInput,
DecodedSourceMap,
} from '@jridgewell/trace-mapping';
export type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap };
import type { Mapping, EncodedSourceMap } from '@jridgewell/gen-mapping';
export type { Mapping, EncodedSourceMap };
export class SourceMapConsumer {
declare private _map: TraceMap;
declare file: TraceMap['file'];
declare names: TraceMap['names'];
declare sourceRoot: TraceMap['sourceRoot'];
declare sources: TraceMap['sources'];
declare sourcesContent: TraceMap['sourcesContent'];
declare version: TraceMap['version'];
constructor(
map: ConstructorParameters<typeof AnyMap>[0],
mapUrl?: ConstructorParameters<typeof AnyMap>[1],
) {
const trace = (this._map = new AnyMap(map, mapUrl));
this.file = trace.file;
this.names = trace.names;
this.sourceRoot = trace.sourceRoot;
this.sources = trace.resolvedSources;
this.sourcesContent = trace.sourcesContent;
this.version = trace.version;
}
static fromSourceMap(map: SourceMapGenerator, mapUrl?: Parameters<typeof AnyMap>[1]) {
// This is more performant if we receive
// a @jridgewell/source-map SourceMapGenerator
if (map.toDecodedMap) {
return new SourceMapConsumer(map.toDecodedMap() as SectionedSourceMapInput, mapUrl);
}
// This is a fallback for `source-map` and `source-map-js`
return new SourceMapConsumer(map.toJSON() as SectionedSourceMapInput, mapUrl);
}
get mappings(): string {
return encodedMappings(this._map);
}
originalPositionFor(
needle: Parameters<typeof originalPositionFor>[1],
): ReturnType<typeof originalPositionFor> {
return originalPositionFor(this._map, needle);
}
generatedPositionFor(
originalPosition: Parameters<typeof generatedPositionFor>[1],
): ReturnType<typeof generatedPositionFor> {
return generatedPositionFor(this._map, originalPosition);
}
allGeneratedPositionsFor(
originalPosition: Parameters<typeof generatedPositionFor>[1],
): ReturnType<typeof generatedPositionFor>[] {
return allGeneratedPositionsFor(this._map, originalPosition);
}
hasContentsOfAllSources(): boolean {
if (!this.sourcesContent || this.sourcesContent.length !== this.sources.length) {
return false;
}
for (const content of this.sourcesContent) {
if (content == null) {
return false;
}
}
return true;
}
sourceContentFor(source: string, nullOnMissing?: boolean): string | null {
const sourceContent = sourceContentFor(this._map, source);
if (sourceContent != null) {
return sourceContent;
}
if (nullOnMissing) {
return null;
}
throw new Error(`"${source}" is not in the SourceMap.`);
}
eachMapping(
callback: Parameters<typeof eachMapping>[1],
context?: any /*, order?: number*/,
): void {
// order is ignored as @jridgewell/trace-map doesn't implement it
eachMapping(this._map, context ? callback.bind(context) : callback);
}
destroy() {
// noop.
}
}
export class SourceMapGenerator {
declare private _map: GenMapping;
constructor(opts: ConstructorParameters<typeof GenMapping>[0] | GenMapping) {
// TODO :: should this be duck-typed ?
this._map = opts instanceof GenMapping ? opts : new GenMapping(opts);
}
static fromSourceMap(consumer: SourceMapConsumer) {
return new SourceMapGenerator(fromMap(consumer));
}
addMapping(mapping: Parameters<typeof maybeAddMapping>[1]): ReturnType<typeof maybeAddMapping> {
maybeAddMapping(this._map, mapping);
}
setSourceContent(
source: Parameters<typeof setSourceContent>[1],
content: Parameters<typeof setSourceContent>[2],
): ReturnType<typeof setSourceContent> {
setSourceContent(this._map, source, content);
}
toJSON(): ReturnType<typeof toEncodedMap> {
return toEncodedMap(this._map);
}
toString(): string {
return JSON.stringify(this.toJSON());
}
toDecodedMap(): ReturnType<typeof toDecodedMap> {
return toDecodedMap(this._map);
}
}


@@ -0,0 +1,36 @@
import { AnyMap, originalPositionFor, generatedPositionFor, eachMapping } from '@jridgewell/trace-mapping';
import { GenMapping, maybeAddMapping, toDecodedMap, toEncodedMap, setSourceContent } from '@jridgewell/gen-mapping';
import type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap } from '@jridgewell/trace-mapping';
export type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap };
import type { Mapping, EncodedSourceMap } from '@jridgewell/gen-mapping';
export type { Mapping, EncodedSourceMap };
export declare class SourceMapConsumer {
private _map;
file: TraceMap['file'];
names: TraceMap['names'];
sourceRoot: TraceMap['sourceRoot'];
sources: TraceMap['sources'];
sourcesContent: TraceMap['sourcesContent'];
version: TraceMap['version'];
constructor(map: ConstructorParameters<typeof AnyMap>[0], mapUrl?: ConstructorParameters<typeof AnyMap>[1]);
static fromSourceMap(map: SourceMapGenerator, mapUrl?: Parameters<typeof AnyMap>[1]): SourceMapConsumer;
get mappings(): string;
originalPositionFor(needle: Parameters<typeof originalPositionFor>[1]): ReturnType<typeof originalPositionFor>;
generatedPositionFor(originalPosition: Parameters<typeof generatedPositionFor>[1]): ReturnType<typeof generatedPositionFor>;
allGeneratedPositionsFor(originalPosition: Parameters<typeof generatedPositionFor>[1]): ReturnType<typeof generatedPositionFor>[];
hasContentsOfAllSources(): boolean;
sourceContentFor(source: string, nullOnMissing?: boolean): string | null;
eachMapping(callback: Parameters<typeof eachMapping>[1], context?: any): void;
destroy(): void;
}
export declare class SourceMapGenerator {
private _map;
constructor(opts: ConstructorParameters<typeof GenMapping>[0] | GenMapping);
static fromSourceMap(consumer: SourceMapConsumer): SourceMapGenerator;
addMapping(mapping: Parameters<typeof maybeAddMapping>[1]): ReturnType<typeof maybeAddMapping>;
setSourceContent(source: Parameters<typeof setSourceContent>[1], content: Parameters<typeof setSourceContent>[2]): ReturnType<typeof setSourceContent>;
toJSON(): ReturnType<typeof toEncodedMap>;
toString(): string;
toDecodedMap(): ReturnType<typeof toDecodedMap>;
}
//# sourceMappingURL=source-map.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"source-map.d.ts","sourceRoot":"","sources":["../src/source-map.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,MAAM,EACN,mBAAmB,EACnB,oBAAoB,EAEpB,WAAW,EAGZ,MAAM,2BAA2B,CAAC;AACnC,OAAO,EACL,UAAU,EACV,eAAe,EACf,YAAY,EACZ,YAAY,EACZ,gBAAgB,EAEjB,MAAM,yBAAyB,CAAC;AAEjC,OAAO,KAAK,EACV,QAAQ,EACR,cAAc,EACd,uBAAuB,EACvB,gBAAgB,EACjB,MAAM,2BAA2B,CAAC;AACnC,YAAY,EAAE,QAAQ,EAAE,cAAc,EAAE,uBAAuB,EAAE,gBAAgB,EAAE,CAAC;AAEpF,OAAO,KAAK,EAAE,OAAO,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AACzE,YAAY,EAAE,OAAO,EAAE,gBAAgB,EAAE,CAAC;AAE1C,qBAAa,iBAAiB;IAC5B,QAAgB,IAAI,CAAW;IACvB,IAAI,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC;IACvB,KAAK,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;IACzB,UAAU,EAAE,QAAQ,CAAC,YAAY,CAAC,CAAC;IACnC,OAAO,EAAE,QAAQ,CAAC,SAAS,CAAC,CAAC;IAC7B,cAAc,EAAE,QAAQ,CAAC,gBAAgB,CAAC,CAAC;IAC3C,OAAO,EAAE,QAAQ,CAAC,SAAS,CAAC,CAAC;gBAGnC,GAAG,EAAE,qBAAqB,CAAC,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC,EAC5C,MAAM,CAAC,EAAE,qBAAqB,CAAC,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;IAYlD,MAAM,CAAC,aAAa,CAAC,GAAG,EAAE,kBAAkB,EAAE,MAAM,CAAC,EAAE,UAAU,CAAC,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;IAWnF,IAAI,QAAQ,IAAI,MAAM,CAErB;IAED,mBAAmB,CACjB,MAAM,EAAE,UAAU,CAAC,OAAO,mBAAmB,CAAC,CAAC,CAAC,CAAC,GAChD,UAAU,CAAC,OAAO,mBAAmB,CAAC;IAIzC,oBAAoB,CAClB,gBAAgB,EAAE,UAAU,CAAC,OAAO,oBAAoB,CAAC,CAAC,CAAC,CAAC,GAC3D,UAAU,CAAC,OAAO,oBAAoB,CAAC;IAI1C,wBAAwB,CACtB,gBAAgB,EAAE,UAAU,CAAC,OAAO,oBAAoB,CAAC,CAAC,CAAC,CAAC,GAC3D,UAAU,CAAC,OAAO,oBAAoB,CAAC,EAAE;IAI5C,uBAAuB,IAAI,OAAO;IAclC,gBAAgB,CAAC,MAAM,EAAE,MAAM,EAAE,aAAa,CAAC,EAAE,OAAO,GAAG,MAAM,GAAG,IAAI;IAYxE,WAAW,CACT,QAAQ,EAAE,UAAU,CAAC,OAAO,WAAW,CAAC,CAAC,CAAC,CAAC,EAC3C,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI;IAKP,OAAO;CAGR;AAED,qBAAa,kBAAkB;IAC7B,QAAgB,IAAI,CAAa;gBAErB,IAAI,EAAE,qBAAqB,CAAC,OAAO,UAAU,CAAC,CAAC,CAAC,CAAC,GAAG,UAAU;IAK1E,MAAM,CAAC,aAAa,CAAC,QAAQ,EAAE,iBAAiB;IAIhD,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,OAAO,eAAe,CAAC,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,OAAO,eAAe,CAAC;IAI9F,gBAAgB,CACd,MAAM,EAAE,UAAU,CAAC,OAAO,gBAAgB,CAAC,CAAC,CAAC,CAAC,EAC9C,OAAO,EAAE,UAAU,CAAC,OAAO,gBAAgB,CAAC,CAAC,CAAC,CAAC,GAC9C,UAAU,CAAC,OAAO,gBAAgB,CAAC;IAItC,MAAM,IAAI,UAAU,CAAC,OAAO,YAAY,CAAC;IAIzC,QAAQ,IAAI,MAAM;IAIlB,YAAY,IAAI,UAAU,CAAC,OAAO,YAAY,CAAC;CAGhD"}


@@ -0,0 +1,36 @@
import { AnyMap, originalPositionFor, generatedPositionFor, eachMapping } from '@jridgewell/trace-mapping';
import { GenMapping, maybeAddMapping, toDecodedMap, toEncodedMap, setSourceContent } from '@jridgewell/gen-mapping';
import type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap } from '@jridgewell/trace-mapping';
export type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap };
import type { Mapping, EncodedSourceMap } from '@jridgewell/gen-mapping';
export type { Mapping, EncodedSourceMap };
export declare class SourceMapConsumer {
private _map;
file: TraceMap['file'];
names: TraceMap['names'];
sourceRoot: TraceMap['sourceRoot'];
sources: TraceMap['sources'];
sourcesContent: TraceMap['sourcesContent'];
version: TraceMap['version'];
constructor(map: ConstructorParameters<typeof AnyMap>[0], mapUrl?: ConstructorParameters<typeof AnyMap>[1]);
static fromSourceMap(map: SourceMapGenerator, mapUrl?: Parameters<typeof AnyMap>[1]): SourceMapConsumer;
get mappings(): string;
originalPositionFor(needle: Parameters<typeof originalPositionFor>[1]): ReturnType<typeof originalPositionFor>;
generatedPositionFor(originalPosition: Parameters<typeof generatedPositionFor>[1]): ReturnType<typeof generatedPositionFor>;
allGeneratedPositionsFor(originalPosition: Parameters<typeof generatedPositionFor>[1]): ReturnType<typeof generatedPositionFor>[];
hasContentsOfAllSources(): boolean;
sourceContentFor(source: string, nullOnMissing?: boolean): string | null;
eachMapping(callback: Parameters<typeof eachMapping>[1], context?: any): void;
destroy(): void;
}
export declare class SourceMapGenerator {
private _map;
constructor(opts: ConstructorParameters<typeof GenMapping>[0] | GenMapping);
static fromSourceMap(consumer: SourceMapConsumer): SourceMapGenerator;
addMapping(mapping: Parameters<typeof maybeAddMapping>[1]): ReturnType<typeof maybeAddMapping>;
setSourceContent(source: Parameters<typeof setSourceContent>[1], content: Parameters<typeof setSourceContent>[2]): ReturnType<typeof setSourceContent>;
toJSON(): ReturnType<typeof toEncodedMap>;
toString(): string;
toDecodedMap(): ReturnType<typeof toDecodedMap>;
}
//# sourceMappingURL=source-map.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"source-map.d.ts","sourceRoot":"","sources":["../src/source-map.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,MAAM,EACN,mBAAmB,EACnB,oBAAoB,EAEpB,WAAW,EAGZ,MAAM,2BAA2B,CAAC;AACnC,OAAO,EACL,UAAU,EACV,eAAe,EACf,YAAY,EACZ,YAAY,EACZ,gBAAgB,EAEjB,MAAM,yBAAyB,CAAC;AAEjC,OAAO,KAAK,EACV,QAAQ,EACR,cAAc,EACd,uBAAuB,EACvB,gBAAgB,EACjB,MAAM,2BAA2B,CAAC;AACnC,YAAY,EAAE,QAAQ,EAAE,cAAc,EAAE,uBAAuB,EAAE,gBAAgB,EAAE,CAAC;AAEpF,OAAO,KAAK,EAAE,OAAO,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AACzE,YAAY,EAAE,OAAO,EAAE,gBAAgB,EAAE,CAAC;AAE1C,qBAAa,iBAAiB;IAC5B,QAAgB,IAAI,CAAW;IACvB,IAAI,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC;IACvB,KAAK,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;IACzB,UAAU,EAAE,QAAQ,CAAC,YAAY,CAAC,CAAC;IACnC,OAAO,EAAE,QAAQ,CAAC,SAAS,CAAC,CAAC;IAC7B,cAAc,EAAE,QAAQ,CAAC,gBAAgB,CAAC,CAAC;IAC3C,OAAO,EAAE,QAAQ,CAAC,SAAS,CAAC,CAAC;gBAGnC,GAAG,EAAE,qBAAqB,CAAC,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC,EAC5C,MAAM,CAAC,EAAE,qBAAqB,CAAC,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;IAYlD,MAAM,CAAC,aAAa,CAAC,GAAG,EAAE,kBAAkB,EAAE,MAAM,CAAC,EAAE,UAAU,CAAC,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;IAWnF,IAAI,QAAQ,IAAI,MAAM,CAErB;IAED,mBAAmB,CACjB,MAAM,EAAE,UAAU,CAAC,OAAO,mBAAmB,CAAC,CAAC,CAAC,CAAC,GAChD,UAAU,CAAC,OAAO,mBAAmB,CAAC;IAIzC,oBAAoB,CAClB,gBAAgB,EAAE,UAAU,CAAC,OAAO,oBAAoB,CAAC,CAAC,CAAC,CAAC,GAC3D,UAAU,CAAC,OAAO,oBAAoB,CAAC;IAI1C,wBAAwB,CACtB,gBAAgB,EAAE,UAAU,CAAC,OAAO,oBAAoB,CAAC,CAAC,CAAC,CAAC,GAC3D,UAAU,CAAC,OAAO,oBAAoB,CAAC,EAAE;IAI5C,uBAAuB,IAAI,OAAO;IAclC,gBAAgB,CAAC,MAAM,EAAE,MAAM,EAAE,aAAa,CAAC,EAAE,OAAO,GAAG,MAAM,GAAG,IAAI;IAYxE,WAAW,CACT,QAAQ,EAAE,UAAU,CAAC,OAAO,WAAW,CAAC,CAAC,CAAC,CAAC,EAC3C,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI;IAKP,OAAO;CAGR;AAED,qBAAa,kBAAkB;IAC7B,QAAgB,IAAI,CAAa;gBAErB,IAAI,EAAE,qBAAqB,CAAC,OAAO,UAAU,CAAC,CAAC,CAAC,CAAC,GAAG,UAAU;IAK1E,MAAM,CAAC,aAAa,CAAC,QAAQ,EAAE,iBAAiB;IAIhD,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,OAAO,eAAe,CAAC,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,OAAO,eAAe,CAAC;IAI9F,gBAAgB,CACd,MAAM,EAAE,UAAU,CAAC,OAAO,gBAAgB,CAAC,CAAC,CAAC,CAAC,EAC9C,OAAO,EAAE,UAAU,CAAC,OAAO,gBAAgB,CAAC,CAAC,CAAC,CAAC,GAC9C,UAAU,CAAC,OAAO,gBAAgB,CAAC;IAItC,MAAM,IAAI,UAAU,CAAC,OAAO,YAAY,CAAC;IAIzC,QAAQ,IAAI,MAAM;IAIlB,YAAY,IAAI,UAAU,CAAC,OAAO,YAAY,CAAC;CAGhD"}

19
node_modules/@jridgewell/sourcemap-codec/LICENSE generated vendored Normal file

@@ -0,0 +1,19 @@
Copyright 2024 Justin Ridgewell <justin@ridgewell.name>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

264
node_modules/@jridgewell/sourcemap-codec/README.md generated vendored Normal file

@@ -0,0 +1,264 @@
# @jridgewell/sourcemap-codec
Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit).
## Why?
Sourcemaps are difficult to generate and manipulate, because the `mappings` property (the part that actually links the generated code back to the original source) is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation; you have to understand the whole sourcemap.
This package makes the process slightly easier.
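As a quick, hand-worked sketch of the VLQ step (the character `I` is an arbitrary choice, not taken from a real map):
```js
// 'I' sits at index 8 of the base64 alphabet used by sourcemaps.
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const digit = chars.indexOf('I'); // 8

// Bit 5 (value 32) is the continuation flag: it is clear here, so this is the
// final digit. The lowest remaining bit is the sign flag, and the rest is the
// magnitude.
console.log(digit & 32); // 0 -> no more digits follow
console.log(digit & 1);  // 0 -> positive
console.log(digit >>> 1); // 4 -> the single character 'I' encodes +4
```
Longer values keep the continuation bit set and contribute five bits per digit, which is exactly what the `decodeInteger` helper in this package does.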
## Installation
```bash
npm install @jridgewell/sourcemap-codec
```
## Usage
```js
import { encode, decode } from '@jridgewell/sourcemap-codec';
var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
assert.deepEqual( decoded, [
// the first line (of the generated code) has no mappings,
// as shown by the starting semi-colon (which separates lines)
[],
// the second line contains four (comma-separated) segments
[
// segments are encoded as you'd expect:
// [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ]
// i.e. the first segment begins at column 2, and maps back to the second column
// of the second line (both zero-based) of the 0th source, and uses the 0th
// name in the `map.names` array
[ 2, 0, 2, 2, 0 ],
// the remaining segments are 4-length rather than 5-length,
// because they don't map a name
[ 4, 0, 2, 4 ],
[ 6, 0, 2, 5 ],
[ 7, 0, 2, 7 ]
],
// the final line contains two segments
[
[ 2, 1, 10, 19 ],
[ 12, 1, 11, 20 ]
]
]);
var encoded = encode( decoded );
assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
```
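Within a generated line, each segment's fields are stored as deltas from the previous segment, so the decoded map holds absolute values while the encoded string holds offsets. A minimal round-trip makes this concrete (the string `'AAAA,IAAI'` is an illustrative value, not one of the maps above):
```js
import assert from 'node:assert';
import { decode, encode } from '@jridgewell/sourcemap-codec';

// One generated line with two segments. The second segment is stored as the
// deltas [+4, +0, +0, +4] relative to the first, but decodes to absolute values.
const decoded = decode('AAAA,IAAI');

assert.deepEqual(decoded, [
  [
    [0, 0, 0, 0], // generated column 0 -> source 0, line 0, column 0
    [4, 0, 0, 4], // generated column 4 -> source 0, line 0, column 4
  ],
]);

// Encoding the absolute form re-derives the deltas and round-trips exactly.
assert.equal(encode(decoded), 'AAAA,IAAI');
```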
## Benchmarks
```
node v20.10.0
amp.js.map - 45120 segments
Decode Memory Usage:
local code 5815135 bytes
@jridgewell/sourcemap-codec 1.4.15 5868160 bytes
sourcemap-codec 5492584 bytes
source-map-0.6.1 13569984 bytes
source-map-0.8.0 6390584 bytes
chrome dev tools 8011136 bytes
Smallest memory usage is sourcemap-codec
Decode speed:
decode: local code x 492 ops/sec ±1.22% (90 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 499 ops/sec ±1.16% (89 runs sampled)
decode: sourcemap-codec x 376 ops/sec ±1.66% (89 runs sampled)
decode: source-map-0.6.1 x 34.99 ops/sec ±0.94% (48 runs sampled)
decode: source-map-0.8.0 x 351 ops/sec ±0.07% (95 runs sampled)
chrome dev tools x 165 ops/sec ±0.91% (86 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15
Encode Memory Usage:
local code 444248 bytes
@jridgewell/sourcemap-codec 1.4.15 623024 bytes
sourcemap-codec 8696280 bytes
source-map-0.6.1 8745176 bytes
source-map-0.8.0 8736624 bytes
Smallest memory usage is local code
Encode speed:
encode: local code x 796 ops/sec ±0.11% (97 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 795 ops/sec ±0.25% (98 runs sampled)
encode: sourcemap-codec x 231 ops/sec ±0.83% (86 runs sampled)
encode: source-map-0.6.1 x 166 ops/sec ±0.57% (86 runs sampled)
encode: source-map-0.8.0 x 203 ops/sec ±0.45% (88 runs sampled)
Fastest is encode: local code,encode: @jridgewell/sourcemap-codec 1.4.15
***
babel.min.js.map - 347793 segments
Decode Memory Usage:
local code 35424960 bytes
@jridgewell/sourcemap-codec 1.4.15 35424696 bytes
sourcemap-codec 36033464 bytes
source-map-0.6.1 62253704 bytes
source-map-0.8.0 43843920 bytes
chrome dev tools 45111400 bytes
Smallest memory usage is @jridgewell/sourcemap-codec 1.4.15
Decode speed:
decode: local code x 38.18 ops/sec ±5.44% (52 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 38.36 ops/sec ±5.02% (52 runs sampled)
decode: sourcemap-codec x 34.05 ops/sec ±4.45% (47 runs sampled)
decode: source-map-0.6.1 x 4.31 ops/sec ±2.76% (15 runs sampled)
decode: source-map-0.8.0 x 55.60 ops/sec ±0.13% (73 runs sampled)
chrome dev tools x 16.94 ops/sec ±3.78% (46 runs sampled)
Fastest is decode: source-map-0.8.0
Encode Memory Usage:
local code 2606016 bytes
@jridgewell/sourcemap-codec 1.4.15 2626440 bytes
sourcemap-codec 21152576 bytes
source-map-0.6.1 25023928 bytes
source-map-0.8.0 25256448 bytes
Smallest memory usage is local code
Encode speed:
encode: local code x 127 ops/sec ±0.18% (83 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 128 ops/sec ±0.26% (83 runs sampled)
encode: sourcemap-codec x 29.31 ops/sec ±2.55% (53 runs sampled)
encode: source-map-0.6.1 x 18.85 ops/sec ±3.19% (36 runs sampled)
encode: source-map-0.8.0 x 19.34 ops/sec ±1.97% (36 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15
***
preact.js.map - 1992 segments
Decode Memory Usage:
local code 261696 bytes
@jridgewell/sourcemap-codec 1.4.15 244296 bytes
sourcemap-codec 302816 bytes
source-map-0.6.1 939176 bytes
source-map-0.8.0 336 bytes
chrome dev tools 587368 bytes
Smallest memory usage is source-map-0.8.0
Decode speed:
decode: local code x 17,782 ops/sec ±0.32% (97 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 17,863 ops/sec ±0.40% (100 runs sampled)
decode: sourcemap-codec x 12,453 ops/sec ±0.27% (101 runs sampled)
decode: source-map-0.6.1 x 1,288 ops/sec ±1.05% (96 runs sampled)
decode: source-map-0.8.0 x 9,289 ops/sec ±0.27% (101 runs sampled)
chrome dev tools x 4,769 ops/sec ±0.18% (100 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15
Encode Memory Usage:
local code 262944 bytes
@jridgewell/sourcemap-codec 1.4.15 25544 bytes
sourcemap-codec 323048 bytes
source-map-0.6.1 507808 bytes
source-map-0.8.0 507480 bytes
Smallest memory usage is @jridgewell/sourcemap-codec 1.4.15
Encode speed:
encode: local code x 24,207 ops/sec ±0.79% (95 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 24,288 ops/sec ±0.48% (96 runs sampled)
encode: sourcemap-codec x 6,761 ops/sec ±0.21% (100 runs sampled)
encode: source-map-0.6.1 x 5,374 ops/sec ±0.17% (99 runs sampled)
encode: source-map-0.8.0 x 5,633 ops/sec ±0.32% (99 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15,encode: local code
***
react.js.map - 5726 segments
Decode Memory Usage:
local code 678816 bytes
@jridgewell/sourcemap-codec 1.4.15 678816 bytes
sourcemap-codec 816400 bytes
source-map-0.6.1 2288864 bytes
source-map-0.8.0 721360 bytes
chrome dev tools 1012512 bytes
Smallest memory usage is local code
Decode speed:
decode: local code x 6,178 ops/sec ±0.19% (98 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 6,261 ops/sec ±0.22% (100 runs sampled)
decode: sourcemap-codec x 4,472 ops/sec ±0.90% (99 runs sampled)
decode: source-map-0.6.1 x 449 ops/sec ±0.31% (95 runs sampled)
decode: source-map-0.8.0 x 3,219 ops/sec ±0.13% (100 runs sampled)
chrome dev tools x 1,743 ops/sec ±0.20% (99 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15
Encode Memory Usage:
local code 140960 bytes
@jridgewell/sourcemap-codec 1.4.15 159808 bytes
sourcemap-codec 969304 bytes
source-map-0.6.1 930520 bytes
source-map-0.8.0 930248 bytes
Smallest memory usage is local code
Encode speed:
encode: local code x 8,013 ops/sec ±0.19% (100 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 7,989 ops/sec ±0.20% (101 runs sampled)
encode: sourcemap-codec x 2,472 ops/sec ±0.21% (99 runs sampled)
encode: source-map-0.6.1 x 2,200 ops/sec ±0.17% (99 runs sampled)
encode: source-map-0.8.0 x 2,220 ops/sec ±0.37% (99 runs sampled)
Fastest is encode: local code
***
vscode.map - 2141001 segments
Decode Memory Usage:
local code 198955264 bytes
@jridgewell/sourcemap-codec 1.4.15 199175352 bytes
sourcemap-codec 199102688 bytes
source-map-0.6.1 386323432 bytes
source-map-0.8.0 244116432 bytes
chrome dev tools 293734280 bytes
Smallest memory usage is local code
Decode speed:
decode: local code x 3.90 ops/sec ±22.21% (15 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 3.95 ops/sec ±23.53% (15 runs sampled)
decode: sourcemap-codec x 3.82 ops/sec ±17.94% (14 runs sampled)
decode: source-map-0.6.1 x 0.61 ops/sec ±7.81% (6 runs sampled)
decode: source-map-0.8.0 x 9.54 ops/sec ±0.28% (28 runs sampled)
chrome dev tools x 2.18 ops/sec ±10.58% (10 runs sampled)
Fastest is decode: source-map-0.8.0
Encode Memory Usage:
local code 13509880 bytes
@jridgewell/sourcemap-codec 1.4.15 13537648 bytes
sourcemap-codec 32540104 bytes
source-map-0.6.1 127531040 bytes
source-map-0.8.0 127535312 bytes
Smallest memory usage is local code
Encode speed:
encode: local code x 20.10 ops/sec ±0.19% (38 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 20.26 ops/sec ±0.32% (38 runs sampled)
encode: sourcemap-codec x 5.44 ops/sec ±1.64% (18 runs sampled)
encode: source-map-0.6.1 x 2.30 ops/sec ±4.79% (10 runs sampled)
encode: source-map-0.8.0 x 2.46 ops/sec ±6.53% (10 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15
```
# License
MIT


@@ -0,0 +1,423 @@
// src/vlq.ts
var comma = ",".charCodeAt(0);
var semicolon = ";".charCodeAt(0);
var chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
var intToChar = new Uint8Array(64);
var charToInt = new Uint8Array(128);
for (let i = 0; i < chars.length; i++) {
const c = chars.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
function decodeInteger(reader, relative) {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = reader.next();
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -2147483648 | -value;
}
return relative + value;
}
function encodeInteger(builder, num, relative) {
let delta = num - relative;
delta = delta < 0 ? -delta << 1 | 1 : delta << 1;
do {
let clamped = delta & 31;
delta >>>= 5;
if (delta > 0) clamped |= 32;
builder.write(intToChar[clamped]);
} while (delta > 0);
return num;
}
function hasMoreVlq(reader, max) {
if (reader.pos >= max) return false;
return reader.peek() !== comma;
}
// src/strings.ts
var bufLength = 1024 * 16;
var td = typeof TextDecoder !== "undefined" ? /* @__PURE__ */ new TextDecoder() : typeof Buffer !== "undefined" ? {
decode(buf) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
}
} : {
decode(buf) {
let out = "";
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
return out;
}
};
var StringWriter = class {
constructor() {
this.pos = 0;
this.out = "";
this.buffer = new Uint8Array(bufLength);
}
write(v) {
const { buffer } = this;
buffer[this.pos++] = v;
if (this.pos === bufLength) {
this.out += td.decode(buffer);
this.pos = 0;
}
}
flush() {
const { buffer, out, pos } = this;
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
}
};
var StringReader = class {
constructor(buffer) {
this.pos = 0;
this.buffer = buffer;
}
next() {
return this.buffer.charCodeAt(this.pos++);
}
peek() {
return this.buffer.charCodeAt(this.pos);
}
indexOf(char) {
const { buffer, pos } = this;
const idx = buffer.indexOf(char, pos);
return idx === -1 ? buffer.length : idx;
}
};
// src/scopes.ts
var EMPTY = [];
function decodeOriginalScopes(input) {
const { length } = input;
const reader = new StringReader(input);
const scopes = [];
const stack = [];
let line = 0;
for (; reader.pos < length; reader.pos++) {
line = decodeInteger(reader, line);
const column = decodeInteger(reader, 0);
if (!hasMoreVlq(reader, length)) {
const last = stack.pop();
last[2] = line;
last[3] = column;
continue;
}
const kind = decodeInteger(reader, 0);
const fields = decodeInteger(reader, 0);
const hasName = fields & 1;
const scope = hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind];
let vars = EMPTY;
if (hasMoreVlq(reader, length)) {
vars = [];
do {
const varsIndex = decodeInteger(reader, 0);
vars.push(varsIndex);
} while (hasMoreVlq(reader, length));
}
scope.vars = vars;
scopes.push(scope);
stack.push(scope);
}
return scopes;
}
function encodeOriginalScopes(scopes) {
const writer = new StringWriter();
for (let i = 0; i < scopes.length; ) {
i = _encodeOriginalScopes(scopes, i, writer, [0]);
}
return writer.flush();
}
function _encodeOriginalScopes(scopes, index, writer, state) {
const scope = scopes[index];
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
if (index > 0) writer.write(comma);
state[0] = encodeInteger(writer, startLine, state[0]);
encodeInteger(writer, startColumn, 0);
encodeInteger(writer, kind, 0);
const fields = scope.length === 6 ? 1 : 0;
encodeInteger(writer, fields, 0);
if (scope.length === 6) encodeInteger(writer, scope[5], 0);
for (const v of vars) {
encodeInteger(writer, v, 0);
}
for (index++; index < scopes.length; ) {
const next = scopes[index];
const { 0: l, 1: c } = next;
if (l > endLine || l === endLine && c >= endColumn) {
break;
}
index = _encodeOriginalScopes(scopes, index, writer, state);
}
writer.write(comma);
state[0] = encodeInteger(writer, endLine, state[0]);
encodeInteger(writer, endColumn, 0);
return index;
}
function decodeGeneratedRanges(input) {
const { length } = input;
const reader = new StringReader(input);
const ranges = [];
const stack = [];
let genLine = 0;
let definitionSourcesIndex = 0;
let definitionScopeIndex = 0;
let callsiteSourcesIndex = 0;
let callsiteLine = 0;
let callsiteColumn = 0;
let bindingLine = 0;
let bindingColumn = 0;
do {
const semi = reader.indexOf(";");
let genColumn = 0;
for (; reader.pos < semi; reader.pos++) {
genColumn = decodeInteger(reader, genColumn);
if (!hasMoreVlq(reader, semi)) {
const last = stack.pop();
last[2] = genLine;
last[3] = genColumn;
continue;
}
const fields = decodeInteger(reader, 0);
const hasDefinition = fields & 1;
const hasCallsite = fields & 2;
const hasScope = fields & 4;
let callsite = null;
let bindings = EMPTY;
let range;
if (hasDefinition) {
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
definitionScopeIndex = decodeInteger(
reader,
definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0
);
definitionSourcesIndex = defSourcesIndex;
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
} else {
range = [genLine, genColumn, 0, 0];
}
range.isScope = !!hasScope;
if (hasCallsite) {
const prevCsi = callsiteSourcesIndex;
const prevLine = callsiteLine;
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
const sameSource = prevCsi === callsiteSourcesIndex;
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
callsiteColumn = decodeInteger(
reader,
sameSource && prevLine === callsiteLine ? callsiteColumn : 0
);
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
}
range.callsite = callsite;
if (hasMoreVlq(reader, semi)) {
bindings = [];
do {
bindingLine = genLine;
bindingColumn = genColumn;
const expressionsCount = decodeInteger(reader, 0);
let expressionRanges;
if (expressionsCount < -1) {
expressionRanges = [[decodeInteger(reader, 0)]];
for (let i = -1; i > expressionsCount; i--) {
const prevBl = bindingLine;
bindingLine = decodeInteger(reader, bindingLine);
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
const expression = decodeInteger(reader, 0);
expressionRanges.push([expression, bindingLine, bindingColumn]);
}
} else {
expressionRanges = [[expressionsCount]];
}
bindings.push(expressionRanges);
} while (hasMoreVlq(reader, semi));
}
range.bindings = bindings;
ranges.push(range);
stack.push(range);
}
genLine++;
reader.pos = semi + 1;
} while (reader.pos < length);
return ranges;
}
function encodeGeneratedRanges(ranges) {
if (ranges.length === 0) return "";
const writer = new StringWriter();
for (let i = 0; i < ranges.length; ) {
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
}
return writer.flush();
}
function _encodeGeneratedRanges(ranges, index, writer, state) {
const range = ranges[index];
const {
0: startLine,
1: startColumn,
2: endLine,
3: endColumn,
isScope,
callsite,
bindings
} = range;
if (state[0] < startLine) {
catchupLine(writer, state[0], startLine);
state[0] = startLine;
state[1] = 0;
} else if (index > 0) {
writer.write(comma);
}
state[1] = encodeInteger(writer, range[1], state[1]);
const fields = (range.length === 6 ? 1 : 0) | (callsite ? 2 : 0) | (isScope ? 4 : 0);
encodeInteger(writer, fields, 0);
if (range.length === 6) {
const { 4: sourcesIndex, 5: scopesIndex } = range;
if (sourcesIndex !== state[2]) {
state[3] = 0;
}
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
state[3] = encodeInteger(writer, scopesIndex, state[3]);
}
if (callsite) {
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
if (sourcesIndex !== state[4]) {
state[5] = 0;
state[6] = 0;
} else if (callLine !== state[5]) {
state[6] = 0;
}
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
state[5] = encodeInteger(writer, callLine, state[5]);
state[6] = encodeInteger(writer, callColumn, state[6]);
}
if (bindings) {
for (const binding of bindings) {
if (binding.length > 1) encodeInteger(writer, -binding.length, 0);
const expression = binding[0][0];
encodeInteger(writer, expression, 0);
let bindingStartLine = startLine;
let bindingStartColumn = startColumn;
for (let i = 1; i < binding.length; i++) {
const expRange = binding[i];
bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
encodeInteger(writer, expRange[0], 0);
}
}
}
for (index++; index < ranges.length; ) {
const next = ranges[index];
const { 0: l, 1: c } = next;
if (l > endLine || l === endLine && c >= endColumn) {
break;
}
index = _encodeGeneratedRanges(ranges, index, writer, state);
}
if (state[0] < endLine) {
catchupLine(writer, state[0], endLine);
state[0] = endLine;
state[1] = 0;
} else {
writer.write(comma);
}
state[1] = encodeInteger(writer, endColumn, state[1]);
return index;
}
function catchupLine(writer, lastLine, line) {
do {
writer.write(semicolon);
} while (++lastLine < line);
}
// src/sourcemap-codec.ts
function decode(mappings) {
const { length } = mappings;
const reader = new StringReader(mappings);
const decoded = [];
let genColumn = 0;
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
do {
const semi = reader.indexOf(";");
const line = [];
let sorted = true;
let lastCol = 0;
genColumn = 0;
while (reader.pos < semi) {
let seg;
genColumn = decodeInteger(reader, genColumn);
if (genColumn < lastCol) sorted = false;
lastCol = genColumn;
if (hasMoreVlq(reader, semi)) {
sourcesIndex = decodeInteger(reader, sourcesIndex);
sourceLine = decodeInteger(reader, sourceLine);
sourceColumn = decodeInteger(reader, sourceColumn);
if (hasMoreVlq(reader, semi)) {
namesIndex = decodeInteger(reader, namesIndex);
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
} else {
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
}
} else {
seg = [genColumn];
}
line.push(seg);
reader.pos++;
}
if (!sorted) sort(line);
decoded.push(line);
reader.pos = semi + 1;
} while (reader.pos <= length);
return decoded;
}
function sort(line) {
line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[0] - b[0];
}
function encode(decoded) {
const writer = new StringWriter();
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0) writer.write(semicolon);
if (line.length === 0) continue;
let genColumn = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
if (j > 0) writer.write(comma);
genColumn = encodeInteger(writer, segment[0], genColumn);
if (segment.length === 1) continue;
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
sourceLine = encodeInteger(writer, segment[2], sourceLine);
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
if (segment.length === 4) continue;
namesIndex = encodeInteger(writer, segment[4], namesIndex);
}
}
return writer.flush();
}
export {
decode,
decodeGeneratedRanges,
decodeOriginalScopes,
encode,
encodeGeneratedRanges,
encodeOriginalScopes
};
//# sourceMappingURL=sourcemap-codec.mjs.map

File diff suppressed because one or more lines are too long


@@ -0,0 +1,464 @@
(function (global, factory) {
if (typeof exports === 'object' && typeof module !== 'undefined') {
factory(module);
module.exports = def(module);
} else if (typeof define === 'function' && define.amd) {
define(['module'], function(mod) {
factory.apply(this, arguments);
mod.exports = def(mod);
});
} else {
const mod = { exports: {} };
factory(mod);
global = typeof globalThis !== 'undefined' ? globalThis : global || self;
global.sourcemapCodec = def(mod);
}
function def(m) { return 'default' in m.exports ? m.exports.default : m.exports; }
})(this, (function (module) {
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/sourcemap-codec.ts
var sourcemap_codec_exports = {};
__export(sourcemap_codec_exports, {
decode: () => decode,
decodeGeneratedRanges: () => decodeGeneratedRanges,
decodeOriginalScopes: () => decodeOriginalScopes,
encode: () => encode,
encodeGeneratedRanges: () => encodeGeneratedRanges,
encodeOriginalScopes: () => encodeOriginalScopes
});
module.exports = __toCommonJS(sourcemap_codec_exports);
// src/vlq.ts
var comma = ",".charCodeAt(0);
var semicolon = ";".charCodeAt(0);
var chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
var intToChar = new Uint8Array(64);
var charToInt = new Uint8Array(128);
for (let i = 0; i < chars.length; i++) {
const c = chars.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
function decodeInteger(reader, relative) {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = reader.next();
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -2147483648 | -value;
}
return relative + value;
}
function encodeInteger(builder, num, relative) {
let delta = num - relative;
delta = delta < 0 ? -delta << 1 | 1 : delta << 1;
do {
let clamped = delta & 31;
delta >>>= 5;
if (delta > 0) clamped |= 32;
builder.write(intToChar[clamped]);
} while (delta > 0);
return num;
}
function hasMoreVlq(reader, max) {
if (reader.pos >= max) return false;
return reader.peek() !== comma;
}
// src/strings.ts
var bufLength = 1024 * 16;
var td = typeof TextDecoder !== "undefined" ? /* @__PURE__ */ new TextDecoder() : typeof Buffer !== "undefined" ? {
decode(buf) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
}
} : {
decode(buf) {
let out = "";
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
return out;
}
};
var StringWriter = class {
constructor() {
this.pos = 0;
this.out = "";
this.buffer = new Uint8Array(bufLength);
}
write(v) {
const { buffer } = this;
buffer[this.pos++] = v;
if (this.pos === bufLength) {
this.out += td.decode(buffer);
this.pos = 0;
}
}
flush() {
const { buffer, out, pos } = this;
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
}
};
var StringReader = class {
constructor(buffer) {
this.pos = 0;
this.buffer = buffer;
}
next() {
return this.buffer.charCodeAt(this.pos++);
}
peek() {
return this.buffer.charCodeAt(this.pos);
}
indexOf(char) {
const { buffer, pos } = this;
const idx = buffer.indexOf(char, pos);
return idx === -1 ? buffer.length : idx;
}
};
// src/scopes.ts
var EMPTY = [];
function decodeOriginalScopes(input) {
const { length } = input;
const reader = new StringReader(input);
const scopes = [];
const stack = [];
let line = 0;
for (; reader.pos < length; reader.pos++) {
line = decodeInteger(reader, line);
const column = decodeInteger(reader, 0);
if (!hasMoreVlq(reader, length)) {
const last = stack.pop();
last[2] = line;
last[3] = column;
continue;
}
const kind = decodeInteger(reader, 0);
const fields = decodeInteger(reader, 0);
const hasName = fields & 1;
const scope = hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind];
let vars = EMPTY;
if (hasMoreVlq(reader, length)) {
vars = [];
do {
const varsIndex = decodeInteger(reader, 0);
vars.push(varsIndex);
} while (hasMoreVlq(reader, length));
}
scope.vars = vars;
scopes.push(scope);
stack.push(scope);
}
return scopes;
}
function encodeOriginalScopes(scopes) {
const writer = new StringWriter();
for (let i = 0; i < scopes.length; ) {
i = _encodeOriginalScopes(scopes, i, writer, [0]);
}
return writer.flush();
}
function _encodeOriginalScopes(scopes, index, writer, state) {
const scope = scopes[index];
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
if (index > 0) writer.write(comma);
state[0] = encodeInteger(writer, startLine, state[0]);
encodeInteger(writer, startColumn, 0);
encodeInteger(writer, kind, 0);
const fields = scope.length === 6 ? 1 : 0;
encodeInteger(writer, fields, 0);
if (scope.length === 6) encodeInteger(writer, scope[5], 0);
for (const v of vars) {
encodeInteger(writer, v, 0);
}
for (index++; index < scopes.length; ) {
const next = scopes[index];
const { 0: l, 1: c } = next;
if (l > endLine || l === endLine && c >= endColumn) {
break;
}
index = _encodeOriginalScopes(scopes, index, writer, state);
}
writer.write(comma);
state[0] = encodeInteger(writer, endLine, state[0]);
encodeInteger(writer, endColumn, 0);
return index;
}
function decodeGeneratedRanges(input) {
const { length } = input;
const reader = new StringReader(input);
const ranges = [];
const stack = [];
let genLine = 0;
let definitionSourcesIndex = 0;
let definitionScopeIndex = 0;
let callsiteSourcesIndex = 0;
let callsiteLine = 0;
let callsiteColumn = 0;
let bindingLine = 0;
let bindingColumn = 0;
do {
const semi = reader.indexOf(";");
let genColumn = 0;
for (; reader.pos < semi; reader.pos++) {
genColumn = decodeInteger(reader, genColumn);
if (!hasMoreVlq(reader, semi)) {
const last = stack.pop();
last[2] = genLine;
last[3] = genColumn;
continue;
}
const fields = decodeInteger(reader, 0);
const hasDefinition = fields & 1;
const hasCallsite = fields & 2;
const hasScope = fields & 4;
let callsite = null;
let bindings = EMPTY;
let range;
if (hasDefinition) {
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
definitionScopeIndex = decodeInteger(
reader,
definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0
);
definitionSourcesIndex = defSourcesIndex;
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
} else {
range = [genLine, genColumn, 0, 0];
}
range.isScope = !!hasScope;
if (hasCallsite) {
const prevCsi = callsiteSourcesIndex;
const prevLine = callsiteLine;
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
const sameSource = prevCsi === callsiteSourcesIndex;
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
callsiteColumn = decodeInteger(
reader,
sameSource && prevLine === callsiteLine ? callsiteColumn : 0
);
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
}
range.callsite = callsite;
if (hasMoreVlq(reader, semi)) {
bindings = [];
do {
bindingLine = genLine;
bindingColumn = genColumn;
const expressionsCount = decodeInteger(reader, 0);
let expressionRanges;
if (expressionsCount < -1) {
expressionRanges = [[decodeInteger(reader, 0)]];
for (let i = -1; i > expressionsCount; i--) {
const prevBl = bindingLine;
bindingLine = decodeInteger(reader, bindingLine);
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
const expression = decodeInteger(reader, 0);
expressionRanges.push([expression, bindingLine, bindingColumn]);
}
} else {
expressionRanges = [[expressionsCount]];
}
bindings.push(expressionRanges);
} while (hasMoreVlq(reader, semi));
}
range.bindings = bindings;
ranges.push(range);
stack.push(range);
}
genLine++;
reader.pos = semi + 1;
} while (reader.pos < length);
return ranges;
}
function encodeGeneratedRanges(ranges) {
if (ranges.length === 0) return "";
const writer = new StringWriter();
for (let i = 0; i < ranges.length; ) {
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
}
return writer.flush();
}
function _encodeGeneratedRanges(ranges, index, writer, state) {
const range = ranges[index];
const {
0: startLine,
1: startColumn,
2: endLine,
3: endColumn,
isScope,
callsite,
bindings
} = range;
if (state[0] < startLine) {
catchupLine(writer, state[0], startLine);
state[0] = startLine;
state[1] = 0;
} else if (index > 0) {
writer.write(comma);
}
state[1] = encodeInteger(writer, range[1], state[1]);
const fields = (range.length === 6 ? 1 : 0) | (callsite ? 2 : 0) | (isScope ? 4 : 0);
encodeInteger(writer, fields, 0);
if (range.length === 6) {
const { 4: sourcesIndex, 5: scopesIndex } = range;
if (sourcesIndex !== state[2]) {
state[3] = 0;
}
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
state[3] = encodeInteger(writer, scopesIndex, state[3]);
}
if (callsite) {
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
if (sourcesIndex !== state[4]) {
state[5] = 0;
state[6] = 0;
} else if (callLine !== state[5]) {
state[6] = 0;
}
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
state[5] = encodeInteger(writer, callLine, state[5]);
state[6] = encodeInteger(writer, callColumn, state[6]);
}
if (bindings) {
for (const binding of bindings) {
if (binding.length > 1) encodeInteger(writer, -binding.length, 0);
const expression = binding[0][0];
encodeInteger(writer, expression, 0);
let bindingStartLine = startLine;
let bindingStartColumn = startColumn;
for (let i = 1; i < binding.length; i++) {
const expRange = binding[i];
bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
encodeInteger(writer, expRange[0], 0);
}
}
}
for (index++; index < ranges.length; ) {
const next = ranges[index];
const { 0: l, 1: c } = next;
if (l > endLine || l === endLine && c >= endColumn) {
break;
}
index = _encodeGeneratedRanges(ranges, index, writer, state);
}
if (state[0] < endLine) {
catchupLine(writer, state[0], endLine);
state[0] = endLine;
state[1] = 0;
} else {
writer.write(comma);
}
state[1] = encodeInteger(writer, endColumn, state[1]);
return index;
}
function catchupLine(writer, lastLine, line) {
do {
writer.write(semicolon);
} while (++lastLine < line);
}
// src/sourcemap-codec.ts
function decode(mappings) {
const { length } = mappings;
const reader = new StringReader(mappings);
const decoded = [];
let genColumn = 0;
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
do {
const semi = reader.indexOf(";");
const line = [];
let sorted = true;
let lastCol = 0;
genColumn = 0;
while (reader.pos < semi) {
let seg;
genColumn = decodeInteger(reader, genColumn);
if (genColumn < lastCol) sorted = false;
lastCol = genColumn;
if (hasMoreVlq(reader, semi)) {
sourcesIndex = decodeInteger(reader, sourcesIndex);
sourceLine = decodeInteger(reader, sourceLine);
sourceColumn = decodeInteger(reader, sourceColumn);
if (hasMoreVlq(reader, semi)) {
namesIndex = decodeInteger(reader, namesIndex);
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
} else {
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
}
} else {
seg = [genColumn];
}
line.push(seg);
reader.pos++;
}
if (!sorted) sort(line);
decoded.push(line);
reader.pos = semi + 1;
} while (reader.pos <= length);
return decoded;
}
function sort(line) {
line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[0] - b[0];
}
function encode(decoded) {
const writer = new StringWriter();
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0) writer.write(semicolon);
if (line.length === 0) continue;
let genColumn = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
if (j > 0) writer.write(comma);
genColumn = encodeInteger(writer, segment[0], genColumn);
if (segment.length === 1) continue;
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
sourceLine = encodeInteger(writer, segment[2], sourceLine);
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
if (segment.length === 4) continue;
namesIndex = encodeInteger(writer, segment[4], namesIndex);
}
}
return writer.flush();
}
}));
//# sourceMappingURL=sourcemap-codec.umd.js.map

File diff suppressed because one or more lines are too long

63
node_modules/@jridgewell/sourcemap-codec/package.json generated vendored Normal file

@@ -0,0 +1,63 @@
{
"name": "@jridgewell/sourcemap-codec",
"version": "1.5.5",
"description": "Encode/decode sourcemap mappings",
"keywords": [
"sourcemap",
"vlq"
],
"main": "dist/sourcemap-codec.umd.js",
"module": "dist/sourcemap-codec.mjs",
"types": "types/sourcemap-codec.d.cts",
"files": [
"dist",
"src",
"types"
],
"exports": {
".": [
{
"import": {
"types": "./types/sourcemap-codec.d.mts",
"default": "./dist/sourcemap-codec.mjs"
},
"default": {
"types": "./types/sourcemap-codec.d.cts",
"default": "./dist/sourcemap-codec.umd.js"
}
},
"./dist/sourcemap-codec.umd.js"
],
"./package.json": "./package.json"
},
"scripts": {
"benchmark": "run-s build:code benchmark:*",
"benchmark:install": "cd benchmark && npm install",
"benchmark:only": "node --expose-gc benchmark/index.js",
"build": "run-s -n build:code build:types",
"build:code": "node ../../esbuild.mjs sourcemap-codec.ts",
"build:types": "run-s build:types:force build:types:emit build:types:mts",
"build:types:force": "rimraf tsconfig.build.tsbuildinfo",
"build:types:emit": "tsc --project tsconfig.build.json",
"build:types:mts": "node ../../mts-types.mjs",
"clean": "run-s -n clean:code clean:types",
"clean:code": "tsc --build --clean tsconfig.build.json",
"clean:types": "rimraf dist types",
"test": "run-s -n test:types test:only test:format",
"test:format": "prettier --check '{src,test}/**/*.ts'",
"test:only": "mocha",
"test:types": "eslint '{src,test}/**/*.ts'",
"lint": "run-s -n lint:types lint:format",
"lint:format": "npm run test:format -- --write",
"lint:types": "npm run test:types -- --fix",
"prepublishOnly": "npm run-s -n build test"
},
"homepage": "https://github.com/jridgewell/sourcemaps/tree/main/packages/sourcemap-codec",
"repository": {
"type": "git",
"url": "git+https://github.com/jridgewell/sourcemaps.git",
"directory": "packages/sourcemap-codec"
},
"author": "Justin Ridgewell <justin@ridgewell.name>",
"license": "MIT"
}

345
node_modules/@jridgewell/sourcemap-codec/src/scopes.ts generated vendored Normal file

@@ -0,0 +1,345 @@
import { StringReader, StringWriter } from './strings';
import { comma, decodeInteger, encodeInteger, hasMoreVlq, semicolon } from './vlq';
const EMPTY: any[] = [];
type Line = number;
type Column = number;
type Kind = number;
type Name = number;
type Var = number;
type SourcesIndex = number;
type ScopesIndex = number;
type Mix<A, B, O> = (A & O) | (B & O);
export type OriginalScope = Mix<
[Line, Column, Line, Column, Kind],
[Line, Column, Line, Column, Kind, Name],
{ vars: Var[] }
>;
export type GeneratedRange = Mix<
[Line, Column, Line, Column],
[Line, Column, Line, Column, SourcesIndex, ScopesIndex],
{
callsite: CallSite | null;
bindings: Binding[];
isScope: boolean;
}
>;
export type CallSite = [SourcesIndex, Line, Column];
type Binding = BindingExpressionRange[];
export type BindingExpressionRange = [Name] | [Name, Line, Column];
export function decodeOriginalScopes(input: string): OriginalScope[] {
const { length } = input;
const reader = new StringReader(input);
const scopes: OriginalScope[] = [];
const stack: OriginalScope[] = [];
let line = 0;
for (; reader.pos < length; reader.pos++) {
line = decodeInteger(reader, line);
const column = decodeInteger(reader, 0);
if (!hasMoreVlq(reader, length)) {
const last = stack.pop()!;
last[2] = line;
last[3] = column;
continue;
}
const kind = decodeInteger(reader, 0);
const fields = decodeInteger(reader, 0);
const hasName = fields & 0b0001;
const scope: OriginalScope = (
hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind]
) as OriginalScope;
let vars: Var[] = EMPTY;
if (hasMoreVlq(reader, length)) {
vars = [];
do {
const varsIndex = decodeInteger(reader, 0);
vars.push(varsIndex);
} while (hasMoreVlq(reader, length));
}
scope.vars = vars;
scopes.push(scope);
stack.push(scope);
}
return scopes;
}
export function encodeOriginalScopes(scopes: OriginalScope[]): string {
const writer = new StringWriter();
for (let i = 0; i < scopes.length; ) {
i = _encodeOriginalScopes(scopes, i, writer, [0]);
}
return writer.flush();
}
function _encodeOriginalScopes(
scopes: OriginalScope[],
index: number,
writer: StringWriter,
state: [
number, // GenColumn
],
): number {
const scope = scopes[index];
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
if (index > 0) writer.write(comma);
state[0] = encodeInteger(writer, startLine, state[0]);
encodeInteger(writer, startColumn, 0);
encodeInteger(writer, kind, 0);
const fields = scope.length === 6 ? 0b0001 : 0;
encodeInteger(writer, fields, 0);
if (scope.length === 6) encodeInteger(writer, scope[5], 0);
for (const v of vars) {
encodeInteger(writer, v, 0);
}
for (index++; index < scopes.length; ) {
const next = scopes[index];
const { 0: l, 1: c } = next;
if (l > endLine || (l === endLine && c >= endColumn)) {
break;
}
index = _encodeOriginalScopes(scopes, index, writer, state);
}
writer.write(comma);
state[0] = encodeInteger(writer, endLine, state[0]);
encodeInteger(writer, endColumn, 0);
return index;
}
export function decodeGeneratedRanges(input: string): GeneratedRange[] {
const { length } = input;
const reader = new StringReader(input);
const ranges: GeneratedRange[] = [];
const stack: GeneratedRange[] = [];
let genLine = 0;
let definitionSourcesIndex = 0;
let definitionScopeIndex = 0;
let callsiteSourcesIndex = 0;
let callsiteLine = 0;
let callsiteColumn = 0;
let bindingLine = 0;
let bindingColumn = 0;
do {
const semi = reader.indexOf(';');
let genColumn = 0;
for (; reader.pos < semi; reader.pos++) {
genColumn = decodeInteger(reader, genColumn);
if (!hasMoreVlq(reader, semi)) {
const last = stack.pop()!;
last[2] = genLine;
last[3] = genColumn;
continue;
}
const fields = decodeInteger(reader, 0);
const hasDefinition = fields & 0b0001;
const hasCallsite = fields & 0b0010;
const hasScope = fields & 0b0100;
let callsite: CallSite | null = null;
let bindings: Binding[] = EMPTY;
let range: GeneratedRange;
if (hasDefinition) {
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
definitionScopeIndex = decodeInteger(
reader,
definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0,
);
definitionSourcesIndex = defSourcesIndex;
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex] as GeneratedRange;
} else {
range = [genLine, genColumn, 0, 0] as GeneratedRange;
}
range.isScope = !!hasScope;
if (hasCallsite) {
const prevCsi = callsiteSourcesIndex;
const prevLine = callsiteLine;
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
const sameSource = prevCsi === callsiteSourcesIndex;
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
callsiteColumn = decodeInteger(
reader,
sameSource && prevLine === callsiteLine ? callsiteColumn : 0,
);
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
}
range.callsite = callsite;
if (hasMoreVlq(reader, semi)) {
bindings = [];
do {
bindingLine = genLine;
bindingColumn = genColumn;
const expressionsCount = decodeInteger(reader, 0);
let expressionRanges: BindingExpressionRange[];
if (expressionsCount < -1) {
expressionRanges = [[decodeInteger(reader, 0)]];
for (let i = -1; i > expressionsCount; i--) {
const prevBl = bindingLine;
bindingLine = decodeInteger(reader, bindingLine);
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
const expression = decodeInteger(reader, 0);
expressionRanges.push([expression, bindingLine, bindingColumn]);
}
} else {
expressionRanges = [[expressionsCount]];
}
bindings.push(expressionRanges);
} while (hasMoreVlq(reader, semi));
}
range.bindings = bindings;
ranges.push(range);
stack.push(range);
}
genLine++;
reader.pos = semi + 1;
} while (reader.pos < length);
return ranges;
}
export function encodeGeneratedRanges(ranges: GeneratedRange[]): string {
if (ranges.length === 0) return '';
const writer = new StringWriter();
for (let i = 0; i < ranges.length; ) {
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
}
return writer.flush();
}
function _encodeGeneratedRanges(
ranges: GeneratedRange[],
index: number,
writer: StringWriter,
state: [
number, // GenLine
number, // GenColumn
number, // DefSourcesIndex
number, // DefScopesIndex
number, // CallSourcesIndex
number, // CallLine
number, // CallColumn
],
): number {
const range = ranges[index];
const {
0: startLine,
1: startColumn,
2: endLine,
3: endColumn,
isScope,
callsite,
bindings,
} = range;
if (state[0] < startLine) {
catchupLine(writer, state[0], startLine);
state[0] = startLine;
state[1] = 0;
} else if (index > 0) {
writer.write(comma);
}
state[1] = encodeInteger(writer, range[1], state[1]);
const fields =
(range.length === 6 ? 0b0001 : 0) | (callsite ? 0b0010 : 0) | (isScope ? 0b0100 : 0);
encodeInteger(writer, fields, 0);
if (range.length === 6) {
const { 4: sourcesIndex, 5: scopesIndex } = range;
if (sourcesIndex !== state[2]) {
state[3] = 0;
}
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
state[3] = encodeInteger(writer, scopesIndex, state[3]);
}
if (callsite) {
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite!;
if (sourcesIndex !== state[4]) {
state[5] = 0;
state[6] = 0;
} else if (callLine !== state[5]) {
state[6] = 0;
}
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
state[5] = encodeInteger(writer, callLine, state[5]);
state[6] = encodeInteger(writer, callColumn, state[6]);
}
if (bindings) {
for (const binding of bindings) {
if (binding.length > 1) encodeInteger(writer, -binding.length, 0);
const expression = binding[0][0];
encodeInteger(writer, expression, 0);
let bindingStartLine = startLine;
let bindingStartColumn = startColumn;
for (let i = 1; i < binding.length; i++) {
const expRange = binding[i];
bindingStartLine = encodeInteger(writer, expRange[1]!, bindingStartLine);
bindingStartColumn = encodeInteger(writer, expRange[2]!, bindingStartColumn);
encodeInteger(writer, expRange[0]!, 0);
}
}
}
for (index++; index < ranges.length; ) {
const next = ranges[index];
const { 0: l, 1: c } = next;
if (l > endLine || (l === endLine && c >= endColumn)) {
break;
}
index = _encodeGeneratedRanges(ranges, index, writer, state);
}
if (state[0] < endLine) {
catchupLine(writer, state[0], endLine);
state[0] = endLine;
state[1] = 0;
} else {
writer.write(comma);
}
state[1] = encodeInteger(writer, endColumn, state[1]);
return index;
}
function catchupLine(writer: StringWriter, lastLine: number, line: number) {
do {
writer.write(semicolon);
} while (++lastLine < line);
}
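A minimal round-trip sketch for the scopes codec above, assuming it is imported from the relative `./scopes` path; the numeric `kind` value and variable indices are arbitrary illustration values:

```typescript
import { encodeOriginalScopes, decodeOriginalScopes, type OriginalScope } from './scopes';
import { deepStrictEqual } from 'node:assert';

// One scope spanning lines 0-2 (columns 0-10), kind 5, referencing two variable
// name indices. The tuple cast mirrors how the codec itself constructs scopes.
const scope = [0, 0, 2, 10, 5] as unknown as OriginalScope;
scope.vars = [0, 1];

const encoded = encodeOriginalScopes([scope]);
const decoded = decodeOriginalScopes(encoded);

// Start/end positions, kind, and vars all survive the encode/decode round trip.
deepStrictEqual(decoded, [scope]);
```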

View File

@@ -0,0 +1,111 @@
import { comma, decodeInteger, encodeInteger, hasMoreVlq, semicolon } from './vlq';
import { StringWriter, StringReader } from './strings';
export {
decodeOriginalScopes,
encodeOriginalScopes,
decodeGeneratedRanges,
encodeGeneratedRanges,
} from './scopes';
export type { OriginalScope, GeneratedRange, CallSite, BindingExpressionRange } from './scopes';
export type SourceMapSegment =
| [number]
| [number, number, number, number]
| [number, number, number, number, number];
export type SourceMapLine = SourceMapSegment[];
export type SourceMapMappings = SourceMapLine[];
export function decode(mappings: string): SourceMapMappings {
const { length } = mappings;
const reader = new StringReader(mappings);
const decoded: SourceMapMappings = [];
let genColumn = 0;
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
do {
const semi = reader.indexOf(';');
const line: SourceMapLine = [];
let sorted = true;
let lastCol = 0;
genColumn = 0;
while (reader.pos < semi) {
let seg: SourceMapSegment;
genColumn = decodeInteger(reader, genColumn);
if (genColumn < lastCol) sorted = false;
lastCol = genColumn;
if (hasMoreVlq(reader, semi)) {
sourcesIndex = decodeInteger(reader, sourcesIndex);
sourceLine = decodeInteger(reader, sourceLine);
sourceColumn = decodeInteger(reader, sourceColumn);
if (hasMoreVlq(reader, semi)) {
namesIndex = decodeInteger(reader, namesIndex);
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
} else {
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
}
} else {
seg = [genColumn];
}
line.push(seg);
reader.pos++;
}
if (!sorted) sort(line);
decoded.push(line);
reader.pos = semi + 1;
} while (reader.pos <= length);
return decoded;
}
function sort(line: SourceMapSegment[]) {
line.sort(sortComparator);
}
function sortComparator(a: SourceMapSegment, b: SourceMapSegment): number {
return a[0] - b[0];
}
export function encode(decoded: SourceMapMappings): string;
export function encode(decoded: Readonly<SourceMapMappings>): string;
export function encode(decoded: Readonly<SourceMapMappings>): string {
const writer = new StringWriter();
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0) writer.write(semicolon);
if (line.length === 0) continue;
let genColumn = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
if (j > 0) writer.write(comma);
genColumn = encodeInteger(writer, segment[0], genColumn);
if (segment.length === 1) continue;
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
sourceLine = encodeInteger(writer, segment[2], sourceLine);
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
if (segment.length === 4) continue;
namesIndex = encodeInteger(writer, segment[4], namesIndex);
}
}
return writer.flush();
}
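A short sketch of the `decode`/`encode` pair above, assuming the published package entry point; the sample mappings string is hand-picked for illustration:

```typescript
import { decode, encode } from '@jridgewell/sourcemap-codec';

// Two generated lines; the second segment's source line is stored as a delta (+1).
const decoded = decode('AAAA;AACA');
console.log(decoded); // [ [ [ 0, 0, 0, 0 ] ], [ [ 0, 0, 1, 0 ] ] ]

// Encoding re-applies the delta compression and restores the original string.
console.log(encode(decoded)); // 'AAAA;AACA'
```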

View File

@@ -0,0 +1,65 @@
const bufLength = 1024 * 16;
// Provide a fallback for older environments.
const td =
typeof TextDecoder !== 'undefined'
? /* #__PURE__ */ new TextDecoder()
: typeof Buffer !== 'undefined'
? {
decode(buf: Uint8Array): string {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
},
}
: {
decode(buf: Uint8Array): string {
let out = '';
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
return out;
},
};
export class StringWriter {
pos = 0;
private out = '';
private buffer = new Uint8Array(bufLength);
write(v: number): void {
const { buffer } = this;
buffer[this.pos++] = v;
if (this.pos === bufLength) {
this.out += td.decode(buffer);
this.pos = 0;
}
}
flush(): string {
const { buffer, out, pos } = this;
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
}
}
export class StringReader {
pos = 0;
declare private buffer: string;
constructor(buffer: string) {
this.buffer = buffer;
}
next(): number {
return this.buffer.charCodeAt(this.pos++);
}
peek(): number {
return this.buffer.charCodeAt(this.pos);
}
indexOf(char: string): number {
const { buffer, pos } = this;
const idx = buffer.indexOf(char, pos);
return idx === -1 ? buffer.length : idx;
}
}
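A small sketch of how these helpers behave, assuming a relative `./strings` import; the sample strings are arbitrary:

```typescript
import { StringWriter, StringReader } from './strings';

// StringWriter accumulates raw char codes and only materializes a string on flush().
const writer = new StringWriter();
for (const ch of 'VLQ') writer.write(ch.charCodeAt(0));
console.log(writer.flush()); // 'VLQ'

// StringReader walks a string one char code at a time; indexOf() falls back to the
// end of the buffer when the character is missing, which simplifies "rest of line" scans.
const reader = new StringReader('AAAA;AACA');
console.log(reader.indexOf(';'));                // 4
console.log(String.fromCharCode(reader.next())); // 'A' (and advances pos to 1)
```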

55
node_modules/@jridgewell/sourcemap-codec/src/vlq.ts generated vendored Normal file
View File

@@ -0,0 +1,55 @@
import type { StringReader, StringWriter } from './strings';
export const comma = ','.charCodeAt(0);
export const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
const c = chars.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
export function decodeInteger(reader: StringReader, relative: number): number {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = reader.next();
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -0x80000000 | -value;
}
return relative + value;
}
export function encodeInteger(builder: StringWriter, num: number, relative: number): number {
let delta = num - relative;
delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
do {
let clamped = delta & 0b011111;
delta >>>= 5;
if (delta > 0) clamped |= 0b100000;
builder.write(intToChar[clamped]);
} while (delta > 0);
return num;
}
export function hasMoreVlq(reader: StringReader, max: number) {
if (reader.pos >= max) return false;
return reader.peek() !== comma;
}
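A worked example of the VLQ helpers above, assuming relative `./strings` and `./vlq` imports; the value 16 is chosen only to show the continuation bit in action:

```typescript
import { StringWriter, StringReader } from './strings';
import { decodeInteger, encodeInteger } from './vlq';

// Encode 16 relative to 0: the zig-zag step gives 32, which splits into the 5-bit
// groups 0b00000 (with continuation bit set) and 0b00001, i.e. base64 'g' then 'B'.
const writer = new StringWriter();
encodeInteger(writer, 16, 0);
console.log(writer.flush()); // 'gB'

// Decoding reverses the process and re-applies the relative base.
const reader = new StringReader('gB');
console.log(decodeInteger(reader, 0)); // 16
```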

View File

@@ -0,0 +1,50 @@
type Line = number;
type Column = number;
type Kind = number;
type Name = number;
type Var = number;
type SourcesIndex = number;
type ScopesIndex = number;
type Mix<A, B, O> = (A & O) | (B & O);
export type OriginalScope = Mix<[
Line,
Column,
Line,
Column,
Kind
], [
Line,
Column,
Line,
Column,
Kind,
Name
], {
vars: Var[];
}>;
export type GeneratedRange = Mix<[
Line,
Column,
Line,
Column
], [
Line,
Column,
Line,
Column,
SourcesIndex,
ScopesIndex
], {
callsite: CallSite | null;
bindings: Binding[];
isScope: boolean;
}>;
export type CallSite = [SourcesIndex, Line, Column];
type Binding = BindingExpressionRange[];
export type BindingExpressionRange = [Name] | [Name, Line, Column];
export declare function decodeOriginalScopes(input: string): OriginalScope[];
export declare function encodeOriginalScopes(scopes: OriginalScope[]): string;
export declare function decodeGeneratedRanges(input: string): GeneratedRange[];
export declare function encodeGeneratedRanges(ranges: GeneratedRange[]): string;
export {};
//# sourceMappingURL=scopes.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"scopes.d.ts","sourceRoot":"","sources":["../src/scopes.ts"],"names":[],"mappings":"AAKA,KAAK,IAAI,GAAG,MAAM,CAAC;AACnB,KAAK,MAAM,GAAG,MAAM,CAAC;AACrB,KAAK,IAAI,GAAG,MAAM,CAAC;AACnB,KAAK,IAAI,GAAG,MAAM,CAAC;AACnB,KAAK,GAAG,GAAG,MAAM,CAAC;AAClB,KAAK,YAAY,GAAG,MAAM,CAAC;AAC3B,KAAK,WAAW,GAAG,MAAM,CAAC;AAE1B,KAAK,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AAEtC,MAAM,MAAM,aAAa,GAAG,GAAG,CAC7B;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;IAAE,IAAI;CAAC,EAClC;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,IAAI;CAAC,EACxC;IAAE,IAAI,EAAE,GAAG,EAAE,CAAA;CAAE,CAChB,CAAC;AAEF,MAAM,MAAM,cAAc,GAAG,GAAG,CAC9B;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;CAAC,EAC5B;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;IAAE,YAAY;IAAE,WAAW;CAAC,EACvD;IACE,QAAQ,EAAE,QAAQ,GAAG,IAAI,CAAC;IAC1B,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,OAAO,EAAE,OAAO,CAAC;CAClB,CACF,CAAC;AACF,MAAM,MAAM,QAAQ,GAAG,CAAC,YAAY,EAAE,IAAI,EAAE,MAAM,CAAC,CAAC;AACpD,KAAK,OAAO,GAAG,sBAAsB,EAAE,CAAC;AACxC,MAAM,MAAM,sBAAsB,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC,CAAC;AAEnE,wBAAgB,oBAAoB,CAAC,KAAK,EAAE,MAAM,GAAG,aAAa,EAAE,CAyCnE;AAED,wBAAgB,oBAAoB,CAAC,MAAM,EAAE,aAAa,EAAE,GAAG,MAAM,CAQpE;AA2CD,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,MAAM,GAAG,cAAc,EAAE,CAoGrE;AAED,wBAAgB,qBAAqB,CAAC,MAAM,EAAE,cAAc,EAAE,GAAG,MAAM,CAUtE"}

View File

@@ -0,0 +1,50 @@
type Line = number;
type Column = number;
type Kind = number;
type Name = number;
type Var = number;
type SourcesIndex = number;
type ScopesIndex = number;
type Mix<A, B, O> = (A & O) | (B & O);
export type OriginalScope = Mix<[
Line,
Column,
Line,
Column,
Kind
], [
Line,
Column,
Line,
Column,
Kind,
Name
], {
vars: Var[];
}>;
export type GeneratedRange = Mix<[
Line,
Column,
Line,
Column
], [
Line,
Column,
Line,
Column,
SourcesIndex,
ScopesIndex
], {
callsite: CallSite | null;
bindings: Binding[];
isScope: boolean;
}>;
export type CallSite = [SourcesIndex, Line, Column];
type Binding = BindingExpressionRange[];
export type BindingExpressionRange = [Name] | [Name, Line, Column];
export declare function decodeOriginalScopes(input: string): OriginalScope[];
export declare function encodeOriginalScopes(scopes: OriginalScope[]): string;
export declare function decodeGeneratedRanges(input: string): GeneratedRange[];
export declare function encodeGeneratedRanges(ranges: GeneratedRange[]): string;
export {};
//# sourceMappingURL=scopes.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"scopes.d.ts","sourceRoot":"","sources":["../src/scopes.ts"],"names":[],"mappings":"AAKA,KAAK,IAAI,GAAG,MAAM,CAAC;AACnB,KAAK,MAAM,GAAG,MAAM,CAAC;AACrB,KAAK,IAAI,GAAG,MAAM,CAAC;AACnB,KAAK,IAAI,GAAG,MAAM,CAAC;AACnB,KAAK,GAAG,GAAG,MAAM,CAAC;AAClB,KAAK,YAAY,GAAG,MAAM,CAAC;AAC3B,KAAK,WAAW,GAAG,MAAM,CAAC;AAE1B,KAAK,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AAEtC,MAAM,MAAM,aAAa,GAAG,GAAG,CAC7B;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;IAAE,IAAI;CAAC,EAClC;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,IAAI;CAAC,EACxC;IAAE,IAAI,EAAE,GAAG,EAAE,CAAA;CAAE,CAChB,CAAC;AAEF,MAAM,MAAM,cAAc,GAAG,GAAG,CAC9B;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;CAAC,EAC5B;IAAC,IAAI;IAAE,MAAM;IAAE,IAAI;IAAE,MAAM;IAAE,YAAY;IAAE,WAAW;CAAC,EACvD;IACE,QAAQ,EAAE,QAAQ,GAAG,IAAI,CAAC;IAC1B,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,OAAO,EAAE,OAAO,CAAC;CAClB,CACF,CAAC;AACF,MAAM,MAAM,QAAQ,GAAG,CAAC,YAAY,EAAE,IAAI,EAAE,MAAM,CAAC,CAAC;AACpD,KAAK,OAAO,GAAG,sBAAsB,EAAE,CAAC;AACxC,MAAM,MAAM,sBAAsB,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC,CAAC;AAEnE,wBAAgB,oBAAoB,CAAC,KAAK,EAAE,MAAM,GAAG,aAAa,EAAE,CAyCnE;AAED,wBAAgB,oBAAoB,CAAC,MAAM,EAAE,aAAa,EAAE,GAAG,MAAM,CAQpE;AA2CD,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,MAAM,GAAG,cAAc,EAAE,CAoGrE;AAED,wBAAgB,qBAAqB,CAAC,MAAM,EAAE,cAAc,EAAE,GAAG,MAAM,CAUtE"}

View File

@@ -0,0 +1,9 @@
export { decodeOriginalScopes, encodeOriginalScopes, decodeGeneratedRanges, encodeGeneratedRanges, } from './scopes.cts';
export type { OriginalScope, GeneratedRange, CallSite, BindingExpressionRange } from './scopes.cts';
export type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
export type SourceMapLine = SourceMapSegment[];
export type SourceMapMappings = SourceMapLine[];
export declare function decode(mappings: string): SourceMapMappings;
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
//# sourceMappingURL=sourcemap-codec.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"sourcemap-codec.d.ts","sourceRoot":"","sources":["../src/sourcemap-codec.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,oBAAoB,EACpB,oBAAoB,EACpB,qBAAqB,EACrB,qBAAqB,GACtB,MAAM,UAAU,CAAC;AAClB,YAAY,EAAE,aAAa,EAAE,cAAc,EAAE,QAAQ,EAAE,sBAAsB,EAAE,MAAM,UAAU,CAAC;AAEhG,MAAM,MAAM,gBAAgB,GACxB,CAAC,MAAM,CAAC,GACR,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,GAChC,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;AAC7C,MAAM,MAAM,aAAa,GAAG,gBAAgB,EAAE,CAAC;AAC/C,MAAM,MAAM,iBAAiB,GAAG,aAAa,EAAE,CAAC;AAEhD,wBAAgB,MAAM,CAAC,QAAQ,EAAE,MAAM,GAAG,iBAAiB,CAiD1D;AAUD,wBAAgB,MAAM,CAAC,OAAO,EAAE,iBAAiB,GAAG,MAAM,CAAC;AAC3D,wBAAgB,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,iBAAiB,CAAC,GAAG,MAAM,CAAC"}

Some files were not shown because too many files have changed in this diff.