Fixed prisma.schema, package.json and prisma seed.ts
This commit is contained in:
162
node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
162
node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
@ -17,21 +17,13 @@ npm install @jridgewell/trace-mapping
|
|||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
import {
|
import { TraceMap, originalPositionFor, generatedPositionFor } from '@jridgewell/trace-mapping';
|
||||||
TraceMap,
|
|
||||||
originalPositionFor,
|
|
||||||
generatedPositionFor,
|
|
||||||
sourceContentFor,
|
|
||||||
isIgnored,
|
|
||||||
} from '@jridgewell/trace-mapping';
|
|
||||||
|
|
||||||
const tracer = new TraceMap({
|
const tracer = new TraceMap({
|
||||||
version: 3,
|
version: 3,
|
||||||
sources: ['input.js'],
|
sources: ['input.js'],
|
||||||
sourcesContent: ['content of input.js'],
|
|
||||||
names: ['foo'],
|
names: ['foo'],
|
||||||
mappings: 'KAyCIA',
|
mappings: 'KAyCIA',
|
||||||
ignoreList: [],
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// Lines start at line 1, columns at column 0.
|
// Lines start at line 1, columns at column 0.
|
||||||
@ -43,9 +35,6 @@ assert.deepEqual(traced, {
|
|||||||
name: 'foo',
|
name: 'foo',
|
||||||
});
|
});
|
||||||
|
|
||||||
const content = sourceContentFor(tracer, traced.source);
|
|
||||||
assert.strictEqual(content, 'content for input.js');
|
|
||||||
|
|
||||||
const generated = generatedPositionFor(tracer, {
|
const generated = generatedPositionFor(tracer, {
|
||||||
source: 'input.js',
|
source: 'input.js',
|
||||||
line: 42,
|
line: 42,
|
||||||
@ -55,9 +44,6 @@ assert.deepEqual(generated, {
|
|||||||
line: 1,
|
line: 1,
|
||||||
column: 5,
|
column: 5,
|
||||||
});
|
});
|
||||||
|
|
||||||
const ignored = isIgnored(tracer, 'input.js');
|
|
||||||
assert.equal(ignored, false);
|
|
||||||
```
|
```
|
||||||
|
|
||||||
We also provide a lower level API to get the actual segment that matches our line and column. Unlike
|
We also provide a lower level API to get the actual segment that matches our line and column. Unlike
|
||||||
@ -133,124 +119,74 @@ assert.deepEqual(traced, {
|
|||||||
```
|
```
|
||||||
node v18.0.0
|
node v18.0.0
|
||||||
|
|
||||||
amp.js.map - 45120 segments
|
amp.js.map
|
||||||
|
trace-mapping: decoded JSON input x 183 ops/sec ±0.41% (87 runs sampled)
|
||||||
Memory Usage:
|
trace-mapping: encoded JSON input x 384 ops/sec ±0.89% (89 runs sampled)
|
||||||
trace-mapping decoded 562400 bytes
|
trace-mapping: decoded Object input x 3,085 ops/sec ±0.24% (100 runs sampled)
|
||||||
trace-mapping encoded 5706544 bytes
|
trace-mapping: encoded Object input x 452 ops/sec ±0.80% (84 runs sampled)
|
||||||
source-map-js 10717664 bytes
|
source-map-js: encoded Object input x 88.82 ops/sec ±0.45% (77 runs sampled)
|
||||||
source-map-0.6.1 17446384 bytes
|
source-map-0.6.1: encoded Object input x 38.39 ops/sec ±1.88% (52 runs sampled)
|
||||||
source-map-0.8.0 9701757 bytes
|
|
||||||
Smallest memory usage is trace-mapping decoded
|
|
||||||
|
|
||||||
Init speed:
|
|
||||||
trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled)
|
|
||||||
trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled)
|
|
||||||
trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled)
|
|
||||||
trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled)
|
|
||||||
source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled)
|
|
||||||
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded Object input
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
Trace speed:
|
trace-mapping: decoded originalPositionFor x 4,025,347 ops/sec ±0.15% (97 runs sampled)
|
||||||
trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled)
|
trace-mapping: encoded originalPositionFor x 3,333,136 ops/sec ±1.26% (90 runs sampled)
|
||||||
trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled)
|
source-map-js: encoded originalPositionFor x 824,978 ops/sec ±1.06% (94 runs sampled)
|
||||||
source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled)
|
source-map-0.6.1: encoded originalPositionFor x 741,300 ops/sec ±0.93% (92 runs sampled)
|
||||||
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled)
|
source-map-0.8.0: encoded originalPositionFor x 2,587,603 ops/sec ±0.75% (97 runs sampled)
|
||||||
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded originalPositionFor
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
***
|
***
|
||||||
|
|
||||||
|
babel.min.js.map
|
||||||
babel.min.js.map - 347793 segments
|
trace-mapping: decoded JSON input x 17.43 ops/sec ±8.81% (33 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 34.18 ops/sec ±4.67% (50 runs sampled)
|
||||||
Memory Usage:
|
trace-mapping: decoded Object input x 1,010 ops/sec ±0.41% (98 runs sampled)
|
||||||
trace-mapping decoded 89832 bytes
|
trace-mapping: encoded Object input x 39.45 ops/sec ±4.01% (52 runs sampled)
|
||||||
trace-mapping encoded 35474640 bytes
|
source-map-js: encoded Object input x 6.57 ops/sec ±3.04% (21 runs sampled)
|
||||||
source-map-js 51257176 bytes
|
source-map-0.6.1: encoded Object input x 4.23 ops/sec ±2.93% (15 runs sampled)
|
||||||
source-map-0.6.1 63515664 bytes
|
|
||||||
source-map-0.8.0 42933752 bytes
|
|
||||||
Smallest memory usage is trace-mapping decoded
|
|
||||||
|
|
||||||
Init speed:
|
|
||||||
trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled)
|
|
||||||
trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled)
|
|
||||||
trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled)
|
|
||||||
trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled)
|
|
||||||
source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled)
|
|
||||||
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded Object input
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
Trace speed:
|
trace-mapping: decoded originalPositionFor x 7,576,265 ops/sec ±0.74% (96 runs sampled)
|
||||||
trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled)
|
trace-mapping: encoded originalPositionFor x 5,019,743 ops/sec ±0.74% (94 runs sampled)
|
||||||
trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled)
|
source-map-js: encoded originalPositionFor x 3,396,137 ops/sec ±42.32% (95 runs sampled)
|
||||||
source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled)
|
source-map-0.6.1: encoded originalPositionFor x 3,753,176 ops/sec ±0.72% (95 runs sampled)
|
||||||
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled)
|
source-map-0.8.0: encoded originalPositionFor x 6,423,633 ops/sec ±0.74% (95 runs sampled)
|
||||||
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded originalPositionFor
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
***
|
***
|
||||||
|
|
||||||
|
preact.js.map
|
||||||
preact.js.map - 1992 segments
|
trace-mapping: decoded JSON input x 3,499 ops/sec ±0.18% (98 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 6,078 ops/sec ±0.25% (99 runs sampled)
|
||||||
Memory Usage:
|
trace-mapping: decoded Object input x 254,788 ops/sec ±0.13% (100 runs sampled)
|
||||||
trace-mapping decoded 37128 bytes
|
trace-mapping: encoded Object input x 14,063 ops/sec ±0.27% (94 runs sampled)
|
||||||
trace-mapping encoded 247280 bytes
|
source-map-js: encoded Object input x 2,465 ops/sec ±0.25% (98 runs sampled)
|
||||||
source-map-js 1143536 bytes
|
source-map-0.6.1: encoded Object input x 1,174 ops/sec ±1.90% (95 runs sampled)
|
||||||
source-map-0.6.1 1290992 bytes
|
|
||||||
source-map-0.8.0 96544 bytes
|
|
||||||
Smallest memory usage is trace-mapping decoded
|
|
||||||
|
|
||||||
Init speed:
|
|
||||||
trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled)
|
|
||||||
trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled)
|
|
||||||
trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled)
|
|
||||||
trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled)
|
|
||||||
source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled)
|
|
||||||
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded Object input
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
Trace speed:
|
trace-mapping: decoded originalPositionFor x 7,720,171 ops/sec ±0.14% (97 runs sampled)
|
||||||
trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled)
|
trace-mapping: encoded originalPositionFor x 6,864,485 ops/sec ±0.16% (101 runs sampled)
|
||||||
trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled)
|
source-map-js: encoded originalPositionFor x 2,387,219 ops/sec ±0.28% (98 runs sampled)
|
||||||
source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled)
|
source-map-0.6.1: encoded originalPositionFor x 1,565,339 ops/sec ±0.32% (101 runs sampled)
|
||||||
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled)
|
source-map-0.8.0: encoded originalPositionFor x 3,819,732 ops/sec ±0.38% (98 runs sampled)
|
||||||
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded originalPositionFor
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
|
|
||||||
|
|
||||||
***
|
***
|
||||||
|
|
||||||
|
react.js.map
|
||||||
react.js.map - 5726 segments
|
trace-mapping: decoded JSON input x 1,719 ops/sec ±0.19% (99 runs sampled)
|
||||||
|
trace-mapping: encoded JSON input x 4,284 ops/sec ±0.51% (99 runs sampled)
|
||||||
Memory Usage:
|
trace-mapping: decoded Object input x 94,668 ops/sec ±0.08% (99 runs sampled)
|
||||||
trace-mapping decoded 16176 bytes
|
trace-mapping: encoded Object input x 5,287 ops/sec ±0.24% (99 runs sampled)
|
||||||
trace-mapping encoded 681552 bytes
|
source-map-js: encoded Object input x 814 ops/sec ±0.20% (98 runs sampled)
|
||||||
source-map-js 2418352 bytes
|
source-map-0.6.1: encoded Object input x 429 ops/sec ±0.24% (94 runs sampled)
|
||||||
source-map-0.6.1 2443672 bytes
|
|
||||||
source-map-0.8.0 111768 bytes
|
|
||||||
Smallest memory usage is trace-mapping decoded
|
|
||||||
|
|
||||||
Init speed:
|
|
||||||
trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled)
|
|
||||||
trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled)
|
|
||||||
trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled)
|
|
||||||
trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled)
|
|
||||||
source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled)
|
|
||||||
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded Object input
|
Fastest is trace-mapping: decoded Object input
|
||||||
|
|
||||||
Trace speed:
|
trace-mapping: decoded originalPositionFor x 28,927,989 ops/sec ±0.61% (94 runs sampled)
|
||||||
trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled)
|
trace-mapping: encoded originalPositionFor x 27,394,475 ops/sec ±0.55% (97 runs sampled)
|
||||||
trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled)
|
source-map-js: encoded originalPositionFor x 16,856,730 ops/sec ±0.45% (96 runs sampled)
|
||||||
source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled)
|
source-map-0.6.1: encoded originalPositionFor x 12,258,950 ops/sec ±0.41% (97 runs sampled)
|
||||||
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled)
|
source-map-0.8.0: encoded originalPositionFor x 22,272,990 ops/sec ±0.58% (95 runs sampled)
|
||||||
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded originalPositionFor
|
Fastest is trace-mapping: decoded originalPositionFor
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
556
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
556
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
@ -101,14 +101,14 @@ function binarySearch(haystack, needle, low, high) {
|
|||||||
return low - 1;
|
return low - 1;
|
||||||
}
|
}
|
||||||
function upperBound(haystack, needle, index) {
|
function upperBound(haystack, needle, index) {
|
||||||
for (let i = index + 1; i < haystack.length; index = i++) {
|
for (let i = index + 1; i < haystack.length; i++, index++) {
|
||||||
if (haystack[i][COLUMN] !== needle)
|
if (haystack[i][COLUMN] !== needle)
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
return index;
|
return index;
|
||||||
}
|
}
|
||||||
function lowerBound(haystack, needle, index) {
|
function lowerBound(haystack, needle, index) {
|
||||||
for (let i = index - 1; i >= 0; index = i--) {
|
for (let i = index - 1; i >= 0; i--, index--) {
|
||||||
if (haystack[i][COLUMN] !== needle)
|
if (haystack[i][COLUMN] !== needle)
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@ -167,9 +167,8 @@ function buildBySources(decoded, memos) {
|
|||||||
// segment should go. Either way, we want to insert after that. And there may be multiple
|
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||||
// generated segments associated with an original location, so there may need to move several
|
// generated segments associated with an original location, so there may need to move several
|
||||||
// indexes before we find where we need to insert.
|
// indexes before we find where we need to insert.
|
||||||
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||||
memo.lastIndex = ++index;
|
insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
|
||||||
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return sources;
|
return sources;
|
||||||
@ -190,16 +189,22 @@ function buildNullArray() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const AnyMap = function (map, mapUrl) {
|
const AnyMap = function (map, mapUrl) {
|
||||||
const parsed = parse(map);
|
const parsed = typeof map === 'string' ? JSON.parse(map) : map;
|
||||||
if (!('sections' in parsed)) {
|
if (!('sections' in parsed))
|
||||||
return new TraceMap(parsed, mapUrl);
|
return new TraceMap(parsed, mapUrl);
|
||||||
}
|
|
||||||
const mappings = [];
|
const mappings = [];
|
||||||
const sources = [];
|
const sources = [];
|
||||||
const sourcesContent = [];
|
const sourcesContent = [];
|
||||||
const names = [];
|
const names = [];
|
||||||
const ignoreList = [];
|
const { sections } = parsed;
|
||||||
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
|
let i = 0;
|
||||||
|
for (; i < sections.length - 1; i++) {
|
||||||
|
const no = sections[i + 1].offset;
|
||||||
|
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, no.line, no.column);
|
||||||
|
}
|
||||||
|
if (sections.length > 0) {
|
||||||
|
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, Infinity, Infinity);
|
||||||
|
}
|
||||||
const joined = {
|
const joined = {
|
||||||
version: 3,
|
version: 3,
|
||||||
file: parsed.file,
|
file: parsed.file,
|
||||||
@ -207,73 +212,42 @@ const AnyMap = function (map, mapUrl) {
|
|||||||
sources,
|
sources,
|
||||||
sourcesContent,
|
sourcesContent,
|
||||||
mappings,
|
mappings,
|
||||||
ignoreList,
|
|
||||||
};
|
};
|
||||||
return presortedDecodedMap(joined);
|
return presortedDecodedMap(joined);
|
||||||
};
|
};
|
||||||
function parse(map) {
|
function addSection(section, mapUrl, mappings, sources, sourcesContent, names, stopLine, stopColumn) {
|
||||||
return typeof map === 'string' ? JSON.parse(map) : map;
|
const map = AnyMap(section.map, mapUrl);
|
||||||
}
|
const { line: lineOffset, column: columnOffset } = section.offset;
|
||||||
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
|
||||||
const { sections } = input;
|
|
||||||
for (let i = 0; i < sections.length; i++) {
|
|
||||||
const { map, offset } = sections[i];
|
|
||||||
let sl = stopLine;
|
|
||||||
let sc = stopColumn;
|
|
||||||
if (i + 1 < sections.length) {
|
|
||||||
const nextOffset = sections[i + 1].offset;
|
|
||||||
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
|
||||||
if (sl === stopLine) {
|
|
||||||
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
|
||||||
}
|
|
||||||
else if (sl < stopLine) {
|
|
||||||
sc = columnOffset + nextOffset.column;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
|
||||||
const parsed = parse(input);
|
|
||||||
if ('sections' in parsed)
|
|
||||||
return recurse(...arguments);
|
|
||||||
const map = new TraceMap(parsed, mapUrl);
|
|
||||||
const sourcesOffset = sources.length;
|
const sourcesOffset = sources.length;
|
||||||
const namesOffset = names.length;
|
const namesOffset = names.length;
|
||||||
const decoded = decodedMappings(map);
|
const decoded = decodedMappings(map);
|
||||||
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
|
const { resolvedSources } = map;
|
||||||
append(sources, resolvedSources);
|
append(sources, resolvedSources);
|
||||||
|
append(sourcesContent, map.sourcesContent || fillSourcesContent(resolvedSources.length));
|
||||||
append(names, map.names);
|
append(names, map.names);
|
||||||
if (contents)
|
// If this section jumps forwards several lines, we need to add lines to the output mappings catch up.
|
||||||
append(sourcesContent, contents);
|
for (let i = mappings.length; i <= lineOffset; i++)
|
||||||
else
|
mappings.push([]);
|
||||||
for (let i = 0; i < resolvedSources.length; i++)
|
// We can only add so many lines before we step into the range that the next section's map
|
||||||
sourcesContent.push(null);
|
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||||
if (ignores)
|
// they've crossed into the column range.
|
||||||
for (let i = 0; i < ignores.length; i++)
|
const stopI = stopLine - lineOffset;
|
||||||
ignoreList.push(ignores[i] + sourcesOffset);
|
const len = Math.min(decoded.length, stopI + 1);
|
||||||
for (let i = 0; i < decoded.length; i++) {
|
for (let i = 0; i < len; i++) {
|
||||||
const lineI = lineOffset + i;
|
const line = decoded[i];
|
||||||
// We can only add so many lines before we step into the range that the next section's map
|
// On the 0th loop, the line will already exist due to a previous section, or the line catch up
|
||||||
// controls. When we get to the last line, then we'll start checking the segments to see if
|
// loop above.
|
||||||
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
const out = i === 0 ? mappings[lineOffset] : (mappings[lineOffset + i] = []);
|
||||||
// still need to check that we don't overstep lines, too.
|
|
||||||
if (lineI > stopLine)
|
|
||||||
return;
|
|
||||||
// The out line may already exist in mappings (if we're continuing the line started by a
|
|
||||||
// previous section). Or, we may have jumped ahead several lines to start this section.
|
|
||||||
const out = getLine(mappings, lineI);
|
|
||||||
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||||
// map can be multiple lines), it doesn't.
|
// map can be multiple lines), it doesn't.
|
||||||
const cOffset = i === 0 ? columnOffset : 0;
|
const cOffset = i === 0 ? columnOffset : 0;
|
||||||
const line = decoded[i];
|
|
||||||
for (let j = 0; j < line.length; j++) {
|
for (let j = 0; j < line.length; j++) {
|
||||||
const seg = line[j];
|
const seg = line[j];
|
||||||
const column = cOffset + seg[COLUMN];
|
const column = cOffset + seg[COLUMN];
|
||||||
// If this segment steps into the column range that the next section's map controls, we need
|
// If this segment steps into the column range that the next section's map controls, we need
|
||||||
// to stop early.
|
// to stop early.
|
||||||
if (lineI === stopLine && column >= stopColumn)
|
if (i === stopI && column >= stopColumn)
|
||||||
return;
|
break;
|
||||||
if (seg.length === 1) {
|
if (seg.length === 1) {
|
||||||
out.push([column]);
|
out.push([column]);
|
||||||
continue;
|
continue;
|
||||||
@ -281,9 +255,11 @@ function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ign
|
|||||||
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||||
const sourceLine = seg[SOURCE_LINE];
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
const sourceColumn = seg[SOURCE_COLUMN];
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
out.push(seg.length === 4
|
if (seg.length === 4) {
|
||||||
? [column, sourcesIndex, sourceLine, sourceColumn]
|
out.push([column, sourcesIndex, sourceLine, sourceColumn]);
|
||||||
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
continue;
|
||||||
|
}
|
||||||
|
out.push([column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -291,32 +267,100 @@ function append(arr, other) {
|
|||||||
for (let i = 0; i < other.length; i++)
|
for (let i = 0; i < other.length; i++)
|
||||||
arr.push(other[i]);
|
arr.push(other[i]);
|
||||||
}
|
}
|
||||||
function getLine(arr, index) {
|
// Sourcemaps don't need to have sourcesContent, and if they don't, we need to create an array of
|
||||||
for (let i = arr.length; i <= index; i++)
|
// equal length to the sources. This is because the sources and sourcesContent are paired arrays,
|
||||||
arr[i] = [];
|
// where `sourcesContent[i]` is the content of the `sources[i]` file. If we didn't, then joined
|
||||||
return arr[index];
|
// sourcemap would desynchronize the sources/contents.
|
||||||
|
function fillSourcesContent(len) {
|
||||||
|
const sourcesContent = [];
|
||||||
|
for (let i = 0; i < len; i++)
|
||||||
|
sourcesContent[i] = null;
|
||||||
|
return sourcesContent;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const INVALID_ORIGINAL_MAPPING = Object.freeze({
|
||||||
|
source: null,
|
||||||
|
line: null,
|
||||||
|
column: null,
|
||||||
|
name: null,
|
||||||
|
});
|
||||||
|
const INVALID_GENERATED_MAPPING = Object.freeze({
|
||||||
|
line: null,
|
||||||
|
column: null,
|
||||||
|
});
|
||||||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||||
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||||
const LEAST_UPPER_BOUND = -1;
|
const LEAST_UPPER_BOUND = -1;
|
||||||
const GREATEST_LOWER_BOUND = 1;
|
const GREATEST_LOWER_BOUND = 1;
|
||||||
|
/**
|
||||||
|
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
let encodedMappings;
|
||||||
|
/**
|
||||||
|
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
let decodedMappings;
|
||||||
|
/**
|
||||||
|
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||||
|
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
let traceSegment;
|
||||||
|
/**
|
||||||
|
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||||
|
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||||
|
* `source-map` library.
|
||||||
|
*/
|
||||||
|
let originalPositionFor;
|
||||||
|
/**
|
||||||
|
* Finds the source/line/column directly after the mapping returned by originalPositionFor, provided
|
||||||
|
* the found mapping is from the same source and line as the originalPositionFor mapping.
|
||||||
|
*
|
||||||
|
* Eg, in the code `let id = 1`, `originalPositionAfter` could find the mapping associated with `1`
|
||||||
|
* using the same needle that would return `id` when calling `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
let generatedPositionFor;
|
||||||
|
/**
|
||||||
|
* Iterates each mapping in generated position order.
|
||||||
|
*/
|
||||||
|
let eachMapping;
|
||||||
|
/**
|
||||||
|
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||||
|
* maps.
|
||||||
|
*/
|
||||||
|
let presortedDecodedMap;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
let decodedMap;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
let encodedMap;
|
||||||
class TraceMap {
|
class TraceMap {
|
||||||
constructor(map, mapUrl) {
|
constructor(map, mapUrl) {
|
||||||
|
this._decodedMemo = memoizedState();
|
||||||
|
this._bySources = undefined;
|
||||||
|
this._bySourceMemos = undefined;
|
||||||
const isString = typeof map === 'string';
|
const isString = typeof map === 'string';
|
||||||
if (!isString && map._decodedMemo)
|
if (!isString && map.constructor === TraceMap)
|
||||||
return map;
|
return map;
|
||||||
const parsed = (isString ? JSON.parse(map) : map);
|
const parsed = (isString ? JSON.parse(map) : map);
|
||||||
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||||
this.version = version;
|
this.version = version;
|
||||||
this.file = file;
|
this.file = file;
|
||||||
this.names = names || [];
|
this.names = names;
|
||||||
this.sourceRoot = sourceRoot;
|
this.sourceRoot = sourceRoot;
|
||||||
this.sources = sources;
|
this.sources = sources;
|
||||||
this.sourcesContent = sourcesContent;
|
this.sourcesContent = sourcesContent;
|
||||||
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
|
if (sourceRoot || mapUrl) {
|
||||||
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||||
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this.resolvedSources = sources.map((s) => s || '');
|
||||||
|
}
|
||||||
const { mappings } = parsed;
|
const { mappings } = parsed;
|
||||||
if (typeof mappings === 'string') {
|
if (typeof mappings === 'string') {
|
||||||
this._encoded = mappings;
|
this._encoded = mappings;
|
||||||
@ -326,191 +370,134 @@ class TraceMap {
|
|||||||
this._encoded = undefined;
|
this._encoded = undefined;
|
||||||
this._decoded = maybeSort(mappings, isString);
|
this._decoded = maybeSort(mappings, isString);
|
||||||
}
|
}
|
||||||
this._decodedMemo = memoizedState();
|
|
||||||
this._bySources = undefined;
|
|
||||||
this._bySourceMemos = undefined;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
/**
|
(() => {
|
||||||
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
encodedMappings = (map) => {
|
||||||
* with public access modifiers.
|
var _a;
|
||||||
*/
|
return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = encode(map._decoded)));
|
||||||
function cast(map) {
|
|
||||||
return map;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
|
||||||
*/
|
|
||||||
function encodedMappings(map) {
|
|
||||||
var _a;
|
|
||||||
var _b;
|
|
||||||
return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = encode(cast(map)._decoded)));
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
|
||||||
*/
|
|
||||||
function decodedMappings(map) {
|
|
||||||
var _a;
|
|
||||||
return ((_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded)));
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* A low-level API to find the segment associated with a generated line/column (think, from a
|
|
||||||
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
|
||||||
*/
|
|
||||||
function traceSegment(map, line, column) {
|
|
||||||
const decoded = decodedMappings(map);
|
|
||||||
// It's common for parent source maps to have pointers to lines that have no
|
|
||||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
|
||||||
if (line >= decoded.length)
|
|
||||||
return null;
|
|
||||||
const segments = decoded[line];
|
|
||||||
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
|
||||||
return index === -1 ? null : segments[index];
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* A higher-level API to find the source/line/column associated with a generated line/column
|
|
||||||
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
|
||||||
* `source-map` library.
|
|
||||||
*/
|
|
||||||
function originalPositionFor(map, needle) {
|
|
||||||
let { line, column, bias } = needle;
|
|
||||||
line--;
|
|
||||||
if (line < 0)
|
|
||||||
throw new Error(LINE_GTR_ZERO);
|
|
||||||
if (column < 0)
|
|
||||||
throw new Error(COL_GTR_EQ_ZERO);
|
|
||||||
const decoded = decodedMappings(map);
|
|
||||||
// It's common for parent source maps to have pointers to lines that have no
|
|
||||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
|
||||||
if (line >= decoded.length)
|
|
||||||
return OMapping(null, null, null, null);
|
|
||||||
const segments = decoded[line];
|
|
||||||
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
|
||||||
if (index === -1)
|
|
||||||
return OMapping(null, null, null, null);
|
|
||||||
const segment = segments[index];
|
|
||||||
if (segment.length === 1)
|
|
||||||
return OMapping(null, null, null, null);
|
|
||||||
const { names, resolvedSources } = map;
|
|
||||||
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Finds the generated line/column position of the provided source/line/column source position.
|
|
||||||
*/
|
|
||||||
function generatedPositionFor(map, needle) {
|
|
||||||
const { source, line, column, bias } = needle;
|
|
||||||
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Finds all generated line/column positions of the provided source/line/column source position.
|
|
||||||
*/
|
|
||||||
function allGeneratedPositionsFor(map, needle) {
|
|
||||||
const { source, line, column, bias } = needle;
|
|
||||||
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
|
||||||
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Iterates each mapping in generated position order.
|
|
||||||
*/
|
|
||||||
function eachMapping(map, cb) {
|
|
||||||
const decoded = decodedMappings(map);
|
|
||||||
const { names, resolvedSources } = map;
|
|
||||||
for (let i = 0; i < decoded.length; i++) {
|
|
||||||
const line = decoded[i];
|
|
||||||
for (let j = 0; j < line.length; j++) {
|
|
||||||
const seg = line[j];
|
|
||||||
const generatedLine = i + 1;
|
|
||||||
const generatedColumn = seg[0];
|
|
||||||
let source = null;
|
|
||||||
let originalLine = null;
|
|
||||||
let originalColumn = null;
|
|
||||||
let name = null;
|
|
||||||
if (seg.length !== 1) {
|
|
||||||
source = resolvedSources[seg[1]];
|
|
||||||
originalLine = seg[2] + 1;
|
|
||||||
originalColumn = seg[3];
|
|
||||||
}
|
|
||||||
if (seg.length === 5)
|
|
||||||
name = names[seg[4]];
|
|
||||||
cb({
|
|
||||||
generatedLine,
|
|
||||||
generatedColumn,
|
|
||||||
source,
|
|
||||||
originalLine,
|
|
||||||
originalColumn,
|
|
||||||
name,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function sourceIndex(map, source) {
|
|
||||||
const { sources, resolvedSources } = map;
|
|
||||||
let index = sources.indexOf(source);
|
|
||||||
if (index === -1)
|
|
||||||
index = resolvedSources.indexOf(source);
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
|
||||||
*/
|
|
||||||
function sourceContentFor(map, source) {
|
|
||||||
const { sourcesContent } = map;
|
|
||||||
if (sourcesContent == null)
|
|
||||||
return null;
|
|
||||||
const index = sourceIndex(map, source);
|
|
||||||
return index === -1 ? null : sourcesContent[index];
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Determines if the source is marked to ignore by the source map.
|
|
||||||
*/
|
|
||||||
function isIgnored(map, source) {
|
|
||||||
const { ignoreList } = map;
|
|
||||||
if (ignoreList == null)
|
|
||||||
return false;
|
|
||||||
const index = sourceIndex(map, source);
|
|
||||||
return index === -1 ? false : ignoreList.includes(index);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
|
||||||
* maps.
|
|
||||||
*/
|
|
||||||
function presortedDecodedMap(map, mapUrl) {
|
|
||||||
const tracer = new TraceMap(clone(map, []), mapUrl);
|
|
||||||
cast(tracer)._decoded = map.mappings;
|
|
||||||
return tracer;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
|
||||||
* a sourcemap, or to JSON.stringify.
|
|
||||||
*/
|
|
||||||
function decodedMap(map) {
|
|
||||||
return clone(map, decodedMappings(map));
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
|
||||||
* a sourcemap, or to JSON.stringify.
|
|
||||||
*/
|
|
||||||
function encodedMap(map) {
|
|
||||||
return clone(map, encodedMappings(map));
|
|
||||||
}
|
|
||||||
function clone(map, mappings) {
|
|
||||||
return {
|
|
||||||
version: map.version,
|
|
||||||
file: map.file,
|
|
||||||
names: map.names,
|
|
||||||
sourceRoot: map.sourceRoot,
|
|
||||||
sources: map.sources,
|
|
||||||
sourcesContent: map.sourcesContent,
|
|
||||||
mappings,
|
|
||||||
ignoreList: map.ignoreList || map.x_google_ignoreList,
|
|
||||||
};
|
};
|
||||||
}
|
decodedMappings = (map) => {
|
||||||
function OMapping(source, line, column, name) {
|
return (map._decoded || (map._decoded = decode(map._encoded)));
|
||||||
return { source, line, column, name };
|
};
|
||||||
}
|
traceSegment = (map, line, column) => {
|
||||||
function GMapping(line, column) {
|
const decoded = decodedMappings(map);
|
||||||
return { line, column };
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
}
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return null;
|
||||||
|
return traceSegmentInternal(decoded[line], map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
};
|
||||||
|
originalPositionFor = (map, { line, column, bias }) => {
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return INVALID_ORIGINAL_MAPPING;
|
||||||
|
const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||||
|
if (segment == null)
|
||||||
|
return INVALID_ORIGINAL_MAPPING;
|
||||||
|
if (segment.length == 1)
|
||||||
|
return INVALID_ORIGINAL_MAPPING;
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
return {
|
||||||
|
source: resolvedSources[segment[SOURCES_INDEX]],
|
||||||
|
line: segment[SOURCE_LINE] + 1,
|
||||||
|
column: segment[SOURCE_COLUMN],
|
||||||
|
name: segment.length === 5 ? names[segment[NAMES_INDEX]] : null,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
generatedPositionFor = (map, { source, line, column, bias }) => {
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let sourceIndex = sources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
sourceIndex = resolvedSources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
return INVALID_GENERATED_MAPPING;
|
||||||
|
const generated = (map._bySources || (map._bySources = buildBySources(decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
|
||||||
|
const memos = map._bySourceMemos;
|
||||||
|
const segments = generated[sourceIndex][line];
|
||||||
|
if (segments == null)
|
||||||
|
return INVALID_GENERATED_MAPPING;
|
||||||
|
const segment = traceSegmentInternal(segments, memos[sourceIndex], line, column, bias || GREATEST_LOWER_BOUND);
|
||||||
|
if (segment == null)
|
||||||
|
return INVALID_GENERATED_MAPPING;
|
||||||
|
return {
|
||||||
|
line: segment[REV_GENERATED_LINE] + 1,
|
||||||
|
column: segment[REV_GENERATED_COLUMN],
|
||||||
|
};
|
||||||
|
};
|
||||||
|
eachMapping = (map, cb) => {
|
||||||
|
const decoded = decodedMappings(map);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generatedLine = i + 1;
|
||||||
|
const generatedColumn = seg[0];
|
||||||
|
let source = null;
|
||||||
|
let originalLine = null;
|
||||||
|
let originalColumn = null;
|
||||||
|
let name = null;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = resolvedSources[seg[1]];
|
||||||
|
originalLine = seg[2] + 1;
|
||||||
|
originalColumn = seg[3];
|
||||||
|
}
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names[seg[4]];
|
||||||
|
cb({
|
||||||
|
generatedLine,
|
||||||
|
generatedColumn,
|
||||||
|
source,
|
||||||
|
originalLine,
|
||||||
|
originalColumn,
|
||||||
|
name,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
presortedDecodedMap = (map, mapUrl) => {
|
||||||
|
const clone = Object.assign({}, map);
|
||||||
|
clone.mappings = [];
|
||||||
|
const tracer = new TraceMap(clone, mapUrl);
|
||||||
|
tracer._decoded = map.mappings;
|
||||||
|
return tracer;
|
||||||
|
};
|
||||||
|
decodedMap = (map) => {
|
||||||
|
return {
|
||||||
|
version: 3,
|
||||||
|
file: map.file,
|
||||||
|
names: map.names,
|
||||||
|
sourceRoot: map.sourceRoot,
|
||||||
|
sources: map.sources,
|
||||||
|
sourcesContent: map.sourcesContent,
|
||||||
|
mappings: decodedMappings(map),
|
||||||
|
};
|
||||||
|
};
|
||||||
|
encodedMap = (map) => {
|
||||||
|
return {
|
||||||
|
version: 3,
|
||||||
|
file: map.file,
|
||||||
|
names: map.names,
|
||||||
|
sourceRoot: map.sourceRoot,
|
||||||
|
sources: map.sources,
|
||||||
|
sourcesContent: map.sourcesContent,
|
||||||
|
mappings: encodedMappings(map),
|
||||||
|
};
|
||||||
|
};
|
||||||
|
})();
|
||||||
function traceSegmentInternal(segments, memo, line, column, bias) {
|
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||||
let index = memoizedBinarySearch(segments, column, memo, line);
|
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||||
if (found) {
|
if (found) {
|
||||||
@ -519,62 +506,9 @@ function traceSegmentInternal(segments, memo, line, column, bias) {
|
|||||||
else if (bias === LEAST_UPPER_BOUND)
|
else if (bias === LEAST_UPPER_BOUND)
|
||||||
index++;
|
index++;
|
||||||
if (index === -1 || index === segments.length)
|
if (index === -1 || index === segments.length)
|
||||||
return -1;
|
return null;
|
||||||
return index;
|
return segments[index];
|
||||||
}
|
|
||||||
function sliceGeneratedPositions(segments, memo, line, column, bias) {
|
|
||||||
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
|
|
||||||
// We ignored the bias when tracing the segment so that we're guarnateed to find the first (in
|
|
||||||
// insertion order) segment that matched. Even if we did respect the bias when tracing, we would
|
|
||||||
// still need to call `lowerBound()` to find the first segment, which is slower than just looking
|
|
||||||
// for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
|
|
||||||
// binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
|
|
||||||
// match LEAST_UPPER_BOUND.
|
|
||||||
if (!found && bias === LEAST_UPPER_BOUND)
|
|
||||||
min++;
|
|
||||||
if (min === -1 || min === segments.length)
|
|
||||||
return [];
|
|
||||||
// We may have found the segment that started at an earlier column. If this is the case, then we
|
|
||||||
// need to slice all generated segments that match _that_ column, because all such segments span
|
|
||||||
// to our desired column.
|
|
||||||
const matchedColumn = found ? column : segments[min][COLUMN];
|
|
||||||
// The binary search is not guaranteed to find the lower bound when a match wasn't found.
|
|
||||||
if (!found)
|
|
||||||
min = lowerBound(segments, matchedColumn, min);
|
|
||||||
const max = upperBound(segments, matchedColumn, min);
|
|
||||||
const result = [];
|
|
||||||
for (; min <= max; min++) {
|
|
||||||
const segment = segments[min];
|
|
||||||
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
function generatedPosition(map, source, line, column, bias, all) {
|
|
||||||
var _a;
|
|
||||||
line--;
|
|
||||||
if (line < 0)
|
|
||||||
throw new Error(LINE_GTR_ZERO);
|
|
||||||
if (column < 0)
|
|
||||||
throw new Error(COL_GTR_EQ_ZERO);
|
|
||||||
const { sources, resolvedSources } = map;
|
|
||||||
let sourceIndex = sources.indexOf(source);
|
|
||||||
if (sourceIndex === -1)
|
|
||||||
sourceIndex = resolvedSources.indexOf(source);
|
|
||||||
if (sourceIndex === -1)
|
|
||||||
return all ? [] : GMapping(null, null);
|
|
||||||
const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
|
|
||||||
const segments = generated[sourceIndex][line];
|
|
||||||
if (segments == null)
|
|
||||||
return all ? [] : GMapping(null, null);
|
|
||||||
const memo = cast(map)._bySourceMemos[sourceIndex];
|
|
||||||
if (all)
|
|
||||||
return sliceGeneratedPositions(segments, memo, line, column, bias);
|
|
||||||
const index = traceSegmentInternal(segments, memo, line, column, bias);
|
|
||||||
if (index === -1)
|
|
||||||
return GMapping(null, null);
|
|
||||||
const segment = segments[index];
|
|
||||||
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, allGeneratedPositionsFor, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, isIgnored, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment };
|
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, originalPositionFor, presortedDecodedMap, traceSegment };
|
||||||
//# sourceMappingURL=trace-mapping.mjs.map
|
//# sourceMappingURL=trace-mapping.mjs.map
|
||||||
|
|||||||
2
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
2
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
File diff suppressed because one or more lines are too long
578
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
578
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
@ -4,13 +4,17 @@
|
|||||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
|
||||||
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';
|
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';
|
||||||
|
|
||||||
|
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
|
||||||
|
|
||||||
|
var resolveUri__default = /*#__PURE__*/_interopDefaultLegacy(resolveUri);
|
||||||
|
|
||||||
function resolve(input, base) {
|
function resolve(input, base) {
|
||||||
// The base is always treated as a directory, if it's not empty.
|
// The base is always treated as a directory, if it's not empty.
|
||||||
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||||
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||||
if (base && !base.endsWith('/'))
|
if (base && !base.endsWith('/'))
|
||||||
base += '/';
|
base += '/';
|
||||||
return resolveUri(input, base);
|
return resolveUri__default["default"](input, base);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -104,14 +108,14 @@
|
|||||||
return low - 1;
|
return low - 1;
|
||||||
}
|
}
|
||||||
function upperBound(haystack, needle, index) {
|
function upperBound(haystack, needle, index) {
|
||||||
for (let i = index + 1; i < haystack.length; index = i++) {
|
for (let i = index + 1; i < haystack.length; i++, index++) {
|
||||||
if (haystack[i][COLUMN] !== needle)
|
if (haystack[i][COLUMN] !== needle)
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
return index;
|
return index;
|
||||||
}
|
}
|
||||||
function lowerBound(haystack, needle, index) {
|
function lowerBound(haystack, needle, index) {
|
||||||
for (let i = index - 1; i >= 0; index = i--) {
|
for (let i = index - 1; i >= 0; i--, index--) {
|
||||||
if (haystack[i][COLUMN] !== needle)
|
if (haystack[i][COLUMN] !== needle)
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@ -170,9 +174,8 @@
|
|||||||
// segment should go. Either way, we want to insert after that. And there may be multiple
|
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||||
// generated segments associated with an original location, so there may need to move several
|
// generated segments associated with an original location, so there may need to move several
|
||||||
// indexes before we find where we need to insert.
|
// indexes before we find where we need to insert.
|
||||||
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||||
memo.lastIndex = ++index;
|
insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
|
||||||
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return sources;
|
return sources;
|
||||||
@ -193,16 +196,22 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
const AnyMap = function (map, mapUrl) {
|
const AnyMap = function (map, mapUrl) {
|
||||||
const parsed = parse(map);
|
const parsed = typeof map === 'string' ? JSON.parse(map) : map;
|
||||||
if (!('sections' in parsed)) {
|
if (!('sections' in parsed))
|
||||||
return new TraceMap(parsed, mapUrl);
|
return new TraceMap(parsed, mapUrl);
|
||||||
}
|
|
||||||
const mappings = [];
|
const mappings = [];
|
||||||
const sources = [];
|
const sources = [];
|
||||||
const sourcesContent = [];
|
const sourcesContent = [];
|
||||||
const names = [];
|
const names = [];
|
||||||
const ignoreList = [];
|
const { sections } = parsed;
|
||||||
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
|
let i = 0;
|
||||||
|
for (; i < sections.length - 1; i++) {
|
||||||
|
const no = sections[i + 1].offset;
|
||||||
|
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, no.line, no.column);
|
||||||
|
}
|
||||||
|
if (sections.length > 0) {
|
||||||
|
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, Infinity, Infinity);
|
||||||
|
}
|
||||||
const joined = {
|
const joined = {
|
||||||
version: 3,
|
version: 3,
|
||||||
file: parsed.file,
|
file: parsed.file,
|
||||||
@ -210,73 +219,42 @@
|
|||||||
sources,
|
sources,
|
||||||
sourcesContent,
|
sourcesContent,
|
||||||
mappings,
|
mappings,
|
||||||
ignoreList,
|
|
||||||
};
|
};
|
||||||
return presortedDecodedMap(joined);
|
return exports.presortedDecodedMap(joined);
|
||||||
};
|
};
|
||||||
function parse(map) {
|
function addSection(section, mapUrl, mappings, sources, sourcesContent, names, stopLine, stopColumn) {
|
||||||
return typeof map === 'string' ? JSON.parse(map) : map;
|
const map = AnyMap(section.map, mapUrl);
|
||||||
}
|
const { line: lineOffset, column: columnOffset } = section.offset;
|
||||||
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
|
||||||
const { sections } = input;
|
|
||||||
for (let i = 0; i < sections.length; i++) {
|
|
||||||
const { map, offset } = sections[i];
|
|
||||||
let sl = stopLine;
|
|
||||||
let sc = stopColumn;
|
|
||||||
if (i + 1 < sections.length) {
|
|
||||||
const nextOffset = sections[i + 1].offset;
|
|
||||||
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
|
||||||
if (sl === stopLine) {
|
|
||||||
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
|
||||||
}
|
|
||||||
else if (sl < stopLine) {
|
|
||||||
sc = columnOffset + nextOffset.column;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
|
||||||
const parsed = parse(input);
|
|
||||||
if ('sections' in parsed)
|
|
||||||
return recurse(...arguments);
|
|
||||||
const map = new TraceMap(parsed, mapUrl);
|
|
||||||
const sourcesOffset = sources.length;
|
const sourcesOffset = sources.length;
|
||||||
const namesOffset = names.length;
|
const namesOffset = names.length;
|
||||||
const decoded = decodedMappings(map);
|
const decoded = exports.decodedMappings(map);
|
||||||
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
|
const { resolvedSources } = map;
|
||||||
append(sources, resolvedSources);
|
append(sources, resolvedSources);
|
||||||
|
append(sourcesContent, map.sourcesContent || fillSourcesContent(resolvedSources.length));
|
||||||
append(names, map.names);
|
append(names, map.names);
|
||||||
if (contents)
|
// If this section jumps forwards several lines, we need to add lines to the output mappings catch up.
|
||||||
append(sourcesContent, contents);
|
for (let i = mappings.length; i <= lineOffset; i++)
|
||||||
else
|
mappings.push([]);
|
||||||
for (let i = 0; i < resolvedSources.length; i++)
|
// We can only add so many lines before we step into the range that the next section's map
|
||||||
sourcesContent.push(null);
|
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||||
if (ignores)
|
// they've crossed into the column range.
|
||||||
for (let i = 0; i < ignores.length; i++)
|
const stopI = stopLine - lineOffset;
|
||||||
ignoreList.push(ignores[i] + sourcesOffset);
|
const len = Math.min(decoded.length, stopI + 1);
|
||||||
for (let i = 0; i < decoded.length; i++) {
|
for (let i = 0; i < len; i++) {
|
||||||
const lineI = lineOffset + i;
|
const line = decoded[i];
|
||||||
// We can only add so many lines before we step into the range that the next section's map
|
// On the 0th loop, the line will already exist due to a previous section, or the line catch up
|
||||||
// controls. When we get to the last line, then we'll start checking the segments to see if
|
// loop above.
|
||||||
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
const out = i === 0 ? mappings[lineOffset] : (mappings[lineOffset + i] = []);
|
||||||
// still need to check that we don't overstep lines, too.
|
|
||||||
if (lineI > stopLine)
|
|
||||||
return;
|
|
||||||
// The out line may already exist in mappings (if we're continuing the line started by a
|
|
||||||
// previous section). Or, we may have jumped ahead several lines to start this section.
|
|
||||||
const out = getLine(mappings, lineI);
|
|
||||||
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||||
// map can be multiple lines), it doesn't.
|
// map can be multiple lines), it doesn't.
|
||||||
const cOffset = i === 0 ? columnOffset : 0;
|
const cOffset = i === 0 ? columnOffset : 0;
|
||||||
const line = decoded[i];
|
|
||||||
for (let j = 0; j < line.length; j++) {
|
for (let j = 0; j < line.length; j++) {
|
||||||
const seg = line[j];
|
const seg = line[j];
|
||||||
const column = cOffset + seg[COLUMN];
|
const column = cOffset + seg[COLUMN];
|
||||||
// If this segment steps into the column range that the next section's map controls, we need
|
// If this segment steps into the column range that the next section's map controls, we need
|
||||||
// to stop early.
|
// to stop early.
|
||||||
if (lineI === stopLine && column >= stopColumn)
|
if (i === stopI && column >= stopColumn)
|
||||||
return;
|
break;
|
||||||
if (seg.length === 1) {
|
if (seg.length === 1) {
|
||||||
out.push([column]);
|
out.push([column]);
|
||||||
continue;
|
continue;
|
||||||
@ -284,9 +262,11 @@
|
|||||||
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||||
const sourceLine = seg[SOURCE_LINE];
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
const sourceColumn = seg[SOURCE_COLUMN];
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
out.push(seg.length === 4
|
if (seg.length === 4) {
|
||||||
? [column, sourcesIndex, sourceLine, sourceColumn]
|
out.push([column, sourcesIndex, sourceLine, sourceColumn]);
|
||||||
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
continue;
|
||||||
|
}
|
||||||
|
out.push([column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -294,32 +274,100 @@
|
|||||||
for (let i = 0; i < other.length; i++)
|
for (let i = 0; i < other.length; i++)
|
||||||
arr.push(other[i]);
|
arr.push(other[i]);
|
||||||
}
|
}
|
||||||
function getLine(arr, index) {
|
// Sourcemaps don't need to have sourcesContent, and if they don't, we need to create an array of
|
||||||
for (let i = arr.length; i <= index; i++)
|
// equal length to the sources. This is because the sources and sourcesContent are paired arrays,
|
||||||
arr[i] = [];
|
// where `sourcesContent[i]` is the content of the `sources[i]` file. If we didn't, then joined
|
||||||
return arr[index];
|
// sourcemap would desynchronize the sources/contents.
|
||||||
|
function fillSourcesContent(len) {
|
||||||
|
const sourcesContent = [];
|
||||||
|
for (let i = 0; i < len; i++)
|
||||||
|
sourcesContent[i] = null;
|
||||||
|
return sourcesContent;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const INVALID_ORIGINAL_MAPPING = Object.freeze({
|
||||||
|
source: null,
|
||||||
|
line: null,
|
||||||
|
column: null,
|
||||||
|
name: null,
|
||||||
|
});
|
||||||
|
const INVALID_GENERATED_MAPPING = Object.freeze({
|
||||||
|
line: null,
|
||||||
|
column: null,
|
||||||
|
});
|
||||||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||||
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||||
const LEAST_UPPER_BOUND = -1;
|
const LEAST_UPPER_BOUND = -1;
|
||||||
const GREATEST_LOWER_BOUND = 1;
|
const GREATEST_LOWER_BOUND = 1;
|
||||||
|
/**
|
||||||
|
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
exports.encodedMappings = void 0;
|
||||||
|
/**
|
||||||
|
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
exports.decodedMappings = void 0;
|
||||||
|
/**
|
||||||
|
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||||
|
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
exports.traceSegment = void 0;
|
||||||
|
/**
|
||||||
|
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||||
|
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||||
|
* `source-map` library.
|
||||||
|
*/
|
||||||
|
exports.originalPositionFor = void 0;
|
||||||
|
/**
|
||||||
|
* Finds the source/line/column directly after the mapping returned by originalPositionFor, provided
|
||||||
|
* the found mapping is from the same source and line as the originalPositionFor mapping.
|
||||||
|
*
|
||||||
|
* Eg, in the code `let id = 1`, `originalPositionAfter` could find the mapping associated with `1`
|
||||||
|
* using the same needle that would return `id` when calling `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
exports.generatedPositionFor = void 0;
|
||||||
|
/**
|
||||||
|
* Iterates each mapping in generated position order.
|
||||||
|
*/
|
||||||
|
exports.eachMapping = void 0;
|
||||||
|
/**
|
||||||
|
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||||
|
* maps.
|
||||||
|
*/
|
||||||
|
exports.presortedDecodedMap = void 0;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
exports.decodedMap = void 0;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
exports.encodedMap = void 0;
|
||||||
class TraceMap {
|
class TraceMap {
|
||||||
constructor(map, mapUrl) {
|
constructor(map, mapUrl) {
|
||||||
|
this._decodedMemo = memoizedState();
|
||||||
|
this._bySources = undefined;
|
||||||
|
this._bySourceMemos = undefined;
|
||||||
const isString = typeof map === 'string';
|
const isString = typeof map === 'string';
|
||||||
if (!isString && map._decodedMemo)
|
if (!isString && map.constructor === TraceMap)
|
||||||
return map;
|
return map;
|
||||||
const parsed = (isString ? JSON.parse(map) : map);
|
const parsed = (isString ? JSON.parse(map) : map);
|
||||||
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||||
this.version = version;
|
this.version = version;
|
||||||
this.file = file;
|
this.file = file;
|
||||||
this.names = names || [];
|
this.names = names;
|
||||||
this.sourceRoot = sourceRoot;
|
this.sourceRoot = sourceRoot;
|
||||||
this.sources = sources;
|
this.sources = sources;
|
||||||
this.sourcesContent = sourcesContent;
|
this.sourcesContent = sourcesContent;
|
||||||
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
|
if (sourceRoot || mapUrl) {
|
||||||
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||||
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this.resolvedSources = sources.map((s) => s || '');
|
||||||
|
}
|
||||||
const { mappings } = parsed;
|
const { mappings } = parsed;
|
||||||
if (typeof mappings === 'string') {
|
if (typeof mappings === 'string') {
|
||||||
this._encoded = mappings;
|
this._encoded = mappings;
|
||||||
@ -329,191 +377,134 @@
|
|||||||
this._encoded = undefined;
|
this._encoded = undefined;
|
||||||
this._decoded = maybeSort(mappings, isString);
|
this._decoded = maybeSort(mappings, isString);
|
||||||
}
|
}
|
||||||
this._decodedMemo = memoizedState();
|
|
||||||
this._bySources = undefined;
|
|
||||||
this._bySourceMemos = undefined;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
/**
|
(() => {
|
||||||
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
exports.encodedMappings = (map) => {
|
||||||
* with public access modifiers.
|
var _a;
|
||||||
*/
|
return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = sourcemapCodec.encode(map._decoded)));
|
||||||
function cast(map) {
|
|
||||||
return map;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
|
||||||
*/
|
|
||||||
function encodedMappings(map) {
|
|
||||||
var _a;
|
|
||||||
var _b;
|
|
||||||
return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = sourcemapCodec.encode(cast(map)._decoded)));
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
|
||||||
*/
|
|
||||||
function decodedMappings(map) {
|
|
||||||
var _a;
|
|
||||||
return ((_a = cast(map))._decoded || (_a._decoded = sourcemapCodec.decode(cast(map)._encoded)));
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* A low-level API to find the segment associated with a generated line/column (think, from a
|
|
||||||
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
|
||||||
*/
|
|
||||||
function traceSegment(map, line, column) {
|
|
||||||
const decoded = decodedMappings(map);
|
|
||||||
// It's common for parent source maps to have pointers to lines that have no
|
|
||||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
|
||||||
if (line >= decoded.length)
|
|
||||||
return null;
|
|
||||||
const segments = decoded[line];
|
|
||||||
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
|
||||||
return index === -1 ? null : segments[index];
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* A higher-level API to find the source/line/column associated with a generated line/column
|
|
||||||
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
|
||||||
* `source-map` library.
|
|
||||||
*/
|
|
||||||
function originalPositionFor(map, needle) {
|
|
||||||
let { line, column, bias } = needle;
|
|
||||||
line--;
|
|
||||||
if (line < 0)
|
|
||||||
throw new Error(LINE_GTR_ZERO);
|
|
||||||
if (column < 0)
|
|
||||||
throw new Error(COL_GTR_EQ_ZERO);
|
|
||||||
const decoded = decodedMappings(map);
|
|
||||||
// It's common for parent source maps to have pointers to lines that have no
|
|
||||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
|
||||||
if (line >= decoded.length)
|
|
||||||
return OMapping(null, null, null, null);
|
|
||||||
const segments = decoded[line];
|
|
||||||
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
|
||||||
if (index === -1)
|
|
||||||
return OMapping(null, null, null, null);
|
|
||||||
const segment = segments[index];
|
|
||||||
if (segment.length === 1)
|
|
||||||
return OMapping(null, null, null, null);
|
|
||||||
const { names, resolvedSources } = map;
|
|
||||||
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Finds the generated line/column position of the provided source/line/column source position.
|
|
||||||
*/
|
|
||||||
function generatedPositionFor(map, needle) {
|
|
||||||
const { source, line, column, bias } = needle;
|
|
||||||
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Finds all generated line/column positions of the provided source/line/column source position.
|
|
||||||
*/
|
|
||||||
function allGeneratedPositionsFor(map, needle) {
|
|
||||||
const { source, line, column, bias } = needle;
|
|
||||||
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
|
||||||
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Iterates each mapping in generated position order.
|
|
||||||
*/
|
|
||||||
function eachMapping(map, cb) {
|
|
||||||
const decoded = decodedMappings(map);
|
|
||||||
const { names, resolvedSources } = map;
|
|
||||||
for (let i = 0; i < decoded.length; i++) {
|
|
||||||
const line = decoded[i];
|
|
||||||
for (let j = 0; j < line.length; j++) {
|
|
||||||
const seg = line[j];
|
|
||||||
const generatedLine = i + 1;
|
|
||||||
const generatedColumn = seg[0];
|
|
||||||
let source = null;
|
|
||||||
let originalLine = null;
|
|
||||||
let originalColumn = null;
|
|
||||||
let name = null;
|
|
||||||
if (seg.length !== 1) {
|
|
||||||
source = resolvedSources[seg[1]];
|
|
||||||
originalLine = seg[2] + 1;
|
|
||||||
originalColumn = seg[3];
|
|
||||||
}
|
|
||||||
if (seg.length === 5)
|
|
||||||
name = names[seg[4]];
|
|
||||||
cb({
|
|
||||||
generatedLine,
|
|
||||||
generatedColumn,
|
|
||||||
source,
|
|
||||||
originalLine,
|
|
||||||
originalColumn,
|
|
||||||
name,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function sourceIndex(map, source) {
|
|
||||||
const { sources, resolvedSources } = map;
|
|
||||||
let index = sources.indexOf(source);
|
|
||||||
if (index === -1)
|
|
||||||
index = resolvedSources.indexOf(source);
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
|
||||||
*/
|
|
||||||
function sourceContentFor(map, source) {
|
|
||||||
const { sourcesContent } = map;
|
|
||||||
if (sourcesContent == null)
|
|
||||||
return null;
|
|
||||||
const index = sourceIndex(map, source);
|
|
||||||
return index === -1 ? null : sourcesContent[index];
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Determines if the source is marked to ignore by the source map.
|
|
||||||
*/
|
|
||||||
function isIgnored(map, source) {
|
|
||||||
const { ignoreList } = map;
|
|
||||||
if (ignoreList == null)
|
|
||||||
return false;
|
|
||||||
const index = sourceIndex(map, source);
|
|
||||||
return index === -1 ? false : ignoreList.includes(index);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
|
||||||
* maps.
|
|
||||||
*/
|
|
||||||
function presortedDecodedMap(map, mapUrl) {
|
|
||||||
const tracer = new TraceMap(clone(map, []), mapUrl);
|
|
||||||
cast(tracer)._decoded = map.mappings;
|
|
||||||
return tracer;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
|
||||||
* a sourcemap, or to JSON.stringify.
|
|
||||||
*/
|
|
||||||
function decodedMap(map) {
|
|
||||||
return clone(map, decodedMappings(map));
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
|
||||||
* a sourcemap, or to JSON.stringify.
|
|
||||||
*/
|
|
||||||
function encodedMap(map) {
|
|
||||||
return clone(map, encodedMappings(map));
|
|
||||||
}
|
|
||||||
function clone(map, mappings) {
|
|
||||||
return {
|
|
||||||
version: map.version,
|
|
||||||
file: map.file,
|
|
||||||
names: map.names,
|
|
||||||
sourceRoot: map.sourceRoot,
|
|
||||||
sources: map.sources,
|
|
||||||
sourcesContent: map.sourcesContent,
|
|
||||||
mappings,
|
|
||||||
ignoreList: map.ignoreList || map.x_google_ignoreList,
|
|
||||||
};
|
};
|
||||||
}
|
exports.decodedMappings = (map) => {
|
||||||
function OMapping(source, line, column, name) {
|
return (map._decoded || (map._decoded = sourcemapCodec.decode(map._encoded)));
|
||||||
return { source, line, column, name };
|
};
|
||||||
}
|
exports.traceSegment = (map, line, column) => {
|
||||||
function GMapping(line, column) {
|
const decoded = exports.decodedMappings(map);
|
||||||
return { line, column };
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
}
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return null;
|
||||||
|
return traceSegmentInternal(decoded[line], map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||||
|
};
|
||||||
|
exports.originalPositionFor = (map, { line, column, bias }) => {
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const decoded = exports.decodedMappings(map);
|
||||||
|
// It's common for parent source maps to have pointers to lines that have no
|
||||||
|
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||||
|
if (line >= decoded.length)
|
||||||
|
return INVALID_ORIGINAL_MAPPING;
|
||||||
|
const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||||
|
if (segment == null)
|
||||||
|
return INVALID_ORIGINAL_MAPPING;
|
||||||
|
if (segment.length == 1)
|
||||||
|
return INVALID_ORIGINAL_MAPPING;
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
return {
|
||||||
|
source: resolvedSources[segment[SOURCES_INDEX]],
|
||||||
|
line: segment[SOURCE_LINE] + 1,
|
||||||
|
column: segment[SOURCE_COLUMN],
|
||||||
|
name: segment.length === 5 ? names[segment[NAMES_INDEX]] : null,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
exports.generatedPositionFor = (map, { source, line, column, bias }) => {
|
||||||
|
line--;
|
||||||
|
if (line < 0)
|
||||||
|
throw new Error(LINE_GTR_ZERO);
|
||||||
|
if (column < 0)
|
||||||
|
throw new Error(COL_GTR_EQ_ZERO);
|
||||||
|
const { sources, resolvedSources } = map;
|
||||||
|
let sourceIndex = sources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
sourceIndex = resolvedSources.indexOf(source);
|
||||||
|
if (sourceIndex === -1)
|
||||||
|
return INVALID_GENERATED_MAPPING;
|
||||||
|
const generated = (map._bySources || (map._bySources = buildBySources(exports.decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
|
||||||
|
const memos = map._bySourceMemos;
|
||||||
|
const segments = generated[sourceIndex][line];
|
||||||
|
if (segments == null)
|
||||||
|
return INVALID_GENERATED_MAPPING;
|
||||||
|
const segment = traceSegmentInternal(segments, memos[sourceIndex], line, column, bias || GREATEST_LOWER_BOUND);
|
||||||
|
if (segment == null)
|
||||||
|
return INVALID_GENERATED_MAPPING;
|
||||||
|
return {
|
||||||
|
line: segment[REV_GENERATED_LINE] + 1,
|
||||||
|
column: segment[REV_GENERATED_COLUMN],
|
||||||
|
};
|
||||||
|
};
|
||||||
|
exports.eachMapping = (map, cb) => {
|
||||||
|
const decoded = exports.decodedMappings(map);
|
||||||
|
const { names, resolvedSources } = map;
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
const generatedLine = i + 1;
|
||||||
|
const generatedColumn = seg[0];
|
||||||
|
let source = null;
|
||||||
|
let originalLine = null;
|
||||||
|
let originalColumn = null;
|
||||||
|
let name = null;
|
||||||
|
if (seg.length !== 1) {
|
||||||
|
source = resolvedSources[seg[1]];
|
||||||
|
originalLine = seg[2] + 1;
|
||||||
|
originalColumn = seg[3];
|
||||||
|
}
|
||||||
|
if (seg.length === 5)
|
||||||
|
name = names[seg[4]];
|
||||||
|
cb({
|
||||||
|
generatedLine,
|
||||||
|
generatedColumn,
|
||||||
|
source,
|
||||||
|
originalLine,
|
||||||
|
originalColumn,
|
||||||
|
name,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
exports.presortedDecodedMap = (map, mapUrl) => {
|
||||||
|
const clone = Object.assign({}, map);
|
||||||
|
clone.mappings = [];
|
||||||
|
const tracer = new TraceMap(clone, mapUrl);
|
||||||
|
tracer._decoded = map.mappings;
|
||||||
|
return tracer;
|
||||||
|
};
|
||||||
|
exports.decodedMap = (map) => {
|
||||||
|
return {
|
||||||
|
version: 3,
|
||||||
|
file: map.file,
|
||||||
|
names: map.names,
|
||||||
|
sourceRoot: map.sourceRoot,
|
||||||
|
sources: map.sources,
|
||||||
|
sourcesContent: map.sourcesContent,
|
||||||
|
mappings: exports.decodedMappings(map),
|
||||||
|
};
|
||||||
|
};
|
||||||
|
exports.encodedMap = (map) => {
|
||||||
|
return {
|
||||||
|
version: 3,
|
||||||
|
file: map.file,
|
||||||
|
names: map.names,
|
||||||
|
sourceRoot: map.sourceRoot,
|
||||||
|
sources: map.sources,
|
||||||
|
sourcesContent: map.sourcesContent,
|
||||||
|
mappings: exports.encodedMappings(map),
|
||||||
|
};
|
||||||
|
};
|
||||||
|
})();
|
||||||
function traceSegmentInternal(segments, memo, line, column, bias) {
|
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||||
let index = memoizedBinarySearch(segments, column, memo, line);
|
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||||
if (found) {
|
if (found) {
|
||||||
@ -522,79 +513,16 @@
|
|||||||
else if (bias === LEAST_UPPER_BOUND)
|
else if (bias === LEAST_UPPER_BOUND)
|
||||||
index++;
|
index++;
|
||||||
if (index === -1 || index === segments.length)
|
if (index === -1 || index === segments.length)
|
||||||
return -1;
|
return null;
|
||||||
return index;
|
return segments[index];
|
||||||
}
|
|
||||||
function sliceGeneratedPositions(segments, memo, line, column, bias) {
|
|
||||||
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
|
|
||||||
// We ignored the bias when tracing the segment so that we're guarnateed to find the first (in
|
|
||||||
// insertion order) segment that matched. Even if we did respect the bias when tracing, we would
|
|
||||||
// still need to call `lowerBound()` to find the first segment, which is slower than just looking
|
|
||||||
// for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
|
|
||||||
// binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
|
|
||||||
// match LEAST_UPPER_BOUND.
|
|
||||||
if (!found && bias === LEAST_UPPER_BOUND)
|
|
||||||
min++;
|
|
||||||
if (min === -1 || min === segments.length)
|
|
||||||
return [];
|
|
||||||
// We may have found the segment that started at an earlier column. If this is the case, then we
|
|
||||||
// need to slice all generated segments that match _that_ column, because all such segments span
|
|
||||||
// to our desired column.
|
|
||||||
const matchedColumn = found ? column : segments[min][COLUMN];
|
|
||||||
// The binary search is not guaranteed to find the lower bound when a match wasn't found.
|
|
||||||
if (!found)
|
|
||||||
min = lowerBound(segments, matchedColumn, min);
|
|
||||||
const max = upperBound(segments, matchedColumn, min);
|
|
||||||
const result = [];
|
|
||||||
for (; min <= max; min++) {
|
|
||||||
const segment = segments[min];
|
|
||||||
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
function generatedPosition(map, source, line, column, bias, all) {
|
|
||||||
var _a;
|
|
||||||
line--;
|
|
||||||
if (line < 0)
|
|
||||||
throw new Error(LINE_GTR_ZERO);
|
|
||||||
if (column < 0)
|
|
||||||
throw new Error(COL_GTR_EQ_ZERO);
|
|
||||||
const { sources, resolvedSources } = map;
|
|
||||||
let sourceIndex = sources.indexOf(source);
|
|
||||||
if (sourceIndex === -1)
|
|
||||||
sourceIndex = resolvedSources.indexOf(source);
|
|
||||||
if (sourceIndex === -1)
|
|
||||||
return all ? [] : GMapping(null, null);
|
|
||||||
const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
|
|
||||||
const segments = generated[sourceIndex][line];
|
|
||||||
if (segments == null)
|
|
||||||
return all ? [] : GMapping(null, null);
|
|
||||||
const memo = cast(map)._bySourceMemos[sourceIndex];
|
|
||||||
if (all)
|
|
||||||
return sliceGeneratedPositions(segments, memo, line, column, bias);
|
|
||||||
const index = traceSegmentInternal(segments, memo, line, column, bias);
|
|
||||||
if (index === -1)
|
|
||||||
return GMapping(null, null);
|
|
||||||
const segment = segments[index];
|
|
||||||
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
 exports.AnyMap = AnyMap;
 exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
 exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
 exports.TraceMap = TraceMap;
-exports.allGeneratedPositionsFor = allGeneratedPositionsFor;
-exports.decodedMap = decodedMap;
-exports.decodedMappings = decodedMappings;
-exports.eachMapping = eachMapping;
-exports.encodedMap = encodedMap;
-exports.encodedMappings = encodedMappings;
-exports.generatedPositionFor = generatedPositionFor;
-exports.isIgnored = isIgnored;
-exports.originalPositionFor = originalPositionFor;
-exports.presortedDecodedMap = presortedDecodedMap;
-exports.sourceContentFor = sourceContentFor;
-exports.traceSegment = traceSegment;
+Object.defineProperty(exports, '__esModule', { value: true });

 }));
 //# sourceMappingURL=trace-mapping.umd.js.map
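Both builds shown here export `eachMapping`, which walks every segment in generated-position order and hands the callback the fields declared in the types below (`generatedLine`/`originalLine` are 1-based, the columns 0-based). A minimal sketch of calling it; the map literal is made up purely for illustration:

```typescript
import { TraceMap, eachMapping } from '@jridgewell/trace-mapping';

// Made-up input; any encoded or decoded source map works here.
const tracer = new TraceMap({
  version: 3,
  sources: ['src/index.ts'],
  names: [],
  mappings: 'AAAA;AACA',
});

// Mappings are visited in generated-position order; unmapped segments have
// null source/originalLine/originalColumn/name.
eachMapping(tracer, (m) => {
  console.log(`${m.generatedLine}:${m.generatedColumn} <- ${m.source}:${m.originalLine}:${m.originalColumn}`);
});
```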
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map (2 changes, generated, vendored)
File diff suppressed because one or more lines are too long

node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts (2 changes, generated, vendored)
@@ -1,6 +1,6 @@
 import { TraceMap } from './trace-mapping';
 import type { SectionedSourceMapInput } from './types';
-type AnyMap = {
+declare type AnyMap = {
     new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
     (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
 };

node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts (2 changes, generated, vendored)
@@ -1,5 +1,5 @@
 import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
-export type MemoState = {
+export declare type MemoState = {
     lastKey: number;
     lastNeedle: number;
     lastIndex: number;

node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts (2 changes, generated, vendored)
@@ -1,6 +1,6 @@
 import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
 import type { MemoState } from './binary-search';
-export type Source = {
+export declare type Source = {
     __proto__: null;
     [line: number]: Exclude<ReverseSegment, [number]>[];
 };

node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts (16 changes, generated, vendored)
@@ -1,11 +1,11 @@
-type GeneratedColumn = number;
-type SourcesIndex = number;
-type SourceLine = number;
-type SourceColumn = number;
-type NamesIndex = number;
-type GeneratedLine = number;
-export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
-export type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
+declare type GeneratedColumn = number;
+declare type SourcesIndex = number;
+declare type SourceLine = number;
+declare type SourceColumn = number;
+declare type NamesIndex = number;
+declare type GeneratedLine = number;
+export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
+export declare type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
 export declare const COLUMN = 0;
 export declare const SOURCES_INDEX = 1;
 export declare const SOURCE_LINE = 2;
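A segment is just a tuple indexed in the order those type aliases list. A small sketch of pulling one segment out with `traceSegment` (exported by the package, as the declarations that follow show); the map literal is invented for the example:

```typescript
import { TraceMap, traceSegment } from '@jridgewell/trace-mapping';

// Made-up one-segment map: 'AAAA' decodes to generated column 0, source 0, line 0, column 0.
const tracer = new TraceMap({
  version: 3,
  sources: ['src/index.ts'],
  names: [],
  mappings: 'AAAA',
});

// traceSegment takes 0-based generated line/column and returns a segment tuple or null.
const seg = traceSegment(tracer, 0, 0);
if (seg !== null && seg.length !== 1) {
  // Tuple layout per sourcemap-segment.d.ts above:
  // [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex?]
  console.log('source index:', seg[1], 'original line:', seg[2], 'original column:', seg[3]);
}
```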
node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts (107 changes, generated, vendored)
@@ -1,9 +1,57 @@
 import type { SourceMapSegment } from './sourcemap-segment';
 import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
 export type { SourceMapSegment } from './sourcemap-segment';
-export type { SourceMap, DecodedSourceMap, EncodedSourceMap, Section, SectionedSourceMap, SourceMapV3, Bias, EachMapping, GeneratedMapping, InvalidGeneratedMapping, InvalidOriginalMapping, Needle, OriginalMapping, OriginalMapping as Mapping, SectionedSourceMapInput, SourceMapInput, SourceNeedle, XInput, EncodedSourceMapXInput, DecodedSourceMapXInput, SectionedSourceMapXInput, SectionXInput, } from './types';
+export type { SourceMapInput, SectionedSourceMapInput, DecodedSourceMap, EncodedSourceMap, SectionedSourceMap, InvalidOriginalMapping, OriginalMapping as Mapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, EachMapping, } from './types';
 export declare const LEAST_UPPER_BOUND = -1;
 export declare const GREATEST_LOWER_BOUND = 1;
+/**
+ * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
+ */
+export declare let encodedMappings: (map: TraceMap) => EncodedSourceMap['mappings'];
+/**
+ * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
+ */
+export declare let decodedMappings: (map: TraceMap) => Readonly<DecodedSourceMap['mappings']>;
+/**
+ * A low-level API to find the segment associated with a generated line/column (think, from a
+ * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
+ */
+export declare let traceSegment: (map: TraceMap, line: number, column: number) => Readonly<SourceMapSegment> | null;
+/**
+ * A higher-level API to find the source/line/column associated with a generated line/column
+ * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
+ * `source-map` library.
+ */
+export declare let originalPositionFor: (map: TraceMap, needle: Needle) => OriginalMapping | InvalidOriginalMapping;
+/**
+ * Finds the source/line/column directly after the mapping returned by originalPositionFor, provided
+ * the found mapping is from the same source and line as the originalPositionFor mapping.
+ *
+ * Eg, in the code `let id = 1`, `originalPositionAfter` could find the mapping associated with `1`
+ * using the same needle that would return `id` when calling `originalPositionFor`.
+ */
+export declare let generatedPositionFor: (map: TraceMap, needle: SourceNeedle) => GeneratedMapping | InvalidGeneratedMapping;
+/**
+ * Iterates each mapping in generated position order.
+ */
+export declare let eachMapping: (map: TraceMap, cb: (mapping: EachMapping) => void) => void;
+/**
+ * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
+ * maps.
+ */
+export declare let presortedDecodedMap: (map: DecodedSourceMap, mapUrl?: string) => TraceMap;
+/**
+ * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
+ * a sourcemap, or to JSON.stringify.
+ */
+export declare let decodedMap: (map: TraceMap) => Omit<DecodedSourceMap, 'mappings'> & {
+    mappings: readonly SourceMapSegment[][];
+};
+/**
+ * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
+ * a sourcemap, or to JSON.stringify.
+ */
+export declare let encodedMap: (map: TraceMap) => EncodedSourceMap;
 export { AnyMap } from './any-map';
 export declare class TraceMap implements SourceMap {
     version: SourceMapV3['version'];
@@ -12,7 +60,6 @@ export declare class TraceMap implements SourceMap {
     sourceRoot: SourceMapV3['sourceRoot'];
     sources: SourceMapV3['sources'];
     sourcesContent: SourceMapV3['sourcesContent'];
-    ignoreList: SourceMapV3['ignoreList'];
     resolvedSources: string[];
     private _encoded;
     private _decoded;
@@ -21,59 +68,3 @@ export declare class TraceMap implements SourceMap {
     private _bySourceMemos;
     constructor(map: SourceMapInput, mapUrl?: string | null);
 }
-/**
- * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
- */
-export declare function encodedMappings(map: TraceMap): EncodedSourceMap['mappings'];
-/**
- * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
- */
-export declare function decodedMappings(map: TraceMap): Readonly<DecodedSourceMap['mappings']>;
-/**
- * A low-level API to find the segment associated with a generated line/column (think, from a
- * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
- */
-export declare function traceSegment(map: TraceMap, line: number, column: number): Readonly<SourceMapSegment> | null;
-/**
- * A higher-level API to find the source/line/column associated with a generated line/column
- * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
- * `source-map` library.
- */
-export declare function originalPositionFor(map: TraceMap, needle: Needle): OriginalMapping | InvalidOriginalMapping;
-/**
- * Finds the generated line/column position of the provided source/line/column source position.
- */
-export declare function generatedPositionFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping | InvalidGeneratedMapping;
-/**
- * Finds all generated line/column positions of the provided source/line/column source position.
- */
-export declare function allGeneratedPositionsFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping[];
-/**
- * Iterates each mapping in generated position order.
- */
-export declare function eachMapping(map: TraceMap, cb: (mapping: EachMapping) => void): void;
-/**
- * Retrieves the source content for a particular source, if its found. Returns null if not.
- */
-export declare function sourceContentFor(map: TraceMap, source: string): string | null;
-/**
- * Determines if the source is marked to ignore by the source map.
- */
-export declare function isIgnored(map: TraceMap, source: string): boolean;
-/**
- * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
- * maps.
- */
-export declare function presortedDecodedMap(map: DecodedSourceMap, mapUrl?: string): TraceMap;
-/**
- * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
- * a sourcemap, or to JSON.stringify.
- */
-export declare function decodedMap(map: TraceMap): Omit<DecodedSourceMap, 'mappings'> & {
-    mappings: readonly SourceMapSegment[][];
-};
-/**
- * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
- * a sourcemap, or to JSON.stringify.
- */
-export declare function encodedMap(map: TraceMap): EncodedSourceMap;
node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts (38 changes, generated, vendored)
@@ -1,5 +1,5 @@
 import type { SourceMapSegment } from './sourcemap-segment';
-import type { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap } from './trace-mapping';
+import type { TraceMap } from './trace-mapping';
 export interface SourceMapV3 {
     file?: string | null;
     names: string[];
@@ -7,7 +7,6 @@ export interface SourceMapV3 {
     sources: (string | null)[];
     sourcesContent?: (string | null)[];
     version: 3;
-    ignoreList?: number[];
 }
 export interface EncodedSourceMap extends SourceMapV3 {
     mappings: string;
@@ -27,52 +26,40 @@ export interface SectionedSourceMap {
     sections: Section[];
     version: 3;
 }
-export type OriginalMapping = {
+export declare type OriginalMapping = {
     source: string | null;
     line: number;
     column: number;
     name: string | null;
 };
-export type InvalidOriginalMapping = {
+export declare type InvalidOriginalMapping = {
     source: null;
     line: null;
     column: null;
     name: null;
 };
-export type GeneratedMapping = {
+export declare type GeneratedMapping = {
     line: number;
     column: number;
 };
-export type InvalidGeneratedMapping = {
+export declare type InvalidGeneratedMapping = {
     line: null;
     column: null;
 };
-export type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND;
-export type XInput = {
-    x_google_ignoreList?: SourceMapV3['ignoreList'];
-};
-export type EncodedSourceMapXInput = EncodedSourceMap & XInput;
-export type DecodedSourceMapXInput = DecodedSourceMap & XInput;
-export type SectionedSourceMapXInput = Omit<SectionedSourceMap, 'sections'> & {
-    sections: SectionXInput[];
-};
-export type SectionXInput = Omit<Section, 'map'> & {
-    map: SectionedSourceMapInput;
-};
-export type SourceMapInput = string | EncodedSourceMapXInput | DecodedSourceMapXInput | TraceMap;
-export type SectionedSourceMapInput = SourceMapInput | SectionedSourceMapXInput;
-export type Needle = {
+export declare type SourceMapInput = string | EncodedSourceMap | DecodedSourceMap | TraceMap;
+export declare type SectionedSourceMapInput = SourceMapInput | SectionedSourceMap;
+export declare type Needle = {
     line: number;
     column: number;
-    bias?: Bias;
+    bias?: 1 | -1;
 };
-export type SourceNeedle = {
+export declare type SourceNeedle = {
     source: string;
     line: number;
     column: number;
-    bias?: Bias;
+    bias?: 1 | -1;
 };
-export type EachMapping = {
+export declare type EachMapping = {
     generatedLine: number;
     generatedColumn: number;
     source: null;
@@ -95,5 +82,4 @@ export declare abstract class SourceMap {
     sources: SourceMapV3['sources'];
     sourcesContent: SourceMapV3['sourcesContent'];
     resolvedSources: SourceMapV3['sources'];
-    ignoreList: SourceMapV3['ignoreList'];
 }
node_modules/@jridgewell/trace-mapping/package.json (59 changes, generated, vendored)
@@ -1,6 +1,6 @@
 {
   "name": "@jridgewell/trace-mapping",
-  "version": "0.3.25",
+  "version": "0.3.9",
   "description": "Trace the original position through a source map",
   "keywords": [
     "source",
@@ -8,20 +8,16 @@
   ],
   "main": "dist/trace-mapping.umd.js",
   "module": "dist/trace-mapping.mjs",
-  "types": "dist/types/trace-mapping.d.ts",
+  "typings": "dist/types/trace-mapping.d.ts",
   "files": [
     "dist"
   ],
   "exports": {
-    ".": [
-      {
-        "types": "./dist/types/trace-mapping.d.ts",
-        "browser": "./dist/trace-mapping.umd.js",
-        "require": "./dist/trace-mapping.umd.js",
-        "import": "./dist/trace-mapping.mjs"
-      },
-      "./dist/trace-mapping.umd.js"
-    ],
+    ".": {
+      "browser": "./dist/trace-mapping.umd.js",
+      "require": "./dist/trace-mapping.umd.js",
+      "import": "./dist/trace-mapping.mjs"
+    },
     "./package.json": "./package.json"
   },
   "author": "Justin Ridgewell <justin@ridgewell.name>",
@@ -33,9 +29,9 @@
   "scripts": {
     "benchmark": "run-s build:rollup benchmark:*",
     "benchmark:install": "cd benchmark && npm install",
-    "benchmark:only": "node --expose-gc benchmark/index.mjs",
+    "benchmark:only": "node benchmark/index.mjs",
     "build": "run-s -n build:*",
-    "build:rollup": "rollup -c rollup.config.mjs",
+    "build:rollup": "rollup -c rollup.config.js",
     "build:ts": "tsc --project tsconfig.build.json",
     "lint": "run-s -n lint:*",
     "lint:prettier": "npm run test:lint:prettier -- --write",
@@ -44,34 +40,31 @@
     "prepublishOnly": "npm run preversion",
     "preversion": "run-s test build",
     "test": "run-s -n test:lint test:only",
-    "test:debug": "mocha --inspect-brk",
+    "test:debug": "ava debug",
     "test:lint": "run-s -n test:lint:*",
     "test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
     "test:lint:ts": "eslint '{src,test}/**/*.ts'",
-    "test:only": "c8 mocha",
-    "test:watch": "mocha --watch"
+    "test:only": "c8 ava",
+    "test:watch": "ava --watch"
   },
   "devDependencies": {
-    "@rollup/plugin-typescript": "11.1.6",
-    "@types/mocha": "10.0.6",
-    "@types/node": "20.11.20",
-    "@typescript-eslint/eslint-plugin": "6.18.1",
-    "@typescript-eslint/parser": "6.18.1",
+    "@rollup/plugin-typescript": "8.3.0",
+    "@typescript-eslint/eslint-plugin": "5.10.0",
+    "@typescript-eslint/parser": "5.10.0",
+    "ava": "4.0.1",
     "benchmark": "2.1.4",
-    "c8": "9.0.0",
-    "esbuild": "0.19.11",
-    "eslint": "8.56.0",
-    "eslint-config-prettier": "9.1.0",
-    "eslint-plugin-no-only-tests": "3.1.0",
-    "mocha": "10.3.0",
+    "c8": "7.11.0",
+    "esbuild": "0.14.14",
+    "esbuild-node-loader": "0.6.4",
+    "eslint": "8.7.0",
+    "eslint-config-prettier": "8.3.0",
    "npm-run-all": "4.1.5",
-    "prettier": "3.1.1",
-    "rollup": "4.9.4",
-    "tsx": "4.7.0",
-    "typescript": "5.3.3"
+    "prettier": "2.5.1",
+    "rollup": "2.64.0",
+    "typescript": "4.5.4"
   },
   "dependencies": {
-    "@jridgewell/resolve-uri": "^3.1.0",
-    "@jridgewell/sourcemap-codec": "^1.4.14"
+    "@jridgewell/resolve-uri": "^3.0.3",
+    "@jridgewell/sourcemap-codec": "^1.4.10"
   }
 }
@@ -6,7 +6,8 @@
     "dev": "next dev",
     "build": "next build",
     "start": "next start",
-    "lint": "next lint"
+    "lint": "next lint",
+    "seed": "bun run prisma/seed.ts"
   },
   "dependencies": {
     "@prisma/client": "^5.20.0",
@@ -14,7 +15,8 @@
     "prisma": "^5.20.0",
     "react": "^18",
     "react-dom": "^18",
-    "react-icons": "^5.3.0"
+    "react-icons": "^5.3.0",
+    "ts-node": "^10.9.2"
   },
   "devDependencies": {
     "typescript": "^5",
@@ -23,8 +25,5 @@
     "@types/react-dom": "^18",
     "postcss": "^8",
     "tailwindcss": "^3.4.1"
-  },
-  "prisma": {
-    "seed": "ts-node prisma/seed.ts"
   }
 }
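With the `"prisma": { "seed": ... }` block removed, `npx prisma db seed` (and the automatic seed step after `prisma migrate dev`/`migrate reset`) no longer has a configured command; seeding is now expected to go through the new `seed` script, which shells out to Bun. A minimal sketch of a seed entry point that works when executed directly like that, assuming the schema defines the `Equipment` model created by the migration below (the real file's `equipmentData` loop appears further down in this diff):

```typescript
// prisma/seed.ts (sketch)
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

async function main() {
  // "name" has no unique constraint in the migration, so re-running the seed
  // will insert duplicate rows unless you clear the table or guard manually.
  await prisma.equipment.create({ data: { name: 'undervognsbehandlet' } });
}

main()
  .catch((e) => {
    console.error(e);
    process.exit(1);
  })
  .finally(async () => {
    await prisma.$disconnect();
  });
```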
prisma/migrations/20241005201030_initial/migration.sql (new file, 99 lines)
@@ -0,0 +1,99 @@
-- CreateTable
CREATE TABLE "Vehicle" (
    "id" SERIAL NOT NULL,
    "brand" TEXT NOT NULL,
    "model" TEXT NOT NULL,
    "variant" TEXT,
    "year" INTEGER NOT NULL,
    "kilometers" INTEGER NOT NULL,
    "condition" TEXT NOT NULL,
    "location" TEXT NOT NULL,
    "latitude" TEXT NOT NULL,
    "longitude" TEXT NOT NULL,
    "gasType" TEXT NOT NULL,
    "images" TEXT[],
    "description" TEXT NOT NULL,
    "service" TEXT NOT NULL,
    "inspectedAt" TIMESTAMP(3),
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "Vehicle_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "Equipment" (
    "id" SERIAL NOT NULL,
    "name" TEXT NOT NULL,

    CONSTRAINT "Equipment_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "Auction" (
    "id" SERIAL NOT NULL,
    "vehicleId" INTEGER NOT NULL,
    "userId" INTEGER NOT NULL,
    "askingPrice" DOUBLE PRECISION NOT NULL,
    "description" TEXT,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "Auction_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "User" (
    "id" SERIAL NOT NULL,
    "name" TEXT NOT NULL,
    "company" TEXT,
    "address" TEXT NOT NULL,
    "latitude" TEXT NOT NULL,
    "longitude" TEXT NOT NULL,
    "phone" TEXT NOT NULL,
    "privatePhone" TEXT,
    "email" TEXT NOT NULL,
    "cvr" TEXT,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "User_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "Bid" (
    "id" SERIAL NOT NULL,
    "auctionId" INTEGER NOT NULL,
    "bid" DOUBLE PRECISION NOT NULL,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "Bid_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "_VehicleEquipment" (
    "A" INTEGER NOT NULL,
    "B" INTEGER NOT NULL
);

-- CreateIndex
CREATE UNIQUE INDEX "_VehicleEquipment_AB_unique" ON "_VehicleEquipment"("A", "B");

-- CreateIndex
CREATE INDEX "_VehicleEquipment_B_index" ON "_VehicleEquipment"("B");

-- AddForeignKey
ALTER TABLE "Auction" ADD CONSTRAINT "Auction_vehicleId_fkey" FOREIGN KEY ("vehicleId") REFERENCES "Vehicle"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "Auction" ADD CONSTRAINT "Auction_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "Bid" ADD CONSTRAINT "Bid_auctionId_fkey" FOREIGN KEY ("auctionId") REFERENCES "Auction"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "_VehicleEquipment" ADD CONSTRAINT "_VehicleEquipment_A_fkey" FOREIGN KEY ("A") REFERENCES "Equipment"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "_VehicleEquipment" ADD CONSTRAINT "_VehicleEquipment_B_fkey" FOREIGN KEY ("B") REFERENCES "Vehicle"("id") ON DELETE CASCADE ON UPDATE CASCADE;
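For orientation, a rough sketch of how the generated Prisma Client could be used against these tables. The scalar field names come straight from the SQL above; the relation field name `equipment`, the example values, and the assumption that `updatedAt` is declared with `@updatedAt` are guesses about the `schema.prisma`, which this diff does not show:

```typescript
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

async function createListing() {
  const vehicle = await prisma.vehicle.create({
    data: {
      brand: 'Volvo', // example values only
      model: 'V70',
      year: 2004,
      kilometers: 250000,
      condition: 'used',
      location: 'Aarhus',
      latitude: '56.1629',
      longitude: '10.2039',
      gasType: 'diesel',
      images: [],
      description: 'Example vehicle',
      service: 'unknown',
      // Implicit many-to-many through the _VehicleEquipment join table;
      // the relation field name `equipment` is assumed, and equipment id 1 must exist.
      equipment: { connect: [{ id: 1 }] },
    },
  });

  // Auction_vehicleId_fkey / Auction_userId_fkey require existing Vehicle and User rows.
  return prisma.auction.create({
    data: { vehicleId: vehicle.id, userId: 1, askingPrice: 30000 },
  });
}
```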
prisma/migrations/migration_lock.toml (new file, 3 lines)
@@ -0,0 +1,3 @@
# Please do not edit this file manually
# It should be added in your version-control system (i.e. Git)
provider = "postgresql"
@@ -141,7 +141,7 @@ const equipmentData = [
   "undervognsbehandlet"
 ];

-const brandData = [
+/* const brandData = [
   "Abarth", "AC", "Adler", "Aiways", "Alfa Romeo", "Alpina", "Aston Martin",
   "Auburn", "Audi", "Austin", "Austin Healey", "Auto Union", "Bentley",
   "BMW", "Buick", "BYD", "Cadillac", "CF Moto", "Chevrolet", "Chrysler",
@@ -157,7 +157,7 @@ const brandData = [
   "Seat", "Seres", "Singer", "Skoda", "Smart", "SsangYong", "Studebaker",
   "Subaru", "Suzuki", "Saab", "Tesla", "Toyota", "Triumph", "Vauxhall",
   "Volvo", "Voyah", "VW", "Wolseley", "Xpeng", "Yugo"
-];
+]; */

 async function main() {
   for (const equipment of equipmentData) {
@@ -166,18 +166,8 @@ async function main() {
     });
   }
   console.log('Equipment data seeded');
-
-
-  // Seed Brands
-  for (const brand of brandData) {
-    await prisma.brand.create({
-      data: { name: brand },
-    });
-  }
-  console.log('Brand data seeded');
 }


 main()
   .catch((e) => {
     console.error(e);