import resolveUri from '@jridgewell/resolve-uri';
import { encode } from 'sourcemap-codec';

function resolve(input, base) {
    // The base is always treated as a directory, if it's not empty.
    // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
    // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
    if (base && !base.endsWith('/'))
        base += '/';
    return resolveUri(input, base);
}
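
// Illustrative only (the URLs below are hypothetical, not part of the library): because the base
// is always treated as a directory, a missing trailing slash does not strip the last path segment.
//   resolve('foo.js', 'https://example.com/dir')  // => 'https://example.com/dir/foo.js'
//   resolve('foo.js', 'https://example.com/dir/') // => 'https://example.com/dir/foo.js'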

/**
 * Removes everything after the last "/", but leaves the slash.
 */
function stripFilename(path) {
    if (!path)
        return '';
    const index = path.lastIndexOf('/');
    return path.slice(0, index + 1);
}

/**
 * A binary search implementation that returns the index if a match is found,
 * or the left-index (see below) if no exact match exists.
 *
 * The `comparator` callback receives both the `item` under comparison and the
 * needle we are searching for. It must return `0` if the `item` is a match,
 * any negative number if `item` is too small (and we must search after it), or
 * any positive number if the `item` is too large (and we must search before
 * it).
 *
 * The `len` param allows you to treat contiguous blocks of memory as a single item. Eg, a 5-length
 * tuple (with values at indices 0-4) would only test index 0.
 *
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
function binarySearch(haystack, needle, comparator, low, high, len) {
    low /= len;
    high /= len;
    while (low <= high) {
        const mid = low + ((high - low) >> 1);
        const index = mid * len;
        const cmp = comparator(haystack[index], needle);
        if (cmp === 0) {
            return index;
        }
        if (cmp < 0) {
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }
    return (low - 1) * len;
}
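
// Illustrative only (hypothetical data): with `len`, a flat typed array is treated as a sequence
// of fixed-width records, and only the first field of each record is compared.
//   const flat = new Uint32Array([0, 9, 9, 9, 9, 10, 9, 9, 9, 9, 20, 9, 9, 9, 9]);
//   binarySearch(flat, 10, (col, needle) => col - needle, 0, flat.length - 5, 5); // => 5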
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
function memoizedBinarySearch(haystack, needle, comparator, low, high, len, state, line, column) {
    const { _lastLine: lastLine, _lastColumn: lastColumn, _lastIndex: lastIndex } = state;
    if (line === lastLine) {
        if (column === lastColumn) {
            return lastIndex;
        }
        if (column >= lastColumn) {
            low = Math.max(lastIndex, 0);
        }
        else {
            high = lastIndex;
        }
    }
    state._lastLine = line;
    state._lastColumn = column;
    return (state._lastIndex = binarySearch(haystack, needle, comparator, low, high, len));
}
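
// Illustrative only: repeated lookups on the same generated line reuse the previous result as a
// search bound. For example, tracing column 10 and then column 15 of the same line only searches
// the segments at or after the one found for column 10.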

const ITEM_LENGTH$1 = 1;
class DecodedSourceMapImpl {
    constructor(map, owned) {
        this._lastIndex = 0;
        this._lastLine = 0;
        this._lastColumn = 0;
        this._mappings = maybeSort(map.mappings, owned);
    }
    encodedMappings() {
        return encode(this._mappings);
    }
    decodedMappings() {
        return this._mappings;
    }
    map(fn) {
        const mapOut = [];
        const mappings = this._mappings;
        for (let i = 0; i < mappings.length; i++) {
            const line = mappings[i];
            const lineOut = [];
            mapOut.push(lineOut);
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const { length } = seg;
                let segOut;
                if (length === 4)
                    segOut = fn(i, seg[0], seg[1], seg[2], seg[3], -1);
                else if (length === 5)
                    segOut = fn(i, seg[0], seg[1], seg[2], seg[3], seg[4]);
                else
                    segOut = fn(i, seg[0], -1, -1, -1, -1);
                if (segOut != null)
                    lineOut.push(segOut);
            }
        }
        return mapOut;
    }
    traceSegment(line, column) {
        const mappings = this._mappings;
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= mappings.length)
            return null;
        const segments = mappings[line];
        const index = memoizedBinarySearch(segments, column, searchComparator$1, 0, segments.length - ITEM_LENGTH$1, ITEM_LENGTH$1, this, line, column);
        // we come before any mapped segment
        if (index < 0)
            return null;
        return segments[index];
    }
}
function maybeSort(mappings, owned) {
    const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
    if (unsortedIndex === mappings.length)
        return mappings;
    // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
    // not, we do not want to modify the consumer's input array.
    if (!owned)
        mappings = mappings.slice();
    for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
        mappings[i] = sortSegments(mappings[i], owned);
    }
    return mappings;
}
function nextUnsortedSegmentLine(mappings, start) {
    for (let i = start; i < mappings.length; i++) {
        if (!isSorted(mappings[i]))
            return i;
    }
    return mappings.length;
}
function isSorted(line) {
    for (let j = 1; j < line.length; j++) {
        if (line[j][0] < line[j - 1][0]) {
            return false;
        }
    }
    return true;
}
function sortSegments(line, owned) {
    if (!owned)
        line = line.slice();
    return line.sort(sortComparator$1);
}
function sortComparator$1(a, b) {
    return a[0] - b[0];
}
function searchComparator$1(segment, needle) {
    return segment[0] - needle;
}

const base64 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';
const base64Index = new Uint8Array(128);
for (let i = 0; i < base64.length; i++) {
    base64Index[base64.charCodeAt(i)] = i;
}
const ITEM_LENGTH = 5;
function decode(encoded, lines) {
    let generatedColumn = 0;
    // 0 is used as a "not found" marker (because TypedArray slots are 0 by default, so we don't
    // need to manually set any missing VLQ numbers), so these start at 1. Public APIs will
    // decrement the values so they are correct before returning to consumers.
    let sourcesIndex = 1;
    let sourceLine = 1;
    let sourceColumn = 1;
    let namesIndex = 1;
    let lineSorted = true;
    // count tracks the number of segments we have stored.
    let count = 0;
    // lastLineStart tracks the `count` of the segment that started the current line, so that we may
    // sort the line after it's complete.
    let lastLineStart = 0;
    let decoded = new Uint32Array(1000);
    // The first line starts with segment 0. If there are no mappings on this line, then the next
    // line will also start with segment 0 (meaning the first line ends with segment 0).
    lines.push(0);
    for (let pos = 0; pos < encoded.length;) {
        switch (encoded.charCodeAt(pos)) {
            // Commas separate segments on a line.
            case 44: // ','
                pos++;
                continue;
            // Semicolons separate lines.
            case 59: // ';'
                lines.push(count);
                if (!lineSorted)
                    easySort(decoded, lastLineStart, count);
                lineSorted = true;
                lastLineStart = count;
                // generatedColumn is reset when the next line starts, per the spec.
                generatedColumn = 0;
                pos++;
                continue;
            default:
                // Ensure that we have at least 5 items left in the decoded buffer, so we can push
                // this segment on.
                decoded = reserve(decoded, count, ITEM_LENGTH);
                // Segments are guaranteed to have at least the generatedColumn VLQ.
                pos = decodeInteger(encoded, pos, decoded, count);
                if (lineSorted)
                    lineSorted = decoded[count] >= 0;
                generatedColumn = decoded[count] += generatedColumn;
                count++;
                if (!hasMoreMappings(encoded, pos)) {
                    count += 4;
                    continue;
                }
                // If there are more VLQ, then we're guaranteed to have sourcesIndex, sourceLine, and
                // sourceColumn.
                pos = decodeInteger(encoded, pos, decoded, count);
                sourcesIndex = decoded[count] += sourcesIndex;
                count++;
                pos = decodeInteger(encoded, pos, decoded, count);
                sourceLine = decoded[count] += sourceLine;
                count++;
                pos = decodeInteger(encoded, pos, decoded, count);
                sourceColumn = decoded[count] += sourceColumn;
                count++;
                if (!hasMoreMappings(encoded, pos)) {
                    count += 1;
                    continue;
                }
                // Finally, namesIndex.
                pos = decodeInteger(encoded, pos, decoded, count);
                namesIndex = decoded[count] += namesIndex;
                count++;
        }
    }
    // Cap the lines, so that we can always look at index and index+1 for the start and end indices.
    lines.push(count);
    if (!lineSorted)
        easySort(decoded, lastLineStart, count);
    return decoded.subarray(0, count);
}
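
// Illustrative only (hypothetical input): decoding the single-segment mapping 'AAAA' stores one
// 5-wide record. The generated column is kept as-is, the source fields are stored offset by +1,
// and the unset namesIndex slot stays 0 (the "not found" marker).
//   const lines = [];
//   decode('AAAA', lines); // => Uint32Array [0, 1, 1, 1, 0]
//   lines;                 // => [0, 5]  (the first line's segments span indices 0..4)
// Public callers subtract 1 from fields 1-4, recovering [genCol 0, source 0, line 0, col 0, no name].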
function reserve(buf, pos, count) {
    if (buf.length > pos + count)
        return buf;
    const swap = new Uint32Array(buf.length * 2);
    swap.set(buf);
    return swap;
}
function hasMoreMappings(encoded, pos) {
    if (pos === encoded.length)
        return false;
    const c = encoded.charCodeAt(pos);
    return c !== 44 /* ',' */ && c !== 59 /* ';' */;
}
function decodeInteger(encoded, pos, state, index) {
    let value = 0;
    let shift = 0;
    let integer = 0;
    // VLQ is a variable-length quantity (duh). The low 5 bits of each character are integer data,
    // with the 6th bit being the "continue" bit signaling more data in the VLQ.
    do {
        const c = encoded.charCodeAt(pos++);
        integer = base64Index[c];
        value |= (integer & 0b011111) << shift;
        shift += 5;
    } while (integer & 0b100000);
    // The lowest bit encodes whether the VLQ was originally negative.
    const shouldNegate = value & 1;
    value >>>= 1;
    // Normally, -x produces a negative value like you would expect. But what does `-0` represent? In
    // VLQ, it should represent -2,147,483,648 (the smallest 32bit signed int). But -0 is just 0 in
    // JS, so we have to bitwise-OR with -0x80000000. This won't affect any other value, eg
    // `-1 | -0x80000000` is still `-1`.
    if (shouldNegate)
        value = -0x80000000 | -value;
    state[index] = value;
    return pos;
}
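
// Illustrative only (hypothetical scratch array): a few VLQs decoded one at a time. 'E' is base64
// index 4 => +2, 'F' is index 5 => -2 (low bit set), and 'gB' continues across two characters to
// encode 16. The return value is the position just past the consumed VLQ.
//   const out = [];
//   decodeInteger('E', 0, out, 0);  // returns 1, out[0] === 2
//   decodeInteger('F', 0, out, 1);  // returns 1, out[1] === -2
//   decodeInteger('gB', 0, out, 2); // returns 2, out[2] === 16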
function easySort(state, start, end) {
    // This isn't a fast algorithm, but I believe it's exceedingly rare for a mapping to be unsorted.
    const segments = [];
    for (let i = start; i < end; i += ITEM_LENGTH) {
        segments.push(state.slice(i, i + ITEM_LENGTH));
    }
    segments.sort(sortComparator);
    for (let i = start, j = 0; i < end; i += ITEM_LENGTH, j++) {
        state.set(segments[j], i);
    }
}
function sortComparator(a, b) {
    return a[0] - b[0];
}

class EncodedSourceMapImpl {
    constructor(map) {
        this._lastIndex = 0;
        this._lastLine = 0;
        this._lastColumn = 0;
        this._lineIndices = [];
        this._encoded = map.mappings;
        this._mappings = decode(this._encoded, this._lineIndices);
    }
    encodedMappings() {
        return this._encoded;
    }
    decodedMappings() {
        return this.map(segmentify);
    }
    map(fn) {
        const { _mappings: mappings, _lineIndices: lineIndices } = this;
        const mapOut = [];
        let lineOut = [];
        let generatedLine = 0;
        let lineIndex = lineIndices[generatedLine + 1];
        for (let i = 0; i < mappings.length;) {
            while (i < lineIndex) {
                const segOut = fn(generatedLine, mappings[i + 0], mappings[i + 1] - 1, mappings[i + 2] - 1, mappings[i + 3] - 1, mappings[i + 4] - 1);
                if (segOut != null)
                    lineOut.push(segOut);
                i += ITEM_LENGTH;
            }
            do {
                mapOut.push(lineOut);
                lineOut = [];
                generatedLine++;
                lineIndex = lineIndices[generatedLine + 1];
            } while (i === lineIndex);
        }
        return mapOut;
    }
    traceSegment(line, column) {
        const { _mappings: mappings, _lineIndices: lineIndices } = this;
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= lineIndices.length - 1)
            return null;
        const index = memoizedBinarySearch(mappings, column, searchComparator, lineIndices[line], lineIndices[line + 1] - 1, ITEM_LENGTH, this, line, column);
        // we come before any mapped segment
        if (index < 0)
            return null;
        return segmentify(line, mappings[index + 0], mappings[index + 1] - 1, mappings[index + 2] - 1, mappings[index + 3] - 1, mappings[index + 4] - 1);
    }
}
function segmentify(_genLine, genCol, source, line, col, name) {
    // If the sourcesIndex is -1, then the VLQ segment didn't specify values 2-5.
    if (source === -1)
        return [genCol];
    // If the namesIndex is -1, then the VLQ segment didn't specify the 5th value.
    if (name === -1)
        return [genCol, source, line, col];
    return [genCol, source, line, col, name];
}
function searchComparator(column, needle) {
    return column - needle;
}

const INVALID_MAPPING = Object.freeze({
    source: null,
    line: null,
    column: null,
    name: null,
});
class TraceMap {
    constructor(map, mapUrl) {
        const isString = typeof map === 'string';
        const parsed = isString ? JSON.parse(map) : map;
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names;
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        const from = resolve(sourceRoot || '', stripFilename(mapUrl));
        this.resolvedSources = sources.map((s) => resolve(s || '', from));
        if (typeof parsed.mappings === 'string') {
            this._impl = new EncodedSourceMapImpl(parsed);
        }
        else {
            this._impl = new DecodedSourceMapImpl(parsed, isString);
        }
    }
    /**
     * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
     */
    encodedMappings() {
        return this._impl.encodedMappings();
    }
    /**
     * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
     */
    decodedMappings() {
        return this._impl.decodedMappings();
    }
    /**
     * Similar to Array.prototype.map, maps each segment into a new segment. Passes -1 for any
     * values that do not exist in the SourceMapSegment. Both generatedLine and generatedColumn are
     * 0-based.
     */
    map(fn) {
        return this._impl.map(fn);
    }
    /**
     * A low-level API to find the segment associated with a generated line/column (think, from a
     * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
     */
    traceSegment(line, column) {
        return this._impl.traceSegment(line, column);
    }
    /**
     * A higher-level API to find the source/line/column associated with a generated line/column
     * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
     * the `source-map` library.
     */
    originalPositionFor({ line, column }) {
        if (line < 1)
            throw new Error('`line` must be greater than 0 (lines start at line 1)');
        if (column < 0) {
            throw new Error('`column` must be greater than or equal to 0 (columns start at column 0)');
        }
        const segment = this.traceSegment(line - 1, column);
        if (segment == null)
            return INVALID_MAPPING;
        if (segment.length === 1)
            return INVALID_MAPPING;
        const { names, resolvedSources } = this;
        return {
            source: resolvedSources[segment[1]],
            line: segment[2] + 1,
            column: segment[3],
            name: segment.length === 5 ? names[segment[4]] : null,
        };
    }
}
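
// Usage sketch (illustrative; the source map object below is hypothetical, and the exact resolved
// `source` value depends on `mapUrl` and `sourceRoot`):
//   import TraceMap from '@jridgewell/trace-mapping';
//   const tracer = new TraceMap({
//     version: 3,
//     sources: ['input.js'],
//     names: ['foo'],
//     mappings: 'AAAAA',
//   });
//   tracer.originalPositionFor({ line: 1, column: 0 });
//   // => { source: 'input.js', line: 1, column: 0, name: 'foo' }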

export { TraceMap, TraceMap as default };
//# sourceMappingURL=trace-mapping.mjs.map