- {"version":3,"file":"trace-mapping.umd.js","sources":["../src/resolve.ts","../src/strip-filename.ts","../src/sourcemap-segment.ts","../src/sort.ts","../src/binary-search.ts","../src/by-source.ts","../src/any-map.ts","../src/trace-mapping.ts"],"sourcesContent":["import resolveUri from '@jridgewell/resolve-uri';\n\nexport default function resolve(input: string, base: string | undefined): string {\n // The base is always treated as a directory, if it's not empty.\n // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327\n // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401\n if (base && !base.endsWith('/')) base += '/';\n\n return resolveUri(input, base);\n}\n","/**\n * Removes everything after the last \"/\", but leaves the slash.\n */\nexport default function stripFilename(path: string | undefined | null): string {\n if (!path) return '';\n const index = path.lastIndexOf('/');\n return path.slice(0, index + 1);\n}\n","type GeneratedColumn = number;\ntype SourcesIndex = number;\ntype SourceLine = number;\ntype SourceColumn = number;\ntype NamesIndex = number;\n\ntype GeneratedLine = number;\n\nexport type SourceMapSegment =\n | [GeneratedColumn]\n | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn]\n | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];\n\nexport type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];\n\nexport const COLUMN = 0;\nexport const SOURCES_INDEX = 1;\nexport const SOURCE_LINE = 2;\nexport const SOURCE_COLUMN = 3;\nexport const NAMES_INDEX = 4;\n\nexport const REV_GENERATED_LINE = 1;\nexport const REV_GENERATED_COLUMN = 2;\n","import { COLUMN } from './sourcemap-segment';\n\nimport type { SourceMapSegment } from './sourcemap-segment';\n\nexport default function maybeSort(\n mappings: SourceMapSegment[][],\n owned: boolean,\n): SourceMapSegment[][] {\n const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);\n if (unsortedIndex === mappings.length) return mappings;\n\n // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. 
If\n // not, we do not want to modify the consumer's input array.\n if (!owned) mappings = mappings.slice();\n\n for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {\n mappings[i] = sortSegments(mappings[i], owned);\n }\n return mappings;\n}\n\nfunction nextUnsortedSegmentLine(mappings: SourceMapSegment[][], start: number): number {\n for (let i = start; i < mappings.length; i++) {\n if (!isSorted(mappings[i])) return i;\n }\n return mappings.length;\n}\n\nfunction isSorted(line: SourceMapSegment[]): boolean {\n for (let j = 1; j < line.length; j++) {\n if (line[j][COLUMN] < line[j - 1][COLUMN]) {\n return false;\n }\n }\n return true;\n}\n\nfunction sortSegments(line: SourceMapSegment[], owned: boolean): SourceMapSegment[] {\n if (!owned) line = line.slice();\n return line.sort(sortComparator);\n}\n\nfunction sortComparator(a: SourceMapSegment, b: SourceMapSegment): number {\n return a[COLUMN] - b[COLUMN];\n}\n","import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';\nimport { COLUMN } from './sourcemap-segment';\n\nexport type MemoState = {\n lastKey: number;\n lastNeedle: number;\n lastIndex: number;\n};\n\nexport let found = false;\n\n/**\n * A binary search implementation that returns the index if a match is found.\n * If no match is found, then the left-index (the index associated with the item that comes just\n * before the desired index) is returned. To maintain proper sort order, a splice would happen at\n * the next index:\n *\n * ```js\n * const array = [1, 3];\n * const needle = 2;\n * const index = binarySearch(array, needle, (item, needle) => item - needle);\n *\n * assert.equal(index, 0);\n * array.splice(index + 1, 0, needle);\n * assert.deepEqual(array, [1, 2, 3]);\n * ```\n */\nexport function binarySearch(\n haystack: SourceMapSegment[] | ReverseSegment[],\n needle: number,\n low: number,\n high: number,\n): number {\n while (low <= high) {\n const mid = low + ((high - low) >> 1);\n const cmp = haystack[mid][COLUMN] - needle;\n\n if (cmp === 0) {\n found = true;\n return mid;\n }\n\n if (cmp < 0) {\n low = mid + 1;\n } else {\n high = mid - 1;\n }\n }\n\n found = false;\n return low - 1;\n}\n\nexport function upperBound(\n haystack: SourceMapSegment[] | ReverseSegment[],\n needle: number,\n index: number,\n): number {\n for (let i = index + 1; i < haystack.length; index = i++) {\n if (haystack[i][COLUMN] !== needle) break;\n }\n return index;\n}\n\nexport function lowerBound(\n haystack: SourceMapSegment[] | ReverseSegment[],\n needle: number,\n index: number,\n): number {\n for (let i = index - 1; i >= 0; index = i--) {\n if (haystack[i][COLUMN] !== needle) break;\n }\n return index;\n}\n\nexport function memoizedState(): MemoState {\n return {\n lastKey: -1,\n lastNeedle: -1,\n lastIndex: -1,\n };\n}\n\n/**\n * This overly complicated beast is just to record the last tested line/column and the resulting\n * index, allowing us to skip a few tests if mappings are monotonically increasing.\n */\nexport function memoizedBinarySearch(\n haystack: SourceMapSegment[] | ReverseSegment[],\n needle: number,\n state: MemoState,\n key: number,\n): number {\n const { lastKey, lastNeedle, lastIndex } = state;\n\n let low = 0;\n let high = haystack.length - 1;\n if (key === lastKey) {\n if (needle === lastNeedle) {\n found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;\n return lastIndex;\n }\n\n if (needle >= lastNeedle) {\n // lastIndex may be -1 if the previous needle was not found.\n low = lastIndex 
=== -1 ? 0 : lastIndex;\n } else {\n high = lastIndex;\n }\n }\n state.lastKey = key;\n state.lastNeedle = needle;\n\n return (state.lastIndex = binarySearch(haystack, needle, low, high));\n}\n","import { COLUMN, SOURCES_INDEX, SOURCE_LINE, SOURCE_COLUMN } from './sourcemap-segment';\nimport { memoizedBinarySearch, upperBound } from './binary-search';\n\nimport type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';\nimport type { MemoState } from './binary-search';\n\nexport type Source = {\n __proto__: null;\n [line: number]: Exclude<ReverseSegment, [number]>[];\n};\n\n// Rebuilds the original source files, with mappings that are ordered by source line/column instead\n// of generated line/column.\nexport default function buildBySources(\n decoded: readonly SourceMapSegment[][],\n memos: MemoState[],\n): Source[] {\n const sources: Source[] = memos.map(buildNullArray);\n\n for (let i = 0; i < decoded.length; i++) {\n const line = decoded[i];\n for (let j = 0; j < line.length; j++) {\n const seg = line[j];\n if (seg.length === 1) continue;\n\n const sourceIndex = seg[SOURCES_INDEX];\n const sourceLine = seg[SOURCE_LINE];\n const sourceColumn = seg[SOURCE_COLUMN];\n const originalSource = sources[sourceIndex];\n const originalLine = (originalSource[sourceLine] ||= []);\n const memo = memos[sourceIndex];\n\n // The binary search either found a match, or it found the left-index just before where the\n // segment should go. Either way, we want to insert after that. And there may be multiple\n // generated segments associated with an original location, so there may need to move several\n // indexes before we find where we need to insert.\n const index = upperBound(\n originalLine,\n sourceColumn,\n memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine),\n );\n\n insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);\n }\n }\n\n return sources;\n}\n\nfunction insert<T>(array: T[], index: number, value: T) {\n for (let i = array.length; i > index; i--) {\n array[i] = array[i - 1];\n }\n array[index] = value;\n}\n\n// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like\n// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.\n// Numeric properties on objects are magically sorted in ascending order by the engine regardless of\n// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending\n// order when iterating with for-in.\nfunction buildNullArray<T extends { __proto__: null }>(): T {\n return { __proto__: null } as T;\n}\n","import { TraceMap, presortedDecodedMap, decodedMappings } from './trace-mapping';\nimport {\n COLUMN,\n SOURCES_INDEX,\n SOURCE_LINE,\n SOURCE_COLUMN,\n NAMES_INDEX,\n} from './sourcemap-segment';\n\nimport type {\n Section,\n SectionedSourceMap,\n DecodedSourceMap,\n SectionedSourceMapInput,\n} from './types';\nimport type { SourceMapSegment } from './sourcemap-segment';\n\ntype AnyMap = {\n new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;\n (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;\n};\n\nexport const AnyMap: AnyMap = function (map, mapUrl) {\n const parsed =\n typeof map === 'string' ? 
(JSON.parse(map) as Exclude<SectionedSourceMapInput, string>) : map;\n\n if (!('sections' in parsed)) return new TraceMap(parsed, mapUrl);\n\n const mappings: SourceMapSegment[][] = [];\n const sources: string[] = [];\n const sourcesContent: (string | null)[] = [];\n const names: string[] = [];\n\n recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, 0, 0, Infinity, Infinity);\n\n const joined: DecodedSourceMap = {\n version: 3,\n file: parsed.file,\n names,\n sources,\n sourcesContent,\n mappings,\n };\n\n return presortedDecodedMap(joined);\n} as AnyMap;\n\nfunction recurse(\n input: SectionedSourceMap,\n mapUrl: string | null | undefined,\n mappings: SourceMapSegment[][],\n sources: string[],\n sourcesContent: (string | null)[],\n names: string[],\n lineOffset: number,\n columnOffset: number,\n stopLine: number,\n stopColumn: number,\n) {\n const { sections } = input;\n for (let i = 0; i < sections.length; i++) {\n const { map, offset } = sections[i];\n\n let sl = stopLine;\n let sc = stopColumn;\n if (i + 1 < sections.length) {\n const nextOffset = sections[i + 1].offset;\n sl = Math.min(stopLine, lineOffset + nextOffset.line);\n\n if (sl === stopLine) {\n sc = Math.min(stopColumn, columnOffset + nextOffset.column);\n } else if (sl < stopLine) {\n sc = columnOffset + nextOffset.column;\n }\n }\n\n addSection(\n map,\n mapUrl,\n mappings,\n sources,\n sourcesContent,\n names,\n lineOffset + offset.line,\n columnOffset + offset.column,\n sl,\n sc,\n );\n }\n}\n\nfunction addSection(\n input: Section['map'],\n mapUrl: string | null | undefined,\n mappings: SourceMapSegment[][],\n sources: string[],\n sourcesContent: (string | null)[],\n names: string[],\n lineOffset: number,\n columnOffset: number,\n stopLine: number,\n stopColumn: number,\n) {\n if ('sections' in input) return recurse(...(arguments as unknown as Parameters<typeof recurse>));\n\n const map = new TraceMap(input, mapUrl);\n const sourcesOffset = sources.length;\n const namesOffset = names.length;\n const decoded = decodedMappings(map);\n const { resolvedSources, sourcesContent: contents } = map;\n\n append(sources, resolvedSources);\n append(names, map.names);\n if (contents) append(sourcesContent, contents);\n else for (let i = 0; i < resolvedSources.length; i++) sourcesContent.push(null);\n\n for (let i = 0; i < decoded.length; i++) {\n const lineI = lineOffset + i;\n\n // We can only add so many lines before we step into the range that the next section's map\n // controls. When we get to the last line, then we'll start checking the segments to see if\n // they've crossed into the column range. But it may not have any columns that overstep, so we\n // still need to check that we don't overstep lines, too.\n if (lineI > stopLine) return;\n\n // The out line may already exist in mappings (if we're continuing the line started by a\n // previous section). Or, we may have jumped ahead several lines to start this section.\n const out = getLine(mappings, lineI);\n // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the\n // map can be multiple lines), it doesn't.\n const cOffset = i === 0 ? 
columnOffset : 0;\n\n const line = decoded[i];\n for (let j = 0; j < line.length; j++) {\n const seg = line[j];\n const column = cOffset + seg[COLUMN];\n\n // If this segment steps into the column range that the next section's map controls, we need\n // to stop early.\n if (lineI === stopLine && column >= stopColumn) return;\n\n if (seg.length === 1) {\n out.push([column]);\n continue;\n }\n\n const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];\n const sourceLine = seg[SOURCE_LINE];\n const sourceColumn = seg[SOURCE_COLUMN];\n out.push(\n seg.length === 4\n ? [column, sourcesIndex, sourceLine, sourceColumn]\n : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]],\n );\n }\n }\n}\n\nfunction append<T>(arr: T[], other: T[]) {\n for (let i = 0; i < other.length; i++) arr.push(other[i]);\n}\n\nfunction getLine<T>(arr: T[][], index: number): T[] {\n for (let i = arr.length; i <= index; i++) arr[i] = [];\n return arr[index];\n}\n","import { encode, decode } from '@jridgewell/sourcemap-codec';\n\nimport resolve from './resolve';\nimport stripFilename from './strip-filename';\nimport maybeSort from './sort';\nimport buildBySources from './by-source';\nimport {\n memoizedState,\n memoizedBinarySearch,\n upperBound,\n lowerBound,\n found as bsFound,\n} from './binary-search';\nimport {\n SOURCES_INDEX,\n SOURCE_LINE,\n SOURCE_COLUMN,\n NAMES_INDEX,\n REV_GENERATED_LINE,\n REV_GENERATED_COLUMN,\n} from './sourcemap-segment';\n\nimport type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';\nimport type {\n SourceMapV3,\n DecodedSourceMap,\n EncodedSourceMap,\n InvalidOriginalMapping,\n OriginalMapping,\n InvalidGeneratedMapping,\n GeneratedMapping,\n SourceMapInput,\n Needle,\n SourceNeedle,\n SourceMap,\n EachMapping,\n} from './types';\nimport type { Source } from './by-source';\nimport type { MemoState } from './binary-search';\n\nexport type { SourceMapSegment } from './sourcemap-segment';\nexport type {\n SourceMapInput,\n SectionedSourceMapInput,\n DecodedSourceMap,\n EncodedSourceMap,\n SectionedSourceMap,\n InvalidOriginalMapping,\n OriginalMapping as Mapping,\n OriginalMapping,\n InvalidGeneratedMapping,\n GeneratedMapping,\n EachMapping,\n} from './types';\n\nconst LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';\nconst COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';\n\nexport const LEAST_UPPER_BOUND = -1;\nexport const GREATEST_LOWER_BOUND = 1;\n\n/**\n * Returns the encoded (VLQ string) form of the SourceMap's mappings field.\n */\nexport let encodedMappings: (map: TraceMap) => EncodedSourceMap['mappings'];\n\n\nexport let decodedMappings: (map: TraceMap) => Readonly<DecodedSourceMap['mappings']>;\n\n\nexport let traceSegment: (\n map: TraceMap,\n line: number,\n column: number,\n) => Readonly<SourceMapSegment> | null;\n\n/**\n * A higher-level API to find the source/line/column associated with a generated line/column\n * (think, from a stack trace). 
Line is 1-based, but column is 0-based, due to legacy behavior in\n * `source-map` library.\n */\nexport let originalPositionFor: (\n map: TraceMap,\n needle: Needle,\n) => OriginalMapping | InvalidOriginalMapping;\n\n/**\n * Finds the source/line/column directly after the mapping returned by originalPositionFor, provided\n * the found mapping is from the same source and line as the originalPositionFor mapping.\n *\n * Eg, in the code `let id = 1`, `originalPositionAfter` could find the mapping associated with `1`\n * using the same needle that would return `id` when calling `originalPositionFor`.\n */\nexport let generatedPositionFor: (\n map: TraceMap,\n needle: SourceNeedle,\n) => GeneratedMapping | InvalidGeneratedMapping;\n\n/**\n * Iterates each mapping in generated position order.\n */\nexport let eachMapping: (map: TraceMap, cb: (mapping: EachMapping) => void) => void;\n\n\nexport let sourceContentFor: (map: TraceMap, source: string) => string | null;\n\n/**\n * A helper that skips sorting of the input map's mappings array, which can be expensive for larger\n * maps.\n */\nexport let presortedDecodedMap: (map: DecodedSourceMap, mapUrl?: string) => TraceMap;\n\n\nexport let decodedMap: (\n map: TraceMap,\n) => Omit<DecodedSourceMap, 'mappings'> & { mappings: readonly SourceMapSegment[][] };\n\n\nexport let encodedMap: (map: TraceMap) => EncodedSourceMap;\n\nexport { AnyMap } from './any-map';\n\nexport class TraceMap implements SourceMap {\n declare version: SourceMapV3['version'];\n declare file: SourceMapV3['file'];\n declare names: SourceMapV3['names'];\n declare sourceRoot: SourceMapV3['sourceRoot'];\n declare sources: SourceMapV3['sources'];\n declare sourcesContent: SourceMapV3['sourcesContent'];\n\n declare resolvedSources: string[];\n private declare _encoded: string | undefined;\n\n private declare _decoded: SourceMapSegment[][] | undefined;\n private declare _decodedMemo: MemoState;\n\n private declare _bySources: Source[] | undefined;\n private declare _bySourceMemos: MemoState[] | undefined;\n\n constructor(map: SourceMapInput, mapUrl?: string | null) {\n const isString = typeof map === 'string';\n\n if (!isString && (map as unknown as { _decodedMemo: any })._decodedMemo) return map as TraceMap;\n\n const parsed = (isString ? JSON.parse(map) : map) as Exclude<SourceMapInput, string | TraceMap>;\n\n const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;\n this.version = version;\n this.file = file;\n this.names = names;\n this.sourceRoot = sourceRoot;\n this.sources = sources;\n this.sourcesContent = sourcesContent;\n\n const from = resolve(sourceRoot || '', stripFilename(mapUrl));\n this.resolvedSources = sources.map((s) => resolve(s || '', from));\n\n const { mappings } = parsed;\n if (typeof mappings === 'string') {\n this._encoded = mappings;\n this._decoded = undefined;\n } else {\n this._encoded = undefined;\n this._decoded = maybeSort(mappings, isString);\n }\n\n this._decodedMemo = memoizedState();\n this._bySources = undefined;\n this._bySourceMemos = undefined;\n }\n\n static {\n encodedMappings = (map) => {\n return (map._encoded ??= encode(map._decoded!));\n };\n\n decodedMappings = (map) => {\n return (map._decoded ||= decode(map._encoded!));\n };\n\n traceSegment = (map, line, column) => {\n const decoded = decodedMappings(map);\n\n