Skip to content

Commit a241607

Browse files
committed
fix: improve performance in many places
- readMappings: avoid Int32Array iterator protocol by indexing `currentData[0]` directly instead of destructuring - createMappingsSerializer: skip `";".repeat(1)` for the dominant consecutive-line case - splitIntoPotentialTokens: replace chained `===` comparisons with two Uint8Array lookup tables in the hot scan loops - streamChunksOfSourceMap: hoist `lines.length` and drop a redundant per-iteration bounds check; in the lines-only path the bounds check is already guaranteed by the leading guard - streamChunksOfCombinedSourceMap: coerce binary-search `r` to int32 - ConcatSource.streamChunks: only compute the remapped `nameIndex` when the chunk actually carries a source mapping https://claude.ai/code/session_013RELTj96iEXrmMSPxnwjeR
1 parent ccfbc65 commit a241607

6 files changed

Lines changed: 98 additions & 59 deletions

lib/ConcatSource.js

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -222,11 +222,6 @@ class ConcatSource extends Source {
222222
sourceIndex < 0 || sourceIndex >= sourceIndexMapping.length
223223
? -1
224224
: sourceIndexMapping[sourceIndex];
225-
const resultNameIndex =
226-
nameIndex < 0 || nameIndex >= nameIndexMapping.length
227-
? -1
228-
: nameIndexMapping[nameIndex];
229-
lastMappingLine = resultSourceIndex < 0 ? 0 : generatedLine;
230225
let _chunk;
231226
// When using finalSource, we process the entire source code at once at the end, rather than chunk by chunk
232227
if (finalSource) {
@@ -235,8 +230,17 @@ class ConcatSource extends Source {
235230
_chunk = chunk;
236231
}
237232
if (resultSourceIndex < 0) {
233+
lastMappingLine = 0;
238234
onChunk(_chunk, line, column, -1, -1, -1, -1);
239235
} else {
236+
// Only compute the remapped name index when the chunk
237+
// actually carries a source mapping; otherwise it is
238+
// unused.
239+
const resultNameIndex =
240+
nameIndex < 0 || nameIndex >= nameIndexMapping.length
241+
? -1
242+
: nameIndexMapping[nameIndex];
243+
lastMappingLine = generatedLine;
240244
onChunk(
241245
_chunk,
242246
line,

lib/helpers/createMappingsSerializer.js

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -85,7 +85,12 @@ const createFullMappingsSerializer = () => {
8585

8686
let str;
8787
if (currentLine < generatedLine) {
88-
str = ";".repeat(generatedLine - currentLine);
88+
// Consecutive lines (diff === 1) are the dominant case; avoid the
89+
// `.repeat()` call entirely for them.
90+
str =
91+
generatedLine === currentLine + 1
92+
? ";"
93+
: ";".repeat(generatedLine - currentLine);
8994
currentLine = generatedLine;
9095
currentColumn = 0;
9196
initial = false;

lib/helpers/readMappings.js

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,10 @@ const readMappings = (mappings, onMapping) => {
7171
currentData[4],
7272
);
7373
}
74-
[generatedColumn] = currentData;
74+
// Direct typed-array index is faster here than destructuring,
75+
// which would invoke the Int32Array iterator protocol.
76+
// eslint-disable-next-line prefer-destructuring
77+
generatedColumn = currentData[0];
7578
}
7679
currentDataPos = 0;
7780
if (value === NEXT_LINE) {

lib/helpers/splitIntoPotentialTokens.js

Lines changed: 24 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,25 @@
1313
// \r = 13
1414
// \t = 9
1515

16+
// Two Uint8Array lookup tables replace the chained `===` comparisons in the
17+
// hot scan loops. V8 keeps the tables in L1 as a constant, so the inner
18+
// condition becomes a single bounds check plus a typed-array load, which is
19+
// cheaper than 4–6 branches per character for long inputs.
20+
// Indexed by charCode; entries outside the ASCII range are implicitly 0.
21+
const BOUNDARY = new Uint8Array(126);
22+
BOUNDARY[10] = 1; // \n
23+
BOUNDARY[59] = 1; // ;
24+
BOUNDARY[123] = 1; // {
25+
BOUNDARY[125] = 1; // }
26+
27+
const SEPARATOR = new Uint8Array(126);
28+
SEPARATOR[9] = 1; // \t
29+
SEPARATOR[13] = 1; // \r
30+
SEPARATOR[32] = 1; // space
31+
SEPARATOR[59] = 1; // ;
32+
SEPARATOR[123] = 1; // {
33+
SEPARATOR[125] = 1; // }
34+
1635
/**
1736
* @param {string} str string
1837
* @returns {string[] | null} array of string separated by potential tokens
@@ -26,18 +45,14 @@ const splitIntoPotentialTokens = (str) => {
2645
const start = i;
2746
block: {
2847
let cc = str.charCodeAt(i);
29-
while (cc !== 10 && cc !== 59 && cc !== 123 && cc !== 125) {
48+
// Advance through non-boundary characters. Non-ASCII codepoints
49+
// (cc >= 126) are by definition not boundaries.
50+
while (cc >= 126 || BOUNDARY[cc] === 0) {
3051
if (++i >= len) break block;
3152
cc = str.charCodeAt(i);
3253
}
33-
while (
34-
cc === 59 ||
35-
cc === 32 ||
36-
cc === 123 ||
37-
cc === 125 ||
38-
cc === 13 ||
39-
cc === 9
40-
) {
54+
// Consume trailing separators so they stay grouped with the token.
55+
while (cc < 126 && SEPARATOR[cc] === 1) {
4156
if (++i >= len) break block;
4257
cc = str.charCodeAt(i);
4358
}

lib/helpers/streamChunksOfCombinedSourceMap.js

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,11 @@ const streamChunksOfCombinedSourceMap = (
7676
if (line > innerSourceMapLineData.length) return -1;
7777
const { mappingsData } = innerSourceMapLineData[line - 1];
7878
let l = 0;
79-
let r = mappingsData.length / 5;
79+
// `mappingsData.length` is always a multiple of 5 (five values pushed
80+
// per mapping), so dividing is exact. Coerce the bound to an int32 so
81+
// the binary-search loop stays on V8's fast small-int path instead of
82+
// comparing an int against a float.
83+
let r = (mappingsData.length / 5) | 0;
8084
while (l < r) {
8185
const m = (l + r) >> 1;
8286
if (mappingsData[m * 5] <= column) {

lib/helpers/streamChunksOfSourceMap.js

Lines changed: 50 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,8 @@ const streamChunksOfSourceMapFull = (
3232
onName,
3333
) => {
3434
const lines = splitIntoLines(source);
35-
if (lines.length === 0) {
35+
const linesLength = lines.length;
36+
if (linesLength === 0) {
3637
return {
3738
generatedLine: 1,
3839
generatedColumn: 0,
@@ -52,9 +53,9 @@ const streamChunksOfSourceMapFull = (
5253
}
5354
}
5455

55-
const lastLine = lines[lines.length - 1];
56+
const lastLine = lines[linesLength - 1];
5657
const lastNewLine = lastLine.endsWith("\n");
57-
const finalLine = lastNewLine ? lines.length + 1 : lines.length;
58+
const finalLine = lastNewLine ? linesLength + 1 : linesLength;
5859
const finalColumn = lastNewLine ? 0 : lastLine.length;
5960

6061
let currentGeneratedLine = 1;
@@ -83,7 +84,7 @@ const streamChunksOfSourceMapFull = (
8384
originalColumn,
8485
nameIndex,
8586
) => {
86-
if (mappingActive && currentGeneratedLine <= lines.length) {
87+
if (mappingActive && currentGeneratedLine <= linesLength) {
8788
let chunk;
8889
const mappingLine = currentGeneratedLine;
8990
const mappingColumn = currentGeneratedColumn;
@@ -110,7 +111,7 @@ const streamChunksOfSourceMapFull = (
110111
mappingActive = false;
111112
}
112113
if (generatedLine > currentGeneratedLine && currentGeneratedColumn > 0) {
113-
if (currentGeneratedLine <= lines.length) {
114+
if (currentGeneratedLine <= linesLength) {
114115
const chunk = lines[currentGeneratedLine - 1].slice(
115116
currentGeneratedColumn,
116117
);
@@ -127,22 +128,29 @@ const streamChunksOfSourceMapFull = (
127128
currentGeneratedLine++;
128129
currentGeneratedColumn = 0;
129130
}
130-
while (generatedLine > currentGeneratedLine) {
131-
if (currentGeneratedLine <= lines.length) {
132-
onChunk(
133-
lines[currentGeneratedLine - 1],
134-
currentGeneratedLine,
135-
0,
136-
-1,
137-
-1,
138-
-1,
139-
-1,
140-
);
141-
}
131+
// Emit each fully-passed generated line. Once we move past the last
132+
// available line we stop emitting, but still need to advance the
133+
// counter to `generatedLine` so subsequent state matches the caller.
134+
while (
135+
generatedLine > currentGeneratedLine &&
136+
currentGeneratedLine <= linesLength
137+
) {
138+
onChunk(
139+
lines[currentGeneratedLine - 1],
140+
currentGeneratedLine,
141+
0,
142+
-1,
143+
-1,
144+
-1,
145+
-1,
146+
);
142147
currentGeneratedLine++;
143148
}
149+
if (currentGeneratedLine < generatedLine) {
150+
currentGeneratedLine = generatedLine;
151+
}
144152
if (generatedColumn > currentGeneratedColumn) {
145-
if (currentGeneratedLine <= lines.length) {
153+
if (currentGeneratedLine <= linesLength) {
146154
const chunk = lines[currentGeneratedLine - 1].slice(
147155
currentGeneratedColumn,
148156
generatedColumn,
@@ -195,7 +203,8 @@ const streamChunksOfSourceMapLinesFull = (
195203
_onName,
196204
) => {
197205
const lines = splitIntoLines(source);
198-
if (lines.length === 0) {
206+
const linesLength = lines.length;
207+
if (linesLength === 0) {
199208
return {
200209
generatedLine: 1,
201210
generatedColumn: 0,
@@ -232,39 +241,38 @@ const streamChunksOfSourceMapLinesFull = (
232241
if (
233242
sourceIndex < 0 ||
234243
generatedLine < currentGeneratedLine ||
235-
generatedLine > lines.length
244+
generatedLine > linesLength
236245
) {
237246
return;
238247
}
248+
// `generatedLine <= linesLength` is guaranteed by the guard above, so
249+
// every line we iterate over is in bounds — no per-iteration length
250+
// check needed.
239251
while (generatedLine > currentGeneratedLine) {
240-
if (currentGeneratedLine <= lines.length) {
241-
onChunk(
242-
lines[currentGeneratedLine - 1],
243-
currentGeneratedLine,
244-
0,
245-
-1,
246-
-1,
247-
-1,
248-
-1,
249-
);
250-
}
251-
currentGeneratedLine++;
252-
}
253-
if (generatedLine <= lines.length) {
254252
onChunk(
255-
lines[generatedLine - 1],
256-
generatedLine,
253+
lines[currentGeneratedLine - 1],
254+
currentGeneratedLine,
257255
0,
258-
sourceIndex,
259-
originalLine,
260-
originalColumn,
256+
-1,
257+
-1,
258+
-1,
261259
-1,
262260
);
263261
currentGeneratedLine++;
264262
}
263+
onChunk(
264+
lines[generatedLine - 1],
265+
generatedLine,
266+
0,
267+
sourceIndex,
268+
originalLine,
269+
originalColumn,
270+
-1,
271+
);
272+
currentGeneratedLine++;
265273
};
266274
readMappings(mappings, onMapping);
267-
for (; currentGeneratedLine <= lines.length; currentGeneratedLine++) {
275+
for (; currentGeneratedLine <= linesLength; currentGeneratedLine++) {
268276
onChunk(
269277
lines[currentGeneratedLine - 1],
270278
currentGeneratedLine,
@@ -276,10 +284,10 @@ const streamChunksOfSourceMapLinesFull = (
276284
);
277285
}
278286

279-
const lastLine = lines[lines.length - 1];
287+
const lastLine = lines[linesLength - 1];
280288
const lastNewLine = lastLine.endsWith("\n");
281289

282-
const finalLine = lastNewLine ? lines.length + 1 : lines.length;
290+
const finalLine = lastNewLine ? linesLength + 1 : linesLength;
283291
const finalColumn = lastNewLine ? 0 : lastLine.length;
284292

285293
return {

0 commit comments

Comments (0)