Skip to content

Commit 7db8867

Browse files
committed
test: expand coverage for sources, helpers, and package entry
Add targeted tests that exercise previously untested branches and edge cases: the abstract `Source` base class, `SizeOnlySource`'s `updateHash`, `CompatSource`'s optional delegates, `RawSource`'s type validation and Buffer-backed source path, `OriginalSource`'s `getName` and Buffer-backed map path, `ConcatSource`'s `buffer()` fallbacks and re-optimization paths, `PrefixSource`'s accessors and empty-prefix handling, `ReplaceSource`'s accessors, guards, hash stability, and edge cases, `CachedSource`'s lazy source accessor, hash flushing, map-only streamChunks, and Buffer round-tripping, and `SourceMapSource`'s string/buffer source-map variants and hash inclusion of `removeOriginalSource`. Add a dedicated helpers unit test file covering `getGeneratedSourceInfo`, `getSource`, `splitIntoLines`, `splitIntoPotentialTokens`, `readMappings`, `getFromStreamChunks`, `streamAndGetSourceAndMap`, and `stringBufferUtils`, including string-interning toggles and gap filling for non-sequential source/name indices.
1 parent 5a07869 commit 7db8867

12 files changed

Lines changed: 1111 additions & 0 deletions

test/CachedSource.js

Lines changed: 147 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -419,4 +419,151 @@ describe.each([
419419
expect(getHash(clone())).toBe(getHash(original));
420420
expect(calls).toBe(3);
421421
});
422+
423+
it("should expose originalLazy (function form) and original()", () => {
424+
const original = new RawSource("Hello World");
425+
const lazy = () => original;
426+
const source = new CachedSource(lazy);
427+
expect(source.originalLazy()).toBe(lazy);
428+
expect(source.original()).toBe(original);
429+
// After original() resolves the function, originalLazy returns the resolved source
430+
expect(source.originalLazy()).toBe(original);
431+
});
432+
433+
it("should compute size from cached buffer when _cachedSize is undefined", () => {
434+
const buffer = Buffer.from("Hello World");
435+
// Provide cachedData with buffer but no size
436+
const cachedSource = new CachedSource(new RawSource("Hello World"), {
437+
buffer,
438+
maps: new Map(),
439+
});
440+
expect(cachedSource.size()).toBe(buffer.length);
441+
expect(cachedSource.size()).toBe(buffer.length);
442+
});
443+
444+
it("should return null for missing map when cached entry is empty", () => {
445+
const cachedSource = new CachedSource(new RawSource("Hello World"), {
446+
buffer: Buffer.from("Hello World"),
447+
size: 11,
448+
maps: new Map([["{}", {}]]),
449+
});
450+
expect(cachedSource.map()).toBeNull();
451+
});
452+
453+
it("should flush accumulated hash strings when they exceed the threshold", () => {
454+
class StringyHashSource extends Source {
455+
source() {
456+
return "ignored";
457+
}
458+
459+
buffer() {
460+
return Buffer.from("ignored");
461+
}
462+
463+
size() {
464+
return 7;
465+
}
466+
467+
map() {
468+
return null;
469+
}
470+
471+
updateHash(hash) {
472+
for (let i = 0; i < 15000; i++) {
473+
hash.update(`chunk-${i}-`);
474+
}
475+
}
476+
}
477+
478+
const cachedSource = new CachedSource(new StringyHashSource());
479+
480+
const hashA = crypto.createHash("md5");
481+
cachedSource.updateHash(hashA);
482+
const digestA = hashA.digest("hex");
483+
484+
// When hashing again, the cached hash update is replayed directly
485+
const hashB = crypto.createHash("md5");
486+
cachedSource.updateHash(hashB);
487+
const digestB = hashB.digest("hex");
488+
489+
expect(digestA).toBe(digestB);
490+
});
491+
492+
it("should handle hash updates starting with a Buffer (no prior string to flush)", () => {
493+
class BufferFirstHashSource extends Source {
494+
source() {
495+
return "text";
496+
}
497+
498+
buffer() {
499+
return Buffer.from("text");
500+
}
501+
502+
size() {
503+
return 4;
504+
}
505+
506+
map() {
507+
return null;
508+
}
509+
510+
updateHash(hash) {
511+
// Start with a Buffer so the tracker "else" branch runs
512+
// with currentString === undefined, and also pass a long string
513+
// so the length-gate in the "string" branch is exercised.
514+
hash.update(Buffer.from("leading-buffer-"));
515+
hash.update("a".repeat(11000));
516+
hash.update("short-string");
517+
}
518+
}
519+
520+
const cachedSource = new CachedSource(new BufferFirstHashSource());
521+
const hashA = crypto.createHash("md5");
522+
cachedSource.updateHash(hashA);
523+
const digestA = hashA.digest("hex");
524+
525+
const hashB = crypto.createHash("md5");
526+
cachedSource.updateHash(hashB);
527+
const digestB = hashB.digest("hex");
528+
529+
expect(digestA).toBe(digestB);
530+
});
531+
532+
it("should allow streamChunks when cached map exists but source is not cached", () => {
533+
const original = new OriginalSource("Hello World", "file.js");
534+
const cachedSource = new CachedSource(original);
535+
536+
// Populate map cache only (no source/buffer cached yet)
537+
cachedSource.map({});
538+
539+
const chunks = [];
540+
cachedSource.streamChunks(
541+
{},
542+
(...args) => {
543+
chunks.push(args);
544+
},
545+
() => {},
546+
() => {},
547+
);
548+
expect(chunks.length).toBeGreaterThan(0);
549+
});
550+
551+
it("should round-trip CachedSource with a Buffer-backed source", () => {
552+
const buffer = Buffer.from(Array.from({ length: 64 }, (_, i) => i));
553+
const original = new RawSource(buffer);
554+
const source = new CachedSource(original);
555+
556+
// Populate _cachedSource with the Buffer
557+
source.source();
558+
source.size();
559+
560+
const cachedData = source.getCachedData();
561+
expect(cachedData.source).toBe(false);
562+
563+
// @ts-expect-error for tests
564+
const clone = new CachedSource(null, cachedData);
565+
expect(clone.source()).toEqual(source.source());
566+
expect(clone.buffer()).toEqual(source.buffer());
567+
expect(clone.size()).toEqual(source.size());
568+
});
422569
});

test/CompatSource.js

Lines changed: 103 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,4 +36,107 @@ describe("compatSource", () => {
3636
});
3737
expect(calledWith).toEqual([Buffer.from(CONTENT)]);
3838
});
39+
40+
it("should use buffer from source-like when provided", () => {
41+
const CONTENT = "Line1\n\nLine3\n";
42+
const buffer = Buffer.from(CONTENT);
43+
const source = CompatSource.from({
44+
source() {
45+
return CONTENT;
46+
},
47+
buffer() {
48+
return buffer;
49+
},
50+
});
51+
expect(source.buffer()).toBe(buffer);
52+
});
53+
54+
it("should use size from super when sourceLike doesn't define size", () => {
55+
const CONTENT = "Hello";
56+
const source = CompatSource.from({
57+
source() {
58+
return CONTENT;
59+
},
60+
});
61+
expect(source.size()).toBe(5);
62+
});
63+
64+
it("should call map from sourceLike when provided", () => {
65+
const map = {
66+
version: 3,
67+
sources: ["a.js"],
68+
names: [],
69+
mappings: "",
70+
file: "x",
71+
};
72+
const source = CompatSource.from({
73+
source() {
74+
return "content";
75+
},
76+
map() {
77+
return map;
78+
},
79+
updateHash(hash) {
80+
hash.update("custom");
81+
},
82+
});
83+
expect(source.map()).toBe(map);
84+
});
85+
86+
it("should call sourceAndMap from sourceLike when provided", () => {
87+
const map = {
88+
version: 3,
89+
sources: ["a.js"],
90+
names: [],
91+
mappings: "",
92+
file: "x",
93+
};
94+
const sourceAndMap = { source: "content", map };
95+
const source = CompatSource.from({
96+
source() {
97+
return "content";
98+
},
99+
sourceAndMap() {
100+
return sourceAndMap;
101+
},
102+
});
103+
expect(source.sourceAndMap()).toBe(sourceAndMap);
104+
});
105+
106+
it("should call updateHash from sourceLike when provided", () => {
107+
/** @type {(string | Buffer)[]} */
108+
const calledWith = [];
109+
const source = CompatSource.from({
110+
source() {
111+
return "content";
112+
},
113+
updateHash(hash) {
114+
hash.update("custom-hash");
115+
},
116+
});
117+
source.updateHash({
118+
// @ts-expect-error for tests
119+
update(value) {
120+
calledWith.push(value);
121+
},
122+
});
123+
expect(calledWith).toEqual(["custom-hash"]);
124+
});
125+
126+
it("should throw when map is defined but updateHash is not", () => {
127+
const source = CompatSource.from({
128+
source() {
129+
return "content";
130+
},
131+
map() {
132+
return null;
133+
},
134+
});
135+
expect(() => {
136+
source.updateHash({
137+
// @ts-expect-error for tests
138+
update() {},
139+
});
140+
}).toThrow(/'map' method must also provide an 'updateHash' method/);
141+
});
39142
});

test/ConcatSource.js

Lines changed: 83 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -212,6 +212,89 @@ describe("concatSource", () => {
212212
`);
213213
});
214214

215+
it("should concat a SourceLike child without a buffer() method that returns a Buffer", () => {
216+
const customBuffer = Buffer.from("custom-content");
217+
const customSource = {
218+
source() {
219+
return customBuffer;
220+
},
221+
size() {
222+
return customBuffer.length;
223+
},
224+
};
225+
const source = new ConcatSource(customSource, new RawSource("-after"));
226+
const result = source.buffer();
227+
expect(result).toEqual(
228+
Buffer.concat([customBuffer, Buffer.from("-after")]),
229+
);
230+
});
231+
232+
it("should concat a SourceLike child where source() returns a string (no buffer())", () => {
233+
const customSource = {
234+
source() {
235+
return "custom-content";
236+
},
237+
size() {
238+
return "custom-content".length;
239+
},
240+
};
241+
const source = new ConcatSource(customSource, new RawSource("-after"));
242+
const result = source.buffer();
243+
expect(result).toEqual(
244+
Buffer.concat([Buffer.from("custom-content"), Buffer.from("-after")]),
245+
);
246+
});
247+
248+
it("should optimize nested string-only ConcatSources across re-optimization", () => {
249+
// Create two ConcatSources that produce RawSource (kept in stringsAsRawSources)
250+
const c1 = new ConcatSource("a", "b");
251+
c1.source(); // triggers _optimize, putting its RawSource("ab") into stringsAsRawSources
252+
253+
const c2 = new ConcatSource("c", "d");
254+
c2.source(); // same, puts RawSource("cd") into stringsAsRawSources
255+
256+
const merged = new ConcatSource();
257+
merged.add(c1); // flatten c1's children
258+
merged.add("x");
259+
merged.add(c2);
260+
merged.add("y");
261+
merged.add(c1);
262+
expect(merged.source()).toBe("abxcdyab");
263+
});
264+
265+
it("should re-optimize when raw source followed by regular source", () => {
266+
const c1 = new ConcatSource("a", "b");
267+
c1.source(); // places RawSource into stringsAsRawSources
268+
const regular = new OriginalSource("Z", "z.js");
269+
const merged = new ConcatSource();
270+
merged.add(c1); // flatten
271+
merged.add(regular);
272+
expect(merged.source()).toBe("abZ");
273+
expect(merged.getChildren()).toHaveLength(2);
274+
});
275+
276+
it("should reflect empty ConcatSource", () => {
277+
const source = new ConcatSource();
278+
expect(source.source()).toBe("");
279+
expect(source.size()).toBe(0);
280+
expect(source.buffer()).toEqual(Buffer.alloc(0));
281+
});
282+
283+
it("should flatten nested ConcatSource via add()", () => {
284+
const inner = new ConcatSource("a", "b");
285+
const outer = new ConcatSource();
286+
outer.add(inner);
287+
outer.add("c");
288+
expect(outer.source()).toBe("abc");
289+
});
290+
291+
it("should optimize on first getChildren() call", () => {
292+
const source = new ConcatSource("a", "b", new RawSource("c"));
293+
// Call getChildren without first calling source()/size()/buffer()
294+
const children = source.getChildren();
295+
expect(children.length).toBeGreaterThan(0);
296+
});
297+
215298
it("should handle column mapping correctly with missing sources", () => {
216299
const source = new ConcatSource(
217300
"/*! For license information please see main.js.LICENSE.txt */",

test/OriginalSource.js

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -102,6 +102,25 @@ describe.each([
102102
expect(source.size()).toBe(256);
103103
});
104104

105+
it("should expose getName()", () => {
106+
const source = new OriginalSource("hi", "file.js");
107+
expect(source.getName()).toBe("file.js");
108+
});
109+
110+
it("should compute map correctly from buffer-backed source", () => {
111+
const content = "Line1\nLine2\n";
112+
const source = new OriginalSource(Buffer.from(content), "file.js");
113+
expect(source.sourceAndMap().source).toBe(content);
114+
});
115+
116+
it("should map correctly when constructed from a Buffer (streamChunks path)", () => {
117+
const content = "Line1\nLine2\n";
118+
const source = new OriginalSource(Buffer.from(content), "file.js");
119+
// Calling map() without calling source() first to ensure streamChunks
120+
// populates _value from the buffer
121+
expect(source.map({ columns: false })).not.toBeNull();
122+
});
123+
105124
it("should return the correct size for unicode files", () => {
106125
const source = new OriginalSource("😋", "file.js");
107126
expect(source.size()).toBe(4);

0 commit comments

Comments (0)