// forked from inikulin/parse5
// index.test.ts (51 lines, 43 loc, 1.64 KB)
import * as assert from 'node:assert';
import { fileURLToPath } from 'node:url';

import { Tokenizer } from 'parse5';
import { generateTokenizationTests } from 'parse5-test-utils/utils/generate-tokenization-tests.js';
// Location of the shared html5lib-tests tokenizer fixtures, resolved
// relative to this module.
const dataPath = new URL('../../../../test/data/html5lib-tests/tokenizer', import.meta.url);

// Enable location info so the generated tests can assert token positions.
const tokenizerOpts = {
    sourceCodeLocationInfo: true,
};

// Use `fileURLToPath` rather than `dataPath.pathname`: a URL's pathname is not
// a valid filesystem path on Windows (it keeps a leading slash, e.g. '/C:/...')
// and leaves percent-encoded characters in place.
generateTokenizationTests('Tokenizer', fileURLToPath(dataPath), (handler) => new Tokenizer(tokenizerOpts, handler));
/**
 * Shared do-nothing token handler for callbacks whose tokens the tests
 * below make no assertions about.
 */
const noop = (): void => {
    // Intentionally empty.
};
describe('Tokenizer methods', () => {
    it('should pause and resume', () => {
        // Records the order in which the callbacks below fire; each handler
        // asserts its expected position in the sequence.
        let count = 0;
        const tokenizer = new Tokenizer(tokenizerOpts, {
            onComment(t): void {
                assert.strictEqual(t.data, 'INIT');
                assert.strictEqual(count++, 0);
                // Pause mid-stream; the chunk written next must be buffered
                // and only produce tokens after `resume()` is called (the
                // count assertions below verify that ordering).
                tokenizer.pause();
                tokenizer.write('<!doctype foo>', false);
            },
            onDoctype(t): void {
                assert.strictEqual(t.name, 'foo');
                // count === 2: this fires only after the synchronous code at
                // the bottom of the test has run and called `resume()`.
                assert.strictEqual(count++, 2);
                // Calling `resume()` while the tokenizer is running must throw.
                expect(() => tokenizer.resume()).toThrow('Parser was already resumed');
                tokenizer.write('<next>', true);
            },
            onStartTag(t): void {
                assert.strictEqual(count++, 3);
                assert.strictEqual(t.tagName, 'next');
            },
            // Tokens the test makes no assertions about.
            onEndTag: noop,
            onEof: noop,
            onCharacter: noop,
            onNullCharacter: noop,
            onWhitespaceCharacter: noop,
        });
        tokenizer.write('<!--INIT-->', false);
        // count === 1: `onComment` paused the tokenizer, so control returns
        // here before the buffered doctype chunk is processed.
        assert.strictEqual(count++, 1);
        expect(tokenizer).toHaveProperty('paused', true);
        tokenizer.resume();
    });
});