forked from codesandbox/codesandbox-client
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: tokenization.test.ts
More file actions
120 lines (100 loc) · 3.55 KB
/
tokenization.test.ts
File metadata and controls
120 lines (100 loc) · 3.55 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
/*---------------------------------------------------------
* Copyright (C) Microsoft Corporation. All rights reserved.
*--------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
import * as assert from 'assert';
import { Registry, IGrammar, RegistryOptions, StackElement, parseRawGrammar, Thenable } from '../main';
import { IOnigLib, IRawGrammar } from '../types';
import { getOnigasm, getOniguruma } from '../onigLibs';
// Absolute path to the repository root (two directory levels above the compiled test file).
const REPO_ROOT = path.join(__dirname, '../../');
/**
 * Reads a JSON tokenization suite from `testLocation` and registers one mocha
 * test per suite entry and per onig backend (onigasm and oniguruma).
 * Each test parses the entry's grammars, builds a Registry over them, then
 * tokenizes the entry's lines and compares the produced tokens against the
 * expected ones.
 */
function assertTokenizationSuite(testLocation: string): void {

	interface IRawTest {
		desc: string;
		grammars: string[];
		grammarPath?: string;
		grammarScopeName?: string;
		grammarInjections?: string[];
		lines: IRawTestLine[];
		skipOnigasm: boolean;
	}
	interface IRawTestLine {
		line: string;
		tokens: IRawToken[];
	}
	interface IRawToken {
		value: string;
		scopes: string[];
	}

	const tests: IRawTest[] = JSON.parse(fs.readFileSync(testLocation).toString());

	for (const test of tests) {
		// Entries flagged skipOnigasm are registered as pending for the onigasm backend.
		const registerOnigasm = test.skipOnigasm ? it.skip : it;
		registerOnigasm(test.desc + '-onigasm', () => performTest(test, getOnigasm()));
		it(test.desc + '-oniguruma', () => performTest(test, getOniguruma()));
	}

	async function performTest(test: IRawTest, onigLib: Thenable<IOnigLib>): Promise<void> {
		// Parse every grammar listed by the entry and index it by scope name.
		let grammarScopeName = test.grammarScopeName;
		const grammarsByScope: { [scope: string]: IRawGrammar } = {};
		for (const grammarPath of test.grammars) {
			const content = fs.readFileSync(path.join(path.dirname(testLocation), grammarPath)).toString();
			const rawGrammar = parseRawGrammar(content, grammarPath);
			grammarsByScope[rawGrammar.scopeName] = rawGrammar;
			// No explicit scope name in the entry: take it from the main grammar file.
			if (!grammarScopeName && grammarPath === test.grammarPath) {
				grammarScopeName = rawGrammar.scopeName;
			}
		}

		const locator: RegistryOptions = {
			loadGrammar: (scopeName: string) => Promise.resolve(grammarsByScope[scopeName]),
			getInjections: (scopeName: string) => {
				// Injections only apply to the grammar under test.
				if (scopeName === grammarScopeName) {
					return test.grammarInjections;
				}
			},
			getOnigLib: () => onigLib
		};

		const registry = new Registry(locator);
		const grammar: IGrammar = await registry.loadGrammar(grammarScopeName);
		if (!grammar) {
			throw new Error('I HAVE NO GRAMMAR FOR TEST');
		}

		// Tokenize line by line, threading the rule stack from one line into the next.
		let prevState: StackElement = null;
		for (const line of test.lines) {
			prevState = assertLineTokenization(grammar, line, prevState);
		}
	}

	function assertLineTokenization(grammar: IGrammar, testCase: IRawTestLine, prevState: StackElement): StackElement {
		const result = grammar.tokenizeLine(testCase.line, prevState);

		// Rebuild each actual token's text from its [startIndex, endIndex) range.
		const actualTokens: IRawToken[] = result.tokens.map((token) => ({
			value: testCase.line.substring(token.startIndex, token.endIndex),
			scopes: token.scopes
		}));

		// TODO@Alex: fix tests instead of working around
		if (testCase.line.length > 0) {
			// Remove empty tokens...
			testCase.tokens = testCase.tokens.filter((token) => token.value.length > 0);
		}

		assert.deepEqual(actualTokens, testCase.tokens, 'Tokenizing line ' + testCase.line);

		return result.ruleStack;
	}
}
// Registers the first-mate tokenization suite from test-cases/first-mate/tests.json.
describe('Tokenization /first-mate/', () => {
assertTokenizationSuite(path.join(REPO_ROOT, 'test-cases/first-mate/tests.json'));
});
// Registers the suite1 tokenization suites: the main tests plus the while-rule tests.
describe('Tokenization /suite1/', () => {
assertTokenizationSuite(path.join(REPO_ROOT, 'test-cases/suite1/tests.json'));
assertTokenizationSuite(path.join(REPO_ROOT, 'test-cases/suite1/whileTests.json'));
});