
Commit 312be63

mlpo authored and alecthomas committed

Allow several tests for the same lexer

1 parent dcfd826 · commit 312be63

2 files changed: +63 -33 lines changed


Diff for: lexers/README.md

+3 -0

@@ -3,6 +3,9 @@
 The tests in this directory feed a known input `testdata/<name>.actual` into the parser for `<name>` and check
 that its output matches `<name>.exported`.

+It is also possible to perform several tests on the same parser `<name>`, by placing known inputs `*.actual` into a
+directory `testdata/<name>/`.
+
 ## Running the tests

 Run the tests as normal:
Diff for: lexers/lexers_test.go

+60 -33

@@ -44,49 +44,76 @@ func BenchmarkGet(b *testing.B) {
     }
 }

+func FileTest(t *testing.T, lexer chroma.Lexer, actualFilename, expectedFilename string) {
+    t.Helper()
+    t.Run(lexer.Config().Name+"/"+actualFilename, func(t *testing.T) {
+        // Read and tokenise source text.
+        actualText, err := ioutil.ReadFile(actualFilename)
+        assert.NoError(t, err)
+        actual, err := chroma.Tokenise(lexer, nil, string(actualText))
+        assert.NoError(t, err)
+
+        if os.Getenv("RECORD") == "true" {
+            // Update the expected file with the generated output of this lexer
+            f, err := os.Create(expectedFilename)
+            defer f.Close() // nolint: gosec
+            assert.NoError(t, err)
+            assert.NoError(t, formatters.JSON.Format(f, nil, chroma.Literator(actual...)))
+        } else {
+            // Read expected JSON into token slice.
+            var expected []chroma.Token
+            r, err := os.Open(expectedFilename)
+            assert.NoError(t, err)
+            err = json.NewDecoder(r).Decode(&expected)
+            assert.NoError(t, err)
+
+            // Equal?
+            assert.Equal(t, expected, actual)
+        }
+    })
+}
+
 // Test source files are in the form <key>.<key> and validation data is in the form <key>.<key>.expected.
 func TestLexers(t *testing.T) {
     files, err := ioutil.ReadDir("testdata")
     assert.NoError(t, err)

     for _, file := range files {
-        ext := filepath.Ext(file.Name())[1:]
-        if ext != "actual" {
-            continue
-        }
+        if file.IsDir() {
+            dirname := filepath.Join("testdata", file.Name())
+            lexer := lexers.Get(file.Name())
+            assert.NotNil(t, lexer)

-        base := strings.Split(strings.TrimSuffix(file.Name(), filepath.Ext(file.Name())), ".")[0]
-        lexer := lexers.Get(base)
-        assert.NotNil(t, lexer)
+            subFiles, err := ioutil.ReadDir(dirname)
+            assert.NoError(t, err)

-        filename := filepath.Join("testdata", file.Name())
-        expectedFilename := strings.TrimSuffix(filename, filepath.Ext(filename)) + ".expected"
+            for _, subFile := range subFiles {
+                ext := filepath.Ext(subFile.Name())[1:]
+                if ext != "actual" {
+                    continue
+                }

-        lexer = chroma.Coalesce(lexer)
-        t.Run(lexer.Config().Name, func(t *testing.T) {
-            // Read and tokenise source text.
-            actualText, err := ioutil.ReadFile(filename)
-            assert.NoError(t, err)
-            actual, err := chroma.Tokenise(lexer, nil, string(actualText))
-            assert.NoError(t, err)
+                filename := filepath.Join(dirname, subFile.Name())
+                expectedFilename := strings.TrimSuffix(filename, filepath.Ext(filename)) + ".expected"

-            if os.Getenv("RECORD") == "true" {
-                // Update the expected file with the generated output of this lexer
-                f, err := os.Create(expectedFilename)
-                defer f.Close() // nolint: gosec
-                assert.NoError(t, err)
-                assert.NoError(t, formatters.JSON.Format(f, nil, chroma.Literator(actual...)))
-            } else {
-                // Read expected JSON into token slice.
-                var expected []chroma.Token
-                r, err := os.Open(expectedFilename)
-                assert.NoError(t, err)
-                err = json.NewDecoder(r).Decode(&expected)
-                assert.NoError(t, err)
-
-                // Equal?
-                assert.Equal(t, expected, actual)
+                lexer = chroma.Coalesce(lexer)
+                FileTest(t, lexer, filename, expectedFilename)
+            }
+        } else {
+            ext := filepath.Ext(file.Name())[1:]
+            if ext != "actual" {
+                continue
             }
-        })
+
+            base := strings.Split(strings.TrimSuffix(file.Name(), filepath.Ext(file.Name())), ".")[0]
+            lexer := lexers.Get(base)
+            assert.NotNil(t, lexer)
+
+            filename := filepath.Join("testdata", file.Name())
+            expectedFilename := strings.TrimSuffix(filename, filepath.Ext(filename)) + ".expected"
+
+            lexer = chroma.Coalesce(lexer)
+            FileTest(t, lexer, filename, expectedFilename)
+        }
     }
 }
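As a usage sketch, the extracted FileTest helper can now be called directly, which keeps the tokenise/record/compare logic in one place instead of duplicating it across the per-file and per-directory branches of TestLexers. The example below is hypothetical and not part of this commit: it assumes it sits in the same lexers_test.go (so the existing imports apply) and that the fixture files under testdata/go/ exist; running with RECORD=true would regenerate the .expected file instead of comparing against it.

```go
// Hypothetical example (not part of this commit): calling the new FileTest
// helper for a single fixture of the Go lexer.
func TestGoLexerSingleFixture(t *testing.T) {
    // Look up the lexer by name; Get returns nil for unknown names.
    lexer := lexers.Get("go")
    assert.NotNil(t, lexer)

    // Coalesce merges adjacent tokens of the same type, mirroring what
    // TestLexers does before comparing against the .expected JSON.
    lexer = chroma.Coalesce(lexer)

    // Assumed fixture paths: FileTest reads the .actual input, tokenises it,
    // and compares the result with (or, with RECORD=true, rewrites) the
    // .expected JSON file.
    FileTest(t, lexer, "testdata/go/hello.actual", "testdata/go/hello.expected")
}
```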
