linters: expands test suite for testing linter
bvobart committed Jun 14, 2021
1 parent cc19ce7 commit 384f39e
Showing 9 changed files with 218 additions and 14 deletions.
22 changes: 13 additions & 9 deletions linters/testing/linter.go
@@ -37,12 +37,14 @@ func (l *TestingLinter) Rules() []*api.Rule {
func (l *TestingLinter) LintProject(project api.Project) (api.Report, error) {
report := api.NewReport()

- // TODO: find and count the amount of test files to score RuleHasTests
+ l.ScoreRuleHasTests(&report, project)
l.ScoreRuleTestsPass(&report, project)

// TODO: implement the linting for RuleTestCoverage, which checks whether there is a Cobertura XML coverage report and analyses it for test coverage.
// TODO: check whether all test files are in tests folder.
// TODO: determine possible config options:
// - target amount of tests per file
// - target test coverage

return report, nil
}
@@ -70,13 +72,14 @@ func (l *TestingLinter) ScoreRuleTestsPass(report *api.Report, project api.Proje
return
}

- if !utils.FileExists(l.Config.Report) {
+ junitReportPath := path.Join(project.Dir, l.Config.Report)
+ if !utils.FileExists(junitReportPath) {
report.Scores[RuleTestsPass] = 0
report.Details[RuleTestsPass] = fmt.Sprintf("A test report was provided, namely `%s`, but this file could not be found. Please update the `testing.report` setting in your project's `mllint` configuration to fix the path to your project's test report.", l.Config.Report)
report.Details[RuleTestsPass] = fmt.Sprintf("A test report was provided, namely `%s`, but this file could not be found. Please update the `testing.report` setting in your project's `mllint` configuration to fix the path to your project's test report. Remember that this path must be relative to the root of your project directory.", l.Config.Report)
return
}

- suites, err := junit.IngestFile(l.Config.Report)
+ suites, err := junit.IngestFile(junitReportPath)
if err != nil {
report.Scores[RuleTestsPass] = 0
report.Details[RuleTestsPass] = fmt.Sprintf(`A test report file `+"`%s`"+` was provided and found, but there was an error parsing the JUnit XML contents:
@@ -97,20 +100,21 @@ Please make sure your test report file is a valid JUnit XML file. %s`, l.Config.
if totalTests == 0 {
report.Scores[RuleTestsPass] = 0
report.Details[RuleTestsPass] = fmt.Sprintf(`No tests were run, according to the provided test report file `+"`%s`"+`. Don't be shy, implement some tests!`, l.Config.Report)
return
}

score := 100 * float64(passedTests) / float64(totalTests)
report.Scores[RuleTestsPass] = score
if passedTests == totalTests {
report.Details[RuleTestsPass] = fmt.Sprintf("Congratulations, all of your project's %d tests passed!", totalTests)
report.Details[RuleTestsPass] = fmt.Sprintf("Congratulations, all **%d** tests in your project passed!", totalTests)
} else if passedTests == 0 {
report.Details[RuleTestsPass] = fmt.Sprintf("Oh no! What a shame... **None** of your project's %d tests passed! There must be something terribly wrong.", totalTests)
report.Details[RuleTestsPass] = fmt.Sprintf("Oh no! What a shame... **None** of the %d tests in your project passed! There must be something terribly wrong.", totalTests)
} else if score < 0.25 {
report.Details[RuleTestsPass] = fmt.Sprintf("Oh no! Only **%d** out of **%d** tests passed... That's less than a quarter of all tests in your project...", passedTests, totalTests)
report.Details[RuleTestsPass] = fmt.Sprintf("Oh no! Only **%d** out of **%d** tests in your project passed... That's less than a quarter of all your project's tests...", passedTests, totalTests)
} else if score > 0.75 {
report.Details[RuleTestsPass] = fmt.Sprintf("Hmm, only **%d** out of **%d** of your project's tests passed... That's over three quarter of all tests in your project, but it's not enough: all tests must pass. Good luck fixing the broken tests!", passedTests, totalTests)
report.Details[RuleTestsPass] = fmt.Sprintf("Hmm, only **%d** out of **%d** tests in your project passed... That's over three quarter of all tests in your project, but it's not enough: _all tests must pass_. Good luck fixing the broken tests!", passedTests, totalTests)
} else {
report.Details[RuleTestsPass] = fmt.Sprintf("Oh my, only **%d** out of **%d** of your project's tests passed... You can do better, right? Good luck fixing those tests!", passedTests, totalTests)
report.Details[RuleTestsPass] = fmt.Sprintf("Oh my, only **%d** out of **%d** tests in your project passed... You can do better, right? Good luck fixing those tests!", passedTests, totalTests)
}
}
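The hunk GitHub collapsed between `@@ -70,13 +72,14 @@` and `@@ -97,20 +100,21 @@` is where `totalTests` and `passedTests` are tallied from the ingested suites, so that logic isn't visible above. A minimal sketch of what that step plausibly looks like, assuming the `junit` package is github.com/joshdk/go-junit (the import path, the `[]junit.Suite` return type, and the `Totals` field names are assumptions, not confirmed by this diff):

```go
package main

import (
	"fmt"

	junit "github.com/joshdk/go-junit"
)

// countTests sums the totals of every ingested suite; a hypothetical
// stand-in for the tallying done in the collapsed hunk.
func countTests(suites []junit.Suite) (total, passed int) {
	for _, suite := range suites {
		total += suite.Totals.Tests
		passed += suite.Totals.Passed
	}
	return total, passed
}

func main() {
	// Parse one of the fixtures added in this commit.
	suites, err := junit.IngestFile("linters/testing/test-resources/junit-passed-half.xml")
	if err != nil {
		fmt.Println("error parsing JUnit XML:", err)
		return
	}
	total, passed := countTests(suites)
	// For junit-passed-half.xml this should report 2 out of 4, i.e. a score of 50.
	fmt.Printf("%d out of %d tests passed, score %.0f\n", passed, total, 100*float64(passed)/float64(total))
}
```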

115 changes: 111 additions & 4 deletions linters/testing/linter_test.go
@@ -20,7 +20,7 @@ func TestTestingLinter(t *stdtesting.T) {
{
Name: "NoTestsNoFiles",
Dir: ".",
- Expect: func(report api.Report, err error) {
+ Expect: func(t *stdtesting.T, report api.Report, err error) {
require.NoError(t, err)
require.EqualValues(t, 0, report.Scores[testing.RuleHasTests])
require.EqualValues(t, 0, report.Scores[testing.RuleTestsPass])
@@ -32,7 +32,7 @@ func TestTestingLinter(t *stdtesting.T) {
Name: "NoTestsSixteenFiles",
Dir: ".",
Options: testutils.NewOptions().UsePythonFiles(createPythonFilenames(16)),
- Expect: func(report api.Report, err error) {
+ Expect: func(t *stdtesting.T, report api.Report, err error) {
require.NoError(t, err)
require.EqualValues(t, 0, report.Scores[testing.RuleHasTests])
require.EqualValues(t, 0, report.Scores[testing.RuleTestsPass])
@@ -44,7 +44,7 @@ func TestTestingLinter(t *stdtesting.T) {
Name: "OneTestSixteenFiles",
Dir: ".",
Options: testutils.NewOptions().UsePythonFiles(createPythonFilenames(16).Concat(createPythonTestFilenames(1))),
- Expect: func(report api.Report, err error) {
+ Expect: func(t *stdtesting.T, report api.Report, err error) {
require.NoError(t, err)
require.EqualValues(t, 25, report.Scores[testing.RuleHasTests])
require.EqualValues(t, 0, report.Scores[testing.RuleTestsPass])
@@ -56,14 +56,121 @@ func TestTestingLinter(t *stdtesting.T) {
Name: "FourTestsSixteenFiles",
Dir: ".",
Options: testutils.NewOptions().UsePythonFiles(createPythonFilenames(16).Concat(createPythonTestFilenames(4))),
- Expect: func(report api.Report, err error) {
+ Expect: func(t *stdtesting.T, report api.Report, err error) {
require.NoError(t, err)
require.EqualValues(t, 100, report.Scores[testing.RuleHasTests])
require.EqualValues(t, 0, report.Scores[testing.RuleTestsPass])
// require.Equal(t, 0, report.Scores[testing.RuleTestsFolder])
// require.Equal(t, 0, report.Scores[testing.RuleTestCoverage])
},
},
{
Name: "FourTestsSixteenFiles/AllPassed",
Dir: "test-resources",
Options: testutils.NewOptions().UsePythonFiles(createPythonFilenames(16).Concat(createPythonTestFilenames(4))).
WithConfig(func() *config.Config {
c := config.Default()
c.Testing.Report = "junit-passed-all.xml"
c.Testing.Coverage = "" // TODO
return c
}()),
Expect: func(t *stdtesting.T, report api.Report, err error) {
require.NoError(t, err)
require.EqualValues(t, 100, report.Scores[testing.RuleHasTests])
require.EqualValues(t, 100, report.Scores[testing.RuleTestsPass])
require.Contains(t, report.Details[testing.RuleTestsPass], "all **4** tests in your project passed")
// require.Equal(t, 0, report.Scores[testing.RuleTestsFolder])
// require.Equal(t, 0, report.Scores[testing.RuleTestCoverage])
},
},
{
Name: "FourTestsSixteenFiles/AllFailed",
Dir: "test-resources",
Options: testutils.NewOptions().UsePythonFiles(createPythonFilenames(16).Concat(createPythonTestFilenames(4))).
WithConfig(func() *config.Config {
c := config.Default()
c.Testing.Report = "junit-failed-all.xml"
c.Testing.Coverage = "" // TODO
return c
}()),
Expect: func(t *stdtesting.T, report api.Report, err error) {
require.NoError(t, err)
require.EqualValues(t, 100, report.Scores[testing.RuleHasTests])
require.EqualValues(t, 0, report.Scores[testing.RuleTestsPass])
require.Contains(t, report.Details[testing.RuleTestsPass], "**None** of the 4 tests in your project passed")
// require.Equal(t, 0, report.Scores[testing.RuleTestsFolder])
// require.Equal(t, 0, report.Scores[testing.RuleTestCoverage])
},
},
{
Name: "FourTestsSixteenFiles/HalfPassed",
Dir: "test-resources",
Options: testutils.NewOptions().UsePythonFiles(createPythonFilenames(16).Concat(createPythonTestFilenames(4))).
WithConfig(func() *config.Config {
c := config.Default()
c.Testing.Report = "junit-passed-half.xml"
c.Testing.Coverage = "" // TODO
return c
}()),
Expect: func(t *stdtesting.T, report api.Report, err error) {
require.NoError(t, err)
require.EqualValues(t, 100, report.Scores[testing.RuleHasTests])
require.EqualValues(t, 50, report.Scores[testing.RuleTestsPass])
require.Contains(t, report.Details[testing.RuleTestsPass], "**2** out of **4** tests in your project passed")
// require.Equal(t, 0, report.Scores[testing.RuleTestsFolder])
// require.Equal(t, 0, report.Scores[testing.RuleTestCoverage])
},
},
{
Name: "UnfindableTestReport",
Dir: "test-resources",
Options: testutils.NewOptions().UsePythonFiles(createPythonFilenames(16).Concat(createPythonTestFilenames(4))).
WithConfig(func() *config.Config {
c := config.Default()
c.Testing.Report = "non-existant-file.xml"
return c
}()),
Expect: func(t *stdtesting.T, report api.Report, err error) {
require.NoError(t, err)
require.EqualValues(t, 100, report.Scores[testing.RuleHasTests])
require.EqualValues(t, 0, report.Scores[testing.RuleTestsPass])
require.Contains(t, report.Details[testing.RuleTestsPass], "`non-existant-file.xml`")
require.Contains(t, report.Details[testing.RuleTestsPass], "file could not be found")
require.Contains(t, report.Details[testing.RuleTestsPass], "update the `testing.report` setting")
},
},
{
Name: "MalformedTestReport",
Dir: "test-resources",
Options: testutils.NewOptions().UsePythonFiles(createPythonFilenames(16).Concat(createPythonTestFilenames(4))).
WithConfig(func() *config.Config {
c := config.Default()
c.Testing.Report = "junit-malformed.xml"
return c
}()),
Expect: func(t *stdtesting.T, report api.Report, err error) {
require.NoError(t, err)
require.EqualValues(t, 100, report.Scores[testing.RuleHasTests])
require.EqualValues(t, 0, report.Scores[testing.RuleTestsPass])
require.Contains(t, report.Details[testing.RuleTestsPass], "was provided and found, but there was an error parsing the JUnit XML contents")
},
},
{
Name: "EmptyTestReport",
Dir: "test-resources",
Options: testutils.NewOptions().UsePythonFiles(createPythonFilenames(16).Concat(createPythonTestFilenames(4))).
WithConfig(func() *config.Config {
c := config.Default()
c.Testing.Report = "junit-empty.xml"
return c
}()),
Expect: func(t *stdtesting.T, report api.Report, err error) {
require.NoError(t, err)
require.EqualValues(t, 100, report.Scores[testing.RuleHasTests])
require.EqualValues(t, 0, report.Scores[testing.RuleTestsPass])
require.Contains(t, report.Details[testing.RuleTestsPass], "No tests were run")
},
},
})
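These cases exercise the relative-path behaviour introduced in linter.go: `c.Testing.Report` names a file inside the project directory (`test-resources` here), and the linter joins it with `project.Dir` before reading it. In a real project the same setting comes from the user-facing mllint configuration; a sketch of what that might look like, assuming mllint's YAML config format (the `.mllint.yml` file name and exact nesting are assumptions inferred from the `testing.report` setting named in the linter's messages):

```yaml
# .mllint.yml (hypothetical example)
testing:
  report: reports/junit-report.xml  # path relative to the root of your project
```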

suite.DefaultOptions().WithConfig(config.Default())
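The `createPythonFilenames` and `createPythonTestFilenames` helpers, and the type they return, are defined outside this diff. A hypothetical reconstruction inferred purely from the call sites above (the `Filenames` type, its `Concat` method, and the exact name patterns are assumptions):

```go
package testing_test // hypothetical; the real helpers live alongside linter_test.go

import "fmt"

// Filenames is assumed to be a string-slice type with a Concat helper,
// matching call sites like createPythonFilenames(16).Concat(createPythonTestFilenames(4)).
type Filenames []string

// Concat returns a new slice with other's names appended to f's.
func (f Filenames) Concat(other Filenames) Filenames {
	return append(append(Filenames{}, f...), other...)
}

// createPythonFilenames generates n dummy Python source filenames.
func createPythonFilenames(n int) Filenames {
	names := make(Filenames, 0, n)
	for i := 1; i <= n; i++ {
		names = append(names, fmt.Sprintf("file%d.py", i))
	}
	return names
}

// createPythonTestFilenames generates n test filenames following the
// *_test.py naming convention that pytest's test discovery recognises,
// which is what RuleHasTests counts.
func createPythonTestFilenames(n int) Filenames {
	names := make(Filenames, 0, n)
	for i := 1; i <= n; i++ {
		names = append(names, fmt.Sprintf("file%d_test.py", i))
	}
	return names
}
```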
2 changes: 1 addition & 1 deletion linters/testing/rules.go
@@ -5,7 +5,7 @@ import "github.com/bvobart/mllint/api"
var RuleHasTests = api.Rule{
Name: "Project has automated tests",
Slug: "testing/has-tests",
Details: "TODO",
Details: "TODO, https://docs.pytest.org/en/6.2.x/goodpractices.html#conventions-for-python-test-discovery",
Weight: 1,
}

3 changes: 3 additions & 0 deletions linters/testing/test-resources/junit-empty.xml
@@ -0,0 +1,3 @@
<?xml version="1.0" encoding="utf-8"?>
<testsuites>
</testsuites>
41 changes: 41 additions & 0 deletions linters/testing/test-resources/junit-failed-all.xml
@@ -0,0 +1,41 @@
<?xml version="1.0" encoding="utf-8"?>
<testsuites>
<testsuite name="pytest" errors="0" failures="0" skipped="0" tests="4" time="0.091" timestamp="2021-06-13T17:04:08.088328" hostname="mllint">

<testcase classname="tests.file1_test" name="test_parse_post" time="0.001">
<failure message="assert 1 == 2">def test_parse_post() -&gt; None:
"Tests the process_post method"
&gt; assert 1==2
E assert 1 == 2

tests/prepare_test.py:29: AssertionError</failure>
</testcase>

<testcase classname="tests.file2_test" name="test_process_post" time="0.001">
<failure message="assert 1 == 2">def test_process_post() -&gt; None:
"Tests the process_post method"
&gt; assert 1==2
E assert 1 == 2

tests/prepare_test.py:29: AssertionError</failure>
</testcase>

<testcase classname="tests.file3_test" name="test_parse_post" time="0.001">
<failure message="assert 1 == 2">def test_parse_post() -&gt; None:
"Tests the process_post method"
&gt; assert 1==2
E assert 1 == 2

tests/prepare_test.py:29: AssertionError</failure>
</testcase>

<testcase classname="tests.file4_test" name="test_process_post" time="0.001">
<failure message="assert 1 == 2">def test_process_post() -&gt; None:
"Tests the process_post method"
&gt; assert 1==2
E assert 1 == 2

tests/prepare_test.py:29: AssertionError</failure>
</testcase>
</testsuite>
</testsuites>
4 changes: 4 additions & 0 deletions linters/testing/test-resources/junit-malformed.xml
@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<testsuites>
<testsuite name="pytest" errors="0" failures="0" skipped="0" tests="4" time="0.091" timestamp="2021-06-13T17:04:08.088328" hostname="mllint">
<testcase classname="tests.file1_test" name="test_parse_post" time="0.001" />
9 changes: 9 additions & 0 deletions linters/testing/test-resources/junit-passed-all.xml
@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<testsuites>
<testsuite name="pytest" errors="0" failures="0" skipped="0" tests="4" time="0.091" timestamp="2021-06-13T17:04:08.088328" hostname="mllint">
<testcase classname="tests.file1_test" name="test_parse_post" time="0.001" />
<testcase classname="tests.file2_test" name="test_process_post" time="0.001" />
<testcase classname="tests.file3_test" name="test_parse_post" time="0.001" />
<testcase classname="tests.file4_test" name="test_process_post" time="0.001" />
</testsuite>
</testsuites>
27 changes: 27 additions & 0 deletions linters/testing/test-resources/junit-passed-half.xml
@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="utf-8"?>
<testsuites>
<testsuite name="pytest" errors="0" failures="1" skipped="0" tests="2" time="0.117" timestamp="2021-06-14T22:12:43.078881" hostname="mllint">
<testcase classname="tests.prepare_test" name="test_parse_post" time="0.001" />

<testcase classname="tests.prepare_test" name="test_process_post" time="0.001">
<failure message="assert 1 == 2">def test_process_post() -&gt; None:
"Tests the process_post method"
&gt; assert 1==2
E assert 1 == 2

tests/prepare_test.py:29: AssertionError</failure>
</testcase>

<testcase classname="tests.prepare_test" name="test_parse_post" time="0.001" />

<testcase classname="tests.prepare_test" name="test_process_post" time="0.001">
<failure message="assert 1 == 2">def test_process_post() -&gt; None:
"Tests the process_post method"
&gt; assert 1==2
E assert 1 == 2

tests/prepare_test.py:29: AssertionError</failure>
</testcase>

</testsuite>
</testsuites>
9 changes: 9 additions & 0 deletions linters/testing/test-resources/junit-report.xml
@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<testsuites>
<testsuite name="pytest" errors="0" failures="0" skipped="0" tests="4" time="0.091" timestamp="2021-06-13T17:04:08.088328" hostname="mllint">
<testcase classname="tests.file1_test" name="test_parse_post" time="0.001" />
<testcase classname="tests.file2_test" name="test_process_post" time="0.001" />
<testcase classname="tests.file3_test" name="test_parse_post" time="0.001" />
<testcase classname="tests.file4_test" name="test_process_post" time="0.001" />
</testsuite>
</testsuites>
