diff --git a/README.md b/README.md
index eced6ec..d1d472e 100644
--- a/README.md
+++ b/README.md
@@ -67,6 +67,7 @@ Reporter options should also be strings exception for suiteNameTemplate, classNa
 | `JEST_JUNIT_ADD_FILE_ATTRIBUTE` | `addFileAttribute` | Add file attribute to the output. This config is primarily for Circle CI. This setting provides richer details but may break on other CI platforms. Must be a string. | `"false"` | N/A
 | `JEST_JUNIT_INCLUDE_CONSOLE_OUTPUT` | `includeConsoleOutput` | Adds console output to any testSuite that generates stdout during a test run. | `false` | N/A
 | `JEST_JUNIT_INCLUDE_SHORT_CONSOLE_OUTPUT` | `includeShortConsoleOutput` | Adds short console output (only message value) to any testSuite that generates stdout during a test run. | `false` | N/A
+| `JEST_JUNIT_REPORT_TEST_SUITE_ERRORS` | `reportTestSuiteErrors` | Reports test suites that failed to execute altogether as `error`. _Note:_ since the suite name cannot be determined from files that fail to load, it defaults to the file path. | `false` | N/A
 | `JEST_USE_PATH_FOR_SUITE_NAME` | `usePathForSuiteName` | **DEPRECATED. Use `suiteNameTemplate` instead.** Use file path as the `name` attribute of `<testsuite>` | `"false"` | N/A
diff --git a/__mocks__/empty-suite.json b/__mocks__/empty-suite.json
new file mode 100644
index 0000000..87c5168
--- /dev/null
+++ b/__mocks__/empty-suite.json
@@ -0,0 +1,65 @@
+{
+  "numFailedTestSuites": 1,
+  "numFailedTests": 0,
+  "numPassedTestSuites": 0,
+  "numPassedTests": 0,
+  "numPendingTestSuites": 0,
+  "numPendingTests": 0,
+  "numRuntimeErrorTestSuites": 1,
+  "numTodoTests": 0,
+  "numTotalTestSuites": 1,
+  "numTotalTests": 0,
+  "openHandles": [],
+  "snapshot": {
+    "added": 0,
+    "didUpdate": false,
+    "failure": false,
+    "filesAdded": 0,
+    "filesRemoved": 0,
+    "filesRemovedList": [],
+    "filesUnmatched": 0,
+    "filesUpdated": 0,
+    "matched": 0,
+    "total": 0,
+    "unchecked": 0,
+    "uncheckedKeysByFile": [],
+    "unmatched": 0,
+    "updated": 0
+  },
+  "startTime": 1601808216222,
+  "success": false,
+  "testResults": [
+    {
+      "failureMessage": " \u001b[1m● \u001b[22mTest suite failed to run\n\n Your test suite must contain at least one test.\n\n \u001b[2mat onResult (\u001b[22mnode_modules/@jest/core/build/TestScheduler.js\u001b[2m:175:18)\u001b[22m\n \u001b[2mat testRunner.on (\u001b[22mnode_modules/@jest/core/build/TestScheduler.js\u001b[2m:304:17)\u001b[22m\n \u001b[2mat Promise.all.staticListeners.map (\u001b[22mnode_modules/emittery/index.js\u001b[2m:260:13)\u001b[22m\n at Array.map ()\n \u001b[2mat Emittery.Typed.emit (\u001b[22mnode_modules/emittery/index.js\u001b[2m:258:23)\u001b[22m\n",
+      "leaks": false,
+      "numFailingTests": 0,
+      "numPassingTests": 0,
+      "numPendingTests": 0,
+      "numTodoTests": 0,
+      "openHandles": [],
+      "perfStats": {
+        "end": 0,
+        "runtime": 0,
+        "slow": false,
+        "start": 0
+      },
+      "skipped": false,
+      "snapshot": {
+        "added": 0,
+        "fileDeleted": false,
+        "matched": 0,
+        "unchecked": 0,
+        "uncheckedKeys": [],
+        "unmatched": 0,
+        "updated": 0
+      },
+      "testExecError": {
+        "message": "Your test suite must contain at least one test.",
+        "stack": "Error: Your test suite must contain at least one test.\n at onResult ../mpl-modules/test/docker/node_modules/@jest/core/build/TestScheduler.js:175:18)\n at testRunner.on ../mpl-modules/test/docker/node_modules/@jest/core/build/TestScheduler.js:304:17)\n at Promise.all.staticListeners.map ../mpl-modules/test/docker/node_modules/emittery/index.js:260:13)\n at Array.map ()\n at Emittery.Typed.emit ../mpl-modules/test/docker/node_modules/emittery/index.js:258:23)"
+      },
+      "testFilePath": "/path/to/spec/test.spec.ts",
+      "testResults": []
+    }
+  ],
+  "wasInterrupted": false
+}
\ No newline at end of file
diff --git a/__mocks__/failing-compilation.json b/__mocks__/failing-compilation.json
new file mode 100644
index 0000000..614bd77
--- /dev/null
+++ b/__mocks__/failing-compilation.json
@@ -0,0 +1,102 @@
+{
+  "numFailedTestSuites": 1,
+  "numFailedTests": 0,
+  "numPassedTestSuites": 0,
+  "numPassedTests": 0,
+  "numPendingTestSuites": 0,
+  "numPendingTests": 0,
+  "numRuntimeErrorTestSuites": 1,
+  "numTodoTests": 0,
+  "numTotalTestSuites": 1,
+  "numTotalTests": 0,
+  "openHandles": [],
+  "snapshot": {
+    "added": 0,
+    "didUpdate": false,
+    "failure": false,
+    "filesAdded": 0,
+    "filesRemoved": 0,
+    "filesRemovedList": [],
+    "filesUnmatched": 0,
+    "filesUpdated": 0,
+    "matched": 0,
+    "total": 0,
+    "unchecked": 0,
+    "uncheckedKeysByFile": [],
+    "unmatched": 0,
+    "updated": 0
+  },
+  "startTime": 1489712747092,
+  "success": false,
+  "testResults": [
+    {
+      "displayName": "",
+      "failureMessage": " \u001b[1m● \u001b[22mTest suite failed to run\n\n TypeScript diagnostics (customize using `[jest-config].globals.ts-jest.diagnostics` option):\n \u001b[96mspec/test.spec.ts\u001b[0m:\u001b[93m10\u001b[0m:\u001b[93m35\u001b[0m - \u001b[91merror\u001b[0m\u001b[90m TS2339: \u001b[0mProperty 'hello' does not exist on type 'HelloScreamer'.\n\n \u001b[7m10\u001b[0m const screamed = screamer.hello();\n \u001b[7m \u001b[0m \u001b[91m ~~~~~\u001b[0m\n",
+      "leaks": false,
+      "numFailingTests": 0,
+      "numPassingTests": 0,
+      "numPendingTests": 0,
+      "numTodoTests": 0,
+      "openHandles": [],
+      "perfStats": {
+        "end": 1499904221109,
+        "start": 1499904215586
+      },
+      "skipped": false,
+      "snapshot": {
+        "added": 0,
+        "fileDeleted": false,
+        "matched": 0,
+        "unchecked": 0,
+        "unmatched": 0,
+        "updated": 0
+      },
+      "sourceMaps": {},
+      "testExecError": "spec/test.spec.ts:10:35 - error TS2339: Property 'hello' does not exist on type 'HelloScreamer\n\n 10 const screamed = screamer.hello();\n~~~~~",
+      "testFilePath": "/path/to/spec/test.spec.ts",
+      "testResults": [],
+      "coverage": {
+      }
+    },
+    {
+      "console": [],
+      "failureMessage": null,
+      "numFailingTests": 0,
+      "numPassingTests": 1,
+      "numPendingTests": 0,
+      "perfStats": {
+        "end": 1518274351347,
+        "start": 1518274351274
+      },
+      "snapshot": {
+        "added": 0,
+        "fileDeleted": false,
+        "matched": 0,
+        "unchecked": 0,
+        "unmatched": 0,
+        "updated": 0,
+        "uncheckedKeys": []
+      },
+      "testFilePath": "/path/to/project2/__tests__/test2.test.js",
+      "testResults": [
+        {
+          "ancestorTitles": [
+            "another thing"
+          ],
+          "duration": 1,
+          "failureMessages": [],
+          "fullName": "another thing should foo",
+          "location": null,
+          "numPassingAsserts": 0,
+          "status": "passed",
+          "title": "should foo"
+        }
+      ],
+      "sourceMaps": {},
+      "skipped": false,
+      "displayName": "project2",
+      "leaks": false
+    }
+  ],
+  "wasInterrupted": false
+}
\ No newline at end of file
diff --git a/__mocks__/failing-import.json b/__mocks__/failing-import.json
new file mode 100644
index 0000000..3b5cfd4
--- /dev/null
+++ b/__mocks__/failing-import.json
@@ -0,0 +1,63 @@
+{
+  "numFailedTestSuites": 1,
+  "numFailedTests": 0,
+  "numPassedTestSuites": 1,
+  "numPassedTests": 1,
+  "numPendingTestSuites": 0,
+  "numPendingTests": 0,
+  "numRuntimeErrorTestSuites": 1,
+  "numTodoTests": 0,
+  "numTotalTestSuites": 2,
+  "numTotalTests": 1,
+  "openHandles": [],
+  "snapshot": {
+    "added": 0,
+    "didUpdate": false,
+    "failure": false,
+    "filesAdded": 0,
+    "filesRemoved": 0,
+    "filesRemovedList": [],
+    "filesUnmatched": 0,
+    "filesUpdated": 0,
+    "matched": 0,
+    "total": 0,
+    "unchecked": 0,
+    "uncheckedKeysByFile": [],
+    "unmatched": 0,
+    "updated": 0
+  },
+  "startTime": 1601544556519,
+  "success": false,
+  "testResults": [
+    {
+      "displayName": "",
+      "failureMessage": " \u001b[1m● \u001b[22mTest suite failed to run\n\n Cannot find module './mult' from 'mul.test.js'\n\n \u001b[0m\u001b[31m\u001b[1m>\u001b[22m\u001b[39m\u001b[90m 1 | \u001b[39m\u001b[36mconst\u001b[39m mul \u001b[33m=\u001b[39m require(\u001b[32m'./mult'\u001b[39m)\u001b[33m;\u001b[39m\u001b[0m\n \u001b[0m \u001b[90m | \u001b[39m\u001b[31m\u001b[1m^\u001b[22m\u001b[39m\u001b[0m\n \u001b[0m \u001b[90m 2 | \u001b[39m\u001b[0m\n \u001b[0m \u001b[90m 3 | \u001b[39mtest(\u001b[32m'multplies 2 * 3 to equal 6'\u001b[39m\u001b[33m,\u001b[39m () \u001b[33m=>\u001b[39m {\u001b[0m\n \u001b[0m \u001b[90m 4 | \u001b[39m expect(mul(\u001b[35m2\u001b[39m\u001b[33m,\u001b[39m \u001b[35m3\u001b[39m))\u001b[33m.\u001b[39mtoBe(\u001b[35m6\u001b[39m)\u001b[33m;\u001b[39m\u001b[0m\n\n \u001b[2mat Resolver.resolveModule (\u001b[22mnode_modules/jest-resolve/build/index.js\u001b[2m:259:17)\u001b[22m\n \u001b[2mat Object. (\u001b[22m\u001b[0m\u001b[36msrc/mul.test.js\u001b[39m\u001b[0m\u001b[2m:1:1)\u001b[22m\n",
+      "leaks": false,
+      "numFailingTests": 0,
+      "numPassingTests": 0,
+      "numPendingTests": 0,
+      "numTodoTests": 0,
+      "openHandles": [],
+      "perfStats": {
+        "end": 0,
+        "start": 0
+      },
+      "skipped": false,
+      "snapshot": {
+        "added": 0,
+        "fileDeleted": false,
+        "matched": 0,
+        "unchecked": 0,
+        "uncheckedKeys": [],
+        "unmatched": 0,
+        "updated": 0
+      },
+      "testExecError": {
+        "code": "MODULE_NOT_FOUND"
+      },
+      "testFilePath": "/path/to/spec/test.spec.ts",
+      "testResults": []
+    }
+  ],
+  "wasInterrupted": false
+}
\ No newline at end of file
diff --git a/__tests__/__snapshots__/buildJsonResults.test.js.snap b/__tests__/__snapshots__/buildJsonResults.test.js.snap
index 9eb31b9..95937c4 100644
--- a/__tests__/__snapshots__/buildJsonResults.test.js.snap
+++ b/__tests__/__snapshots__/buildJsonResults.test.js.snap
@@ -5,6 +5,7 @@ Object {
   "testsuites": Array [
     Object {
       "_attr": Object {
+        "errors": 0,
         "failures": 0,
         "name": "jest tests",
         "tests": 2,
@@ -41,7 +42,7 @@ Object {
         Object {
           "_attr": Object {
             "classname": "a thing should foo",
-            "name": "project1-foo",
+            "name": "project1-bar",
             "time": 0.003,
           },
         },
@@ -79,7 +80,7 @@ Object {
         Object {
           "_attr": Object {
             "classname": "another thing should foo",
-            "name": "project2-foo",
+            "name": "project2-bar",
             "time": 0.001,
          },
        },
diff --git a/__tests__/buildJsonResults.test.js b/__tests__/buildJsonResults.test.js
index 32880dd..96a54ba 100644
--- a/__tests__/buildJsonResults.test.js
+++ b/__tests__/buildJsonResults.test.js
@@ -134,6 +134,72 @@ describe('buildJsonResults', () => {
       .toBe('function called with vars: filepath, filename, suitename, classname, title, displayName');
   });
 
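+  // These suites exercise reportTestSuiteErrors. testsuites[0] holds the
+  // aggregate counts; a suite that failed to run is emitted with the relative
+  // file path as its testcase name, the fixed classname 'Test suite failed to
+  // run', and an <error> child carrying the failure message.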
"filesRemoved": 0, + "filesRemovedList": [], + "filesUnmatched": 0, + "filesUpdated": 0, + "matched": 0, + "total": 0, + "unchecked": 0, + "uncheckedKeysByFile": [], + "unmatched": 0, + "updated": 0 + }, + "startTime": 1601544556519, + "success": false, + "testResults": [ + { + "displayName": "", + "failureMessage": " \u001b[1m● \u001b[22mTest suite failed to run\n\n Cannot find module './mult' from 'mul.test.js'\n\n \u001b[0m\u001b[31m\u001b[1m>\u001b[22m\u001b[39m\u001b[90m 1 | \u001b[39m\u001b[36mconst\u001b[39m mul \u001b[33m=\u001b[39m require(\u001b[32m'./mult'\u001b[39m)\u001b[33m;\u001b[39m\u001b[0m\n \u001b[0m \u001b[90m | \u001b[39m\u001b[31m\u001b[1m^\u001b[22m\u001b[39m\u001b[0m\n \u001b[0m \u001b[90m 2 | \u001b[39m\u001b[0m\n \u001b[0m \u001b[90m 3 | \u001b[39mtest(\u001b[32m'multplies 2 * 3 to equal 6'\u001b[39m\u001b[33m,\u001b[39m () \u001b[33m=>\u001b[39m {\u001b[0m\n \u001b[0m \u001b[90m 4 | \u001b[39m expect(mul(\u001b[35m2\u001b[39m\u001b[33m,\u001b[39m \u001b[35m3\u001b[39m))\u001b[33m.\u001b[39mtoBe(\u001b[35m6\u001b[39m)\u001b[33m;\u001b[39m\u001b[0m\n\n \u001b[2mat Resolver.resolveModule (\u001b[22mnode_modules/jest-resolve/build/index.js\u001b[2m:259:17)\u001b[22m\n \u001b[2mat Object. (\u001b[22m\u001b[0m\u001b[36msrc/mul.test.js\u001b[39m\u001b[0m\u001b[2m:1:1)\u001b[22m\n", + "leaks": false, + "numFailingTests": 0, + "numPassingTests": 0, + "numPendingTests": 0, + "numTodoTests": 0, + "openHandles": [], + "perfStats": { + "end": 0, + "start": 0 + }, + "skipped": false, + "snapshot": { + "added": 0, + "fileDeleted": false, + "matched": 0, + "unchecked": 0, + "uncheckedKeys": [], + "unmatched": 0, + "updated": 0 + }, + "testExecError": { + "code": "MODULE_NOT_FOUND" + }, + "testFilePath": "/path/to/spec/test.spec.ts", + "testResults": [] + } + ], + "wasInterrupted": false +} \ No newline at end of file diff --git a/__tests__/__snapshots__/buildJsonResults.test.js.snap b/__tests__/__snapshots__/buildJsonResults.test.js.snap index 9eb31b9..95937c4 100644 --- a/__tests__/__snapshots__/buildJsonResults.test.js.snap +++ b/__tests__/__snapshots__/buildJsonResults.test.js.snap @@ -5,6 +5,7 @@ Object { "testsuites": Array [ Object { "_attr": Object { + "errors": 0, "failures": 0, "name": "jest tests", "tests": 2, @@ -41,7 +42,7 @@ Object { Object { "_attr": Object { "classname": "a thing should foo", - "name": "project1-foo", + "name": "project1-bar", "time": 0.003, }, }, @@ -79,7 +80,7 @@ Object { Object { "_attr": Object { "classname": "another thing should foo", - "name": "project2-foo", + "name": "project2-bar", "time": 0.001, }, }, diff --git a/__tests__/buildJsonResults.test.js b/__tests__/buildJsonResults.test.js index 32880dd..96a54ba 100644 --- a/__tests__/buildJsonResults.test.js +++ b/__tests__/buildJsonResults.test.js @@ -134,6 +134,72 @@ describe('buildJsonResults', () => { .toBe('function called with vars: filepath, filename, suitename, classname, title, displayName'); }); + it('should report no results as error', () => { + const failingTestsReport = require('../__mocks__/failing-compilation.json'); + + jsonResults = buildJsonResults(failingTestsReport, '/path/to/test', + Object.assign({}, constants.DEFAULT_OPTIONS, { + reportTestSuiteErrors: "true" + })); + + const totals = jsonResults.testsuites[0]._attr; + expect(totals.tests).toEqual(1); + expect(totals.errors).toEqual(1); + expect(totals.failures).toEqual(0); + + const suiteResult = jsonResults.testsuites[1].testsuite[0]._attr; + expect(suiteResult.name).toEqual('../spec/test.spec.ts') + 
+const addErrorTestResult = function (suite) {
+  suite.testResults.push({
+    "ancestorTitles": [],
+    "duration": 0,
+    "failureMessages": [
+      suite.failureMessage
+    ],
+    "numPassingAsserts": 0,
+    "status": "error"
+  });
+}
+
 module.exports = function (report, appDirectory, options) {
   // Check if there is a junitProperties.js (or whatever they called it)
   const junitSuitePropertiesFilePath = path.join(process.cwd(), options.testSuitePropertiesFile);
   let ignoreSuitePropertiesCheck = !fs.existsSync(junitSuitePropertiesFilePath);
 
+  // If the usePathForSuiteName option is true and the
+  // suiteNameTemplate value is set to the default, override
+  // the suiteNameTemplate.
+  if (options.usePathForSuiteName === 'true' &&
+    options.suiteNameTemplate === toTemplateTag(constants.TITLE_VAR)) {
+
+    options.suiteNameTemplate = toTemplateTag(constants.FILEPATH_VAR);
+  }
+
   // Generate a single XML file for all jest tests
   let jsonResults = {
     'testsuites': [{
@@ -46,6 +67,7 @@
       'name': options.suiteName,
       'tests': 0,
       'failures': 0,
+      'errors': 0,
       // Overall execution time:
       // Since tests are typically executed in parallel this time can be significantly smaller
       // than the sum of the individual test suites
@@ -56,18 +78,20 @@
 
   // Iterate through outer testResults (test suites)
   report.testResults.forEach((suite) => {
-    // Skip empty test suites
-    if (suite.testResults.length <= 0) {
+    const noResults = suite.testResults.length === 0;
+    if (noResults && options.reportTestSuiteErrors === 'false') {
       return;
     }
 
-    // If the usePathForSuiteName option is true and the
-    // suiteNameTemplate value is set to the default, overrides
-    // the suiteNameTemplate.
-    if (options.usePathForSuiteName === 'true' &&
-      options.suiteNameTemplate === toTemplateTag(constants.TITLE_VAR)) {
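+    // A suite that failed to load has no usable suite name or test titles,
+    // so fall back to the file path for both and pin the classname. The
+    // overrides are merged into per-suite suiteOptions below, leaving the
+    // caller's options untouched.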
+    const noResultOptions = noResults ? {
+      suiteNameTemplate: toTemplateTag(constants.FILEPATH_VAR),
+      titleTemplate: toTemplateTag(constants.FILEPATH_VAR),
+      classNameTemplate: `Test suite failed to run`
+    } : {};
 
-      options.suiteNameTemplate = toTemplateTag(constants.FILEPATH_VAR);
+    const suiteOptions = Object.assign({}, options, noResultOptions);
+    if (noResults) {
+      addErrorTestResult(suite);
     }
 
     // Build variables for suite name
@@ -87,11 +111,12 @@
     const suiteNumTests = suite.numFailingTests + suite.numPassingTests + suite.numPendingTests;
     const suiteExecutionTime = executionTime(suite.perfStats.start, suite.perfStats.end);
 
+    const suiteErrors = noResults ? 1 : 0;
     let testSuite = {
       'testsuite': [{
         _attr: {
-          name: replaceVars(options.suiteNameTemplate, suiteNameVariables),
-          errors: 0, // not supported
+          name: replaceVars(suiteOptions.suiteNameTemplate, suiteNameVariables),
+          errors: suiteErrors,
           failures: suite.numFailingTests,
           skipped: suite.numPendingTests,
           timestamp: (new Date(suite.perfStats.start)).toISOString().slice(0, -5),
@@ -103,6 +128,7 @@
 
     // Update top level testsuites properties
     jsonResults.testsuites[0]._attr.failures += suite.numFailingTests;
+    jsonResults.testsuites[0]._attr.errors += suiteErrors;
     jsonResults.testsuites[0]._attr.tests += suiteNumTests;
 
     if (!ignoreSuitePropertiesCheck) {
@@ -131,7 +157,7 @@
 
     // Iterate through test cases
     suite.testResults.forEach((tc) => {
-      const classname = tc.ancestorTitles.join(options.ancestorSeparator);
+      const classname = tc.ancestorTitles.join(suiteOptions.ancestorSeparator);
       const testTitle = tc.title;
 
       // Build replacement map
@@ -146,23 +172,24 @@
       let testCase = {
         'testcase': [{
           _attr: {
-            classname: replaceVars(options.classNameTemplate, testVariables),
-            name: replaceVars(options.titleTemplate, testVariables),
+            classname: replaceVars(suiteOptions.classNameTemplate, testVariables),
+            name: replaceVars(suiteOptions.titleTemplate, testVariables),
             time: tc.duration / 1000
           }
         }]
       };
 
-      if (options.addFileAttribute === 'true') {
+      if (suiteOptions.addFileAttribute === 'true') {
        testCase.testcase[0]._attr.file = filepath;
      }
 
       // Write out all failure messages as <failure> tags
       // Nested underneath <testcase> tag
-      if (tc.status === 'failed') {
+      if (tc.status === 'failed' || tc.status === 'error') {
         tc.failureMessages.forEach((failure) => {
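+          // Results synthesized by addErrorTestResult have status 'error';
+          // emit those as <error> elements and genuine failures as <failure>.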
+          const tagName = tc.status === 'failed' ? 'failure' : 'error';
           testCase.testcase.push({
-            'failure': stripAnsi(failure)
+            [tagName]: stripAnsi(failure)
           });
         })
       }
@@ -179,7 +206,7 @@
     });
 
     // Write stdout console output if available
-    if (options.includeConsoleOutput === 'true' && suite.console && suite.console.length) {
+    if (suiteOptions.includeConsoleOutput === 'true' && suite.console && suite.console.length) {
       // Stringify the entire console object
       // Easier this way because formatting in a readable way is tough with XML
       // And this can be parsed more easily
@@ -193,7 +220,7 @@
     }
 
     // Write short stdout console output if available
-    if (options.includeShortConsoleOutput === 'true' && suite.console && suite.console.length) {
+    if (suiteOptions.includeShortConsoleOutput === 'true' && suite.console && suite.console.length) {
       // Extract and then Stringify the console message value
       // Easier this way because formatting in a readable way is tough with XML
       // And this can be parsed more easily
diff --git a/utils/getOptions.js b/utils/getOptions.js
index e42b82e..3db23b6 100644
--- a/utils/getOptions.js
+++ b/utils/getOptions.js
@@ -21,8 +21,6 @@ function getEnvOptions() {
 }
 
 function getAppOptions(pathToResolve) {
-  const initialPath = pathToResolve;
-  let traversing = true;
 
   // Find nearest package.json by traversing up directories until /