Skip to content
This repository was archived by the owner on Oct 28, 2024. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
92 changes: 60 additions & 32 deletions src/co/elastic/NotificationManager.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -35,53 +35,51 @@ This method generates flakey test data from Jenkins test results
* @param secret Vault path to secrets which hold authentication information for Elasticsearch
* @param jobInfo JobInfo data collected from job-info.json
* @param testsErrors list of test failed, see src/test/resources/tests-errors.json
Comment thread
cachedout marked this conversation as resolved.
* @param flakyReportIdx the identifier of the flaky-test report index to query.
* @param flakyThreshold the score threshold below which tests are not considered flaky. Default 0.0
* @param testsSummary object with the test results summary, see src/test/resources/tests-summary.json
* @param querySize The maximum value of results to be reported. Default 500
* @param queryTimeout Specifies the period of time to wait for a response. Default 20s
* @param disableGHComment whether to disable the GH comment notification.
* @param disableGHIssueCreation whether to disable the GH create issue if any flaky matches.
* @param jobName the full name of the job to filter flaky-test results by, e.g. `Beats/beats/master`.
*/
def analyzeFlakey(Map args = [:]) {
def es = args.containsKey('es') ? args.es : error('analyzeFlakey: es parameter is required')
def jobName = args.containsKey('jobName') ? args.jobName : error('analyzeFlakey: jobName parameter is required')
def secret = args.containsKey('es_secret') ? args.es_secret : null
def flakyReportIdx = args.containsKey('flakyReportIdx') ? args.flakyReportIdx : error('analyzeFlakey: flakyReportIdx parameter is required')
def testsErrors = args.containsKey('testsErrors') ? args.testsErrors : []
def flakyThreshold = args.containsKey('flakyThreshold') ? args.flakyThreshold : 0.0
def testsSummary = args.containsKey('testsSummary') ? args.testsSummary : null
def querySize = args.get('querySize', 500)
def queryTimeout = args.get('queryTimeout', '20s')
def disableGHComment = args.get('disableGHComment', false)
def disableGHIssueCreation = args.get('disableGHIssueCreation', false)

def labels = 'flaky-test,ci-reported'
def boURL = getBlueoceanDisplayURL()
def flakyTestsWithIssues = [:]
def genuineTestFailures = []

if (!flakyReportIdx?.trim()) {
error 'analyzeFlakey: did not receive flakyReportIdx data'
}

// Only if there are test failures to analyse
if(testsErrors.size() > 0) {
// 1. Only if there are test failures to analyse
// 2. Only continue if we have a jobName passed in. This does not raise an error to preserve
// backward compatibility.
if(testsErrors.size() > 0 && jobName?.trim()) {

// Query only the test_name field since it's the only used and don't want to overkill the
// jenkins instance when using the toJSON step since it reads in memory the json response.
// for 500 entries it's about 2500 lines versus 8000 lines if no filter_path
def query = "/${flakyReportIdx}/_search?size=${querySize}&filter_path=hits.hits._source.test_name,hits.hits._index"
def query = "/flaky-tests/_search?filter_path=aggregations.test_name.buckets"
def flakeyTestsRaw = sendDataToElasticsearch(es: es,
secret: secret,
data: queryFilter(queryTimeout, flakyThreshold),
data: queryFilter(jobName),
restCall: query)
def flakeyTestsParsed = toJSON(flakeyTestsRaw)

// Normalise both data structures with their names
// Intersection: tests that are failing and were also scored as flaky.
// Subset of genuine test failures, aka, those failures that were not scored as flaky previously.
def testFailures = testsErrors.collect { it.name }
def testFlaky = flakeyTestsParsed?.hits?.hits?.collect { it['_source']['test_name'] }
def testFlaky = flakeyTestsParsed?.aggregations?.test_name?.buckets?.collect { it['key'] }

// The following is code which is included here as a possible future enhancement if at some point
// we wish to include the number of flakes found in the time period. (Currently hard-coded to 90d)
//
//def testFlakyFreq = [:]
//flakeyTestsParsed?.aggregations?.test_name?.buckets?.each { it -> testFlakyFreq[it['key']] = it['doc_count'] }

def foundFlakyList = testFlaky?.size() > 0 ? testFailures.intersect(testFlaky) : []
genuineTestFailures = testFailures.minus(foundFlakyList)
log(level: 'DEBUG', text: "analyzeFlakey: Flaky tests raw: ${flakeyTestsRaw}")
Expand Down Expand Up @@ -369,19 +367,49 @@ def generateBuildReport(Map args = [:]) {
return output
}

def queryFilter(timeout, flakyThreshold) {
return """{
"timeout": "${timeout}",
"sort" : [
{ "timestamp" : "desc" },
{ "test_score" : "desc" }
],
"query" : {
"range" : {
"test_score" : {
"gt" : ${flakyThreshold}
}
}
def queryFilter(jobName) {
return """
{
"aggs": {
Comment thread
cachedout marked this conversation as resolved.
"test_name": {
"terms": {
"field": "test.name.keyword",
"order": {
"_count": "desc"
}
}
}
},
"size": 0,
"query": {
"bool": {
"must": [],
"filter": [
{
"bool": {
"should": [
{
"match_phrase": {
"job.fullName": "${jobName}"
}
}"""
}
],
"minimum_should_match": 1
}
},
{
"range": {
"build.startTime": {
"gte": "now-90d",
"format": "strict_date_optional_time"
}
}
}
],
"should": [],
"must_not": []
}
}
}
"""
}
54 changes: 10 additions & 44 deletions src/test/groovy/NotificationManagerStepTests.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -702,7 +702,7 @@ class NotificationManagerStepTests extends ApmBasePipelineTest {
helper.registerAllowedMethod('sendDataToElasticsearch', [Map.class], {readJSON(file: 'flake-empty-results.json')})
helper.registerAllowedMethod('isPR', { return true })
script.analyzeFlakey(
flakyReportIdx: 'reporter-apm-agent-python-apm-agent-python-master',
jobName: 'Beats/beats/master',
es: "https://fake_url",
testsErrors: readJSON(file: 'flake-tests-errors-without-match.json'),
testsSummary: readJSON(file: 'flake-tests-summary.json')
Expand All @@ -722,7 +722,7 @@ class NotificationManagerStepTests extends ApmBasePipelineTest {
)
helper.registerAllowedMethod('isPR', { return true })
script.analyzeFlakey(
flakyReportIdx: 'reporter-apm-agent-python-apm-agent-python-master',
jobName: 'Beats/beats/master',
es: "https://fake_url",
testsErrors: readJSON(file: 'flake-tests-errors-without-match.json'),
testsSummary: readJSON(file: 'flake-tests-summary.json')
Expand All @@ -742,7 +742,7 @@ class NotificationManagerStepTests extends ApmBasePipelineTest {
)
helper.registerAllowedMethod('isPR', { return true })
script.analyzeFlakey(
flakyReportIdx: 'reporter-apm-agent-python-apm-agent-python-master',
jobName: 'Beats/beats/master',
es: "https://fake_url",
testsErrors: readJSON(file: 'flake-tests-errors.json'),
testsSummary: readJSON(file: 'flake-tests-summary.json')
Expand All @@ -765,7 +765,7 @@ class NotificationManagerStepTests extends ApmBasePipelineTest {
helper.registerAllowedMethod('githubCreateIssue', [Map.class], { return '100' } )
helper.registerAllowedMethod('isPR', { return true })
script.analyzeFlakey(
flakyReportIdx: 'reporter-apm-agent-python-apm-agent-python-master',
jobName: 'Beats/beats/master',
es: "https://fake_url",
testsErrors: readJSON(file: 'flake-tests-errors.json'),
testsSummary: readJSON(file: 'flake-tests-summary.json')
Expand All @@ -789,7 +789,7 @@ class NotificationManagerStepTests extends ApmBasePipelineTest {
helper.registerAllowedMethod('githubCreateIssue', [Map.class], { return '100' } )
helper.registerAllowedMethod('isPR', { return true })
script.analyzeFlakey(
flakyReportIdx: 'reporter-apm-agent-python-apm-agent-python-master',
jobName: 'Beats/beats/master',
es: "https://fake_url",
testsErrors: readJSON(file: 'flake-tests-errors.json'),
testsSummary: readJSON(file: 'flake-tests-summary.json')
Expand All @@ -813,7 +813,7 @@ class NotificationManagerStepTests extends ApmBasePipelineTest {
helper.registerAllowedMethod('githubCreateIssue', [Map.class], { return '100' } )
helper.registerAllowedMethod('isPR', { return true })
script.analyzeFlakey(
flakyReportIdx: 'reporter-apm-agent-python-apm-agent-python-master',
jobName: 'Beats/beats/master',
es: "https://fake_url",
testsErrors: readJSON(file: 'flake-tests-errors.json'),
testsSummary: readJSON(file: 'flake-tests-summary.json')
Expand All @@ -833,7 +833,7 @@ class NotificationManagerStepTests extends ApmBasePipelineTest {
helper.registerAllowedMethod('lookForGitHubIssues', [Map.class], { return [ bar: '' ] })
helper.registerAllowedMethod('isPR', { return true })
script.analyzeFlakey(
flakyReportIdx: 'reporter-apm-agent-python-apm-agent-python-master',
jobName: 'Beats/beats/master',
es: "https://fake_url",
testsErrors: readJSON(file: 'flake-tests-errors.json'),
testsSummary: readJSON(file: 'flake-tests-summary.json'),
Expand All @@ -853,7 +853,7 @@ class NotificationManagerStepTests extends ApmBasePipelineTest {
helper.registerAllowedMethod('lookForGitHubIssues', [Map.class], { return [:] } )
helper.registerAllowedMethod('isPR', { return true })
script.analyzeFlakey(
flakyReportIdx: 'reporter-apm-agent-python-apm-agent-python-master',
jobName: 'Beats/beats/master',
es: "https://fake_url",
testsErrors: [:],
testsSummary: [ "failed": 0, "passed": 120, "skipped": 0, "total": 120 ]
Expand All @@ -870,7 +870,7 @@ class NotificationManagerStepTests extends ApmBasePipelineTest {
helper.registerAllowedMethod('lookForGitHubIssues', [Map.class], { return [:] } )
helper.registerAllowedMethod('isPR', { return true })
script.analyzeFlakey(
flakyReportIdx: 'reporter-apm-agent-python-apm-agent-python-master',
jobName: 'Beats/beats/master',
es: "https://fake_url",
testsErrors: [:],
testsSummary: [:]
Expand All @@ -880,48 +880,14 @@ class NotificationManagerStepTests extends ApmBasePipelineTest {
assertJobStatusSuccess()
}

@Test
void test_analyzeFlakeyThreshold() throws Exception {
helper.registerAllowedMethod(
"sendDataToElasticsearch",
[Map.class],
{m -> readJSON(file: "flake-results.json")}
)
script.analyzeFlakey(
flakyReportIdx: 'reporter-apm-agent-python-apm-agent-python-master',
es: "https://fake_url",
testsErrors: readJSON(file: 'flake-tests-errors.json'),
flakyThreshold: 0.5
)
printCallStack()
assertTrue(assertMethodCallContainsPattern('sendDataToElasticsearch', '"gt" : 0.5'))
assertJobStatusSuccess()
}

@Test
void test_analyzeFlakeyNoJobInfo() throws Exception {
try {
script.analyzeFlakey(
flakyReportIdx: '',
es: "https://fake_url",
testsErrors: readJSON(file: 'flake-tests-errors.json')
)
} catch(e) {
//NOOP
}
printCallStack()
assertTrue(assertMethodCallContainsPattern('error', 'analyzeFlakey: did not receive flakyReportIdx data'))
assertJobStatusFailure()
}

@Test
void test_analyzeFlakey_without_comment_notifications() throws Exception {
helper.registerAllowedMethod('sendDataToElasticsearch', [Map.class], {readJSON(file: "flake-results.json")})
helper.registerAllowedMethod('lookForGitHubIssues', [Map.class], { return [:] } )
helper.registerAllowedMethod('isPR', { return true })
script.analyzeFlakey(
disableGHComment: true,
flakyReportIdx: 'reporter-apm-agent-python-apm-agent-python-master',
jobName: 'Beats/beats/master',
es: "https://fake_url",
testsErrors: [:],
testsSummary: [:]
Expand Down
6 changes: 2 additions & 4 deletions vars/notifyBuildResult.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,6 @@ def call(Map args = [:]) {
def notifySlackComment = args.containsKey('slackComment') ? args.slackComment : false
def analyzeFlakey = args.containsKey('analyzeFlakey') ? args.analyzeFlakey : false
def newPRComment = args.containsKey('newPRComment') ? args.newPRComment : [:]
def flakyReportIdx = args.containsKey('flakyReportIdx') ? args.flakyReportIdx : ""
def flakyThreshold = args.containsKey('flakyThreshold') ? args.flakyThreshold : 0.0

node('master || metal || linux'){
stage('Reporting build status'){
Expand All @@ -68,19 +66,19 @@ def call(Map args = [:]) {
def slackCredentials = args.containsKey('slackCredentials') ? args.slackCredentials : 'jenkins-slack-integration-token'
def aggregateComments = args.get('aggregateComments', true)
def flakyDisableGHIssueCreation = args.get('flakyDisableGHIssueCreation', false)
def jobName = args.get('jobName') ? args.jobName : ''
catchError(message: 'There were some failures with the notifications', buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
def data = getBuildInfoJsonFiles(jobURL: env.JOB_URL, buildNumber: env.BUILD_NUMBER, returnData: true)
data['docsUrl'] = "http://${env?.REPO_NAME}_${env?.CHANGE_ID}.docs-preview.app.elstc.co/diff"
data['emailRecipients'] = to
data['statsUrl'] = statsURL
data['es'] = es
data['es_secret'] = secret
Comment thread
cachedout marked this conversation as resolved.
data['flakyReportIdx'] = flakyReportIdx
data['flakyThreshold'] = flakyThreshold
data['header'] = slackHeader
data['channel'] = slackChannel
data['credentialId'] = slackCredentials
data['enabled'] = slackNotify
data['jobName'] = jobName

data['disableGHIssueCreation'] = flakyDisableGHIssueCreation
// Allow to aggregate the comments, for such it disables the default notifications.
Expand Down
3 changes: 1 addition & 2 deletions vars/notifyBuildResult.txt
Original file line number Diff line number Diff line change
Expand Up @@ -35,8 +35,7 @@ emails on Failed builds that are not pull request.
* slackCredentials: What slack credentials to be used. Default value uses `jenkins-slack-integration-token`.
* slackNotify: Whether to send or not the slack notifications, by default it sends notifications on Failed builds that are not pull request.
* analyzeFlakey: Whether or not to add a comment in the PR with tests which have been detected as flakey. Default: `false`.
Comment thread
cachedout marked this conversation as resolved.
* flakyReportIdx: The flaky index to compare this jobs results to. e.g. reporter-apm-agent-java-apm-agent-java-master
* flakyThreshold: The threshold below which flaky tests will be ignored. Default: 0.0
* flakyDisableGHIssueCreation: whether to disable the GH create issue if any flaky matches. Default false.
* newPRComment: The map of the data to be populated as a comment. Default empty.
* aggregateComments: Whether to create only one single GitHub PR Comment with all the details. Default true.
* jobName: The name of the job, e.g. `Beats/beats/master`.