This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

changed print statements to use logging
changed Jenkinsfile to use redesigned system

included config file for dependency analyzer

minor fixes
Carl Tsai committed Aug 8, 2018
1 parent f9ec677 commit 46b3c0f
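
The central change in this commit is replacing ad-hoc print() calls with module-level loggers and lazy %-style arguments. A minimal sketch of that pattern, with function and variable names that are illustrative rather than taken from the repository:

    import logging

    # Module-level logger, matching the `logger = logging.getLogger(__name__)`
    # pattern already used by the flaky_tests scripts.
    logger = logging.getLogger(__name__)

    def report(flaky, total):
        # Before: print("Flaky: {} of {}".format(flaky, total))
        # After:  arguments are passed separately, so the string is only
        #         formatted if the record is actually emitted.
        logger.info("Flaky: %d of %d", flaky, total)

    if __name__ == "__main__":
        logging.basicConfig(level=logging.INFO)
        report(2, 10)
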
Showing 6 changed files with 63 additions and 52 deletions.
43 changes: 15 additions & 28 deletions tools/flaky_tests/Jenkinsfile
@@ -19,40 +19,27 @@

err = null

def init_git() {
deleteDir()
retry(5) {
try{
timeout(time: 15, unit: 'MINUTES') {
checkout scm
sh 'git submodule update --init --recursive'
sh 'git clean -d -f'
}
} catch (exc) {
deleteDir()
error "Failed to fetch source code with $(exc)"
sleep 2
}
}
node('mxnetlinux-cpu') {
// Loading the utilities requires a node context unfortunately
checkout scm
utils = load('ci/Jenkinsfile_utils.groovy')
}
utils.assign_node_labels(linux_gpu: 'mxnetlinux-gpu')

def docker_run(platform, function_name, use_nvidia, shared_mem = '500m') {
def command = "ci/build.py --docker-registry ${env.DOCKER_CACHE_REGISTRY} %USE_NVIDIA% --platform %PLATFORM% --docker-build-retries 3 --shm-size %%SHARED_MEM% /work/runtime_functions.sh %FUNCTION_NAME%"
command = command.replaceAll('%USE_NVIDIA%', use_nvidia ? '--nvidiadocker': '')
command = command.replaceAll('%PLATFORM%', platform)
command = command.replaceAll('%FUNCTION_NAME%', function_name)
command = command.replaceAll('%SHARED_MEM%', shared_mem)

sh command
}

try{
utils.main_wrapper(
core_logic: {
stage('Flakiness Check'){
node('mxnetlinux-gpu') {
node(NODE_LINUX_GPU) {
ws('workspace/flakiness_check'){
init_git()
docker_run('ubuntu_gpu', 'run_flakiness_checker', true)
utils.init_git()
utils.docker_run('ubuntu_build_cuda', 'build_ubuntu_gpu_cuda91_cudnn7', false)
utils.docker_run('ubuntu_gpu', 'run_flakiness_checker', false)
}
}
}
}
,
failure_handler: {
}
)
16 changes: 9 additions & 7 deletions tools/flaky_tests/check_branch.py
@@ -25,6 +25,7 @@
import os
import subprocess
import json
import sys

import flakiness_checker
import diff_collator
@@ -88,7 +89,7 @@ def time_test(test):
try:
n = int(TIME_BUDGET / total_time)
except ZeroDivisionError:
logger.Error("Total time for tests was 0")
logger.error("Total time for tests was 0")
return []

logger.debug("total_time: %f | num_trials: %d", total_time, n)
@@ -112,17 +113,17 @@ def check_tests(tests):


def output_results(flaky, nonflaky):
print("Following tests failed flakiness checker:")
logger.info("Following tests failed flakiness checker:")
if not flaky:
print("None")
logger.info("None")
for test in flaky:
print("%s:%s".format(test[0], test[1]))
logger.info("%s:%s", test[0], test[1])

print("Following tests passed flakiness checker:")
logger.info("Following tests passed flakiness checker:")
if not nonflaky:
print("None")
logger.info("None")
for test in nonflaky:
print("{}:{}".format(test[0], test[1]))
logger.info("%s:%s", test[0], test[1])

logger.info("[Results]\tTotal: %d\tFlaky: %d\tNon-flaky: %d",
len(flaky) + len(nonflaky), len(flaky), len(nonflaky))
@@ -138,6 +139,7 @@ def output_results(flaky, nonflaky):

diff_output = diff_collator.get_diff_output(args)
changes = diff_collator.parser(diff_output)
diff_collator.output_changes(changes)

changes = {k:set(v.keys()) for k, v in changes.items()}
tests = select_tests(changes)
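
To make the hand-off from diff_collator to select_tests above concrete, here is an illustrative snippet with invented data; the nested {file: {function: [(start, end), ...]}} shape is inferred from the output_changes loops in the diff_collator.py diff further down, not stated anywhere in this commit:

    # Invented example of the collator output.
    changes = {
        "tools/flaky_tests/check_branch.py": {
            "output_results": [(100, 117)],
            "time_test": [(88, 96)],
        },
    }

    # check_branch only needs to know which functions changed in each file,
    # so it drops the line ranges and keeps a set of function names per file.
    changes = {k: set(v.keys()) for k, v in changes.items()}
    print(changes)
    # {'tools/flaky_tests/check_branch.py': {'output_results', 'time_test'}}  (set order may vary)
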
17 changes: 9 additions & 8 deletions tools/flaky_tests/dependency_analyzer.py
@@ -37,7 +37,7 @@
import json

DEFAULT_CONFIG_FILE = os.path.join(
os.path.dirname(__file__), "config.json")
os.path.dirname(__file__), "test_dependencies.config")

logger = logging.getLogger(__name__)

@@ -59,7 +59,7 @@ def find_dependents(dependencies, top):

try:
file_deps = read_config(DEFAULT_CONFIG_FILE)
except FileNotFoundError:
except IOError:
file_deps = {}
logger.WARNING("No config file found, "
"continuing with no file dependencies")
@@ -115,21 +115,22 @@ def visit_Attribute(self, node):

try:
dependents |= find_dependents_file(dependents - dependencies, filename)
except RecursionError as re:
logger.error("Encountered recursion error when seaching {}: {}",
except RuntimeError as re:
logger.error("Encountered recursion error when seaching %s: %s",
filename, re.args[0])

return dependents


def output_results(dependents):
logger.info("Dependencies:")
for filename in dependents.keys():
print(filename)
logger.info(filename)
if not dependents[filename]:
print("None")
logger.info("None")
continue
for func in dependents[filename]:
print("\t{}".format(func))
logger.info("\t%s", func)


def parse_args():
@@ -152,7 +153,7 @@ def __call__(self, parser, namespace, values, option_string=None):
help="list of dependent functions, "
"in the format: <file>:<func_name>")

arg_parse.add_argument(
arg_parser.add_argument(
"--logging-level", "-l", dest="level", default="INFO",
help="logging level, defaults to INFO")

13 changes: 6 additions & 7 deletions tools/flaky_tests/diff_collator.py
@@ -126,11 +126,12 @@ def parse_patch(patch):
if line.startswith("+def "):
func_name = line.split()[1].split("(")[0]
changes[func_name] = []
logger.debug("\tFound new top-level function: %s", func_name)

return file_name, changes


def output_changes(changes, verbosity):
def output_changes(changes, verbosity=2):
""" Output changes in an easy to understand format
Three verbosity levels:
@@ -147,23 +148,21 @@
5:5
func_c
"""
if not verbosity:
verbosity = 2
logger.debug("verbosity: %d", verbosity)

if not changes:
logger.info("No changes found")
else:
for file_name, chunks in changes.items():
print(file_name)
logger.info(file_name)
if verbosity < 2:
continue
for func_name, ranges in chunks.items():
print("\t{}".format(func_name))
logger.info("\t%s", func_name)
if verbosity < 3:
continue
for (start, end) in ranges:
print("\t\t{}:{}".format(start, end))
logger.info("\t\t%s:%s", start, end)



@@ -212,7 +211,7 @@ def parse_args():

changes = parser(diff_output)
for file_name, chunks in changes.items():
if not re.fullmatch(args.expr, file_name):
if not re.match(args.expr, file_name):
del changes[file_name]

output_changes(changes, args.verbosity)
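
A small usage sketch for output_changes and its new default verbosity; the sample data is invented, and the import assumes diff_collator.py is on the Python path:

    import logging
    from diff_collator import output_changes

    logging.basicConfig(level=logging.INFO, format="%(message)s")

    # Invented sample in the shape the collator's parser appears to produce:
    # {file: {function: [(start_line, end_line), ...]}}
    changes = {
        "tests/python/unittest/test_operator.py": {
            "test_softmax": [(10, 25)],
            "test_pooling": [(40, 44), (60, 61)],
        },
    }

    # verbosity=1 logs file names only, 2 adds function names, 3 adds line
    # ranges; omitting the argument now falls back to the new default of 2.
    output_changes(changes, verbosity=3)
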
4 changes: 2 additions & 2 deletions tools/flaky_tests/flakiness_checker.py
@@ -69,7 +69,7 @@ def find_test_path(self, test_file):
test_path = os.path.split(test_file)
top = os.path.join(os.getcwd(), test_path[0])

for (path, dirs, files) in os.walk(top):
for (path, _ , files) in os.walk(top):
if test_path[1] in files:
return os.path.join(path, test_path[1])

@@ -122,7 +122,7 @@ def parse_args():
except AttributeError:
logging.basicConfig(level=logging.INFO)
logging.warning("Invalid logging level: %s", args.level)
logger.debug("args: $s", args)
logger.debug("args: %s", args)

code = run_test_trials(args.test_path, args.test_name, args.num_trials,
args.seed, args.args)
22 changes: 22 additions & 0 deletions tools/flaky_tests/test_dependencies.config
@@ -0,0 +1,22 @@
{
"tests/python/gpu/test_operator_gpu.py": [
"test_operator.py",
"test_optimizer.py",
"test_random.py",
"test_exc_handling.py",
"test_sparse_ndarray.py",
"test_sparse_operator.py",
"test_ndarray.py"
],
"tests/python/gpu/test_gluon_gpu.py": [
"test_gluon.py",
"test_loss.py",
"test_gluon_rnn.py"
],
"tests/python/mkl/test_quantization_mkldnn.py": [
"test_quantization.py"
],
"tests/python/quantization_gpu/test_quantization_gpu.py": [
"test_quantization.py"
]
}
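
The new config file is plain JSON mapping each GPU/MKL test module to the unit-test files it depends on. read_config itself is not shown in this diff, so the loader below is a hypothetical stand-in that mirrors the IOError fallback added in dependency_analyzer.py above:

    import json
    import logging
    import os

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    # Hypothetical stand-in for dependency_analyzer.read_config.
    def load_dependencies(path):
        try:
            with open(path) as f:
                return json.load(f)
        except IOError:
            # Same fallback as the commit adds: a missing config simply means
            # there are no file-level dependencies.
            logger.warning("No config file found, continuing with no file dependencies")
            return {}

    deps = load_dependencies(
        os.path.join(os.path.dirname(__file__), "test_dependencies.config"))
    for test_module, sources in deps.items():
        logger.info("%s depends on: %s", test_module, ", ".join(sources))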
