Commit

Merge pull request #831 from ewels/master
ewels authored Jan 18, 2021
2 parents e529029 + bcf63b1 commit be0a144
Showing 6 changed files with 66 additions and 45 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -10,6 +10,7 @@
 
 * Added schema validation of GitHub action workflows to lint function [[#795](https://github.com/nf-core/tools/issues/795)]
 * Fixed bug in schema title and description validation
+* Added second progress bar for conda dependencies lint check, as it can be slow [[#299](https://github.com/nf-core/tools/issues/299)]
 
 ## [v1.12.1 - Silver Dolphin](https://github.com/nf-core/tools/releases/tag/1.12.1) - [2020-12-03]
 
49 changes: 25 additions & 24 deletions nf_core/lint/__init__.py
@@ -149,6 +149,7 @@ def __init__(self, wf_path, release_mode=False):
         ]
         if self.release_mode:
             self.lint_tests.extend(["version_consistency"])
+        self.progress_bar = None
 
     def _load(self):
         """Load information about the pipeline into the PipelineLint object"""
@@ -196,32 +197,32 @@ def _lint_pipeline(self):
         if self.release_mode:
             log.info("Including --release mode tests")
 
-        progress = rich.progress.Progress(
+        self.progress_bar = rich.progress.Progress(
             "[bold blue]{task.description}",
             rich.progress.BarColumn(bar_width=None),
-            "[magenta]{task.completed} of {task.total}[reset] » [bold yellow]{task.fields[func_name]}",
+            "[magenta]{task.completed} of {task.total}[reset] » [bold yellow]{task.fields[test_name]}",
             transient=True,
         )
-        with progress:
-            lint_progress = progress.add_task(
-                "Running lint checks", total=len(self.lint_tests), func_name=self.lint_tests[0]
+        with self.progress_bar:
+            lint_progress = self.progress_bar.add_task(
+                "Running lint checks", total=len(self.lint_tests), test_name=self.lint_tests[0]
             )
-            for fun_name in self.lint_tests:
-                if self.lint_config.get(fun_name, {}) is False:
-                    log.debug("Skipping lint test '{}'".format(fun_name))
-                    self.ignored.append((fun_name, fun_name))
+            for test_name in self.lint_tests:
+                if self.lint_config.get(test_name, {}) is False:
+                    log.debug("Skipping lint test '{}'".format(test_name))
+                    self.ignored.append((test_name, test_name))
                     continue
-                progress.update(lint_progress, advance=1, func_name=fun_name)
-                log.debug("Running lint test: {}".format(fun_name))
-                test_results = getattr(self, fun_name)()
+                self.progress_bar.update(lint_progress, advance=1, test_name=test_name)
+                log.debug("Running lint test: {}".format(test_name))
+                test_results = getattr(self, test_name)()
                 for test in test_results.get("passed", []):
-                    self.passed.append((fun_name, test))
+                    self.passed.append((test_name, test))
                 for test in test_results.get("ignored", []):
-                    self.ignored.append((fun_name, test))
+                    self.ignored.append((test_name, test))
                 for test in test_results.get("warned", []):
-                    self.warned.append((fun_name, test))
+                    self.warned.append((test_name, test))
                 for test in test_results.get("failed", []):
-                    self.failed.append((fun_name, test))
+                    self.failed.append((test_name, test))
 
     def _print_results(self, show_passed=False):
         """Print linting results to the command line.
@@ -252,7 +253,7 @@ def _s(some_list):
         if len(self.passed) > 0 and show_passed:
             table = Table(style="green", box=rich.box.ROUNDED)
             table.add_column(
-                r"\[✔] {} Test{} Passed".format(len(self.passed), _s(self.passed)),
+                r"[✔] {} Test{} Passed".format(len(self.passed), _s(self.passed)),
                 no_wrap=True,
             )
             table = format_result(self.passed, table)
@@ -261,22 +262,22 @@ def _s(some_list):
         # Table of ignored tests
         if len(self.ignored) > 0:
             table = Table(style="grey58", box=rich.box.ROUNDED)
-            table.add_column(r"\[?] {} Test{} Ignored".format(len(self.ignored), _s(self.ignored)), no_wrap=True)
+            table.add_column(r"[?] {} Test{} Ignored".format(len(self.ignored), _s(self.ignored)), no_wrap=True)
             table = format_result(self.ignored, table)
             console.print(table)
 
         # Table of warning tests
         if len(self.warned) > 0:
             table = Table(style="yellow", box=rich.box.ROUNDED)
-            table.add_column(r"\[!] {} Test Warning{}".format(len(self.warned), _s(self.warned)), no_wrap=True)
+            table.add_column(r"[!] {} Test Warning{}".format(len(self.warned), _s(self.warned)), no_wrap=True)
             table = format_result(self.warned, table)
             console.print(table)
 
         # Table of failing tests
         if len(self.failed) > 0:
             table = Table(style="red", box=rich.box.ROUNDED)
             table.add_column(
-                r"\[✗] {} Test{} Failed".format(len(self.failed), _s(self.failed)),
+                r"[✗] {} Test{} Failed".format(len(self.failed), _s(self.failed)),
                 no_wrap=True,
             )
             table = format_result(self.failed, table)
@@ … @@
         table = Table(box=rich.box.ROUNDED)
         table.add_column("[bold green]LINT RESULTS SUMMARY".format(len(self.passed)), no_wrap=True)
         table.add_row(
-            r"\[✔] {:>3} Test{} Passed".format(len(self.passed), _s(self.passed)),
+            r"[✔] {:>3} Test{} Passed".format(len(self.passed), _s(self.passed)),
             style="green",
         )
-        table.add_row(r"\[?] {:>3} Test{} Ignored".format(len(self.ignored), _s(self.ignored)), style="grey58")
-        table.add_row(r"\[!] {:>3} Test Warning{}".format(len(self.warned), _s(self.warned)), style="yellow")
-        table.add_row(r"\[✗] {:>3} Test{} Failed".format(len(self.failed), _s(self.failed)), style="red")
+        table.add_row(r"[?] {:>3} Test{} Ignored".format(len(self.ignored), _s(self.ignored)), style="grey58")
+        table.add_row(r"[!] {:>3} Test Warning{}".format(len(self.warned), _s(self.warned)), style="yellow")
+        table.add_row(r"[✗] {:>3} Test{} Failed".format(len(self.failed), _s(self.failed)), style="red")
         console.print(table)
 
     def _get_results_md(self):
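
The change above keeps one rich progress display for the whole lint run: the bar now lives on `self.progress_bar` rather than a local variable, so individual lint tests can attach their own sub-tasks to it, and the custom task field is renamed from `func_name` to `test_name`. Below is a minimal sketch of that pattern using the public rich API only; the check names, counts and sleep are invented for illustration and are not part of this commit.

```python
# Minimal sketch of one shared rich Progress with an outer task and a nested
# sub-task, using a custom "test_name" task field. Check names and timings
# below are hypothetical.
import time

import rich.progress

progress_bar = rich.progress.Progress(
    "[bold blue]{task.description}",
    rich.progress.BarColumn(bar_width=None),
    "[magenta]{task.completed} of {task.total}[reset] » [bold yellow]{task.fields[test_name]}",
    transient=True,  # clear the bars from the terminal when the context exits
)

checks = ["files_exist", "conda_env_yaml", "pipeline_todos"]  # hypothetical lint test names
with progress_bar:
    outer = progress_bar.add_task("Running lint checks", total=len(checks), test_name=checks[0])
    for check in checks:
        progress_bar.update(outer, advance=1, test_name=check)
        # A slow check can add its own sub-task on the same Progress instance...
        inner = progress_bar.add_task("Checking Conda packages", total=5, test_name="")
        for dep in range(5):
            progress_bar.update(inner, advance=1, test_name="dep-{}".format(dep))
            time.sleep(0.05)
        # ...and hide it again once it is finished
        progress_bar.update(inner, visible=False)
```

Because the display is created with `transient=True`, the bars are cleared from the terminal when the `with` block exits, so they do not interfere with the results tables printed afterwards.
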
18 changes: 16 additions & 2 deletions nf_core/lint/conda_env_yaml.py
@@ -70,7 +70,13 @@ def conda_env_yaml(self):
         passed.append("Conda environment name was correct ({})".format(expected_env_name))
 
     # Check conda dependency list
-    for dep in self.conda_config.get("dependencies", []):
+    conda_deps = self.conda_config.get("dependencies", [])
+    if len(conda_deps) > 0:
+        conda_progress = self.progress_bar.add_task(
+            "Checking Conda packages", total=len(conda_deps), test_name=conda_deps[0]
+        )
+    for dep in conda_deps:
+        self.progress_bar.update(conda_progress, advance=1, test_name=dep)
         if isinstance(dep, str):
             # Check that each dependency has a version number
             try:
@@ -100,7 +106,13 @@ def conda_env_yaml(self):
                     passed.append("Conda package is the latest available: `{}`".format(dep))
 
         elif isinstance(dep, dict):
-            for pip_dep in dep.get("pip", []):
+            pip_deps = dep.get("pip", [])
+            if len(pip_deps) > 0:
+                pip_progress = self.progress_bar.add_task(
+                    "Checking PyPI packages", total=len(pip_deps), test_name=pip_deps[0]
+                )
+            for pip_dep in pip_deps:
+                self.progress_bar.update(pip_progress, advance=1, test_name=pip_dep)
                 # Check that each pip dependency has a version number
                 try:
                     assert pip_dep.count("=") == 2
@@ -128,6 +140,8 @@
                     )
                 else:
                     passed.append("PyPi package is latest available: {}".format(pip_depver))
+            self.progress_bar.update(pip_progress, visible=False)
+    self.progress_bar.update(conda_progress, visible=False)
 
     return {"passed": passed, "warned": warned, "failed": failed}
 
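
For context on the `assert pip_dep.count("=") == 2` line that the new loop wraps: pip requirements pin versions with `==`, while conda specs use a single `=`, so counting `=` characters is a cheap way to tell whether a dependency is pinned at all. A hypothetical helper sketching that convention (not part of this commit; the conda rule shown is an assumption about the surrounding code):

```python
# Hypothetical helper illustrating the version-pin convention the lint check
# relies on: pip pins use "==", conda specs a single "=" (plus an optional
# build string). Not part of this commit.
def is_pinned(dep, manager="conda"):
    """Return True if a dependency string carries an explicit version pin."""
    if manager == "pip":
        return dep.count("=") == 2  # e.g. "multiqc==1.9"
    return dep.count("=") in (1, 2)  # e.g. "bioconda::samtools=1.11" (assumption)


assert is_pinned("bioconda::samtools=1.11")
assert is_pinned("multiqc==1.9", manager="pip")
assert not is_pinned("samtools")
```
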
31 changes: 18 additions & 13 deletions nf_core/lint/pipeline_todos.py
@@ -1,9 +1,12 @@
 #!/usr/bin/env python
 
+import logging
 import os
 import io
 import fnmatch
 
+log = logging.getLogger(__name__)
+
 
 def pipeline_todos(self):
     """Check for nf-core *TODO* lines.
@@ -44,17 +47,19 @@ def pipeline_todos(self):
             dirs = [d for d in dirs if not fnmatch.fnmatch(os.path.join(root, d), i)]
             files = [f for f in files if not fnmatch.fnmatch(os.path.join(root, f), i)]
         for fname in files:
-            with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh:
-                for l in fh:
-                    if "TODO nf-core" in l:
-                        l = (
-                            l.replace("<!--", "")
-                            .replace("-->", "")
-                            .replace("# TODO nf-core: ", "")
-                            .replace("// TODO nf-core: ", "")
-                            .replace("TODO nf-core: ", "")
-                            .strip()
-                        )
-                        warned.append("TODO string in `{}`: _{}_".format(fname, l))
-
+            try:
+                with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh:
+                    for l in fh:
+                        if "TODO nf-core" in l:
+                            l = (
+                                l.replace("<!--", "")
+                                .replace("-->", "")
+                                .replace("# TODO nf-core: ", "")
+                                .replace("// TODO nf-core: ", "")
+                                .replace("TODO nf-core: ", "")
+                                .strip()
+                            )
+                            warned.append("TODO string in `{}`: _{}_".format(fname, l))
+            except FileNotFoundError:
+                log.debug(f"Could not open file {fname} in pipeline_todos lint test")
     return {"passed": passed, "warned": warned, "failed": failed}
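
The try/except added above means that a name returned by `os.walk()` which can no longer be opened (a dangling symlink, for example) is skipped with a debug message instead of aborting the whole lint run. A stripped-down sketch of the same pattern; the function name and log wording here are illustrative, not taken from this commit:

```python
# Stripped-down sketch of scanning files found by os.walk() while tolerating
# entries that cannot be opened (e.g. dangling symlinks). Names are illustrative.
import io
import logging
import os

log = logging.getLogger(__name__)


def find_todos(path, needle="TODO nf-core"):
    hits = []
    for root, _dirs, files in os.walk(path):
        for fname in files:
            try:
                with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh:
                    for line in fh:
                        if needle in line:
                            hits.append((fname, line.strip()))
            except FileNotFoundError:
                # os.walk() listed a name that no longer resolves to an openable file
                log.debug("Could not open file %s", fname)
    return hits
```
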
10 changes: 5 additions & 5 deletions nf_core/schema.py
@@ -79,13 +79,13 @@ def load_lint_schema(self):
             self.load_schema()
             num_params = self.validate_schema()
             self.get_schema_defaults()
-            log.info("[green]\[✓] Pipeline schema looks valid[/] [dim](found {} params)".format(num_params))
+            log.info("[green][✓] Pipeline schema looks valid[/] [dim](found {} params)".format(num_params))
         except json.decoder.JSONDecodeError as e:
             error_msg = "[bold red]Could not parse schema JSON:[/] {}".format(e)
             log.error(error_msg)
             raise AssertionError(error_msg)
         except AssertionError as e:
-            error_msg = "[red]\[✗] Pipeline schema does not follow nf-core specs:\n {}".format(e)
+            error_msg = "[red][✗] Pipeline schema does not follow nf-core specs:\n {}".format(e)
             log.error(error_msg)
             raise AssertionError(error_msg)
 
@@ -159,12 +159,12 @@ def validate_params(self):
             assert self.schema is not None
             jsonschema.validate(self.input_params, self.schema)
         except AssertionError:
-            log.error("[red]\[✗] Pipeline schema not found")
+            log.error("[red][✗] Pipeline schema not found")
             return False
         except jsonschema.exceptions.ValidationError as e:
-            log.error("[red]\[✗] Input parameters are invalid: {}".format(e.message))
+            log.error("[red][✗] Input parameters are invalid: {}".format(e.message))
             return False
-        log.info("[green]\[✓] Input parameters look valid")
+        log.info("[green][✓] Input parameters look valid")
         return True
 
     def validate_schema(self, schema=None):
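
These messages use rich console markup, in which square brackets delimit style tags; the commit drops the backslashes that previously escaped the literal `[✓]`/`[✗]` prefixes, alongside the `rich>=9.8.2` bump in setup.py below. As general background (not specific to this commit), literal brackets can be passed through rich markup either with a backslash escape or with `rich.markup.escape()`:

```python
# General rich behaviour (not from this commit): square brackets open/close
# markup tags, so literal brackets need escaping if they should print as-is.
from rich.console import Console
from rich.markup import escape

console = Console()
console.print(r"[green]\[✓] schema looks valid[/]")  # backslash-escaped literal bracket
console.print("[green]{}[/]".format(escape("[✓] schema looks valid")))  # escape() helper
```
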
2 changes: 1 addition & 1 deletion setup.py
@@ -41,7 +41,7 @@
         "pyyaml",
         "requests",
         "requests_cache",
-        "rich>=9.4",
+        "rich>=9.8.2",
         "tabulate",
     ],
     setup_requires=["twine>=1.11.0", "setuptools>=38.6."],
