Add pytest.mark.skip shortcut (Issue #607) #1040
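For orientation before the diff: the change makes @pytest.mark.skip usable directly as a mark, with an optional reason. A minimal usage sketch, inferred from the hook change and tests below (the test names here are illustrative, not from the PR):

    import pytest

    @pytest.mark.skip                      # no reason given -> reported as "unconditional skip"
    def test_not_ready():
        pass

    @pytest.mark.skip(reason="requires the new backend")   # reason keyword
    def test_needs_backend():
        pass

    @pytest.mark.skip("flaky on CI")       # a single positional argument is used as the reason
    def test_flaky():
        pass

Running with pytest -rs shows the reasons in the short test summary, which is how the new tests in this PR check them.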
Version bump:

@@ -1,2 +1,2 @@
 #
-__version__ = '2.8.2.dev1'
+__version__ = '2.9.0.dev1'
_pytest/skipping.py:

@@ -5,13 +5,16 @@
 import py
 import pytest
+from _pytest.mark import MarkInfo


 def pytest_addoption(parser):
     group = parser.getgroup("general")
     group.addoption('--runxfail',
            action="store_true", dest="runxfail", default=False,
            help="run tests even if they are marked xfail")


 def pytest_configure(config):
     if config.option.runxfail:
         old = pytest.xfail

@@ -38,18 +41,22 @@ def nop(*args, **kwargs):
         "See http://pytest.org/latest/skipping.html"
     )


 def pytest_namespace():
     return dict(xfail=xfail)


 class XFailed(pytest.fail.Exception):
     """ raised from an explicit call to pytest.xfail() """


 def xfail(reason=""):
     """ xfail an executing test or setup functions with the given reason."""
     __tracebackhide__ = True
     raise XFailed(reason)
 xfail.Exception = XFailed


 class MarkEvaluator:
     def __init__(self, item, name):
         self.item = item
@@ -147,10 +154,25 @@ def getexplanation(self):

 @pytest.hookimpl(tryfirst=True)
 def pytest_runtest_setup(item):
-    evalskip = MarkEvaluator(item, 'skipif')
-    if evalskip.istrue():
-        item._evalskip = evalskip
-        pytest.skip(evalskip.getexplanation())
+    # Check if skip or skipif are specified as pytest marks
+
+    skipif_info = item.keywords.get('skipif')
+    if isinstance(skipif_info, MarkInfo):
+        eval_skipif = MarkEvaluator(item, 'skipif')
+        if eval_skipif.istrue():
+            item._evalskip = eval_skipif
+            pytest.skip(eval_skipif.getexplanation())
+
+    skip_info = item.keywords.get('skip')
+    if isinstance(skip_info, MarkInfo):
+        item._evalskip = True
+        if 'reason' in skip_info.kwargs:
+            pytest.skip(skip_info.kwargs['reason'])
+        elif skip_info.args:
+            pytest.skip(skip_info.args[0])
+        else:
+            pytest.skip("unconditional skip")
+
     item._evalxfail = MarkEvaluator(item, 'xfail')
     check_xfail_no_run(item)

Review comment (on the argument handling above): I'm not very happy with the way args are handled here and was hoping for some feedback. For example, what if the user passes more than one arg? Should we raise an exception? What if an invalid kwarg is passed?

Reply: I'm not sure; I also think the argument handling for marks in general is a little clunky. I'd say this looks good enough, but we should probably look at a way to improve argument handling for marks in general in the future.
testing/test_skipping.py:

@@ -4,6 +4,7 @@
 from _pytest.skipping import MarkEvaluator, folded_skips, pytest_runtest_setup
 from _pytest.runner import runtestprotocol


 class TestEvaluator:
     def test_no_marker(self, testdir):
         item = testdir.getitem("def test_func(): pass")

@@ -382,6 +383,90 @@ def test_func():
         ])


+class TestSkip:
+    def test_skip_class(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+            @pytest.mark.skip
+            class TestSomething(object):
+                def test_foo(self):
+                    pass
+                def test_bar(self):
+                    pass
+
+            def test_baz():
+                pass
+        """)
+        rec = testdir.inline_run()
+        rec.assertoutcome(skipped=2, passed=1)
+    def test_skips_on_false_string(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+            @pytest.mark.skip('False')
+            def test_foo():
+                pass
+        """)
+        rec = testdir.inline_run()
+        rec.assertoutcome(skipped=1)

Review comment (on the @pytest.mark.skip('False') line): What would you expect this arg to be? Would this be assumed to be a reason? Should it be ignored? I guess reason makes the most sense...

Reply: Yep, I would expect it to be the reason for the skip: an optional informative message. If not given, we can use a default message like I used in my example (…).

Review comment (on the assertoutcome line): Also check the reason here, please.

Reply: How would I check the reason here if it's not specified, though? Is matching the string "skipped instance" good enough?

Reply: I assume the reason in this case was …
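On the question of how to check the reason when using inline_run(): one option is simply to run with -rs and match the reason in the summary, as the later tests in this PR do; another is to inspect the skipped report directly. The sketch below is an assumption about the HookRecorder API (listoutcomes() and a (path, lineno, reason) longrepr for skips) rather than something this PR adds:

    def test_skip_reason_is_reported(testdir):
        testdir.makepyfile("""
            import pytest
            @pytest.mark.skip
            def test_foo():
                pass
        """)
        rec = testdir.inline_run()
        passed, skipped, failed = rec.listoutcomes()
        assert len(skipped) == 1
        # The last element of the skip longrepr carries the message, which
        # with this PR's default should mention "unconditional skip".
        assert "unconditional skip" in skipped[0].longrepr[2]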
+    def test_arg_as_reason(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+            @pytest.mark.skip('testing stuff')
+            def test_bar():
+                pass
+        """)
+        result = testdir.runpytest('-rs')
+        result.stdout.fnmatch_lines([
+            "*testing stuff*",
+            "*1 skipped*",
+        ])
+    def test_skip_no_reason(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+            @pytest.mark.skip
+            def test_foo():
+                pass
+        """)
+        result = testdir.runpytest('-rs')
+        result.stdout.fnmatch_lines([
+            "*unconditional skip*",
+            "*1 skipped*",
+        ])

Review comment (on this test): I was actually referring to this test, @nicoddemus; GitHub moved our comments when I updated the code.

Reply: Oh! Well, checking …
+    def test_skip_with_reason(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+            @pytest.mark.skip(reason="for lolz")
+            def test_bar():
+                pass
+        """)
+        result = testdir.runpytest('-rs')
+        result.stdout.fnmatch_lines([
+            "*for lolz*",
+            "*1 skipped*",
+        ])
+    def test_only_skips_marked_test(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+            @pytest.mark.skip
+            def test_foo():
+                pass
+            @pytest.mark.skip(reason="nothing in particular")
+            def test_bar():
+                pass
+            def test_baz():
+                assert True
+        """)
+        result = testdir.runpytest('-rs')
+        result.stdout.fnmatch_lines([
+            "*nothing in particular*",
+            "*1 passed*2 skipped*",
+        ])
+
+
 class TestSkipif:
     def test_skipif_conditional(self, testdir):
         item = testdir.getitem("""

Review comment: Just as an FYI as to why these whitespace-only changes are here: my editor automatically trims out extra whitespace at the end of lines.

Reply: No worries, thanks.