|
5 | 5 | import sys |
6 | 6 | import warnings |
7 | 7 |
|
| 8 | +from _pytest.config import ExitCode |
8 | 9 | from _pytest.fixtures import FixtureRequest |
9 | 10 | from _pytest.pytester import Pytester |
10 | 11 | import pytest |
@@ -885,3 +886,155 @@ def test_resource_warning(tmp_path): |
885 | 886 | else [] |
886 | 887 | ) |
887 | 888 | result.stdout.fnmatch_lines([*expected_extra, "*1 passed*"]) |
| 889 | + |
| 890 | + |
class TestMaxWarnings:
    """Tests for the --max-warnings feature.

    ``--max-warnings N`` (or the ``max_warnings`` ini option) makes an
    otherwise-passing session exit with ``ExitCode.MAX_WARNINGS_ERROR``
    when more than N warnings were captured.  A genuine test failure
    still takes priority over the warning-count check.
    """

    # Two passing tests that each emit one UserWarning: 2 warnings total.
    PYFILE = """
        import warnings
        def test_one():
            warnings.warn(UserWarning("warning one"))
        def test_two():
            warnings.warn(UserWarning("warning two"))
        """

    @pytest.mark.filterwarnings("default::UserWarning")
    def test_max_warnings_not_set(self, pytester: Pytester) -> None:
        """Without --max-warnings, warnings don't affect exit code."""
        pytester.makepyfile(self.PYFILE)
        result = pytester.runpytest()
        result.assert_outcomes(passed=2, warnings=2)
        assert result.ret == ExitCode.OK

    @pytest.mark.filterwarnings("default::UserWarning")
    def test_max_warnings_not_exceeded(self, pytester: Pytester) -> None:
        """When warning count is below the threshold, exit code is OK."""
        pytester.makepyfile(self.PYFILE)
        result = pytester.runpytest("--max-warnings", "10")
        result.assert_outcomes(passed=2, warnings=2)
        assert result.ret == ExitCode.OK

    @pytest.mark.filterwarnings("default::UserWarning")
    def test_max_warnings_exceeded(self, pytester: Pytester) -> None:
        """When warning count exceeds threshold, exit code is MAX_WARNINGS_ERROR."""
        pytester.makepyfile(self.PYFILE)
        result = pytester.runpytest("--max-warnings", "1")
        # The tests themselves still pass and both warnings are recorded;
        # only the session exit code changes.
        result.assert_outcomes(passed=2, warnings=2)
        assert result.ret == ExitCode.MAX_WARNINGS_ERROR

    @pytest.mark.filterwarnings("default::UserWarning")
    def test_max_warnings_equal_to_count(self, pytester: Pytester) -> None:
        """When warning count equals threshold exactly, exit code is OK."""
        pytester.makepyfile(self.PYFILE)
        result = pytester.runpytest("--max-warnings", "2")
        result.assert_outcomes(passed=2, warnings=2)
        assert result.ret == ExitCode.OK

    @pytest.mark.filterwarnings("default::UserWarning")
    def test_max_warnings_zero(self, pytester: Pytester) -> None:
        """--max-warnings 0 means no warnings are allowed."""
        pytester.makepyfile(self.PYFILE)
        result = pytester.runpytest("--max-warnings", "0")
        result.assert_outcomes(passed=2, warnings=2)
        assert result.ret == ExitCode.MAX_WARNINGS_ERROR

    @pytest.mark.filterwarnings("default::UserWarning")
    def test_max_warnings_exceeded_message(self, pytester: Pytester) -> None:
        """Verify the output message when max warnings is exceeded."""
        pytester.makepyfile(self.PYFILE)
        result = pytester.runpytest("--max-warnings", "1")
        result.stdout.fnmatch_lines(
            ["*Tests pass, but maximum allowed warnings exceeded: 2 > 1*"]
        )
        # The message must be accompanied by the dedicated exit code.
        assert result.ret == ExitCode.MAX_WARNINGS_ERROR

    @pytest.mark.filterwarnings("default::UserWarning")
    def test_max_warnings_ini_option(self, pytester: Pytester) -> None:
        """max_warnings can be set via INI configuration."""
        pytester.makeini(
            """
            [pytest]
            max_warnings = 1
            """
        )
        pytester.makepyfile(self.PYFILE)
        result = pytester.runpytest()
        result.assert_outcomes(passed=2, warnings=2)
        assert result.ret == ExitCode.MAX_WARNINGS_ERROR

    @pytest.mark.filterwarnings("default::UserWarning")
    def test_max_warnings_with_test_failure(self, pytester: Pytester) -> None:
        """When tests fail AND warnings exceed max, TESTS_FAILED takes priority."""
        pytester.makepyfile(
            """
            import warnings
            def test_fail():
                warnings.warn(UserWarning("a warning"))
                assert False
            """
        )
        result = pytester.runpytest("--max-warnings", "0")
        assert result.ret == ExitCode.TESTS_FAILED

    @pytest.mark.filterwarnings("default::UserWarning")
    def test_max_warnings_with_filterwarnings_ignore(self, pytester: Pytester) -> None:
        """Filtered (ignored) warnings don't count toward max_warnings."""
        pytester.makepyfile(
            """
            import warnings
            def test_one():
                warnings.warn(UserWarning("counted"))
                warnings.warn(RuntimeWarning("ignored"))
            """
        )
        result = pytester.runpytest(
            "--max-warnings",
            "1",
            "-W",
            "ignore::RuntimeWarning",
        )
        result.assert_outcomes(passed=1, warnings=1)
        assert result.ret == ExitCode.OK

    @pytest.mark.filterwarnings("default::UserWarning")
    def test_max_warnings_with_filterwarnings_error(self, pytester: Pytester) -> None:
        """Warnings turned into errors via filterwarnings don't count as warnings."""
        pytester.makepyfile(
            """
            import warnings
            def test_one():
                warnings.warn(UserWarning("still a warning"))
            def test_two():
                warnings.warn(RuntimeWarning("becomes an error"))
            """
        )
        result = pytester.runpytest(
            "--max-warnings",
            "0",
            "-W",
            "error::RuntimeWarning",
        )
        # The RuntimeWarning becomes a test error, so TESTS_FAILED takes priority.
        assert result.ret == ExitCode.TESTS_FAILED

    @pytest.mark.filterwarnings("default::UserWarning")
    def test_max_warnings_with_filterwarnings_ini_ignore(
        self, pytester: Pytester
    ) -> None:
        """Warnings ignored via ini filterwarnings don't count toward max_warnings."""
        pytester.makeini(
            """
            [pytest]
            filterwarnings =
                ignore::RuntimeWarning
            max_warnings = 1
            """
        )
        pytester.makepyfile(
            """
            import warnings
            def test_one():
                warnings.warn(UserWarning("counted"))
                warnings.warn(RuntimeWarning("ignored by ini"))
            """
        )
        result = pytester.runpytest()
        result.assert_outcomes(passed=1, warnings=1)
        assert result.ret == ExitCode.OK
0 commit comments