
Commit bb45638

Add option to automatically generate failing.llvm (#2680)
* Add pytest fixture to regen failing.llvm
* regen failing.llvm
* tweaks
* fix chaining exceptions
* Address review suggestions
1 parent 98eac94 commit bb45638

File tree

3 files changed: +3787 additions, -143 deletions (the bulk of the added lines is the regenerated tests/failing.llvm, whose diff is not reproduced below)

kevm-pyk/src/tests/conftest.py

Lines changed: 11 additions & 0 deletions
@@ -10,6 +10,12 @@
 
 
 def pytest_addoption(parser: Parser) -> None:
+    parser.addoption(
+        '--save-failing',
+        action='store_true',
+        default=False,
+        help='Save failing tests to the failing.llvm file',
+    )
     parser.addoption(
         '--update-expected-output',
         action='store_true',
@@ -47,6 +53,11 @@ def pytest_addoption(parser: Parser) -> None:
     )
 
 
+@pytest.fixture
+def save_failing(request: FixtureRequest) -> bool:
+    return request.config.getoption('--save-failing')
+
+
 @pytest.fixture
 def update_expected_output(request: FixtureRequest) -> bool:
     return request.config.getoption('--update-expected-output')
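The new flag mirrors the existing --update-expected-output option: pytest_addoption registers it, and the save_failing fixture exposes its value, so any test that names save_failing as a parameter receives the boolean automatically. A hypothetical invocation (run from kevm-pyk/, path assumed from this repo layout) might look like:

    pytest src/tests/integration/test_conformance.py --save-failing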

kevm-pyk/src/tests/integration/test_conformance.py

Lines changed: 34 additions & 13 deletions
@@ -30,14 +30,18 @@
 TEST_DIR: Final = REPO_ROOT / 'tests/ethereum-tests'
 GOLDEN: Final = (REPO_ROOT / 'tests/templates/output-success-llvm.json').read_text().rstrip()
 TEST_FILES_WITH_CID_0: Final = (REPO_ROOT / 'tests/bchain.0.chainId').read_text().splitlines()
+FAILING_TESTS_FILE: Final = REPO_ROOT / 'tests/failing.llvm'
+SLOW_TESTS_FILE: Final = REPO_ROOT / 'tests/slow.llvm'
 
 
-def _test(gst_file: Path, schedule: str, mode: str, usegas: bool) -> None:
+def _test(gst_file: Path, *, schedule: str, mode: str, usegas: bool, save_failing: bool) -> None:
     skipped_gst_tests = SKIPPED_TESTS.get(gst_file, [])
     if '*' in skipped_gst_tests:
         pytest.skip()
 
-    chainid = 0 if str(gst_file.relative_to(TEST_DIR)) in TEST_FILES_WITH_CID_0 else 1
+    failing_tests: list[str] = []
+    gst_file_relative_path: Final[str] = str(gst_file.relative_to(TEST_DIR))
+    chainid = 0 if gst_file_relative_path in TEST_FILES_WITH_CID_0 else 1
 
     with gst_file.open() as f:
         gst_data = json.load(f)
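Note the bare * in the new _test signature: it makes schedule, mode, usegas, and save_failing keyword-only, which is why the call sites further down now spell out each argument name. A small self-contained illustration with a hypothetical function f:

    def f(x: int, *, flag: bool) -> None:
        print(x, flag)

    f(1, flag=True)   # ok: keyword-only argument passed by name
    # f(1, True)      # TypeError: f() takes 1 positional argument but 2 were given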
@@ -47,7 +51,24 @@ def _test(gst_file: Path, schedule: str, mode: str, usegas: bool) -> None:
         if test_name in skipped_gst_tests:
             continue
         res = interpret({test_name: test}, schedule, mode, chainid, usegas, check=False)
-        _assert_exit_code_zero(res)
+
+        try:
+            _assert_exit_code_zero(res)
+        except AssertionError:
+            if not save_failing:
+                raise
+            failing_tests.append(test_name)
+
+    if not failing_tests:
+        return
+    if save_failing:
+        with FAILING_TESTS_FILE.open('a') as ff:
+            if len(failing_tests) == len(gst_data):
+                ff.write(f'{gst_file_relative_path},*\n')
+            else:
+                for test_name in sorted(failing_tests):
+                    ff.write(f'{gst_file_relative_path},{test_name}\n')
+    raise AssertionError(f'Found failing tests in GST file {gst_file_relative_path}: {failing_tests}')
 
 
 def _assert_exit_code_zero(pattern: Pattern) -> None:
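With save_failing enabled, _test no longer fails fast: it records each AssertionError, appends one CSV row per failing test to failing.llvm (or a single wildcard row when every test in the file failed), and only raises once the whole file has run. A standalone sketch of this collect-then-report idiom, where CASES and run_case are hypothetical stand-ins for gst_data and interpret():

    # Hypothetical cases: name -> whether the case passes.
    CASES = {'add': True, 'sub': False, 'mul': False}

    def run_case(name: str) -> None:
        assert CASES[name], f'{name} failed'

    failures: list[str] = []
    for name in CASES:
        try:
            run_case(name)
        except AssertionError:
            failures.append(name)  # record the failure and keep going

    if failures:
        for name in sorted(failures):
            print(f'some/file.json,{name}')  # one row per failure, as in failing.llvm
        raise AssertionError(f'failing cases: {sorted(failures)}')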
@@ -66,8 +87,8 @@ def _assert_exit_code_zero(pattern: Pattern) -> None:
 
 
 def _skipped_tests() -> dict[Path, list[str]]:
-    slow_tests = read_csv_file(REPO_ROOT / 'tests/slow.llvm')
-    failing_tests = read_csv_file(REPO_ROOT / 'tests/failing.llvm')
+    slow_tests = read_csv_file(SLOW_TESTS_FILE)
+    failing_tests = read_csv_file(FAILING_TESTS_FILE)
     skipped: dict[Path, list[str]] = {}
     for test_file, test in slow_tests + failing_tests:
         test_file = TEST_DIR / test_file
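Both files share the format that _test appends above: one <gst-file-relative-path>,<test-name> pair per line, with * in the test-name position marking every test in that file. Hypothetical entries for illustration:

    BlockchainTests/GeneralStateTests/stExample/add11.json,add11_d0g0v0_Cancun
    BlockchainTests/GeneralStateTests/stExample/broken.json,*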
@@ -93,8 +114,8 @@ def read_csv_file(csv_file: Path) -> tuple[tuple[Path, str], ...]:
     VM_TESTS,
     ids=[str(test_file.relative_to(VM_TEST_DIR)) for test_file in VM_TESTS],
 )
-def test_vm(test_file: Path) -> None:
-    _test(test_file, 'DEFAULT', 'VMTESTS', True)
+def test_vm(test_file: Path, save_failing: bool) -> None:
+    _test(test_file, schedule='DEFAULT', mode='VMTESTS', usegas=True, save_failing=save_failing)
 
 
 @pytest.mark.skip(reason='failing / slow VM tests')
@@ -103,8 +124,8 @@ def test_vm(test_file: Path) -> None:
     SKIPPED_VM_TESTS,
     ids=[str(test_file.relative_to(VM_TEST_DIR)) for test_file in SKIPPED_VM_TESTS],
 )
-def test_rest_vm(test_file: Path) -> None:
-    _test(test_file, 'DEFAULT', 'VMTESTS', True)
+def test_rest_vm(test_file: Path, save_failing: bool) -> None:
+    _test(test_file, schedule='DEFAULT', mode='VMTESTS', usegas=True, save_failing=save_failing)
 
 
 ALL_TEST_DIR: Final = TEST_DIR / 'BlockchainTests/GeneralStateTests'
@@ -118,8 +139,8 @@ def test_rest_vm(test_file: Path) -> None:
     BCHAIN_TESTS,
     ids=[str(test_file.relative_to(ALL_TEST_DIR)) for test_file in BCHAIN_TESTS],
 )
-def test_bchain(test_file: Path) -> None:
-    _test(test_file, 'CANCUN', 'NORMAL', True)
+def test_bchain(test_file: Path, save_failing: bool) -> None:
+    _test(test_file, schedule='CANCUN', mode='NORMAL', usegas=True, save_failing=save_failing)
 
 
 @pytest.mark.skip(reason='failing / slow blockchain tests')
@@ -128,5 +149,5 @@ def test_bchain(test_file: Path) -> None:
     SKIPPED_BCHAIN_TESTS,
     ids=[str(test_file.relative_to(ALL_TEST_DIR)) for test_file in SKIPPED_BCHAIN_TESTS],
 )
-def test_rest_bchain(test_file: Path) -> None:
-    _test(test_file, 'CANCUN', 'NORMAL', True)
+def test_rest_bchain(test_file: Path, save_failing: bool) -> None:
+    _test(test_file, schedule='CANCUN', mode='NORMAL', usegas=True, save_failing=save_failing)
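In each test above, pytest resolves test_file from the parametrize decorator and save_failing from the conftest.py fixture; combining the two needs no extra wiring. A minimal self-contained illustration (the fixture body here is a stand-in that ignores the real --save-failing flag):

    import pytest

    @pytest.fixture
    def save_failing() -> bool:
        return False  # stand-in; the real fixture reads --save-failing

    @pytest.mark.parametrize('n', [1, 2, 3])
    def test_mix(n: int, save_failing: bool) -> None:
        assert n > 0 and isinstance(save_failing, bool)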
