Add option to automatically generate failing.llvm (#2680)
* Add pytest fixture to regenerate failing.llvm
* Regenerate failing.llvm
* Tweaks
* Fix exception chaining
* Address review suggestions
anvacaru authored Jan 16, 2025
1 parent 98eac94 commit bb45638
Showing 3 changed files with 3,787 additions and 143 deletions.
11 changes: 11 additions & 0 deletions kevm-pyk/src/tests/conftest.py
@@ -10,6 +10,12 @@
 
 
 def pytest_addoption(parser: Parser) -> None:
+    parser.addoption(
+        '--save-failing',
+        action='store_true',
+        default=False,
+        help='Save failing tests to the failing.llvm file',
+    )
     parser.addoption(
         '--update-expected-output',
         action='store_true',
@@ -47,6 +53,11 @@ def pytest_addoption(parser: Parser) -> None:
     )
 
 
+@pytest.fixture
+def save_failing(request: FixtureRequest) -> bool:
+    return request.config.getoption('--save-failing')
+
+
 @pytest.fixture
 def update_expected_output(request: FixtureRequest) -> bool:
     return request.config.getoption('--update-expected-output')
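
With both pieces in place, a test can receive the flag's value simply by declaring a save_failing parameter, as the conformance tests below do. As a usage illustration, a run that records failures might be driven as in the following minimal sketch (the target path is an assumption about the repository layout, and the suite may normally be invoked through the project's own wrappers):

# Illustrative sketch only: drives pytest programmatically with the new option.
# The test path below is an assumption, not taken from this commit.
import pytest

exit_code = pytest.main(['src/tests/integration/test_conformance.py', '--save-failing'])
print('pytest exit code:', exit_code)
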
47 changes: 34 additions & 13 deletions kevm-pyk/src/tests/integration/test_conformance.py
@@ -30,14 +30,18 @@
 TEST_DIR: Final = REPO_ROOT / 'tests/ethereum-tests'
 GOLDEN: Final = (REPO_ROOT / 'tests/templates/output-success-llvm.json').read_text().rstrip()
 TEST_FILES_WITH_CID_0: Final = (REPO_ROOT / 'tests/bchain.0.chainId').read_text().splitlines()
+FAILING_TESTS_FILE: Final = REPO_ROOT / 'tests/failing.llvm'
+SLOW_TESTS_FILE: Final = REPO_ROOT / 'tests/slow.llvm'
 
 
-def _test(gst_file: Path, schedule: str, mode: str, usegas: bool) -> None:
+def _test(gst_file: Path, *, schedule: str, mode: str, usegas: bool, save_failing: bool) -> None:
     skipped_gst_tests = SKIPPED_TESTS.get(gst_file, [])
     if '*' in skipped_gst_tests:
         pytest.skip()
 
-    chainid = 0 if str(gst_file.relative_to(TEST_DIR)) in TEST_FILES_WITH_CID_0 else 1
+    failing_tests: list[str] = []
+    gst_file_relative_path: Final[str] = str(gst_file.relative_to(TEST_DIR))
+    chainid = 0 if gst_file_relative_path in TEST_FILES_WITH_CID_0 else 1
 
     with gst_file.open() as f:
         gst_data = json.load(f)
@@ -47,7 +51,24 @@ def _test(gst_file: Path, schedule: str, mode: str, usegas: bool) -> None:
         if test_name in skipped_gst_tests:
             continue
         res = interpret({test_name: test}, schedule, mode, chainid, usegas, check=False)
-        _assert_exit_code_zero(res)
+
+        try:
+            _assert_exit_code_zero(res)
+        except AssertionError:
+            if not save_failing:
+                raise
+            failing_tests.append(test_name)
+
+    if not failing_tests:
+        return
+    if save_failing:
+        with FAILING_TESTS_FILE.open('a') as ff:
+            if len(failing_tests) == len(gst_data):
+                ff.write(f'{gst_file_relative_path},*\n')
+            else:
+                for test_name in sorted(failing_tests):
+                    ff.write(f'{gst_file_relative_path},{test_name}\n')
+    raise AssertionError(f'Found failing tests in GST file {gst_file_relative_path}: {failing_tests}')
 
 
 def _assert_exit_code_zero(pattern: Pattern) -> None:
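
The records appended to tests/failing.llvm reuse the two-column CSV shape that the skip lists are already read with: the GST file path relative to TEST_DIR, a comma, and either a single failing test name or '*' when every test in the file failed. A self-contained sketch of that formatting rule (the helper name and example values are hypothetical, not part of this commit):

# Hypothetical helper mirroring the record format _test appends to failing.llvm.
def format_failing_records(gst_relpath: str, failing: list[str], total: int) -> list[str]:
    if len(failing) == total:
        # every test in the GST file failed: collapse to a single wildcard entry
        return [f'{gst_relpath},*']
    return [f'{gst_relpath},{name}' for name in sorted(failing)]

# Example with illustrative names:
# format_failing_records('VMTests/vmArithmeticTest/add.json', ['add_d0g0v0_Cancun'], 3)
# returns ['VMTests/vmArithmeticTest/add.json,add_d0g0v0_Cancun']
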
@@ -66,8 +87,8 @@ def _assert_exit_code_zero(pattern: Pattern) -> None:
 
 
 def _skipped_tests() -> dict[Path, list[str]]:
-    slow_tests = read_csv_file(REPO_ROOT / 'tests/slow.llvm')
-    failing_tests = read_csv_file(REPO_ROOT / 'tests/failing.llvm')
+    slow_tests = read_csv_file(SLOW_TESTS_FILE)
+    failing_tests = read_csv_file(FAILING_TESTS_FILE)
     skipped: dict[Path, list[str]] = {}
     for test_file, test in slow_tests + failing_tests:
         test_file = TEST_DIR / test_file
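
read_csv_file itself is unchanged by this commit, so its body does not appear in the diff. Judging from its signature (visible as context in the next hunk) and the record format above, a compatible reader would look roughly like this sketch (an assumption, not necessarily the actual implementation):

import csv
from pathlib import Path


def read_csv_file_sketch(csv_file: Path) -> tuple[tuple[Path, str], ...]:
    # Assumed format: each non-empty row is '<gst file path>,<test name or *>'.
    with csv_file.open(newline='') as f:
        return tuple((Path(row[0]), row[1]) for row in csv.reader(f) if row)
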
@@ -93,8 +114,8 @@ def read_csv_file(csv_file: Path) -> tuple[tuple[Path, str], ...]:
     VM_TESTS,
     ids=[str(test_file.relative_to(VM_TEST_DIR)) for test_file in VM_TESTS],
 )
-def test_vm(test_file: Path) -> None:
-    _test(test_file, 'DEFAULT', 'VMTESTS', True)
+def test_vm(test_file: Path, save_failing: bool) -> None:
+    _test(test_file, schedule='DEFAULT', mode='VMTESTS', usegas=True, save_failing=save_failing)
 
 
 @pytest.mark.skip(reason='failing / slow VM tests')
@@ -103,8 +124,8 @@ def test_vm(test_file: Path) -> None:
     SKIPPED_VM_TESTS,
     ids=[str(test_file.relative_to(VM_TEST_DIR)) for test_file in SKIPPED_VM_TESTS],
 )
-def test_rest_vm(test_file: Path) -> None:
-    _test(test_file, 'DEFAULT', 'VMTESTS', True)
+def test_rest_vm(test_file: Path, save_failing: bool) -> None:
+    _test(test_file, schedule='DEFAULT', mode='VMTESTS', usegas=True, save_failing=save_failing)
 
 
 ALL_TEST_DIR: Final = TEST_DIR / 'BlockchainTests/GeneralStateTests'
@@ -118,8 +139,8 @@ def test_rest_vm(test_file: Path) -> None:
     BCHAIN_TESTS,
     ids=[str(test_file.relative_to(ALL_TEST_DIR)) for test_file in BCHAIN_TESTS],
 )
-def test_bchain(test_file: Path) -> None:
-    _test(test_file, 'CANCUN', 'NORMAL', True)
+def test_bchain(test_file: Path, save_failing: bool) -> None:
+    _test(test_file, schedule='CANCUN', mode='NORMAL', usegas=True, save_failing=save_failing)
 
 
 @pytest.mark.skip(reason='failing / slow blockchain tests')
@@ -128,5 +149,5 @@ def test_bchain(test_file: Path) -> None:
     SKIPPED_BCHAIN_TESTS,
     ids=[str(test_file.relative_to(ALL_TEST_DIR)) for test_file in SKIPPED_BCHAIN_TESTS],
 )
-def test_rest_bchain(test_file: Path) -> None:
-    _test(test_file, 'CANCUN', 'NORMAL', True)
+def test_rest_bchain(test_file: Path, save_failing: bool) -> None:
+    _test(test_file, schedule='CANCUN', mode='NORMAL', usegas=True, save_failing=save_failing)
The third changed file, the regenerated tests/failing.llvm, is a large generated diff and is not rendered here.
