diff --git a/.github/configs/feature.yaml b/.github/configs/feature.yaml index b9404e5d59..17ac97dce5 100644 --- a/.github/configs/feature.yaml +++ b/.github/configs/feature.yaml @@ -1,27 +1,27 @@ # Unless filling for special features, all features should fill for previous forks (starting from Frontier) too stable: evm-type: stable - fill-params: --until=Prague --fill-static-tests --ignore=tests/static/state_tests/stQuadraticComplexityTest + fill-params: --no-html --until=Prague --fill-static-tests --ignore=tests/static/state_tests/stQuadraticComplexityTest develop: - evm-type: develop - fill-params: --until=BPO4 --fill-static-tests --ignore=tests/static/state_tests/stQuadraticComplexityTest + evm-type: develop + fill-params: --no-html --until=BPO4 --fill-static-tests --ignore=tests/static/state_tests/stQuadraticComplexityTest benchmark: evm-type: benchmark - fill-params: --fork=Prague --gas-benchmark-values 1,5,10,30,60,100,150 -m benchmark ./tests/benchmark + fill-params: --no-html --fork=Prague --gas-benchmark-values 1,5,10,30,60,100,150 -m benchmark ./tests/benchmark benchmark_develop: evm-type: benchmark - fill-params: --fork=Osaka --gas-benchmark-values 1,5,10,30,60,100,150 -m "benchmark" ./tests/benchmark + fill-params: --no-html --fork=Osaka --gas-benchmark-values 1,5,10,30,60,100,150 -m "benchmark" ./tests/benchmark feature_only: true benchmark_fast: evm-type: benchmark - fill-params: --fork=Prague --gas-benchmark-values 100 -m "benchmark" ./tests/benchmark + fill-params: --no-html --fork=Prague --gas-benchmark-values 100 -m "benchmark" ./tests/benchmark feature_only: true bal: evm-type: develop - fill-params: --fork=Amsterdam --fill-static-tests + fill-params: --no-html --fork=Amsterdam --fill-static-tests feature_only: true diff --git a/packages/testing/src/execution_testing/cli/gen_index.py b/packages/testing/src/execution_testing/cli/gen_index.py index 1e4af37cf1..3a95688d5e 100644 --- a/packages/testing/src/execution_testing/cli/gen_index.py +++ b/packages/testing/src/execution_testing/cli/gen_index.py @@ -226,5 +226,79 @@ def generate_fixtures_index( f.write(index.model_dump_json(exclude_none=False, indent=2)) +def merge_partial_indexes(output_dir: Path, quiet_mode: bool = False) -> None: + """ + Merge partial index files from all workers into final index.json. + + This is called by pytest_sessionfinish on the master process after all + workers have finished and written their partial indexes. + + Partial indexes use JSONL format (one JSON object per line) for efficient + append-only writes during fill. Entries are validated with Pydantic here. + + Args: + output_dir: The fixture output directory. + quiet_mode: If True, don't print status messages. + + """ + meta_dir = output_dir / ".meta" + partial_files = list(meta_dir.glob("partial_index*.jsonl")) + + if not partial_files: + raise Exception("No partial indexes found.") + + # Merge all partial indexes (JSONL format: one entry per line) + # Read as raw dicts — the data was already validated when collected + # from live Pydantic fixture objects in add_fixture(). 
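+    # Each JSONL entry is the raw dict written by
+    # FixtureCollector.add_fixture(): "id", "json_path", "fixture_hash",
+    # "fork" and "format", plus "pre_hash" when the fixture defines one.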
+ all_raw_entries: list[dict] = [] + all_forks: set = set() + all_formats: set = set() + + for partial_file in partial_files: + with open(partial_file) as f: + for line in f: + line = line.strip() + if not line: + continue + entry_data = json.loads(line) + all_raw_entries.append(entry_data) + # Collect forks and formats from raw strings + if entry_data.get("fork"): + all_forks.add(entry_data["fork"]) + if entry_data.get("format"): + all_formats.add(entry_data["format"]) + + # Compute root hash from raw dicts (no Pydantic needed) + root_hash = HashableItem.from_raw_entries(all_raw_entries).hash() + + # Build final index — Pydantic validates the entire structure once + # via model_validate(), not 96k individual model_validate() calls. + index = IndexFile.model_validate( + { + "test_cases": all_raw_entries, + "root_hash": HexNumber(root_hash), + "created_at": datetime.datetime.now(), + "test_count": len(all_raw_entries), + "forks": list(all_forks), + "fixture_formats": list(all_formats), + } + ) + + # Write final index + index_path = meta_dir / "index.json" + index_path.parent.mkdir(parents=True, exist_ok=True) + index_path.write_text(index.model_dump_json(exclude_none=True, indent=2)) + + if not quiet_mode: + rich.print( + f"[green]Merged {len(partial_files)} partial indexes " + f"({len(all_raw_entries)} test cases) into {index_path}[/]" + ) + + # Cleanup partial files + for partial_file in partial_files: + partial_file.unlink() + + if __name__ == "__main__": generate_fixtures_index_cli() diff --git a/packages/testing/src/execution_testing/cli/hasher.py b/packages/testing/src/execution_testing/cli/hasher.py index 5b13e229b1..5bd6a9b8e9 100644 --- a/packages/testing/src/execution_testing/cli/hasher.py +++ b/packages/testing/src/execution_testing/cli/hasher.py @@ -1,17 +1,22 @@ """Simple CLI tool to hash a directory of JSON fixtures.""" +from __future__ import annotations + import hashlib import json import sys from dataclasses import dataclass, field from enum import IntEnum, auto from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, TypeVar +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, TypeVar import click from rich.console import Console from rich.markup import escape as rich_escape +if TYPE_CHECKING: + from execution_testing.fixtures.consume import TestCaseIndexFile + class HashableItemType(IntEnum): """Represents the type of a hashable item.""" @@ -145,6 +150,104 @@ def from_folder( items[file_path.name] = item return cls(type=HashableItemType.FOLDER, items=items, parents=parents) + @classmethod + def from_index_entries( + cls, entries: List["TestCaseIndexFile"] + ) -> "HashableItem": + """ + Create a hashable item tree from index entries (no file I/O). + + This produces the same hash as from_folder() but uses pre-collected + fixture hashes instead of reading files from disk. + + Optimized to O(n) using a trie-like structure built in a single pass, + avoiding repeated path operations and iterations. + """ + raw = [ + { + "id": e.id, + "json_path": str(e.json_path), + "fixture_hash": str(e.fixture_hash) + if e.fixture_hash + else None, + } + for e in entries + ] + return cls.from_raw_entries(raw) + + @classmethod + def from_raw_entries(cls, entries: List[Dict]) -> "HashableItem": + """ + Create a hashable item tree from raw entry dicts (no file I/O). + + Accepts dicts with "id", "json_path", and "fixture_hash" keys. + This avoids Pydantic overhead entirely — only plain string/int + operations are used to build the hash tree. 
+ + Produces the same hash as from_folder() and from_index_entries(). + """ + # Build a trie where each node is either: + # - A dict (folder node) containing child nodes + # - A list of (test_id, hash_bytes) tuples (file node marker) + # + # Structure: {folder: {folder: {file.json: [(id, hash), ...]}}} + root_trie: dict = {} + + # Single pass: insert all entries into trie + for entry in entries: + fixture_hash = entry.get("fixture_hash") + if not fixture_hash: + continue + + # Navigate/create path to file node + path_parts = Path(entry["json_path"]).parts + current = root_trie + + # Navigate to parent folder, creating nodes as needed + for part in path_parts[:-1]: + if part not in current: + current[part] = {} + current = current[part] + + # Add test entry to file node + file_name = path_parts[-1] + if file_name not in current: + current[file_name] = [] + + # Convert hex string to 32-byte hash + hash_bytes = int(fixture_hash, 16).to_bytes(32, "big") + current[file_name].append((entry["id"], hash_bytes)) + + # Convert trie to HashableItem tree (single recursive pass) + def trie_to_hashable(node: dict) -> Dict[str, "HashableItem"]: + """Convert a trie node to HashableItem dict.""" + items: Dict[str, HashableItem] = {} + + for name, child in node.items(): + if isinstance(child, list): + # File node: child is list of (test_id, hash_bytes) + test_items = { + test_id: cls( + type=HashableItemType.TEST, root=hash_bytes + ) + for test_id, hash_bytes in child + } + items[name] = cls( + type=HashableItemType.FILE, items=test_items + ) + else: + # Folder node: recurse + items[name] = cls( + type=HashableItemType.FOLDER, + items=trie_to_hashable(child), + ) + + return items + + return cls( + type=HashableItemType.FOLDER, items=trie_to_hashable(root_trie) + ) + def render_hash_report( folder: Path, diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py index 55a8f37c1e..bd2490f820 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py @@ -29,7 +29,7 @@ ReferenceSpec, ) from execution_testing.cli.gen_index import ( - generate_fixtures_index, + merge_partial_indexes, ) from execution_testing.client_clis import TransitionTool from execution_testing.client_clis.clis.geth import FixtureConsumerTool @@ -44,6 +44,7 @@ PreAllocGroupBuilders, PreAllocGroups, TestInfo, + merge_partial_fixture_files, ) from execution_testing.forks import ( Fork, @@ -1237,11 +1238,16 @@ def fixture_collector( single_fixture_per_file=fixture_output.single_fixture_per_file, filler_path=filler_path, base_dump_dir=base_dump_dir, + generate_index=request.config.getoption("generate_index"), ) yield fixture_collector - fixture_collector.dump_fixtures() + worker_id = os.environ.get("PYTEST_XDIST_WORKER", None) + fixture_collector.dump_fixtures(worker_id) if do_fixture_verification: fixture_collector.verify_fixture_files(evm_fixture_verification) + # Write partial index for this worker/scope + if fixture_collector.generate_index: + fixture_collector.write_partial_index(worker_id) @pytest.fixture(autouse=True, scope="session") @@ -1589,6 +1595,19 @@ def pytest_collection_modifyitems( for i in reversed(items_for_removal): items.pop(i) + # Schedule slow-marked tests first (Longest Processing Time First). 
+ # Workers each grab the next test from the queue, so slow tests get + # distributed across workers and finish before the fast-test tail. + slow_items = [] + normal_items = [] + for item in items: + if item.get_closest_marker("slow") is not None: + slow_items.append(item) + else: + normal_items.append(item) + if slow_items: + items[:] = slow_items + normal_items + def pytest_sessionfinish(session: pytest.Session, exitstatus: int) -> None: """ @@ -1630,18 +1649,24 @@ def pytest_sessionfinish(session: pytest.Session, exitstatus: int) -> None: if fixture_output.is_stdout or is_help_or_collectonly_mode(session.config): return + # Merge partial fixture files from all workers into final JSON files + merge_partial_fixture_files(fixture_output.directory) + # Remove any lock files that may have been created. for file in fixture_output.directory.rglob("*.lock"): file.unlink() - # Generate index file for all produced fixtures. + # Generate index file for all produced fixtures by merging partial indexes. + # Only merge if partial indexes were actually written (i.e., tests produced + # fixtures). When no tests are filled (e.g., all skipped), no partial + # indexes exist and merge_partial_indexes should not be called. if ( session.config.getoption("generate_index") and not session_instance.phase_manager.is_pre_alloc_generation ): - generate_fixtures_index( - fixture_output.directory, quiet_mode=True, force_flag=False - ) + meta_dir = fixture_output.directory / ".meta" + if meta_dir.exists() and any(meta_dir.glob("partial_index*.jsonl")): + merge_partial_indexes(fixture_output.directory, quiet_mode=True) # Create tarball of the output directory if the output is a tarball. fixture_output.create_tarball() diff --git a/packages/testing/src/execution_testing/cli/tests/test_hasher.py b/packages/testing/src/execution_testing/cli/tests/test_hasher.py index b80bdc1e30..bd5c935780 100644 --- a/packages/testing/src/execution_testing/cli/tests/test_hasher.py +++ b/packages/testing/src/execution_testing/cli/tests/test_hasher.py @@ -1,11 +1,57 @@ -"""Tests for the hasher CLI tool.""" +"""Tests for the hasher CLI tool, module, and merge_partial_indexes.""" import json +import tempfile from pathlib import Path +from typing import Generator, List +import pytest from click.testing import CliRunner -from execution_testing.cli.hasher import hasher +from execution_testing.base_types import HexNumber +from execution_testing.cli.gen_index import merge_partial_indexes +from execution_testing.cli.hasher import HashableItem, hasher +from execution_testing.fixtures.consume import IndexFile, TestCaseIndexFile + +HASH_1 = 0x1111111111111111111111111111111111111111111111111111111111111111 +HASH_2 = 0x2222222222222222222222222222222222222222222222222222222222222222 +HASH_3 = 0x3333333333333333333333333333333333333333333333333333333333333333 +HASH_4 = 0x4444444444444444444444444444444444444444444444444444444444444444 +HASH_9 = 0x9999999999999999999999999999999999999999999999999999999999999999 + + +def _hex_str(h: int) -> str: + """Convert an integer hash to its 0x-prefixed hex string.""" + return f"0x{h:064x}" + + +def _make_entry( + test_id: str, + json_path: str, + fixture_hash: int, + fork: str | None = None, + fmt: str | None = None, +) -> TestCaseIndexFile: + """Create a TestCaseIndexFile for testing.""" + return TestCaseIndexFile( + id=test_id, + json_path=Path(json_path), + fixture_hash=HexNumber(fixture_hash), + fork=fork, + format=fmt, + ) + + +def _make_json_fixture(test_names_and_hashes: dict[str, int]) -> str: + """Create 
a JSON fixture file matching from_folder expectations.""" + data = {} + for name, h in test_names_and_hashes.items(): + data[name] = { + "_info": {"hash": _hex_str(h)}, + "pre": {}, + "post": {}, + } + return json.dumps(data) def create_fixture(path: Path, test_name: str, hash_value: str) -> None: @@ -340,3 +386,384 @@ def test_hash_help(self) -> None: result = runner.invoke(hasher, ["hash", "--help"]) assert result.exit_code == 0 assert "Hash folders of JSON fixtures" in result.output + + +class TestHashableItemFromIndexEntries: + """Test that from_index_entries produces same hash as from_folder.""" + + @pytest.fixture + def fixture_dir(self) -> Generator[Path, None, None]: + """Create a temporary directory with test fixtures.""" + with tempfile.TemporaryDirectory() as tmpdir: + base = Path(tmpdir) + + # state_tests/cancun/test.json (two tests) + state_tests = base / "state_tests" / "cancun" + state_tests.mkdir(parents=True) + (state_tests / "test.json").write_text( + _make_json_fixture({"test_one": HASH_1, "test_two": HASH_2}) + ) + + # blockchain_tests/cancun/test.json (one test) + blockchain_tests = base / "blockchain_tests" / "cancun" + blockchain_tests.mkdir(parents=True) + (blockchain_tests / "test.json").write_text( + _make_json_fixture({"test_three": HASH_3}) + ) + + yield base + + @pytest.fixture + def index_entries(self) -> List[TestCaseIndexFile]: + """Create index entries matching the fixture_dir structure.""" + return [ + _make_entry("test_one", "state_tests/cancun/test.json", HASH_1), + _make_entry("test_two", "state_tests/cancun/test.json", HASH_2), + _make_entry( + "test_three", "blockchain_tests/cancun/test.json", HASH_3 + ), + ] + + def test_hash_matches_from_folder( + self, + fixture_dir: Path, + index_entries: List[TestCaseIndexFile], + ) -> None: + """Verify from_index_entries produces same hash as from_folder.""" + hash_from_folder = HashableItem.from_folder( + folder_path=fixture_dir + ).hash() + hash_from_entries = HashableItem.from_index_entries( + index_entries + ).hash() + assert hash_from_folder == hash_from_entries + + def test_hash_changes_with_different_entries( + self, index_entries: List[TestCaseIndexFile] + ) -> None: + """Verify hash changes when entries change.""" + hash1 = HashableItem.from_index_entries(index_entries).hash() + + modified = index_entries.copy() + modified[0] = _make_entry( + "test_one", "state_tests/cancun/test.json", HASH_9 + ) + hash2 = HashableItem.from_index_entries(modified).hash() + + assert hash1 != hash2 + + def test_empty_entries(self) -> None: + """Verify empty entries produces a valid hash.""" + result = HashableItem.from_index_entries([]).hash() + assert result is not None + assert len(result) == 32 + + def test_multiple_files_in_same_folder(self) -> None: + """Verify hash with multiple JSON files in the same folder.""" + with tempfile.TemporaryDirectory() as tmpdir: + base = Path(tmpdir) + folder = base / "tests" / "cancun" + folder.mkdir(parents=True) + + (folder / "test_a.json").write_text( + _make_json_fixture({"a1": HASH_1}) + ) + (folder / "test_b.json").write_text( + _make_json_fixture({"b1": HASH_2}) + ) + + entries = [ + _make_entry("a1", "tests/cancun/test_a.json", HASH_1), + _make_entry("b1", "tests/cancun/test_b.json", HASH_2), + ] + + hash_from_folder = HashableItem.from_folder( + folder_path=base + ).hash() + hash_from_entries = HashableItem.from_index_entries(entries).hash() + assert hash_from_folder == hash_from_entries + + def test_deeply_nested_paths(self) -> None: + """Verify hash with deeply nested 
folder structures (3+ levels).""" + with tempfile.TemporaryDirectory() as tmpdir: + base = Path(tmpdir) + deep = base / "a" / "b" / "c" / "d" + deep.mkdir(parents=True) + + (deep / "test.json").write_text( + _make_json_fixture({"t1": HASH_1, "t2": HASH_2}) + ) + + entries = [ + _make_entry("t1", "a/b/c/d/test.json", HASH_1), + _make_entry("t2", "a/b/c/d/test.json", HASH_2), + ] + + hash_from_folder = HashableItem.from_folder( + folder_path=base + ).hash() + hash_from_entries = HashableItem.from_index_entries(entries).hash() + assert hash_from_folder == hash_from_entries + + def test_single_file_single_test(self) -> None: + """Verify degenerate case: one folder, one file, one test.""" + with tempfile.TemporaryDirectory() as tmpdir: + base = Path(tmpdir) + folder = base / "tests" + folder.mkdir() + + (folder / "only.json").write_text( + _make_json_fixture({"solo": HASH_4}) + ) + + entries = [_make_entry("solo", "tests/only.json", HASH_4)] + + hash_from_folder = HashableItem.from_folder( + folder_path=base + ).hash() + hash_from_entries = HashableItem.from_index_entries(entries).hash() + assert hash_from_folder == hash_from_entries + + def test_entries_with_none_fixture_hash_skipped(self) -> None: + """Verify entries with fixture_hash=None are skipped.""" + entries_with_none = [ + _make_entry("t1", "tests/a.json", HASH_1), + TestCaseIndexFile( + id="t_null", + json_path=Path("tests/a.json"), + fixture_hash=None, + fork=None, + format=None, + ), + ] + entries_without_none = [ + _make_entry("t1", "tests/a.json", HASH_1), + ] + + hash_with = HashableItem.from_index_entries(entries_with_none).hash() + hash_without = HashableItem.from_index_entries( + entries_without_none + ).hash() + assert hash_with == hash_without + + +class TestMergePartialIndexes: + """Test the JSONL partial index merge pipeline end-to-end.""" + + def _write_jsonl(self, path: Path, entries: list[dict]) -> None: + """Write a list of dicts as JSONL lines.""" + path.parent.mkdir(parents=True, exist_ok=True) + with open(path, "w") as f: + for entry in entries: + f.write(json.dumps(entry) + "\n") + + def _make_entry_dict( + self, + test_id: str, + json_path: str, + fixture_hash: int, + fork: str | None = None, + fmt: str | None = None, + ) -> dict: + """Create a dict matching what collector.py writes to JSONL.""" + return { + "id": test_id, + "json_path": json_path, + "fixture_hash": _hex_str(fixture_hash), + "fork": fork, + "format": fmt, + "pre_hash": None, + } + + def test_merge_produces_valid_index(self) -> None: + """Verify merging JSONL partials produces a valid index.json.""" + with tempfile.TemporaryDirectory() as tmpdir: + output_dir = Path(tmpdir) + meta_dir = output_dir / ".meta" + meta_dir.mkdir(parents=True) + + entries = [ + self._make_entry_dict( + "test_a", + "state_tests/cancun/test.json", + HASH_1, + fork="Cancun", + fmt="state_test", + ), + self._make_entry_dict( + "test_b", + "blockchain_tests/cancun/test.json", + HASH_2, + fork="Cancun", + fmt="blockchain_test", + ), + ] + + self._write_jsonl( + meta_dir / "partial_index.gw0.jsonl", entries[:1] + ) + self._write_jsonl( + meta_dir / "partial_index.gw1.jsonl", entries[1:] + ) + + merge_partial_indexes(output_dir, quiet_mode=True) + + index_path = meta_dir / "index.json" + assert index_path.exists() + + index = IndexFile.model_validate_json(index_path.read_text()) + assert index.test_count == 2 + assert index.root_hash is not None + assert index.root_hash != 0 + + def test_merge_fixture_formats_uses_format_name(self) -> None: + """ + Verify fixture_formats 
contains format_name values (e.g. + 'state_test') not class names (e.g. 'StateFixture'). + + This is the exact bug that format.__name__ would have caused. + """ + with tempfile.TemporaryDirectory() as tmpdir: + output_dir = Path(tmpdir) + meta_dir = output_dir / ".meta" + + entries = [ + self._make_entry_dict( + "t1", + "state_tests/test.json", + HASH_1, + fork="Cancun", + fmt="state_test", + ), + self._make_entry_dict( + "t2", + "blockchain_tests/test.json", + HASH_2, + fork="Cancun", + fmt="blockchain_test", + ), + ] + self._write_jsonl(meta_dir / "partial_index.gw0.jsonl", entries) + + merge_partial_indexes(output_dir, quiet_mode=True) + + index = IndexFile.model_validate_json( + (meta_dir / "index.json").read_text() + ) + assert index.fixture_formats is not None + assert sorted(index.fixture_formats) == [ + "blockchain_test", + "state_test", + ] + + def test_merge_forks_collected_correctly(self) -> None: + """Verify forks are collected from validated entries.""" + with tempfile.TemporaryDirectory() as tmpdir: + output_dir = Path(tmpdir) + meta_dir = output_dir / ".meta" + + entries = [ + self._make_entry_dict( + "t1", + "state_tests/test.json", + HASH_1, + fork="Cancun", + fmt="state_test", + ), + self._make_entry_dict( + "t2", + "state_tests/test2.json", + HASH_2, + fork="Shanghai", + fmt="state_test", + ), + ] + self._write_jsonl(meta_dir / "partial_index.gw0.jsonl", entries) + + merge_partial_indexes(output_dir, quiet_mode=True) + + index = IndexFile.model_validate_json( + (meta_dir / "index.json").read_text() + ) + assert index.forks is not None + assert sorted(str(f) for f in index.forks) == [ + "Cancun", + "Shanghai", + ] + + def test_merge_cleans_up_partial_files(self) -> None: + """Verify partial JSONL files are deleted after merge.""" + with tempfile.TemporaryDirectory() as tmpdir: + output_dir = Path(tmpdir) + meta_dir = output_dir / ".meta" + + entries = [ + self._make_entry_dict( + "t1", + "state_tests/test.json", + HASH_1, + fmt="state_test", + ), + ] + self._write_jsonl(meta_dir / "partial_index.gw0.jsonl", entries) + self._write_jsonl(meta_dir / "partial_index.gw1.jsonl", entries) + + merge_partial_indexes(output_dir, quiet_mode=True) + + remaining = list(meta_dir.glob("partial_index*.jsonl")) + assert remaining == [] + + def test_merge_multiple_workers_same_hash_as_single(self) -> None: + """Verify hash is the same regardless of how entries are split.""" + entry_dicts = [ + self._make_entry_dict( + "t1", "state_tests/a.json", HASH_1, fmt="state_test" + ), + self._make_entry_dict( + "t2", "state_tests/a.json", HASH_2, fmt="state_test" + ), + self._make_entry_dict( + "t3", "blockchain_tests/b.json", HASH_3, fmt="blockchain_test" + ), + ] + + # Single worker: all entries in one file + with tempfile.TemporaryDirectory() as tmpdir1: + output1 = Path(tmpdir1) + meta1 = output1 / ".meta" + self._write_jsonl(meta1 / "partial_index.gw0.jsonl", entry_dicts) + merge_partial_indexes(output1, quiet_mode=True) + index1 = IndexFile.model_validate_json( + (meta1 / "index.json").read_text() + ) + + # Multiple workers: entries split across files + with tempfile.TemporaryDirectory() as tmpdir2: + output2 = Path(tmpdir2) + meta2 = output2 / ".meta" + self._write_jsonl( + meta2 / "partial_index.gw0.jsonl", entry_dicts[:1] + ) + self._write_jsonl( + meta2 / "partial_index.gw1.jsonl", entry_dicts[1:2] + ) + self._write_jsonl( + meta2 / "partial_index.gw2.jsonl", entry_dicts[2:] + ) + merge_partial_indexes(output2, quiet_mode=True) + index2 = IndexFile.model_validate_json( + (meta2 / 
"index.json").read_text() + ) + + assert index1.root_hash == index2.root_hash + assert index1.test_count == index2.test_count + + def test_merge_raises_when_no_partial_files(self) -> None: + """Verify merge_partial_indexes raises when no partials exist.""" + with tempfile.TemporaryDirectory() as tmpdir: + output_dir = Path(tmpdir) + meta_dir = output_dir / ".meta" + meta_dir.mkdir(parents=True) + + with pytest.raises(Exception, match="No partial indexes found"): + merge_partial_indexes(output_dir, quiet_mode=True) diff --git a/packages/testing/src/execution_testing/fixtures/__init__.py b/packages/testing/src/execution_testing/fixtures/__init__.py index 9d882707e7..18d4b3a118 100644 --- a/packages/testing/src/execution_testing/fixtures/__init__.py +++ b/packages/testing/src/execution_testing/fixtures/__init__.py @@ -14,7 +14,11 @@ BlockchainFixture, BlockchainFixtureCommon, ) -from .collector import FixtureCollector, TestInfo +from .collector import ( + FixtureCollector, + TestInfo, + merge_partial_fixture_files, +) from .consume import FixtureConsumer from .pre_alloc_groups import ( PreAllocGroup, @@ -45,4 +49,5 @@ "StateFixture", "TestInfo", "TransactionFixture", + "merge_partial_fixture_files", ] diff --git a/packages/testing/src/execution_testing/fixtures/collector.py b/packages/testing/src/execution_testing/fixtures/collector.py index b389609d5d..74bdb6b471 100644 --- a/packages/testing/src/execution_testing/fixtures/collector.py +++ b/packages/testing/src/execution_testing/fixtures/collector.py @@ -9,7 +9,16 @@ import sys from dataclasses import dataclass, field from pathlib import Path -from typing import ClassVar, Dict, Literal, Optional, Tuple +from typing import ( + ClassVar, + Dict, + List, + Literal, + Optional, + Tuple, +) + +from filelock import FileLock from execution_testing.base_types import to_json @@ -18,6 +27,70 @@ from .file import Fixtures +def merge_partial_fixture_files(output_dir: Path) -> None: + """ + Merge all partial fixture JSONL files into final JSON fixture files. + + Called at session end after all workers have written their partials. + Each partial file contains JSONL lines: {"k": fixture_id, "v": json_str} + """ + # Find all partial files + partial_files = list(output_dir.rglob("*.partial.*.jsonl")) + if not partial_files: + return + + # Group partial files by their target fixture file + # e.g., "test.partial.gw0.jsonl" -> "test.json" + partials_by_target: Dict[Path, List[Path]] = {} + for partial in partial_files: + # Remove .partial.{worker_id}.jsonl suffix to get target + name = partial.name + # Find ".partial." 
and remove everything after + idx = name.find(".partial.") + if idx == -1: + continue + target_name = name[:idx] + ".json" + target_path = partial.parent / target_name + if target_path not in partials_by_target: + partials_by_target[target_path] = [] + partials_by_target[target_path].append(partial) + + # Merge each group into its target file + for target_path, partials in partials_by_target.items(): + entries: Dict[str, str] = {} + + # Read all partial files + for partial in partials: + with open(partial) as f: + for line in f: + line = line.strip() + if not line: + continue + entry = json.loads(line) + entries[entry["k"]] = entry["v"] + + # Write final JSON file + sorted_keys = sorted(entries.keys()) + parts = ["{\n"] + last_idx = len(sorted_keys) - 1 + for i, key in enumerate(sorted_keys): + key_json = json.dumps(key) + # Add indentation for nesting inside outer JSON object + value_indented = entries[key].replace("\n", "\n ") + parts.append(f" {key_json}: {value_indented}") + parts.append(",\n" if i < last_idx else "\n") + parts.append("}") + target_path.write_text("".join(parts)) + + # Clean up partial files + for partial in partials: + partial.unlink() + # Also remove lock files + lock_file = partial.with_suffix(".lock") + if lock_file.exists(): + lock_file.unlink() + + @dataclass(kw_only=True, slots=True) class TestInfo: """Contains test information from the current node.""" @@ -125,10 +198,14 @@ class FixtureCollector: filler_path: Path base_dump_dir: Optional[Path] = None flush_interval: int = 1000 + generate_index: bool = True # Internal state all_fixtures: Dict[Path, Fixtures] = field(default_factory=dict) json_path_to_test_item: Dict[Path, TestInfo] = field(default_factory=dict) + # Store index entries as simple dicts + # (avoid Pydantic overhead during collection) + index_entries: List[Dict] = field(default_factory=list) def get_fixture_basename(self, info: TestInfo) -> Path: """Return basename of the fixture file for a given test case.""" @@ -166,6 +243,22 @@ def add_fixture(self, info: TestInfo, fixture: BaseFixture) -> Path: self.all_fixtures[fixture_path][info.get_id()] = fixture + # Collect index entry while data is in memory (if indexing enabled) + # Store as simple dict to avoid Pydantic overhead during collection + if self.generate_index: + relative_path = fixture_path.relative_to(self.output_dir) + fixture_fork = fixture.get_fork() + index_entry = { + "id": info.get_id(), + "json_path": str(relative_path), + "fixture_hash": str(fixture.hash) if fixture.hash else None, + "fork": fixture_fork.name() if fixture_fork else None, + "format": fixture.format_name, + } + if (pre_hash := getattr(fixture, "pre_hash", None)) is not None: + index_entry["pre_hash"] = pre_hash + self.index_entries.append(index_entry) + if ( self.flush_interval > 0 and len(self.all_fixtures) >= self.flush_interval @@ -174,7 +267,7 @@ def add_fixture(self, info: TestInfo, fixture: BaseFixture) -> Path: return fixture_path - def dump_fixtures(self) -> None: + def dump_fixtures(self, worker_id: str | None = None) -> None: """Dump all collected fixtures to their respective files.""" if self.output_dir.name == "stdout": combined_fixtures = { @@ -191,10 +284,35 @@ def dump_fixtures(self) -> None: raise TypeError( "All fixtures in a single file must have the same format." 
                 )
-            fixtures.collect_into_file(fixture_path)
+            self._write_partial_fixtures(fixture_path, fixtures, worker_id)

         self.all_fixtures.clear()

+    def _write_partial_fixtures(
+        self, file_path: Path, fixtures: Fixtures, worker_id: str | None
+    ) -> None:
+        """
+        Write fixtures to a partial JSONL file (append-only).
+
+        Each line is a JSON object: {"k": fixture_id, "v": json_str}.
+        Each dump is an O(1) append instead of an O(n) read-modify-write
+        of the target file; the final merge to JSON happens at session end.
+        """
+        suffix = f".{worker_id}" if worker_id else ".main"
+        partial_path = file_path.with_suffix(f".partial{suffix}.jsonl")
+        partial_path.parent.mkdir(parents=True, exist_ok=True)
+        lock_file_path = partial_path.with_suffix(".lock")
+
+        lines = []
+        for name in fixtures:
+            value = json.dumps(fixtures[name].json_dict_with_info(), indent=4)
+            # Store as JSONL: {"k": key, "v": serialized value string}
+            lines.append(json.dumps({"k": name, "v": value}) + "\n")
+
+        with FileLock(lock_file_path):
+            with open(partial_path, "a") as f:
+                f.writelines(lines)
+
     def verify_fixture_files(
         self, evm_fixture_verification: FixtureConsumer
     ) -> None:
@@ -231,3 +349,38 @@ def _get_consume_direct_dump_dir(
         return info.get_dump_dir_path(
             self.base_dump_dir, self.filler_path, level="test_function"
         )
+
+    def write_partial_index(self, worker_id: str | None = None) -> Path | None:
+        """
+        Append collected index entries to a partial index file using JSONL
+        format.
+
+        Uses append-only JSONL (JSON Lines) format for efficient writes without
+        read-modify-write cycles. Each line is a complete JSON object
+        representing one index entry.
+
+        Args:
+            worker_id: The xdist worker ID (e.g., "gw0"), or None for master.
+
+        Returns:
+            Path to the partial index file, or None if indexing is disabled.
+ + """ + if not self.generate_index or not self.index_entries: + return None + + suffix = f".{worker_id}" if worker_id else ".master" + partial_index_path = ( + self.output_dir / ".meta" / f"partial_index{suffix}.jsonl" + ) + partial_index_path.parent.mkdir(parents=True, exist_ok=True) + lock_file_path = partial_index_path.with_suffix(".lock") + + # Append entries as JSONL (one JSON object per line) + # This avoids read-modify-write cycles + with FileLock(lock_file_path): + with open(partial_index_path, "a") as f: + for entry in self.index_entries: + f.write(json.dumps(entry) + "\n") + + return partial_index_path diff --git a/packages/testing/src/execution_testing/fixtures/consume.py b/packages/testing/src/execution_testing/fixtures/consume.py index d43ad6deac..3c03a85cd5 100644 --- a/packages/testing/src/execution_testing/fixtures/consume.py +++ b/packages/testing/src/execution_testing/fixtures/consume.py @@ -47,8 +47,8 @@ class TestCaseBase(BaseModel): """Base model for a test case used in EEST consume commands.""" id: str - fixture_hash: HexNumber | None - fork: Fork | None + fixture_hash: HexNumber | None = None + fork: Fork | None = None format: FixtureFormat pre_hash: str | None = None __test__ = False # stop pytest from collecting this class as a test diff --git a/packages/testing/src/execution_testing/fixtures/file.py b/packages/testing/src/execution_testing/fixtures/file.py index 4cf09cb1b1..5656359136 100644 --- a/packages/testing/src/execution_testing/fixtures/file.py +++ b/packages/testing/src/execution_testing/fixtures/file.py @@ -62,10 +62,10 @@ def collect_into_file(self, file_path: Path) -> None: lock_file_path = file_path.with_suffix(".lock") with FileLock(lock_file_path): if file_path.exists(): - with open(file_path, "r") as f: - json_fixtures = json.load(f) + json_fixtures = json.loads(file_path.read_bytes()) for name, fixture in self.items(): json_fixtures[name] = fixture.json_dict_with_info() - with open(file_path, "w") as f: - json.dump(dict(sorted(json_fixtures.items())), f, indent=4) + file_path.write_text( + json.dumps(dict(sorted(json_fixtures.items())), indent=4) + ) diff --git a/packages/testing/src/execution_testing/fixtures/tests/test_collector.py b/packages/testing/src/execution_testing/fixtures/tests/test_collector.py new file mode 100644 index 0000000000..87e55e6f89 --- /dev/null +++ b/packages/testing/src/execution_testing/fixtures/tests/test_collector.py @@ -0,0 +1,435 @@ +"""Test cases for the execution_testing.fixtures.collector module.""" + +import json +from pathlib import Path + +import pytest + +from ..base import BaseFixture +from ..collector import FixtureCollector, TestInfo, merge_partial_fixture_files +from ..file import Fixtures +from ..transaction import FixtureResult, TransactionFixture + + +def _make_fixture(nonce: int = 0) -> TransactionFixture: + """Create a minimal TransactionFixture for testing.""" + fixture = TransactionFixture( + transaction=f"0x{nonce:04x}", + result={"Paris": FixtureResult(intrinsic_gas=nonce)}, + ) + fixture.fill_info( + "t8n-test", + f"test description {nonce}", + fixture_source_url="http://example.com", + ref_spec=None, + _info_metadata={}, + ) + return fixture + + +def _make_info(test_id: str, module_path: Path) -> TestInfo: + """Create a TestInfo for testing.""" + return TestInfo( + name=f"test_func[fork_Paris-{test_id}]", + id=f"{module_path}::test_func[fork_Paris-{test_id}]", + original_name="test_func", + module_path=module_path, + ) + + +@pytest.fixture +def output_dir(tmp_path: Path) -> Path: + """Create 
output directory for test fixtures.""" + out = tmp_path / "output" + out.mkdir() + return out + + +@pytest.fixture +def filler_path(tmp_path: Path) -> Path: + """Create a filler path (tests directory root).""" + p = tmp_path / "tests" + p.mkdir() + return p + + +@pytest.fixture +def module_path(filler_path: Path) -> Path: + """Create a dummy test module path.""" + mod = filler_path / "cancun" / "test_example.py" + mod.parent.mkdir(parents=True, exist_ok=True) + mod.touch() + return mod + + +class TestPartialFixtureFiles: + """Tests for partial fixture file writing and merging.""" + + def test_single_fixture_matches_json_dumps( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """Output for a single fixture must match json.dumps(..., indent=4).""" + collector = FixtureCollector( + output_dir=output_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + fixture = _make_fixture(1) + info = _make_info("tx_test", module_path) + collector.add_fixture(info, fixture) + collector.dump_fixtures(worker_id="gw0") + merge_partial_fixture_files(output_dir) + + # Find the written file + json_files = list(output_dir.rglob("*.json")) + assert len(json_files) == 1 + written = json_files[0].read_text() + + # Build expected output using the original json.dumps approach + fixture_id = info.get_id() + expected_dict = {fixture_id: fixture.json_dict_with_info()} + expected = json.dumps(dict(sorted(expected_dict.items())), indent=4) + assert written == expected + + def test_multiple_fixtures_match_json_dumps( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """ + Output for multiple fixtures must match json.dumps(..., indent=4). + """ + collector = FixtureCollector( + output_dir=output_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + fixtures_and_infos = [] + for i in range(5): + fixture = _make_fixture(i) + info = _make_info(f"tx_test_{i}", module_path) + collector.add_fixture(info, fixture) + fixtures_and_infos.append((info, fixture)) + + collector.dump_fixtures(worker_id="gw0") + merge_partial_fixture_files(output_dir) + + json_files = list(output_dir.rglob("*.json")) + assert len(json_files) == 1 + written = json_files[0].read_text() + + expected_dict = { + info.get_id(): fixture.json_dict_with_info() + for info, fixture in fixtures_and_infos + } + expected = json.dumps(dict(sorted(expected_dict.items())), indent=4) + assert written == expected + + def test_multiple_workers_merge_correctly( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """ + Simulates xdist: worker A and B write partial files, merge at end. + Final output should match json.dumps of all fixtures. 
+ """ + collector1 = FixtureCollector( + output_dir=output_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + # Worker A writes fixtures 0-2 + pairs_a = [] + for i in range(3): + fixture = _make_fixture(i) + info = _make_info(f"tx_test_{i}", module_path) + collector1.add_fixture(info, fixture) + pairs_a.append((info, fixture)) + collector1.dump_fixtures(worker_id="gw0") + + # Worker B writes fixtures 3-5 (separate partial file) + collector2 = FixtureCollector( + output_dir=output_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + pairs_b = [] + for i in range(3, 6): + fixture = _make_fixture(i) + info = _make_info(f"tx_test_{i}", module_path) + collector2.add_fixture(info, fixture) + pairs_b.append((info, fixture)) + collector2.dump_fixtures(worker_id="gw1") + + # Merge at session end + merge_partial_fixture_files(output_dir) + + # Verify final output matches json.dumps of all 6 fixtures + json_files = list(output_dir.rglob("*.json")) + assert len(json_files) == 1 + written = json_files[0].read_text() + + expected_dict = { + info.get_id(): fixture.json_dict_with_info() + for info, fixture in pairs_a + pairs_b + } + expected = json.dumps(dict(sorted(expected_dict.items())), indent=4) + assert written == expected + + def test_output_is_valid_json( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """The written file must be parseable as valid JSON.""" + collector = FixtureCollector( + output_dir=output_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + for i in range(3): + fixture = _make_fixture(i) + info = _make_info(f"tx_test_{i}", module_path) + collector.add_fixture(info, fixture) + + collector.dump_fixtures(worker_id="gw0") + merge_partial_fixture_files(output_dir) + + json_files = list(output_dir.rglob("*.json")) + assert len(json_files) == 1 + parsed = json.loads(json_files[0].read_text()) + assert isinstance(parsed, dict) + assert len(parsed) == 3 + + def test_fixtures_sorted_by_key( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """Fixture entries in the output file must be sorted by key.""" + collector = FixtureCollector( + output_dir=output_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + # Add in reverse order + for i in reversed(range(3)): + fixture = _make_fixture(i) + info = _make_info(f"tx_test_{i}", module_path) + collector.add_fixture(info, fixture) + + collector.dump_fixtures(worker_id="gw0") + merge_partial_fixture_files(output_dir) + + json_files = list(output_dir.rglob("*.json")) + assert len(json_files) == 1 + content = json_files[0].read_text() + parsed = json.loads(content) + keys = list(parsed.keys()) + assert keys == sorted(keys) + + def test_partial_files_cleaned_up_after_merge( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """Partial JSONL files are deleted after merging.""" + collector = FixtureCollector( + output_dir=output_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + fixture = _make_fixture(1) + info = _make_info("tx_test", module_path) + collector.add_fixture(info, fixture) + collector.dump_fixtures(worker_id="gw0") + + # Verify partial file exists before merge + partial_files = 
list(output_dir.rglob("*.partial.*.jsonl")) + assert len(partial_files) == 1 + + merge_partial_fixture_files(output_dir) + + # Verify partial file is deleted after merge + partial_files = list(output_dir.rglob("*.partial.*.jsonl")) + assert len(partial_files) == 0 + + +class TestLegacyCompatibility: + """ + Tests verifying the new partial file approach produces byte-identical + output to the legacy Fixtures.collect_into_file() method. + """ + + def test_single_fixture_matches_legacy( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """Single fixture output matches legacy collect_into_file().""" + fixture: BaseFixture = _make_fixture(1) + info = _make_info("tx_test", module_path) + fixture_id = info.get_id() + + # Legacy approach: use Fixtures.collect_into_file() + legacy_dir = output_dir / "legacy" + legacy_dir.mkdir() + legacy_file = legacy_dir / "test.json" + legacy_fixtures = Fixtures(root={fixture_id: fixture}) + legacy_fixtures.collect_into_file(legacy_file) + legacy_output = legacy_file.read_text() + + # New approach: use partial files + merge + new_dir = output_dir / "new" + new_dir.mkdir() + collector = FixtureCollector( + output_dir=new_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + collector.add_fixture(info, fixture) + collector.dump_fixtures(worker_id="gw0") + merge_partial_fixture_files(new_dir) + new_files = list(new_dir.rglob("*.json")) + assert len(new_files) == 1 + new_output = new_files[0].read_text() + + assert new_output == legacy_output + + def test_multiple_fixtures_match_legacy( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """Multiple fixtures output matches legacy collect_into_file().""" + fixtures_dict: dict[str, BaseFixture] = {} + infos = [] + for i in range(5): + fixture = _make_fixture(i) + info = _make_info(f"tx_test_{i}", module_path) + fixtures_dict[info.get_id()] = fixture + infos.append(info) + + # Legacy approach + legacy_dir = output_dir / "legacy" + legacy_dir.mkdir() + legacy_file = legacy_dir / "test.json" + legacy_fixtures = Fixtures(root=fixtures_dict) + legacy_fixtures.collect_into_file(legacy_file) + legacy_output = legacy_file.read_text() + + # New approach + new_dir = output_dir / "new" + new_dir.mkdir() + collector = FixtureCollector( + output_dir=new_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + for i, info in enumerate(infos): + collector.add_fixture(info, list(fixtures_dict.values())[i]) + collector.dump_fixtures(worker_id="gw0") + merge_partial_fixture_files(new_dir) + new_files = list(new_dir.rglob("*.json")) + assert len(new_files) == 1 + new_output = new_files[0].read_text() + + assert new_output == legacy_output + + def test_multiple_workers_match_legacy( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """ + Multiple workers writing to same logical file matches legacy output. 
+ """ + fixtures_dict: dict[str, BaseFixture] = {} + infos = [] + for i in range(6): + fixture = _make_fixture(i) + info = _make_info(f"tx_test_{i}", module_path) + fixtures_dict[info.get_id()] = fixture + infos.append(info) + + # Legacy approach: all fixtures in one call + legacy_dir = output_dir / "legacy" + legacy_dir.mkdir() + legacy_file = legacy_dir / "test.json" + legacy_fixtures = Fixtures(root=fixtures_dict) + legacy_fixtures.collect_into_file(legacy_file) + legacy_output = legacy_file.read_text() + + # New approach: simulate 3 workers, each with 2 fixtures + new_dir = output_dir / "new" + new_dir.mkdir() + fixture_values = list(fixtures_dict.values()) + for worker_idx in range(3): + collector = FixtureCollector( + output_dir=new_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + start = worker_idx * 2 + for i in range(start, start + 2): + collector.add_fixture(infos[i], fixture_values[i]) + collector.dump_fixtures(worker_id=f"gw{worker_idx}") + + merge_partial_fixture_files(new_dir) + new_files = list(new_dir.rglob("*.json")) + assert len(new_files) == 1 + new_output = new_files[0].read_text() + + assert new_output == legacy_output + + def test_special_characters_in_keys_match_legacy( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """Fixture IDs with special characters produce identical output.""" + # Create fixtures with complex IDs (typical pytest node IDs) + fixtures_dict: dict[str, BaseFixture] = {} + infos = [] + complex_ids = [ + "param[fork_Paris-state_test]", + "param[fork_Shanghai-blockchain_test]", + 'param[value="quoted"]', + "param[path/with/slashes]", + ] + for i, test_id in enumerate(complex_ids): + fixture = _make_fixture(i) + info = _make_info(test_id, module_path) + fixtures_dict[info.get_id()] = fixture + infos.append(info) + + # Legacy approach + legacy_dir = output_dir / "legacy" + legacy_dir.mkdir() + legacy_file = legacy_dir / "test.json" + legacy_fixtures = Fixtures(root=fixtures_dict) + legacy_fixtures.collect_into_file(legacy_file) + legacy_output = legacy_file.read_text() + + # New approach + new_dir = output_dir / "new" + new_dir.mkdir() + collector = FixtureCollector( + output_dir=new_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + for i, info in enumerate(infos): + collector.add_fixture(info, list(fixtures_dict.values())[i]) + collector.dump_fixtures(worker_id="gw0") + merge_partial_fixture_files(new_dir) + new_files = list(new_dir.rglob("*.json")) + assert len(new_files) == 1 + new_output = new_files[0].read_text() + + assert new_output == legacy_output diff --git a/tests/cancun/eip4844_blobs/test_excess_blob_gas_fork_transition.py b/tests/cancun/eip4844_blobs/test_excess_blob_gas_fork_transition.py index 1866a4e210..d5f6dbff75 100644 --- a/tests/cancun/eip4844_blobs/test_excess_blob_gas_fork_transition.py +++ b/tests/cancun/eip4844_blobs/test_excess_blob_gas_fork_transition.py @@ -526,6 +526,7 @@ def test_fork_transition_excess_blob_gas_at_blob_genesis( ], ) @pytest.mark.parametrize("block_base_fee_per_gas", [7, 16, 23]) +@pytest.mark.slow def test_fork_transition_excess_blob_gas_post_blob_genesis( blockchain_test: BlockchainTestFiller, genesis_environment: Environment, diff --git a/tests/frontier/opcodes/test_blockhash.py b/tests/frontier/opcodes/test_blockhash.py index de0b7a3034..ca7c07459f 100644 --- a/tests/frontier/opcodes/test_blockhash.py +++ 
b/tests/frontier/opcodes/test_blockhash.py @@ -22,6 +22,7 @@ pytest.param(256, True, id="256_empty_blocks"), ], ) +@pytest.mark.slow() def test_genesis_hash_available( blockchain_test: BlockchainTestFiller, pre: Alloc, diff --git a/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo.py b/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo.py index 79c4bd14d8..c0eb3af65c 100644 --- a/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo.py +++ b/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo.py @@ -21,6 +21,7 @@ @pytest.mark.valid_for_bpo_forks() @pytest.mark.parametrize("parent_excess_blobs", [27]) @pytest.mark.parametrize("block_base_fee_per_gas", [17]) +@pytest.mark.slow def test_blob_base_fee_with_bpo_transition( blockchain_test: BlockchainTestFiller, pre: Alloc, diff --git a/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo_transitions.py b/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo_transitions.py index 08f98b96df..f4e3af6282 100644 --- a/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo_transitions.py +++ b/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo_transitions.py @@ -560,6 +560,7 @@ def get_fork_scenarios(fork: Fork) -> Iterator[ParameterSet]: ) @pytest.mark.valid_at_transition_to("Osaka", subsequent_forks=True) @pytest.mark.valid_for_bpo_forks() +@pytest.mark.slow() def test_reserve_price_at_transition( blockchain_test: BlockchainTestFiller, pre: Alloc, diff --git a/tests/prague/eip6110_deposits/test_deposits.py b/tests/prague/eip6110_deposits/test_deposits.py index be345f02a1..7f69c23ee9 100644 --- a/tests/prague/eip6110_deposits/test_deposits.py +++ b/tests/prague/eip6110_deposits/test_deposits.py @@ -914,6 +914,7 @@ ), ], ) +@pytest.mark.slow() @pytest.mark.pre_alloc_group( "deposit_requests", reason="Tests standard deposit request functionality with system contract", diff --git a/tests/static/state_tests/stAttackTest/ContractCreationSpamFiller.json b/tests/static/state_tests/stAttackTest/ContractCreationSpamFiller.json index 543bb0e046..07acf98ea3 100644 --- a/tests/static/state_tests/stAttackTest/ContractCreationSpamFiller.json +++ b/tests/static/state_tests/stAttackTest/ContractCreationSpamFiller.json @@ -1,5 +1,8 @@ { "ContractCreationSpam" : { + "_info" : { + "pytest_marks": ["slow"] + }, "env" : { "currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", "currentDifficulty" : "0x20000", diff --git a/tests/static/state_tests/stQuadraticComplexityTest/Return50000_2Filler.json b/tests/static/state_tests/stQuadraticComplexityTest/Return50000_2Filler.json index 486b49cbe7..c5646174aa 100644 --- a/tests/static/state_tests/stQuadraticComplexityTest/Return50000_2Filler.json +++ b/tests/static/state_tests/stQuadraticComplexityTest/Return50000_2Filler.json @@ -1,5 +1,8 @@ { "Return50000_2" : { + "_info" : { + "pytest_marks": ["slow"] + }, "env" : { "currentCoinbase" : "b94f5374fce5edbc8e2a8697c15331677e6ebf0b", "currentDifficulty" : "0x020000", diff --git a/tests/static/state_tests/stStaticCall/static_Return50000_2Filler.json b/tests/static/state_tests/stStaticCall/static_Return50000_2Filler.json index f2c178c800..7b7ce761bf 100644 --- a/tests/static/state_tests/stStaticCall/static_Return50000_2Filler.json +++ b/tests/static/state_tests/stStaticCall/static_Return50000_2Filler.json @@ -1,5 +1,8 @@ { "static_Return50000_2" : { + "_info" : { + "pytest_marks": ["slow"] + }, "env" : { 
"currentCoinbase" : "b94f5374fce5edbc8e2a8697c15331677e6ebf0b", "currentDifficulty" : "0x020000", diff --git a/tox.ini b/tox.ini index 22b12f6121..36f8fc652d 100644 --- a/tox.ini +++ b/tox.ini @@ -93,6 +93,7 @@ commands = fill \ -m "not slow and not zkevm and not benchmark" \ -n auto --maxprocesses 10 --dist=loadgroup \ + --skip-index \ --cov-config=pyproject.toml \ --cov=ethereum \ --cov-report=term \ @@ -103,6 +104,7 @@ commands = --log-to "{toxworkdir}/logs" \ --clean \ --until Amsterdam \ + --durations=50 \ {posargs} \ tests