From 645d820491d44ccc76b1fbd5324a748a64710ba7 Mon Sep 17 00:00:00 2001 From: spencer Date: Tue, 13 Jan 2026 16:16:23 +0000 Subject: [PATCH 001/154] chore(forks): update Osaka and BPO fork deployment status (#2013) --- .../src/execution_testing/forks/forks/forks.py | 13 +++++-------- .../testing/src/execution_testing/forks/helpers.py | 3 ++- .../src/execution_testing/forks/tests/test_forks.py | 7 ++++--- 3 files changed, 11 insertions(+), 12 deletions(-) diff --git a/packages/testing/src/execution_testing/forks/forks/forks.py b/packages/testing/src/execution_testing/forks/forks/forks.py index 4e66d9bf95..7e650a821d 100644 --- a/packages/testing/src/execution_testing/forks/forks/forks.py +++ b/packages/testing/src/execution_testing/forks/forks/forks.py @@ -2211,14 +2211,6 @@ def block_rlp_size_limit( safety_margin = 2_097_152 return max_block_size - safety_margin - @classmethod - def is_deployed(cls) -> bool: - """ - Flag that the fork has not been deployed to mainnet; it is under active - development. - """ - return False - @classmethod def valid_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 @@ -2405,6 +2397,11 @@ class BPO3(BPO2, bpo_fork=True): For testing purposes only. """ + @classmethod + def is_deployed(cls) -> bool: + """BPO3 is a pseudo fork for testing, not deployed to mainnet.""" + return False + @classmethod def blob_base_fee_update_fraction( cls, *, block_number: int = 0, timestamp: int = 0 diff --git a/packages/testing/src/execution_testing/forks/helpers.py b/packages/testing/src/execution_testing/forks/helpers.py index fe94bdb3cb..b86c2bc46f 100644 --- a/packages/testing/src/execution_testing/forks/helpers.py +++ b/packages/testing/src/execution_testing/forks/helpers.py @@ -76,11 +76,12 @@ def get_deployed_forks() -> List[Type[BaseFork]]: """ Return list of all the fork classes implemented by `execution_testing.forks` that have been deployed to mainnet, chronologically ordered by deployment. + BPO (Blob Parameter Only) forks are excluded as they are handled separately. 
""" return [ fork for fork in get_forks() - if fork.is_deployed() and not fork.ignore() + if fork.is_deployed() and not fork.ignore() and not fork.bpo_fork() ] diff --git a/packages/testing/src/execution_testing/forks/tests/test_forks.py b/packages/testing/src/execution_testing/forks/tests/test_forks.py index b5a91ef81f..f945fad0a6 100644 --- a/packages/testing/src/execution_testing/forks/tests/test_forks.py +++ b/packages/testing/src/execution_testing/forks/tests/test_forks.py @@ -8,6 +8,7 @@ from execution_testing.base_types import BlobSchedule from ..forks.forks import ( + Amsterdam, BPO1, BPO2, BPO3, @@ -49,9 +50,9 @@ from ..transition_base_fork import transition_fork FIRST_DEPLOYED = Frontier -LAST_DEPLOYED = Prague -LAST_DEVELOPMENT = Osaka -DEVELOPMENT_FORKS = [Osaka] +LAST_DEPLOYED = Osaka +LAST_DEVELOPMENT = Amsterdam +DEVELOPMENT_FORKS = [Amsterdam] def test_transition_forks() -> None: From 8cdef0ce5fa548af2532fabb3c43bd9dbf1a5695 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=94=A1=E4=BD=B3=E8=AA=A0=20Louis=20Tsai?= <72684086+LouisTsai-Csie@users.noreply.github.com> Date: Wed, 14 Jan 2026 00:57:26 +0800 Subject: [PATCH 002/154] refactor(test-benchmark): relabel repricing marker (#2015) --- tests/benchmark/compute/instruction/test_account_query.py | 2 +- tests/benchmark/compute/instruction/test_control_flow.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/benchmark/compute/instruction/test_account_query.py b/tests/benchmark/compute/instruction/test_account_query.py index fcb2ed891e..0c67c38e84 100644 --- a/tests/benchmark/compute/instruction/test_account_query.py +++ b/tests/benchmark/compute/instruction/test_account_query.py @@ -136,7 +136,6 @@ def test_codecopy_benchmark( ) -@pytest.mark.repricing @pytest.mark.parametrize( "opcode", [ @@ -345,6 +344,7 @@ def test_extcode_ops( ) +@pytest.mark.repricing @pytest.mark.parametrize( "copy_size", [0, 32, 256, 512, 1024], diff --git a/tests/benchmark/compute/instruction/test_control_flow.py b/tests/benchmark/compute/instruction/test_control_flow.py index bf58cfdba7..1a2156321a 100644 --- a/tests/benchmark/compute/instruction/test_control_flow.py +++ b/tests/benchmark/compute/instruction/test_control_flow.py @@ -62,9 +62,9 @@ def test_jumps( ) +@pytest.mark.repricing def test_jump_benchmark( benchmark_test: BenchmarkTestFiller, - pre: Alloc, ) -> None: """Benchmark JUMP instruction with different dest.""" benchmark_test( From 36b472072175a6a384d0563e65a9b611ba310e4b Mon Sep 17 00:00:00 2001 From: spencer Date: Tue, 13 Jan 2026 19:11:57 +0000 Subject: [PATCH 003/154] chore(ci): reduce docker hub rate limit usage in hive-consume workflow (#2004) Co-authored-by: danceratopz --- .../actions/cache-docker-images/action.yaml | 51 +++++++++++++++++++ .../actions/load-docker-images/action.yaml | 37 ++++++++++++++ .github/workflows/hive-consume.yaml | 43 ++++++++++++++-- 3 files changed, 126 insertions(+), 5 deletions(-) create mode 100644 .github/actions/cache-docker-images/action.yaml create mode 100644 .github/actions/load-docker-images/action.yaml diff --git a/.github/actions/cache-docker-images/action.yaml b/.github/actions/cache-docker-images/action.yaml new file mode 100644 index 0000000000..eabb32bbfc --- /dev/null +++ b/.github/actions/cache-docker-images/action.yaml @@ -0,0 +1,51 @@ +name: Cache Docker Images +description: Cache Docker images to avoid Docker Hub rate limits + +inputs: + images: + description: "Space-separated list of Docker images to cache" + required: true + default: 
"docker.io/ethereum/client-go:latest docker.io/alpine:latest docker.io/library/golang:1-alpine" + cache-key-prefix: + description: "Prefix for the cache key" + required: false + default: "docker-images" + +runs: + using: "composite" + steps: + - name: Get week number + id: week + shell: bash + run: echo "num=$(date +%U)" >> $GITHUB_OUTPUT + + - name: Restore Docker image cache + id: cache-restore + uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 + with: + path: /tmp/docker-images + key: ${{ inputs.cache-key-prefix }}-week${{ steps.week.outputs.num }}-${{ hashFiles('.github/actions/cache-docker-images/action.yaml', 'execution-specs/.github/actions/cache-docker-images/action.yaml') }} + + - name: Pull and save Docker images + shell: bash + run: | + mkdir -p /tmp/docker-images + for image in ${{ inputs.images }}; do + # Create a safe filename from image name + filename=$(echo "$image" | sed 's/[\/:]/-/g').tar.gz + if [ ! -f "/tmp/docker-images/$filename" ]; then + echo "Pulling $image..." + docker pull "$image" + echo "Saving $image to /tmp/docker-images/$filename..." + docker save "$image" | gzip > "/tmp/docker-images/$filename" + else + echo "Cache hit for $image, skipping pull" + fi + done + + - name: Save Docker image cache + if: steps.cache-restore.outputs.cache-hit != 'true' + uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 + with: + path: /tmp/docker-images + key: ${{ inputs.cache-key-prefix }}-week${{ steps.week.outputs.num }}-${{ hashFiles('.github/actions/cache-docker-images/action.yaml', 'execution-specs/.github/actions/cache-docker-images/action.yaml') }} diff --git a/.github/actions/load-docker-images/action.yaml b/.github/actions/load-docker-images/action.yaml new file mode 100644 index 0000000000..fc375e9b16 --- /dev/null +++ b/.github/actions/load-docker-images/action.yaml @@ -0,0 +1,37 @@ +name: Load Cached Docker Images +description: Load Docker images from cache + +inputs: + cache-key-prefix: + description: "Prefix for the cache key" + required: false + default: "docker-images" + +runs: + using: "composite" + steps: + - name: Get week number + id: week + shell: bash + run: echo "num=$(date +%U)" >> $GITHUB_OUTPUT + + - name: Restore Docker image cache + uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 + with: + path: /tmp/docker-images + key: ${{ inputs.cache-key-prefix }}-week${{ steps.week.outputs.num }}-${{ hashFiles('.github/actions/cache-docker-images/action.yaml', 'execution-specs/.github/actions/cache-docker-images/action.yaml') }} + + - name: Load cached Docker images + shell: bash + run: | + if [ -d /tmp/docker-images ]; then + for file in /tmp/docker-images/*.tar.gz; do + if [ -f "$file" ]; then + echo "Loading $file..." + gunzip -c "$file" | docker load + fi + done + else + echo "::error::No cached images found - cache may not exist yet. Run the cache-docker-images action first." 
+ exit 1 + fi diff --git a/.github/workflows/hive-consume.yaml b/.github/workflows/hive-consume.yaml index cb040765f9..13b30cf16d 100644 --- a/.github/workflows/hive-consume.yaml +++ b/.github/workflows/hive-consume.yaml @@ -1,4 +1,4 @@ -name: Hive Consume Tests +name: Hive Consume on: push: @@ -7,6 +7,10 @@ on: pull_request: paths: - ".github/workflows/hive-consume.yaml" + - ".github/actions/start-hive-dev/**" + - ".github/actions/cache-docker-images/**" + - ".github/actions/load-docker-images/**" + - ".github/configs/hive/**" - "packages/testing/src/execution_testing/cli/pytest_commands/consume.py" - "packages/testing/src/execution_testing/cli/pytest_commands/pytest_ini_files/pytest-consume.ini" - "packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/**" @@ -14,6 +18,18 @@ on: - "packages/testing/src/execution_testing/fixtures/consume.py" - "packages/testing/src/execution_testing/rpc/**" workflow_dispatch: + inputs: + docker_images: + description: "Space-separated list of Docker images to cache" + required: false + default: "docker.io/ethereum/client-go:latest docker.io/alpine:latest docker.io/library/golang:1-alpine" + workflow_call: + inputs: + docker_images: + description: "Space-separated list of Docker images to cache" + required: false + type: string + default: "docker.io/ethereum/client-go:latest docker.io/alpine:latest docker.io/library/golang:1-alpine" concurrency: group: hive-consume-${{ github.workflow }}-${{ github.ref || github.run_id }} @@ -24,23 +40,36 @@ env: FIXTURES_URL: https://github.com/ethereum/execution-spec-tests/releases/download/v5.3.0/fixtures_develop.tar.gz jobs: + cache-docker-images: + name: Cache Docker Images + runs-on: [self-hosted-ghr, size-l-x64] + steps: + - name: Checkout execution-specs + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + + - name: Cache Docker images + uses: ./.github/actions/cache-docker-images + with: + images: ${{ inputs.docker_images || 'docker.io/ethereum/client-go:latest docker.io/alpine:latest docker.io/library/golang:1-alpine' }} + test-hive: name: ${{ matrix.name }} + needs: cache-docker-images runs-on: [self-hosted-ghr, size-l-x64] strategy: fail-fast: true matrix: include: - - name: consume-engine + - name: Engine mode: simulator simulator: ethereum/eels/consume-engine - - name: consume-rlp + - name: RLP mode: simulator simulator: ethereum/eels/consume-rlp - - name: consume-sync + - name: Sync mode: simulator simulator: ethereum/eels/consume-sync - - name: dev-mode + - name: Dev Mode mode: dev consume_command: engine steps: @@ -71,6 +100,9 @@ jobs: version: ${{ vars.UV_VERSION }} python-version: ${{ vars.DEFAULT_PYTHON_VERSION }} + - name: Load cached Docker images + uses: ./execution-specs/.github/actions/load-docker-images + - name: Build hive run: | cd hive @@ -106,3 +138,4 @@ jobs: run: | uv sync --all-extras uv run consume ${{ matrix.consume_command }} --input ${{ env.FIXTURES_URL }} -k "Osaka and test_block_at_rlp_limit_with_logs" + From 180dcecd7deb336b5a33c957bd860a5e98c3fab9 Mon Sep 17 00:00:00 2001 From: felix Date: Tue, 13 Jan 2026 21:35:25 +0100 Subject: [PATCH 004/154] fix(test-types): log `Transaction` values as readable hex-strings instead of raw bytes (#1803) Co-authored-by: felipe Co-authored-by: danceratopz --- .../cli/gentest/source_code_generator.py | 4 +- .../test_types/transaction_types.py | 42 +++++++++++++++++++ 2 files changed, 44 insertions(+), 2 deletions(-) diff --git a/packages/testing/src/execution_testing/cli/gentest/source_code_generator.py 
b/packages/testing/src/execution_testing/cli/gentest/source_code_generator.py index 7f316cbd08..ecbed9150e 100644 --- a/packages/testing/src/execution_testing/cli/gentest/source_code_generator.py +++ b/packages/testing/src/execution_testing/cli/gentest/source_code_generator.py @@ -89,7 +89,6 @@ def format_code(code: str) -> str: str(formatter_path), "format", str(input_file_path), - "--quiet", "--no-cache", "--config", str(config_path), @@ -100,7 +99,8 @@ def format_code(code: str) -> str: if result.returncode != 0: raise Exception( f"Error formatting code using formatter '{formatter_path}': " - f"{result.stderr}" + f"returncode={result.returncode}, stdout={result.stdout!r}, " + f"stderr={result.stderr!r}" ) # Return the formatted source code diff --git a/packages/testing/src/execution_testing/test_types/transaction_types.py b/packages/testing/src/execution_testing/test_types/transaction_types.py index 0245581e62..aa4ef542fb 100644 --- a/packages/testing/src/execution_testing/test_types/transaction_types.py +++ b/packages/testing/src/execution_testing/test_types/transaction_types.py @@ -3,6 +3,7 @@ from dataclasses import dataclass from enum import IntEnum from functools import cached_property +import numbers from typing import Any, ClassVar, Dict, Generic, List, Literal, Self, Sequence import ethereum_rlp as eth_rlp @@ -842,6 +843,47 @@ def signer_minimum_balance(self, *, fork: Fork) -> int: else: return gas_price * gas_limit + self.value + def _format_field_value(self, value: Any) -> str: + """ + Format a field value for string representation. + + Uses decimal for numeric values (int, HexNumber, etc.) and + hex encoding for Address, Bytes, Hash, etc. + """ + if value is None: + return "None" + + # fields like 'value' should be shown as decimal number + if isinstance(value, numbers.Number): + # Force decimal representation for int subclasses like HexNumber + if isinstance(value, int): + return str(int(value)) + + return str(value) + + # fields like 'to' should be shown as hex string + if hasattr(value, "hex") and callable(value.hex): + return f'"{value.hex()}"' + + return repr(value) + + def __repr__(self) -> str: + """ + Return string representation with hex-encoded values for + applicable fields. + """ + field_strs = [] + for field_name in self.__class__.model_fields: + value = getattr(self, field_name) + formatted_value = self._format_field_value(value) + field_strs.append(f"{field_name}={formatted_value}") + + return f"{self.__class__.__name__}({', '.join(field_strs)})" + + def __str__(self) -> str: + """Return the repr string representation.""" + return self.__repr__() + class NetworkWrappedTransaction(CamelModel, RLPSerializable): """ From 7ed5f38e05d53233a238d14da78978c55f4ec04e Mon Sep 17 00:00:00 2001 From: danceratopz Date: Wed, 14 Jan 2026 13:13:15 +0100 Subject: [PATCH 005/154] fix(test-types): strip extra fields from geth receipts (#2014) Geth returns additional fields in transaction receipts that are not part of the model: - `type`: transaction type field. - `blockNumber`: block number field. Strip these fields to maintain compatibility with modern geth while keeping strict validation for unexpected fields. 
--- .../testing/src/execution_testing/test_types/receipt_types.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/testing/src/execution_testing/test_types/receipt_types.py b/packages/testing/src/execution_testing/test_types/receipt_types.py index c2e40e1f41..4343b25d8d 100644 --- a/packages/testing/src/execution_testing/test_types/receipt_types.py +++ b/packages/testing/src/execution_testing/test_types/receipt_types.py @@ -49,6 +49,9 @@ def strip_extra_fields(cls, data: Any) -> Any: # t8n tool may return 'post_state' which is not part of this model data.pop("post_state", None) data.pop("postState", None) + # geth (1.16+) returns extra fields in receipts + data.pop("type", None) + data.pop("blockNumber", None) return data transaction_hash: Hash | None = None From 2c83b84a0f407bec88a1b619f057d645e2a09437 Mon Sep 17 00:00:00 2001 From: CPerezz <37264926+CPerezz@users.noreply.github.com> Date: Wed, 14 Jan 2026 13:39:01 +0100 Subject: [PATCH 006/154] feat(benchmark): support tx gas limit cap in stateful benchmarks (#1962) Co-authored-by: danceratopz --- .../stateful/bloatnet/test_multi_opcode.py | 306 +++++++++++++----- .../stateful/bloatnet/test_single_opcode.py | 134 +++++--- 2 files changed, 305 insertions(+), 135 deletions(-) diff --git a/tests/benchmark/stateful/bloatnet/test_multi_opcode.py b/tests/benchmark/stateful/bloatnet/test_multi_opcode.py index 75e0875988..a19be48830 100755 --- a/tests/benchmark/stateful/bloatnet/test_multi_opcode.py +++ b/tests/benchmark/stateful/bloatnet/test_multi_opcode.py @@ -62,6 +62,7 @@ def test_bloatnet_balance_extcodesize( pre: Alloc, fork: Fork, gas_benchmark_value: int, + tx_gas_limit: int, balance_first: bool, ) -> None: """ @@ -79,28 +80,55 @@ def test_bloatnet_balance_extcodesize( # Calculate gas costs intrinsic_gas = fork.transaction_intrinsic_cost_calculator()(calldata=b"") + # Setup overhead (before loop): STATICCALL + result handling + memory setup + setup_overhead = ( + gas_costs.G_COLD_ACCOUNT_ACCESS # STATICCALL to factory (2600) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_VERY_LOW # PUSH2 (3) + + gas_costs.G_HIGH # JUMPI (10) + + gas_costs.G_VERY_LOW * 2 # MLOAD × 2 for factory results (3 * 2) + + gas_costs.G_VERY_LOW * 3 # MSTORE × 3 for memory setup (3 * 3) + + gas_costs.G_VERY_LOW # MSTORE8 for 0xFF prefix (3) + + gas_costs.G_VERY_LOW # PUSH1 for memory position (3) + ) + + # Cleanup overhead (after loop) + cleanup_overhead = gas_costs.G_BASE # POP counter (2) + + # While loop condition overhead per iteration + loop_condition_overhead = ( + gas_costs.G_VERY_LOW # DUP1 (3) + + gas_costs.G_VERY_LOW # PUSH1 (3) + + gas_costs.G_VERY_LOW # SWAP1 (3) + + gas_costs.G_VERY_LOW # SUB (3) + + gas_costs.G_VERY_LOW # DUP1 (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_HIGH # JUMPI (10) + ) + # Cost per contract access with CREATE2 address generation cost_per_contract = ( gas_costs.G_KECCAK_256 # SHA3 static cost for address generation (30) - + gas_costs.G_KECCAK_256_WORD - * 3 # SHA3 dynamic cost (85 bytes = 3 words * 6) + + gas_costs.G_KECCAK_256_WORD * 3 # SHA3 dynamic (85 bytes = 3 words) + gas_costs.G_COLD_ACCOUNT_ACCESS # Cold access (2600) + gas_costs.G_BASE # POP first result (2) + gas_costs.G_WARM_ACCOUNT_ACCESS # Warm access (100) + gas_costs.G_BASE # POP second result (2) - + gas_costs.G_BASE # DUP1 before first op (3) - + gas_costs.G_VERY_LOW * 4 # PUSH1 operations (4 * 3) - + gas_costs.G_LOW # MLOAD for salt (3) + + gas_costs.G_VERY_LOW # DUP1 before first op (3) + + 
gas_costs.G_VERY_LOW # MLOAD for salt (3) + gas_costs.G_VERY_LOW # ADD for increment (3) - + gas_costs.G_LOW # MSTORE salt back (3) - + 10 # While loop overhead + + gas_costs.G_VERY_LOW # MSTORE salt back (3) + + loop_condition_overhead # While loop condition ) - # Calculate how many contracts to access based on available gas - available_gas = ( - gas_benchmark_value - intrinsic_gas - 1000 - ) # Reserve for cleanup - contracts_needed = int(available_gas // cost_per_contract) + # Calculate how many transactions we need to fill the block + num_txs = max(1, gas_benchmark_value // tx_gas_limit) + + # Calculate how many contracts to access per transaction + total_overhead = setup_overhead + cleanup_overhead + available_gas_per_tx = tx_gas_limit - intrinsic_gas - total_overhead + contracts_per_tx = int(available_gas_per_tx // cost_per_contract) # Deploy factory using stub contract - NO HARDCODED VALUES # The stub "bloatnet_factory" must be provided via --address-stubs flag @@ -114,8 +142,9 @@ def test_bloatnet_balance_extcodesize( # Log test requirements - deployed count read from factory storage print( - f"Test needs {contracts_needed} contracts for " - f"{gas_benchmark_value / 1_000_000:.1f}M gas. " + f"Tx gas limit: {tx_gas_limit / 1_000_000:.1f}M gas. " + f"Number of txs: {num_txs}. " + f"Contracts per tx: {contracts_per_tx}. " f"Factory storage will be checked during execution." ) @@ -187,12 +216,16 @@ def test_bloatnet_balance_extcodesize( # Deploy attack contract attack_address = pre.deploy_contract(code=attack_code) - # Run the attack - attack_tx = Transaction( - to=attack_address, - gas_limit=gas_benchmark_value, - sender=pre.fund_eoa(), - ) + # Create multiple attack transactions to fill the block + sender = pre.fund_eoa() + attack_txs = [ + Transaction( + to=attack_address, + gas_limit=tx_gas_limit, + sender=sender, + ) + for _ in range(num_txs) + ] # Post-state: just verify attack contract exists post = { @@ -201,7 +234,7 @@ def test_bloatnet_balance_extcodesize( blockchain_test( pre=pre, - blocks=[Block(txs=[attack_tx])], + blocks=[Block(txs=attack_txs)], post=post, ) @@ -217,6 +250,7 @@ def test_bloatnet_balance_extcodecopy( pre: Alloc, fork: Fork, gas_benchmark_value: int, + tx_gas_limit: int, balance_first: bool, ) -> None: """ @@ -235,27 +269,56 @@ def test_bloatnet_balance_extcodecopy( # Calculate costs intrinsic_gas = fork.transaction_intrinsic_cost_calculator()(calldata=b"") + # Setup overhead (before loop): STATICCALL + result handling + memory setup + setup_overhead = ( + gas_costs.G_COLD_ACCOUNT_ACCESS # STATICCALL to factory (2600) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_VERY_LOW # PUSH2 (3) + + gas_costs.G_HIGH # JUMPI (10) + + gas_costs.G_VERY_LOW * 2 # MLOAD × 2 for factory results (3 * 2) + + gas_costs.G_VERY_LOW * 3 # MSTORE × 3 for memory setup (3 * 3) + + gas_costs.G_VERY_LOW # MSTORE8 for 0xFF prefix (3) + + gas_costs.G_VERY_LOW # PUSH1 for memory position (3) + ) + + # Cleanup overhead (after loop) + cleanup_overhead = gas_costs.G_BASE # POP counter (2) + + # While loop condition overhead per iteration + loop_condition_overhead = ( + gas_costs.G_VERY_LOW # DUP1 (3) + + gas_costs.G_VERY_LOW # PUSH1 (3) + + gas_costs.G_VERY_LOW # SWAP1 (3) + + gas_costs.G_VERY_LOW # SUB (3) + + gas_costs.G_VERY_LOW # DUP1 (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_HIGH # JUMPI (10) + ) + # Cost per contract with EXTCODECOPY and CREATE2 address generation cost_per_contract = ( gas_costs.G_KECCAK_256 # SHA3 static 
cost for address generation (30) - + gas_costs.G_KECCAK_256_WORD - * 3 # SHA3 dynamic cost (85 bytes = 3 words * 6) + + gas_costs.G_KECCAK_256_WORD * 3 # SHA3 dynamic (85 bytes = 3 words) + gas_costs.G_COLD_ACCOUNT_ACCESS # Cold access (2600) + gas_costs.G_BASE # POP first result (2) + gas_costs.G_WARM_ACCOUNT_ACCESS # Warm access base (100) + gas_costs.G_COPY * 1 # Copy cost for 1 byte (3) - + gas_costs.G_BASE * 2 # DUP1 before first op, DUP4 for address (6) - + gas_costs.G_VERY_LOW * 8 # PUSH operations (8 * 3 = 24) - + gas_costs.G_LOW * 2 # MLOAD for salt twice (6) + + gas_costs.G_VERY_LOW * 2 # DUP1 + DUP4 for address (6) + + gas_costs.G_VERY_LOW * 2 # MLOAD for salt twice (6) + gas_costs.G_VERY_LOW * 2 # ADD operations (6) - + gas_costs.G_LOW # MSTORE salt back (3) + + gas_costs.G_VERY_LOW # MSTORE salt back (3) + gas_costs.G_BASE # POP after second op (2) - + 10 # While loop overhead + + loop_condition_overhead # While loop condition ) - # Calculate how many contracts to access - available_gas = gas_benchmark_value - intrinsic_gas - 1000 - contracts_needed = int(available_gas // cost_per_contract) + # Calculate how many transactions we need to fill the block + num_txs = max(1, gas_benchmark_value // tx_gas_limit) + + # Calculate how many contracts to access per transaction + total_overhead = setup_overhead + cleanup_overhead + available_gas_per_tx = tx_gas_limit - intrinsic_gas - total_overhead + contracts_per_tx = int(available_gas_per_tx // cost_per_contract) # Deploy factory using stub contract - NO HARDCODED VALUES # The stub "bloatnet_factory" must be provided via --address-stubs flag @@ -269,8 +332,9 @@ def test_bloatnet_balance_extcodecopy( # Log test requirements - deployed count read from factory storage print( - f"Test needs {contracts_needed} contracts for " - f"{gas_benchmark_value / 1_000_000:.1f}M gas. " + f"Tx gas limit: {tx_gas_limit / 1_000_000:.1f}M gas. " + f"Number of txs: {num_txs}. " + f"Contracts per tx: {contracts_per_tx}. " f"Factory storage will be checked during execution." 
) @@ -349,12 +413,16 @@ def test_bloatnet_balance_extcodecopy( # Deploy attack contract attack_address = pre.deploy_contract(code=attack_code) - # Run the attack - attack_tx = Transaction( - to=attack_address, - gas_limit=gas_benchmark_value, - sender=pre.fund_eoa(), - ) + # Create multiple attack transactions to fill the block + sender = pre.fund_eoa() + attack_txs = [ + Transaction( + to=attack_address, + gas_limit=tx_gas_limit, + sender=sender, + ) + for _ in range(num_txs) + ] # Post-state post = { @@ -363,7 +431,7 @@ def test_bloatnet_balance_extcodecopy( blockchain_test( pre=pre, - blocks=[Block(txs=[attack_tx])], + blocks=[Block(txs=attack_txs)], post=post, ) @@ -379,6 +447,7 @@ def test_bloatnet_balance_extcodehash( pre: Alloc, fork: Fork, gas_benchmark_value: int, + tx_gas_limit: int, balance_first: bool, ) -> None: """ @@ -396,28 +465,55 @@ def test_bloatnet_balance_extcodehash( # Calculate gas costs intrinsic_gas = fork.transaction_intrinsic_cost_calculator()(calldata=b"") + # Setup overhead (before loop): STATICCALL + result handling + memory setup + setup_overhead = ( + gas_costs.G_COLD_ACCOUNT_ACCESS # STATICCALL to factory (2600) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_VERY_LOW # PUSH2 (3) + + gas_costs.G_HIGH # JUMPI (10) + + gas_costs.G_VERY_LOW * 2 # MLOAD × 2 for factory results (3 * 2) + + gas_costs.G_VERY_LOW * 3 # MSTORE × 3 for memory setup (3 * 3) + + gas_costs.G_VERY_LOW # MSTORE8 for 0xFF prefix (3) + + gas_costs.G_VERY_LOW # PUSH1 for memory position (3) + ) + + # Cleanup overhead (after loop) + cleanup_overhead = gas_costs.G_BASE # POP counter (2) + + # While loop condition overhead per iteration + loop_condition_overhead = ( + gas_costs.G_VERY_LOW # DUP1 (3) + + gas_costs.G_VERY_LOW # PUSH1 (3) + + gas_costs.G_VERY_LOW # SWAP1 (3) + + gas_costs.G_VERY_LOW # SUB (3) + + gas_costs.G_VERY_LOW # DUP1 (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_HIGH # JUMPI (10) + ) + # Cost per contract access with CREATE2 address generation cost_per_contract = ( gas_costs.G_KECCAK_256 # SHA3 static cost for address generation (30) - + gas_costs.G_KECCAK_256_WORD - * 3 # SHA3 dynamic cost (85 bytes = 3 words * 6) + + gas_costs.G_KECCAK_256_WORD * 3 # SHA3 dynamic (85 bytes = 3 words) + gas_costs.G_COLD_ACCOUNT_ACCESS # Cold access (2600) + gas_costs.G_BASE # POP first result (2) + gas_costs.G_WARM_ACCOUNT_ACCESS # Warm access (100) + gas_costs.G_BASE # POP second result (2) - + gas_costs.G_BASE # DUP1 before first op (3) - + gas_costs.G_VERY_LOW * 4 # PUSH1 operations (4 * 3) - + gas_costs.G_LOW # MLOAD for salt (3) + + gas_costs.G_VERY_LOW # DUP1 before first op (3) + + gas_costs.G_VERY_LOW # MLOAD for salt (3) + gas_costs.G_VERY_LOW # ADD for increment (3) - + gas_costs.G_LOW # MSTORE salt back (3) - + 10 # While loop overhead + + gas_costs.G_VERY_LOW # MSTORE salt back (3) + + loop_condition_overhead # While loop condition ) - # Calculate how many contracts to access based on available gas - available_gas = ( - gas_benchmark_value - intrinsic_gas - 1000 - ) # Reserve for cleanup - contracts_needed = int(available_gas // cost_per_contract) + # Calculate how many transactions we need to fill the block + num_txs = max(1, gas_benchmark_value // tx_gas_limit) + + # Calculate how many contracts to access per transaction + total_overhead = setup_overhead + cleanup_overhead + available_gas_per_tx = tx_gas_limit - intrinsic_gas - total_overhead + contracts_per_tx = int(available_gas_per_tx // cost_per_contract) # Deploy 
factory using stub contract factory_address = pre.deploy_contract( @@ -427,8 +523,9 @@ def test_bloatnet_balance_extcodehash( # Log test requirements print( - f"Test needs {contracts_needed} contracts for " - f"{gas_benchmark_value / 1_000_000:.1f}M gas. " + f"Tx gas limit: {tx_gas_limit / 1_000_000:.1f}M gas. " + f"Number of txs: {num_txs}. " + f"Contracts per tx: {contracts_per_tx}. " f"Factory storage will be checked during execution." ) @@ -489,12 +586,16 @@ def test_bloatnet_balance_extcodehash( # Deploy attack contract attack_address = pre.deploy_contract(code=attack_code) - # Run the attack - attack_tx = Transaction( - to=attack_address, - gas_limit=gas_benchmark_value, - sender=pre.fund_eoa(), - ) + # Create multiple attack transactions to fill the block + sender = pre.fund_eoa() + attack_txs = [ + Transaction( + to=attack_address, + gas_limit=tx_gas_limit, + sender=sender, + ) + for _ in range(num_txs) + ] # Post-state post = { @@ -503,7 +604,7 @@ def test_bloatnet_balance_extcodehash( blockchain_test( pre=pre, - blocks=[Block(txs=[attack_tx])], + blocks=[Block(txs=attack_txs)], post=post, ) @@ -530,6 +631,7 @@ def test_mixed_sload_sstore( pre: Alloc, fork: Fork, gas_benchmark_value: int, + tx_gas_limit: int, address_stubs: AddressStubs | None, num_contracts: int, sload_percent: int, @@ -578,6 +680,26 @@ def test_mixed_sload_sstore( # Calculate gas costs intrinsic_gas = fork.transaction_intrinsic_cost_calculator()(calldata=b"") + # Per-contract fixed overhead (setup + teardown for each contract's loops) + # Each contract has two loops: SLOAD (balanceOf) and SSTORE (approve) + overhead_per_contract = ( + # SLOAD loop setup/teardown + gas_costs.G_VERY_LOW # MSTORE to initialize counter (3) + + gas_costs.G_JUMPDEST # JUMPDEST at loop start (1) + + gas_costs.G_VERY_LOW # MLOAD for While condition (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_HIGH # JUMPI (10) + # SSTORE loop setup/teardown + + gas_costs.G_VERY_LOW # MSTORE selector (3) + + gas_costs.G_VERY_LOW # MSTORE to initialize counter (3) + + gas_costs.G_JUMPDEST # JUMPDEST at loop start (1) + + gas_costs.G_VERY_LOW # MLOAD for While condition (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_HIGH # JUMPI (10) + ) + # Fixed overhead for SLOAD loop sload_loop_overhead = ( # Attack contract loop overhead @@ -585,16 +707,16 @@ def test_mixed_sload_sstore( + gas_costs.G_VERY_LOW * 2 # MSTORE selector (3*2) + gas_costs.G_VERY_LOW * 3 # MLOAD + MSTORE address (3*3) + gas_costs.G_BASE # POP (2) - + gas_costs.G_BASE * 3 # SUB + MLOAD + MSTORE counter decrement - + gas_costs.G_BASE * 2 # ISZERO * 2 for loop condition (2*2) - + gas_costs.G_MID # JUMPI (8) + + gas_costs.G_VERY_LOW * 3 # SUB + MLOAD + MSTORE decrement (3*3) + + gas_costs.G_VERY_LOW * 2 # ISZERO * 2 for loop condition (3*2) + + gas_costs.G_HIGH # JUMPI (10) ) # ERC20 balanceOf internal gas sload_erc20_internal = ( gas_costs.G_VERY_LOW # PUSH4 selector (3) + gas_costs.G_BASE # EQ selector match (2) - + gas_costs.G_MID # JUMPI to function (8) + + gas_costs.G_HIGH # JUMPI to function (10) + gas_costs.G_JUMPDEST # JUMPDEST at function start (1) + gas_costs.G_VERY_LOW * 2 # CALLDATALOAD arg (3*2) + gas_costs.G_KECCAK_256 # keccak256 static (30) @@ -607,19 +729,19 @@ def test_mixed_sload_sstore( sstore_loop_overhead = ( # Attack contract loop body operations gas_costs.G_VERY_LOW # MSTORE selector at memory[32] (3) - + gas_costs.G_LOW # MLOAD counter (5) + + gas_costs.G_VERY_LOW # MLOAD 
counter (3) + gas_costs.G_VERY_LOW # MSTORE spender at memory[64] (3) + gas_costs.G_BASE # POP call result (2) # Counter decrement - + gas_costs.G_LOW # MLOAD counter (5) + + gas_costs.G_VERY_LOW # MLOAD counter (3) + gas_costs.G_VERY_LOW # PUSH1 1 (3) + gas_costs.G_VERY_LOW # SUB (3) + gas_costs.G_VERY_LOW # MSTORE counter back (3) # While loop condition check - + gas_costs.G_LOW # MLOAD counter (5) - + gas_costs.G_BASE # ISZERO (2) - + gas_costs.G_BASE # ISZERO (2) - + gas_costs.G_MID # JUMPI back to loop start (8) + + gas_costs.G_VERY_LOW # MLOAD counter (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_HIGH # JUMPI back to loop start (10) ) # ERC20 approve internal gas @@ -627,7 +749,7 @@ def test_mixed_sload_sstore( sstore_erc20_internal = ( gas_costs.G_VERY_LOW # PUSH4 selector (3) + gas_costs.G_BASE # EQ selector match (2) - + gas_costs.G_MID # JUMPI to function (8) + + gas_costs.G_HIGH # JUMPI to function (10) + gas_costs.G_JUMPDEST # JUMPDEST at function start (1) + gas_costs.G_VERY_LOW # CALLDATALOAD spender (3) + gas_costs.G_VERY_LOW # CALLDATALOAD amount (3) @@ -642,13 +764,19 @@ def test_mixed_sload_sstore( + gas_costs.G_VERY_LOW # PUSH1 0 for return offset (3) ) - # Calculate gas budget per contract - available_gas = gas_benchmark_value - intrinsic_gas - gas_per_contract = available_gas // num_contracts + # Calculate how many transactions we need to fill the block + num_txs = max(1, gas_benchmark_value // tx_gas_limit) + + # Calculate gas budget per contract per transaction + total_overhead_per_tx = intrinsic_gas + ( + overhead_per_contract * num_contracts + ) + available_gas_per_tx = tx_gas_limit - total_overhead_per_tx + gas_per_contract_per_tx = available_gas_per_tx // num_contracts # For each contract, split gas by percentage - sload_gas_per_contract = (gas_per_contract * sload_percent) // 100 - sstore_gas_per_contract = (gas_per_contract * sstore_percent) // 100 + sload_gas_per_contract = (gas_per_contract_per_tx * sload_percent) // 100 + sstore_gas_per_contract = (gas_per_contract_per_tx * sstore_percent) // 100 # Account for cold/warm transitions in CALL costs # First SLOAD call is COLD (2600), rest are WARM (100) @@ -686,9 +814,11 @@ def test_mixed_sload_sstore( # Log test requirements print( f"Total gas budget: {gas_benchmark_value / 1_000_000:.1f}M gas. " - f"~{gas_per_contract / 1_000_000:.1f}M gas per contract " + f"Tx gas limit: {tx_gas_limit / 1_000_000:.1f}M gas. " + f"Number of txs: {num_txs}. " + f"~{gas_per_contract_per_tx / 1_000_000:.2f}M gas per contract per tx " f"({sload_percent}% SLOAD, {sstore_percent}% SSTORE). " - f"Per contract: {sload_calls_per_contract} balanceOf calls, " + f"Per contract per tx: {sload_calls_per_contract} balanceOf calls, " f"{sstore_calls_per_contract} approve calls." 
) @@ -763,12 +893,16 @@ def test_mixed_sload_sstore( # Deploy attack contract attack_address = pre.deploy_contract(code=attack_code) - # Run the attack - attack_tx = Transaction( - to=attack_address, - gas_limit=gas_benchmark_value, - sender=pre.fund_eoa(), - ) + # Create multiple attack transactions to fill the block + sender = pre.fund_eoa() + attack_txs = [ + Transaction( + to=attack_address, + gas_limit=tx_gas_limit, + sender=sender, + ) + for _ in range(num_txs) + ] # Post-state post = { @@ -777,6 +911,6 @@ def test_mixed_sload_sstore( blockchain_test( pre=pre, - blocks=[Block(txs=[attack_tx])], + blocks=[Block(txs=attack_txs)], post=post, ) diff --git a/tests/benchmark/stateful/bloatnet/test_single_opcode.py b/tests/benchmark/stateful/bloatnet/test_single_opcode.py index 34e2c46434..04dc629a80 100644 --- a/tests/benchmark/stateful/bloatnet/test_single_opcode.py +++ b/tests/benchmark/stateful/bloatnet/test_single_opcode.py @@ -84,6 +84,7 @@ def test_sload_empty_erc20_balanceof( pre: Alloc, fork: Fork, gas_benchmark_value: int, + tx_gas_limit: int, address_stubs: AddressStubs | None, num_contracts: int, request: pytest.FixtureRequest, @@ -128,6 +129,17 @@ def test_sload_empty_erc20_balanceof( # Calculate gas costs intrinsic_gas = fork.transaction_intrinsic_cost_calculator()(calldata=b"") + # Per-contract fixed overhead (setup + teardown for each contract's loop) + overhead_per_contract = ( + gas_costs.G_VERY_LOW # MSTORE to initialize counter (3) + + gas_costs.G_JUMPDEST # JUMPDEST at loop start (1) + + gas_costs.G_VERY_LOW # MLOAD for While condition check (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_HIGH # JUMPI (10) + + gas_costs.G_BASE # POP to clean up counter at end (2) + ) + # Fixed overhead per iteration (loop mechanics, independent of warm/cold) loop_overhead = ( # Attack contract loop overhead @@ -135,16 +147,16 @@ def test_sload_empty_erc20_balanceof( + gas_costs.G_VERY_LOW * 2 # MSTORE selector (3*2) + gas_costs.G_VERY_LOW * 3 # MLOAD + MSTORE address (3*3) + gas_costs.G_BASE # POP (2) - + gas_costs.G_BASE * 3 # SUB + MLOAD + MSTORE counter decrement - + gas_costs.G_BASE * 2 # ISZERO * 2 for loop condition (2*2) - + gas_costs.G_MID # JUMPI (8) + + gas_costs.G_VERY_LOW * 3 # SUB + MLOAD + MSTORE decrement (3*3) + + gas_costs.G_VERY_LOW * 2 # ISZERO * 2 for loop condition (3*2) + + gas_costs.G_HIGH # JUMPI (10) ) # ERC20 internal gas (same for all calls) erc20_internal_gas = ( gas_costs.G_VERY_LOW # PUSH4 selector (3) + gas_costs.G_BASE # EQ selector match (2) - + gas_costs.G_MID # JUMPI to function (8) + + gas_costs.G_HIGH # JUMPI to function (10) + gas_costs.G_JUMPDEST # JUMPDEST at function start (1) + gas_costs.G_VERY_LOW * 2 # CALLDATALOAD arg (3*2) + gas_costs.G_KECCAK_256 # keccak256 static (30) @@ -154,14 +166,7 @@ def test_sload_empty_erc20_balanceof( # RETURN costs 0 gas ) - # Calculate gas budget per contract - available_gas = gas_benchmark_value - intrinsic_gas - gas_per_contract = available_gas // num_contracts - # For each contract: first call is COLD (2600), subsequent are WARM (100) - # Solve for calls_per_contract: - # gas_per_contract = cold_call + (calls-1) * warm_call - # Simplifies to: gas = cold_warm_diff + calls * warm_call_cost warm_call_cost = ( loop_overhead + gas_costs.G_WARM_ACCOUNT_ACCESS + erc20_internal_gas ) @@ -169,8 +174,21 @@ def test_sload_empty_erc20_balanceof( gas_costs.G_COLD_ACCOUNT_ACCESS - gas_costs.G_WARM_ACCOUNT_ACCESS ) + # Calculate how many transactions we need to fill the block 
+ num_txs = max(1, gas_benchmark_value // tx_gas_limit) + + # Calculate gas budget per contract per transaction + total_overhead_per_tx = intrinsic_gas + ( + overhead_per_contract * num_contracts + ) + available_gas_per_tx = tx_gas_limit - total_overhead_per_tx + gas_per_contract_per_tx = available_gas_per_tx // num_contracts + + # Solve for calls_per_contract per tx: + # gas_per_contract_per_tx = cold_call + (calls-1) * warm_call + # Simplifies to: gas = cold_warm_diff + calls * warm_call_cost calls_per_contract = int( - (gas_per_contract - cold_warm_diff) // warm_call_cost + (gas_per_contract_per_tx - cold_warm_diff) // warm_call_cost ) # Deploy selected ERC20 contracts using stubs @@ -188,8 +206,11 @@ def test_sload_empty_erc20_balanceof( # Log test requirements print( f"Total gas budget: {gas_benchmark_value / 1_000_000:.1f}M gas. " - f"~{gas_per_contract / 1_000_000:.1f}M gas per contract, " - f"{calls_per_contract} balanceOf calls per contract." + f"Tx gas limit: {tx_gas_limit / 1_000_000:.1f}M gas. " + f"Number of txs: {num_txs}. " + f"Overhead per contract: {overhead_per_contract}. " + f"~{gas_per_contract_per_tx / 1_000_000:.2f}M gas/contract/tx, " + f"{calls_per_contract} balanceOf calls/contract/tx." ) # Build attack code that loops through each contract @@ -230,12 +251,16 @@ def test_sload_empty_erc20_balanceof( # Deploy attack contract attack_address = pre.deploy_contract(code=attack_code) - # Run the attack - attack_tx = Transaction( - to=attack_address, - gas_limit=gas_benchmark_value, - sender=pre.fund_eoa(), - ) + # Create multiple attack transactions to fill the block + sender = pre.fund_eoa() + attack_txs = [ + Transaction( + to=attack_address, + gas_limit=tx_gas_limit, + sender=sender, + ) + for _ in range(num_txs) + ] # Post-state post = { @@ -244,7 +269,7 @@ def test_sload_empty_erc20_balanceof( blockchain_test( pre=pre, - blocks=[Block(txs=[attack_tx])], + blocks=[Block(txs=attack_txs)], post=post, ) @@ -256,6 +281,7 @@ def test_sstore_erc20_approve( pre: Alloc, fork: Fork, gas_benchmark_value: int, + tx_gas_limit: int, address_stubs: AddressStubs | None, num_contracts: int, request: pytest.FixtureRequest, @@ -305,30 +331,30 @@ def test_sstore_erc20_approve( gas_costs.G_VERY_LOW # MSTORE to initialize counter (3) + memory_expansion_cost # Memory expansion (15) + gas_costs.G_JUMPDEST # JUMPDEST at loop start (1) - + gas_costs.G_LOW # MLOAD for While condition check (5) - + gas_costs.G_BASE # ISZERO (2) - + gas_costs.G_BASE # ISZERO (2) - + gas_costs.G_MID # JUMPI (8) + + gas_costs.G_VERY_LOW # MLOAD for While condition check (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_HIGH # JUMPI (10) + gas_costs.G_BASE # POP to clean up counter at end (2) - ) # = 38 + ) # = 40 # Fixed overhead per iteration (loop mechanics, independent of warm/cold) loop_overhead = ( # Attack contract loop body operations gas_costs.G_VERY_LOW # MSTORE selector at memory[32] (3) - + gas_costs.G_LOW # MLOAD counter (5) + + gas_costs.G_VERY_LOW # MLOAD counter (3) + gas_costs.G_VERY_LOW # MSTORE spender at memory[64] (3) + gas_costs.G_BASE # POP call result (2) # Counter decrement: MSTORE(0, SUB(MLOAD(0), 1)) - + gas_costs.G_LOW # MLOAD counter (5) + + gas_costs.G_VERY_LOW # MLOAD counter (3) + gas_costs.G_VERY_LOW # PUSH1 1 (3) + gas_costs.G_VERY_LOW # SUB (3) + gas_costs.G_VERY_LOW # MSTORE counter back (3) # While loop condition check - + gas_costs.G_LOW # MLOAD counter (5) - + gas_costs.G_BASE # ISZERO (2) - + gas_costs.G_BASE # ISZERO (2) - + 
gas_costs.G_MID # JUMPI back to loop start (8) + + gas_costs.G_VERY_LOW # MLOAD counter (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_VERY_LOW # ISZERO (3) + + gas_costs.G_HIGH # JUMPI back to loop start (10) ) # ERC20 internal gas (same for all calls) @@ -337,7 +363,7 @@ def test_sstore_erc20_approve( erc20_internal_gas = ( gas_costs.G_VERY_LOW # PUSH4 selector (3) + gas_costs.G_BASE # EQ selector match (2) - + gas_costs.G_MID # JUMPI to function (8) + + gas_costs.G_HIGH # JUMPI to function (10) + gas_costs.G_JUMPDEST # JUMPDEST at function start (1) + gas_costs.G_VERY_LOW # CALLDATALOAD spender (3) + gas_costs.G_VERY_LOW # CALLDATALOAD amount (3) @@ -353,10 +379,6 @@ def test_sstore_erc20_approve( # RETURN costs 0 gas ) - # Calculate total gas needed - total_overhead = intrinsic_gas + (overhead_per_contract * num_contracts) - available_gas_for_iterations = gas_benchmark_value - total_overhead - # For each contract: first call is COLD (2600), subsequent are WARM (100) # Solve for calls per contract accounting for cold/warm transition warm_call_cost = ( @@ -366,10 +388,19 @@ def test_sstore_erc20_approve( gas_costs.G_COLD_ACCOUNT_ACCESS - gas_costs.G_WARM_ACCOUNT_ACCESS ) - # Per contract: gas_available = cold_warm_diff + calls * warm_call_cost - gas_per_contract = available_gas_for_iterations // num_contracts + # Calculate how many transactions we need to fill the block + num_txs = max(1, gas_benchmark_value // tx_gas_limit) + + # Calculate gas budget per contract per transaction + total_overhead_per_tx = intrinsic_gas + ( + overhead_per_contract * num_contracts + ) + available_gas_per_tx = tx_gas_limit - total_overhead_per_tx + gas_per_contract_per_tx = available_gas_per_tx // num_contracts + + # Per contract per tx: gas = cold_warm_diff + calls * warm_call_cost calls_per_contract = int( - (gas_per_contract - cold_warm_diff) // warm_call_cost + (gas_per_contract_per_tx - cold_warm_diff) // warm_call_cost ) # Deploy selected ERC20 contracts using stubs @@ -384,10 +415,11 @@ def test_sstore_erc20_approve( # Log test requirements print( f"Total gas budget: {gas_benchmark_value / 1_000_000:.1f}M gas. " - f"Intrinsic: {intrinsic_gas}, " + f"Tx gas limit: {tx_gas_limit / 1_000_000:.1f}M gas. " + f"Number of txs: {num_txs}. " f"Overhead per contract: {overhead_per_contract}, " f"Warm call cost: {warm_call_cost}. " - f"{calls_per_contract} approve calls per contract " + f"{calls_per_contract} approve calls per contract per tx " f"({num_contracts} contracts)." 
) @@ -433,12 +465,16 @@ def test_sstore_erc20_approve( # Deploy attack contract attack_address = pre.deploy_contract(code=attack_code) - # Run the attack - attack_tx = Transaction( - to=attack_address, - gas_limit=gas_benchmark_value, - sender=pre.fund_eoa(), - ) + # Create multiple attack transactions to fill the block + sender = pre.fund_eoa() + attack_txs = [ + Transaction( + to=attack_address, + gas_limit=tx_gas_limit, + sender=sender, + ) + for _ in range(num_txs) + ] # Post-state post = { @@ -447,6 +483,6 @@ def test_sstore_erc20_approve( blockchain_test( pre=pre, - blocks=[Block(txs=[attack_tx])], + blocks=[Block(txs=attack_txs)], post=post, ) From 5eb4e7b7dc16ecfd9dd004727171e5aa2bbeaca3 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 15 Jan 2026 21:54:10 +0100 Subject: [PATCH 007/154] feat(testing/forks): Implement `bytecode.gas_cost(fork)` (#2002) * feat(testing/vm): Implement bytecode gas calc * feat(tests): Use `bytecode.gas_cost` in some tests * refactor: Renames, tox fixes --- docs/writing_tests/index.md | 4 + docs/writing_tests/opcode_metadata.md | 406 +++++++++ .../plugins/forks/tests/test_forks.py | 6 +- .../src/execution_testing/forks/base_fork.py | 27 +- .../execution_testing/forks/forks/forks.py | 810 +++++++++++++++++- .../src/execution_testing/forks/gas_costs.py | 1 + .../forks/tests/test_opcode_gas_costs.py | 602 +++++++++++++ .../tools/utility/generators.py | 11 +- .../src/execution_testing/vm/__init__.py | 8 + .../testing/src/execution_testing/vm/bases.py | 52 ++ .../src/execution_testing/vm/bytecode.py | 44 +- .../src/execution_testing/vm/opcodes.py | 514 ++++++++++- .../eip2929_gas_cost_increases/test_call.py | 7 +- .../test_mcopy_memory_expansion.py | 102 +-- .../create/test_create_deposit_oog.py | 90 +- tests/frontier/opcodes/test_all_opcodes.py | 139 +-- tests/frontier/opcodes/test_dup.py | 3 - tests/frontier/opcodes/test_exp.py | 13 +- tests/frontier/opcodes/test_log.py | 19 +- .../test_count_leading_zeros.py | 8 +- 20 files changed, 2551 insertions(+), 315 deletions(-) create mode 100644 docs/writing_tests/opcode_metadata.md create mode 100644 packages/testing/src/execution_testing/forks/tests/test_opcode_gas_costs.py create mode 100644 packages/testing/src/execution_testing/vm/bases.py diff --git a/docs/writing_tests/index.md b/docs/writing_tests/index.md index 8f446b47cd..0d3ebee67b 100644 --- a/docs/writing_tests/index.md +++ b/docs/writing_tests/index.md @@ -28,4 +28,8 @@ For help deciding which test format to select, see [Types of Tests](./types_of_t - [Gas Optimization](./gas_optimization.md) - Optimize gas limits in your tests for efficiency and compatibility with future forks. - [Porting tests](./porting_legacy_tests.md): A guide to porting @ethereum/tests to EEST. +## Advanced Topics + +- [Opcode Metadata and Gas Calculations](./opcode_metadata.md) - Calculate gas costs and refunds using opcode metadata (advanced feature for gas-focused tests) + Please check that your code adheres to the repo's coding standards and read the other pages in this section for more background and an explanation of how to implement state transition and blockchain tests. 
diff --git a/docs/writing_tests/opcode_metadata.md b/docs/writing_tests/opcode_metadata.md new file mode 100644 index 0000000000..e6530e4267 --- /dev/null +++ b/docs/writing_tests/opcode_metadata.md @@ -0,0 +1,406 @@ +# Advanced: Opcode Metadata and Gas Calculations + +## Overview + +The execution testing package provides capabilities to calculate gas costs and refunds for individual opcodes and bytecode sequences based on their metadata. This is useful for: + +- Writing tests that rely on exact gas consumption +- Creating gas benchmarking tests +- Validating gas cost calculations for specific opcode scenarios +- Future-proofing tests against breaking in upcoming forks that change gas rules + +## Opcode Metadata + +Many opcodes accept metadata parameters that affect their gas cost calculations. Metadata represents runtime state information that influences gas consumption. + +### Common Metadata Fields + +#### Memory Expansion + +Opcodes that can expand memory accept: + +- `new_memory_size`: Memory size after the operation (in bytes) +- `old_memory_size`: Memory size before the operation (in bytes) + +Example: + +```python +Op.MSTORE(offset=0, value=0x123, new_memory_size=32, old_memory_size=0) +``` + +#### Account Access (Warm/Cold) + +Opcodes that access accounts accept: + +- `address_warm`: Whether the address is already warm (`True`) or cold (`False`) + +Example: + +```python +Op.BALANCE(address=0x1234, address_warm=True) # Warm access: 100 gas +Op.BALANCE(address=0x1234, address_warm=False) # Cold access: 2,600 gas +``` + +#### Storage Access + +- `key_warm`: Whether the storage key is already warm +- `original_value`: The value the storage key had at the beginning of the transaction +- `current_value`: The value the storage key holds at the time the opcode is executed +- `new_value`: The value set by the opcode + +Example: + +```python +Op.SSTORE(key=1, value=0, key_warm=True, original_value=1, new_value=0) +``` + +#### Data Copy Operations + +- `data_size`: Number of bytes being copied + +Example: + +```python +Op.CALLDATACOPY(dest_offset=0, offset=0, size=64, data_size=64, new_memory_size=64) +``` + +#### Contract Creation + +- `init_code_size`: Size of the initialization code (affects CREATE/CREATE2 gas) + +Example: + +```python +Op.CREATE(value=0, offset=0, size=100, init_code_size=100, new_memory_size=100) +``` + +#### Call Operations + +- `address_warm`: Whether the call target is warm +- `value_transfer`: Whether value is being transferred +- `account_new`: Whether creating a new account + +Example: + +```python +Op.CALL( + gas=100000, + address=0x5678, + value=1, + address_warm=False, + value_transfer=True, + account_new=True, + new_memory_size=64 +) +``` + +#### Return from Contract Creation + +- `code_deposit_size`: Size of bytecode being deployed (only for RETURN in initcode) + +Example: + +```python +Op.RETURN(offset=0, size=100, code_deposit_size=100, new_memory_size=100) +``` + +#### Exponential Operation + +- `exponent`: The exponent value (byte size calculated automatically) + +Example: + +```python +Op.EXP(a=2, exponent=0xFFFFFF) # Gas based on exponent byte size +``` + +## Calculating Gas Costs + +### For Individual Opcodes + +Use the fork's `opcode_gas_calculator()` to get gas costs: + +```python +from execution_testing import Op +from execution_testing.forks import Osaka + +# Get the gas calculator for the fork +gas_calc = Osaka.opcode_gas_calculator() + +# Calculate gas for a simple opcode +add_gas = gas_calc(Op.ADD) # Returns 3 (G_VERY_LOW) + +# Calculate gas for 
an opcode with metadata +mstore_gas = gas_calc(Op.MSTORE(new_memory_size=32)) +# Returns: 3 (base) + memory_expansion_cost(32 bytes) + +# Calculate gas for complex metadata +call_gas = gas_calc( + Op.CALL( + address_warm=False, + value_transfer=True, + account_new=True, + new_memory_size=64 + ) +) +# Returns: 2,600 (cold) + 9,000 (value) + 25,000 (new account) + memory_expansion_cost +``` + +### For Bytecode Sequences + +Use the `bytecode.gas_cost(fork)` method: + +```python +from execution_testing import Op +from execution_testing.forks import Osaka + +# Simple bytecode +bytecode = Op.PUSH1(1) + Op.PUSH1(2) + Op.ADD +total_gas = bytecode.gas_cost(Osaka) +# Returns: 3 + 3 + 3 = 9 + +# With metadata +bytecode = Op.MSTORE(0, 1, new_memory_size=32) + Op.MLOAD(0) # Last opcode does not expand the memory further +total_gas = bytecode.gas_cost(Osaka) +# Calculates total including memory expansion +``` + +### Fork-Specific Gas Costs + +Gas costs can vary between forks. Always specify the fork when calculating: + +```python +from execution_testing.forks import Shanghai, Osaka, Paris + +# CREATE gas costs differ between forks (EIP-3860 in Shanghai) +create_op = Op.CREATE(init_code_size=100, new_memory_size=100) + +shanghai_gas = create_op.gas_cost(Shanghai) +# Returns: 32,000 + (2 * 4 words) + memory_expansion = 32,008 + expansion + +osaka_gas = create_op.gas_cost(Osaka) +# Same calculation, inherited from Shanghai + +assert shanghai_gas == osaka_gas + +paris_gas = create_op.gas_cost(Paris) +# Different calculation, prior to Shanghai the initcode was not metered + +assert paris_gas != shanghai_gas +``` + +## Calculating Refunds + +Some opcodes provide gas refunds. Currently, only `SSTORE` provides refunds when clearing storage. + +### For Individual Opcodes + +```python +from execution_testing import Op +from execution_testing.forks import Osaka + +# Get the refund calculator +refund_calc = Osaka.opcode_refund_calculator() + +# SSTORE clearing storage (non-zero → zero) +sstore_refund = refund_calc( + Op.SSTORE(new_value=0, original_value=1) +) +# Returns: 4,800 (R_STORAGE_CLEAR) + +# SSTORE not clearing storage +no_refund = refund_calc( + Op.SSTORE(new_value=2, original_value=1) +) +# Returns: 0 + +# Other opcodes don't provide refunds +add_refund = refund_calc(Op.ADD) +# Returns: 0 +``` + +### For Bytecode Sequences + +Use the `bytecode.refund(fork)` method: + +```python +from execution_testing import Op +from execution_testing.forks import Osaka + +# Multiple SSTORE operations clearing storage +bytecode = ( + Op.SSTORE(0, 0, original_value=1, new_value=0) + + Op.SSTORE(1, 0, original_value=1, new_value=0) +) +total_refund = bytecode.refund(Osaka) +# Returns: 4,800 + 4,800 = 9,600 +``` + +## Writing Tests with Gas Calculations + +### Example: Out-of-Gas Test Using Exact Gas Calculation + +This example demonstrates a practical use case: testing that a subcall with insufficient gas fails correctly. + +```python +import pytest +from execution_testing import ( + Account, + Alloc, + Environment, + Fork, + StateTestFiller, + Transaction, + Op, +) + +@pytest.mark.valid_from("Byzantium") +def test_subcall_out_of_gas( + state_test: StateTestFiller, + fork: Fork, + pre: Alloc, + env: Environment, +): + """ + Test that a subcall with exactly (gas_needed - 1) fails with out-of-gas, + and verify via SSTORE that the operation didn't execute. 
+ """ + + # Define the code that will run in the subcall + # A simple SSTORE operation with known gas cost + subcall_code = Op.SSTORE( + slot=0, + value=1, + key_warm=False, # Cold storage access + new_value=1, + ) + Op.STOP + + # Calculate exact gas needed for this operation in this fork + subcall_gas_needed = subcall_code.gas_cost(fork) + + # Deploy contract that will be called + callee = pre.deploy_contract(subcall_code) + + # Deploy caller contract that calls with insufficient gas + caller = pre.deploy_contract( + # Call with exactly 1 gas less than needed + Op.SSTORE( + slot=0, + value=Op.CALL( + gas=subcall_gas_needed - 1, # Insufficient gas! + address=callee, + value=0, + args_offset=0, + args_size=0, + ret_offset=0, + ret_size=0, + ), + ) + ) + + tx = Transaction( + to=caller, + gas_limit=500_000, + sender=pre.fund_eoa(), + ) + + post = { + caller: Account( + storage={ + 0: 0, # CALL returns 0 on failure + }, + ), + callee: Account( + storage={ + 0: 0, # SSTORE didn't execute due to OOG + }, + ), + } + + state_test(env=env, pre=pre, post=post, tx=tx) +``` + +This example shows: + +- **Practical Use**: Testing out-of-gas conditions requires knowing exact gas costs +- **Metadata Usage**: Using SSTORE metadata to calculate precise gas requirements +- **Verification**: Post-state checks confirm the subcall failed (storage unchanged) +- **Future-Proof**: Uses `gas_cost(fork)` so it adapts if gas calculations change + +## Important Considerations + +### 1. Most Tests Don't Need This + +Most tests simply need to specify sufficient gas for the transaction to work and do not need to be exact. You typically only need explicit gas calculations when: + +- Writing gas-focused benchmarks +- Verifying exact gas consumption for specific scenarios +- Testing edge cases in gas metering (off-by-one checks) + +### 2. Metadata Must Match Runtime State + +The metadata is not checked against the executed bytecode! When using metadata in tests, ensure the pre-state and transactions are accurately set up to reflect the bytecode metadata: + +```python +# ❌ Incorrect: This is impossible because the first `Op.BALANCE` will always warm up the account: +Op.BALANCE(address=some_address, address_warm=False) + Op.BALANCE(address=some_address, address_warm=False) + +# ✅ Correct: If the address was accessed earlier, it's warm: +Op.BALANCE(address=some_address, address_warm=False) + Op.BALANCE(address=some_address, address_warm=True) +``` + +Example using the test pre-conditions: + +```python +# ✅ Correct: The address is in the access list, it's warm from the beginning: +code_address = pre.deploy_contract(Op.BALANCE(address=some_address, address_warm=True) + Op.BALANCE(address=some_address, address_warm=True)) +... +tx = Transaction( + to=code_address, + gas_limit=500_000, + sender=pre.fund_eoa(), + access_list=[AccessList(address=code_address, storage_keys=[])] +) +``` + +### 3. Memory Size Calculations + +Memory expansion is calculated from the highest offset accessed: + +```python +# MSTORE to offset 0 requires 32 bytes of memory +Op.MSTORE(offset=0, value=0x123, new_memory_size=32) + +# MSTORE to offset 32 requires 64 bytes total +Op.MSTORE(offset=32, value=0x456, new_memory_size=64, old_memory_size=32) +``` + +### 4. 
Fork Activation Matters + +Some opcodes are only available in certain forks: + +```python +# ✅ Available in Shanghai and later +Op.PUSH0.gas_cost(Shanghai) + +# ❌ Not available in Paris +# Op.PUSH0.gas_cost(Paris) # Would raise an error + +# ✅ Available in Osaka and later +Op.CLZ.gas_cost(Osaka) +``` + +### 5. Refunds Are Limited + +Only certain operations provide refunds: + +- **SSTORE**: Refund when clearing storage (non-zero → zero) +- Most opcodes return 0 refund + +Transaction-level operations like authorization lists also provide refunds, but these are handled at the transaction level, not in opcode metadata. + +## See Also + +- [Gas Optimization](./gas_optimization.md) - Optimizing test gas limits +- [Fork Methods](./fork_methods.md) - Using fork-specific methods +- [Writing Tests](./writing_a_new_test.md) - General test writing guide diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_forks.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_forks.py index ed04021ded..7aa4d0cd68 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_forks.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_forks.py @@ -38,7 +38,11 @@ def test_all_forks({StateTest.pytest_parameter_name()}): ) result = pytester.runpytest("-c", "pytest-fill.ini", "-v") all_forks = get_deployed_forks() - forks_under_test = forks_from_until(all_forks[0], all_forks[-1]) + forks_under_test = [ + f + for f in forks_from_until(all_forks[0], all_forks[-1]) + if not f.ignore() + ] expected_skipped = 2 # eels doesn't support Constantinople expected_passed = ( len(forks_under_test) * len(StateTest.supported_fixture_formats) diff --git a/packages/testing/src/execution_testing/forks/base_fork.py b/packages/testing/src/execution_testing/forks/base_fork.py index 4e20a0e2bb..a061497b25 100644 --- a/packages/testing/src/execution_testing/forks/base_fork.py +++ b/packages/testing/src/execution_testing/forks/base_fork.py @@ -1,8 +1,9 @@ """Abstract base class for Ethereum forks.""" -from abc import ABC, ABCMeta, abstractmethod +from abc import ABCMeta, abstractmethod from typing import ( Any, + Callable, ClassVar, Dict, List, @@ -23,7 +24,12 @@ BlobSchedule, ) from execution_testing.base_types.conversions import BytesConvertible -from execution_testing.vm import EVMCodeType, Opcodes +from execution_testing.vm import ( + EVMCodeType, + ForkOpcodeInterface, + OpcodeBase, + Opcodes, +) from .base_decorators import prefer_transition_to_method from .gas_costs import GasCosts @@ -237,7 +243,7 @@ def __le__(cls, other: "BaseForkMeta") -> bool: return cls is other or BaseForkMeta._is_subclass_of(other, cls) -class BaseFork(ABC, metaclass=BaseForkMeta): +class BaseFork(ForkOpcodeInterface, metaclass=BaseForkMeta): """ An abstract class representing an Ethereum fork. @@ -355,6 +361,21 @@ def gas_costs( """Return dataclass with the gas costs constants for the fork.""" pass + @classmethod + @abstractmethod + def opcode_gas_map( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> Dict[OpcodeBase, int | Callable[[OpcodeBase], int]]: + """ + Return a mapping of opcodes to either int or callable. 
+ + The values of the mapping can be as follow: + - Constants (int): Direct gas cost values from gas_costs() + - Callables: Functions that take the opcode instance with metadata and + return gas cost + """ + pass + @classmethod @abstractmethod def memory_expansion_gas_calculator( diff --git a/packages/testing/src/execution_testing/forks/forks/forks.py b/packages/testing/src/execution_testing/forks/forks/forks.py index 7e650a821d..62c108c65f 100644 --- a/packages/testing/src/execution_testing/forks/forks/forks.py +++ b/packages/testing/src/execution_testing/forks/forks/forks.py @@ -4,7 +4,16 @@ from hashlib import sha256 from os.path import realpath from pathlib import Path -from typing import List, Literal, Mapping, Optional, Sized, Tuple +from typing import ( + Callable, + Dict, + List, + Literal, + Mapping, + Optional, + Sized, + Tuple, +) from execution_testing.base_types import ( AccessList, @@ -14,7 +23,12 @@ ForkBlobSchedule, ) from execution_testing.base_types.conversions import BytesConvertible -from execution_testing.vm import EVMCodeType, Opcodes +from execution_testing.vm import ( + EVMCodeType, + OpcodeBase, + OpcodeGasCalculator, + Opcodes, +) from ..base_fork import ( BaseFeeChangeCalculator, @@ -128,6 +142,7 @@ def gas_costs( G_WARM_SLOAD=100, G_COLD_SLOAD=2_100, G_STORAGE_SET=20_000, + G_STORAGE_UPDATE=5_000, G_STORAGE_RESET=2_900, R_STORAGE_CLEAR=4_800, G_SELF_DESTRUCT=5_000, @@ -157,6 +172,494 @@ def gas_costs( R_AUTHORIZATION_EXISTING_AUTHORITY=0, ) + @classmethod + def _with_memory_expansion( + cls, + base_gas: int | Callable[[OpcodeBase], int], + memory_expansion_gas_calculator: MemoryExpansionGasCalculator, + ) -> Callable[[OpcodeBase], int]: + """ + Wrap a gas cost calculator to include memory expansion cost. + + Args: + base_gas: Either a constant gas cost (int) or a callable that + calculates it + memory_expansion_gas_calculator: Calculator for memory expansion + cost + + Returns: + A callable that calculates base_gas + memory_expansion_cost + + """ + + def wrapper(opcode: OpcodeBase) -> int: + # Calculate base gas cost + if callable(base_gas): + base_cost = base_gas(opcode) + else: + base_cost = base_gas + + # Add memory expansion cost if metadata is present + new_memory_size = opcode.metadata["new_memory_size"] + old_memory_size = opcode.metadata["old_memory_size"] + expansion_cost = memory_expansion_gas_calculator( + new_bytes=new_memory_size, previous_bytes=old_memory_size + ) + + return base_cost + expansion_cost + + return wrapper + + @classmethod + def _with_account_access( + cls, + base_gas: int | Callable[[OpcodeBase], int], + gas_costs: "GasCosts", + ) -> Callable[[OpcodeBase], int]: + """ + Wrap a gas cost calculator to include account access cost. 
+ + Args: + base_gas: Either a constant gas cost (int) or a callable that + calculates it + gas_costs: The gas costs dataclass for accessing warm/cold costs + + Returns: + A callable that calculates base_gas + account_access_cost + + """ + + def wrapper(opcode: OpcodeBase) -> int: + # Calculate base gas cost + if callable(base_gas): + base_cost = base_gas(opcode) + else: + base_cost = base_gas + + # Add account access cost based on warmth + if opcode.metadata["address_warm"]: + access_cost = gas_costs.G_WARM_ACCOUNT_ACCESS + else: + access_cost = gas_costs.G_COLD_ACCOUNT_ACCESS + + return base_cost + access_cost + + return wrapper + + @classmethod + def _with_data_copy( + cls, + base_gas: int | Callable[[OpcodeBase], int], + gas_costs: "GasCosts", + ) -> Callable[[OpcodeBase], int]: + """ + Wrap a gas cost calculator to include data copy cost. + + Args: + base_gas: Either a constant gas cost (int) or a callable that + calculates it + gas_costs: The gas costs dataclass for accessing G_COPY + + Returns: + A callable that calculates base_gas + copy_cost + + """ + + def wrapper(opcode: OpcodeBase) -> int: + # Calculate base gas cost + if callable(base_gas): + base_cost = base_gas(opcode) + else: + base_cost = base_gas + + # Add copy cost based on data size + data_size = opcode.metadata["data_size"] + word_count = (data_size + 31) // 32 + copy_cost = gas_costs.G_COPY * word_count + + return base_cost + copy_cost + + return wrapper + + @classmethod + def opcode_gas_map( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> Dict[OpcodeBase, int | Callable[[OpcodeBase], int]]: + """ + Return a mapping of opcodes to their gas costs. + + Each entry is either: + - Constants (int): Direct gas cost values from gas_costs() + - Callables: Functions that take the opcode instance with metadata and + return gas cost + """ + gas_costs = cls.gas_costs( + block_number=block_number, timestamp=timestamp + ) + memory_expansion_calculator = cls.memory_expansion_gas_calculator( + block_number=block_number, timestamp=timestamp + ) + + # Define the opcode gas cost mapping + # Each entry is either: + # - an int (constant cost) + # - a callable(opcode) -> int + return { + # Stop and arithmetic operations + Opcodes.STOP: 0, + Opcodes.ADD: gas_costs.G_VERY_LOW, + Opcodes.MUL: gas_costs.G_LOW, + Opcodes.SUB: gas_costs.G_VERY_LOW, + Opcodes.DIV: gas_costs.G_LOW, + Opcodes.SDIV: gas_costs.G_LOW, + Opcodes.MOD: gas_costs.G_LOW, + Opcodes.SMOD: gas_costs.G_LOW, + Opcodes.ADDMOD: gas_costs.G_MID, + Opcodes.MULMOD: gas_costs.G_MID, + Opcodes.EXP: lambda op: gas_costs.G_EXP + + gas_costs.G_EXP_BYTE + * ((op.metadata["exponent"].bit_length() + 7) // 8), + Opcodes.SIGNEXTEND: gas_costs.G_LOW, + # Comparison & bitwise logic operations + Opcodes.LT: gas_costs.G_VERY_LOW, + Opcodes.GT: gas_costs.G_VERY_LOW, + Opcodes.SLT: gas_costs.G_VERY_LOW, + Opcodes.SGT: gas_costs.G_VERY_LOW, + Opcodes.EQ: gas_costs.G_VERY_LOW, + Opcodes.ISZERO: gas_costs.G_VERY_LOW, + Opcodes.AND: gas_costs.G_VERY_LOW, + Opcodes.OR: gas_costs.G_VERY_LOW, + Opcodes.XOR: gas_costs.G_VERY_LOW, + Opcodes.NOT: gas_costs.G_VERY_LOW, + Opcodes.BYTE: gas_costs.G_VERY_LOW, + # SHA3 + Opcodes.SHA3: cls._with_memory_expansion( + lambda op: gas_costs.G_KECCAK_256 + + gas_costs.G_KECCAK_256_WORD + * ((op.metadata["data_size"] + 31) // 32), + memory_expansion_calculator, + ), + # Environmental information + Opcodes.ADDRESS: gas_costs.G_BASE, + Opcodes.BALANCE: cls._with_account_access(0, gas_costs), + Opcodes.ORIGIN: gas_costs.G_BASE, + Opcodes.CALLER: gas_costs.G_BASE, 
+ Opcodes.CALLVALUE: gas_costs.G_BASE, + Opcodes.CALLDATALOAD: gas_costs.G_VERY_LOW, + Opcodes.CALLDATASIZE: gas_costs.G_BASE, + Opcodes.CALLDATACOPY: cls._with_memory_expansion( + cls._with_data_copy(gas_costs.G_VERY_LOW, gas_costs), + memory_expansion_calculator, + ), + Opcodes.CODESIZE: gas_costs.G_BASE, + Opcodes.CODECOPY: cls._with_memory_expansion( + cls._with_data_copy(gas_costs.G_VERY_LOW, gas_costs), + memory_expansion_calculator, + ), + Opcodes.GASPRICE: gas_costs.G_BASE, + Opcodes.EXTCODESIZE: cls._with_account_access(0, gas_costs), + Opcodes.EXTCODECOPY: cls._with_memory_expansion( + cls._with_data_copy( + cls._with_account_access(0, gas_costs), + gas_costs, + ), + memory_expansion_calculator, + ), + # Block information + Opcodes.BLOCKHASH: gas_costs.G_BLOCKHASH, + Opcodes.COINBASE: gas_costs.G_BASE, + Opcodes.TIMESTAMP: gas_costs.G_BASE, + Opcodes.NUMBER: gas_costs.G_BASE, + Opcodes.PREVRANDAO: gas_costs.G_BASE, + Opcodes.GASLIMIT: gas_costs.G_BASE, + # Stack, memory, storage and flow operations + Opcodes.POP: gas_costs.G_BASE, + Opcodes.MLOAD: cls._with_memory_expansion( + gas_costs.G_VERY_LOW, memory_expansion_calculator + ), + Opcodes.MSTORE: cls._with_memory_expansion( + gas_costs.G_VERY_LOW, memory_expansion_calculator + ), + Opcodes.MSTORE8: cls._with_memory_expansion( + gas_costs.G_VERY_LOW, memory_expansion_calculator + ), + Opcodes.SLOAD: lambda op: gas_costs.G_WARM_SLOAD + if op.metadata["key_warm"] + else gas_costs.G_COLD_SLOAD, + Opcodes.SSTORE: lambda op: cls._calculate_sstore_gas( + op, gas_costs + ), + Opcodes.JUMP: gas_costs.G_MID, + Opcodes.JUMPI: gas_costs.G_HIGH, + Opcodes.PC: gas_costs.G_BASE, + Opcodes.MSIZE: gas_costs.G_BASE, + Opcodes.GAS: gas_costs.G_BASE, + Opcodes.JUMPDEST: gas_costs.G_JUMPDEST, + # Push operations (PUSH1 through PUSH32) + **{ + getattr(Opcodes, f"PUSH{i}"): gas_costs.G_VERY_LOW + for i in range(1, 33) + }, + # Dup operations (DUP1 through DUP16) + **{ + getattr(Opcodes, f"DUP{i}"): gas_costs.G_VERY_LOW + for i in range(1, 17) + }, + # Swap operations (SWAP1 through SWAP16) + **{ + getattr(Opcodes, f"SWAP{i}"): gas_costs.G_VERY_LOW + for i in range(1, 17) + }, + # Logging operations + Opcodes.LOG0: cls._with_memory_expansion( + lambda op: gas_costs.G_LOG + + gas_costs.G_LOG_DATA * op.metadata["data_size"], + memory_expansion_calculator, + ), + Opcodes.LOG1: cls._with_memory_expansion( + lambda op: gas_costs.G_LOG + + gas_costs.G_LOG_DATA * op.metadata["data_size"] + + gas_costs.G_LOG_TOPIC, + memory_expansion_calculator, + ), + Opcodes.LOG2: cls._with_memory_expansion( + lambda op: gas_costs.G_LOG + + gas_costs.G_LOG_DATA * op.metadata["data_size"] + + gas_costs.G_LOG_TOPIC * 2, + memory_expansion_calculator, + ), + Opcodes.LOG3: cls._with_memory_expansion( + lambda op: gas_costs.G_LOG + + gas_costs.G_LOG_DATA * op.metadata["data_size"] + + gas_costs.G_LOG_TOPIC * 3, + memory_expansion_calculator, + ), + Opcodes.LOG4: cls._with_memory_expansion( + lambda op: gas_costs.G_LOG + + gas_costs.G_LOG_DATA * op.metadata["data_size"] + + gas_costs.G_LOG_TOPIC * 4, + memory_expansion_calculator, + ), + # System operations + Opcodes.CREATE: cls._with_memory_expansion( + lambda op: cls._calculate_create_gas(op, gas_costs), + memory_expansion_calculator, + ), + Opcodes.CALL: cls._with_memory_expansion( + lambda op: cls._calculate_call_gas(op, gas_costs), + memory_expansion_calculator, + ), + Opcodes.CALLCODE: cls._with_memory_expansion( + lambda op: cls._calculate_call_gas(op, gas_costs), + memory_expansion_calculator, + ), + Opcodes.RETURN: 
cls._with_memory_expansion( + lambda op: cls._calculate_return_gas(op, gas_costs), + memory_expansion_calculator, + ), + Opcodes.INVALID: 0, + Opcodes.SELFDESTRUCT: lambda op: cls._calculate_selfdestruct_gas( + op, gas_costs + ), + } + + @classmethod + def opcode_gas_calculator( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> OpcodeGasCalculator: + """ + Return callable that calculates the gas cost of a single opcode. + """ + opcode_gas_map = cls.opcode_gas_map( + block_number=block_number, timestamp=timestamp + ) + + def fn(opcode: OpcodeBase) -> int: + # Get the gas cost or calculator + if opcode not in opcode_gas_map: + raise ValueError( + f"No gas cost defined for opcode: {opcode._name_}" + ) + gas_cost_or_calculator = opcode_gas_map[opcode] + + # If it's a callable, call it with the opcode + if callable(gas_cost_or_calculator): + return gas_cost_or_calculator(opcode) + + # Otherwise it's a constant + return gas_cost_or_calculator + + return fn + + @classmethod + def opcode_refund_map( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> Dict[OpcodeBase, int | Callable[[OpcodeBase], int]]: + """ + Return a mapping of opcodes to their gas refunds. + + Each entry is either: + - Constants (int): Direct gas refund values + - Callables: Functions that take the opcode instance with metadata and + return gas refund + """ + gas_costs = cls.gas_costs( + block_number=block_number, timestamp=timestamp + ) + + # Only SSTORE provides refunds + return { + Opcodes.SSTORE: lambda op: cls._calculate_sstore_refund( + op, gas_costs + ), + } + + @classmethod + def opcode_refund_calculator( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> OpcodeGasCalculator: + """ + Return callable that calculates the gas refund of a single opcode. + """ + opcode_refund_map = cls.opcode_refund_map( + block_number=block_number, timestamp=timestamp + ) + + def fn(opcode: OpcodeBase) -> int: + # Get the gas refund or calculator + if opcode not in opcode_refund_map: + # Most opcodes don't provide refunds + return 0 + refund_or_calculator = opcode_refund_map[opcode] + + # If it's a callable, call it with the opcode + if callable(refund_or_calculator): + return refund_or_calculator(opcode) + + # Otherwise it's a constant + return refund_or_calculator + + return fn + + @classmethod + def _calculate_sstore_refund( + cls, opcode: OpcodeBase, gas_costs: GasCosts + ) -> int: + """Calculate SSTORE gas refund based on metadata.""" + metadata = opcode.metadata + + original_value = metadata["original_value"] + current_value = metadata["current_value"] + if current_value is None: + current_value = original_value + new_value = metadata["new_value"] + + # Refund is provided when setting from non-zero to zero + refund = 0 + if current_value != new_value: + if original_value != 0 and current_value != 0 and new_value == 0: + # Storage is cleared for the first time in the transaction + refund += gas_costs.R_STORAGE_CLEAR + + if original_value != 0 and current_value == 0: + # Gas refund issued earlier to be reversed + refund -= gas_costs.R_STORAGE_CLEAR + + if original_value == new_value: + # Storage slot being restored to its original value + if original_value == 0: + # Slot was originally empty and was SET earlier + refund += gas_costs.G_STORAGE_SET - gas_costs.G_WARM_SLOAD + else: + # Slot was originally non-empty and was UPDATED earlier + refund += ( + gas_costs.G_STORAGE_UPDATE + - gas_costs.G_COLD_SLOAD + - gas_costs.G_WARM_SLOAD + ) + + return refund + + @classmethod + def _calculate_sstore_gas( + cls, opcode: 
OpcodeBase, gas_costs: GasCosts + ) -> int: + """Calculate SSTORE gas cost based on metadata.""" + metadata = opcode.metadata + + original_value = metadata["original_value"] + current_value = metadata["current_value"] + if current_value is None: + current_value = original_value + new_value = metadata["new_value"] + + gas_cost = 0 if metadata["key_warm"] else gas_costs.G_COLD_SLOAD + + if original_value == current_value and current_value != new_value: + if original_value == 0: + gas_cost += gas_costs.G_STORAGE_SET + else: + gas_cost += gas_costs.G_STORAGE_UPDATE - gas_costs.G_COLD_SLOAD + else: + gas_cost += gas_costs.G_WARM_SLOAD + + return gas_cost + + @classmethod + def _calculate_call_gas( + cls, opcode: OpcodeBase, gas_costs: GasCosts + ) -> int: + """ + Calculate CALL/DELEGATECALL/STATICCALL gas cost based on metadata. + """ + metadata = opcode.metadata + + # Base cost depends on address warmth + if metadata["address_warm"]: + base_cost = gas_costs.G_WARM_ACCOUNT_ACCESS + else: + base_cost = gas_costs.G_COLD_ACCOUNT_ACCESS + + return base_cost + + @classmethod + def _calculate_create_gas( + cls, opcode: OpcodeBase, gas_costs: GasCosts + ) -> int: + """CREATE gas is constant at Frontier.""" + del opcode + return gas_costs.G_CREATE + + @classmethod + def _calculate_return_gas( + cls, opcode: OpcodeBase, gas_costs: GasCosts + ) -> int: + """Calculate RETURN gas cost based on metadata.""" + metadata = opcode.metadata + + # Code deposit cost when returning from initcode + code_deposit_size = metadata["code_deposit_size"] + return gas_costs.G_CODE_DEPOSIT_BYTE * code_deposit_size + + @classmethod + def _calculate_selfdestruct_gas( + cls, opcode: OpcodeBase, gas_costs: GasCosts + ) -> int: + """Calculate SELFDESTRUCT gas cost based on metadata.""" + metadata = opcode.metadata + + base_cost = gas_costs.G_SELF_DESTRUCT + + # Check if the beneficiary is cold + if not metadata["address_warm"]: + base_cost += gas_costs.G_COLD_ACCOUNT_ACCESS + + # Check if creating a new account + if metadata["account_new"]: + base_cost += gas_costs.G_NEW_ACCOUNT + + return base_cost + @classmethod def memory_expansion_gas_calculator( cls, *, block_number: int = 0, timestamp: int = 0 @@ -414,7 +917,8 @@ def full_blob_tx_wrapper_version( ) -> int | None: """Return the version of the full blob transaction wrapper.""" raise NotImplementedError( - f"Full blob transaction wrapper version is not supported in {cls.name()}" + "Full blob transaction wrapper version is not supported in " + f"{cls.name()}" ) @classmethod @@ -878,6 +1382,28 @@ def call_opcodes( Homestead, cls ).call_opcodes(block_number=block_number, timestamp=timestamp) + @classmethod + def opcode_gas_map( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> Dict[OpcodeBase, int | Callable[[OpcodeBase], int]]: + """Add DELEGATECALL opcode gas cost for Homestead.""" + gas_costs = cls.gas_costs( + block_number=block_number, timestamp=timestamp + ) + memory_expansion_calculator = cls.memory_expansion_gas_calculator( + block_number=block_number, timestamp=timestamp + ) + base_map = super(Homestead, cls).opcode_gas_map( + block_number=block_number, timestamp=timestamp + ) + return { + **base_map, + Opcodes.DELEGATECALL: cls._with_memory_expansion( + lambda op: cls._calculate_call_gas(op, gas_costs), + memory_expansion_calculator, + ), + } + @classmethod def valid_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 @@ -939,10 +1465,32 @@ class Tangerine(DAOFork, ignore=True): class SpuriousDragon(Tangerine, ignore=True): """SpuriousDragon fork 
(EIP-155, EIP-158).""" - pass + @classmethod + def _calculate_call_gas( + cls, opcode: OpcodeBase, gas_costs: GasCosts + ) -> int: + """ + At Spurious Dragon, the call gas cost needs to take the value transfer + and account new into account. + """ + base_cost = super(SpuriousDragon, cls)._calculate_call_gas( + opcode, gas_costs + ) + # Additional costs for value transfer, does not apply to STATICCALL + metadata = opcode.metadata + if "value_transfer" in metadata: + if metadata["value_transfer"]: + base_cost += gas_costs.G_CALL_VALUE + if metadata["account_new"]: + base_cost += gas_costs.G_NEW_ACCOUNT + elif metadata["account_new"]: + raise ValueError("Account new requires value transfer") -class Byzantium(Homestead): + return base_cost + + +class Byzantium(SpuriousDragon): """Byzantium fork.""" @classmethod @@ -994,6 +1542,36 @@ def call_opcodes( Byzantium, cls ).call_opcodes(block_number=block_number, timestamp=timestamp) + @classmethod + def opcode_gas_map( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> Dict[OpcodeBase, int | Callable[[OpcodeBase], int]]: + """Add Byzantium opcodes gas costs.""" + gas_costs = cls.gas_costs( + block_number=block_number, timestamp=timestamp + ) + memory_expansion_calculator = cls.memory_expansion_gas_calculator( + block_number=block_number, timestamp=timestamp + ) + base_map = super(Byzantium, cls).opcode_gas_map( + block_number=block_number, timestamp=timestamp + ) + return { + **base_map, + Opcodes.RETURNDATASIZE: gas_costs.G_BASE, + Opcodes.RETURNDATACOPY: cls._with_memory_expansion( + cls._with_data_copy(gas_costs.G_VERY_LOW, gas_costs), + memory_expansion_calculator, + ), + Opcodes.STATICCALL: cls._with_memory_expansion( + lambda op: cls._calculate_call_gas(op, gas_costs), + memory_expansion_calculator, + ), + Opcodes.REVERT: cls._with_memory_expansion( + 0, memory_expansion_calculator + ), + } + @classmethod def valid_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 @@ -1037,6 +1615,20 @@ def get_reward(cls, *, block_number: int = 0, timestamp: int = 0) -> int: del block_number, timestamp return 2_000_000_000_000_000_000 + @classmethod + def _calculate_create2_gas( + cls, opcode: OpcodeBase, gas_costs: GasCosts + ) -> int: + """Calculate CREATE2 gas cost based on metadata.""" + metadata = opcode.metadata + + # Keccak256 hashing cost + init_code_size = metadata["init_code_size"] + init_code_words = (init_code_size + 31) // 32 + hash_gas = gas_costs.G_KECCAK_256_WORD * init_code_words + + return gas_costs.G_CREATE + hash_gas + @classmethod def create_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 @@ -1046,6 +1638,32 @@ def create_opcodes( Constantinople, cls ).create_opcodes(block_number=block_number, timestamp=timestamp) + @classmethod + def opcode_gas_map( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> Dict[OpcodeBase, int | Callable[[OpcodeBase], int]]: + """Add Constantinople opcodes gas costs.""" + gas_costs = cls.gas_costs( + block_number=block_number, timestamp=timestamp + ) + memory_expansion_calculator = cls.memory_expansion_gas_calculator( + block_number=block_number, timestamp=timestamp + ) + base_map = super(Constantinople, cls).opcode_gas_map( + block_number=block_number, timestamp=timestamp + ) + return { + **base_map, + Opcodes.SHL: gas_costs.G_VERY_LOW, + Opcodes.SHR: gas_costs.G_VERY_LOW, + Opcodes.SAR: gas_costs.G_VERY_LOW, + Opcodes.EXTCODEHASH: cls._with_account_access(0, gas_costs), + Opcodes.CREATE2: cls._with_memory_expansion( + lambda op: cls._calculate_create2_gas(op, gas_costs), + 
memory_expansion_calculator, + ), + } + @classmethod def valid_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 @@ -1081,6 +1699,23 @@ def precompiles( block_number=block_number, timestamp=timestamp ) + @classmethod + def opcode_gas_map( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> Dict[OpcodeBase, int | Callable[[OpcodeBase], int]]: + """Add Istanbul opcodes gas costs.""" + gas_costs = cls.gas_costs( + block_number=block_number, timestamp=timestamp + ) + base_map = super(Istanbul, cls).opcode_gas_map( + block_number=block_number, timestamp=timestamp + ) + return { + **base_map, + Opcodes.CHAINID: gas_costs.G_BASE, + Opcodes.SELFBALANCE: gas_costs.G_LOW, + } + @classmethod def valid_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 @@ -1209,6 +1844,22 @@ def contract_creating_tx_types( block_number=block_number, timestamp=timestamp ) + @classmethod + def opcode_gas_map( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> Dict[OpcodeBase, int | Callable[[OpcodeBase], int]]: + """Add London opcodes gas costs.""" + gas_costs = cls.gas_costs( + block_number=block_number, timestamp=timestamp + ) + base_map = super(London, cls).opcode_gas_map( + block_number=block_number, timestamp=timestamp + ) + return { + **base_map, + Opcodes.BASEFEE: gas_costs.G_BASE, + } + @classmethod def valid_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 @@ -1460,6 +2111,66 @@ def max_initcode_size( del block_number, timestamp return 0xC000 + @classmethod + def _calculate_create_gas( + cls, opcode: OpcodeBase, gas_costs: GasCosts + ) -> int: + """ + Calculate CREATE gas cost based on metadata (from Shanghai, includes + initcode cost). + """ + metadata = opcode.metadata + + # Get base cost from parent fork + base_cost = super(Shanghai, cls)._calculate_create_gas( + opcode, gas_costs + ) + + # Add initcode cost (EIP-3860) + init_code_size = metadata["init_code_size"] + init_code_words = (init_code_size + 31) // 32 + init_code_gas = gas_costs.G_INITCODE_WORD * init_code_words + + return base_cost + init_code_gas + + @classmethod + def _calculate_create2_gas( + cls, opcode: OpcodeBase, gas_costs: GasCosts + ) -> int: + """ + Calculate CREATE2 gas cost based on metadata (from Shanghai, + includes initcode cost). 
+ """ + metadata = opcode.metadata + + # Get base cost from parent fork (includes keccak hash cost) + base_cost = super(Shanghai, cls)._calculate_create2_gas( + opcode, gas_costs + ) + + # Add initcode cost (EIP-3860) + init_code_size = metadata["init_code_size"] + init_code_words = (init_code_size + 31) // 32 + init_code_gas = gas_costs.G_INITCODE_WORD * init_code_words + + return base_cost + init_code_gas + + @classmethod + def opcode_gas_map( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> Dict[OpcodeBase, int | Callable[[OpcodeBase], int]]: + """Add Shanghai opcodes gas costs.""" + gas_costs = cls.gas_costs( + block_number=block_number, timestamp=timestamp + ) + base_map = super(Shanghai, cls).opcode_gas_map( + block_number=block_number, timestamp=timestamp + ) + return { + **base_map, + Opcodes.PUSH0: gas_costs.G_BASE, + } + @classmethod def valid_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 @@ -1478,7 +2189,9 @@ class Cancun(Shanghai): "CELL_LENGTH": 2048, # EIP-2537: Main subgroup order = q, due to this BLS_MODULUS # every blob byte (uint256) must be smaller than 116 - "BLS_MODULUS": 0x73EDA753299D7D483339D80809A1D80553BDA402FFFE5BFEFFFFFFFF00000001, + "BLS_MODULUS": ( + 0x73EDA753299D7D483339D80809A1D80553BDA402FFFE5BFEFFFFFFFF00000001 + ), # https://github.com/ethereum/consensus-specs/blob/ # cc6996c22692d70e41b7a453d925172ee4b719ad/specs/deneb/ # polynomial-commitments.md?plain=1#L78 @@ -1493,7 +2206,8 @@ def get_blob_constant(cls, name: str) -> int | Literal["big"]: """Return blob constant if it exists.""" retrieved_constant = cls.BLOB_CONSTANTS.get(name) assert retrieved_constant is not None, ( - f"You tried to retrieve the blob constant {name} but it does not exist!" + f"You tried to retrieve the blob constant {name} but it does " + "not exist!" ) return retrieved_constant @@ -1755,9 +2469,10 @@ def pre_allocation_blockchain( new_allocation = { 0x000F3DF6D732807EF1319FB7B8BB8522D0BEAC02: { "nonce": 1, - "code": "0x3373fffffffffffffffffffffffffffffffffffffffe14604d57602036146024575f5f" - "fd5b5f35801560495762001fff810690815414603c575f5ffd5b62001fff01545f5260205ff35b5f" - "5ffd5b62001fff42064281555f359062001fff015500", + "code": "0x3373fffffffffffffffffffffffffffffffffffffffe14604d" + "57602036146024575f5ffd5b5f35801560495762001fff810690" + "815414603c575f5ffd5b62001fff01545f5260205ff35b5f5ffd" + "5b62001fff42064281555f359062001fff015500", } } return new_allocation | super(Cancun, cls).pre_allocation_blockchain() # type: ignore @@ -1794,6 +2509,45 @@ def engine_new_payload_beacon_root( del block_number, timestamp return True + @classmethod + def opcode_gas_map( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> Dict[OpcodeBase, int | Callable[[OpcodeBase], int]]: + """ + Return a mapping of opcodes to their gas costs for Cancun. + + Adds Cancun-specific opcodes: BLOBHASH, BLOBBASEFEE, TLOAD, TSTORE, + MCOPY. 
+ """ + gas_costs = cls.gas_costs( + block_number=block_number, timestamp=timestamp + ) + memory_expansion_calculator = cls.memory_expansion_gas_calculator( + block_number=block_number, timestamp=timestamp + ) + + # Get parent fork's opcode gas map + base_map = super(Cancun, cls).opcode_gas_map( + block_number=block_number, timestamp=timestamp + ) + + # Add Cancun-specific opcodes + return { + **base_map, + # EIP-4844: Shard Blob Transactions + Opcodes.BLOBHASH: gas_costs.G_VERY_LOW, + # EIP-7516: BLOBBASEFEE opcode + Opcodes.BLOBBASEFEE: gas_costs.G_BASE, + # EIP-1153: Transient storage opcodes + Opcodes.TLOAD: gas_costs.G_WARM_SLOAD, + Opcodes.TSTORE: gas_costs.G_WARM_SLOAD, + # EIP-5656: MCOPY - Memory copying instruction + Opcodes.MCOPY: cls._with_memory_expansion( + cls._with_data_copy(gas_costs.G_VERY_LOW, gas_costs), + memory_expansion_calculator, + ), + } + @classmethod def valid_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 @@ -1941,6 +2695,26 @@ def fn(*, data: BytesConvertible, floor: bool = False) -> int: return fn + @classmethod + def _calculate_call_gas( + cls, opcode: OpcodeBase, gas_costs: GasCosts + ) -> int: + """ + At Prague, the call gas cost needs to take the authorization into + account. + """ + metadata = opcode.metadata + + base_cost = super(Prague, cls)._calculate_call_gas(opcode, gas_costs) + + if metadata["delegated_address"] or metadata["delegated_address_warm"]: + if metadata["delegated_address_warm"]: + base_cost += gas_costs.G_WARM_ACCOUNT_ACCESS + else: + base_cost += gas_costs.G_COLD_ACCOUNT_ACCESS + + return base_cost + @classmethod def transaction_data_floor_cost_calculator( cls, *, block_number: int = 0, timestamp: int = 0 @@ -2211,6 +2985,22 @@ def block_rlp_size_limit( safety_margin = 2_097_152 return max_block_size - safety_margin + @classmethod + def opcode_gas_map( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> Dict[OpcodeBase, int | Callable[[OpcodeBase], int]]: + """Add Osaka opcodes gas costs.""" + gas_costs = cls.gas_costs( + block_number=block_number, timestamp=timestamp + ) + base_map = super(Osaka, cls).opcode_gas_map( + block_number=block_number, timestamp=timestamp + ) + return { + **base_map, + Opcodes.CLZ: gas_costs.G_LOW, + } + @classmethod def valid_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 diff --git a/packages/testing/src/execution_testing/forks/gas_costs.py b/packages/testing/src/execution_testing/forks/gas_costs.py index 7b73fc5f96..a94d4f0c96 100644 --- a/packages/testing/src/execution_testing/forks/gas_costs.py +++ b/packages/testing/src/execution_testing/forks/gas_costs.py @@ -20,6 +20,7 @@ class GasCosts: G_WARM_SLOAD: int G_COLD_SLOAD: int G_STORAGE_SET: int + G_STORAGE_UPDATE: int G_STORAGE_RESET: int R_STORAGE_CLEAR: int diff --git a/packages/testing/src/execution_testing/forks/tests/test_opcode_gas_costs.py b/packages/testing/src/execution_testing/forks/tests/test_opcode_gas_costs.py new file mode 100644 index 0000000000..6391429a37 --- /dev/null +++ b/packages/testing/src/execution_testing/forks/tests/test_opcode_gas_costs.py @@ -0,0 +1,602 @@ +"""Test opcode gas costs.""" + +import pytest + +from execution_testing.vm import Bytecode, Op + +from ..forks.forks import Osaka, Homestead +from ..helpers import Fork + + +@pytest.mark.parametrize( + "fork,opcode,expected_cost", + [ + pytest.param( + Osaka, + Op.MSTORE(new_memory_size=1), + Osaka.memory_expansion_gas_calculator()(new_bytes=1) + + Osaka.gas_costs().G_VERY_LOW, + id="mstore_memory_expansion", + ), + pytest.param( + Osaka, + 
Op.SSTORE, + Osaka.gas_costs().G_STORAGE_SET + Osaka.gas_costs().G_COLD_SLOAD, + id="sstore_defaults", + ), + pytest.param( + Osaka, + Op.SSTORE(key_warm=True), + Osaka.gas_costs().G_STORAGE_SET, + id="sstore_warm_key", + ), + # EXP tests + pytest.param( + Osaka, + Op.EXP(exponent=0), + Osaka.gas_costs().G_EXP, + id="exp_zero_exponent", + ), + pytest.param( + Osaka, + Op.EXP(exponent=0xFFFFFF), # 3 bytes + Osaka.gas_costs().G_EXP + Osaka.gas_costs().G_EXP_BYTE * 3, + id="exp_three_bytes", + ), + pytest.param( + Osaka, + Op.EXP(exponent=0x1FFFFFF), # 3 bytes + Osaka.gas_costs().G_EXP + Osaka.gas_costs().G_EXP_BYTE * 4, + id="exp_three_bytes_plus_one_bit", + ), + # SHA3 tests + pytest.param( + Osaka, + Op.SHA3(data_size=0), + Osaka.gas_costs().G_KECCAK_256, + id="sha3_zero_data", + ), + pytest.param( + Osaka, + Op.SHA3(data_size=64, new_memory_size=96), + Osaka.gas_costs().G_KECCAK_256 + + Osaka.gas_costs().G_KECCAK_256_WORD * 2 + + Osaka.memory_expansion_gas_calculator()(new_bytes=96), + id="sha3_with_data_and_memory", + ), + # BALANCE tests + pytest.param( + Osaka, + Op.BALANCE(address_warm=False), + Osaka.gas_costs().G_COLD_ACCOUNT_ACCESS, + id="balance_cold_address", + ), + pytest.param( + Osaka, + Op.BALANCE(address_warm=True), + Osaka.gas_costs().G_WARM_ACCOUNT_ACCESS, + id="balance_warm_address", + ), + # CALLDATACOPY tests + pytest.param( + Osaka, + Op.CALLDATACOPY(data_size=32, new_memory_size=32), + Osaka.gas_costs().G_VERY_LOW + + Osaka.gas_costs().G_COPY * 1 + + Osaka.memory_expansion_gas_calculator()(new_bytes=32), + id="calldatacopy_one_word", + ), + pytest.param( + Osaka, + Op.CALLDATACOPY( + data_size=64, new_memory_size=64, old_memory_size=32 + ), + Osaka.gas_costs().G_VERY_LOW + + Osaka.gas_costs().G_COPY * 2 + + Osaka.memory_expansion_gas_calculator()( + new_bytes=64, previous_bytes=32 + ), + id="calldatacopy_expansion", + ), + # CODECOPY tests + pytest.param( + Osaka, + Op.CODECOPY(data_size=96, new_memory_size=96), + Osaka.gas_costs().G_VERY_LOW + + Osaka.gas_costs().G_COPY * 3 + + Osaka.memory_expansion_gas_calculator()(new_bytes=96), + id="codecopy_three_words", + ), + # EXTCODESIZE tests + pytest.param( + Osaka, + Op.EXTCODESIZE(address_warm=False), + Osaka.gas_costs().G_COLD_ACCOUNT_ACCESS, + id="extcodesize_cold", + ), + pytest.param( + Osaka, + Op.EXTCODESIZE(address_warm=True), + Osaka.gas_costs().G_WARM_ACCOUNT_ACCESS, + id="extcodesize_warm", + ), + # EXTCODECOPY tests + pytest.param( + Osaka, + Op.EXTCODECOPY( + address_warm=True, data_size=32, new_memory_size=32 + ), + Osaka.gas_costs().G_WARM_ACCOUNT_ACCESS + + Osaka.gas_costs().G_COPY * 1 + + Osaka.memory_expansion_gas_calculator()(new_bytes=32), + id="extcodecopy_warm", + ), + pytest.param( + Osaka, + Op.EXTCODECOPY( + address_warm=False, data_size=64, new_memory_size=64 + ), + Osaka.gas_costs().G_COLD_ACCOUNT_ACCESS + + Osaka.gas_costs().G_COPY * 2 + + Osaka.memory_expansion_gas_calculator()(new_bytes=64), + id="extcodecopy_cold", + ), + # EXTCODEHASH tests + pytest.param( + Osaka, + Op.EXTCODEHASH(address_warm=False), + Osaka.gas_costs().G_COLD_ACCOUNT_ACCESS, + id="extcodehash_cold", + ), + pytest.param( + Osaka, + Op.EXTCODEHASH(address_warm=True), + Osaka.gas_costs().G_WARM_ACCOUNT_ACCESS, + id="extcodehash_warm", + ), + # RETURNDATACOPY tests + pytest.param( + Osaka, + Op.RETURNDATACOPY(data_size=32, new_memory_size=32), + Osaka.gas_costs().G_VERY_LOW + + Osaka.gas_costs().G_COPY * 1 + + Osaka.memory_expansion_gas_calculator()(new_bytes=32), + id="returndatacopy", + ), + # MLOAD tests + pytest.param( 
+ Osaka, + Op.MLOAD(new_memory_size=32), + Osaka.gas_costs().G_VERY_LOW + + Osaka.memory_expansion_gas_calculator()(new_bytes=32), + id="mload_memory_expansion", + ), + # MSTORE8 tests + pytest.param( + Osaka, + Op.MSTORE8(new_memory_size=1), + Osaka.gas_costs().G_VERY_LOW + + Osaka.memory_expansion_gas_calculator()(new_bytes=1), + id="mstore8_memory_expansion", + ), + # SLOAD tests + pytest.param( + Osaka, + Op.SLOAD(key_warm=False), + Osaka.gas_costs().G_COLD_SLOAD, + id="sload_cold", + ), + pytest.param( + Osaka, + Op.SLOAD(key_warm=True), + Osaka.gas_costs().G_WARM_SLOAD, + id="sload_warm", + ), + # MCOPY tests + pytest.param( + Osaka, + Op.MCOPY(data_size=32, new_memory_size=32), + Osaka.gas_costs().G_VERY_LOW + + Osaka.gas_costs().G_COPY * 1 + + Osaka.memory_expansion_gas_calculator()(new_bytes=32), + id="mcopy_one_word", + ), + pytest.param( + Osaka, + Op.MCOPY(data_size=96, new_memory_size=128, old_memory_size=64), + Osaka.gas_costs().G_VERY_LOW + + Osaka.gas_costs().G_COPY * 3 + + Osaka.memory_expansion_gas_calculator()( + new_bytes=128, previous_bytes=64 + ), + id="mcopy_expansion", + ), + # LOG0 tests + pytest.param( + Osaka, + Op.LOG0(data_size=32, new_memory_size=32), + Osaka.gas_costs().G_LOG + + Osaka.gas_costs().G_LOG_DATA * 32 + + Osaka.memory_expansion_gas_calculator()(new_bytes=32), + id="log0", + ), + # LOG1 tests + pytest.param( + Osaka, + Op.LOG1(data_size=64, new_memory_size=64), + Osaka.gas_costs().G_LOG + + Osaka.gas_costs().G_LOG_DATA * 64 + + Osaka.gas_costs().G_LOG_TOPIC + + Osaka.memory_expansion_gas_calculator()(new_bytes=64), + id="log1", + ), + # LOG2 tests + pytest.param( + Osaka, + Op.LOG2(data_size=128, new_memory_size=128), + Osaka.gas_costs().G_LOG + + Osaka.gas_costs().G_LOG_DATA * 128 + + Osaka.gas_costs().G_LOG_TOPIC * 2 + + Osaka.memory_expansion_gas_calculator()(new_bytes=128), + id="log2", + ), + # LOG3 tests + pytest.param( + Osaka, + Op.LOG3(data_size=256, new_memory_size=256), + Osaka.gas_costs().G_LOG + + Osaka.gas_costs().G_LOG_DATA * 256 + + Osaka.gas_costs().G_LOG_TOPIC * 3 + + Osaka.memory_expansion_gas_calculator()(new_bytes=256), + id="log3", + ), + # LOG4 tests + pytest.param( + Osaka, + Op.LOG4(data_size=512, new_memory_size=512), + Osaka.gas_costs().G_LOG + + Osaka.gas_costs().G_LOG_DATA * 512 + + Osaka.gas_costs().G_LOG_TOPIC * 4 + + Osaka.memory_expansion_gas_calculator()(new_bytes=512), + id="log4", + ), + # CREATE tests + pytest.param( + Osaka, + Op.CREATE(init_code_size=100, new_memory_size=100), + Osaka.gas_costs().G_CREATE + + Osaka.gas_costs().G_INITCODE_WORD * 4 # (100 + 31) // 32 = 4 + + Osaka.memory_expansion_gas_calculator()(new_bytes=100), + id="create_with_initcode", + ), + # CREATE2 tests + pytest.param( + Osaka, + Op.CREATE2(init_code_size=64, new_memory_size=64), + Osaka.gas_costs().G_CREATE + + Osaka.gas_costs().G_INITCODE_WORD * 2 + + Osaka.gas_costs().G_KECCAK_256_WORD * 2 + + Osaka.memory_expansion_gas_calculator()(new_bytes=64), + id="create2_with_initcode_and_hash", + ), + # CALL tests + pytest.param( + Osaka, + Op.CALL( + address_warm=True, value_transfer=False, new_memory_size=64 + ), + Osaka.gas_costs().G_WARM_ACCOUNT_ACCESS + + Osaka.memory_expansion_gas_calculator()(new_bytes=64), + id="call_warm_no_value", + ), + pytest.param( + Osaka, + Op.CALL(address_warm=False, delegated_address=True), + Osaka.gas_costs().G_COLD_ACCOUNT_ACCESS + + Osaka.gas_costs().G_COLD_ACCOUNT_ACCESS, + id="call_cold_delegated_address", + ), + pytest.param( + Osaka, + Op.CALL( + address_warm=False, + delegated_address=True, + 
delegated_address_warm=True, + ), + Osaka.gas_costs().G_COLD_ACCOUNT_ACCESS + + Osaka.gas_costs().G_WARM_ACCOUNT_ACCESS, + id="call_warm_delegated_address", + ), + pytest.param( + Osaka, + Op.CALL(address_warm=False, value_transfer=True, account_new=True), + Osaka.gas_costs().G_COLD_ACCOUNT_ACCESS + + Osaka.gas_costs().G_CALL_VALUE + + Osaka.gas_costs().G_NEW_ACCOUNT, + id="call_cold_account_new", + ), + pytest.param( + Homestead, + Op.CALL(address_warm=False, value_transfer=True, account_new=True), + Homestead.gas_costs().G_COLD_ACCOUNT_ACCESS, + id="call_cold_account_new_homestead", + ), + pytest.param( + Osaka, + Op.CALL( + address_warm=False, + value_transfer=True, + account_new=False, + new_memory_size=32, + ), + Osaka.gas_costs().G_COLD_ACCOUNT_ACCESS + + Osaka.gas_costs().G_CALL_VALUE + + Osaka.memory_expansion_gas_calculator()(new_bytes=32), + id="call_cold_with_value", + ), + pytest.param( + Osaka, + Op.CALL( + address_warm=False, + value_transfer=True, + account_new=True, + new_memory_size=32, + ), + Osaka.gas_costs().G_COLD_ACCOUNT_ACCESS + + Osaka.gas_costs().G_CALL_VALUE + + Osaka.gas_costs().G_NEW_ACCOUNT + + Osaka.memory_expansion_gas_calculator()(new_bytes=32), + id="call_cold_new_account", + ), + # CALLCODE tests + pytest.param( + Osaka, + Op.CALLCODE( + address_warm=True, value_transfer=False, new_memory_size=32 + ), + Osaka.gas_costs().G_WARM_ACCOUNT_ACCESS + + Osaka.memory_expansion_gas_calculator()(new_bytes=32), + id="callcode_warm", + ), + # DELEGATECALL tests + pytest.param( + Osaka, + Op.DELEGATECALL(address_warm=True, new_memory_size=32), + Osaka.gas_costs().G_WARM_ACCOUNT_ACCESS + + Osaka.memory_expansion_gas_calculator()(new_bytes=32), + id="delegatecall_warm", + ), + pytest.param( + Osaka, + Op.DELEGATECALL(address_warm=False, new_memory_size=64), + Osaka.gas_costs().G_COLD_ACCOUNT_ACCESS + + Osaka.memory_expansion_gas_calculator()(new_bytes=64), + id="delegatecall_cold", + ), + # STATICCALL tests + pytest.param( + Osaka, + Op.STATICCALL(address_warm=True, new_memory_size=32), + Osaka.gas_costs().G_WARM_ACCOUNT_ACCESS + + Osaka.memory_expansion_gas_calculator()(new_bytes=32), + id="staticcall_warm", + ), + pytest.param( + Osaka, + Op.STATICCALL(address_warm=False, new_memory_size=0), + Osaka.gas_costs().G_COLD_ACCOUNT_ACCESS, + id="staticcall_cold_no_memory", + ), + # RETURN tests + pytest.param( + Osaka, + Op.RETURN(new_memory_size=32), + Osaka.memory_expansion_gas_calculator()(new_bytes=32), + id="return_no_deposit", + ), + pytest.param( + Osaka, + Op.RETURN(code_deposit_size=100, new_memory_size=32), + Osaka.gas_costs().G_CODE_DEPOSIT_BYTE * 100 + + Osaka.memory_expansion_gas_calculator()(new_bytes=32), + id="return_with_code_deposit", + ), + # REVERT tests + pytest.param( + Osaka, + Op.REVERT(new_memory_size=64), + Osaka.memory_expansion_gas_calculator()(new_bytes=64), + id="revert_memory_expansion", + ), + # CLZ test (Osaka-specific) + pytest.param( + Osaka, + Op.CLZ, + Osaka.gas_costs().G_LOW, + id="clz_osaka", + ), + ], +) +def test_opcode_gas_costs(fork: Fork, opcode: Op, expected_cost: int) -> None: + op_gas_cost_calc = fork.opcode_gas_calculator() + assert expected_cost == op_gas_cost_calc(opcode) + + +@pytest.mark.parametrize( + "fork,bytecode,expected_cost", + [ + pytest.param( + Osaka, + Op.ADD + Op.SUB, + Osaka.gas_costs().G_VERY_LOW * 2, + id="sum_of_opcodes", + ), + pytest.param( + Osaka, + Op.ADD(1, 1), + Osaka.gas_costs().G_VERY_LOW * 3, + id="opcode_with_args", + ), + pytest.param( + Osaka, + Op.SSTORE(1, 2, key_warm=True), + 
Osaka.gas_costs().G_STORAGE_SET + Osaka.gas_costs().G_VERY_LOW * 2, + id="opcode_with_metadata", + ), + ], +) +def test_bytecode_gas_costs( + fork: Fork, bytecode: Bytecode, expected_cost: int +) -> None: + assert expected_cost == bytecode.gas_cost(fork) + + +@pytest.mark.parametrize( + "fork,opcode,expected_refund", + [ + pytest.param( + Osaka, + Op.SSTORE(original_value=0, new_value=0), + 0, + id="sstore_no_refund_zero_to_zero", + ), + pytest.param( + Osaka, + Op.SSTORE(original_value=1, new_value=1), + 0, + id="sstore_no_refund_nonzero_to_nonzero", + ), + pytest.param( + Osaka, + Op.SSTORE(original_value=1, new_value=0), + Osaka.gas_costs().R_STORAGE_CLEAR, + id="sstore_refund_clear_storage", + ), + pytest.param( + Osaka, + Op.ADD, + 0, + id="add_no_refund", + ), + pytest.param( + Osaka, + Op.MSTORE, + 0, + id="mstore_no_refund", + ), + ], +) +def test_opcode_refunds(fork: Fork, opcode: Op, expected_refund: int) -> None: + op_refund_calc = fork.opcode_refund_calculator() + assert expected_refund == op_refund_calc(opcode) + + +@pytest.mark.parametrize( + "fork,bytecode,expected_refund", + [ + pytest.param( + Osaka, + Op.SSTORE(original_value=1, new_value=0), + Osaka.gas_costs().R_STORAGE_CLEAR, + id="single_sstore_clear", + ), + pytest.param( + Osaka, + Op.SSTORE(original_value=2, new_value=0) + + Op.SSTORE(original_value=1, new_value=0), + Osaka.gas_costs().R_STORAGE_CLEAR * 2, + id="double_sstore_clear", + ), + pytest.param( + Osaka, + Op.SSTORE(original_value=1, new_value=2) + + Op.SSTORE(original_value=1, new_value=0), + Osaka.gas_costs().R_STORAGE_CLEAR, + id="mixed_sstore_one_clear", + ), + pytest.param( + Osaka, + Op.ADD + Op.SUB, + 0, + id="no_refund_opcodes", + ), + ], +) +def test_bytecode_refunds( + fork: Fork, bytecode: Bytecode, expected_refund: int +) -> None: + assert expected_refund == bytecode.refund(fork) + + +@pytest.mark.parametrize( + "fork,opcode,expected_cost", + [ + # No-op: new == current (value_reset=True on clean slot) + pytest.param( + Osaka, + Op.SSTORE(key_warm=True, original_value=0, new_value=0), + Osaka.gas_costs().G_WARM_SLOAD, + id="sstore_noop_zero_warm", # 0 → 0 + ), + pytest.param( + Osaka, + Op.SSTORE(key_warm=False, original_value=0, new_value=0), + Osaka.gas_costs().G_COLD_SLOAD + Osaka.gas_costs().G_WARM_SLOAD, + id="sstore_noop_zero_cold", # 0 → 0 + ), + pytest.param( + Osaka, + Op.SSTORE(key_warm=True, original_value=5, new_value=5), + Osaka.gas_costs().G_WARM_SLOAD, + id="sstore_noop_nonzero_warm", # 5 → 5 + ), + pytest.param( + Osaka, + Op.SSTORE(key_warm=False, original_value=5, new_value=5), + Osaka.gas_costs().G_COLD_SLOAD + Osaka.gas_costs().G_WARM_SLOAD, + id="sstore_noop_nonzero_cold", # 5 → 5 + ), + # Create storage: 0 → X (original == 0) + pytest.param( + Osaka, + Op.SSTORE(key_warm=True, new_value=5), + Osaka.gas_costs().G_STORAGE_SET, + id="sstore_create_warm", # 0 → 5 + ), + pytest.param( + Osaka, + Op.SSTORE(key_warm=False, new_value=5), + Osaka.gas_costs().G_COLD_SLOAD + Osaka.gas_costs().G_STORAGE_SET, + id="sstore_create_cold", # 0 → 5 + ), + # Modify storage: X → Y (original != 0, new != 0, new != original) + pytest.param( + Osaka, + Op.SSTORE(key_warm=True, original_value=5, new_value=7), + Osaka.gas_costs().G_STORAGE_RESET, + id="sstore_modify_warm", # 5 → 7 + ), + pytest.param( + Osaka, + Op.SSTORE(key_warm=False, original_value=5, new_value=7), + Osaka.gas_costs().G_COLD_SLOAD + Osaka.gas_costs().G_STORAGE_RESET, + id="sstore_modify_cold", # 5 → 7 + ), + # Clear storage: X → 0 (original != 0, new == 0) + pytest.param( + 
Osaka, + Op.SSTORE(key_warm=True, original_value=5, new_value=0), + Osaka.gas_costs().G_STORAGE_RESET, + id="sstore_clear_warm", # 5 → 0 + ), + pytest.param( + Osaka, + Op.SSTORE(key_warm=False, original_value=5, new_value=0), + Osaka.gas_costs().G_COLD_SLOAD + Osaka.gas_costs().G_STORAGE_RESET, + id="sstore_clear_cold", # 5 → 0 + ), + ], +) +def test_sstore_gas_costs(fork: Fork, opcode: Op, expected_cost: int) -> None: + """Test SSTORE gas costs for all single-SSTORE scenarios.""" + assert opcode.gas_cost(fork) == expected_cost diff --git a/packages/testing/src/execution_testing/tools/utility/generators.py b/packages/testing/src/execution_testing/tools/utility/generators.py index 0ff5cc4c0b..ef50b67ec7 100644 --- a/packages/testing/src/execution_testing/tools/utility/generators.py +++ b/packages/testing/src/execution_testing/tools/utility/generators.py @@ -446,8 +446,9 @@ def gas_test( pre: Alloc, setup_code: Bytecode, subject_code: Bytecode, + subject_code_warm: Bytecode | None = None, tear_down_code: Bytecode | None = None, - cold_gas: int, + cold_gas: int | None = None, warm_gas: int | None = None, subject_address: Address | None = None, subject_balance: int = 0, @@ -468,12 +469,18 @@ def gas_test( "Gas tests before Berlin are not supported due to CALL gas changes" ) + if cold_gas is None: + cold_gas = subject_code.gas_cost(fork) + if cold_gas <= 0: raise ValueError( f"Target gas allocations (cold_gas) must be > 0, got {cold_gas}" ) if warm_gas is None: - warm_gas = cold_gas + if subject_code_warm is not None: + warm_gas = subject_code_warm.gas_cost(fork) + else: + warm_gas = cold_gas sender = pre.fund_eoa() if tear_down_code is None: diff --git a/packages/testing/src/execution_testing/vm/__init__.py b/packages/testing/src/execution_testing/vm/__init__.py index 2305418904..3f9332ac62 100644 --- a/packages/testing/src/execution_testing/vm/__init__.py +++ b/packages/testing/src/execution_testing/vm/__init__.py @@ -1,5 +1,10 @@ """Ethereum Virtual Machine related definitions and utilities.""" +from .bases import ( + ForkOpcodeInterface, + OpcodeBase, + OpcodeGasCalculator, +) from .bytecode import Bytecode from .evm_types import EVMCodeType from .helpers import MemoryVariable, call_return_code @@ -18,12 +23,15 @@ __all__ = ( "Bytecode", "EVMCodeType", + "ForkOpcodeInterface", "Macro", "Macros", "MemoryVariable", "Op", "Opcode", + "OpcodeBase", "OpcodeCallArg", + "OpcodeGasCalculator", "Opcodes", "UndefinedOpcodes", "call_return_code", diff --git a/packages/testing/src/execution_testing/vm/bases.py b/packages/testing/src/execution_testing/vm/bases.py new file mode 100644 index 0000000000..d2f0e06c95 --- /dev/null +++ b/packages/testing/src/execution_testing/vm/bases.py @@ -0,0 +1,52 @@ +"""Base classes for the EVM.""" + +from abc import ABC, abstractmethod +from typing import Any, Dict, Protocol + + +class OpcodeBase: + """Base class for the opcode type.""" + + metadata: Dict[str, Any] + _name_: str = "" + + def __bytes__(self) -> bytes: + """Return the opcode byte representation.""" + raise NotImplementedError("OpcodeBase does not implement __bytes__") + + +class OpcodeGasCalculator(Protocol): + """ + A protocol to calculate the cost or refund of a single opcode. + """ + + def __call__(self, opcode: OpcodeBase) -> int: + """Return the gas cost or refund for executing the given opcode.""" + pass + + +class ForkOpcodeInterface(ABC): + """ + Interface for a fork that is used to calculate opcode gas costs + and refunds. 
+ """ + + @classmethod + @abstractmethod + def opcode_gas_calculator( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> OpcodeGasCalculator: + """ + Return callable that calculates the gas cost of a single opcode. + """ + pass + + @classmethod + @abstractmethod + def opcode_refund_calculator( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> OpcodeGasCalculator: + """ + Return callable that calculates the gas refund of a single opcode. + """ + pass diff --git a/packages/testing/src/execution_testing/vm/bytecode.py b/packages/testing/src/execution_testing/vm/bytecode.py index 6ee1b6014a..6c8dc984f9 100644 --- a/packages/testing/src/execution_testing/vm/bytecode.py +++ b/packages/testing/src/execution_testing/vm/bytecode.py @@ -1,6 +1,6 @@ """Ethereum Virtual Machine bytecode primitives and utilities.""" -from typing import Any, Self, SupportsBytes +from typing import Any, List, Self, SupportsBytes, Type from pydantic import GetCoreSchemaHandler from pydantic_core.core_schema import ( @@ -11,6 +11,8 @@ from execution_testing.base_types import Bytes, Hash +from .bases import ForkOpcodeInterface, OpcodeBase + class Bytecode: """ @@ -38,6 +40,7 @@ class Bytecode: min_stack_height: int terminating: bool + opcode_list: List[OpcodeBase] def __new__( cls, @@ -49,8 +52,11 @@ def __new__( min_stack_height: int | None = None, terminating: bool = False, name: str = "", + opcode_list: List[OpcodeBase] | None = None, ) -> Self: """Create new opcode instance.""" + if opcode_list is None: + opcode_list = [] if bytes_or_byte_code_base is None: instance = super().__new__(cls) instance._bytes_ = b"" @@ -60,6 +66,7 @@ def __new__( instance.max_stack_height = 0 instance.terminating = False instance._name_ = name + instance.opcode_list = opcode_list return instance if isinstance(bytes_or_byte_code_base, Bytecode): @@ -72,6 +79,7 @@ def __new__( obj.min_stack_height = bytes_or_byte_code_base.min_stack_height obj.max_stack_height = bytes_or_byte_code_base.max_stack_height obj.terminating = bytes_or_byte_code_base.terminating + obj.opcode_list = bytes_or_byte_code_base.opcode_list[:] obj._name_ = bytes_or_byte_code_base._name_ return obj @@ -93,6 +101,7 @@ def __new__( else: obj.max_stack_height = max_stack_height obj.terminating = terminating + obj.opcode_list = opcode_list obj._name_ = name return obj @@ -215,6 +224,7 @@ def __add__(self, other: "Bytecode | bytes | int | None") -> "Bytecode": min_stack_height=c_min, max_stack_height=c_max, terminating=other.terminating, + opcode_list=self.opcode_list + other.opcode_list, ) def __radd__(self, other: "Bytecode | int | None") -> "Bytecode": @@ -253,6 +263,38 @@ def keccak256(self) -> Hash: """Return the keccak256 hash of the opcode byte representation.""" return Bytes(self._bytes_).keccak256() + def gas_cost( + self, + fork: Type[ForkOpcodeInterface], + *, + block_number: int = 0, + timestamp: int = 0, + ) -> int: + """Use a fork object to calculate the gas used by this bytecode.""" + opcode_gas_calculator = fork.opcode_gas_calculator( + block_number=block_number, timestamp=timestamp + ) + total_gas = 0 + for opcode in self.opcode_list: + total_gas += opcode_gas_calculator(opcode) + return total_gas + + def refund( + self, + fork: Type[ForkOpcodeInterface], + *, + block_number: int = 0, + timestamp: int = 0, + ) -> int: + """Use a fork object to calculate the gas refund from this bytecode.""" + opcode_refund_calculator = fork.opcode_refund_calculator( + block_number=block_number, timestamp=timestamp + ) + total_refund = 0 + for opcode in 
self.opcode_list: + total_refund += opcode_refund_calculator(opcode) + return total_refund + @classmethod def __get_pydantic_core_schema__( cls, source_type: Any, handler: GetCoreSchemaHandler diff --git a/packages/testing/src/execution_testing/vm/opcodes.py b/packages/testing/src/execution_testing/vm/opcodes.py index a05295d49c..7d79d28c96 100644 --- a/packages/testing/src/execution_testing/vm/opcodes.py +++ b/packages/testing/src/execution_testing/vm/opcodes.py @@ -13,6 +13,7 @@ from typing import ( Any, Callable, + Dict, Iterable, List, Mapping, @@ -22,6 +23,7 @@ from execution_testing.base_types import to_bytes +from .bases import OpcodeBase from .bytecode import Bytecode @@ -42,7 +44,9 @@ def _get_int_size(n: int) -> int: def _stack_argument_to_bytecode( - arg: "int | bytes | SupportsBytes | str | Opcode | Bytecode | Iterable[int]", + arg: ( + "int | bytes | SupportsBytes | str | Opcode | Bytecode | Iterable[int]" + ), ) -> Bytecode: """Convert stack argument in an opcode or macro to bytecode.""" if isinstance(arg, Bytecode): @@ -78,7 +82,7 @@ def _stack_argument_to_bytecode( return new_opcode -class Opcode(Bytecode): +class Opcode(Bytecode, OpcodeBase): """ Represents a single Opcode instruction in the EVM, with extra metadata useful to parametrize tests. @@ -97,6 +101,8 @@ class Opcode(Bytecode): otherwise 0 - unchecked_stack: whether the bytecode should ignore stack checks when being called + - metadata: dictionary containing extra metadata about the opcode instance, + useful for gas cost calculations and other analysis """ @@ -107,6 +113,7 @@ class Opcode(Bytecode): ] kwargs: List[str] kwargs_defaults: KW_ARGS_DEFAULTS_TYPE + original_opcode: Optional["Opcode"] = None unchecked_stack: bool = False def __new__( @@ -123,11 +130,15 @@ def __new__( unchecked_stack: bool = False, terminating: bool = False, kwargs: List[str] | None = None, - kwargs_defaults: Optional[KW_ARGS_DEFAULTS_TYPE] = None, + kwargs_defaults: KW_ARGS_DEFAULTS_TYPE | None = None, + metadata: Dict[str, Any] | None = None, + original_opcode: Optional["Opcode"] = None, ) -> "Opcode": """Create new opcode instance.""" if kwargs_defaults is None: kwargs_defaults = {} + if metadata is None: + metadata = {} if type(opcode_or_byte) is Opcode: # Required because Enum class calls the base class # with the instantiated object as parameter. @@ -165,6 +176,9 @@ def __new__( else: obj.kwargs = kwargs obj.kwargs_defaults = kwargs_defaults + obj.metadata = metadata + obj.original_opcode = original_opcode + obj.opcode_list = [obj] return obj raise TypeError( "Opcode constructor '__new__' didn't return an instance!" @@ -224,7 +238,8 @@ def __getitem__( ) else: raise TypeError( - "Opcode data portion must be either an int or bytes/hex string" + "Opcode data portion must be either an int or bytes/hex " + "string" ) popped_stack_items = self.popped_stack_items pushed_stack_items = self.pushed_stack_items @@ -255,16 +270,69 @@ def __getitem__( terminating=self.terminating, kwargs=self.kwargs, kwargs_defaults=self.kwargs_defaults, + metadata=self.metadata, + original_opcode=self, ) + new_opcode.opcode_list = [new_opcode] new_opcode._name_ = f"{self._name_}_0x{data_portion.hex()}" return new_opcode + def with_metadata(self, **metadata: Any) -> "Opcode": + """ + Create a copy of this opcode with updated metadata. + + Validates metadata keys against metadata and merges with existing + metadata. 
+ + Args: + **metadata: Metadata key-value pairs to set or update + + Returns: + A new Opcode instance with the updated metadata + + Raises: + ValueError: If invalid metadata keys are provided + + Example: + >>> warm_sstore = Op.SSTORE.with_metadata(key_warm=True, + new_value=2) + + """ + # Validate metadata keys + for key in metadata: + if key not in self.metadata: + raise ValueError( + f"Invalid metadata key '{key}' for opcode {self._name_}. " + f"Valid metadata keys: {list(self.metadata.keys())}" + ) + + # Create a new opcode instance with updated metadata + new_opcode = Opcode( + bytes(self), + popped_stack_items=self.popped_stack_items, + pushed_stack_items=self.pushed_stack_items, + min_stack_height=self.min_stack_height, + max_stack_height=self.max_stack_height, + data_portion_length=self.data_portion_length, + data_portion_formatter=self.data_portion_formatter, + unchecked_stack=self.unchecked_stack, + terminating=self.terminating, + kwargs=self.kwargs, + kwargs_defaults=self.kwargs_defaults, + # Merge defaults, existing metadata, and new metadata + metadata={**self.metadata, **metadata}, + original_opcode=self, + ) + new_opcode.opcode_list = [new_opcode] + new_opcode._name_ = self._name_ + return new_opcode + def __call__( self, *args_t: "int | bytes | str | Opcode | Bytecode | Iterable[int]", unchecked: bool = False, **kwargs: "int | bytes | str | Opcode | Bytecode", - ) -> Bytecode: + ) -> "Bytecode | Opcode": """ Make all opcode instances callable to return formatted bytecode, which constitutes a data portion, that is located after the opcode byte, @@ -297,50 +365,64 @@ def __call__( args: List["int | bytes | str | Opcode | Bytecode | Iterable[int]"] = ( list(args_t) ) - - if self.has_data_portion(): + opcode = self + + # handle metadata first + metadata = {} + for key in opcode.metadata: + if key in kwargs: + metadata[key] = kwargs.pop(key) + if metadata: + opcode = opcode.with_metadata(**metadata) + if len(args) == 0 and len(kwargs) == 0: + # Nothing else to do, return + return opcode + + if opcode.has_data_portion(): if len(args) == 0: raise ValueError( "Opcode with data portion requires at least one argument" ) - assert type(self) is Opcode + assert type(opcode) is Opcode get_item_arg = args.pop() assert not isinstance(get_item_arg, Bytecode) - return self[get_item_arg](*args) + return opcode[get_item_arg](*args) - if self.kwargs is not None and len(kwargs) > 0: + if opcode.kwargs is not None and len(kwargs) > 0: assert len(args) == 0, ( f"Cannot mix positional and keyword arguments {args} {kwargs}" ) # Validate that all provided kwargs are valid - invalid_kwargs = set(kwargs.keys()) - set(self.kwargs) + invalid_kwargs = set(kwargs.keys()) - set(opcode.kwargs) if invalid_kwargs: raise ValueError( - f"Invalid keyword argument(s) {list(invalid_kwargs)} for opcode " - f"{self._name_}. Valid arguments are: {self.kwargs}" + f"Invalid keyword argument(s) {list(invalid_kwargs)} for " + f"opcode {opcode._name_}. " + f"Valid arguments are: {opcode.kwargs}" ) - for kw in self.kwargs: + for kw in opcode.kwargs: args.append( kwargs[kw] if kw in kwargs - else self.kwargs_defaults.get(kw, 0) + else opcode.kwargs_defaults.get(kw, 0) ) # The rest of the arguments form the stack. - if len(args) != self.popped_stack_items and not ( - unchecked or self.unchecked_stack + if len(args) != opcode.popped_stack_items and not ( + unchecked or opcode.unchecked_stack ): raise ValueError( - f"Opcode {self._name_} requires {self.popped_stack_items} stack elements, but " - f"{len(args)} were provided. 
Use 'unchecked=True' parameter to ignore this check." + f"Opcode {opcode._name_} requires {opcode.popped_stack_items} " + f"stack elements, but {len(args)} were provided. " + "Use 'unchecked=True' parameter to ignore this check." ) pre_opcode_bytecode = Bytecode() while len(args) > 0: pre_opcode_bytecode += _stack_argument_to_bytecode(args.pop()) - return pre_opcode_bytecode + self + return pre_opcode_bytecode + opcode def __lt__(self, other: "Opcode") -> bool: """Compare two opcodes by their integer value.""" @@ -350,6 +432,25 @@ def __gt__(self, other: "Opcode") -> bool: """Compare two opcodes by their integer value.""" return self.int() > other.int() + def get_original_opcode(self) -> "Opcode": + """Return the original opcode instance.""" + if self.original_opcode is not None: + return self.original_opcode + return self + + def __hash__(self) -> int: + """Hash the opcode by its integer value.""" + return hash(self.get_original_opcode().int()) + + def __eq__(self, other: object) -> bool: + """Compare two opcodes by their integer value.""" + if isinstance(other, Opcode): + return ( + self.get_original_opcode().int() + == other.get_original_opcode().int() + ) + return super().__eq__(other) + def int(self) -> int: """Return integer representation of the opcode.""" return int.from_bytes(self, byteorder="big") @@ -361,6 +462,16 @@ def has_data_portion(self) -> bool: or self.data_portion_formatter is not None ) + def get_metadata(self) -> Dict[str, Any]: + """ + Get a copy of the current metadata. + + Returns: + A dictionary containing the current metadata values + + """ + return self.metadata.copy() + OpcodeCallArg = int | bytes | str | Bytecode | Iterable[int] @@ -798,7 +909,12 @@ class Opcodes(Opcode, Enum): Source: [evm.codes/#09](https://www.evm.codes/#09) """ - EXP = Opcode(0x0A, popped_stack_items=2, pushed_stack_items=1) + EXP = Opcode( + 0x0A, + popped_stack_items=2, + pushed_stack_items=1, + metadata={"exponent": 0}, + ) """ EXP(a, exponent) = a ** exponent ---- @@ -825,6 +941,10 @@ class Opcodes(Opcode, Enum): - static_gas = 10 - dynamic_gas = 50 * exponent_byte_size + Metadata + ---- + - exponent: the exponent value (default: 0) + Source: [evm.codes/#0A](https://www.evm.codes/#0A) """ @@ -1295,6 +1415,7 @@ class Opcodes(Opcode, Enum): popped_stack_items=2, pushed_stack_items=1, kwargs=["offset", "size"], + metadata={"data_size": 0, "new_memory_size": 0, "old_memory_size": 0}, ) """ SHA3(offset, size) = hash @@ -1323,6 +1444,12 @@ class Opcodes(Opcode, Enum): - static_gas = 30 - dynamic_gas = 6 * minimum_word_size + memory_expansion_cost + Metadata + ---- + - data_size: number of bytes being hashed (default: 0) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#20](https://www.evm.codes/#20) """ @@ -1355,7 +1482,11 @@ class Opcodes(Opcode, Enum): """ BALANCE = Opcode( - 0x31, popped_stack_items=1, pushed_stack_items=1, kwargs=["address"] + 0x31, + popped_stack_items=1, + pushed_stack_items=1, + kwargs=["address"], + metadata={"address_warm": False}, ) """ BALANCE(address) = balance @@ -1383,6 +1514,10 @@ class Opcodes(Opcode, Enum): - static_gas = 0 - dynamic_gas = 100 if warm_address, 2600 if cold_address + Metadata + ---- + - address_warm: whether the address is already warm (default: False) + Source: [evm.codes/#31](https://www.evm.codes/#31) """ @@ -1535,7 +1670,10 @@ class Opcodes(Opcode, Enum): """ CALLDATACOPY = Opcode( - 0x37, popped_stack_items=3, 
kwargs=["dest_offset", "offset", "size"] + 0x37, + popped_stack_items=3, + kwargs=["dest_offset", "offset", "size"], + metadata={"data_size": 0, "new_memory_size": 0, "old_memory_size": 0}, ) """ CALLDATACOPY(dest_offset, offset, size) @@ -1565,6 +1703,12 @@ class Opcodes(Opcode, Enum): - static_gas = 3 - dynamic_gas = 3 * minimum_word_size + memory_expansion_cost + Metadata + ---- + - data_size: number of bytes being copied (default: 0) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#37](https://www.evm.codes/#37) """ @@ -1597,7 +1741,10 @@ class Opcodes(Opcode, Enum): """ CODECOPY = Opcode( - 0x39, popped_stack_items=3, kwargs=["dest_offset", "offset", "size"] + 0x39, + popped_stack_items=3, + kwargs=["dest_offset", "offset", "size"], + metadata={"data_size": 0, "new_memory_size": 0, "old_memory_size": 0}, ) """ CODECOPY(dest_offset, offset, size) @@ -1623,6 +1770,12 @@ class Opcodes(Opcode, Enum): - static_gas = 3 - dynamic_gas = 3 * minimum_word_size + memory_expansion_cost + Metadata + ---- + - data_size: number of bytes being copied (default: 0) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#39](https://www.evm.codes/#39) """ @@ -1651,7 +1804,11 @@ class Opcodes(Opcode, Enum): """ EXTCODESIZE = Opcode( - 0x3B, popped_stack_items=1, pushed_stack_items=1, kwargs=["address"] + 0x3B, + popped_stack_items=1, + pushed_stack_items=1, + kwargs=["address"], + metadata={"address_warm": False}, ) """ EXTCODESIZE(address) = size @@ -1678,6 +1835,10 @@ class Opcodes(Opcode, Enum): - static_gas = 0 - dynamic_gas = 100 if warm_address, 2600 if cold_address + Metadata + ---- + - address_warm: whether the address is already warm (default: False) + Source: [evm.codes/#3B](https://www.evm.codes/#3B) """ @@ -1685,6 +1846,12 @@ class Opcodes(Opcode, Enum): 0x3C, popped_stack_items=4, kwargs=["address", "dest_offset", "offset", "size"], + metadata={ + "address_warm": False, + "data_size": 0, + "new_memory_size": 0, + "old_memory_size": 0, + }, ) """ EXTCODECOPY(address, dest_offset, offset, size) @@ -1716,6 +1883,13 @@ class Opcodes(Opcode, Enum): - dynamic_gas = 3 * minimum_word_size + memory_expansion_cost + address_access_cost + Metadata + ---- + - address_warm: whether the address is already warm (default: False) + - data_size: number of bytes being copied (default: 0) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#3C](https://www.evm.codes/#3C) """ @@ -1744,7 +1918,10 @@ class Opcodes(Opcode, Enum): """ RETURNDATACOPY = Opcode( - 0x3E, popped_stack_items=3, kwargs=["dest_offset", "offset", "size"] + 0x3E, + popped_stack_items=3, + kwargs=["dest_offset", "offset", "size"], + metadata={"data_size": 0, "new_memory_size": 0, "old_memory_size": 0}, ) """ RETURNDATACOPY(dest_offset, offset, size) @@ -1771,11 +1948,21 @@ class Opcodes(Opcode, Enum): - static_gas = 3 - dynamic_gas = 3 * minimum_word_size + memory_expansion_cost + Metadata + ---- + - data_size: number of bytes being copied (default: 0) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#3E](https://www.evm.codes/#3E) """ EXTCODEHASH = Opcode( - 0x3F, popped_stack_items=1, 
pushed_stack_items=1, kwargs=["address"] + 0x3F, + popped_stack_items=1, + pushed_stack_items=1, + kwargs=["address"], + metadata={"address_warm": False}, ) """ EXTCODEHASH(address) = hash @@ -1804,6 +1991,10 @@ class Opcodes(Opcode, Enum): - static_gas = 0 - dynamic_gas = 100 if warm_address, 2600 if cold_address + Metadata + ---- + - address_warm: whether the address is already warm (default: False) + Source: [evm.codes/#3F](https://www.evm.codes/#3F) """ @@ -2151,7 +2342,11 @@ class Opcodes(Opcode, Enum): """ MLOAD = Opcode( - 0x51, popped_stack_items=1, pushed_stack_items=1, kwargs=["offset"] + 0x51, + popped_stack_items=1, + pushed_stack_items=1, + kwargs=["offset"], + metadata={"new_memory_size": 0, "old_memory_size": 0}, ) """ MLOAD(offset) = value @@ -2179,10 +2374,20 @@ class Opcodes(Opcode, Enum): - static_gas = 3 - dynamic_gas = memory_expansion_cost + Metadata + ---- + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#51](https://www.evm.codes/#51) """ - MSTORE = Opcode(0x52, popped_stack_items=2, kwargs=["offset", "value"]) + MSTORE = Opcode( + 0x52, + popped_stack_items=2, + kwargs=["offset", "value"], + metadata={"new_memory_size": 0, "old_memory_size": 0}, + ) """ MSTORE(offset, value) ---- @@ -2209,10 +2414,20 @@ class Opcodes(Opcode, Enum): - static_gas = 3 - dynamic_gas = memory_expansion_cost + Metadata + ---- + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#52](https://www.evm.codes/#52) """ - MSTORE8 = Opcode(0x53, popped_stack_items=2, kwargs=["offset", "value"]) + MSTORE8 = Opcode( + 0x53, + popped_stack_items=2, + kwargs=["offset", "value"], + metadata={"new_memory_size": 0, "old_memory_size": 0}, + ) """ MSTORE8(offset, value) ---- @@ -2236,11 +2451,20 @@ class Opcodes(Opcode, Enum): - static_gas = 3 - dynamic_gas = memory_expansion_cost + Metadata + ---- + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#53](https://www.evm.codes/#53) """ SLOAD = Opcode( - 0x54, popped_stack_items=1, pushed_stack_items=1, kwargs=["key"] + 0x54, + popped_stack_items=1, + pushed_stack_items=1, + kwargs=["key"], + metadata={"key_warm": False}, ) """ SLOAD(key) = value @@ -2268,10 +2492,24 @@ class Opcodes(Opcode, Enum): - static_gas = 0 - dynamic_gas = 100 if warm_address, 2600 if cold_address + Metadata + ---- + - key_warm: whether the storage key is already warm (default: False) + Source: [evm.codes/#54](https://www.evm.codes/#54) """ - SSTORE = Opcode(0x55, popped_stack_items=2, kwargs=["key", "value"]) + SSTORE = Opcode( + 0x55, + popped_stack_items=2, + kwargs=["key", "value"], + metadata={ + "key_warm": False, + "original_value": 0, + "current_value": None, + "new_value": 1, + }, + ) """ SSTORE(key, value) ---- @@ -2315,6 +2553,16 @@ class Opcodes(Opcode, Enum): base_dynamic_gas += 2100 ``` + Metadata + ---- + - key_warm: whether the key had already been accessed during the + transaction, either by SLOAD or SSTORE (default: False) + - original_value: value the storage key had at the beginning of + the transaction (default: 0) + - current_value: value the storage key holds at the execution + of the opcode (default: None, which means same as original_value) + - new_value: value being set by the opcode (default: 1) + Source: 
[evm.codes/#55](https://www.evm.codes/#55) """ @@ -2571,7 +2819,10 @@ class Opcodes(Opcode, Enum): """ MCOPY = Opcode( - 0x5E, popped_stack_items=3, kwargs=["dest_offset", "offset", "size"] + 0x5E, + popped_stack_items=3, + kwargs=["dest_offset", "offset", "size"], + metadata={"data_size": 0, "new_memory_size": 0, "old_memory_size": 0}, ) """ MCOPY(dest_offset, offset, size) @@ -2601,6 +2852,12 @@ class Opcodes(Opcode, Enum): - static_gas = 3 - dynamic_gas = 3 * minimum_word_size + memory_expansion_cost + Metadata + ---- + - data_size: number of bytes being copied (default: 0) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [EIP-5656](https://eips.ethereum.org/EIPS/eip-5656) """ @@ -4696,7 +4953,12 @@ class Opcodes(Opcode, Enum): Source: [evm.codes/#9F](https://www.evm.codes/#9F) """ - LOG0 = Opcode(0xA0, popped_stack_items=2, kwargs=["offset", "size"]) + LOG0 = Opcode( + 0xA0, + popped_stack_items=2, + kwargs=["offset", "size"], + metadata={"data_size": 0, "new_memory_size": 0, "old_memory_size": 0}, + ) """ LOG0(offset, size) ---- @@ -4723,11 +4985,20 @@ class Opcodes(Opcode, Enum): - static_gas = 375 - dynamic_gas = 375 * topic_count + 8 * size + memory_expansion_cost + Metadata + ---- + - data_size: number of bytes in the log data (default: 0) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#A0](https://www.evm.codes/#A0) """ LOG1 = Opcode( - 0xA1, popped_stack_items=3, kwargs=["offset", "size", "topic_1"] + 0xA1, + popped_stack_items=3, + kwargs=["offset", "size", "topic_1"], + metadata={"data_size": 0, "new_memory_size": 0, "old_memory_size": 0}, ) """ LOG1(offset, size, topic_1) @@ -4756,6 +5027,12 @@ class Opcodes(Opcode, Enum): - static_gas = 375 - dynamic_gas = 375 * topic_count + 8 * size + memory_expansion_cost + Metadata + ---- + - data_size: number of bytes in the log data (default: 0) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#A1](https://www.evm.codes/#A1) """ @@ -4763,6 +5040,7 @@ class Opcodes(Opcode, Enum): 0xA2, popped_stack_items=4, kwargs=["offset", "size", "topic_1", "topic_2"], + metadata={"data_size": 0, "new_memory_size": 0, "old_memory_size": 0}, ) """ LOG2(offset, size, topic_1, topic_2) @@ -4792,6 +5070,12 @@ class Opcodes(Opcode, Enum): - static_gas = 375 - dynamic_gas = 375 * topic_count + 8 * size + memory_expansion_cost + Metadata + ---- + - data_size: number of bytes in the log data (default: 0) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#A2](https://www.evm.codes/#A2) """ @@ -4799,6 +5083,7 @@ class Opcodes(Opcode, Enum): 0xA3, popped_stack_items=5, kwargs=["offset", "size", "topic_1", "topic_2", "topic_3"], + metadata={"data_size": 0, "new_memory_size": 0, "old_memory_size": 0}, ) """ LOG3(offset, size, topic_1, topic_2, topic_3) @@ -4829,6 +5114,12 @@ class Opcodes(Opcode, Enum): - static_gas = 375 - dynamic_gas = 375 * topic_count + 8 * size + memory_expansion_cost + Metadata + ---- + - data_size: number of bytes in the log data (default: 0) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: 
[evm.codes/#A3](https://www.evm.codes/#A3) """ @@ -4836,6 +5127,7 @@ class Opcodes(Opcode, Enum): 0xA4, popped_stack_items=6, kwargs=["offset", "size", "topic_1", "topic_2", "topic_3", "topic_4"], + metadata={"data_size": 0, "new_memory_size": 0, "old_memory_size": 0}, ) """ LOG4(offset, size, topic_1, topic_2, topic_3, topic_4) @@ -4867,6 +5159,12 @@ class Opcodes(Opcode, Enum): - static_gas = 375 - dynamic_gas = 375 * topic_count + 8 * size + memory_expansion_cost + Metadata + ---- + - data_size: number of bytes in the log data (default: 0) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#A4](https://www.evm.codes/#A4) """ @@ -5407,6 +5705,11 @@ class Opcodes(Opcode, Enum): popped_stack_items=3, pushed_stack_items=1, kwargs=["value", "offset", "size"], + metadata={ + "init_code_size": 0, + "new_memory_size": 0, + "old_memory_size": 0, + }, ) """ CREATE(value, offset, size) = address @@ -5444,6 +5747,12 @@ class Opcodes(Opcode, Enum): code_deposit_cost ``` + Metadata + ---- + - init_code_size: size of the initialization code in bytes (default: 0) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#F0](https://www.evm.codes/#F0) """ @@ -5461,6 +5770,15 @@ class Opcodes(Opcode, Enum): "ret_size", ], kwargs_defaults={"gas": GAS}, + metadata={ + "address_warm": False, + "value_transfer": False, + "account_new": False, + "new_memory_size": 0, + "old_memory_size": 0, + "delegated_address": False, + "delegated_address_warm": False, + }, ) """ CALL(gas, address, value, args_offset, args_size, ret_offset, ret_size) @@ -5501,6 +5819,18 @@ class Opcodes(Opcode, Enum): value_to_empty_account_cost ``` + Metadata + ---- + - address_warm: whether the address is already warm (default: False) + - value_transfer: whether value is being transferred (default: False) + - account_new: whether creating a new account (default: False) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + - delegated_address: whether the target is a delegated account + (default: False) + - delegated_address_warm: whether the delegated address of the target + is already warm (default: False) + Source: [evm.codes/#F1](https://www.evm.codes/#F1) """ @@ -5518,6 +5848,15 @@ class Opcodes(Opcode, Enum): "ret_size", ], kwargs_defaults={"gas": GAS}, + metadata={ + "address_warm": False, + "value_transfer": False, + "account_new": False, + "new_memory_size": 0, + "old_memory_size": 0, + "delegated_address": False, + "delegated_address_warm": False, + }, ) """ CALLCODE(gas, address, value, args_offset, args_size, ret_offset, ret_size) @@ -5558,11 +5897,31 @@ class Opcodes(Opcode, Enum): address_access_cost + positive_value_cost ``` + Metadata + ---- + - address_warm: whether the address is already warm (default: False) + - value_transfer: whether value is being transferred (default: False) + - account_new: whether creating a new account (default: False) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + - delegated_address: whether the target is a delegated account + (default: False) + - delegated_address_warm: whether the delegated address of the target + is already warm (default: False) + Source: 
[evm.codes/#F2](https://www.evm.codes/#F2) """ RETURN = Opcode( - 0xF3, popped_stack_items=2, kwargs=["offset", "size"], terminating=True + 0xF3, + popped_stack_items=2, + kwargs=["offset", "size"], + terminating=True, + metadata={ + "new_memory_size": 0, + "old_memory_size": 0, + "code_deposit_size": 0, + }, ) """ RETURN(offset, size) @@ -5591,6 +5950,13 @@ class Opcodes(Opcode, Enum): - static_gas = 0 - dynamic_gas = memory_expansion_cost + Metadata + ---- + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + - code_deposit_size: size of bytecode being deployed in bytes (default: 0, + only for RETURN in initcode) + Source: [evm.codes/#F3](https://www.evm.codes/#F3) """ @@ -5607,6 +5973,15 @@ class Opcodes(Opcode, Enum): "ret_size", ], kwargs_defaults={"gas": GAS}, + metadata={ + "address_warm": False, + "value_transfer": False, + "account_new": False, + "new_memory_size": 0, + "old_memory_size": 0, + "delegated_address": False, + "delegated_address_warm": False, + }, ) """ DELEGATECALL(gas, address, args_offset, args_size, ret_offset, ret_size) @@ -5644,6 +6019,18 @@ class Opcodes(Opcode, Enum): - dynamic_gas = memory_expansion_cost + code_execution_cost + address_access_cost + Metadata + ---- + - address_warm: whether the address is already warm (default: False) + - value_transfer: always False for DELEGATECALL (default: False) + - account_new: always False for DELEGATECALL (default: False) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + - delegated_address: whether the target is a delegated account + (default: False) + - delegated_address_warm: whether the delegated address of the target + is already warm (default: False) + Source: [evm.codes/#F4](https://www.evm.codes/#F4) """ @@ -5652,6 +6039,11 @@ class Opcodes(Opcode, Enum): popped_stack_items=4, pushed_stack_items=1, kwargs=["value", "offset", "size", "salt"], + metadata={ + "init_code_size": 0, + "new_memory_size": 0, + "old_memory_size": 0, + }, ) """ CREATE2(value, offset, size, salt) = address @@ -5691,6 +6083,12 @@ class Opcodes(Opcode, Enum): + deployment_code_execution_cost + code_deposit_cost ``` + Metadata + ---- + - init_code_size: size of the initialization code in bytes (default: 0) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#F5](https://www.evm.codes/#F5) """ @@ -5796,6 +6194,13 @@ class Opcodes(Opcode, Enum): "ret_size", ], kwargs_defaults={"gas": GAS}, + metadata={ + "address_warm": False, + "new_memory_size": 0, + "old_memory_size": 0, + "delegated_address": False, + "delegated_address_warm": False, + }, ) """ STATICCALL(gas, address, args_offset, args_size, ret_offset, ret_size) @@ -5832,6 +6237,18 @@ class Opcodes(Opcode, Enum): - dynamic_gas = memory_expansion_cost + code_execution_cost + address_access_cost + Metadata + ---- + - address_warm: whether the address is already warm (default: False) + - value_transfer: always False for STATICCALL (default: False) + - account_new: always False for STATICCALL (default: False) + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + - delegated_address: whether the target is a delegated account + (default: False) + - delegated_address_warm: whether the delegated address of the 
target + is already warm (default: False) + Source: [evm.codes/#FA](https://www.evm.codes/#FA) """ @@ -5903,7 +6320,11 @@ class Opcodes(Opcode, Enum): """ REVERT = Opcode( - 0xFD, popped_stack_items=2, kwargs=["offset", "size"], terminating=True + 0xFD, + popped_stack_items=2, + kwargs=["offset", "size"], + terminating=True, + metadata={"new_memory_size": 0, "old_memory_size": 0}, ) """ REVERT(offset, size) @@ -5928,6 +6349,11 @@ class Opcodes(Opcode, Enum): static_gas = 0 dynamic_gas = memory_expansion_cost + Metadata + ---- + - new_memory_size: memory size after expansion in bytes (default: 0) + - old_memory_size: memory size before expansion in bytes (default: 0) + Source: [evm.codes/#FD](https://www.evm.codes/#FD) """ @@ -5959,7 +6385,12 @@ class Opcodes(Opcode, Enum): Source: [evm.codes/#FE](https://www.evm.codes/#FE) """ - SELFDESTRUCT = Opcode(0xFF, popped_stack_items=1, kwargs=["address"]) + SELFDESTRUCT = Opcode( + 0xFF, + popped_stack_items=1, + kwargs=["address"], + metadata={"address_warm": False, "account_new": False}, + ) """ SELFDESTRUCT(address) ---- @@ -5980,6 +6411,13 @@ class Opcodes(Opcode, Enum): ---- 5000 + Metadata + ---- + - address_warm: whether the beneficiary address is already warm + (default: False) + - account_new: whether creating a new beneficiary account, requires + non-zero balance in the source account (default: False) + Source: [evm.codes/#FF](https://www.evm.codes/#FF) """ diff --git a/tests/berlin/eip2929_gas_cost_increases/test_call.py b/tests/berlin/eip2929_gas_cost_increases/test_call.py index a1b6238085..0ac8f376e6 100644 --- a/tests/berlin/eip2929_gas_cost_increases/test_call.py +++ b/tests/berlin/eip2929_gas_cost_increases/test_call.py @@ -24,8 +24,8 @@ def test_call_insufficient_balance( Test a regular CALL to see if it warms the destination with insufficient balance. 
""" - gas_costs = fork.gas_costs() destination = pre.fund_eoa(1) + warm_code = Op.BALANCE(destination, address_warm=True) contract_address = pre.deploy_contract( # Perform the aborted external calls Op.SSTORE( @@ -42,8 +42,7 @@ def test_call_insufficient_balance( ) # Measure the gas cost for BALANCE operation + CodeGasMeasure( - code=Op.BALANCE(destination), - overhead_cost=gas_costs.G_VERY_LOW, # PUSH20 costs 3 gas + code=warm_code, extra_stack_items=1, # BALANCE puts balance on stack sstore_key=1, ), @@ -63,7 +62,7 @@ def test_call_insufficient_balance( contract_address: Account( storage={ 0: 0, # The CALL is aborted - 1: gas_costs.G_WARM_ACCOUNT_ACCESS, # Warm access cost + 1: warm_code.gas_cost(fork), }, ), } diff --git a/tests/cancun/eip5656_mcopy/test_mcopy_memory_expansion.py b/tests/cancun/eip5656_mcopy/test_mcopy_memory_expansion.py index 9b12efb34d..0d49e55766 100644 --- a/tests/cancun/eip5656_mcopy/test_mcopy_memory_expansion.py +++ b/tests/cancun/eip5656_mcopy/test_mcopy_memory_expansion.py @@ -39,12 +39,21 @@ @pytest.fixture -def callee_bytecode(dest: int, src: int, length: int) -> Bytecode: +def callee_bytecode( + dest: int, src: int, length: int, initial_memory: bytes +) -> Bytecode: """Callee performs a single mcopy operation and then returns.""" bytecode = Bytecode() # Copy the initial memory - bytecode += Op.CALLDATACOPY(0x00, 0x00, Op.CALLDATASIZE()) + bytecode += Op.CALLDATACOPY( + dest_offset=0x00, + offset=0x00, + size=Op.CALLDATASIZE, + old_memory_size=0, + new_memory_size=len(initial_memory), + data_size=len(initial_memory), + ) # Pushes for the return operation bytecode += Op.PUSH1(0x00) + Op.PUSH1(0x00) @@ -52,7 +61,17 @@ def callee_bytecode(dest: int, src: int, length: int) -> Bytecode: bytecode += Op.SSTORE(slot_code_worked, value_code_worked) # Perform the mcopy operation - bytecode += Op.MCOPY(dest, src, length) + new_memory_size = len(initial_memory) + if dest + length > new_memory_size and length > 0: + new_memory_size = dest + length + bytecode += Op.MCOPY( + dest, + src, + length, + old_memory_size=len(initial_memory), + new_memory_size=new_memory_size, + data_size=length, + ) bytecode += Op.RETURN @@ -68,79 +87,35 @@ def tx_access_list() -> List[AccessList]: @pytest.fixture -def call_exact_cost( - fork: Fork, - initial_memory: bytes, - dest: int, - length: int, - tx_access_list: List[AccessList], -) -> int: - """ - Return the exact cost of the subcall, based on the initial memory and the - length of the copy. - """ - # Starting from EIP-7623, we need to use an access list to raise the - # intrinsic gas cost to be above the floor data cost. 
- cost_memory_bytes = fork.memory_expansion_gas_calculator() - gas_costs = fork.gas_costs() - tx_intrinsic_gas_cost_calculator = ( - fork.transaction_intrinsic_cost_calculator() - ) - - mcopy_cost = 3 - mcopy_cost += 3 * ((length + 31) // 32) - if length > 0 and dest + length > len(initial_memory): - mcopy_cost += cost_memory_bytes( - new_bytes=dest + length, previous_bytes=len(initial_memory) - ) - - calldatacopy_cost = 3 - calldatacopy_cost += 3 * ((len(initial_memory) + 31) // 32) - calldatacopy_cost += cost_memory_bytes(new_bytes=len(initial_memory)) - - pushes_cost = gas_costs.G_VERY_LOW * 9 - calldatasize_cost = gas_costs.G_BASE - - sstore_cost = 22100 - return ( - tx_intrinsic_gas_cost_calculator( - calldata=initial_memory, access_list=tx_access_list - ) - + mcopy_cost - + calldatacopy_cost - + pushes_cost - + calldatasize_cost - + sstore_cost - ) - - -@pytest.fixture -def block_gas_limit() -> int: # noqa: D103 - return 100_000_000 +def block_gas_limit(env: Environment) -> int: # noqa: D103 + return env.gas_limit @pytest.fixture def tx_gas_limit( # noqa: D103 fork: Fork, - call_exact_cost: int, + callee_bytecode: Bytecode, block_gas_limit: int, successful: bool, + initial_memory: bytes, + tx_access_list: List[AccessList], ) -> int: + tx_intrinsic_gas_cost_calculator = ( + fork.transaction_intrinsic_cost_calculator() + ) + call_exact_cost = callee_bytecode.gas_cost(fork) return min( - call_exact_cost - (0 if successful else 1), + call_exact_cost + - (0 if successful else 1) + + tx_intrinsic_gas_cost_calculator( + calldata=initial_memory, access_list=tx_access_list + ), # If the transaction gas limit cap is not set (pre-osaka), # use the block gas limit fork.transaction_gas_limit_cap() or block_gas_limit, ) -@pytest.fixture -def env( # noqa: D103 - block_gas_limit: int, -) -> Environment: - return Environment(gas_limit=block_gas_limit) - - @pytest.fixture def caller_address(pre: Alloc, callee_bytecode: bytes) -> Address: # noqa: D103 return pre.deploy_contract(code=callee_bytecode) @@ -261,11 +236,6 @@ def test_mcopy_memory_expansion( "half_max_length_expansion", ], ) -@pytest.mark.parametrize( - "call_exact_cost", - [2**128 - 1], - ids=[""], -) # Limit subcall gas, otherwise it would be impossibly large @pytest.mark.parametrize("successful", [False]) @pytest.mark.parametrize( "initial_memory", diff --git a/tests/frontier/create/test_create_deposit_oog.py b/tests/frontier/create/test_create_deposit_oog.py index 085c15a632..18932d3ca3 100644 --- a/tests/frontier/create/test_create_deposit_oog.py +++ b/tests/frontier/create/test_create_deposit_oog.py @@ -6,70 +6,100 @@ from execution_testing import ( Account, Alloc, - Environment, Fork, Op, StateTestFiller, - Storage, Transaction, compute_create_address, ) -from execution_testing.forks import Byzantium, Frontier, Homestead +from execution_testing.forks import Byzantium, Frontier SLOT_CREATE_RESULT = 1 SLOT_CREATE_RESULT_PRE = 0xDEADBEEF @pytest.mark.valid_from("Frontier") +@pytest.mark.parametrize("enough_gas", [True, False]) @pytest.mark.with_all_create_opcodes def test_create_deposit_oog( state_test: StateTestFiller, fork: Fork, pre: Alloc, create_opcode: Op, + enough_gas: bool, ) -> None: """Run create deploys with a lot of deposited code.""" - deposited_len = 10_000 - initcode = Op.RETURN(0, deposited_len) - tx_gas_limit = 1_000_000 - assert tx_gas_limit < deposited_len * fork.gas_costs().G_CODE_DEPOSIT_BYTE + deposited_len = 32 + expand_memory_code = Op.MSTORE8( + # Expand memory first + offset=deposited_len - 1, + value=0, + 
new_memory_size=deposited_len, # For gas accounting + ) + return_code = Op.RETURN( + offset=0, + size=deposited_len, + code_deposit_size=deposited_len, # For gas accounting + ) + initcode = expand_memory_code + return_code sender = pre.fund_eoa() - expect_post = Storage() - code = pre.deploy_contract( - code=Op.MSTORE(0, Op.PUSH32(bytes(initcode))) - + Op.SSTORE( - SLOT_CREATE_RESULT, - create_opcode(offset=32 - len(initcode), size=len(initcode)), + factory_memory_expansion_code = Op.MSTORE( + 0, + Op.PUSH32(bytes(initcode)), + new_memory_size=32, # For gas accounting + ) + factory_create_code = create_opcode( + offset=32 - len(initcode), + size=len(initcode), + init_code_size=len(initcode), # For gas accounting + ) + factory_code = ( + factory_memory_expansion_code + factory_create_code + Op.STOP + ) + + factory_address = pre.deploy_contract(code=factory_code) + create_gas = return_code.gas_cost(fork) + expand_memory_code.gas_cost(fork) + if not enough_gas: + create_gas -= 1 + if fork >= Byzantium: + # Increment the gas for the 63/64 rule + create_gas = (create_gas * 64) // 63 + call_gas = create_gas + factory_code.gas_cost(fork) + caller_address = pre.deploy_contract( + code=Op.CALL( + gas=call_gas, address=factory_address, ret_offset=0, ret_size=32 ) + Op.STOP, - nonce=1, - storage={SLOT_CREATE_RESULT: SLOT_CREATE_RESULT_PRE}, ) new_address = compute_create_address( - address=code, nonce=1, initcode=initcode, salt=0, opcode=create_opcode + address=factory_address, + nonce=1, + initcode=initcode, + salt=0, + opcode=create_opcode, ) - if fork == Frontier: - expect_post[SLOT_CREATE_RESULT] = new_address - elif fork == Homestead: - # Before the introduction of the 63/64th rule there is no - # gas left for SSTOREing the return value. - expect_post[SLOT_CREATE_RESULT] = SLOT_CREATE_RESULT_PRE - else: - expect_post[SLOT_CREATE_RESULT] = 0 - tx = Transaction( - gas_limit=tx_gas_limit, - to=code, + gas_limit=10_000_000, + to=caller_address, sender=sender, protected=fork >= Byzantium, ) + created_account: Account | None = Account(code=b"\x00" * deposited_len) + if not enough_gas: + if fork > Frontier: + created_account = None + else: + # At Frontier, OOG on return yields an empty account. + created_account = Account() + post = { - code: Account(storage=expect_post), - new_address: Account(code=b"", nonce=0) if fork == Frontier else None, + factory_address: Account(nonce=2), + caller_address: Account(nonce=1), + new_address: created_account, } - state_test(env=Environment(), pre=pre, post=post, tx=tx) + state_test(pre=pre, post=post, tx=tx) diff --git a/tests/frontier/opcodes/test_all_opcodes.py b/tests/frontier/opcodes/test_all_opcodes.py index 1e1ec957e7..022a1f4968 100644 --- a/tests/frontier/opcodes/test_all_opcodes.py +++ b/tests/frontier/opcodes/test_all_opcodes.py @@ -214,139 +214,26 @@ def constant_gas_opcodes(fork: Fork) -> Generator[ParameterSet, None, None]: per fork. 
""" valid_opcodes = set(fork.valid_opcodes()) - gas_costs = fork.gas_costs() - opcode_floor_gas = { - Op.ADD: gas_costs.G_VERY_LOW, - Op.MUL: gas_costs.G_LOW, - Op.SUB: gas_costs.G_VERY_LOW, - Op.DIV: gas_costs.G_LOW, - Op.SDIV: gas_costs.G_LOW, - Op.MOD: gas_costs.G_LOW, - Op.SMOD: gas_costs.G_LOW, - Op.ADDMOD: gas_costs.G_MID, - Op.MULMOD: gas_costs.G_MID, - Op.EXP: gas_costs.G_HIGH, - Op.SIGNEXTEND: gas_costs.G_LOW, - Op.LT: gas_costs.G_VERY_LOW, - Op.GT: gas_costs.G_VERY_LOW, - Op.SLT: gas_costs.G_VERY_LOW, - Op.SGT: gas_costs.G_VERY_LOW, - Op.EQ: gas_costs.G_VERY_LOW, - Op.ISZERO: gas_costs.G_VERY_LOW, - Op.AND: gas_costs.G_VERY_LOW, - Op.OR: gas_costs.G_VERY_LOW, - Op.XOR: gas_costs.G_VERY_LOW, - Op.NOT: gas_costs.G_VERY_LOW, - Op.BYTE: gas_costs.G_VERY_LOW, - Op.SHL: gas_costs.G_VERY_LOW, - Op.SHR: gas_costs.G_VERY_LOW, - Op.SAR: gas_costs.G_VERY_LOW, - Op.CLZ: gas_costs.G_LOW, - Op.SHA3: gas_costs.G_KECCAK_256, - Op.ADDRESS: gas_costs.G_BASE, - Op.BALANCE: gas_costs.G_WARM_ACCOUNT_ACCESS, - Op.ORIGIN: gas_costs.G_BASE, - Op.CALLER: gas_costs.G_BASE, - Op.CALLVALUE: gas_costs.G_BASE, - Op.CALLDATALOAD: gas_costs.G_VERY_LOW, - Op.CALLDATASIZE: gas_costs.G_BASE, - Op.CALLDATACOPY: gas_costs.G_COPY, - Op.CODESIZE: gas_costs.G_BASE, - Op.CODECOPY: gas_costs.G_COPY, - Op.GASPRICE: gas_costs.G_BASE, - Op.EXTCODESIZE: gas_costs.G_WARM_ACCOUNT_ACCESS, - Op.EXTCODECOPY: gas_costs.G_WARM_ACCOUNT_ACCESS, - Op.RETURNDATASIZE: gas_costs.G_BASE, - Op.RETURNDATACOPY: gas_costs.G_COPY, - Op.EXTCODEHASH: gas_costs.G_WARM_ACCOUNT_ACCESS, - Op.BLOCKHASH: gas_costs.G_BLOCKHASH, - Op.COINBASE: gas_costs.G_BASE, - Op.TIMESTAMP: gas_costs.G_BASE, - Op.NUMBER: gas_costs.G_BASE, - Op.PREVRANDAO: gas_costs.G_BASE, - Op.GASLIMIT: gas_costs.G_BASE, - Op.CHAINID: gas_costs.G_BASE, - Op.SELFBALANCE: gas_costs.G_LOW, - Op.BASEFEE: gas_costs.G_BASE, - Op.BLOBHASH: gas_costs.G_VERY_LOW, - Op.BLOBBASEFEE: gas_costs.G_BASE, - Op.POP: gas_costs.G_BASE, - Op.MLOAD: gas_costs.G_VERY_LOW, - Op.MSTORE: gas_costs.G_VERY_LOW, - Op.MSTORE8: gas_costs.G_VERY_LOW, - Op.SLOAD: gas_costs.G_WARM_SLOAD, - Op.JUMP: gas_costs.G_MID, - Op.JUMPI: gas_costs.G_HIGH, - Op.PC: gas_costs.G_BASE, - Op.MSIZE: gas_costs.G_BASE, - Op.GAS: gas_costs.G_BASE, - Op.JUMPDEST: gas_costs.G_JUMPDEST, - Op.TLOAD: gas_costs.G_WARM_SLOAD, - Op.TSTORE: gas_costs.G_WARM_SLOAD, - Op.MCOPY: gas_costs.G_VERY_LOW, - Op.PUSH0: gas_costs.G_BASE, - Op.LOG0: gas_costs.G_LOG + (0 * gas_costs.G_LOG_TOPIC), - Op.LOG1: gas_costs.G_LOG + (1 * gas_costs.G_LOG_TOPIC), - Op.LOG2: gas_costs.G_LOG + (2 * gas_costs.G_LOG_TOPIC), - Op.LOG3: gas_costs.G_LOG + (3 * gas_costs.G_LOG_TOPIC), - Op.LOG4: gas_costs.G_LOG + (4 * gas_costs.G_LOG_TOPIC), - Op.CREATE: gas_costs.G_TRANSACTION_CREATE, - Op.CALL: gas_costs.G_WARM_ACCOUNT_ACCESS, - Op.CALLCODE: gas_costs.G_WARM_ACCOUNT_ACCESS, - Op.DELEGATECALL: gas_costs.G_WARM_ACCOUNT_ACCESS, - Op.CREATE2: gas_costs.G_TRANSACTION_CREATE, - Op.STATICCALL: gas_costs.G_WARM_ACCOUNT_ACCESS, - Op.SELFDESTRUCT: gas_costs.G_SELF_DESTRUCT, - Op.STOP: 0, - Op.RETURN: 0, - Op.REVERT: 0, - Op.INVALID: 0, - } - - # PUSHx, SWAPx, DUPx have uniform gas costs - for opcode in valid_opcodes: - if 0x60 <= opcode.int() <= 0x9F: - opcode_floor_gas[opcode] = gas_costs.G_VERY_LOW - for opcode in sorted(valid_opcodes): # SSTORE - untestable due to 2300 gas stipend rule if opcode == Op.SSTORE: continue - warm_gas = opcode_floor_gas[opcode] - if warm_gas == 0: + if opcode.gas_cost(fork) == 0: # zero constant gas opcodes - untestable continue - cold_gas = warm_gas 
- if opcode in [ - Op.BALANCE, - Op.EXTCODESIZE, - Op.EXTCODECOPY, - Op.EXTCODEHASH, - Op.CALL, - Op.CALLCODE, - Op.DELEGATECALL, - Op.STATICCALL, - ]: - cold_gas = gas_costs.G_COLD_ACCOUNT_ACCESS - elif opcode == Op.SELFDESTRUCT: - # Add the cost of accessing the send all destination account. - cold_gas += gas_costs.G_COLD_ACCOUNT_ACCESS - elif opcode == Op.SLOAD: - cold_gas = gas_costs.G_COLD_SLOAD - yield pytest.param(opcode, warm_gas, cold_gas, id=f"{opcode}") + yield pytest.param( + opcode, + id=f"{opcode}", + ) @pytest.mark.valid_from("Berlin") -@pytest.mark.parametrize_by_fork( - "opcode,warm_gas,cold_gas", constant_gas_opcodes -) +@pytest.mark.parametrize_by_fork("opcode", constant_gas_opcodes) def test_constant_gas( state_test: StateTestFiller, pre: Alloc, - opcode: Op, + opcode: Opcode, fork: Fork, - warm_gas: int, - cold_gas: int, ) -> None: """Test that constant gas opcodes work as expected.""" # Using Op.GAS as salt to guarantee no address collision on CREATE2. @@ -357,13 +244,21 @@ def test_constant_gas( + prepare_stack_constant_gas_oog(opcode) + create2_salt ) + warm_opcode_metadata = {} + if "address_warm" in opcode.metadata: + warm_opcode_metadata["address_warm"] = True + if "key_warm" in opcode.metadata: + warm_opcode_metadata["key_warm"] = True + if warm_opcode_metadata: + warm_opcode = opcode(**warm_opcode_metadata) + else: + warm_opcode = opcode gas_test( fork=fork, state_test=state_test, pre=pre, setup_code=setup_code, subject_code=opcode, + subject_code_warm=warm_opcode, tear_down_code=prepare_suffix(opcode), - cold_gas=cold_gas, - warm_gas=warm_gas, ) diff --git a/tests/frontier/opcodes/test_dup.py b/tests/frontier/opcodes/test_dup.py index 6ff2e859e7..35fd3c428f 100644 --- a/tests/frontier/opcodes/test_dup.py +++ b/tests/frontier/opcodes/test_dup.py @@ -69,12 +69,9 @@ def test_dup( tx = Transaction( ty=0x0, - nonce=0, to=account, gas_limit=500000, - gas_price=10, protected=False if fork in [Frontier, Homestead] else True, - data="", sender=sender, ) diff --git a/tests/frontier/opcodes/test_exp.py b/tests/frontier/opcodes/test_exp.py index bf4851cdd8..2fcbbe9f99 100644 --- a/tests/frontier/opcodes/test_exp.py +++ b/tests/frontier/opcodes/test_exp.py @@ -15,13 +15,6 @@ REFERENCE_SPEC_VERSION = "N/A" -def exp_gas(fork: Fork, exponent: int) -> int: - """Calculate gas cost for EXP opcode given the exponent.""" - gas_costs = fork.gas_costs() - byte_len = (exponent.bit_length() + 7) // 8 - return gas_costs.G_EXP + gas_costs.G_EXP_BYTE * byte_len - - @pytest.mark.valid_from("Berlin") @pytest.mark.parametrize( "a", [0, 1, pytest.param(2**256 - 1, id="a2to256minus1")] @@ -46,14 +39,10 @@ def test_gas( fork: Fork, ) -> None: """Test that EXP gas works as expected.""" - gas_cost = exp_gas(fork, exponent) - gas_test( fork=fork, state_test=state_test, pre=pre, setup_code=Op.PUSH32(exponent) + Op.PUSH32(a), - subject_code=Op.EXP, - cold_gas=gas_cost, - warm_gas=gas_cost, + subject_code=Op.EXP(exponent=exponent), ) diff --git a/tests/frontier/opcodes/test_log.py b/tests/frontier/opcodes/test_log.py index bb8fc25070..e3cfe61c4f 100644 --- a/tests/frontier/opcodes/test_log.py +++ b/tests/frontier/opcodes/test_log.py @@ -15,19 +15,6 @@ REFERENCE_SPEC_VERSION = "N/A" -def log_gas(fork: Fork, topics: int, data_size: int) -> int: - """ - Calculate gas cost for LOGx opcodes given the number of topics and data - size. 
- """ - gas_costs = fork.gas_costs() - return ( - gas_costs.G_LOG - + gas_costs.G_LOG_TOPIC * topics - + gas_costs.G_LOG_DATA * data_size - ) - - @pytest.mark.valid_from("Berlin") @pytest.mark.parametrize( "opcode,topics", @@ -52,8 +39,6 @@ def test_gas( fork: Fork, ) -> None: """Test that LOGx gas works as expected.""" - gas_cost = log_gas(fork, topics, data_size) - gas_test( fork=fork, state_test=state_test, @@ -62,7 +47,5 @@ def test_gas( + Op.PUSH1(0) * topics + Op.PUSH32(data_size) + Op.PUSH1(0), - subject_code=opcode, - cold_gas=gas_cost, - warm_gas=gas_cost, + subject_code=opcode(data_size=data_size), ) diff --git a/tests/osaka/eip7939_count_leading_zeros/test_count_leading_zeros.py b/tests/osaka/eip7939_count_leading_zeros/test_count_leading_zeros.py index ff1da2f5ee..ec4e7ac17f 100644 --- a/tests/osaka/eip7939_count_leading_zeros/test_count_leading_zeros.py +++ b/tests/osaka/eip7939_count_leading_zeros/test_count_leading_zeros.py @@ -136,7 +136,7 @@ def test_clz_gas_cost( CodeGasMeasure( code=Op.CLZ(Op.PUSH1(1)), extra_stack_items=1, - overhead_cost=fork.gas_costs().G_VERY_LOW, + overhead_cost=Op.PUSH1.gas_cost(fork), ), ), storage={"0x00": "0xdeadbeef"}, @@ -145,7 +145,7 @@ def test_clz_gas_cost( tx = Transaction(to=contract_address, sender=sender, gas_limit=200_000) post = { contract_address: Account( # Cost measured is CLZ + PUSH1 - storage={"0x00": fork.gas_costs().G_LOW} + storage={"0x00": Op.CLZ.gas_cost(fork)} ), } state_test(pre=pre, post=post, tx=tx) @@ -172,9 +172,7 @@ def test_clz_gas_cost_boundary( call_code = Op.SSTORE( 0, Op.CALL( - gas=fork.gas_costs().G_VERY_LOW - + Spec.CLZ_GAS_COST - + gas_cost_delta, + gas=code.gas_cost(fork) + gas_cost_delta, address=contract_address, ), ) From 7282b64fbd0fd44f5e93746800be0ff6dde025ed Mon Sep 17 00:00:00 2001 From: fselmo Date: Tue, 11 Nov 2025 08:18:46 -0700 Subject: [PATCH 008/154] feat(specs): Implement EIP-7928: Block-Level Access Lists fix(tests): Fix Amsterdam filling after rebase fix(specs): Fix issues with new ruff + mypy rules after rebase - bal -> block_access_list; re-add custom rlp encoding for block access list - bytes to uint - move away from method-style - Update EIP-7928 implementation: system contracts at index 0, migrate to RLP - System contracts (parent hash, beacon root) now use block_access_index 0 - Transactions use block_access_index 1 to len(transactions) - Post-execution changes use block_access_index len(transactions) + 1 - Migrated from SSZ to RLP encoding as per updated EIP-7928 spec - Updated all tests to match new API and structure - Replaced tx_index with block_access_index throughout codebase - add system contract logic - add markdown docstrings - update BAL format; address comments - ssz encoding and bal validation - six ssz types - bal tests - balspecs fix: do not track setting empty code to a new account (#19) fix: track implicit SLOAD within SSTORE for OOG cases (#18) refactor: Put back explicit acct tracking outside 7702 delegation path (#17) fix non-tracked 7702 authority for invalid delegations (#16) * fix non-tracked 7702 authority for invalid delegations * fix: lint issues * fix: track delegation target when loaded as call target * fix: track delegation target when loaded as call target from call opcodes * chore: fix issues with documentation generation Fix self-destruct cases with pre-execution balance cache / tracking * fix self-destruct implementation * fix self-destruct tracking balance * fix it in the bal finalization by filtering * add balance reset and fix tests * simplify 
pre-balance tracking not using snapshots fix duplicated code entries for in transaction self destruct fix self destruct in same transaction bug fix call/delagate call tracking bug fix zero-value transfer tracking (#6) * fix zero-value transfer tracking * fix reverted frame tracking * rename variables * fix missing addresses bug * fix: docs run & move imports to top of file refactor: move rlp_utils to block_access_lists; bal -> block_access_lists Some remaining fixes due to large refactor in `forks/osaka`: - Move BALs from amsterdam -> forks/amsterdam - rename: build -> build_block_access_list - fix docc issues move state change tracker to State correct system contract addresses Fixes to communicate with BALs EEST branch: - fix(bal): Initialize the state tracker before system contract calls - We were missing system contract calls to beacon roots and history contracts. This change initializes the state tracker before system contract calls and passes the tracker to these calls if post-Amsterdam. - fix(docs): Fix issues with toxenvs: lint, doc, json_infra - fix(t8n): Only initialize the bal_change_tracker for amsterdam - feat(fork criteria): Index upcoming forks for better ordering / fix issues - chore(forks): Fix issues from lint after rebase with Osaka latest - fix(setuptools): Update packages to include amsterdam - chore(lint): Fix 'tox -e static' issues - Fix bug in tracker Manually cherry-picked from e72991bf3876563900d5c2bcc2442b0a1eeb439f Author: nerolation - chore(tests): Attempt to resolve issues with CI tests - chore(lint): fix issues from running ``tox -e static`` locally - refactor(bal): Send BAL as a list over t8n tool - fix(amsterdam): Add change tracker to state test in t8n - chore(lint,tests): Fix tests after moving bal from osaka -> amsterdam - chore(forks): Move bals from Osaka to Amsterdam - chore(lint): Fix lint issues - refactor(bal): Send the full bal object and bal_hash over t8n - If we send the full object over JSON, we can model_validate() on ESST. - If we send the hash, once we fill the pydantic model, we can get the rlp and the hash and validate that our objects match while only really validating the parts of the BAL we are interested in for each test. 
- chore: point to working eest branch - chore(bals): Remove unused SSZ utils.py The SSZ implementation is no longer needed as we are now using RLP - refactor(bals): Clean up BAL module types and imports - Bytes -> Bytes32 type for storage slots - Remove unused imports / fix imports / fix linting - Update function signatures to match tracker - fix(bals-tx-index): Track bal indexes in t8n Keep track of BAL index state in t8n --- .../execution_testing/fixtures/blockchain.py | 16 + .../src/execution_testing/forks/base_fork.py | 19 + .../execution_testing/forks/forks/forks.py | 37 + .../src/execution_testing/specs/blockchain.py | 37 +- pyproject.toml | 20 + .../amsterdam/block_access_lists/__init__.py | 57 ++ .../amsterdam/block_access_lists/builder.py | 435 ++++++++++++ .../amsterdam/block_access_lists/rlp_types.py | 130 ++++ .../amsterdam/block_access_lists/rlp_utils.py | 232 ++++++ .../amsterdam/block_access_lists/tracker.py | 667 ++++++++++++++++++ src/ethereum/forks/amsterdam/blocks.py | 18 + src/ethereum/forks/amsterdam/fork.py | 63 +- src/ethereum/forks/amsterdam/state.py | 84 ++- src/ethereum/forks/amsterdam/vm/__init__.py | 6 + .../forks/amsterdam/vm/eoa_delegation.py | 12 + .../amsterdam/vm/instructions/environment.py | 17 +- .../amsterdam/vm/instructions/storage.py | 36 +- .../forks/amsterdam/vm/instructions/system.py | 37 +- .../forks/amsterdam/vm/interpreter.py | 21 + src/ethereum/forks/osaka/vm/eoa_delegation.py | 1 + src/ethereum/genesis.py | 6 + .../evm_tools/loaders/fork_loader.py | 17 + .../evm_tools/t8n/__init__.py | 43 +- src/ethereum_spec_tools/evm_tools/t8n/env.py | 3 + .../evm_tools/t8n/t8n_types.py | 94 +++ whitelist.txt | 8 +- 26 files changed, 2083 insertions(+), 33 deletions(-) create mode 100644 src/ethereum/forks/amsterdam/block_access_lists/__init__.py create mode 100644 src/ethereum/forks/amsterdam/block_access_lists/builder.py create mode 100644 src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py create mode 100644 src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py create mode 100644 src/ethereum/forks/amsterdam/block_access_lists/tracker.py diff --git a/packages/testing/src/execution_testing/fixtures/blockchain.py b/packages/testing/src/execution_testing/fixtures/blockchain.py index 77c58d984e..14116d2803 100644 --- a/packages/testing/src/execution_testing/fixtures/blockchain.py +++ b/packages/testing/src/execution_testing/fixtures/blockchain.py @@ -203,6 +203,9 @@ class FixtureHeader(CamelModel): requests_hash: ( Annotated[Hash, HeaderForkRequirement("requests")] | None ) = Field(None) + block_access_list_hash: ( + Annotated[Hash, HeaderForkRequirement("bal_hash")] | None + ) = Field(None, alias="blockAccessListHash") fork: Fork | None = Field(None, exclude=True) @@ -287,6 +290,11 @@ def genesis(cls, fork: Fork, env: Environment, state_root: Hash) -> Self: "requests_hash": Requests() if fork.header_requests_required(block_number=0, timestamp=0) else None, + "block_access_list_hash": ( + BlockAccessList().rlp_hash + if fork.header_bal_hash_required(block_number=0, timestamp=0) + else None + ), "fork": fork, } return cls(**environment_values, **extras) @@ -416,6 +424,14 @@ def from_fixture_header( "Invalid header for engine_newPayload" ) + if fork.engine_execution_payload_block_access_list( + block_number=header.number, timestamp=header.timestamp + ): + if block_access_list is None: + raise ValueError( + f"`block_access_list` is required in engine `ExecutionPayload` for >={fork}." 
+ ) + execution_payload = FixtureExecutionPayload.from_fixture_header( header=header, transactions=transactions, diff --git a/packages/testing/src/execution_testing/forks/base_fork.py b/packages/testing/src/execution_testing/forks/base_fork.py index a061497b25..2a465af677 100644 --- a/packages/testing/src/execution_testing/forks/base_fork.py +++ b/packages/testing/src/execution_testing/forks/base_fork.py @@ -351,6 +351,14 @@ def header_requests_required( """Return true if the header must contain beacon chain requests.""" pass + @classmethod + @abstractmethod + def header_bal_hash_required( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> bool: + """Return true if the header must contain block access list hash.""" + pass + # Gas related abstract methods @classmethod @@ -743,6 +751,17 @@ def engine_new_payload_target_blobs_per_block( """ pass + @classmethod + @abstractmethod + def engine_execution_payload_block_access_list( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> bool: + """ + Return `True` if the engine api version requires execution payload to + include a `block_access_list`. + """ + pass + @classmethod @abstractmethod def engine_payload_attribute_target_blobs_per_block( diff --git a/packages/testing/src/execution_testing/forks/forks/forks.py b/packages/testing/src/execution_testing/forks/forks/forks.py index 62c108c65f..e10e87d86e 100644 --- a/packages/testing/src/execution_testing/forks/forks/forks.py +++ b/packages/testing/src/execution_testing/forks/forks/forks.py @@ -947,6 +947,14 @@ def header_requests_required( del block_number, timestamp return False + @classmethod + def header_bal_hash_required( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> bool: + """At genesis, header must not contain block access list hash.""" + del block_number, timestamp + return False + @classmethod def engine_new_payload_version( cls, *, block_number: int = 0, timestamp: int = 0 @@ -987,6 +995,14 @@ def engine_new_payload_requests( del block_number, timestamp return False + @classmethod + def engine_execution_payload_block_access_list( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> bool: + """At genesis, payloads do not have block access list.""" + del block_number, timestamp + return False + @classmethod def engine_new_payload_target_blobs_per_block( cls, @@ -3264,6 +3280,16 @@ class Amsterdam(BPO2): # related Amsterdam specs change over time, and before Amsterdam is # live on mainnet. + @classmethod + def header_bal_hash_required( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> bool: + """ + From Amsterdam, header must contain block access list hash (EIP-7928). + """ + del block_number, timestamp + return True + @classmethod def is_deployed(cls) -> bool: """Return True if this fork is deployed.""" @@ -3277,6 +3303,17 @@ def engine_new_payload_version( del block_number, timestamp return 5 + @classmethod + def engine_execution_payload_block_access_list( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> bool: + """ + From Amsterdam, engine execution payload includes `block_access_list` + as a parameter. 
+ """ + del block_number, timestamp + return True + class EOFv1(Prague, solc_name="cancun"): """EOF fork.""" diff --git a/packages/testing/src/execution_testing/specs/blockchain.py b/packages/testing/src/execution_testing/specs/blockchain.py index 0bfa1eb419..7ec289c8dc 100644 --- a/packages/testing/src/execution_testing/specs/blockchain.py +++ b/packages/testing/src/execution_testing/specs/blockchain.py @@ -699,6 +699,24 @@ def generate_block_data( ) requests_list = block.requests + if self.fork.header_bal_hash_required( + block_number=header.number, timestamp=header.timestamp + ): + assert ( + transition_tool_output.result.block_access_list is not None + ), ( + "Block access list is required for this block but was not provided " + "by the transition tool" + ) + + rlp = transition_tool_output.result.block_access_list.rlp + computed_bal_hash = Hash(rlp.keccak256()) + assert computed_bal_hash == header.block_access_list_hash, ( + "Block access list hash in header does not match the " + f"computed hash from BAL: {header.block_access_list_hash} " + f"!= {computed_bal_hash}" + ) + if block.rlp_modifier is not None: # Modify any parameter specified in the `rlp_modifier` after # transition tool processing. @@ -707,6 +725,23 @@ def generate_block_data( self.fork ) # Deleted during `apply` because `exclude=True` + # Process block access list - apply transformer if present for invalid + # tests + t8n_bal = transition_tool_output.result.block_access_list + bal = t8n_bal + if ( + block.expected_block_access_list is not None + and t8n_bal is not None + ): + block.expected_block_access_list.verify_against(t8n_bal) + + bal = block.expected_block_access_list.modify_if_invalid_test( + t8n_bal + ) + if bal != t8n_bal: + # If the BAL was modified, update the header hash + header.block_access_list_hash = Hash(bal.rlp.keccak256()) + built_block = BuiltBlock( header=header, alloc=transition_tool_output.alloc, @@ -720,7 +755,7 @@ def generate_block_data( expected_exception=block.exception, engine_api_error_code=block.engine_api_error_code, fork=self.fork, - block_access_list=None, + block_access_list=bal, ) try: diff --git a/pyproject.toml b/pyproject.toml index 8a03e7ac9e..dc05b9dad3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -142,6 +142,13 @@ packages = [ "ethereum.forks.osaka.vm.instructions", "ethereum.forks.osaka.vm.precompiled_contracts", "ethereum.forks.osaka.vm.precompiled_contracts.bls12_381", + "ethereum.forks.amsterdam", + "ethereum.forks.amsterdam.block_access_lists", + "ethereum.forks.amsterdam.utils", + "ethereum.forks.amsterdam.vm", + "ethereum.forks.amsterdam.vm.instructions", + "ethereum.forks.amsterdam.vm.precompiled_contracts", + "ethereum.forks.amsterdam.vm.precompiled_contracts.bls12_381", ] [tool.setuptools.package-data] @@ -378,6 +385,15 @@ ignore = [ "src/ethereum_spec_tools/evm_tools/t8n/evm_trace.py" = [ "N815" # The traces must use camel case in JSON property names ] +"src/ethereum/forks/amsterdam/blocks.py" = [ + "E501" # Line too long - needed for long ref links +] + "src/ethereum/forks/amsterdam/block_access_lists/builder.py" = [ + "E501" # Line too long - needed for long ref links + ] +"src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py" = [ + "E501" # Line too long - needed for long ref links + ] "tests/*" = ["ARG001"] "vulture_whitelist.py" = [ "B018", # Useless expression (intentional for Vulture whitelisting) @@ -385,6 +401,10 @@ ignore = [ "F405", # Undefined names from star imports ] +[tool.ruff.lint.mccabe] +# Set the maximum allowed cyclomatic 
complexity. C901 default is 10. +max-complexity = 7 + [tool.codespell] builtin = "clear,code,usage" # Version control & tooling, build artifacts, data files, test fixtures, temp files, lock files diff --git a/src/ethereum/forks/amsterdam/block_access_lists/__init__.py b/src/ethereum/forks/amsterdam/block_access_lists/__init__.py new file mode 100644 index 0000000000..856ab832bc --- /dev/null +++ b/src/ethereum/forks/amsterdam/block_access_lists/__init__.py @@ -0,0 +1,57 @@ +""" +Block Access Lists (EIP-7928) implementation for Ethereum Amsterdam fork. +""" + +from .builder import ( + BlockAccessListBuilder, + add_balance_change, + add_code_change, + add_nonce_change, + add_storage_read, + add_storage_write, + add_touched_account, + build_block_access_list, +) +from .rlp_utils import ( + compute_block_access_list_hash, + rlp_encode_block_access_list, + validate_block_access_list_against_execution, +) +from .tracker import ( + StateChangeTracker, + begin_call_frame, + commit_call_frame, + rollback_call_frame, + set_block_access_index, + track_address_access, + track_balance_change, + track_code_change, + track_nonce_change, + track_storage_read, + track_storage_write, +) + +__all__ = [ + "BlockAccessListBuilder", + "StateChangeTracker", + "add_balance_change", + "add_code_change", + "add_nonce_change", + "add_storage_read", + "add_storage_write", + "add_touched_account", + "begin_call_frame", + "build_block_access_list", + "commit_call_frame", + "compute_block_access_list_hash", + "rollback_call_frame", + "set_block_access_index", + "rlp_encode_block_access_list", + "track_address_access", + "track_balance_change", + "track_code_change", + "track_nonce_change", + "track_storage_read", + "track_storage_write", + "validate_block_access_list_against_execution", +] diff --git a/src/ethereum/forks/amsterdam/block_access_lists/builder.py b/src/ethereum/forks/amsterdam/block_access_lists/builder.py new file mode 100644 index 0000000000..a9d6ee9930 --- /dev/null +++ b/src/ethereum/forks/amsterdam/block_access_lists/builder.py @@ -0,0 +1,435 @@ +""" +Implements the Block Access List builder that tracks all account +and storage accesses during block execution and constructs the final +[`BlockAccessList`]. + +The builder follows a two-phase approach: + +1. **Collection Phase**: During transaction execution, all state accesses are + recorded via the tracking functions. +2. **Build Phase**: After block execution, the accumulated data is sorted + and encoded into the final deterministic format. + +[`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList # noqa: E501 +""" + +from dataclasses import dataclass, field +from typing import Dict, List, Set + +from ethereum_types.bytes import Bytes, Bytes32 +from ethereum_types.numeric import U64, U256 + +from ..fork_types import Address +from .rlp_types import ( + AccountChanges, + BalanceChange, + BlockAccessIndex, + BlockAccessList, + CodeChange, + NonceChange, + SlotChanges, + StorageChange, +) + + +@dataclass +class AccountData: + """ + Account data stored in the builder during block execution. + + This dataclass tracks all changes made to a single account throughout + the execution of a block, organized by the type of change and the + transaction index where it occurred. + """ + + storage_changes: Dict[Bytes32, List[StorageChange]] = field( + default_factory=dict + ) + """ + Mapping from storage slot to list of changes made to that slot. + Each change includes the transaction index and new value. 
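+
+    As an illustration, a single write to `slot` by the first transaction
+    in the block would be stored here as
+    `{slot: [StorageChange(block_access_index=1, new_value=value)]}`.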
+ """ + + storage_reads: Set[Bytes32] = field(default_factory=set) + """ + Set of storage slots that were read but not modified. + """ + + balance_changes: List[BalanceChange] = field(default_factory=list) + """ + List of balance changes for this account, ordered by transaction index. + """ + + nonce_changes: List[NonceChange] = field(default_factory=list) + """ + List of nonce changes for this account, ordered by transaction index. + """ + + code_changes: List[CodeChange] = field(default_factory=list) + """ + List of code changes (contract deployments) for this account, + ordered by transaction index. + """ + + +@dataclass +class BlockAccessListBuilder: + """ + Builder for constructing [`BlockAccessList`] efficiently during transaction + execution. + + The builder accumulates all account and storage accesses during block + execution and constructs a deterministic access list. Changes are tracked + by address, field type, and transaction index to enable efficient + reconstruction of state changes. + + [`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList # noqa: E501 + """ + + accounts: Dict[Address, AccountData] = field(default_factory=dict) + """ + Mapping from account address to its tracked changes during block execution. + """ + + +def ensure_account(builder: BlockAccessListBuilder, address: Address) -> None: + """ + Ensure an account exists in the builder's tracking structure. + + Creates an empty [`AccountData`] entry for the given address if it + doesn't already exist. This function is idempotent and safe to call + multiple times for the same address. + + Parameters + ---------- + builder : + The block access list builder instance. + address : + The account address to ensure exists. + + [`AccountData`] : + ref:ethereum.forks.amsterdam.block_access_lists.builder.AccountData + + """ + if address not in builder.accounts: + builder.accounts[address] = AccountData() + + +def add_storage_write( + builder: BlockAccessListBuilder, + address: Address, + slot: Bytes32, + block_access_index: BlockAccessIndex, + new_value: Bytes32, +) -> None: + """ + Add a storage write operation to the block access list. + + Records a storage slot modification for a given address at a specific + transaction index. Multiple writes to the same slot are tracked + separately, maintaining the order and transaction index of each change. + + Parameters + ---------- + builder : + The block access list builder instance. + address : + The account address whose storage is being modified. + slot : + The storage slot being written to. + block_access_index : + The block access index for this change (0 for pre-execution, + 1..n for transactions, n+1 for post-execution). + new_value : + The new value being written to the storage slot. + + """ + ensure_account(builder, address) + + if slot not in builder.accounts[address].storage_changes: + builder.accounts[address].storage_changes[slot] = [] + + change = StorageChange( + block_access_index=block_access_index, new_value=new_value + ) + builder.accounts[address].storage_changes[slot].append(change) + + +def add_storage_read( + builder: BlockAccessListBuilder, address: Address, slot: Bytes32 +) -> None: + """ + Add a storage read operation to the block access list. + + Records that a storage slot was read during execution. Storage slots + that are both read and written will only appear in the storage changes + list, not in the storage reads list, as per [EIP-7928]. 
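+
+    The filtering itself is applied in `build_block_access_list`, which
+    drops any read-only entry for a slot that also has recorded writes for
+    the same account.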
+ + Parameters + ---------- + builder : + The block access list builder instance. + address : + The account address whose storage is being read. + slot : + The storage slot being read. + + [EIP-7928]: https://eips.ethereum.org/EIPS/eip-7928 + + """ + ensure_account(builder, address) + builder.accounts[address].storage_reads.add(slot) + + +def add_balance_change( + builder: BlockAccessListBuilder, + address: Address, + block_access_index: BlockAccessIndex, + post_balance: U256, +) -> None: + """ + Add a balance change to the block access list. + + Records the post-transaction balance for an account after it has been + modified. This includes changes from transfers, gas fees, block rewards, + and any other balance-affecting operations. + + Parameters + ---------- + builder : + The block access list builder instance. + address : + The account address whose balance changed. + block_access_index : + The block access index for this change (0 for pre-execution, + 1..n for transactions, n+1 for post-execution). + post_balance : + The account balance after the change as U256. + + """ + ensure_account(builder, address) + + # Balance value is already U256 + balance_value = post_balance + + # Check if we already have a balance change for this tx_index and update it + # This ensures we only track the final balance per transaction + existing_changes = builder.accounts[address].balance_changes + for i, existing in enumerate(existing_changes): + if existing.block_access_index == block_access_index: + # Update the existing balance change with the new balance + existing_changes[i] = BalanceChange( + block_access_index=block_access_index, + post_balance=balance_value, + ) + return + + # No existing change for this tx_index, add a new one + change = BalanceChange( + block_access_index=block_access_index, post_balance=balance_value + ) + builder.accounts[address].balance_changes.append(change) + + +def add_nonce_change( + builder: BlockAccessListBuilder, + address: Address, + block_access_index: BlockAccessIndex, + new_nonce: U64, +) -> None: + """ + Add a nonce change to the block access list. + + Records a nonce increment for an account. This occurs when an EOA sends + a transaction or when a contract performs [`CREATE`] or [`CREATE2`] + operations. + + Parameters + ---------- + builder : + The block access list builder instance. + address : + The account address whose nonce changed. + block_access_index : + The block access index for this change (0 for pre-execution, + 1..n for transactions, n+1 for post-execution). + new_nonce : + The new nonce value after the change. 
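+
+    Only the final nonce recorded for a given block access index is kept;
+    a later call with the same `block_access_index` replaces the earlier
+    entry for that index.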
+ + [`CREATE`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create + [`CREATE2`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create2 + + """ + ensure_account(builder, address) + + # Check if we already have a nonce change for this tx_index and update it + # This ensures we only track the final nonce per transaction + existing_changes = builder.accounts[address].nonce_changes + for i, existing in enumerate(existing_changes): + if existing.block_access_index == block_access_index: + # Update the existing nonce change with the new nonce + existing_changes[i] = NonceChange( + block_access_index=block_access_index, new_nonce=new_nonce + ) + return + + # No existing change for this tx_index, add a new one + change = NonceChange( + block_access_index=block_access_index, new_nonce=new_nonce + ) + builder.accounts[address].nonce_changes.append(change) + + +def add_code_change( + builder: BlockAccessListBuilder, + address: Address, + block_access_index: BlockAccessIndex, + new_code: Bytes, +) -> None: + """ + Add a code change to the block access list. + + Records contract code deployment or modification. This typically occurs + during contract creation via [`CREATE`], [`CREATE2`], or [`SETCODE`] + operations. + + Parameters + ---------- + builder : + The block access list builder instance. + address : + The account address receiving new code. + block_access_index : + The block access index for this change (0 for pre-execution, + 1..n for transactions, n+1 for post-execution). + new_code : + The deployed contract bytecode. + + [`CREATE`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create + [`CREATE2`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create2 + + """ + ensure_account(builder, address) + + # Check if we already have a code change for this block_access_index + # This handles the case of in-transaction selfdestructs where code is + # first deployed and then cleared in the same transaction + existing_changes = builder.accounts[address].code_changes + for i, existing in enumerate(existing_changes): + if existing.block_access_index == block_access_index: + # Replace the existing code change with the new one + # For selfdestructs, this ensures we only record the final state (empty code) + existing_changes[i] = CodeChange( + block_access_index=block_access_index, new_code=new_code + ) + return + + # No existing change for this block_access_index, add a new one + change = CodeChange( + block_access_index=block_access_index, new_code=new_code + ) + builder.accounts[address].code_changes.append(change) + + +def add_touched_account( + builder: BlockAccessListBuilder, address: Address +) -> None: + """ + Add an account that was accessed but not modified. + + Records that an account was accessed during execution without any state + changes. This is used for operations like [`EXTCODEHASH`], [`BALANCE`], + [`EXTCODESIZE`], and [`EXTCODECOPY`] that read account data without + modifying it. + + Parameters + ---------- + builder : + The block access list builder instance. + address : + The account address that was accessed. 
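+
+    Calling this for an address that already has recorded changes is
+    harmless; it only ensures an (initially empty) entry exists via
+    `ensure_account`.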
+ + [`EXTCODEHASH`] : + ref:ethereum.forks.amsterdam.vm.instructions.environment.extcodehash + [`BALANCE`] : + ref:ethereum.forks.amsterdam.vm.instructions.environment.balance + [`EXTCODESIZE`] : + ref:ethereum.forks.amsterdam.vm.instructions.environment.extcodesize + [`EXTCODECOPY`] : + ref:ethereum.forks.amsterdam.vm.instructions.environment.extcodecopy + + """ + ensure_account(builder, address) + + +def build_block_access_list( + builder: BlockAccessListBuilder, +) -> BlockAccessList: + """ + Build the final [`BlockAccessList`] from accumulated changes. + + Constructs a deterministic block access list by sorting all accumulated + changes. The resulting list is ordered by: + + 1. Account addresses (lexicographically) + 2. Within each account: + - Storage slots (lexicographically) + - Transaction indices (numerically) for each change type + + Parameters + ---------- + builder : + The block access list builder containing all tracked changes. + + Returns + ------- + block_access_list : + The final sorted and encoded block access list. + + [`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList # noqa: E501 + + """ + account_changes_list = [] + + for address, changes in builder.accounts.items(): + storage_changes = [] + for slot, slot_changes in changes.storage_changes.items(): + sorted_changes = tuple( + sorted(slot_changes, key=lambda x: x.block_access_index) + ) + storage_changes.append( + SlotChanges(slot=slot, changes=sorted_changes) + ) + + storage_reads = [] + for slot in changes.storage_reads: + if slot not in changes.storage_changes: + storage_reads.append(slot) + + balance_changes = tuple( + sorted(changes.balance_changes, key=lambda x: x.block_access_index) + ) + nonce_changes = tuple( + sorted(changes.nonce_changes, key=lambda x: x.block_access_index) + ) + code_changes = tuple( + sorted(changes.code_changes, key=lambda x: x.block_access_index) + ) + + storage_changes.sort(key=lambda x: x.slot) + storage_reads.sort() + + account_change = AccountChanges( + address=address, + storage_changes=tuple(storage_changes), + storage_reads=tuple(storage_reads), + balance_changes=balance_changes, + nonce_changes=nonce_changes, + code_changes=code_changes, + ) + + account_changes_list.append(account_change) + + account_changes_list.sort(key=lambda x: x.address) + + return BlockAccessList(account_changes=tuple(account_changes_list)) diff --git a/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py b/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py new file mode 100644 index 0000000000..e4d37d6a74 --- /dev/null +++ b/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py @@ -0,0 +1,130 @@ +""" +Defines the RLP data structures for Block-Level Access Lists +as specified in EIP-7928. These structures enable efficient encoding and +decoding of all accounts and storage locations accessed during block execution. + +The encoding follows the pattern: +address -> field -> block_access_index -> change. 
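+
+As an illustrative (non-normative) sketch, an account whose balance, nonce
+and one storage slot were all changed by the first transaction in a block
+would be encoded roughly as:
+
+    [address,
+     [[slot, [[1, new_value]]]],   # storage_changes
+     [],                           # storage_reads
+     [[1, post_balance]],          # balance_changes
+     [[1, new_nonce]],             # nonce_changes
+     []]                           # code_changes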
+""" + +from dataclasses import dataclass +from typing import Tuple + +from ethereum_types.bytes import Bytes, Bytes20, Bytes32 +from ethereum_types.frozen import slotted_freezable +from ethereum_types.numeric import U64, U256, Uint + +# Type aliases for clarity (matching EIP-7928 specification) +Address = Bytes20 +StorageKey = Bytes32 +StorageValue = Bytes32 +CodeData = Bytes +BlockAccessIndex = Uint # uint16 in the spec, but using Uint for compatibility +Balance = U256 # Post-transaction balance in wei +Nonce = U64 + +# Constants chosen to support a 630m block gas limit +MAX_TXS = 30_000 +# MAX_SLOTS = 300_000 +# MAX_ACCOUNTS = 300_000 +MAX_CODE_SIZE = 24_576 +MAX_CODE_CHANGES = 1 + + +@slotted_freezable +@dataclass +class StorageChange: + """ + Storage change: [block_access_index, new_value]. + RLP encoded as a list. + """ + + block_access_index: BlockAccessIndex + new_value: StorageValue + + +@slotted_freezable +@dataclass +class BalanceChange: + """ + Balance change: [block_access_index, post_balance]. + RLP encoded as a list. + """ + + block_access_index: BlockAccessIndex + post_balance: Balance + + +@slotted_freezable +@dataclass +class NonceChange: + """ + Nonce change: [block_access_index, new_nonce]. + RLP encoded as a list. + """ + + block_access_index: BlockAccessIndex + new_nonce: Nonce + + +@slotted_freezable +@dataclass +class CodeChange: + """ + Code change: [block_access_index, new_code]. + RLP encoded as a list. + """ + + block_access_index: BlockAccessIndex + new_code: CodeData + + +@slotted_freezable +@dataclass +class SlotChanges: + """ + All changes to a single storage slot: [slot, [changes]]. + RLP encoded as a list. + """ + + slot: StorageKey + changes: Tuple[StorageChange, ...] + + +@slotted_freezable +@dataclass +class AccountChanges: + """ + All changes for a single account, grouped by field type. + RLP encoded as: [address, storage_changes, storage_reads, + balance_changes, nonce_changes, code_changes]. + """ + + address: Address + + # slot -> [block_access_index -> new_value] + storage_changes: Tuple[SlotChanges, ...] + + # read-only storage keys + storage_reads: Tuple[StorageKey, ...] + + # [block_access_index -> post_balance] + balance_changes: Tuple[BalanceChange, ...] + + # [block_access_index -> new_nonce] + nonce_changes: Tuple[NonceChange, ...] + + # [block_access_index -> new_code] + code_changes: Tuple[CodeChange, ...] + + +@slotted_freezable +@dataclass +class BlockAccessList: + """ + Block-Level Access List for EIP-7928. + Contains all addresses accessed during block execution. + RLP encoded as a list of AccountChanges. + """ + + account_changes: Tuple[AccountChanges, ...] diff --git a/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py b/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py new file mode 100644 index 0000000000..bbcf4a3d21 --- /dev/null +++ b/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py @@ -0,0 +1,232 @@ +""" +Utilities for working with Block Access Lists using RLP encoding, +as specified in EIP-7928. + +This module provides: + +- RLP encoding functions for all Block Access List types +- Hash computation using [`keccak256`] +- Validation logic to ensure structural correctness + +The encoding follows the RLP specification used throughout Ethereum. 
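+
+A minimal usage sketch, where `bal` is any `BlockAccessList` instance:
+
+    encoded = rlp_encode_block_access_list(bal)
+    bal_hash = compute_block_access_list_hash(bal)  # == keccak256(encoded)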
+ +[`keccak256`]: ref:ethereum.crypto.hash.keccak256 +""" + +from typing import cast + +from ethereum_rlp import Extended, rlp +from ethereum_types.bytes import Bytes +from ethereum_types.numeric import Uint + +from ethereum.crypto.hash import Hash32, keccak256 + +from .builder import BlockAccessListBuilder +from .rlp_types import MAX_CODE_SIZE, MAX_TXS, BlockAccessList + + +def compute_block_access_list_hash( + block_access_list: BlockAccessList, +) -> Hash32: + """ + Compute the hash of a Block Access List. + + The Block Access List is RLP-encoded and then hashed with keccak256. + + Parameters + ---------- + block_access_list : + The Block Access List to hash. + + Returns + ------- + hash : + The keccak256 hash of the RLP-encoded Block Access List. + + """ + block_access_list_bytes = rlp_encode_block_access_list(block_access_list) + return keccak256(block_access_list_bytes) + + +def rlp_encode_block_access_list(block_access_list: BlockAccessList) -> Bytes: + """ + Encode a [`BlockAccessList`] to RLP bytes. + + This is the top-level encoding function that produces the final RLP + representation of a block's access list, following the updated EIP-7928 + specification. + + Parameters + ---------- + block_access_list : + The block access list to encode. + + Returns + ------- + encoded : + The complete RLP-encoded block access list. + + [`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList # noqa: E501 + + """ + # Encode as a list of AccountChanges directly (not wrapped) + account_changes_list = [] + for account in block_access_list.account_changes: + # Each account is encoded as: + # [address, storage_changes, storage_reads, + # balance_changes, nonce_changes, code_changes] + storage_changes_list = [ + [ + slot_changes.slot, + [ + [Uint(c.block_access_index), c.new_value] + for c in slot_changes.changes + ], + ] + for slot_changes in account.storage_changes + ] + + storage_reads_list = list(account.storage_reads) + + balance_changes_list = [ + [Uint(bc.block_access_index), Uint(bc.post_balance)] + for bc in account.balance_changes + ] + + nonce_changes_list = [ + [Uint(nc.block_access_index), Uint(nc.new_nonce)] + for nc in account.nonce_changes + ] + + code_changes_list = [ + [Uint(cc.block_access_index), cc.new_code] + for cc in account.code_changes + ] + + account_changes_list.append( + [ + account.address, + storage_changes_list, + storage_reads_list, + balance_changes_list, + nonce_changes_list, + code_changes_list, + ] + ) + + encoded = rlp.encode(cast(Extended, account_changes_list)) + return Bytes(encoded) + + +def validate_block_access_list_against_execution( + block_access_list: BlockAccessList, + block_access_list_builder: BlockAccessListBuilder | None = None, +) -> bool: + """ + Validate that a Block Access List is structurally correct and + optionally matches a builder's state. + + Parameters + ---------- + block_access_list : + The Block Access List to validate. + block_access_list_builder : + Optional Block Access List builder to validate against. + If provided, checks that the + Block Access List hash matches what would be built from + the builder's current state. + + Returns + ------- + valid : + True if the Block Access List is structurally valid and + matches the builder (if provided). + + """ + # 1. 
Validate structural constraints + + # Check that storage changes and reads don't overlap for the same slot + for account in block_access_list.account_changes: + changed_slots = {sc.slot for sc in account.storage_changes} + read_slots = set(account.storage_reads) + + # A slot should not be in both changes and reads (per EIP-7928) + if changed_slots & read_slots: + return False + + # 2. Validate ordering (addresses should be sorted lexicographically) + addresses = [ + account.address for account in block_access_list.account_changes + ] + if addresses != sorted(addresses): + return False + + # 3. Validate all data is within bounds + max_block_access_index = ( + MAX_TXS + 1 + ) # 0 for pre-exec, 1..MAX_TXS for txs, MAX_TXS+1 for post-exec + for account in block_access_list.account_changes: + # Validate storage slots are sorted within each account + storage_slots = [sc.slot for sc in account.storage_changes] + if storage_slots != sorted(storage_slots): + return False + + # Check storage changes + for slot_changes in account.storage_changes: + # Check changes are sorted by block_access_index + indices = [c.block_access_index for c in slot_changes.changes] + if indices != sorted(indices): + return False + + for change in slot_changes.changes: + if int(change.block_access_index) > max_block_access_index: + return False + + # Check balance changes are sorted by block_access_index + balance_indices = [ + bc.block_access_index for bc in account.balance_changes + ] + if balance_indices != sorted(balance_indices): + return False + + for balance_change in account.balance_changes: + if int(balance_change.block_access_index) > max_block_access_index: + return False + + # Check nonce changes are sorted by block_access_index + nonce_indices = [nc.block_access_index for nc in account.nonce_changes] + if nonce_indices != sorted(nonce_indices): + return False + + for nonce_change in account.nonce_changes: + if int(nonce_change.block_access_index) > max_block_access_index: + return False + + # Check code changes are sorted by block_access_index + code_indices = [cc.block_access_index for cc in account.code_changes] + if code_indices != sorted(code_indices): + return False + + for code_change in account.code_changes: + if int(code_change.block_access_index) > max_block_access_index: + return False + if len(code_change.new_code) > MAX_CODE_SIZE: + return False + + # 4. If Block Access List builder provided, validate against it + # by comparing hashes + if block_access_list_builder is not None: + from .builder import build_block_access_list + + # Build a Block Access List from the builder + expected_block_access_list = build_block_access_list( + block_access_list_builder + ) + + # Compare hashes + if compute_block_access_list_hash( + block_access_list + ) != compute_block_access_list_hash(expected_block_access_list): + return False + + return True diff --git a/src/ethereum/forks/amsterdam/block_access_lists/tracker.py b/src/ethereum/forks/amsterdam/block_access_lists/tracker.py new file mode 100644 index 0000000000..66a4f1ebbd --- /dev/null +++ b/src/ethereum/forks/amsterdam/block_access_lists/tracker.py @@ -0,0 +1,667 @@ +""" +Provides state change tracking functionality for building Block +Access Lists during transaction execution. + +The tracker integrates with the EVM execution to capture all state accesses +and modifications, distinguishing between actual changes and no-op operations. +It maintains a cache of pre-state values to enable accurate change detection +throughout block execution. 
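+
+A rough per-transaction flow (sketch; `tracker`, `state` and the tracked
+values are supplied by the enclosing block execution):
+
+    set_block_access_index(tracker, tx_index + Uint(1))
+    track_storage_write(tracker, address, key, new_value, state)
+    finalize_transaction_changes(tracker, state)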
+ +See [EIP-7928] for the full specification +[EIP-7928]: https://eips.ethereum.org/EIPS/eip-7928 +""" + +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Dict, List, Set, Tuple + +from ethereum_types.bytes import Bytes, Bytes32 +from ethereum_types.numeric import U64, U256, Uint + +from ..fork_types import Address +from .builder import ( + BlockAccessListBuilder, + add_balance_change, + add_code_change, + add_nonce_change, + add_storage_read, + add_storage_write, + add_touched_account, +) +from .rlp_types import BlockAccessIndex + +if TYPE_CHECKING: + from ..state import State # noqa: F401 + + +@dataclass +class CallFrameSnapshot: + """ + Snapshot of block access list state for a single call frame. + + Used to track changes within a call frame to enable proper handling + of reverts as specified in EIP-7928. + """ + + touched_addresses: Set[Address] = field(default_factory=set) + """Addresses touched during this call frame.""" + + storage_writes: Dict[Tuple[Address, Bytes32], U256] = field( + default_factory=dict + ) + """Storage writes made during this call frame.""" + + balance_changes: Set[Tuple[Address, BlockAccessIndex, U256]] = field( + default_factory=set + ) + """Balance changes made during this call frame.""" + + nonce_changes: Set[Tuple[Address, BlockAccessIndex, U64]] = field( + default_factory=set + ) + """Nonce changes made during this call frame.""" + + code_changes: Set[Tuple[Address, BlockAccessIndex, Bytes]] = field( + default_factory=set + ) + """Code changes made during this call frame.""" + + +@dataclass +class StateChangeTracker: + """ + Tracks state changes during transaction execution for Block Access List + construction. + + This tracker maintains a cache of pre-state values and coordinates with + the [`BlockAccessListBuilder`] to record all state changes made during + block execution. It ensures that only actual changes (not no-op writes) + are recorded in the access list. + + [`BlockAccessListBuilder`]: + ref:ethereum.forks.amsterdam.block_access_lists.builder.BlockAccessListBuilder + """ + + block_access_list_builder: BlockAccessListBuilder + """ + The builder instance that accumulates all tracked changes. + """ + + pre_storage_cache: Dict[tuple, U256] = field(default_factory=dict) + """ + Cache of pre-transaction storage values, keyed by (address, slot) tuples. + This cache is cleared at the start of each transaction to track values + from the beginning of the current transaction. + """ + + pre_balance_cache: Dict[Address, U256] = field(default_factory=dict) + """ + Cache of pre-transaction balance values, keyed by address. + This cache is cleared at the start of each transaction and used by + finalize_transaction_changes to filter out balance changes where + the final balance equals the initial balance. + """ + + current_block_access_index: Uint = Uint(0) + """ + The current block access index (0 for pre-execution, + 1..n for transactions, n+1 for post-execution). + """ + + call_frame_snapshots: List[CallFrameSnapshot] = field(default_factory=list) + """ + Stack of snapshots for nested call frames to handle reverts properly. + """ + + +def set_block_access_index( + tracker: StateChangeTracker, block_access_index: Uint +) -> None: + """ + Set the current block access index for tracking changes. + + Must be called before processing each transaction/system contract + to ensure changes are associated with the correct block access index. 
+ + Note: Block access indices differ from transaction indices: + - 0: Pre-execution (system contracts like beacon roots, block hashes) + - 1..n: Transactions (tx at index i gets block_access_index i+1) + - n+1: Post-execution (withdrawals, requests) + + Parameters + ---------- + tracker : + The state change tracker instance. + block_access_index : + The block access index (0 for pre-execution, + 1..n for transactions, n+1 for post-execution). + + """ + tracker.current_block_access_index = block_access_index + # Clear the pre-storage cache for each new transaction to ensure + # no-op writes are detected relative to the transaction start + tracker.pre_storage_cache.clear() + # Clear the pre-balance cache for each new transaction + tracker.pre_balance_cache.clear() + + +def capture_pre_state( + tracker: StateChangeTracker, address: Address, key: Bytes32, state: "State" +) -> U256: + """ + Capture and cache the pre-transaction value for a storage location. + + Retrieves the storage value from the beginning of the current transaction. + The value is cached within the transaction to avoid repeated lookups and + to maintain consistency across multiple accesses within the same + transaction. + + Parameters + ---------- + tracker : + The state change tracker instance. + address : + The account address containing the storage. + key : + The storage slot to read. + state : + The current execution state. + + Returns + ------- + value : + The storage value at the beginning of the current transaction. + + """ + cache_key = (address, key) + if cache_key not in tracker.pre_storage_cache: + # Import locally to avoid circular import + from ..state import get_storage + + tracker.pre_storage_cache[cache_key] = get_storage(state, address, key) + return tracker.pre_storage_cache[cache_key] + + +def track_address_access( + tracker: StateChangeTracker, address: Address +) -> None: + """ + Track that an address was accessed. + + Records account access even when no state changes occur. This is + important for operations that read account data without modifying it. + + Parameters + ---------- + tracker : + The state change tracker instance. + address : + The account address that was accessed. + + """ + add_touched_account(tracker.block_access_list_builder, address) + + +def track_storage_read( + tracker: StateChangeTracker, address: Address, key: Bytes32, state: "State" +) -> None: + """ + Track a storage read operation. + + Records that a storage slot was read and captures its pre-state value. + The slot will only appear in the final access list if it wasn't also + written to during block execution. + + Parameters + ---------- + tracker : + The state change tracker instance. + address : + The account address whose storage is being read. + key : + The storage slot being read. + state : + The current execution state. + + """ + track_address_access(tracker, address) + + capture_pre_state(tracker, address, key, state) + + add_storage_read(tracker.block_access_list_builder, address, key) + + +def track_storage_write( + tracker: StateChangeTracker, + address: Address, + key: Bytes32, + new_value: U256, + state: "State", +) -> None: + """ + Track a storage write operation. + + Records storage modifications, but only if the new value differs from + the pre-state value. No-op writes (where the value doesn't change) are + tracked as reads instead, as specified in [EIP-7928]. + + Parameters + ---------- + tracker : + The state change tracker instance. + address : + The account address whose storage is being modified. 
+ key : + The storage slot being written to. + new_value : + The new value to write. + state : + The current execution state. + + [EIP-7928]: https://eips.ethereum.org/EIPS/eip-7928 + + """ + track_address_access(tracker, address) + + pre_value = capture_pre_state(tracker, address, key, state) + + value_bytes = new_value.to_be_bytes32() + + if pre_value != new_value: + add_storage_write( + tracker.block_access_list_builder, + address, + key, + BlockAccessIndex(tracker.current_block_access_index), + value_bytes, + ) + # Record in current call frame snapshot if exists + if tracker.call_frame_snapshots: + snapshot = tracker.call_frame_snapshots[-1] + snapshot.storage_writes[(address, key)] = new_value + else: + add_storage_read(tracker.block_access_list_builder, address, key) + + +def capture_pre_balance( + tracker: StateChangeTracker, address: Address, state: "State" +) -> U256: + """ + Capture and cache the pre-transaction balance for an account. + + This function caches the balance on first access for each address during + a transaction. It must be called before any balance modifications are made + to ensure we capture the pre-transaction balance correctly. The cache is + cleared at the beginning of each transaction. + + This is used by finalize_transaction_changes to determine which balance + changes should be filtered out. + + Parameters + ---------- + tracker : + The state change tracker instance. + address : + The account address. + state : + The current execution state. + + Returns + ------- + value : + The balance at the beginning of the current transaction. + + """ + if address not in tracker.pre_balance_cache: + # Import locally to avoid circular import + from ..state import get_account + + # Cache the current balance on first access + # This should be called before any balance modifications + account = get_account(state, address) + tracker.pre_balance_cache[address] = account.balance + return tracker.pre_balance_cache[address] + + +def track_balance_change( + tracker: StateChangeTracker, + address: Address, + new_balance: U256, +) -> None: + """ + Track a balance change for an account. + + Records the new balance after any balance-affecting operation, including + transfers, gas payments, block rewards, and withdrawals. The balance is + encoded as a 16-byte value (uint128) which is sufficient for the total + ETH supply. + + Parameters + ---------- + tracker : + The state change tracker instance. + address : + The account address whose balance changed. + new_balance : + The new balance value. + + """ + track_address_access(tracker, address) + + block_access_index = BlockAccessIndex(tracker.current_block_access_index) + add_balance_change( + tracker.block_access_list_builder, + address, + block_access_index, + new_balance, + ) + + # Record in current call frame snapshot if exists + if tracker.call_frame_snapshots: + snapshot = tracker.call_frame_snapshots[-1] + snapshot.balance_changes.add( + (address, block_access_index, new_balance) + ) + + +def track_nonce_change( + tracker: StateChangeTracker, address: Address, new_nonce: Uint +) -> None: + """ + Track a nonce change for an account. + + Records nonce increments for both EOAs (when sending transactions) and + contracts (when performing [`CREATE`] or [`CREATE2`] operations). Deployed + contracts also have their initial nonce tracked. + + Parameters + ---------- + tracker : + The state change tracker instance. + address : + The account address whose nonce changed. + new_nonce : + The new nonce value. 
+ state : + The current execution state. + + [`CREATE`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create + [`CREATE2`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create2 + + """ + track_address_access(tracker, address) + block_access_index = BlockAccessIndex(tracker.current_block_access_index) + nonce_u64 = U64(new_nonce) + add_nonce_change( + tracker.block_access_list_builder, + address, + block_access_index, + nonce_u64, + ) + + # Record in current call frame snapshot if exists + if tracker.call_frame_snapshots: + snapshot = tracker.call_frame_snapshots[-1] + snapshot.nonce_changes.add((address, block_access_index, nonce_u64)) + + +def track_code_change( + tracker: StateChangeTracker, address: Address, new_code: Bytes +) -> None: + """ + Track a code change for contract deployment. + + Records new contract code deployments via [`CREATE`], [`CREATE2`], or + [`SETCODE`] operations. This function is called when contract bytecode + is deployed to an address. + + Parameters + ---------- + tracker : + The state change tracker instance. + address : + The address receiving the contract code. + new_code : + The deployed contract bytecode. + + [`CREATE`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create + [`CREATE2`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create2 + + """ + track_address_access(tracker, address) + block_access_index = BlockAccessIndex(tracker.current_block_access_index) + add_code_change( + tracker.block_access_list_builder, + address, + block_access_index, + new_code, + ) + + # Record in current call frame snapshot if exists + if tracker.call_frame_snapshots: + snapshot = tracker.call_frame_snapshots[-1] + snapshot.code_changes.add((address, block_access_index, new_code)) + + +def handle_in_transaction_selfdestruct( + tracker: StateChangeTracker, address: Address +) -> None: + """ + Handle an account that self-destructed in the same transaction it was + created. + + Per EIP-7928, accounts destroyed within their creation transaction must be + included as read-only with storage writes converted to reads. Nonce and + code changes from the current transaction are also removed. + + Note: Balance changes are handled separately by + finalize_transaction_changes. + + Parameters + ---------- + tracker : + The state change tracker instance. + address : + The address that self-destructed. + + """ + builder = tracker.block_access_list_builder + if address not in builder.accounts: + return + + account_data = builder.accounts[address] + current_index = tracker.current_block_access_index + + # Convert storage writes from current tx to reads + for slot in list(account_data.storage_changes.keys()): + account_data.storage_changes[slot] = [ + c + for c in account_data.storage_changes[slot] + if c.block_access_index != current_index + ] + if not account_data.storage_changes[slot]: + del account_data.storage_changes[slot] + account_data.storage_reads.add(slot) + + # Remove nonce and code changes from current transaction + account_data.nonce_changes = [ + c + for c in account_data.nonce_changes + if c.block_access_index != current_index + ] + account_data.code_changes = [ + c + for c in account_data.code_changes + if c.block_access_index != current_index + ] + + +def finalize_transaction_changes( + tracker: StateChangeTracker, state: "State" +) -> None: + """ + Finalize changes for the current transaction. + + This method is called at the end of each transaction execution to filter + out spurious balance changes. 
It removes all balance changes for addresses + where the post-transaction balance equals the pre-transaction balance. + + This is crucial for handling cases like: + - In-transaction self-destructs where an account with 0 balance is created + and destroyed, resulting in no net balance change + - Round-trip transfers where an account receives and sends equal amounts + + Only actual state changes are recorded in the Block Access List. + + Parameters + ---------- + tracker : + The state change tracker instance. + state : + The current execution state. + + """ + # Import locally to avoid circular import + from ..state import get_account + + builder = tracker.block_access_list_builder + current_index = tracker.current_block_access_index + + # Check each address that had balance changes in this transaction + for address in list(builder.accounts.keys()): + account_data = builder.accounts[address] + + # Get the pre-transaction balance + pre_balance = capture_pre_balance(tracker, address, state) + + # Get the current (post-transaction) balance + post_balance = get_account(state, address).balance + + # If pre-tx balance equals post-tx balance, remove all balance changes + # for this address in the current transaction + if pre_balance == post_balance: + # Filter out balance changes from the current transaction + account_data.balance_changes = [ + change + for change in account_data.balance_changes + if change.block_access_index != current_index + ] + + +def begin_call_frame(tracker: StateChangeTracker) -> None: + """ + Begin a new call frame for tracking reverts. + + Creates a new snapshot to track changes within this call frame. + This allows proper handling of reverts as specified in EIP-7928. + + Parameters + ---------- + tracker : + The state change tracker instance. + + """ + tracker.call_frame_snapshots.append(CallFrameSnapshot()) + + +def rollback_call_frame(tracker: StateChangeTracker) -> None: + """ + Rollback changes from the current call frame. + + When a call reverts, this function: + - Converts storage writes to reads + - Removes balance, nonce, and code changes + - Preserves touched addresses + + This implements EIP-7928 revert handling where reverted writes + become reads and addresses remain in the access list. + + Parameters + ---------- + tracker : + The state change tracker instance. 
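+
+    A typical pairing (sketch): `begin_call_frame(tracker)` before a
+    sub-call, then `rollback_call_frame(tracker)` if it reverts, or
+    `commit_call_frame(tracker)` if it completes successfully.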
+ + """ + if not tracker.call_frame_snapshots: + return + + snapshot = tracker.call_frame_snapshots.pop() + builder = tracker.block_access_list_builder + + # Convert storage writes to reads + for (address, slot), _ in snapshot.storage_writes.items(): + # Remove the write from storage_changes + if address in builder.accounts: + account_data = builder.accounts[address] + if slot in account_data.storage_changes: + # Filter out changes from this call frame + account_data.storage_changes[slot] = [ + change + for change in account_data.storage_changes[slot] + if change.block_access_index + != tracker.current_block_access_index + ] + if not account_data.storage_changes[slot]: + del account_data.storage_changes[slot] + # Add as a read instead + account_data.storage_reads.add(slot) + + # Remove balance changes from this call frame + for address, block_access_index, new_balance in snapshot.balance_changes: + if address in builder.accounts: + account_data = builder.accounts[address] + # Filter out balance changes from this call frame + account_data.balance_changes = [ + change + for change in account_data.balance_changes + if not ( + change.block_access_index == block_access_index + and change.post_balance == new_balance + ) + ] + + # Remove nonce changes from this call frame + for address, block_access_index, new_nonce in snapshot.nonce_changes: + if address in builder.accounts: + account_data = builder.accounts[address] + # Filter out nonce changes from this call frame + account_data.nonce_changes = [ + change + for change in account_data.nonce_changes + if not ( + change.block_access_index == block_access_index + and change.new_nonce == new_nonce + ) + ] + + # Remove code changes from this call frame + for address, block_access_index, new_code in snapshot.code_changes: + if address in builder.accounts: + account_data = builder.accounts[address] + # Filter out code changes from this call frame + account_data.code_changes = [ + change + for change in account_data.code_changes + if not ( + change.block_access_index == block_access_index + and change.new_code == new_code + ) + ] + + # All touched addresses remain in the access list (already tracked) + + +def commit_call_frame(tracker: StateChangeTracker) -> None: + """ + Commit changes from the current call frame. + + Removes the current call frame snapshot without rolling back changes. + Called when a call completes successfully. + + Parameters + ---------- + tracker : + The state change tracker instance. + + """ + if tracker.call_frame_snapshots: + tracker.call_frame_snapshots.pop() diff --git a/src/ethereum/forks/amsterdam/blocks.py b/src/ethereum/forks/amsterdam/blocks.py index ba3c27e9e3..0d14066f47 100644 --- a/src/ethereum/forks/amsterdam/blocks.py +++ b/src/ethereum/forks/amsterdam/blocks.py @@ -19,6 +19,7 @@ from ethereum.crypto.hash import Hash32 +from .block_access_lists.rlp_types import BlockAccessList from .fork_types import Address, Bloom, Root from .transactions import ( AccessListTransaction, @@ -242,6 +243,16 @@ class Header: [SHA2-256]: https://en.wikipedia.org/wiki/SHA-2 """ + block_access_list_hash: Hash32 + """ + [SHA2-256] hash of the Block Access List containing all accounts and + storage locations accessed during block execution. Introduced in + [EIP-7928]. See [`compute_block_access_list_hash`][cbalh] for more + details. 
+ [EIP-7928]: https://eips.ethereum.org/EIPS/eip-7928 + [cbalh]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_utils.compute_block_access_list_hash # noqa: E501 + """ + @slotted_freezable @dataclass @@ -295,6 +306,13 @@ class Block: A tuple of withdrawals processed in this block. """ + block_access_list: BlockAccessList + """ + Block Access List containing all accounts and storage locations accessed + during block execution. Introduced in [EIP-7928]. + [EIP-7928]: https://eips.ethereum.org/EIPS/eip-7928 + """ + @slotted_freezable @dataclass diff --git a/src/ethereum/forks/amsterdam/fork.py b/src/ethereum/forks/amsterdam/fork.py index 1d8bbcc106..08f5ad734c 100644 --- a/src/ethereum/forks/amsterdam/fork.py +++ b/src/ethereum/forks/amsterdam/fork.py @@ -16,7 +16,7 @@ from ethereum_rlp import rlp from ethereum_types.bytes import Bytes -from ethereum_types.numeric import U64, U256, Uint +from ethereum_types.numeric import U64, U256, Uint, ulen from ethereum.crypto.hash import Hash32, keccak256 from ethereum.exceptions import ( @@ -29,6 +29,14 @@ ) from . import vm +from .block_access_lists.builder import build_block_access_list +from .block_access_lists.rlp_utils import compute_block_access_list_hash +from .block_access_lists.tracker import ( + finalize_transaction_changes, + handle_in_transaction_selfdestruct, + set_block_access_index, + track_balance_change, +) from .blocks import Block, Header, Log, Receipt, Withdrawal, encode_receipt from .bloom import logs_bloom from .exceptions import ( @@ -53,6 +61,7 @@ from .state import ( State, TransientStorage, + account_exists_and_is_empty, destroy_account, get_account, increment_nonce, @@ -246,6 +255,9 @@ def state_transition(chain: BlockChain, block: Block) -> None: block_logs_bloom = logs_bloom(block_output.block_logs) withdrawals_root = root(block_output.withdrawals_trie) requests_hash = compute_requests_hash(block_output.requests) + computed_block_access_list_hash = compute_block_access_list_hash( + block_output.block_access_list + ) if block_output.block_gas_used != block.header.gas_used: raise InvalidBlock( @@ -265,6 +277,8 @@ def state_transition(chain: BlockChain, block: Block) -> None: raise InvalidBlock if requests_hash != block.header.requests_hash: raise InvalidBlock + if computed_block_access_list_hash != block.header.block_access_list_hash: + raise InvalidBlock("Invalid block access list hash") chain.blocks.append(block) if len(chain.blocks) > 255: @@ -764,6 +778,10 @@ def apply_body( """ block_output = vm.BlockOutput() + # Set block access index for pre-execution system contracts + # EIP-7928: System contracts use block_access_index 0 + set_block_access_index(block_env.state.change_tracker, Uint(0)) + process_unchecked_system_transaction( block_env=block_env, target_address=BEACON_ROOTS_ADDRESS, @@ -779,12 +797,21 @@ def apply_body( for i, tx in enumerate(map(decode_transaction, transactions)): process_transaction(block_env, block_output, tx, Uint(i)) + # EIP-7928: Post-execution uses block_access_index len(transactions) + 1 + post_execution_index = ulen(transactions) + Uint(1) + set_block_access_index( + block_env.state.change_tracker, post_execution_index + ) + process_withdrawals(block_env, block_output, withdrawals) process_general_purpose_requests( block_env=block_env, block_output=block_output, ) + block_output.block_access_list = build_block_access_list( + block_env.state.change_tracker.block_access_list_builder + ) return block_output @@ -864,6 +891,10 @@ def process_transaction( Index of the transaction in the block. 
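+        With EIP-7928 tracking, the state changes made by this transaction
+        are recorded under block access index `index + 1` (index `0` is
+        reserved for pre-execution system calls).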
""" + # EIP-7928: Transactions use block_access_index 1 to len(transactions) + # Transaction at index i gets block_access_index i+1 + set_block_access_index(block_env.state.change_tracker, index + Uint(1)) + trie_set( block_output.transactions_trie, rlp.encode(index), @@ -971,15 +1002,35 @@ def process_transaction( coinbase_balance_after_mining_fee = get_account( block_env.state, block_env.coinbase ).balance + U256(transaction_fee) + + # Always set coinbase balance to ensure proper tracking set_account_balance( block_env.state, block_env.coinbase, coinbase_balance_after_mining_fee, ) + if coinbase_balance_after_mining_fee == 0 and account_exists_and_is_empty( + block_env.state, block_env.coinbase + ): + destroy_account(block_env.state, block_env.coinbase) + for address in tx_output.accounts_to_delete: + # EIP-7928: In-transaction self-destruct - convert storage writes to + # reads and remove nonce/code changes. Only accounts created in same + # tx are in accounts_to_delete per EIP-6780. + handle_in_transaction_selfdestruct( + block_env.state.change_tracker, address + ) destroy_account(block_env.state, address) + # EIP-7928: Finalize transaction changes + # Remove balance changes where post-tx balance equals pre-tx balance + finalize_transaction_changes( + block_env.state.change_tracker, + block_env.state, + ) + block_output.block_gas_used += tx_gas_used_after_refund block_output.blob_gas_used += tx_blob_gas_used @@ -1020,6 +1071,16 @@ def increase_recipient_balance(recipient: Account) -> None: modify_state(block_env.state, wd.address, increase_recipient_balance) + # Track balance change for BAL + # (withdrawals are tracked as system contract changes) + new_balance = get_account(block_env.state, wd.address).balance + track_balance_change( + block_env.state.change_tracker, wd.address, U256(new_balance) + ) + + if account_exists_and_is_empty(block_env.state, wd.address): + destroy_account(block_env.state, wd.address) + def check_gas_limit(gas_limit: Uint, parent_gas_limit: Uint) -> bool: """ diff --git a/src/ethereum/forks/amsterdam/state.py b/src/ethereum/forks/amsterdam/state.py index e997411f6d..3067b175d6 100644 --- a/src/ethereum/forks/amsterdam/state.py +++ b/src/ethereum/forks/amsterdam/state.py @@ -23,6 +23,14 @@ from ethereum_types.frozen import modify from ethereum_types.numeric import U256, Uint +from .block_access_lists.builder import BlockAccessListBuilder +from .block_access_lists.tracker import ( + StateChangeTracker, + capture_pre_balance, + track_balance_change, + track_code_change, + track_nonce_change, +) from .fork_types import EMPTY_ACCOUNT, Account, Address, Root from .trie import EMPTY_TRIE_ROOT, Trie, copy_trie, root, trie_get, trie_set @@ -46,6 +54,9 @@ class State: ] ] = field(default_factory=list) created_accounts: Set[Address] = field(default_factory=set) + change_tracker: StateChangeTracker = field( + default_factory=lambda: StateChangeTracker(BlockAccessListBuilder()) + ) @dataclass @@ -440,6 +451,34 @@ def account_has_storage(state: State, address: Address) -> bool: return address in state._storage_tries +def account_exists_and_is_empty(state: State, address: Address) -> bool: + """ + Checks if an account exists and has zero nonce, empty code and zero + balance. + + Parameters + ---------- + state: + The state + address: + Address of the account that needs to be checked. + + Returns + ------- + exists_and_is_empty : `bool` + True if an account exists and has zero nonce, empty code and zero + balance, False otherwise. 
+ + """ + account = get_account_optional(state, address) + return ( + account is not None + and account.nonce == Uint(0) + and account.code == b"" + and account.balance == 0 + ) + + def is_account_alive(state: State, address: Address) -> bool: """ Check whether an account is both in the state and non-empty. @@ -469,16 +508,7 @@ def modify_state( exists and has zero nonce, empty code, and zero balance, it is destroyed. """ set_account(state, address, modify(get_account(state, address), f)) - - account = get_account_optional(state, address) - account_exists_and_is_empty = ( - account is not None - and account.nonce == Uint(0) - and account.code == b"" - and account.balance == 0 - ) - - if account_exists_and_is_empty: + if account_exists_and_is_empty(state, address): destroy_account(state, address) @@ -491,6 +521,9 @@ def move_ether( """ Move funds between accounts. """ + # Capture pre-transaction balance before first modification + capture_pre_balance(state.change_tracker, sender_address, state) + capture_pre_balance(state.change_tracker, recipient_address, state) def reduce_sender_balance(sender: Account) -> None: if sender.balance < amount: @@ -503,6 +536,16 @@ def increase_recipient_balance(recipient: Account) -> None: modify_state(state, sender_address, reduce_sender_balance) modify_state(state, recipient_address, increase_recipient_balance) + sender_new_balance = get_account(state, sender_address).balance + recipient_new_balance = get_account(state, recipient_address).balance + + track_balance_change( + state.change_tracker, sender_address, U256(sender_new_balance) + ) + track_balance_change( + state.change_tracker, recipient_address, U256(recipient_new_balance) + ) + def set_account_balance(state: State, address: Address, amount: U256) -> None: """ @@ -520,12 +563,16 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: The amount that needs to set in balance. """ + # Capture pre-transaction balance before first modification + capture_pre_balance(state.change_tracker, address, state) def set_balance(account: Account) -> None: account.balance = amount modify_state(state, address, set_balance) + track_balance_change(state.change_tracker, address, amount) + def increment_nonce(state: State, address: Address) -> None: """ @@ -546,6 +593,16 @@ def increase_nonce(sender: Account) -> None: modify_state(state, address, increase_nonce) + # Track nonce change for Block Access List + # (for ALL accounts and ALL nonce changes) + # This includes: + # - EOA senders (transaction nonce increments) + # - Contracts performing CREATE/CREATE2 + # - Deployed contracts + # - EIP-7702 authorities + account = get_account(state, address) + track_nonce_change(state.change_tracker, address, account.nonce) + def set_code(state: State, address: Address, code: Bytes) -> None: """ @@ -569,6 +626,13 @@ def write_code(sender: Account) -> None: modify_state(state, address, write_code) + # Only track code changes if it's not setting empty code on a + # newly created address. For newly created addresses, setting + # code to b"" is not a meaningful state change since the address + # had no code to begin with. 
+ if not (code == b"" and address in state.created_accounts): + track_code_change(state.change_tracker, address, code) + def get_storage_original(state: State, address: Address, key: Bytes32) -> U256: """ diff --git a/src/ethereum/forks/amsterdam/vm/__init__.py b/src/ethereum/forks/amsterdam/vm/__init__.py index b2a8c5e2b9..7c2db77ce9 100644 --- a/src/ethereum/forks/amsterdam/vm/__init__.py +++ b/src/ethereum/forks/amsterdam/vm/__init__.py @@ -21,6 +21,7 @@ from ethereum.crypto.hash import Hash32 from ethereum.exceptions import EthereumException +from ..block_access_lists.rlp_types import BlockAccessList from ..blocks import Log, Receipt, Withdrawal from ..fork_types import Address, Authorization, VersionedHash from ..state import State, TransientStorage @@ -73,6 +74,8 @@ class BlockOutput: Total blob gas used in the block. requests : `Bytes` Hash of all the requests in the block. + block_access_list: `BlockAccessList` + The block access list for the block. """ block_gas_used: Uint = Uint(0) @@ -89,6 +92,9 @@ class BlockOutput: ) blob_gas_used: U64 = U64(0) requests: List[Bytes] = field(default_factory=list) + block_access_list: BlockAccessList = field( + default_factory=lambda: BlockAccessList(account_changes=()) + ) @dataclass diff --git a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py index 29909b5fa5..eca5978435 100644 --- a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py +++ b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py @@ -12,6 +12,7 @@ from ethereum.crypto.hash import keccak256 from ethereum.exceptions import InvalidBlock, InvalidSignatureError +from ..block_access_lists.tracker import track_address_access from ..fork_types import Address, Authorization from ..state import account_exists, get_account, increment_nonce, set_code from ..utils.hexadecimal import hex_to_address @@ -134,10 +135,14 @@ def access_delegation( """ state = evm.message.block_env.state + code = get_account(state, address).code if not is_valid_delegation(code): return False, address, code, Uint(0) + # EIP-7928: Track the authority address (delegated account being called) + track_address_access(state.change_tracker, address) + address = Address(code[EOA_DELEGATION_MARKER_LENGTH:]) if address in evm.accessed_addresses: access_gas_cost = GAS_WARM_ACCESS @@ -146,6 +151,9 @@ def access_delegation( access_gas_cost = GAS_COLD_ACCOUNT_ACCESS code = get_account(state, address).code + # EIP-7928: Track delegation target when loaded as call target + track_address_access(state.change_tracker, address) + return True, address, code, access_gas_cost @@ -185,6 +193,10 @@ def set_delegation(message: Message) -> U256: authority_account = get_account(state, authority) authority_code = authority_account.code + # EIP-7928: Track authority account access in BAL even if delegation + # fails + track_address_access(state.change_tracker, authority) + if authority_code and not is_valid_delegation(authority_code): continue diff --git a/src/ethereum/forks/amsterdam/vm/instructions/environment.py b/src/ethereum/forks/amsterdam/vm/instructions/environment.py index 8369043465..39b89567ff 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/environment.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/environment.py @@ -17,6 +17,7 @@ from ethereum.crypto.hash import keccak256 from ethereum.utils.numeric import ceil32 +from ...block_access_lists.tracker import track_address_access from ...fork_types import EMPTY_ACCOUNT from ...state import get_account from ...utils.address 
import to_address_masked @@ -84,7 +85,9 @@ def balance(evm: Evm) -> None: # OPERATION # Non-existent accounts default to EMPTY_ACCOUNT, which has balance 0. - balance = get_account(evm.message.block_env.state, address).balance + state = evm.message.block_env.state + balance = get_account(state, address).balance + track_address_access(state.change_tracker, address) push(evm.stack, balance) @@ -350,7 +353,9 @@ def extcodesize(evm: Evm) -> None: charge_gas(evm, access_gas_cost) # OPERATION - code = get_account(evm.message.block_env.state, address).code + state = evm.message.block_env.state + code = get_account(state, address).code + track_address_access(state.change_tracker, address) codesize = U256(len(code)) push(evm.stack, codesize) @@ -392,7 +397,9 @@ def extcodecopy(evm: Evm) -> None: # OPERATION evm.memory += b"\x00" * extend_memory.expand_by - code = get_account(evm.message.block_env.state, address).code + state = evm.message.block_env.state + code = get_account(state, address).code + track_address_access(state.change_tracker, address) value = buffer_read(code, code_start_index, size) memory_write(evm.memory, memory_start_index, value) @@ -482,7 +489,9 @@ def extcodehash(evm: Evm) -> None: charge_gas(evm, access_gas_cost) # OPERATION - account = get_account(evm.message.block_env.state, address) + state = evm.message.block_env.state + account = get_account(state, address) + track_address_access(state.change_tracker, address) if account == EMPTY_ACCOUNT: codehash = U256(0) diff --git a/src/ethereum/forks/amsterdam/vm/instructions/storage.py b/src/ethereum/forks/amsterdam/vm/instructions/storage.py index e6777c30a0..35ff36bab3 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/storage.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/storage.py @@ -13,6 +13,10 @@ from ethereum_types.numeric import Uint +from ...block_access_lists.tracker import ( + track_storage_read, + track_storage_write, +) from ...state import ( get_storage, get_storage_original, @@ -56,8 +60,14 @@ def sload(evm: Evm) -> None: charge_gas(evm, GAS_COLD_SLOAD) # OPERATION - value = get_storage( - evm.message.block_env.state, evm.message.current_target, key + state = evm.message.block_env.state + value = get_storage(state, evm.message.current_target, key) + + track_storage_read( + state.change_tracker, + evm.message.current_target, + key, + evm.message.block_env.state, ) push(evm.stack, value) @@ -88,6 +98,15 @@ def sstore(evm: Evm) -> None: ) current_value = get_storage(state, evm.message.current_target, key) + # Track the implicit SLOAD that occurs in SSTORE + # This must happen BEFORE charge_gas() so reads are recorded even if OOG + track_storage_read( + state.change_tracker, + evm.message.current_target, + key, + evm.message.block_env.state, + ) + gas_cost = Uint(0) if (evm.message.current_target, key) not in evm.accessed_storage_keys: @@ -126,6 +145,19 @@ def sstore(evm: Evm) -> None: charge_gas(evm, gas_cost) if evm.message.is_static: raise WriteInStaticContext + + # Track storage write BEFORE modifying state + # so we capture the correct pre-value + + track_storage_write( + state.change_tracker, + evm.message.current_target, + key, + new_value, + state, + ) + + # Now modify the storage set_storage(state, evm.message.current_target, key, new_value) # PROGRAM COUNTER diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index fea7a0c1b9..665a6048c7 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ 
b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -16,6 +16,7 @@ from ethereum.utils.numeric import ceil32 +from ...block_access_lists.tracker import track_address_access from ...fork_types import Address from ...state import ( account_has_code_or_nonce, @@ -77,6 +78,8 @@ def generic_create( process_create_message, ) + state = evm.message.block_env.state + call_data = memory_read_bytes( evm.memory, memory_start_position, memory_size ) @@ -90,7 +93,7 @@ def generic_create( evm.return_data = b"" sender_address = evm.message.current_target - sender = get_account(evm.message.block_env.state, sender_address) + sender = get_account(state, sender_address) if ( sender.balance < endowment @@ -104,15 +107,19 @@ def generic_create( evm.accessed_addresses.add(contract_address) if account_has_code_or_nonce( - evm.message.block_env.state, contract_address - ) or account_has_storage(evm.message.block_env.state, contract_address): + state, contract_address + ) or account_has_storage(state, contract_address): increment_nonce( - evm.message.block_env.state, evm.message.current_target + state, + evm.message.current_target, ) push(evm.stack, U256(0)) return - increment_nonce(evm.message.block_env.state, evm.message.current_target) + increment_nonce( + state, + evm.message.current_target, + ) child_message = Message( block_env=evm.message.block_env, @@ -133,6 +140,9 @@ def generic_create( disable_precompiles=False, parent_evm=evm, ) + + track_address_access(state.change_tracker, contract_address) + child_evm = process_create_message(child_message) if child_evm.error: @@ -326,6 +336,9 @@ def generic_call( disable_precompiles=disable_precompiles, parent_evm=evm, ) + + track_address_access(evm.message.block_env.state.change_tracker, to) + child_evm = process_message(child_message) if child_evm.error: @@ -486,6 +499,10 @@ def callcode(evm: Evm) -> None: ) charge_gas(evm, message_call_gas.cost + extend_memory.cost) + track_address_access( + evm.message.block_env.state.change_tracker, code_address + ) + # OPERATION evm.memory += b"\x00" * extend_memory.expand_by sender_balance = get_account( @@ -566,7 +583,11 @@ def selfdestruct(evm: Evm) -> None: if originator in evm.message.block_env.state.created_accounts: # If beneficiary is the same as originator, then # the ether is burnt. 
- set_account_balance(evm.message.block_env.state, originator, U256(0)) + set_account_balance( + evm.message.block_env.state, + originator, + U256(0), + ) evm.accounts_to_delete.add(originator) # HALT the execution @@ -622,6 +643,10 @@ def delegatecall(evm: Evm) -> None: ) charge_gas(evm, message_call_gas.cost + extend_memory.cost) + track_address_access( + evm.message.block_env.state.change_tracker, code_address + ) + # OPERATION evm.memory += b"\x00" * extend_memory.expand_by generic_call( diff --git a/src/ethereum/forks/amsterdam/vm/interpreter.py b/src/ethereum/forks/amsterdam/vm/interpreter.py index 07e9f1d2db..afd66169f1 100644 --- a/src/ethereum/forks/amsterdam/vm/interpreter.py +++ b/src/ethereum/forks/amsterdam/vm/interpreter.py @@ -29,6 +29,12 @@ evm_trace, ) +from ..block_access_lists.tracker import ( + begin_call_frame, + commit_call_frame, + rollback_call_frame, + track_address_access, +) from ..blocks import Log from ..fork_types import Address from ..state import ( @@ -133,6 +139,12 @@ def process_message_call(message: Message) -> MessageCallOutput: message.code = get_account(block_env.state, delegated_address).code message.code_address = delegated_address + # EIP-7928: Track delegation target when loaded as call target + track_address_access( + block_env.state.change_tracker, + delegated_address, + ) + evm = process_message(message) if evm.error: @@ -241,6 +253,11 @@ def process_message(message: Message) -> Evm: # take snapshot of state before processing the message begin_transaction(state, transient_storage) + if hasattr(state, "change_tracker") and state.change_tracker: + begin_call_frame(state.change_tracker) + # Track target address access when processing a message + track_address_access(state.change_tracker, message.current_target) + if message.should_transfer_value and message.value != 0: move_ether( state, message.caller, message.current_target, message.value @@ -251,8 +268,12 @@ def process_message(message: Message) -> Evm: # revert state to the last saved checkpoint # since the message call resulted in an error rollback_transaction(state, transient_storage) + if hasattr(state, "change_tracker") and state.change_tracker: + rollback_call_frame(state.change_tracker) else: commit_transaction(state, transient_storage) + if hasattr(state, "change_tracker") and state.change_tracker: + commit_call_frame(state.change_tracker) return evm diff --git a/src/ethereum/forks/osaka/vm/eoa_delegation.py b/src/ethereum/forks/osaka/vm/eoa_delegation.py index 29909b5fa5..0913fa63ff 100644 --- a/src/ethereum/forks/osaka/vm/eoa_delegation.py +++ b/src/ethereum/forks/osaka/vm/eoa_delegation.py @@ -134,6 +134,7 @@ def access_delegation( """ state = evm.message.block_env.state + code = get_account(state, address).code if not is_valid_delegation(code): return False, address, code, Uint(0) diff --git a/src/ethereum/genesis.py b/src/ethereum/genesis.py index 84519271dc..7ba79d6c26 100644 --- a/src/ethereum/genesis.py +++ b/src/ethereum/genesis.py @@ -259,6 +259,9 @@ def add_genesis_block( if has_field(hardfork.Header, "requests_hash"): fields["requests_hash"] = Hash32(b"\0" * 32) + if has_field(hardfork.Header, "block_access_list_hash"): + fields["block_access_list_hash"] = Hash32(b"\0" * 32) + genesis_header = hardfork.Header(**fields) block_fields = { @@ -273,6 +276,9 @@ def add_genesis_block( if has_field(hardfork.Block, "requests"): block_fields["requests"] = () + if has_field(hardfork.Block, "block_access_list"): + block_fields["block_access_list"] = rlp.encode([]) + genesis_block = 
hardfork.Block(**block_fields) chain.blocks.append(genesis_block) diff --git a/src/ethereum_spec_tools/evm_tools/loaders/fork_loader.py b/src/ethereum_spec_tools/evm_tools/loaders/fork_loader.py index f9be3a8820..5aebe681f3 100644 --- a/src/ethereum_spec_tools/evm_tools/loaders/fork_loader.py +++ b/src/ethereum_spec_tools/evm_tools/loaders/fork_loader.py @@ -120,6 +120,23 @@ def has_signing_hash_155(self) -> bool: """Check if the fork has a `signing_hash_155` function.""" return hasattr(self._module("transactions"), "signing_hash_155") + @property + def build_block_access_list(self) -> Any: + """Build function of the fork.""" + return self._module("block_access_lists").build_block_access_list + + @property + def compute_block_access_list_hash(self) -> Any: + """compute_block_access_list_hash function of the fork.""" + return self._module( + "block_access_lists" + ).compute_block_access_list_hash + + @property + def set_block_access_index(self) -> Any: + """set_block_access_index function of the fork.""" + return self._module("block_access_lists").set_block_access_index + @property def signing_hash_2930(self) -> Any: """signing_hash_2930 function of the fork.""" diff --git a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py index fc09dfd8a3..3032572afc 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py @@ -10,7 +10,7 @@ from typing import Any, Final, TextIO, Tuple, Type, TypeVar from ethereum_rlp import rlp -from ethereum_types.numeric import U64, U256, Uint +from ethereum_types.numeric import U64, U256, Uint, ulen from typing_extensions import override from ethereum import trace @@ -372,6 +372,10 @@ def run_state_test(self) -> Any: self.result.rejected = self.txs.rejected_txs def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: + if self.fork.is_after_fork("amsterdam"): + self.fork.set_block_access_index( + block_env.state.change_tracker, Uint(0) + ) if self.fork.has_compute_requests_hash: self.fork.process_unchecked_system_transaction( block_env=block_env, @@ -386,20 +390,38 @@ def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: data=block_env.parent_beacon_block_root, ) - for i, tx in zip( - self.txs.successfully_parsed, - self.txs.transactions, - strict=True, + for tx_index, (original_idx, tx) in enumerate( + zip( + self.txs.successfully_parsed, + self.txs.transactions, + strict=True, + ) ): self.backup_state() try: self.fork.process_transaction( - block_env, block_output, tx, Uint(i) + block_env, block_output, tx, Uint(tx_index) ) except EthereumException as e: - self.txs.rejected_txs[i] = f"Failed transaction: {e!r}" + self.txs.rejected_txs[original_idx] = ( + f"Failed transaction: {e!r}" + ) self.restore_state() - self.logger.warning(f"Transaction {i} failed: {e!r}") + self.logger.warning( + f"Transaction {original_idx} failed: {e!r}" + ) + + if self.fork.is_after_fork("amsterdam"): + assert block_env.state.change_tracker is not None + num_transactions = ulen( + [tx for tx in self.txs.successfully_parsed if tx] + ) + + # post-execution use n + 1 + post_execution_index = num_transactions + Uint(1) + self.fork.set_block_access_index( + block_env.state.change_tracker, post_execution_index + ) if not self.fork.proof_of_stake: if self.options.state_reward is None: @@ -417,6 +439,11 @@ def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: if self.fork.has_compute_requests_hash: 
self.fork.process_general_purpose_requests(block_env, block_output) + if self.fork.is_after_fork("amsterdam"): + block_output.block_access_list = self.fork.build_block_access_list( + block_env.state.change_tracker.block_access_list_builder + ) + def run_blockchain_test(self) -> None: """ Apply a block on the pre-state. Also includes system operations. diff --git a/src/ethereum_spec_tools/evm_tools/t8n/env.py b/src/ethereum_spec_tools/evm_tools/t8n/env.py index 07a3071c1f..8cadd58c9f 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/env.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/env.py @@ -145,6 +145,9 @@ def read_excess_blob_gas(self, data: Any, t8n: "T8N") -> None: if t8n.fork.has_compute_requests_hash: arguments["requests_hash"] = Hash32(b"\0" * 32) + if t8n.fork.is_after_fork("amsterdam"): + arguments["block_access_list_hash"] = Hash32(b"\0" * 32) + parent_header = t8n.fork.Header(**arguments) self.excess_blob_gas = t8n.fork.calculate_excess_blob_gas( diff --git a/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py b/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py index c60c266965..1aac5e4b3f 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py @@ -268,6 +268,8 @@ class Result: requests_hash: Optional[Hash32] = None requests: Optional[List[Bytes]] = None block_exception: Optional[str] = None + block_access_list: Optional[Any] = None + block_access_list_hash: Optional[Hash32] = None def get_receipts_from_output( self, @@ -323,6 +325,87 @@ def update(self, t8n: "T8N", block_env: Any, block_output: Any) -> None: self.requests = block_output.requests self.requests_hash = t8n.fork.compute_requests_hash(self.requests) + if hasattr(block_output, "block_access_list"): + self.block_access_list = block_output.block_access_list + self.block_access_list_hash = ( + t8n.fork.compute_block_access_list_hash( + block_output.block_access_list + ) + ) + + def _block_access_list_to_json(self, bal: Any) -> Any: + """ + Convert BlockAccessList to JSON format matching the Pydantic models. 
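+
+        Each account entry uses camelCase keys, for example (illustrative):
+
+            {"address": "0x...",
+             "nonceChanges": [{"txIndex": 1, "postNonce": 1}],
+             "balanceChanges": [{"txIndex": 1, "postBalance": 5}]}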
+ """ + account_changes = [] + + for account in bal.account_changes: + account_data: Dict[str, Any] = { + "address": "0x" + account.address.hex() + } + + # Add storage changes if present + if account.storage_changes: + storage_changes = [] + for slot_change in account.storage_changes: + slot_data: Dict[str, Any] = { + "slot": int.from_bytes(slot_change.slot, "big"), + "slotChanges": [], + } + for change in slot_change.changes: + slot_data["slotChanges"].append( + { + "txIndex": int(change.block_access_index), + "postValue": int.from_bytes( + change.new_value, "big" + ), + } + ) + storage_changes.append(slot_data) + account_data["storageChanges"] = storage_changes + + # Add storage reads if present + if account.storage_reads: + account_data["storageReads"] = [ + int.from_bytes(slot, "big") + for slot in account.storage_reads + ] + + # Add balance changes if present + if account.balance_changes: + account_data["balanceChanges"] = [ + { + "txIndex": int(change.block_access_index), + "postBalance": int(change.post_balance), + } + for change in account.balance_changes + ] + + # Add nonce changes if present + if account.nonce_changes: + account_data["nonceChanges"] = [ + { + "txIndex": int(change.block_access_index), + "postNonce": int(change.new_nonce), + } + for change in account.nonce_changes + ] + + # Add code changes if present + if account.code_changes: + account_data["codeChanges"] = [ + { + "txIndex": int(change.block_access_index), + "newCode": "0x" + change.new_code.hex(), + } + for change in account.code_changes + ] + + account_changes.append(account_data) + + # return as list directly + return account_changes + def json_encode_receipts(self) -> Any: """ Encode receipts to JSON. @@ -390,4 +473,15 @@ def to_json(self) -> Any: if self.block_exception is not None: data["blockException"] = self.block_exception + if self.block_access_list is not None: + # Convert BAL to JSON format + data["blockAccessList"] = self._block_access_list_to_json( + self.block_access_list + ) + + if self.block_access_list_hash is not None: + data["blockAccessListHash"] = encode_to_hex( + self.block_access_list_hash + ) + return data diff --git a/whitelist.txt b/whitelist.txt index 4f73730fd5..0d4f493ffc 100644 --- a/whitelist.txt +++ b/whitelist.txt @@ -1335,4 +1335,10 @@ ZeroPaddedHexNumber zfill zkevm Zsh -zsh \ No newline at end of file +zsh +slot1 +slot2 +lexicographically +uint16 +uint128 +630m From 6de5318fa3c6cf28da2ec1b0670ce50d656e9877 Mon Sep 17 00:00:00 2001 From: felipe Date: Thu, 30 Oct 2025 07:42:56 -0600 Subject: [PATCH 009/154] fix(specs): Fix zero value withdrawals BAL tracking (#29) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(specs): Fix zero value withdrawals BAL tracking * docs(specs): rename 'finalize' to 'normalize' in comments * docs(specs): remove reference to uint128 for balance tracking --------- Co-authored-by: Toni Wahrstätter <51536394+nerolation@users.noreply.github.com> Co-authored-by: Toni Wahrstätter --- .../amsterdam/block_access_lists/tracker.py | 25 ++++++++++--------- src/ethereum/forks/amsterdam/fork.py | 21 +++++++++++++--- 2 files changed, 31 insertions(+), 15 deletions(-) diff --git a/src/ethereum/forks/amsterdam/block_access_lists/tracker.py b/src/ethereum/forks/amsterdam/block_access_lists/tracker.py index 66a4f1ebbd..0ea945e7b1 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/tracker.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/tracker.py @@ -97,7 +97,7 @@ class StateChangeTracker: """ Cache of 
pre-transaction balance values, keyed by address. This cache is cleared at the start of each transaction and used by - finalize_transaction_changes to filter out balance changes where + normalize_balance_changes to filter out balance changes where the final balance equals the initial balance. """ @@ -293,7 +293,7 @@ def capture_pre_balance( to ensure we capture the pre-transaction balance correctly. The cache is cleared at the beginning of each transaction. - This is used by finalize_transaction_changes to determine which balance + This is used by normalize_balance_changes to determine which balance changes should be filtered out. Parameters @@ -331,9 +331,7 @@ def track_balance_change( Track a balance change for an account. Records the new balance after any balance-affecting operation, including - transfers, gas payments, block rewards, and withdrawals. The balance is - encoded as a 16-byte value (uint128) which is sufficient for the total - ETH supply. + transfers, gas payments, block rewards, and withdrawals. Parameters ---------- @@ -454,7 +452,7 @@ def handle_in_transaction_selfdestruct( code changes from the current transaction are also removed. Note: Balance changes are handled separately by - finalize_transaction_changes. + normalize_balance_changes. Parameters ---------- @@ -495,22 +493,25 @@ def handle_in_transaction_selfdestruct( ] -def finalize_transaction_changes( +def normalize_balance_changes( tracker: StateChangeTracker, state: "State" ) -> None: """ - Finalize changes for the current transaction. + Normalize balance changes for the current block access index. - This method is called at the end of each transaction execution to filter - out spurious balance changes. It removes all balance changes for addresses - where the post-transaction balance equals the pre-transaction balance. + This method filters out spurious balance changes by removing all balance + changes for addresses where the post-execution balance equals the + pre-execution balance. This is crucial for handling cases like: - In-transaction self-destructs where an account with 0 balance is created and destroyed, resulting in no net balance change - Round-trip transfers where an account receives and sends equal amounts + - Zero-amount withdrawals where the balance doesn't actually change - Only actual state changes are recorded in the Block Access List. + This should be called at the end of any operation that tracks balance + changes (transactions, withdrawals, etc.). Only actual state changes are + recorded in the Block Access List. 
Parameters ---------- diff --git a/src/ethereum/forks/amsterdam/fork.py b/src/ethereum/forks/amsterdam/fork.py index 08f5ad734c..c326bd1b93 100644 --- a/src/ethereum/forks/amsterdam/fork.py +++ b/src/ethereum/forks/amsterdam/fork.py @@ -32,8 +32,9 @@ from .block_access_lists.builder import build_block_access_list from .block_access_lists.rlp_utils import compute_block_access_list_hash from .block_access_lists.tracker import ( - finalize_transaction_changes, + capture_pre_balance, handle_in_transaction_selfdestruct, + normalize_balance_changes, set_block_access_index, track_balance_change, ) @@ -1024,9 +1025,9 @@ def process_transaction( ) destroy_account(block_env.state, address) - # EIP-7928: Finalize transaction changes + # EIP-7928: Normalize balance changes for this transaction # Remove balance changes where post-tx balance equals pre-tx balance - finalize_transaction_changes( + normalize_balance_changes( block_env.state.change_tracker, block_env.state, ) @@ -1069,6 +1070,12 @@ def increase_recipient_balance(recipient: Account) -> None: rlp.encode(wd), ) + # Capture pre-balance before modification (even for zero withdrawals) + # This ensures the address appears in BAL per EIP-7928 + capture_pre_balance( + block_env.state.change_tracker, wd.address, block_env.state + ) + modify_state(block_env.state, wd.address, increase_recipient_balance) # Track balance change for BAL @@ -1078,6 +1085,14 @@ def increase_recipient_balance(recipient: Account) -> None: block_env.state.change_tracker, wd.address, U256(new_balance) ) + # EIP-7928: Normalize balance changes for this withdrawal + # Remove balance changes where post-withdrawal balance + # equals pre-withdrawal balance + normalize_balance_changes( + block_env.state.change_tracker, + block_env.state, + ) + if account_exists_and_is_empty(block_env.state, wd.address): destroy_account(block_env.state, wd.address) From 69e182e60fc790f16891edd9e44dd2b3a2a3930b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Toni=20Wahrst=C3=A4tter?= <51536394+nerolation@users.noreply.github.com> Date: Thu, 30 Oct 2025 21:26:25 +0100 Subject: [PATCH 010/154] fix(specs): static upfront check for create + selfdestruct (#22) --- .../forks/amsterdam/vm/instructions/system.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index 665a6048c7..c9feaabbc7 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -78,18 +78,22 @@ def generic_create( process_create_message, ) + # Check static context first + if evm.message.is_static: + raise WriteInStaticContext + + # Check max init code size early before memory read + if memory_size > U256(MAX_INIT_CODE_SIZE): + raise OutOfGasError + state = evm.message.block_env.state call_data = memory_read_bytes( evm.memory, memory_start_position, memory_size ) - if len(call_data) > MAX_INIT_CODE_SIZE: - raise OutOfGasError create_message_gas = max_message_call_gas(Uint(evm.gas_left)) evm.gas_left -= create_message_gas - if evm.message.is_static: - raise WriteInStaticContext evm.return_data = b"" sender_address = evm.message.current_target @@ -544,6 +548,9 @@ def selfdestruct(evm: Evm) -> None: The current EVM frame. 
""" + if evm.message.is_static: + raise WriteInStaticContext + # STACK beneficiary = to_address_masked(pop(evm.stack)) @@ -563,8 +570,6 @@ def selfdestruct(evm: Evm) -> None: gas_cost += GAS_SELF_DESTRUCT_NEW_ACCOUNT charge_gas(evm, gas_cost) - if evm.message.is_static: - raise WriteInStaticContext originator = evm.message.current_target originator_balance = get_account( From ab9cf9f2459addffb2c06578eee747f4a67d73e2 Mon Sep 17 00:00:00 2001 From: raxhvl Date: Wed, 29 Oct 2025 10:37:49 +0100 Subject: [PATCH 011/154] feat(tests): Implement more EIP-7928 tests refactor(tests): Prevent skips by splitting tests appropriately fix(tests): Use valid inputs to precompile tests chore(tests): linting fixes feat(tests): EIP-7928 test_bal_storage_write_read_cross_frame feat(tests): EIP-7928 test_bal_storage_write_read_same_frame feat(tests): EIP-7928 test_bal_nonexistent_account_access feat(tests): EIP-7928 test_bal_nonexistent_value_transfer feat(tests): EIP-7928 test_bal_precompiles feat(tests): EIP-7928 test_bal_withdrawal_to_coinbase_empty_block feat(tests): EIP-7928 test_bal_withdrawal_to_coinbase feat(tests): EIP-7928 test_bal_withdrawal_largest_amount feat(tests): EIP-7928 test_bal_withdrawal_to_precompiles fix(tests): expectation for nonexistent account in post fix(specs,tests): Fix withdrawal tests for BALs issue with idx==0 - `self.txs.successfully_parsed` is a list of transaction indexes, not transactions. The "if tx" check here would then check `if 0` which parses as a boolean ``False``. This means we would skip counting the tx if index=0 was successful. - Fixes some test expectations where `post_code` was being checked instead of ``new_code``. feat(tests): EIP-7928 test_bal_zero_withdrawal feat(tests): EIP-7928 test_bal_withdrawal_and_new_contract feat(tests): EIP-7928 test_bal_withdrawal_and_selfdestruct feat(tests): EIP-7928 test_bal_multiple_withdrawals_same_address feat(tests): EIP-7928 withdrawal_and_value_transfer_same_address feat(tests): EIP-7928 withdrawal_and_state_access_same_account feat(tests): EIP-7928 test_bal_withdrawal_no_evm_execution feat(tests): EIP-7928 test_bal_withdrawal_to_nonexistent_account feat(tests): EIP-7928 test_bal_withdrawal_empty_block feat(tests): EIP-7928 test_bal_withdrawal_with_transaction feat(tests): EIP-7928 coinbase --- .../evm_tools/t8n/__init__.py | 6 +- .../test_block_access_lists.py | 508 +++++++++++ .../test_block_access_lists_eip4895.py | 810 ++++++++++++++++++ .../test_block_access_lists_opcodes.py | 148 ++++ .../test_cases.md | 27 +- 5 files changed, 1494 insertions(+), 5 deletions(-) create mode 100644 tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4895.py diff --git a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py index 3032572afc..6ef8d72d3b 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py @@ -414,7 +414,11 @@ def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: if self.fork.is_after_fork("amsterdam"): assert block_env.state.change_tracker is not None num_transactions = ulen( - [tx for tx in self.txs.successfully_parsed if tx] + [ + tx_idx + for tx_idx in self.txs.successfully_parsed + if tx_idx is not None + ] ) # post-execution use n + 1 diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py index d6c7948f13..9b6793a87c 100644 --- 
a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py @@ -1417,3 +1417,511 @@ def test_bal_fully_unmutated_account( ) blockchain_test(pre=pre, blocks=[block], post={}) + + +def test_bal_empty_block_no_coinbase( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL correctly handles empty blocks without including coinbase. + + When a block has no transactions and no withdrawals, the coinbase/fee + recipient receives no fees and should not be included in the BAL. + """ + coinbase = pre.fund_eoa(amount=0) + + block = Block( + txs=[], + withdrawals=None, + fee_recipient=coinbase, + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + # Coinbase must NOT be included - receives no fees + coinbase: None, + } + ), + ) + + blockchain_test(pre=pre, blocks=[block], post={}) + + +def test_bal_coinbase_zero_tip( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, +) -> None: + """Ensure BAL includes coinbase even when priority fee is zero.""" + alice_initial_balance = 1_000_000 + alice = pre.fund_eoa(amount=alice_initial_balance) + bob = pre.fund_eoa(amount=0) + coinbase = pre.fund_eoa(amount=0) # fee recipient + + intrinsic_gas_calculator = fork.transaction_intrinsic_cost_calculator() + intrinsic_gas = intrinsic_gas_calculator( + calldata=b"", + contract_creation=False, + access_list=[], + ) + tx_gas_limit = intrinsic_gas + 1000 + + # Calculate base fee + genesis_env = Environment(base_fee_per_gas=0x7) + base_fee_per_gas = fork.base_fee_per_gas_calculator()( + parent_base_fee_per_gas=int(genesis_env.base_fee_per_gas or 0), + parent_gas_used=0, + parent_gas_limit=genesis_env.gas_limit, + ) + + # Set gas_price equal to base_fee so tip = 0 + tx = Transaction( + sender=alice, + to=bob, + value=5, + gas_limit=tx_gas_limit, + gas_price=base_fee_per_gas, + ) + + alice_final_balance = ( + alice_initial_balance - 5 - (intrinsic_gas * base_fee_per_gas) + ) + + block = Block( + txs=[tx], + fee_recipient=coinbase, + header_verify=Header(base_fee_per_gas=base_fee_per_gas), + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + balance_changes=[ + BalBalanceChange( + tx_index=1, post_balance=alice_final_balance + ) + ], + ), + bob: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=5) + ] + ), + # Coinbase must be included even with zero tip + coinbase: BalAccountExpectation.empty(), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1, balance=alice_final_balance), + bob: Account(balance=5), + }, + genesis_environment=genesis_env, + ) + + +@pytest.mark.parametrize( + "value", + [ + pytest.param(10**18, id="with_value"), + pytest.param(0, id="no_value"), + ], +) +@pytest.mark.with_all_precompiles +def test_bal_precompile_funded( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + precompile: Address, + value: int, +) -> None: + """ + Ensure BAL records precompile value transfer. + + Alice sends value to precompile (pure value transfer). + If value > 0: BAL must include balance_changes. + If value = 0: BAL must have empty balance_changes. 
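+    In both cases the precompile address itself is expected to appear in the
+    BAL as an accessed account.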
+ """ + alice = pre.fund_eoa() + + addr_int = int.from_bytes(precompile, "big") + + # Map precompile addresses to their required minimal input sizes + # - Most precompiles accept zero-padded input of appropriate length + # - For 0x0a (POINT_EVALUATION), use a known valid input from mainnet + if addr_int == 0x0A: + # Valid point evaluation input from mainnet tx: + # https://etherscan.io/tx/0xcb3dc8f3b14f1cda0c16a619a112102a8ec70dce1b3f1b28272227cf8d5fbb0e + tx_data = ( + bytes.fromhex( + # versioned_hash (32) + "018156B94FE9735E573BAB36DAD05D60FEB720D424CCD20AAF719343C31E4246" + ) + + bytes.fromhex( + # z (32) + "019123BCB9D06356701F7BE08B4494625B87A7B02EDC566126FB81F6306E915F" + ) + + bytes.fromhex( + # y (32) + "6C2EB1E94C2532935B8465351BA1BD88EABE2B3FA1AADFF7D1CD816E8315BD38" + ) + + bytes.fromhex( + # kzg_commitment (48) + "A9546D41993E10DF2A7429B8490394EA9EE62807BAE6F326D1044A51581306F58D4B9DFD5931E044688855280FF3799E" + ) + + bytes.fromhex( + # kzg_proof (48) + "A2EA83D9391E0EE42E0C650ACC7A1F842A7D385189485DDB4FD54ADE3D9FD50D608167DCA6C776AAD4B8AD5C20691BFE" + ) + ) + else: + precompile_min_input = { + 0x01: 128, # ECRECOVER + 0x02: 0, # SHA256 (accepts empty) + 0x03: 0, # RIPEMD160 (accepts empty) + 0x04: 0, # IDENTITY (accepts empty) + 0x05: 96, # MODEXP + 0x06: 128, # BN256ADD + 0x07: 96, # BN256MUL + 0x08: 0, # BN256PAIRING (empty is valid) + 0x09: 213, # BLAKE2F + 0x0B: 256, # BLS12_G1_ADD + 0x0C: 160, # BLS12_G1_MSM + 0x0D: 512, # BLS12_G2_ADD + 0x0E: 288, # BLS12_G2_MSM + 0x0F: 384, # BLS12_PAIRING + 0x10: 64, # BLS12_MAP_FP_TO_G1 + 0x11: 128, # BLS12_MAP_FP2_TO_G2 + 0x100: 160, # P256VERIFY + } + + input_size = precompile_min_input.get(addr_int, 0) + tx_data = bytes([0x00] * input_size if input_size > 0 else []) + + tx = Transaction( + sender=alice, + to=precompile, + value=value, + gas_limit=5_000_000, + data=tx_data, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + precompile: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=value) + ] + if value > 0 + else [], + storage_reads=[], + storage_changes=[], + code_changes=[], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + }, + ) + + +@pytest.mark.parametrize_by_fork( + "precompile", + lambda fork: [ + pytest.param(addr, id=f"0x{int.from_bytes(addr, 'big'):02x}") + for addr in fork.precompiles(block_number=0, timestamp=0) + ], +) +def test_bal_precompile_call( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + precompile: Address, +) -> None: + """ + Ensure BAL records precompile when called via contract. + + Alice calls Oracle contract which calls precompile. + BAL must include precompile with no balance/storage/code changes. 
+ """ + alice = pre.fund_eoa() + + # Oracle contract that calls the precompile + oracle = pre.deploy_contract( + code=Op.CALL(100_000, precompile, 0, 0, 0, 0, 0) + Op.STOP + ) + + tx = Transaction( + sender=alice, + to=oracle, + gas_limit=200_000, + gas_price=0xA, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + oracle: BalAccountExpectation.empty(), + precompile: BalAccountExpectation.empty(), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + }, + ) + + +@pytest.mark.parametrize( + "value", + [ + pytest.param(0, id="zero_value"), + pytest.param(10**18, id="positive_value"), + ], +) +def test_bal_nonexistent_value_transfer( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + value: int, +) -> None: + """ + Ensure BAL captures non-existent account on value transfer. + + Alice sends value directly to non-existent Bob. + """ + alice = pre.fund_eoa() + bob = Address(0xB0B) + + tx = Transaction( + sender=alice, + to=bob, + value=value, + gas_limit=100_000, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + bob: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=value) + ] + if value > 0 + else [], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + bob: Account(balance=value) if value > 0 else Account.NONEXISTENT, + }, + ) + + +@pytest.mark.parametrize( + "account_access_opcode", + [ + pytest.param( + lambda target_addr: Op.BALANCE(target_addr), + id="balance", + ), + pytest.param( + lambda target_addr: Op.EXTCODESIZE(target_addr), + id="extcodesize", + ), + pytest.param( + lambda target_addr: Op.EXTCODECOPY(target_addr, 0, 0, 32), + id="extcodecopy", + ), + pytest.param( + lambda target_addr: Op.EXTCODEHASH(target_addr), + id="extcodehash", + ), + pytest.param( + lambda target_addr: Op.STATICCALL(0, target_addr, 0, 0, 0, 0), + id="staticcall", + ), + pytest.param( + lambda target_addr: Op.DELEGATECALL(0, target_addr, 0, 0, 0, 0), + id="delegatecall", + ), + ], +) +def test_bal_nonexistent_account_access_read_only( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + account_access_opcode: Callable[[Address], Op], +) -> None: + """ + Ensure BAL captures non-existent account access via read-only opcodes. + + Alice calls Oracle contract which uses read-only opcodes to access + non-existent Bob (BALANCE, EXTCODESIZE, EXTCODECOPY, EXTCODEHASH, + STATICCALL, DELEGATECALL). 
+ """ + alice = pre.fund_eoa() + bob = Address(0xB0B) + oracle_balance = 2 * 10**18 + + oracle_code = account_access_opcode(bob) + oracle = pre.deploy_contract(code=oracle_code, balance=oracle_balance) + + tx = Transaction( + sender=alice, + to=oracle, + gas_limit=1_000_000, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + oracle: BalAccountExpectation.empty(), + bob: BalAccountExpectation.empty(), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + oracle: Account(balance=oracle_balance), + bob: Account.NONEXISTENT, + }, + ) + + +@pytest.mark.parametrize( + "opcode_type,value", + [ + pytest.param("call", 0, id="call_zero_value"), + pytest.param("call", 10**18, id="call_positive_value"), + pytest.param("callcode", 0, id="callcode_zero_value"), + pytest.param("callcode", 10**18, id="callcode_positive_value"), + ], +) +def test_bal_nonexistent_account_access_value_transfer( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + opcode_type: str, + value: int, +) -> None: + """ + Ensure BAL captures non-existent account access via CALL/CALLCODE + with value. + + Alice calls Oracle contract which uses CALL or CALLCODE to access + non-existent Bob with value transfer. + - CALL: Transfers value from Oracle to Bob + - CALLCODE: Self-transfer (net zero), Bob accessed for code + """ + alice = pre.fund_eoa() + bob = Address(0xB0B) + oracle_balance = 2 * 10**18 + + if opcode_type == "call": + oracle_code = Op.CALL(100_000, bob, value, 0, 0, 0, 0) + else: # callcode + oracle_code = Op.CALLCODE(100_000, bob, value, 0, 0, 0, 0) + + oracle = pre.deploy_contract(code=oracle_code, balance=oracle_balance) + + tx = Transaction( + sender=alice, + to=oracle, + gas_limit=1_000_000, + ) + + # Calculate expected balances + if opcode_type == "call" and value > 0: + # CALL: Oracle loses value, Bob gains value + oracle_final_balance = oracle_balance - value + bob_final_balance = value + bob_has_balance_change = True + oracle_has_balance_change = True + elif opcode_type == "callcode" and value > 0: + # CALLCODE: Self-transfer (net zero), Bob just accessed for code + oracle_final_balance = oracle_balance + bob_final_balance = 0 + bob_has_balance_change = False + oracle_has_balance_change = False + else: + # Zero value + oracle_final_balance = oracle_balance + bob_final_balance = 0 + bob_has_balance_change = False + oracle_has_balance_change = False + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + oracle: BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + tx_index=1, post_balance=oracle_final_balance + ) + ] + if oracle_has_balance_change + else [], + ), + bob: BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + tx_index=1, post_balance=bob_final_balance + ) + ] + if bob_has_balance_change + else [], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + oracle: Account(balance=oracle_final_balance), + bob: Account(balance=bob_final_balance) + if bob_has_balance_change + else Account.NONEXISTENT, + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4895.py 
b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4895.py new file mode 100644 index 0000000000..edb8295c17 --- /dev/null +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4895.py @@ -0,0 +1,810 @@ +"""Tests for the effects of EIP-4895 withdrawals on EIP-7928.""" + +import pytest +from execution_testing import ( + EOA, + Account, + Address, + Alloc, + BalAccountExpectation, + BalBalanceChange, + BalCodeChange, + BalNonceChange, + BalStorageChange, + BalStorageSlot, + Block, + BlockAccessListExpectation, + BlockchainTestFiller, + Environment, + Fork, + Header, + Initcode, + Op, + Transaction, + Withdrawal, + compute_create_address, +) + +from .spec import ref_spec_7928 + +REFERENCE_SPEC_GIT_PATH = ref_spec_7928.git_path +REFERENCE_SPEC_VERSION = ref_spec_7928.version + +pytestmark = pytest.mark.valid_from("Amsterdam") + +GWEI = 10**9 + + +def test_bal_withdrawal_empty_block( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures withdrawal balance changes in empty block. + + Charlie starts with 1 gwei balance (existing account). + Block with 0 transactions and 1 withdrawal of 10 gwei to Charlie. + Charlie ends with 11 gwei balance. + """ + charlie = pre.fund_eoa(amount=1 * GWEI) + + block = Block( + txs=[], + withdrawals=[ + Withdrawal( + index=0, + validator_index=0, + address=charlie, + amount=10, + ) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + charlie: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=11 * GWEI) + ], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + charlie: Account(balance=11 * GWEI), + }, + ) + + +def test_bal_withdrawal_and_transaction( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures both transaction and withdrawal balance changes. + + Alice starts with 1 ETH, Bob starts with 0, Charlie starts with 0. + Alice sends 5 wei to Bob. + Charlie receives 10 gwei withdrawal. + Bob ends with 5 wei, Charlie ends with 10 gwei. + """ + alice = pre.fund_eoa() + bob = pre.fund_eoa(amount=0) + charlie = pre.fund_eoa(amount=0) + + tx = Transaction( + sender=alice, + to=bob, + value=5, + max_fee_per_gas=50, + max_priority_fee_per_gas=5, + ) + + block = Block( + txs=[tx], + withdrawals=[ + Withdrawal( + index=0, + validator_index=0, + address=charlie, + amount=10, + ) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + bob: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=5) + ], + ), + charlie: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=2, post_balance=10 * GWEI) + ], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + bob: Account(balance=5), + charlie: Account(balance=10 * GWEI), + }, + ) + + +def test_bal_withdrawal_to_nonexistent_account( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures withdrawal to non-existent account. + + Charlie is a non-existent address (not in pre-state). + Block with 0 transactions and 1 withdrawal of 10 gwei to Charlie. + Charlie ends with 10 gwei balance. 
+ """ + charlie = Address(0xCC) + + block = Block( + txs=[], + withdrawals=[ + Withdrawal( + index=0, + validator_index=0, + address=charlie, + amount=10, + ) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + charlie: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=10 * GWEI) + ], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + charlie: Account(balance=10 * GWEI), + }, + ) + + +def test_bal_withdrawal_no_evm_execution( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures withdrawal without triggering EVM execution. + + Oracle contract starts with 0 balance and storage slot 0x01 = 0x42. + Oracle's code writes 0xFF to slot 0x01 when called. + Block with 0 transactions and 1 withdrawal of 10 gwei to Oracle. + Storage slot 0x01 remains 0x42 (EVM never executes). + """ + oracle = pre.deploy_contract( + code=Op.SSTORE(0x01, 0xFF), + storage={0x01: 0x42}, + ) + + block = Block( + txs=[], + withdrawals=[ + Withdrawal( + index=0, + validator_index=0, + address=oracle, + amount=10, + ) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + oracle: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=10 * GWEI) + ], + storage_reads=[], + storage_changes=[], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + oracle: Account( + balance=10 * GWEI, + storage={0x01: 0x42}, + ), + }, + ) + + +def test_bal_withdrawal_and_state_access_same_account( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures both state access and withdrawal to same address. + + Oracle contract starts with 0 balance and storage slot 0x01 = 0x42. + Alice calls Oracle (reads slot 0x01, writes 0x99 to slot 0x02). + Oracle receives withdrawal of 10 gwei. + Both state access and withdrawal are captured in BAL. + """ + alice = pre.fund_eoa() + oracle = pre.deploy_contract( + code=Op.SLOAD(0x01) + Op.SSTORE(0x02, 0x99), + storage={0x01: 0x42}, + ) + + tx = Transaction( + sender=alice, + to=oracle, + gas_limit=1_000_000, + gas_price=0xA, + ) + + block = Block( + txs=[tx], + withdrawals=[ + Withdrawal( + index=0, + validator_index=0, + address=oracle, + amount=10, + ) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + oracle: BalAccountExpectation( + storage_reads=[0x01], + storage_changes=[ + BalStorageSlot( + slot=0x02, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=0x99) + ], + ) + ], + balance_changes=[ + BalBalanceChange(tx_index=2, post_balance=10 * GWEI) + ], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + oracle: Account( + balance=10 * GWEI, + storage={0x01: 0x42, 0x02: 0x99}, + ), + }, + ) + + +def test_bal_withdrawal_and_value_transfer_same_address( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures both value transfer and withdrawal to same address. + + Alice starts with 1 ETH, Bob starts with 0. + Alice sends 5 gwei to Bob. + Bob receives withdrawal of 10 gwei. + Bob ends with 15 gwei (5 from tx + 10 from withdrawal). 
+ """ + alice = pre.fund_eoa() + bob = pre.fund_eoa(amount=0) + + tx = Transaction( + sender=alice, + to=bob, + value=5 * GWEI, + gas_price=0xA, + ) + + block = Block( + txs=[tx], + withdrawals=[ + Withdrawal( + index=0, + validator_index=0, + address=bob, + amount=10, + ) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + bob: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=5 * GWEI), + BalBalanceChange(tx_index=2, post_balance=15 * GWEI), + ], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + bob: Account(balance=15 * GWEI), + }, + ) + + +def test_bal_multiple_withdrawals_same_address( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL accumulates multiple withdrawals to same address. + + Charlie starts with 0 balance. + Block empty block with 3 withdrawals to Charlie: 5 gwei, 10 gwei, 15 gwei. + Charlie ends with 30 gwei balance (cumulative). + """ + charlie = pre.fund_eoa(amount=0) + + block = Block( + txs=[], + withdrawals=[ + Withdrawal(index=i, validator_index=i, address=charlie, amount=amt) + for i, amt in enumerate([5, 10, 15]) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + charlie: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=30 * GWEI) + ], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + charlie: Account(balance=30 * GWEI), + }, + ) + + +def test_bal_withdrawal_and_selfdestruct( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures withdrawal to self-destructed contract address. + + Oracle contract starts with 100 gwei balance. + Alice triggers Oracle to self-destruct, sending balance to Bob. + Oracle receives withdrawal of 50 gwei after self-destructing. + Oracle ends with 50 gwei (funded by withdrawal). + """ + alice = pre.fund_eoa() + bob = pre.fund_eoa(amount=0) + oracle = pre.deploy_contract( + balance=100 * GWEI, + code=Op.SELFDESTRUCT(bob), + ) + + tx = Transaction( + sender=alice, + to=oracle, + gas_limit=1_000_000, + gas_price=0xA, + ) + + block = Block( + txs=[tx], + withdrawals=[ + Withdrawal( + index=0, + validator_index=0, + address=oracle, + amount=50, + ) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + bob: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=100 * GWEI) + ], + ), + oracle: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=0), + BalBalanceChange(tx_index=2, post_balance=50 * GWEI), + ], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + bob: Account(balance=100 * GWEI), + oracle: Account(balance=50 * GWEI), + }, + ) + + +def test_bal_withdrawal_and_new_contract( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures withdrawal to newly created contract. + + Alice deploys Oracle contract with 5 gwei initial balance. + Oracle receives withdrawal of 10 gwei in same block. + Oracle ends with 15 gwei (5 from deployment + 10 from withdrawal). 
+ """ + alice = pre.fund_eoa() + + code = Op.STOP + initcode = Initcode(deploy_code=code) + oracle = compute_create_address(address=alice) + + tx = Transaction( + sender=alice, + to=None, + data=initcode, + value=5 * GWEI, + gas_limit=1_000_000, + gas_price=0xA, + ) + + block = Block( + txs=[tx], + withdrawals=[ + Withdrawal( + index=0, + validator_index=0, + address=oracle, + amount=10, + ) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + oracle: BalAccountExpectation( + code_changes=[BalCodeChange(tx_index=1, new_code=code)], + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=5 * GWEI), + BalBalanceChange(tx_index=2, post_balance=15 * GWEI), + ], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + oracle: Account(balance=15 * GWEI, code=code), + }, + ) + + +@pytest.mark.parametrize( + "initial_balance", + [ + pytest.param(5 * GWEI, id="existing_account"), + pytest.param(0, id="nonexistent_account"), + ], +) +def test_bal_zero_withdrawal( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + initial_balance: int, +) -> None: + """ + Ensure BAL handles zero-amount withdrawal correctly. + + Charlie either exists with initial balance or is non-existent. + Block with 0 transactions and 1 zero-amount withdrawal to Charlie. + Charlie appears in BAL but with empty changes, balance unchanged. + """ + if initial_balance > 0: + charlie = pre.fund_eoa(amount=initial_balance) + else: + charlie = EOA(0xCC) + + block = Block( + txs=[], + withdrawals=[ + Withdrawal( + index=0, + validator_index=0, + address=charlie, + amount=0, + ) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + charlie: BalAccountExpectation.empty(), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + charlie: Account(balance=initial_balance) + if initial_balance > 0 + else Account.NONEXISTENT, + }, + ) + + +@pytest.mark.parametrize_by_fork( + "precompile", + lambda fork: [ + pytest.param(addr, id=f"0x{int.from_bytes(addr, 'big'):02x}") + for addr in fork.precompiles(block_number=0, timestamp=0) + ], +) +def test_bal_withdrawal_to_precompiles( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + precompile: Address, +) -> None: + """ + Ensure BAL captures withdrawal to precompile addresses. + + Block with 0 transactions and 1 withdrawal of 10 gwei to precompile. + Precompile ends with 10 gwei balance. + """ + block = Block( + txs=[], + withdrawals=[ + Withdrawal( + index=0, + validator_index=0, + address=precompile, + amount=10, + ) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + precompile: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=10 * GWEI) + ], + storage_reads=[], + storage_changes=[], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + precompile: Account(balance=10 * GWEI), + }, + ) + + +def test_bal_withdrawal_largest_amount( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures withdrawal with largest amount. + + Block with 0 transactions and 1 withdrawal of maximum + uint64 value (2^64-1)Gwei to Charlie. + Charlie ends with (2^64-1) Gwei. 
+ """ + charlie = pre.fund_eoa(amount=0) + max_amount = 2**64 - 1 + + block = Block( + txs=[], + withdrawals=[ + Withdrawal( + index=0, + validator_index=0, + address=charlie, + amount=max_amount, + ) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + charlie: BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + tx_index=1, post_balance=max_amount * GWEI + ) + ], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + charlie: Account(balance=max_amount * GWEI), + }, + ) + + +def test_bal_withdrawal_to_coinbase( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, +) -> None: + """ + Ensure BAL captures withdrawal to coinbase address. + + Block with 1 transaction and 1 withdrawal to coinbase/fee recipient. + Coinbase receives both transaction fees and withdrawal. + """ + alice = pre.fund_eoa() + bob = pre.fund_eoa(amount=0) + coinbase = pre.fund_eoa(amount=0) + + intrinsic_gas_calculator = fork.transaction_intrinsic_cost_calculator() + intrinsic_gas = intrinsic_gas_calculator() + tx_gas_limit = intrinsic_gas + 1000 + gas_price = 0xA + + tx = Transaction( + sender=alice, + to=bob, + value=5, + gas_limit=tx_gas_limit, + gas_price=gas_price, + ) + + # Calculate tip to coinbase + genesis_env = Environment(base_fee_per_gas=0x7) + base_fee_per_gas = fork.base_fee_per_gas_calculator()( + parent_base_fee_per_gas=int(genesis_env.base_fee_per_gas or 0), + parent_gas_used=0, + parent_gas_limit=genesis_env.gas_limit, + ) + tip_to_coinbase = (gas_price - base_fee_per_gas) * intrinsic_gas + coinbase_final_balance = tip_to_coinbase + (10 * GWEI) + + block = Block( + txs=[tx], + fee_recipient=coinbase, + header_verify=Header(base_fee_per_gas=base_fee_per_gas), + withdrawals=[ + Withdrawal( + index=0, + validator_index=0, + address=coinbase, + amount=10, + ) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + bob: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=5) + ], + ), + coinbase: BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + tx_index=1, post_balance=tip_to_coinbase + ), + BalBalanceChange( + tx_index=2, post_balance=coinbase_final_balance + ), + ], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + bob: Account(balance=5), + coinbase: Account(balance=coinbase_final_balance), + }, + genesis_environment=genesis_env, + ) + + +def test_bal_withdrawal_to_coinbase_empty_block( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures withdrawal to coinbase when there are no transactions. + + Empty block with 1 withdrawal of 10 gwei to coinbase/fee recipient. + Coinbase receives only withdrawal (no transaction fees). 
+ """ + coinbase = pre.fund_eoa(amount=0) + + block = Block( + txs=[], + fee_recipient=coinbase, + withdrawals=[ + Withdrawal( + index=0, + validator_index=0, + address=coinbase, + amount=10, + ) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + coinbase: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=10 * GWEI) + ], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + coinbase: Account(balance=10 * GWEI), + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py index 5e5ebaefe3..17799d3655 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py @@ -15,12 +15,14 @@ """ from enum import Enum +from typing import Callable import pytest from execution_testing import ( Account, Alloc, BalAccountExpectation, + BalNonceChange, BalStorageChange, BalStorageSlot, Block, @@ -606,3 +608,149 @@ def test_bal_extcodecopy_and_oog( target_contract: Account(), }, ) + + +def test_bal_storage_write_read_same_frame( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures write precedence over read in same call frame. + + Oracle writes to slot 0x01, then reads from slot 0x01 in same call. + The write shadows the read - only the write appears in BAL. + """ + alice = pre.fund_eoa() + + oracle_code = ( + Op.SSTORE(0x01, 0x42) # Write 0x42 to slot 0x01 + + Op.SLOAD(0x01) # Read from slot 0x01 + + Op.STOP + ) + oracle = pre.deploy_contract(code=oracle_code, storage={0x01: 0x99}) + + tx = Transaction( + sender=alice, + to=oracle, + gas_limit=1_000_000, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + oracle: BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0x01, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=0x42) + ], + ) + ], + storage_reads=[], # Empty! Write shadows the read + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + oracle: Account(storage={0x01: 0x42}), + }, + ) + + +@pytest.mark.parametrize( + "call_opcode", + [ + pytest.param( + lambda target: Op.CALL(100_000, target, 0, 0, 0, 0, 0), id="call" + ), + pytest.param( + lambda target: Op.DELEGATECALL(100_000, target, 0, 0, 0, 0), + id="delegatecall", + ), + pytest.param( + lambda target: Op.CALLCODE(100_000, target, 0, 0, 0, 0, 0), + id="callcode", + ), + ], +) +def test_bal_storage_write_read_cross_frame( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + call_opcode: Callable[[Bytecode], Bytecode], +) -> None: + """ + Ensure BAL captures write precedence over read across call frames. + + Frame 1: Read slot 0x01 (0x99), write 0x42, then call itself. + Frame 2: Read slot 0x01 (0x42), see it's 0x42 and return. + Both reads are shadowed by the write - only write appears in BAL. + """ + alice = pre.fund_eoa() + + # Oracle code: + # 1. Read slot 0x01 (initial: 0x99, recursive: 0x42) + # 2. If value == 0x42, return (exit recursion) + # 3. Write 0x42 to slot 0x01 + # 4. 
Call itself recursively + oracle_code = ( + Op.SLOAD(0x01) # Load value from slot 0x01 + + Op.PUSH1(0x42) # Push 0x42 for comparison + + Op.EQ # Check if loaded value == 0x42 + + Op.PUSH1(0x1D) # Jump destination (after SSTORE + CALL) + + Op.JUMPI # If equal, jump to end (exit recursion) + + Op.PUSH1(0x42) # Value to write + + Op.PUSH1(0x01) # Slot 0x01 + + Op.SSTORE # Write 0x42 to slot 0x01 + + call_opcode(Op.ADDRESS) # Call itself + + Op.JUMPDEST # Jump destination for exit + + Op.STOP + ) + + oracle = pre.deploy_contract(code=oracle_code, storage={0x01: 0x99}) + + tx = Transaction( + sender=alice, + to=oracle, + gas_limit=1_000_000, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + oracle: BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0x01, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=0x42) + ], + ) + ], + storage_reads=[], # Empty! Write shadows both reads + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + oracle: Account(storage={0x01: 0x42}), + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 945330b628..351395479d 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -29,8 +29,8 @@ | `test_bal_pure_contract_call` | Ensure BAL captures contract access for pure computation calls | Alice calls `PureContract` that performs pure arithmetic (ADD operation) without storage or balance changes | BAL MUST include Alice and `PureContract` in `account_changes`, and `nonce_changes` for Alice. | ✅ Completed | | `test_bal_create2_to_A_read_then_selfdestruct` | BAL records balance change for A and storage access (no persistent change) | Tx0: Alice sends ETH to address **A**. Tx1: Deployer `CREATE2` a contract **at A**; contract does `SLOAD(B)` and immediately `SELFDESTRUCT(beneficiary=X)` in the same tx. | BAL **MUST** include **A** with `balance_changes` (funding in Tx0 and transfer on selfdestruct in Tx1). BAL **MUST** include storage key **B** as an accessed `StorageKey`, and **MUST NOT** include **B** under `storage_changes` (no persistence due to same-tx create+destruct). | 🟡 Planned | | `test_bal_create2_to_A_write_then_selfdestruct` | BAL records balance change for A and storage access even if a write occurred (no persistent change) | Tx0: Alice sends ETH to **A**. Tx1: Deployer `CREATE2` contract **at A**; contract does `SSTORE(B, v)` (optionally `SLOAD(B)`), then `SELFDESTRUCT(beneficiary=Y)` in the same tx. | BAL **MUST** include **A** with `balance_changes` (Tx0 fund; Tx1 outflow to `Y`). BAL **MUST** include **B** as `StorageKey` accessed, and **MUST NOT** include **B** under `storage_changes` (ephemeral write discarded because the contract was created and destroyed in the same tx). | 🟡 Planned | -| `test_bal_precompile_funded_then_called` | BAL records precompile with balance change (fund) and access (call) | **Tx0**: Alice sends `1 ETH` to `ecrecover` (0x01). **Tx1**: Alice (or Bob) calls `ecrecover` with valid input and `0 ETH`. | BAL **MUST** include address `0x01` with `balance_changes` (from Tx0). No `storage_changes` or `code_changes`. 
| 🟡 Planned | -| `test_bal_precompile_call_only` | BAL records precompile when called with no balance change | Alice calls `ecrecover` (0x01) with a valid input, sending **0 ETH**. | BAL **MUST** include address `0x01` in access list, with **no** `balance_changes`, `storage_changes`, or `code_changes`. | 🟡 Planned | +| `test_bal_precompile_funded` | BAL records precompile value transfer with or without balance change | Alice sends value to precompile (all precompiles) via direct transaction. Parameterized: (1) with value (1 ETH), (2) without value (0 ETH). | For with_value: BAL **MUST** include precompile with `balance_changes`. For no_value: BAL **MUST** include precompile with empty `balance_changes`. No `storage_changes` or `code_changes` in either case. | ✅ Completed | +| `test_bal_precompile_call` | BAL records precompile when called via contract | Alice calls Oracle contract which calls precompile (all precompiles) via CALL opcode with 0 ETH | BAL **MUST** include Alice with `nonce_changes`, Oracle with empty changes, and precompile with empty changes. No `balance_changes`, `storage_changes`, or `code_changes` for precompile. | ✅ Completed | | `test_bal_7702_delegated_create` | BAL tracks EIP-7702 delegation indicator write and contract creation | Alice sends a type-4 (7702) tx authorizing herself to delegate to `Deployer` code which executes `CREATE` | BAL MUST include for **Alice**: `code_changes` (delegation indicator), `nonce_changes` (increment from 7702 processing), and `balance_changes` (post-gas). For **Child**: `code_changes` (runtime bytecode) and `nonce_changes = 1`. | 🟡 Planned | | `test_bal_7702_delegation_create` | Ensure BAL captures creation of EOA delegation | Alice authorizes delegation to contract `Oracle`. Transaction sends 10 wei to Bob. Two variants: (1) Self-funded: Alice sends 7702 tx herself. (2) Sponsored: `Relayer` sends 7702 tx on Alice's behalf. | BAL **MUST** include Alice: `code_changes` (delegation designation `0xef0100\|\|address(Oracle)`),`nonce_changes` (increment). Bob: `balance_changes` (receives 10 wei). For sponsored variant, BAL **MUST** also include `Relayer`:`nonce_changes`.`Oracle` **MUST NOT** be present in BAL - the account is never accessed. | ✅ Completed | | `test_bal_7702_delegation_update` | Ensure BAL captures update of existing EOA delegation | Alice first delegates to `Oracle1`, then in second tx updates delegation to `Oracle2`. Each transaction sends 10 wei to Bob. Two variants: (1) Self-funded: Alice sends both 7702 txs herself. (2) Sponsored: `Relayer` sends both 7702 txs on Alice's behalf. | BAL **MUST** include Alice: first tx has `code_changes` (delegation designation `0xef0100\|\|address(Oracle1)`),`nonce_changes`. Second tx has`code_changes` (delegation designation `0xef0100\|\|address(Oracle2)`),`nonce_changes`. Bob:`balance_changes` (receives 10 wei on each tx). For sponsored variant, BAL **MUST** also include `Relayer`:`nonce_changes` for both transactions. `Oracle1` and `Oracle2` **MUST NOT** be present in BAL - accounts are never accessed. | ✅ Completed | @@ -58,5 +58,24 @@ | `test_bal_invalid_complex_corruption` | Verify clients reject blocks with multiple BAL corruptions | Alice calls contract with storage writes; BAL has multiple issues: wrong account, missing nonce, wrong storage value | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** detect any corruption regardless of other issues. 
| ✅ Completed | | `test_bal_invalid_missing_account` | Verify clients reject blocks with missing required account entries in BAL | Alice sends transaction to Bob; BAL modifier removes Bob's account entry (recipient should be included) | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate all accessed accounts are present. | ✅ Completed | | `test_bal_invalid_balance_value` | Verify clients reject blocks with incorrect balance values in BAL | Alice sends value to Bob; BAL modifier changes balance to incorrect value | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate balance change values match actual state transitions. | ✅ Completed | -| `test_bal_empty_block_no_coinbase` | Verify BAL correctly handles empty blocks without including coinbase | Block with 0 transactions, no withdrawals. System contracts may perform operations (EIP-2935 parent hash, EIP-4788 beacon root if active). | BAL **MUST NOT** include the coinbase/fee recipient (receives no fees). BAL **MAY** include system contract addresses (EIP-2935 `HISTORY_STORAGE_ADDRESS`, EIP-4788 `BEACON_ROOTS_ADDRESS`) with `storage_changes` at `tx_index=0` (pre-execution system operations). Maximum 4 system contract addresses if all active. | 🟡 Planned | -| `test_bal_empty_block_withdrawal_to_coinbase` | Verify BAL includes coinbase when it receives EIP-4895 withdrawal even in empty block | Block with 0 transactions but contains EIP-4895 withdrawal(s) with coinbase as recipient. System contracts may perform operations. | BAL **MUST** include coinbase with `balance_changes` at `tx_index=1` (post-execution: len(txs)+1 = 0+1). BAL **MAY** include system contract addresses with `storage_changes` at `tx_index=0` (pre-execution system operations). This confirms that coinbase inclusion depends on actual state changes, not transaction presence. | 🟡 Planned | +| `test_bal_empty_block_no_coinbase` | Ensure BAL correctly handles empty blocks without including coinbase | Block with 0 transactions, no withdrawals. System contracts may perform operations (EIP-2935 parent hash, EIP-4788 beacon root if active). | BAL **MUST NOT** include the coinbase/fee recipient (receives no fees). BAL **MAY** include system contract addresses (EIP-2935 `HISTORY_STORAGE_ADDRESS`, EIP-4788 `BEACON_ROOTS_ADDRESS`) with `storage_changes` at `tx_index=0` (pre-execution system operations). | ✅ Completed | +| `test_bal_coinbase_zero_tip` | Ensure BAL includes coinbase even when priority fee is zero | Block with 1 transaction: Alice sends 5 wei to Bob with priority fee = 0 (base fee burned post-EIP-1559) | BAL **MUST** include Alice with `balance_changes` (gas cost) and `nonce_changes`. BAL **MUST** include Bob with `balance_changes`. BAL **MUST** include coinbase with empty changes. | ✅ Completed | +| `test_bal_withdrawal_empty_block` | Ensure BAL captures withdrawal balance changes in empty block | Charlie starts with 1 gwei. Block with 0 transactions and 1 withdrawal of 10 gwei to Charlie | BAL **MUST** include Charlie with `balance_changes` at `block_access_index = 1`. Charlie's `balance_changes` **MUST** show final balance of 11 gwei. All other fields (storage_reads, storage_changes, nonce_changes, code_changes) **MUST** be empty. | ✅ Completed | +| `test_bal_withdrawal_and_transaction` | Ensure BAL captures both transaction and withdrawal balance changes | Block with 1 transaction: Alice sends 5 wei to Bob. 
1 withdrawal of 10 gwei to Charlie | BAL **MUST** include Alice with `nonce_changes` and `balance_changes` at `block_access_index = 1`. BAL **MUST** include Bob with `balance_changes` at `block_access_index = 1`. BAL **MUST** include Charlie with `balance_changes` at `block_access_index = 2` showing final balance after receiving 10 gwei. All other fields for Charlie **MUST** be empty. | ✅ Completed | +| `test_bal_withdrawal_to_nonexistent_account` | Ensure BAL captures withdrawal to non-existent account | Block with 1 withdrawal of 10 gwei to non-existent account Charlie | BAL **MUST** include Charlie with `balance_changes` at `block_access_index = 1` showing final balance of 10 gwei. All other fields (storage_reads, storage_changes, nonce_changes, code_changes) **MUST** be empty. | ✅ Completed | +| `test_bal_withdrawal_no_evm_execution` | Ensure BAL captures withdrawal without triggering EVM execution | Contract `Oracle` with storage slot 0x01 = 0x42. `Oracle` code writes to slot 0x01 when called. Block with 1 withdrawal of 10 gwei to `Oracle` | BAL **MUST** include `Oracle` with `balance_changes` at `block_access_index = 1` showing final balance after receiving 10 gwei. Storage slot 0x01 **MUST** remain 0x42 and all other fields (storage_reads, storage_changes, nonce_changes, code_changes) **MUST** be empty. | ✅ Completed | +| `test_bal_withdrawal_and_state_access_same_account` | Ensure BAL captures both state access and withdrawal to same address | Contract `Oracle` with storage slot 0x01 = 0x42. Block with 1 transaction: Alice calls `Oracle` (reads from slot 0x01, writes to slot 0x02). 1 withdrawal of 10 gwei to `Oracle` | BAL **MUST** include `Oracle` with `storage_reads` for slot 0x01 and `storage_changes` for slot 0x02 at `block_access_index = 1`. `Oracle` **MUST** also have `balance_changes` at `block_access_index = 2` showing final balance after receiving 10 gwei. Both state access and withdrawal **MUST** be captured. | ✅ Completed | +| `test_bal_withdrawal_and_value_transfer_same_address` | Ensure BAL captures both transaction value transfer and withdrawal to same address | Block with 1 transaction: Alice sends 5 gwei to Bob. 1 withdrawal of 10 gwei to Bob | BAL **MUST** include Alice with `nonce_changes` and `balance_changes` at `block_access_index = 1`. BAL **MUST** include Bob with `balance_changes` at `block_access_index = 1` showing balance after receiving 5 gwei. Bob **MUST** also have `balance_changes` at `block_access_index = 2` showing balance after receiving 10 gwei withdrawal. Bob's final post-state balance **MUST** be 15 gwei (cumulative). | ✅ Completed | +| `test_bal_multiple_withdrawals_same_address` | Ensure BAL accumulates multiple withdrawals to same address | Block with 3 withdrawals to Charlie: 5 gwei, 10 gwei, 15 gwei | BAL **MUST** include Charlie with `balance_changes` at `block_access_index = 1` showing final balance of 30 gwei. All other fields (storage_reads, storage_changes, nonce_changes, code_changes) **MUST** be empty. | ✅ Completed | +| `test_bal_withdrawal_and_selfdestruct` | Ensure BAL captures withdrawal to self-destructed contract address | Contract `Oracle` with 100 gwei balance. Block with 1 transaction: `Oracle` self-destructs sending balance to Bob. 1 withdrawal of 50 gwei to `Oracle`'s address | BAL **MUST** include `Oracle` with `balance_changes` showing 0 balance at `block_access_index = 1` (after self-destruct). BAL **MUST** include Bob with `balance_changes` showing 100 gwei received from self-destruct at `block_access_index = 1`. 
`Oracle` **MUST** also have `balance_changes` at `block_access_index = 2` showing 50 gwei after withdrawal. Both self-destruct and withdrawal **MUST** be captured. | ✅ Completed | +| `test_bal_withdrawal_and_new_contract` | Ensure BAL captures withdrawal to newly created contract | Block with 1 transaction: Alice deploys contract `Oracle` with 5 gwei initial balance. 1 withdrawal of 10 gwei to `Oracle` | BAL **MUST** include `Oracle` with `code_changes` and `balance_changes` showing 5 gwei at `block_access_index = 1`. `Oracle` **MUST** also have `balance_changes` at `block_access_index = 2` showing balance after receiving 10 gwei withdrawal. `Oracle`'s final post-state balance **MUST** be 15 gwei (cumulative). | ✅ Completed | +| `test_bal_zero_withdrawal` | Ensure BAL handles zero-amount withdrawal correctly | Block with 0 transactions and 1 zero-amount withdrawal (0 gwei) to Charlie. Two variations: Charlie has existing balance (5 gwei) or Charlie is non-existent. | BAL **MUST** include Charlie at `block_access_index = 1` with empty changes. Balance remains unchanged. | ✅ Completed | +| `test_bal_withdrawal_to_precompiles` | Ensure BAL captures withdrawal to precompile addresses | Block with 1 withdrawal of 10 gwei to precompile address (all precompiles) | BAL **MUST** include precompile address with `balance_changes` at `block_access_index = 1` showing final balance of 10 gwei. All other fields (storage_reads, storage_changes, nonce_changes, code_changes) **MUST** be empty. | ✅ Completed | +| `test_bal_withdrawal_largest_amount` | Ensure BAL captures withdrawal with largest amount | Block with 1 withdrawal of maximum uint64 value (2^64-1 gwei) to Charlie | BAL **MUST** include Charlie with `balance_changes` at `block_access_index = 1` showing final balance of (2^64-1) * 10^9 wei. All other fields (storage_reads, storage_changes, nonce_changes, code_changes) **MUST** be empty. | ✅ Completed | +| `test_bal_withdrawal_to_coinbase` | Ensure BAL captures withdrawal to coinbase address | Block with 1 transaction: Alice sends 5 wei to Bob. 1 withdrawal of 10 gwei to coinbase/fee recipient | BAL **MUST** include coinbase with `balance_changes` at `block_access_index = 1` showing balance after transaction fees. Coinbase **MUST** also have `balance_changes` at `block_access_index = 2` showing balance after receiving 10 gwei withdrawal. Coinbase's final post-state balance **MUST** include both transaction fees and withdrawal. | ✅ Completed | +| `test_bal_withdrawal_to_coinbase_empty_block` | Ensure BAL captures withdrawal to coinbase even when there are no transactions (no fees) | Block with 0 transactions and 1 withdrawal of 10 gwei to coinbase/fee recipient | BAL **MUST** include coinbase with `balance_changes` at `block_access_index = 1` showing final balance of 10 gwei. All other fields (storage_reads, storage_changes, nonce_changes, code_changes) **MUST** be empty. | ✅ Completed | +| `test_bal_nonexistent_value_transfer` | Ensure BAL captures non-existent account on value transfer | Alice sends value (0 wei or 1 ETH) to non-existent account Bob (address never funded or accessed before) via direct transfer | For zero value: BAL **MUST** include Alice with `nonce_changes` and Bob (non-existent) with empty changes. For positive value: BAL **MUST** include Bob with `balance_changes` showing received amount. 
| ✅ Completed | +| `test_bal_nonexistent_account_access_read_only` | Ensure BAL captures non-existent account accessed via read-only account-reading opcodes | Alice calls `Oracle` contract which uses read-only account access opcodes (`BALANCE`, `EXTCODESIZE`, `EXTCODECOPY`, `EXTCODEHASH`, `STATICCALL`, `DELEGATECALL`) on non-existent account Bob. | BAL **MUST** include Alice with `nonce_changes`, `Oracle` with empty changes, and Bob with empty changes (account accessed but no state modifications). | ✅ Completed | +| `test_bal_nonexistent_account_access_value_transfer` | Ensure BAL captures non-existent account accessed via CALL/CALLCODE with value transfers | Alice calls `Oracle` contract which uses `CALL` or `CALLCODE` on non-existent account Bob. Tests both zero and positive value transfers. | BAL **MUST** include Alice with `nonce_changes`. For CALL with positive value: `Oracle` with `balance_changes` (loses value), Bob with `balance_changes` (receives value). For CALLCODE with value or zero value transfers: `Oracle` and Bob with empty changes (CALLCODE self-transfer = net zero). | ✅ Completed | +| `test_bal_storage_write_read_same_frame` | Ensure BAL captures write precedence over read in same call frame (writes shadow reads) | Alice calls `Oracle` which writes (`SSTORE`) value `0x42` to slot `0x01`, then reads (`SLOAD`) from slot `0x01` in the same call frame | BAL **MUST** include `Oracle` with slot `0x01` in `storage_changes` showing final value `0x42`. Slot `0x01` **MUST NOT** appear in `storage_reads` (write shadows the subsequent read in same frame). | ✅ Completed | +| `test_bal_storage_write_read_cross_frame` | Ensure BAL captures write precedence over read across call frames (writes shadow reads cross-frame) | Alice calls `Oracle`. First call reads slot `0x01` (sees initial value), writes `0x42` to slot `0x01`, then calls itself (via `CALL`, `DELEGATECALL`, or `CALLCODE`). Second call reads slot `0x01` (sees `0x42`) and exits. | BAL **MUST** include `Oracle` with slot `0x01` in `storage_changes` showing final value `0x42`. Slot `0x01` **MUST NOT** appear in `storage_reads` (write shadows both the read before it in same frame and the read in the recursive call). 
| ✅ Completed | From 786fdbb82084a0f4c3adf2dcab4a5325fa26782e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Toni=20Wahrst=C3=A4tter?= <51536394+nerolation@users.noreply.github.com> Date: Fri, 31 Oct 2025 18:22:24 +0100 Subject: [PATCH 012/154] fix(specs): Ensure tracking before first access (#1722) --- .../forks/amsterdam/vm/instructions/system.py | 36 ++++++++++++------- 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index c9feaabbc7..1de3a140a1 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -110,6 +110,9 @@ def generic_create( evm.accessed_addresses.add(contract_address) + # Track address access for BAL + track_address_access(state.change_tracker, contract_address) + if account_has_code_or_nonce( state, contract_address ) or account_has_storage(state, contract_address): @@ -145,8 +148,6 @@ def generic_create( parent_evm=evm, ) - track_address_access(state.change_tracker, contract_address) - child_evm = process_create_message(child_message) if child_evm.error: @@ -341,8 +342,6 @@ def generic_call( parent_evm=evm, ) - track_address_access(evm.message.block_env.state.change_tracker, to) - child_evm = process_message(child_message) if child_evm.error: @@ -396,6 +395,9 @@ def call(evm: Evm) -> None: evm.accessed_addresses.add(to) access_gas_cost = GAS_COLD_ACCOUNT_ACCESS + # Track address access for BAL + track_address_access(evm.message.block_env.state.change_tracker, to) + code_address = to ( disable_precompiles, @@ -485,6 +487,11 @@ def callcode(evm: Evm) -> None: evm.accessed_addresses.add(code_address) access_gas_cost = GAS_COLD_ACCOUNT_ACCESS + # Track address access for BAL + track_address_access( + evm.message.block_env.state.change_tracker, code_address + ) + ( disable_precompiles, code_address, @@ -503,10 +510,6 @@ def callcode(evm: Evm) -> None: ) charge_gas(evm, message_call_gas.cost + extend_memory.cost) - track_address_access( - evm.message.block_env.state.change_tracker, code_address - ) - # OPERATION evm.memory += b"\x00" * extend_memory.expand_by sender_balance = get_account( @@ -560,6 +563,11 @@ def selfdestruct(evm: Evm) -> None: evm.accessed_addresses.add(beneficiary) gas_cost += GAS_COLD_ACCOUNT_ACCESS + # Track address access for BAL + track_address_access( + evm.message.block_env.state.change_tracker, beneficiary + ) + if ( not is_account_alive(evm.message.block_env.state, beneficiary) and get_account( @@ -635,6 +643,11 @@ def delegatecall(evm: Evm) -> None: evm.accessed_addresses.add(code_address) access_gas_cost = GAS_COLD_ACCOUNT_ACCESS + # Track address access for BAL + track_address_access( + evm.message.block_env.state.change_tracker, code_address + ) + ( disable_precompiles, code_address, @@ -648,10 +661,6 @@ def delegatecall(evm: Evm) -> None: ) charge_gas(evm, message_call_gas.cost + extend_memory.cost) - track_address_access( - evm.message.block_env.state.change_tracker, code_address - ) - # OPERATION evm.memory += b"\x00" * extend_memory.expand_by generic_call( @@ -708,6 +717,9 @@ def staticcall(evm: Evm) -> None: evm.accessed_addresses.add(to) access_gas_cost = GAS_COLD_ACCOUNT_ACCESS + # Track address access for BAL + track_address_access(evm.message.block_env.state.change_tracker, to) + code_address = to ( disable_precompiles, From 06420451f7902568ebafa14c794efce34f8cc1fc Mon Sep 17 00:00:00 2001 From: felipe Date: Tue, 4 Nov 2025 04:44:36 -0700 
Subject: [PATCH 013/154] fix(spec-specs): duplicate storage writes in state tracker (#1743) - Perform a similar check to balance changes and other tracker methods and keep only the last write. --- docs/CHANGELOG.md | 1 + .../amsterdam/block_access_lists/builder.py | 16 ++++++++++++++-- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index bfe963e079..4dae0d4ff7 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -57,6 +57,7 @@ Test fixtures for use by clients are available for each release on the [Github r ### 📋 Misc - 🐞 WELDed the EEST tox environments relevant to producing documentation into EELS, and added a tool to cleanly add codespell whitelist entries. ([#1695](https://github.com/ethereum/execution-specs/pull/1659)). +- 🐞 Fix duplicate storage write issues for block access lists EIP-7928 implementation ([#1743](https://github.com/ethereum/execution-specs/pull/1743)). ### 🧪 Test Cases diff --git a/src/ethereum/forks/amsterdam/block_access_lists/builder.py b/src/ethereum/forks/amsterdam/block_access_lists/builder.py index a9d6ee9930..f27e26c377 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/builder.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/builder.py @@ -126,8 +126,8 @@ def add_storage_write( Add a storage write operation to the block access list. Records a storage slot modification for a given address at a specific - transaction index. Multiple writes to the same slot are tracked - separately, maintaining the order and transaction index of each change. + transaction index. If multiple writes occur to the same slot within the + same transaction (same block_access_index), only the final value is kept. Parameters ---------- @@ -149,6 +149,18 @@ def add_storage_write( if slot not in builder.accounts[address].storage_changes: builder.accounts[address].storage_changes[slot] = [] + # Check if there's already an entry with the same block_access_index + # If so, update it with the new value, keeping only the final write + changes = builder.accounts[address].storage_changes[slot] + for i, existing_change in enumerate(changes): + if existing_change.block_access_index == block_access_index: + # Update the existing entry with the new value + changes[i] = StorageChange( + block_access_index=block_access_index, new_value=new_value + ) + return + + # No existing entry found, append new change change = StorageChange( block_access_index=block_access_index, new_value=new_value ) From 68e171448c63e04ef9dd658e80ec90c96df55828 Mon Sep 17 00:00:00 2001 From: felipe Date: Thu, 13 Nov 2025 09:05:36 -0700 Subject: [PATCH 014/154] fix(test-specs): validate t8n BAL independent of expectation existence (#1742) - Validate static checks on the t8n BAL if it exists - IF the expectation also exists, validate against the expectation Keep these checks separate as this helps validation now that we fill for all tests, regardless if they have an expectation or not. --- .../src/execution_testing/specs/blockchain.py | 6 + .../block_access_list/expectations.py | 114 +----- .../test_types/block_access_list/t8n.py | 106 ++++++ ... 
=> test_block_access_list_expectation.py} | 225 ----------- .../tests/test_block_access_list_t8n.py | 351 ++++++++++++++++++ 5 files changed, 465 insertions(+), 337 deletions(-) rename packages/testing/src/execution_testing/test_types/tests/{test_block_access_lists.py => test_block_access_list_expectation.py} (82%) create mode 100644 packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py diff --git a/packages/testing/src/execution_testing/specs/blockchain.py b/packages/testing/src/execution_testing/specs/blockchain.py index 7ec289c8dc..a508a78ad6 100644 --- a/packages/testing/src/execution_testing/specs/blockchain.py +++ b/packages/testing/src/execution_testing/specs/blockchain.py @@ -729,6 +729,12 @@ def generate_block_data( # tests t8n_bal = transition_tool_output.result.block_access_list bal = t8n_bal + + # Always validate BAL structural integrity (ordering, duplicates) if present + if t8n_bal is not None: + t8n_bal.validate_structure() + + # If expected BAL is defined, verify against it if ( block.expected_block_access_list is not None and t8n_bal is not None diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/expectations.py b/packages/testing/src/execution_testing/test_types/block_access_list/expectations.py index 080aafc661..9030471549 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/expectations.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/expectations.py @@ -18,7 +18,6 @@ BalCodeChange, BalNonceChange, BalStorageSlot, - BlockAccessListChangeLists, ) from .exceptions import BlockAccessListValidationError from .t8n import BlockAccessList @@ -175,9 +174,8 @@ def verify_against(self, actual_bal: "BlockAccessList") -> None: Verify that the actual BAL from the client matches this expected BAL. Validation steps: - 1. Validate actual BAL conforms to EIP-7928 ordering requirements - 2. Verify address expectations - presence or explicit absence - 3. Verify expected changes within accounts match actual changes + 1. Verify address expectations - presence or explicit absence + 2. Verify expected changes within accounts match actual changes Args: actual_bal: The BlockAccessList model from the client @@ -186,9 +184,6 @@ def verify_against(self, actual_bal: "BlockAccessList") -> None: BlockAccessListValidationError: If verification fails """ - # validate the actual BAL structure follows EIP-7928 ordering - self._validate_bal_ordering(actual_bal) - actual_accounts_by_addr = {acc.address: acc for acc in actual_bal.root} for address, expectation in self.account_expectations.items(): if expectation is None: @@ -232,111 +227,6 @@ def verify_against(self, actual_bal: "BlockAccessList") -> None: f"Account {address}: {str(e)}" ) from e - @staticmethod - def _validate_bal_ordering(bal: "BlockAccessList") -> None: - """ - Validate BAL ordering follows EIP-7928 requirements. 
- - Args: - bal: The BlockAccessList to validate - - Raises: - BlockAccessListValidationError: If ordering is invalid - - """ - # Check address ordering (ascending) - for i in range(1, len(bal.root)): - if bal.root[i - 1].address >= bal.root[i].address: - raise BlockAccessListValidationError( - f"BAL addresses are not in lexicographic order: " - f"{bal.root[i - 1].address} >= {bal.root[i].address}" - ) - - # Check transaction index ordering and uniqueness within accounts - for account in bal.root: - changes_to_check: List[tuple[str, BlockAccessListChangeLists]] = [ - ("nonce_changes", account.nonce_changes), - ("balance_changes", account.balance_changes), - ("code_changes", account.code_changes), - ] - - for field_name, change_list in changes_to_check: - if not change_list: - continue - - tx_indices = [c.tx_index for c in change_list] - - # Check both ordering and duplicates - if tx_indices != sorted(tx_indices): - raise BlockAccessListValidationError( - f"Transaction indices not in ascending order in {field_name} of account " - f"{account.address}. Got: {tx_indices}, Expected: {sorted(tx_indices)}" - ) - - if len(tx_indices) != len(set(tx_indices)): - duplicates = sorted( - { - idx - for idx in tx_indices - if tx_indices.count(idx) > 1 - } - ) - raise BlockAccessListValidationError( - f"Duplicate transaction indices in {field_name} of account " - f"{account.address}. Duplicates: {duplicates}" - ) - - # Check storage slot ordering - for i in range(1, len(account.storage_changes)): - if ( - account.storage_changes[i - 1].slot - >= account.storage_changes[i].slot - ): - raise BlockAccessListValidationError( - f"Storage slots not in ascending order in account " - f"{account.address}: {account.storage_changes[i - 1].slot} >= " - f"{account.storage_changes[i].slot}" - ) - - # Check transaction index ordering and uniqueness within storage - # slots - for storage_slot in account.storage_changes: - if not storage_slot.slot_changes: - continue - - tx_indices = [c.tx_index for c in storage_slot.slot_changes] - - # Check both ordering and duplicates - if tx_indices != sorted(tx_indices): - raise BlockAccessListValidationError( - f"Transaction indices not in ascending order in storage slot " - f"{storage_slot.slot} of account {account.address}. " - f"Got: {tx_indices}, Expected: {sorted(tx_indices)}" - ) - - if len(tx_indices) != len(set(tx_indices)): - duplicates = sorted( - { - idx - for idx in tx_indices - if tx_indices.count(idx) > 1 - } - ) - raise BlockAccessListValidationError( - f"Duplicate transaction indices in storage slot " - f"{storage_slot.slot} of account {account.address}. 
" - f"Duplicates: {duplicates}" - ) - - # Check storage reads ordering - for i in range(1, len(account.storage_reads)): - if account.storage_reads[i - 1] >= account.storage_reads[i]: - raise BlockAccessListValidationError( - f"Storage reads not in ascending order in account " - f"{account.address}: {account.storage_reads[i - 1]} >= " - f"{account.storage_reads[i]}" - ) - @staticmethod def _compare_account_expectations( expected: BalAccountExpectation, actual: BalAccountChange diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/t8n.py b/packages/testing/src/execution_testing/test_types/block_access_list/t8n.py index 9a9ba84508..03b8224bbf 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/t8n.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/t8n.py @@ -12,6 +12,7 @@ ) from .account_changes import BalAccountChange +from .exceptions import BlockAccessListValidationError class BlockAccessList(EthereumTestRootModel[List[BalAccountChange]]): @@ -49,3 +50,108 @@ def rlp(self) -> Bytes: def rlp_hash(self) -> Bytes: """Return the hash of the RLP encoded block access list.""" return self.rlp.keccak256() + + def validate_structure(self) -> None: + """ + Validate BAL structure follows EIP-7928 requirements. + + Checks: + - Addresses are in lexicographic (ascending) order + - Transaction indices are sorted and unique within each change list + - Storage slots are in ascending order + - Storage reads are in ascending order + + Raises: + BlockAccessListValidationError: If validation fails + """ + # Check address ordering (ascending) + for i in range(1, len(self.root)): + if self.root[i - 1].address >= self.root[i].address: + raise BlockAccessListValidationError( + f"BAL addresses are not in lexicographic order: " + f"{self.root[i - 1].address} >= {self.root[i].address}" + ) + + # Check transaction index ordering and uniqueness within accounts + for account in self.root: + changes_to_check: List[tuple[str, List[Any]]] = [ + ("nonce_changes", account.nonce_changes), + ("balance_changes", account.balance_changes), + ("code_changes", account.code_changes), + ] + + for field_name, change_list in changes_to_check: + if not change_list: + continue + + tx_indices = [c.tx_index for c in change_list] + + # Check both ordering and duplicates + if tx_indices != sorted(tx_indices): + raise BlockAccessListValidationError( + f"Transaction indices not in ascending order in {field_name} of account " + f"{account.address}. Got: {tx_indices}, Expected: {sorted(tx_indices)}" + ) + + if len(tx_indices) != len(set(tx_indices)): + duplicates = sorted( + { + idx + for idx in tx_indices + if tx_indices.count(idx) > 1 + } + ) + raise BlockAccessListValidationError( + f"Duplicate transaction indices in {field_name} of account " + f"{account.address}. 
Duplicates: {duplicates}" + ) + + # Check storage slot ordering + for i in range(1, len(account.storage_changes)): + if ( + account.storage_changes[i - 1].slot + >= account.storage_changes[i].slot + ): + raise BlockAccessListValidationError( + f"Storage slots not in ascending order in account " + f"{account.address}: {account.storage_changes[i - 1].slot} >= " + f"{account.storage_changes[i].slot}" + ) + + # Check transaction index ordering and uniqueness within storage slots + for storage_slot in account.storage_changes: + if not storage_slot.slot_changes: + continue + + tx_indices = [c.tx_index for c in storage_slot.slot_changes] + + # Check both ordering and duplicates + if tx_indices != sorted(tx_indices): + raise BlockAccessListValidationError( + f"Transaction indices not in ascending order in storage slot " + f"{storage_slot.slot} of account {account.address}. " + f"Got: {tx_indices}, Expected: {sorted(tx_indices)}" + ) + + if len(tx_indices) != len(set(tx_indices)): + duplicates = sorted( + { + idx + for idx in tx_indices + if tx_indices.count(idx) > 1 + } + ) + raise BlockAccessListValidationError( + f"Duplicate transaction indices in storage slot " + f"{storage_slot.slot} of account {account.address}. " + f"Duplicates: {duplicates}" + ) + + # Check storage reads ordering + for i in range(1, len(account.storage_reads)): + if account.storage_reads[i - 1] >= account.storage_reads[i]: + raise BlockAccessListValidationError( + f"Storage reads not in ascending order in account " + f"{account.address}: {account.storage_reads[i - 1]} >= " + f"{account.storage_reads[i]}" + ) diff --git a/packages/testing/src/execution_testing/test_types/tests/test_block_access_lists.py b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_expectation.py similarity index 82% rename from packages/testing/src/execution_testing/test_types/tests/test_block_access_lists.py rename to packages/testing/src/execution_testing/test_types/tests/test_block_access_list_expectation.py index 8effee3688..899d9647e4 100644 --- a/packages/testing/src/execution_testing/test_types/tests/test_block_access_lists.py +++ b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_expectation.py @@ -312,231 +312,6 @@ def test_missing_expected_address() -> None: expectation.verify_against(actual_bal) -@pytest.mark.parametrize( - "addresses,error_message", - [ - ( - [ - Address(0xB), - Address(0xA), # should come first - ], - "BAL addresses are not in lexicographic order", - ), - ( - [ - Address(0x1), - Address(0x3), - Address(0x2), - ], - "BAL addresses are not in lexicographic order", - ), - ], -) -def test_actual_bal_address_ordering_validation( - addresses: Any, error_message: str -) -> None: - """Test that actual BAL must have addresses in lexicographic order.""" - # Create BAL with addresses in the given order - actual_bal = BlockAccessList( - [ - BalAccountChange(address=addr, nonce_changes=[]) - for addr in addresses - ] - ) - - expectation = BlockAccessListExpectation(account_expectations={}) - - with pytest.raises(BlockAccessListValidationError, match=error_message): - expectation.verify_against(actual_bal) - - -@pytest.mark.parametrize( - "storage_slots,error_message", - [ - ( - [StorageKey(0x02), StorageKey(0x01)], # 0x02 before 0x01 - "Storage slots not in ascending order", - ), - ( - [StorageKey(0x01), StorageKey(0x03), StorageKey(0x02)], - "Storage slots not in ascending order", - ), - ], -) -def test_actual_bal_storage_slot_ordering( - storage_slots: Any, error_message: 
str -) -> None: - """Test that actual BAL must have storage slots in lexicographic order.""" - addr = Address(0xA) - - actual_bal = BlockAccessList( - [ - BalAccountChange( - address=addr, - storage_changes=[ - BalStorageSlot(slot=slot, slot_changes=[]) - for slot in storage_slots - ], - ) - ] - ) - - expectation = BlockAccessListExpectation(account_expectations={}) - - with pytest.raises(BlockAccessListValidationError, match=error_message): - expectation.verify_against(actual_bal) - - -@pytest.mark.parametrize( - "storage_reads,error_message", - [ - ( - [StorageKey(0x02), StorageKey(0x01)], - "Storage reads not in ascending order", - ), - ( - [StorageKey(0x01), StorageKey(0x03), StorageKey(0x02)], - "Storage reads not in ascending order", - ), - ], -) -def test_actual_bal_storage_reads_ordering( - storage_reads: Any, error_message: str -) -> None: - """Test that actual BAL must have storage reads in lexicographic order.""" - addr = Address(0xA) - - actual_bal = BlockAccessList( - [BalAccountChange(address=addr, storage_reads=storage_reads)] - ) - - expectation = BlockAccessListExpectation(account_expectations={}) - - with pytest.raises(BlockAccessListValidationError, match=error_message): - expectation.verify_against(actual_bal) - - -@pytest.mark.parametrize( - "field_name", - ["nonce_changes", "balance_changes", "code_changes"], -) -def test_actual_bal_tx_indices_ordering(field_name: str) -> None: - """Test that actual BAL must have tx indices in ascending order.""" - addr = Address(0xA) - - tx_indices = [2, 3, 1] # out of order - - changes: Any = [] - if field_name == "nonce_changes": - changes = [ - BalNonceChange(tx_index=idx, post_nonce=1) for idx in tx_indices - ] - elif field_name == "balance_changes": - changes = [ - BalBalanceChange(tx_index=idx, post_balance=100) - for idx in tx_indices - ] - elif field_name == "code_changes": - changes = [ - BalCodeChange(tx_index=idx, new_code=b"code") for idx in tx_indices - ] - - actual_bal = BlockAccessList( - [BalAccountChange(address=addr, **{field_name: changes})] - ) - - expectation = BlockAccessListExpectation(account_expectations={}) - - with pytest.raises( - BlockAccessListValidationError, - match="Transaction indices not in ascending order", - ): - expectation.verify_against(actual_bal) - - -@pytest.mark.parametrize( - "field_name", - ["nonce_changes", "balance_changes", "code_changes"], -) -def test_actual_bal_duplicate_tx_indices(field_name: str) -> None: - """ - Test that actual BAL must not have duplicate tx indices in change lists. 
- """ - addr = Address(0xA) - - # Duplicate tx_index=1 - changes: Any = [] - if field_name == "nonce_changes": - changes = [ - BalNonceChange(tx_index=1, post_nonce=1), - BalNonceChange(tx_index=1, post_nonce=2), # duplicate tx_index - BalNonceChange(tx_index=2, post_nonce=3), - ] - elif field_name == "balance_changes": - changes = [ - BalBalanceChange(tx_index=1, post_balance=100), - BalBalanceChange( - tx_index=1, post_balance=200 - ), # duplicate tx_index - BalBalanceChange(tx_index=2, post_balance=300), - ] - elif field_name == "code_changes": - changes = [ - BalCodeChange(tx_index=1, new_code=b"code1"), - BalCodeChange(tx_index=1, new_code=b""), # duplicate tx_index - BalCodeChange(tx_index=2, new_code=b"code2"), - ] - - actual_bal = BlockAccessList( - [BalAccountChange(address=addr, **{field_name: changes})] - ) - - expectation = BlockAccessListExpectation(account_expectations={}) - - with pytest.raises( - BlockAccessListValidationError, - match=f"Duplicate transaction indices in {field_name}.*Duplicates: \\[1\\]", - ): - expectation.verify_against(actual_bal) - - -def test_actual_bal_storage_duplicate_tx_indices() -> None: - """ - Test that storage changes must not have duplicate tx indices within same - slot. - """ - addr = Address(0xA) - - # Create storage changes with duplicate tx_index within the same slot - actual_bal = BlockAccessList( - [ - BalAccountChange( - address=addr, - storage_changes=[ - BalStorageSlot( - slot=0x01, - slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x100), - BalStorageChange( - tx_index=1, post_value=0x200 - ), # duplicate tx_index - BalStorageChange(tx_index=2, post_value=0x300), - ], - ) - ], - ) - ] - ) - - expectation = BlockAccessListExpectation(account_expectations={}) - - with pytest.raises( - BlockAccessListValidationError, - match="Duplicate transaction indices in storage slot.*Duplicates: \\[1\\]", - ): - expectation.verify_against(actual_bal) - - def test_expected_addresses_auto_sorted() -> None: """ Test that expected addresses are automatically sorted before comparison. diff --git a/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py new file mode 100644 index 0000000000..3c884cf2f4 --- /dev/null +++ b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py @@ -0,0 +1,351 @@ +""" +Tests for BlockAccessList.validate_structure() method. + +These tests verify that the BAL structural validation correctly enforces +EIP-7928 requirements for ordering and uniqueness. 
+""" + +from typing import List, Union + +import pytest + +from execution_testing.base_types import Address, HexNumber, StorageKey +from execution_testing.test_types.block_access_list import ( + BalAccountChange, + BalBalanceChange, + BalCodeChange, + BalNonceChange, + BalStorageChange, + BalStorageSlot, + BlockAccessList, + BlockAccessListValidationError, +) + + +def test_bal_address_ordering_validation() -> None: + """Test that BAL addresses must be in lexicographic order.""" + alice = Address(0xAA) + bob = Address(0xBB) + + # Correct order: alice < bob + bal_valid = BlockAccessList( + [ + BalAccountChange(address=alice), + BalAccountChange(address=bob), + ] + ) + bal_valid.validate_structure() # Should not raise + + # Incorrect order: bob before alice + bal_invalid = BlockAccessList( + [ + BalAccountChange(address=bob), + BalAccountChange(address=alice), + ] + ) + + with pytest.raises( + BlockAccessListValidationError, + match="addresses are not in lexicographic order", + ): + bal_invalid.validate_structure() + + +def test_bal_storage_slot_ordering() -> None: + """Test that storage slots must be in ascending order.""" + addr = Address(0xA) + + # Correct order + bal_valid = BlockAccessList( + [ + BalAccountChange( + address=addr, + storage_changes=[ + BalStorageSlot(slot=StorageKey(0), slot_changes=[]), + BalStorageSlot(slot=StorageKey(1), slot_changes=[]), + BalStorageSlot(slot=StorageKey(2), slot_changes=[]), + ], + ) + ] + ) + bal_valid.validate_structure() # Should not raise + + # Incorrect order: slot 2 before slot 1 + bal_invalid = BlockAccessList( + [ + BalAccountChange( + address=addr, + storage_changes=[ + BalStorageSlot(slot=StorageKey(0), slot_changes=[]), + BalStorageSlot(slot=StorageKey(2), slot_changes=[]), + BalStorageSlot(slot=StorageKey(1), slot_changes=[]), + ], + ) + ] + ) + + with pytest.raises( + BlockAccessListValidationError, + match="Storage slots not in ascending order", + ): + bal_invalid.validate_structure() + + +def test_bal_storage_reads_ordering() -> None: + """Test that storage reads must be in ascending order.""" + addr = Address(0xA) + + # Correct order + bal_valid = BlockAccessList( + [ + BalAccountChange( + address=addr, + storage_reads=[StorageKey(0), StorageKey(1), StorageKey(2)], + ) + ] + ) + bal_valid.validate_structure() # Should not raise + + # Incorrect order + bal_invalid = BlockAccessList( + [ + BalAccountChange( + address=addr, + storage_reads=[StorageKey(0), StorageKey(2), StorageKey(1)], + ) + ] + ) + + with pytest.raises( + BlockAccessListValidationError, + match="Storage reads not in ascending order", + ): + bal_invalid.validate_structure() + + +@pytest.mark.parametrize( + "field_name", + ["nonce_changes", "balance_changes", "code_changes"], +) +def test_bal_tx_indices_ordering(field_name: str) -> None: + """ + Test that transaction indices must be in ascending order within change lists. 
+ """ + addr = Address(0xA) + + changes_valid: List[Union[BalNonceChange, BalBalanceChange, BalCodeChange]] + changes_invalid: List[ + Union[BalNonceChange, BalBalanceChange, BalCodeChange] + ] + + # Correct order: tx_index 1, 2, 3 + if field_name == "nonce_changes": + changes_valid = [ + BalNonceChange(tx_index=HexNumber(1), post_nonce=HexNumber(1)), + BalNonceChange(tx_index=HexNumber(2), post_nonce=HexNumber(2)), + BalNonceChange(tx_index=HexNumber(3), post_nonce=HexNumber(3)), + ] + changes_invalid = [ + BalNonceChange(tx_index=HexNumber(1), post_nonce=HexNumber(1)), + BalNonceChange(tx_index=HexNumber(3), post_nonce=HexNumber(3)), + BalNonceChange(tx_index=HexNumber(2), post_nonce=HexNumber(2)), + ] + elif field_name == "balance_changes": + changes_valid = [ + BalBalanceChange( + tx_index=HexNumber(1), post_balance=HexNumber(100) + ), + BalBalanceChange( + tx_index=HexNumber(2), post_balance=HexNumber(200) + ), + BalBalanceChange( + tx_index=HexNumber(3), post_balance=HexNumber(300) + ), + ] + changes_invalid = [ + BalBalanceChange( + tx_index=HexNumber(1), post_balance=HexNumber(100) + ), + BalBalanceChange( + tx_index=HexNumber(3), post_balance=HexNumber(300) + ), + BalBalanceChange( + tx_index=HexNumber(2), post_balance=HexNumber(200) + ), + ] + elif field_name == "code_changes": + changes_valid = [ + BalCodeChange(tx_index=HexNumber(1), new_code=b"code1"), + BalCodeChange(tx_index=HexNumber(2), new_code=b"code2"), + BalCodeChange(tx_index=HexNumber(3), new_code=b"code3"), + ] + changes_invalid = [ + BalCodeChange(tx_index=HexNumber(1), new_code=b"code1"), + BalCodeChange(tx_index=HexNumber(3), new_code=b"code3"), + BalCodeChange(tx_index=HexNumber(2), new_code=b"code2"), + ] + + bal_valid = BlockAccessList( + [BalAccountChange(address=addr, **{field_name: changes_valid})] + ) + bal_valid.validate_structure() # Should not raise + + bal_invalid = BlockAccessList( + [BalAccountChange(address=addr, **{field_name: changes_invalid})] + ) + + with pytest.raises( + BlockAccessListValidationError, + match=f"Transaction indices not in ascending order in {field_name}", + ): + bal_invalid.validate_structure() + + +@pytest.mark.parametrize( + "field_name", + ["nonce_changes", "balance_changes", "code_changes"], +) +def test_bal_duplicate_tx_indices(field_name: str) -> None: + """ + Test that BAL must not have duplicate tx indices in change lists. 
+ """ + addr = Address(0xA) + + changes: List[Union[BalNonceChange, BalBalanceChange, BalCodeChange]] + + # Duplicate tx_index=1 + if field_name == "nonce_changes": + changes = [ + BalNonceChange(tx_index=HexNumber(1), post_nonce=HexNumber(1)), + BalNonceChange( + tx_index=HexNumber(1), post_nonce=HexNumber(2) + ), # duplicate tx_index + BalNonceChange(tx_index=HexNumber(2), post_nonce=HexNumber(3)), + ] + elif field_name == "balance_changes": + changes = [ + BalBalanceChange( + tx_index=HexNumber(1), post_balance=HexNumber(100) + ), + BalBalanceChange( + tx_index=HexNumber(1), post_balance=HexNumber(200) + ), # duplicate tx_index + BalBalanceChange( + tx_index=HexNumber(2), post_balance=HexNumber(300) + ), + ] + elif field_name == "code_changes": + changes = [ + BalCodeChange(tx_index=HexNumber(1), new_code=b"code1"), + BalCodeChange( + tx_index=HexNumber(1), new_code=b"" + ), # duplicate tx_index + BalCodeChange(tx_index=HexNumber(2), new_code=b"code2"), + ] + + bal = BlockAccessList( + [BalAccountChange(address=addr, **{field_name: changes})] + ) + + with pytest.raises( + BlockAccessListValidationError, + match=f"Duplicate transaction indices in {field_name}.*Duplicates: \\[1\\]", + ): + bal.validate_structure() + + +def test_bal_storage_duplicate_tx_indices() -> None: + """ + Test that storage changes must not have duplicate tx indices within same slot. + """ + addr = Address(0xA) + + # Create storage changes with duplicate tx_index within the same slot + bal = BlockAccessList( + [ + BalAccountChange( + address=addr, + storage_changes=[ + BalStorageSlot( + slot=StorageKey(0), + slot_changes=[ + BalStorageChange( + tx_index=HexNumber(1), + post_value=StorageKey(100), + ), + BalStorageChange( + tx_index=HexNumber(1), + post_value=StorageKey(200), + ), # duplicate tx_index + BalStorageChange( + tx_index=HexNumber(2), + post_value=StorageKey(300), + ), + ], + ) + ], + ) + ] + ) + + with pytest.raises( + BlockAccessListValidationError, + match="Duplicate transaction indices in storage slot.*Duplicates: \\[1\\]", + ): + bal.validate_structure() + + +def test_bal_multiple_violations() -> None: + """ + Test that validation catches the first violation when multiple exist. 
+ """ + alice = Address(0xAA) + bob = Address(0xBB) + + # Wrong address order AND duplicate tx indices + bal = BlockAccessList( + [ + BalAccountChange( + address=bob, # Should come after alice + nonce_changes=[ + BalNonceChange( + tx_index=HexNumber(1), post_nonce=HexNumber(1) + ), + BalNonceChange( + tx_index=HexNumber(1), post_nonce=HexNumber(2) + ), # duplicate + ], + ), + BalAccountChange(address=alice), + ] + ) + + # Should catch the first error (address ordering) + with pytest.raises( + BlockAccessListValidationError, + match="addresses are not in lexicographic order", + ): + bal.validate_structure() + + +def test_bal_empty_list_valid() -> None: + """Test that an empty BAL is valid.""" + bal = BlockAccessList([]) + bal.validate_structure() # Should not raise + + +def test_bal_single_account_valid() -> None: + """Test that a BAL with a single account is valid.""" + bal = BlockAccessList( + [ + BalAccountChange( + address=Address(0xA), + nonce_changes=[ + BalNonceChange( + tx_index=HexNumber(1), post_nonce=HexNumber(1) + ) + ], + ) + ] + ) + bal.validate_structure() # Should not raise From 5e7249dbd1a65db705f3c8f1a81e6adf685761cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Toni=20Wahrst=C3=A4tter?= Date: Wed, 5 Nov 2025 12:11:41 +0100 Subject: [PATCH 015/154] feat(specs): EIP-7928 refactoring --- .../amsterdam/block_access_lists/__init__.py | 6 + .../amsterdam/block_access_lists/tracker.py | 152 +++++++++------ src/ethereum/forks/amsterdam/fork.py | 38 ++-- src/ethereum/forks/amsterdam/state.py | 112 +++++++---- src/ethereum/forks/amsterdam/vm/__init__.py | 5 + .../forks/amsterdam/vm/eoa_delegation.py | 86 ++++++--- src/ethereum/forks/amsterdam/vm/gas.py | 17 ++ .../amsterdam/vm/instructions/environment.py | 55 +++--- .../amsterdam/vm/instructions/storage.py | 80 ++++---- .../forks/amsterdam/vm/instructions/system.py | 177 +++++++++++------- .../forks/amsterdam/vm/interpreter.py | 31 ++- 11 files changed, 474 insertions(+), 285 deletions(-) diff --git a/src/ethereum/forks/amsterdam/block_access_lists/__init__.py b/src/ethereum/forks/amsterdam/block_access_lists/__init__.py index 856ab832bc..ebcda46e98 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/__init__.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/__init__.py @@ -21,6 +21,9 @@ StateChangeTracker, begin_call_frame, commit_call_frame, + handle_in_transaction_selfdestruct, + normalize_balance_changes, + prepare_balance_tracking, rollback_call_frame, set_block_access_index, track_address_access, @@ -44,6 +47,9 @@ "build_block_access_list", "commit_call_frame", "compute_block_access_list_hash", + "handle_in_transaction_selfdestruct", + "normalize_balance_changes", + "prepare_balance_tracking", "rollback_call_frame", "set_block_access_index", "rlp_encode_block_access_list", diff --git a/src/ethereum/forks/amsterdam/block_access_lists/tracker.py b/src/ethereum/forks/amsterdam/block_access_lists/tracker.py index 0ea945e7b1..9008a20878 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/tracker.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/tracker.py @@ -31,6 +31,7 @@ if TYPE_CHECKING: from ..state import State # noqa: F401 + from ..vm import BlockEnvironment # noqa: F401 @dataclass @@ -114,7 +115,7 @@ class StateChangeTracker: def set_block_access_index( - tracker: StateChangeTracker, block_access_index: Uint + block_env: "BlockEnvironment", block_access_index: Uint ) -> None: """ Set the current block access index for tracking changes. 
@@ -129,13 +130,14 @@ def set_block_access_index( Parameters ---------- - tracker : - The state change tracker instance. + block_env : + The block execution environment. block_access_index : The block access index (0 for pre-execution, 1..n for transactions, n+1 for post-execution). """ + tracker = block_env.change_tracker tracker.current_block_access_index = block_access_index # Clear the pre-storage cache for each new transaction to ensure # no-op writes are detected relative to the transaction start @@ -182,7 +184,7 @@ def capture_pre_state( def track_address_access( - tracker: StateChangeTracker, address: Address + block_env: "BlockEnvironment", address: Address ) -> None: """ Track that an address was accessed. @@ -192,17 +194,19 @@ def track_address_access( Parameters ---------- - tracker : - The state change tracker instance. + block_env : + The block execution environment. address : The account address that was accessed. """ - add_touched_account(tracker.block_access_list_builder, address) + add_touched_account( + block_env.change_tracker.block_access_list_builder, address + ) def track_storage_read( - tracker: StateChangeTracker, address: Address, key: Bytes32, state: "State" + block_env: "BlockEnvironment", address: Address, key: Bytes32 ) -> None: """ Track a storage read operation. @@ -213,29 +217,28 @@ def track_storage_read( Parameters ---------- - tracker : - The state change tracker instance. + block_env : + The block execution environment. address : The account address whose storage is being read. key : The storage slot being read. - state : - The current execution state. """ - track_address_access(tracker, address) + track_address_access(block_env, address) - capture_pre_state(tracker, address, key, state) + capture_pre_state(block_env.change_tracker, address, key, block_env.state) - add_storage_read(tracker.block_access_list_builder, address, key) + add_storage_read( + block_env.change_tracker.block_access_list_builder, address, key + ) def track_storage_write( - tracker: StateChangeTracker, + block_env: "BlockEnvironment", address: Address, key: Bytes32, new_value: U256, - state: "State", ) -> None: """ Track a storage write operation. @@ -246,23 +249,22 @@ def track_storage_write( Parameters ---------- - tracker : - The state change tracker instance. + block_env : + The block execution environment. address : The account address whose storage is being modified. key : The storage slot being written to. new_value : The new value to write. - state : - The current execution state. [EIP-7928]: https://eips.ethereum.org/EIPS/eip-7928 """ - track_address_access(tracker, address) + track_address_access(block_env, address) - pre_value = capture_pre_state(tracker, address, key, state) + tracker = block_env.change_tracker + pre_value = capture_pre_state(tracker, address, key, block_env.state) value_bytes = new_value.to_be_bytes32() @@ -322,8 +324,36 @@ def capture_pre_balance( return tracker.pre_balance_cache[address] +def prepare_balance_tracking( + block_env: "BlockEnvironment", address: Address +) -> None: + """ + Prepare for tracking balance changes by caching the pre-transaction + balance. + + This should be called before any balance modifications when you need to + ensure the pre-balance is captured for later normalization. This is + particularly important for operations like withdrawals where the balance + might not actually change. + + Parameters + ---------- + block_env : + The block execution environment. + address : + The account address whose balance will be tracked. 
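+
+    A minimal usage sketch, mirroring the withdrawal handling in
+    `fork.py` (`wd` and `increase_recipient_balance` are names taken
+    from that context and are shown here only for illustration):
+
+        prepare_balance_tracking(block_env, wd.address)
+        modify_state(
+            block_env.state, wd.address, increase_recipient_balance
+        )
+        new_balance = get_account(block_env.state, wd.address).balance
+        track_balance_change(block_env, wd.address, U256(new_balance))
+        normalize_balance_changes(block_env)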
+ + + """ + # Ensure the address is tracked + track_address_access(block_env, address) + + # Cache the pre-balance for later normalization + capture_pre_balance(block_env.change_tracker, address, block_env.state) + + def track_balance_change( - tracker: StateChangeTracker, + block_env: "BlockEnvironment", address: Address, new_balance: U256, ) -> None: @@ -335,16 +365,17 @@ def track_balance_change( Parameters ---------- - tracker : - The state change tracker instance. + block_env : + The block execution environment. address : The account address whose balance changed. new_balance : The new balance value. """ - track_address_access(tracker, address) + track_address_access(block_env, address) + tracker = block_env.change_tracker block_access_index = BlockAccessIndex(tracker.current_block_access_index) add_balance_change( tracker.block_access_list_builder, @@ -362,7 +393,7 @@ def track_balance_change( def track_nonce_change( - tracker: StateChangeTracker, address: Address, new_nonce: Uint + block_env: "BlockEnvironment", address: Address, new_nonce: Uint ) -> None: """ Track a nonce change for an account. @@ -373,20 +404,19 @@ def track_nonce_change( Parameters ---------- - tracker : - The state change tracker instance. + block_env : + The block execution environment. address : The account address whose nonce changed. new_nonce : The new nonce value. - state : - The current execution state. [`CREATE`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create [`CREATE2`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create2 """ - track_address_access(tracker, address) + track_address_access(block_env, address) + tracker = block_env.change_tracker block_access_index = BlockAccessIndex(tracker.current_block_access_index) nonce_u64 = U64(new_nonce) add_nonce_change( @@ -403,7 +433,7 @@ def track_nonce_change( def track_code_change( - tracker: StateChangeTracker, address: Address, new_code: Bytes + block_env: "BlockEnvironment", address: Address, new_code: Bytes ) -> None: """ Track a code change for contract deployment. @@ -414,8 +444,8 @@ def track_code_change( Parameters ---------- - tracker : - The state change tracker instance. + block_env : + The block execution environment. address : The address receiving the contract code. new_code : @@ -425,7 +455,8 @@ def track_code_change( [`CREATE2`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create2 """ - track_address_access(tracker, address) + track_address_access(block_env, address) + tracker = block_env.change_tracker block_access_index = BlockAccessIndex(tracker.current_block_access_index) add_code_change( tracker.block_access_list_builder, @@ -441,7 +472,7 @@ def track_code_change( def handle_in_transaction_selfdestruct( - tracker: StateChangeTracker, address: Address + block_env: "BlockEnvironment", address: Address ) -> None: """ Handle an account that self-destructed in the same transaction it was @@ -456,12 +487,13 @@ def handle_in_transaction_selfdestruct( Parameters ---------- - tracker : - The state change tracker instance. + block_env : + The block execution environment. address : The address that self-destructed. 
""" + tracker = block_env.change_tracker builder = tracker.block_access_list_builder if address not in builder.accounts: return @@ -493,9 +525,7 @@ def handle_in_transaction_selfdestruct( ] -def normalize_balance_changes( - tracker: StateChangeTracker, state: "State" -) -> None: +def normalize_balance_changes(block_env: "BlockEnvironment") -> None: """ Normalize balance changes for the current block access index. @@ -515,15 +545,14 @@ def normalize_balance_changes( Parameters ---------- - tracker : - The state change tracker instance. - state : - The current execution state. + block_env : + The block execution environment. """ # Import locally to avoid circular import from ..state import get_account + tracker = block_env.change_tracker builder = tracker.block_access_list_builder current_index = tracker.current_block_access_index @@ -532,10 +561,10 @@ def normalize_balance_changes( account_data = builder.accounts[address] # Get the pre-transaction balance - pre_balance = capture_pre_balance(tracker, address, state) + pre_balance = capture_pre_balance(tracker, address, block_env.state) # Get the current (post-transaction) balance - post_balance = get_account(state, address).balance + post_balance = get_account(block_env.state, address).balance # If pre-tx balance equals post-tx balance, remove all balance changes # for this address in the current transaction @@ -548,7 +577,7 @@ def normalize_balance_changes( ] -def begin_call_frame(tracker: StateChangeTracker) -> None: +def begin_call_frame(block_env: "BlockEnvironment") -> None: """ Begin a new call frame for tracking reverts. @@ -557,14 +586,14 @@ def begin_call_frame(tracker: StateChangeTracker) -> None: Parameters ---------- - tracker : - The state change tracker instance. + block_env : + The block execution environment. """ - tracker.call_frame_snapshots.append(CallFrameSnapshot()) + block_env.change_tracker.call_frame_snapshots.append(CallFrameSnapshot()) -def rollback_call_frame(tracker: StateChangeTracker) -> None: +def rollback_call_frame(block_env: "BlockEnvironment") -> None: """ Rollback changes from the current call frame. @@ -578,10 +607,11 @@ def rollback_call_frame(tracker: StateChangeTracker) -> None: Parameters ---------- - tracker : - The state change tracker instance. + block_env : + The block execution environment. """ + tracker = block_env.change_tracker if not tracker.call_frame_snapshots: return @@ -651,7 +681,7 @@ def rollback_call_frame(tracker: StateChangeTracker) -> None: # All touched addresses remain in the access list (already tracked) -def commit_call_frame(tracker: StateChangeTracker) -> None: +def commit_call_frame(block_env: "BlockEnvironment") -> None: """ Commit changes from the current call frame. @@ -660,9 +690,9 @@ def commit_call_frame(tracker: StateChangeTracker) -> None: Parameters ---------- - tracker : - The state change tracker instance. + block_env : + The block execution environment. 
""" - if tracker.call_frame_snapshots: - tracker.call_frame_snapshots.pop() + if block_env.change_tracker.call_frame_snapshots: + block_env.change_tracker.call_frame_snapshots.pop() diff --git a/src/ethereum/forks/amsterdam/fork.py b/src/ethereum/forks/amsterdam/fork.py index c326bd1b93..4ec12acf91 100644 --- a/src/ethereum/forks/amsterdam/fork.py +++ b/src/ethereum/forks/amsterdam/fork.py @@ -32,9 +32,9 @@ from .block_access_lists.builder import build_block_access_list from .block_access_lists.rlp_utils import compute_block_access_list_hash from .block_access_lists.tracker import ( - capture_pre_balance, handle_in_transaction_selfdestruct, normalize_balance_changes, + prepare_balance_tracking, set_block_access_index, track_balance_change, ) @@ -781,7 +781,7 @@ def apply_body( # Set block access index for pre-execution system contracts # EIP-7928: System contracts use block_access_index 0 - set_block_access_index(block_env.state.change_tracker, Uint(0)) + set_block_access_index(block_env, Uint(0)) process_unchecked_system_transaction( block_env=block_env, @@ -800,9 +800,7 @@ def apply_body( # EIP-7928: Post-execution uses block_access_index len(transactions) + 1 post_execution_index = ulen(transactions) + Uint(1) - set_block_access_index( - block_env.state.change_tracker, post_execution_index - ) + set_block_access_index(block_env, post_execution_index) process_withdrawals(block_env, block_output, withdrawals) @@ -811,7 +809,7 @@ def apply_body( block_output=block_output, ) block_output.block_access_list = build_block_access_list( - block_env.state.change_tracker.block_access_list_builder + block_env.change_tracker.block_access_list_builder ) return block_output @@ -894,7 +892,7 @@ def process_transaction( """ # EIP-7928: Transactions use block_access_index 1 to len(transactions) # Transaction at index i gets block_access_index i+1 - set_block_access_index(block_env.state.change_tracker, index + Uint(1)) + set_block_access_index(block_env, index + Uint(1)) trie_set( block_output.transactions_trie, @@ -1020,17 +1018,12 @@ def process_transaction( # EIP-7928: In-transaction self-destruct - convert storage writes to # reads and remove nonce/code changes. Only accounts created in same # tx are in accounts_to_delete per EIP-6780. 
- handle_in_transaction_selfdestruct( - block_env.state.change_tracker, address - ) + handle_in_transaction_selfdestruct(block_env, address) destroy_account(block_env.state, address) # EIP-7928: Normalize balance changes for this transaction # Remove balance changes where post-tx balance equals pre-tx balance - normalize_balance_changes( - block_env.state.change_tracker, - block_env.state, - ) + normalize_balance_changes(block_env) block_output.block_gas_used += tx_gas_used_after_refund block_output.blob_gas_used += tx_blob_gas_used @@ -1070,28 +1063,21 @@ def increase_recipient_balance(recipient: Account) -> None: rlp.encode(wd), ) - # Capture pre-balance before modification (even for zero withdrawals) - # This ensures the address appears in BAL per EIP-7928 - capture_pre_balance( - block_env.state.change_tracker, wd.address, block_env.state - ) + # Prepare for balance tracking (ensures address appears in BAL and + # pre-balance is cached for normalization) + prepare_balance_tracking(block_env, wd.address) modify_state(block_env.state, wd.address, increase_recipient_balance) # Track balance change for BAL # (withdrawals are tracked as system contract changes) new_balance = get_account(block_env.state, wd.address).balance - track_balance_change( - block_env.state.change_tracker, wd.address, U256(new_balance) - ) + track_balance_change(block_env, wd.address, U256(new_balance)) # EIP-7928: Normalize balance changes for this withdrawal # Remove balance changes where post-withdrawal balance # equals pre-withdrawal balance - normalize_balance_changes( - block_env.state.change_tracker, - block_env.state, - ) + normalize_balance_changes(block_env) if account_exists_and_is_empty(block_env.state, wd.address): destroy_account(block_env.state, wd.address) diff --git a/src/ethereum/forks/amsterdam/state.py b/src/ethereum/forks/amsterdam/state.py index 3067b175d6..b47cd2d377 100644 --- a/src/ethereum/forks/amsterdam/state.py +++ b/src/ethereum/forks/amsterdam/state.py @@ -17,16 +17,14 @@ """ from dataclasses import dataclass, field -from typing import Callable, Dict, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Callable, Dict, List, Optional, Set, Tuple from ethereum_types.bytes import Bytes, Bytes32 from ethereum_types.frozen import modify from ethereum_types.numeric import U256, Uint -from .block_access_lists.builder import BlockAccessListBuilder from .block_access_lists.tracker import ( - StateChangeTracker, - capture_pre_balance, + prepare_balance_tracking, track_balance_change, track_code_change, track_nonce_change, @@ -34,6 +32,9 @@ from .fork_types import EMPTY_ACCOUNT, Account, Address, Root from .trie import EMPTY_TRIE_ROOT, Trie, copy_trie, root, trie_get, trie_set +if TYPE_CHECKING: + from .vm import BlockEnvironment # noqa: F401 + @dataclass class State: @@ -54,9 +55,6 @@ class State: ] ] = field(default_factory=list) created_accounts: Set[Address] = field(default_factory=set) - change_tracker: StateChangeTracker = field( - default_factory=lambda: StateChangeTracker(BlockAccessListBuilder()) - ) @dataclass @@ -517,13 +515,17 @@ def move_ether( sender_address: Address, recipient_address: Address, amount: U256, + block_env: "BlockEnvironment" = None, ) -> None: """ Move funds between accounts. 
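+
+    When a block environment is supplied, the sender and recipient
+    balances are recorded for the Block Access List. A sketch of the
+    value-transfer call made by `process_message`:
+
+        move_ether(
+            state,
+            message.caller,
+            message.current_target,
+            message.value,
+            message.block_env,
+        )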
""" - # Capture pre-transaction balance before first modification - capture_pre_balance(state.change_tracker, sender_address, state) - capture_pre_balance(state.change_tracker, recipient_address, state) + # Only track if block_env is provided (EIP-7928 tracking) + if block_env is not None: + # Prepare for balance tracking (captures pre-balance and ensures + # addresses are tracked) + prepare_balance_tracking(block_env, sender_address) + prepare_balance_tracking(block_env, recipient_address) def reduce_sender_balance(sender: Account) -> None: if sender.balance < amount: @@ -536,18 +538,25 @@ def increase_recipient_balance(recipient: Account) -> None: modify_state(state, sender_address, reduce_sender_balance) modify_state(state, recipient_address, increase_recipient_balance) - sender_new_balance = get_account(state, sender_address).balance - recipient_new_balance = get_account(state, recipient_address).balance + # Only track if block_env is provided (EIP-7928 tracking) + if block_env is not None: + sender_new_balance = get_account(state, sender_address).balance + recipient_new_balance = get_account(state, recipient_address).balance - track_balance_change( - state.change_tracker, sender_address, U256(sender_new_balance) - ) - track_balance_change( - state.change_tracker, recipient_address, U256(recipient_new_balance) - ) + track_balance_change( + block_env, sender_address, U256(sender_new_balance) + ) + track_balance_change( + block_env, recipient_address, U256(recipient_new_balance) + ) -def set_account_balance(state: State, address: Address, amount: U256) -> None: +def set_account_balance( + state: State, + address: Address, + amount: U256, + block_env: "BlockEnvironment" = None, +) -> None: """ Sets the balance of an account. @@ -562,19 +571,29 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: amount: The amount that needs to set in balance. + block_env: + Optional block environment for tracking changes. + """ - # Capture pre-transaction balance before first modification - capture_pre_balance(state.change_tracker, address, state) + # Only track if block_env is provided (EIP-7928 tracking) + if block_env is not None: + # Prepare for balance tracking (captures pre-balance and ensures + # address is tracked) + prepare_balance_tracking(block_env, address) def set_balance(account: Account) -> None: account.balance = amount modify_state(state, address, set_balance) - track_balance_change(state.change_tracker, address, amount) + # Only track if block_env is provided (EIP-7928 tracking) + if block_env is not None: + track_balance_change(block_env, address, amount) -def increment_nonce(state: State, address: Address) -> None: +def increment_nonce( + state: State, address: Address, block_env: "BlockEnvironment" = None +) -> None: """ Increments the nonce of an account. @@ -586,6 +605,9 @@ def increment_nonce(state: State, address: Address) -> None: address: Address of the account whose nonce needs to be incremented. + block_env: + Optional block environment for tracking changes. 
+ """ def increase_nonce(sender: Account) -> None: @@ -593,18 +615,25 @@ def increase_nonce(sender: Account) -> None: modify_state(state, address, increase_nonce) - # Track nonce change for Block Access List - # (for ALL accounts and ALL nonce changes) - # This includes: - # - EOA senders (transaction nonce increments) - # - Contracts performing CREATE/CREATE2 - # - Deployed contracts - # - EIP-7702 authorities - account = get_account(state, address) - track_nonce_change(state.change_tracker, address, account.nonce) + # Only track if block_env is provided (EIP-7928 tracking) + if block_env is not None: + # Track nonce change for Block Access List + # (for ALL accounts and ALL nonce changes) + # This includes: + # - EOA senders (transaction nonce increments) + # - Contracts performing CREATE/CREATE2 + # - Deployed contracts + # - EIP-7702 authorities + account = get_account(state, address) + track_nonce_change(block_env, address, account.nonce) -def set_code(state: State, address: Address, code: Bytes) -> None: +def set_code( + state: State, + address: Address, + code: Bytes, + block_env: "BlockEnvironment" = None, +) -> None: """ Sets Account code. @@ -619,6 +648,9 @@ def set_code(state: State, address: Address, code: Bytes) -> None: code: The bytecode that needs to be set. + block_env: + Optional block environment for tracking changes. + """ def write_code(sender: Account) -> None: @@ -626,12 +658,14 @@ def write_code(sender: Account) -> None: modify_state(state, address, write_code) - # Only track code changes if it's not setting empty code on a - # newly created address. For newly created addresses, setting - # code to b"" is not a meaningful state change since the address - # had no code to begin with. - if not (code == b"" and address in state.created_accounts): - track_code_change(state.change_tracker, address, code) + # Only track if block_env is provided (EIP-7928 tracking) + if block_env is not None: + # Only track code changes if it's not setting empty code on a + # newly created address. For newly created addresses, setting + # code to b"" is not a meaningful state change since the address + # had no code to begin with. 
+ if not (code == b"" and address in state.created_accounts): + track_code_change(block_env, address, code) def get_storage_original(state: State, address: Address, key: Bytes32) -> U256: diff --git a/src/ethereum/forks/amsterdam/vm/__init__.py b/src/ethereum/forks/amsterdam/vm/__init__.py index 7c2db77ce9..c10df4897b 100644 --- a/src/ethereum/forks/amsterdam/vm/__init__.py +++ b/src/ethereum/forks/amsterdam/vm/__init__.py @@ -21,7 +21,9 @@ from ethereum.crypto.hash import Hash32 from ethereum.exceptions import EthereumException +from ..block_access_lists.builder import BlockAccessListBuilder from ..block_access_lists.rlp_types import BlockAccessList +from ..block_access_lists.tracker import StateChangeTracker from ..blocks import Log, Receipt, Withdrawal from ..fork_types import Address, Authorization, VersionedHash from ..state import State, TransientStorage @@ -48,6 +50,9 @@ class BlockEnvironment: prev_randao: Bytes32 excess_blob_gas: U64 parent_beacon_block_root: Hash32 + change_tracker: StateChangeTracker = field( + default_factory=lambda: StateChangeTracker(BlockAccessListBuilder()) + ) @dataclass diff --git a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py index eca5978435..649027cb43 100644 --- a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py +++ b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py @@ -115,46 +115,92 @@ def recover_authority(authorization: Authorization) -> Address: return Address(keccak256(public_key)[12:32]) -def access_delegation( +def check_delegation( evm: Evm, address: Address -) -> Tuple[bool, Address, Bytes, Uint]: +) -> Tuple[bool, Address, Address, Bytes, Uint]: """ - Get the delegation address, code, and the cost of access from the address. + Check delegation info without modifying state or tracking. Parameters ---------- evm : `Evm` The execution frame. address : `Address` - The address to get the delegation from. + The address to check for delegation. Returns ------- - delegation : `Tuple[bool, Address, Bytes, Uint]` - The delegation address, code, and access gas cost. + delegation : `Tuple[bool, Address, Address, Bytes, Uint]` + (is_delegated, original_address, final_address, code, + additional_gas_cost) """ state = evm.message.block_env.state code = get_account(state, address).code if not is_valid_delegation(code): - return False, address, code, Uint(0) + return False, address, address, code, Uint(0) - # EIP-7928: Track the authority address (delegated account being called) - track_address_access(state.change_tracker, address) + delegated_address = Address(code[EOA_DELEGATION_MARKER_LENGTH:]) - address = Address(code[EOA_DELEGATION_MARKER_LENGTH:]) - if address in evm.accessed_addresses: - access_gas_cost = GAS_WARM_ACCESS + if delegated_address in evm.accessed_addresses: + additional_gas_cost = GAS_WARM_ACCESS else: - evm.accessed_addresses.add(address) - access_gas_cost = GAS_COLD_ACCOUNT_ACCESS - code = get_account(state, address).code + additional_gas_cost = GAS_COLD_ACCOUNT_ACCESS + + delegated_code = get_account(state, delegated_address).code + + return ( + True, + address, + delegated_address, + delegated_code, + additional_gas_cost, + ) + + +def apply_delegation_tracking( + evm: Evm, original_address: Address, delegated_address: Address +) -> None: + """ + Apply delegation tracking after gas check passes. + + Parameters + ---------- + evm : `Evm` + The execution frame. + original_address : `Address` + The original address that was called. 
+ delegated_address : `Address` + The address delegated to. + + """ + track_address_access(evm.message.block_env, original_address) + + if delegated_address not in evm.accessed_addresses: + evm.accessed_addresses.add(delegated_address) + + track_address_access(evm.message.block_env, delegated_address) + + +def access_delegation( + evm: Evm, address: Address +) -> Tuple[bool, Address, Bytes, Uint]: + """ + Access delegation info and track state changes. + + DEPRECATED: Use check_delegation and apply_delegation_tracking + for proper gas check ordering. + + """ + is_delegated, orig_addr, final_addr, code, gas_cost = check_delegation( + evm, address + ) - # EIP-7928: Track delegation target when loaded as call target - track_address_access(state.change_tracker, address) + if is_delegated: + apply_delegation_tracking(evm, orig_addr, final_addr) - return True, address, code, access_gas_cost + return is_delegated, final_addr, code, gas_cost def set_delegation(message: Message) -> U256: @@ -193,9 +239,7 @@ def set_delegation(message: Message) -> U256: authority_account = get_account(state, authority) authority_code = authority_account.code - # EIP-7928: Track authority account access in BAL even if delegation - # fails - track_address_access(state.change_tracker, authority) + track_address_access(message.block_env, authority) if authority_code and not is_valid_delegation(authority_code): continue diff --git a/src/ethereum/forks/amsterdam/vm/gas.py b/src/ethereum/forks/amsterdam/vm/gas.py index 360a4430e3..8fe1820feb 100644 --- a/src/ethereum/forks/amsterdam/vm/gas.py +++ b/src/ethereum/forks/amsterdam/vm/gas.py @@ -118,6 +118,23 @@ class MessageCallGas: sub_call: Uint +def check_gas(evm: Evm, amount: Uint) -> None: + """ + Checks if `amount` gas is available without charging it. + Raises OutOfGasError if insufficient gas. + + Parameters + ---------- + evm : + The current EVM. + amount : + The amount of gas to check. + + """ + if evm.gas_left < amount: + raise OutOfGasError + + def charge_gas(evm: Evm, amount: Uint) -> None: """ Subtracts `amount` from `evm.gas_left`. diff --git a/src/ethereum/forks/amsterdam/vm/instructions/environment.py b/src/ethereum/forks/amsterdam/vm/instructions/environment.py index 39b89567ff..e984d8030f 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/environment.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/environment.py @@ -36,6 +36,7 @@ calculate_blob_gas_price, calculate_gas_extend_memory, charge_gas, + check_gas, ) from ..stack import pop, push @@ -77,17 +78,18 @@ def balance(evm: Evm) -> None: address = to_address_masked(pop(evm.stack)) # GAS - if address in evm.accessed_addresses: - charge_gas(evm, GAS_WARM_ACCESS) - else: + is_cold_access = address not in evm.accessed_addresses + gas_cost = GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS + check_gas(evm, gas_cost) + if is_cold_access: evm.accessed_addresses.add(address) - charge_gas(evm, GAS_COLD_ACCOUNT_ACCESS) + track_address_access(evm.message.block_env, address) + charge_gas(evm, gas_cost) # OPERATION # Non-existent accounts default to EMPTY_ACCOUNT, which has balance 0. 
state = evm.message.block_env.state balance = get_account(state, address).balance - track_address_access(state.change_tracker, address) push(evm.stack, balance) @@ -344,18 +346,19 @@ def extcodesize(evm: Evm) -> None: address = to_address_masked(pop(evm.stack)) # GAS - if address in evm.accessed_addresses: - access_gas_cost = GAS_WARM_ACCESS - else: + is_cold_access = address not in evm.accessed_addresses + access_gas_cost = ( + GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS + ) + check_gas(evm, access_gas_cost) + if is_cold_access: evm.accessed_addresses.add(address) - access_gas_cost = GAS_COLD_ACCOUNT_ACCESS - + track_address_access(evm.message.block_env, address) charge_gas(evm, access_gas_cost) # OPERATION state = evm.message.block_env.state code = get_account(state, address).code - track_address_access(state.change_tracker, address) codesize = U256(len(code)) push(evm.stack, codesize) @@ -387,19 +390,22 @@ def extcodecopy(evm: Evm) -> None: evm.memory, [(memory_start_index, size)] ) - if address in evm.accessed_addresses: - access_gas_cost = GAS_WARM_ACCESS - else: - evm.accessed_addresses.add(address) - access_gas_cost = GAS_COLD_ACCOUNT_ACCESS + is_cold_access = address not in evm.accessed_addresses + access_gas_cost = ( + GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS + ) + total_gas_cost = access_gas_cost + copy_gas_cost + extend_memory.cost - charge_gas(evm, access_gas_cost + copy_gas_cost + extend_memory.cost) + check_gas(evm, total_gas_cost) + if is_cold_access: + evm.accessed_addresses.add(address) + track_address_access(evm.message.block_env, address) + charge_gas(evm, total_gas_cost) # OPERATION evm.memory += b"\x00" * extend_memory.expand_by state = evm.message.block_env.state code = get_account(state, address).code - track_address_access(state.change_tracker, address) value = buffer_read(code, code_start_index, size) memory_write(evm.memory, memory_start_index, value) @@ -480,18 +486,19 @@ def extcodehash(evm: Evm) -> None: address = to_address_masked(pop(evm.stack)) # GAS - if address in evm.accessed_addresses: - access_gas_cost = GAS_WARM_ACCESS - else: + is_cold_access = address not in evm.accessed_addresses + access_gas_cost = ( + GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS + ) + check_gas(evm, access_gas_cost) + if is_cold_access: evm.accessed_addresses.add(address) - access_gas_cost = GAS_COLD_ACCOUNT_ACCESS - + track_address_access(evm.message.block_env, address) charge_gas(evm, access_gas_cost) # OPERATION state = evm.message.block_env.state account = get_account(state, address) - track_address_access(state.change_tracker, address) if account == EMPTY_ACCOUNT: codehash = U256(0) diff --git a/src/ethereum/forks/amsterdam/vm/instructions/storage.py b/src/ethereum/forks/amsterdam/vm/instructions/storage.py index 35ff36bab3..65a6a38455 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/storage.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/storage.py @@ -34,6 +34,7 @@ GAS_STORAGE_UPDATE, GAS_WARM_ACCESS, charge_gas, + check_gas, ) from ..stack import pop, push @@ -53,22 +54,24 @@ def sload(evm: Evm) -> None: key = pop(evm.stack).to_be_bytes32() # GAS - if (evm.message.current_target, key) in evm.accessed_storage_keys: - charge_gas(evm, GAS_WARM_ACCESS) - else: + gas_cost = ( + GAS_WARM_ACCESS + if (evm.message.current_target, key) in evm.accessed_storage_keys + else GAS_COLD_SLOAD + ) + check_gas(evm, gas_cost) + if (evm.message.current_target, key) not in evm.accessed_storage_keys: 
evm.accessed_storage_keys.add((evm.message.current_target, key)) - charge_gas(evm, GAS_COLD_SLOAD) - - # OPERATION - state = evm.message.block_env.state - value = get_storage(state, evm.message.current_target, key) - track_storage_read( - state.change_tracker, + evm.message.block_env, evm.message.current_target, key, - evm.message.block_env.state, ) + charge_gas(evm, gas_cost) + + # OPERATION + state = evm.message.block_env.state + value = get_storage(state, evm.message.current_target, key) push(evm.stack, value) @@ -98,19 +101,14 @@ def sstore(evm: Evm) -> None: ) current_value = get_storage(state, evm.message.current_target, key) - # Track the implicit SLOAD that occurs in SSTORE - # This must happen BEFORE charge_gas() so reads are recorded even if OOG - track_storage_read( - state.change_tracker, + # GAS + gas_cost = Uint(0) + is_cold_access = ( evm.message.current_target, key, - evm.message.block_env.state, - ) - - gas_cost = Uint(0) + ) not in evm.accessed_storage_keys - if (evm.message.current_target, key) not in evm.accessed_storage_keys: - evm.accessed_storage_keys.add((evm.message.current_target, key)) + if is_cold_access: gas_cost += GAS_COLD_SLOAD if original_value == current_value and current_value != new_value: @@ -121,7 +119,28 @@ def sstore(evm: Evm) -> None: else: gas_cost += GAS_WARM_ACCESS - # Refund Counter Calculation + check_gas(evm, gas_cost) + + if is_cold_access: + evm.accessed_storage_keys.add((evm.message.current_target, key)) + + track_storage_read( + evm.message.block_env, + evm.message.current_target, + key, + ) + track_storage_write( + evm.message.block_env, + evm.message.current_target, + key, + new_value, + ) + + charge_gas(evm, gas_cost) + if evm.message.is_static: + raise WriteInStaticContext + + # REFUND COUNTER if current_value != new_value: if original_value != 0 and current_value != 0 and new_value == 0: # Storage is cleared for the first time in the transaction @@ -142,22 +161,7 @@ def sstore(evm: Evm) -> None: GAS_STORAGE_UPDATE - GAS_COLD_SLOAD - GAS_WARM_ACCESS ) - charge_gas(evm, gas_cost) - if evm.message.is_static: - raise WriteInStaticContext - - # Track storage write BEFORE modifying state - # so we capture the correct pre-value - - track_storage_write( - state.change_tracker, - evm.message.current_target, - key, - new_value, - state, - ) - - # Now modify the storage + # OPERATION set_storage(state, evm.message.current_target, key, new_value) # PROGRAM COUNTER diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index 1de3a140a1..8c1babdcd1 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -32,7 +32,10 @@ compute_create2_contract_address, to_address_masked, ) -from ...vm.eoa_delegation import access_delegation +from ...vm.eoa_delegation import ( + apply_delegation_tracking, + check_delegation, +) from .. 
import ( Evm, Message, @@ -53,6 +56,7 @@ calculate_gas_extend_memory, calculate_message_call_gas, charge_gas, + check_gas, init_code_cost, max_message_call_gas, ) @@ -110,8 +114,7 @@ def generic_create( evm.accessed_addresses.add(contract_address) - # Track address access for BAL - track_address_access(state.change_tracker, contract_address) + track_address_access(evm.message.block_env, contract_address) if account_has_code_or_nonce( state, contract_address @@ -389,23 +392,22 @@ def call(evm: Evm) -> None: ], ) - if to in evm.accessed_addresses: - access_gas_cost = GAS_WARM_ACCESS - else: - evm.accessed_addresses.add(to) - access_gas_cost = GAS_COLD_ACCOUNT_ACCESS - - # Track address access for BAL - track_address_access(evm.message.block_env.state.change_tracker, to) + is_cold_access = to not in evm.accessed_addresses + access_gas_cost = ( + GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS + ) - code_address = to ( - disable_precompiles, - code_address, + is_delegated, + original_address, + final_address, code, - delegated_access_gas_cost, - ) = access_delegation(evm, code_address) - access_gas_cost += delegated_access_gas_cost + delegation_gas_cost, + ) = check_delegation(evm, to) + access_gas_cost += delegation_gas_cost + + code_address = final_address + disable_precompiles = is_delegated create_gas_cost = GAS_NEW_ACCOUNT if value == 0 or is_account_alive(evm.message.block_env.state, to): @@ -418,6 +420,17 @@ def call(evm: Evm) -> None: extend_memory.cost, access_gas_cost + create_gas_cost + transfer_gas_cost, ) + + check_gas(evm, message_call_gas.cost + extend_memory.cost) + + if is_cold_access: + evm.accessed_addresses.add(to) + + track_address_access(evm.message.block_env, to) + + if is_delegated: + apply_delegation_tracking(evm, original_address, final_address) + charge_gas(evm, message_call_gas.cost + extend_memory.cost) if evm.message.is_static and value != U256(0): raise WriteInStaticContext @@ -453,7 +466,7 @@ def call(evm: Evm) -> None: def callcode(evm: Evm) -> None: """ - Message-call into this account with alternative account’s code. + Message-call into this account with alternative account's code. 
Parameters ---------- @@ -481,24 +494,22 @@ def callcode(evm: Evm) -> None: ], ) - if code_address in evm.accessed_addresses: - access_gas_cost = GAS_WARM_ACCESS - else: - evm.accessed_addresses.add(code_address) - access_gas_cost = GAS_COLD_ACCOUNT_ACCESS - - # Track address access for BAL - track_address_access( - evm.message.block_env.state.change_tracker, code_address + is_cold_access = code_address not in evm.accessed_addresses + access_gas_cost = ( + GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS ) ( - disable_precompiles, - code_address, + is_delegated, + original_address, + final_address, code, - delegated_access_gas_cost, - ) = access_delegation(evm, code_address) - access_gas_cost += delegated_access_gas_cost + delegation_gas_cost, + ) = check_delegation(evm, code_address) + access_gas_cost += delegation_gas_cost + + code_address = final_address + disable_precompiles = is_delegated transfer_gas_cost = Uint(0) if value == 0 else GAS_CALL_VALUE message_call_gas = calculate_message_call_gas( @@ -508,6 +519,17 @@ def callcode(evm: Evm) -> None: extend_memory.cost, access_gas_cost + transfer_gas_cost, ) + + check_gas(evm, message_call_gas.cost + extend_memory.cost) + + if is_cold_access: + evm.accessed_addresses.add(original_address) + + track_address_access(evm.message.block_env, original_address) + + if is_delegated: + apply_delegation_tracking(evm, original_address, final_address) + charge_gas(evm, message_call_gas.cost + extend_memory.cost) # OPERATION @@ -559,15 +581,10 @@ def selfdestruct(evm: Evm) -> None: # GAS gas_cost = GAS_SELF_DESTRUCT - if beneficiary not in evm.accessed_addresses: - evm.accessed_addresses.add(beneficiary) + is_cold_access = beneficiary not in evm.accessed_addresses + if is_cold_access: gas_cost += GAS_COLD_ACCOUNT_ACCESS - # Track address access for BAL - track_address_access( - evm.message.block_env.state.change_tracker, beneficiary - ) - if ( not is_account_alive(evm.message.block_env.state, beneficiary) and get_account( @@ -577,6 +594,13 @@ def selfdestruct(evm: Evm) -> None: ): gas_cost += GAS_SELF_DESTRUCT_NEW_ACCOUNT + check_gas(evm, gas_cost) + + if is_cold_access: + evm.accessed_addresses.add(beneficiary) + + track_address_access(evm.message.block_env, beneficiary) + charge_gas(evm, gas_cost) originator = evm.message.current_target @@ -637,28 +661,37 @@ def delegatecall(evm: Evm) -> None: ], ) - if code_address in evm.accessed_addresses: - access_gas_cost = GAS_WARM_ACCESS - else: - evm.accessed_addresses.add(code_address) - access_gas_cost = GAS_COLD_ACCOUNT_ACCESS - - # Track address access for BAL - track_address_access( - evm.message.block_env.state.change_tracker, code_address + is_cold_access = code_address not in evm.accessed_addresses + access_gas_cost = ( + GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS ) ( - disable_precompiles, - code_address, + is_delegated, + original_address, + final_address, code, - delegated_access_gas_cost, - ) = access_delegation(evm, code_address) - access_gas_cost += delegated_access_gas_cost + delegation_gas_cost, + ) = check_delegation(evm, code_address) + access_gas_cost += delegation_gas_cost + + code_address = final_address + disable_precompiles = is_delegated message_call_gas = calculate_message_call_gas( U256(0), gas, Uint(evm.gas_left), extend_memory.cost, access_gas_cost ) + + check_gas(evm, message_call_gas.cost + extend_memory.cost) + + if is_cold_access: + evm.accessed_addresses.add(original_address) + + track_address_access(evm.message.block_env, original_address) + 
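+    # The cold-access bookkeeping and BAL tracking above, as well as the
+    # delegation tracking below, run only after the gas check has passed.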
+ if is_delegated: + apply_delegation_tracking(evm, original_address, final_address) + charge_gas(evm, message_call_gas.cost + extend_memory.cost) # OPERATION @@ -711,23 +744,22 @@ def staticcall(evm: Evm) -> None: ], ) - if to in evm.accessed_addresses: - access_gas_cost = GAS_WARM_ACCESS - else: - evm.accessed_addresses.add(to) - access_gas_cost = GAS_COLD_ACCOUNT_ACCESS - - # Track address access for BAL - track_address_access(evm.message.block_env.state.change_tracker, to) + is_cold_access = to not in evm.accessed_addresses + access_gas_cost = ( + GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS + ) - code_address = to ( - disable_precompiles, - code_address, + is_delegated, + original_address, + final_address, code, - delegated_access_gas_cost, - ) = access_delegation(evm, code_address) - access_gas_cost += delegated_access_gas_cost + delegation_gas_cost, + ) = check_delegation(evm, to) + access_gas_cost += delegation_gas_cost + + code_address = final_address + disable_precompiles = is_delegated message_call_gas = calculate_message_call_gas( U256(0), @@ -736,6 +768,17 @@ def staticcall(evm: Evm) -> None: extend_memory.cost, access_gas_cost, ) + + check_gas(evm, message_call_gas.cost + extend_memory.cost) + + if is_cold_access: + evm.accessed_addresses.add(to) + + track_address_access(evm.message.block_env, to) + + if is_delegated: + apply_delegation_tracking(evm, original_address, final_address) + charge_gas(evm, message_call_gas.cost + extend_memory.cost) # OPERATION diff --git a/src/ethereum/forks/amsterdam/vm/interpreter.py b/src/ethereum/forks/amsterdam/vm/interpreter.py index afd66169f1..5b33e48dd0 100644 --- a/src/ethereum/forks/amsterdam/vm/interpreter.py +++ b/src/ethereum/forks/amsterdam/vm/interpreter.py @@ -141,7 +141,7 @@ def process_message_call(message: Message) -> MessageCallOutput: # EIP-7928: Track delegation target when loaded as call target track_address_access( - block_env.state.change_tracker, + block_env, delegated_address, ) @@ -253,14 +253,21 @@ def process_message(message: Message) -> Evm: # take snapshot of state before processing the message begin_transaction(state, transient_storage) - if hasattr(state, "change_tracker") and state.change_tracker: - begin_call_frame(state.change_tracker) + if ( + hasattr(message.block_env, "change_tracker") + and message.block_env.change_tracker + ): + begin_call_frame(message.block_env) # Track target address access when processing a message - track_address_access(state.change_tracker, message.current_target) + track_address_access(message.block_env, message.current_target) if message.should_transfer_value and message.value != 0: move_ether( - state, message.caller, message.current_target, message.value + state, + message.caller, + message.current_target, + message.value, + message.block_env, ) evm = execute_code(message) @@ -268,12 +275,18 @@ def process_message(message: Message) -> Evm: # revert state to the last saved checkpoint # since the message call resulted in an error rollback_transaction(state, transient_storage) - if hasattr(state, "change_tracker") and state.change_tracker: - rollback_call_frame(state.change_tracker) + if ( + hasattr(message.block_env, "change_tracker") + and message.block_env.change_tracker + ): + rollback_call_frame(message.block_env) else: commit_transaction(state, transient_storage) - if hasattr(state, "change_tracker") and state.change_tracker: - commit_call_frame(state.change_tracker) + if ( + hasattr(message.block_env, "change_tracker") + and message.block_env.change_tracker 
+ ): + commit_call_frame(message.block_env) return evm From e87d13ab9bac49af785e5f9ac98f369bd97f8d31 Mon Sep 17 00:00:00 2001 From: fselmo Date: Wed, 5 Nov 2025 10:59:35 -0700 Subject: [PATCH 016/154] fix(spec-specs): require and use blockenv for state tracking --- src/ethereum/forks/amsterdam/fork.py | 9 +- src/ethereum/forks/amsterdam/state.py | 86 ++++++++----------- .../forks/amsterdam/vm/eoa_delegation.py | 4 +- .../amsterdam/vm/instructions/storage.py | 23 +++-- .../forks/amsterdam/vm/instructions/system.py | 4 + .../forks/amsterdam/vm/interpreter.py | 6 +- .../evm_tools/t8n/__init__.py | 7 +- 7 files changed, 65 insertions(+), 74 deletions(-) diff --git a/src/ethereum/forks/amsterdam/fork.py b/src/ethereum/forks/amsterdam/fork.py index 4ec12acf91..d74c8eeb80 100644 --- a/src/ethereum/forks/amsterdam/fork.py +++ b/src/ethereum/forks/amsterdam/fork.py @@ -923,13 +923,13 @@ def process_transaction( effective_gas_fee = tx.gas * effective_gas_price gas = tx.gas - intrinsic_gas - increment_nonce(block_env.state, sender) + increment_nonce(block_env.state, sender, block_env) sender_balance_after_gas_fee = ( Uint(sender_account.balance) - effective_gas_fee - blob_gas_fee ) set_account_balance( - block_env.state, sender, U256(sender_balance_after_gas_fee) + block_env.state, sender, U256(sender_balance_after_gas_fee), block_env ) access_list_addresses = set() @@ -995,7 +995,9 @@ def process_transaction( sender_balance_after_refund = get_account( block_env.state, sender ).balance + U256(gas_refund_amount) - set_account_balance(block_env.state, sender, sender_balance_after_refund) + set_account_balance( + block_env.state, sender, sender_balance_after_refund, block_env + ) # transfer miner fees coinbase_balance_after_mining_fee = get_account( @@ -1007,6 +1009,7 @@ def process_transaction( block_env.state, block_env.coinbase, coinbase_balance_after_mining_fee, + block_env, ) if coinbase_balance_after_mining_fee == 0 and account_exists_and_is_empty( diff --git a/src/ethereum/forks/amsterdam/state.py b/src/ethereum/forks/amsterdam/state.py index b47cd2d377..3656a386c7 100644 --- a/src/ethereum/forks/amsterdam/state.py +++ b/src/ethereum/forks/amsterdam/state.py @@ -515,17 +515,15 @@ def move_ether( sender_address: Address, recipient_address: Address, amount: U256, - block_env: "BlockEnvironment" = None, + block_env: "BlockEnvironment", ) -> None: """ Move funds between accounts. 
""" - # Only track if block_env is provided (EIP-7928 tracking) - if block_env is not None: - # Prepare for balance tracking (captures pre-balance and ensures - # addresses are tracked) - prepare_balance_tracking(block_env, sender_address) - prepare_balance_tracking(block_env, recipient_address) + # Prepare for balance tracking (captures pre-balance and ensures + # addresses are tracked) + prepare_balance_tracking(block_env, sender_address) + prepare_balance_tracking(block_env, recipient_address) def reduce_sender_balance(sender: Account) -> None: if sender.balance < amount: @@ -538,24 +536,20 @@ def increase_recipient_balance(recipient: Account) -> None: modify_state(state, sender_address, reduce_sender_balance) modify_state(state, recipient_address, increase_recipient_balance) - # Only track if block_env is provided (EIP-7928 tracking) - if block_env is not None: - sender_new_balance = get_account(state, sender_address).balance - recipient_new_balance = get_account(state, recipient_address).balance + sender_new_balance = get_account(state, sender_address).balance + recipient_new_balance = get_account(state, recipient_address).balance - track_balance_change( - block_env, sender_address, U256(sender_new_balance) - ) - track_balance_change( - block_env, recipient_address, U256(recipient_new_balance) - ) + track_balance_change(block_env, sender_address, U256(sender_new_balance)) + track_balance_change( + block_env, recipient_address, U256(recipient_new_balance) + ) def set_account_balance( state: State, address: Address, amount: U256, - block_env: "BlockEnvironment" = None, + block_env: "BlockEnvironment", ) -> None: """ Sets the balance of an account. @@ -572,27 +566,23 @@ def set_account_balance( The amount that needs to set in balance. block_env: - Optional block environment for tracking changes. + Block environment for tracking changes. """ - # Only track if block_env is provided (EIP-7928 tracking) - if block_env is not None: - # Prepare for balance tracking (captures pre-balance and ensures - # address is tracked) - prepare_balance_tracking(block_env, address) + # Prepare for balance tracking (captures pre-balance and ensures + # address is tracked) + prepare_balance_tracking(block_env, address) def set_balance(account: Account) -> None: account.balance = amount modify_state(state, address, set_balance) - # Only track if block_env is provided (EIP-7928 tracking) - if block_env is not None: - track_balance_change(block_env, address, amount) + track_balance_change(block_env, address, amount) def increment_nonce( - state: State, address: Address, block_env: "BlockEnvironment" = None + state: State, address: Address, block_env: "BlockEnvironment" ) -> None: """ Increments the nonce of an account. @@ -606,7 +596,7 @@ def increment_nonce( Address of the account whose nonce needs to be incremented. block_env: - Optional block environment for tracking changes. + Block environment for tracking changes. 
""" @@ -615,24 +605,22 @@ def increase_nonce(sender: Account) -> None: modify_state(state, address, increase_nonce) - # Only track if block_env is provided (EIP-7928 tracking) - if block_env is not None: - # Track nonce change for Block Access List - # (for ALL accounts and ALL nonce changes) - # This includes: - # - EOA senders (transaction nonce increments) - # - Contracts performing CREATE/CREATE2 - # - Deployed contracts - # - EIP-7702 authorities - account = get_account(state, address) - track_nonce_change(block_env, address, account.nonce) + # Track nonce change for Block Access List (EIP-7928) + # (for ALL accounts and ALL nonce changes) + # This includes: + # - EOA senders (transaction nonce increments) + # - Contracts performing CREATE/CREATE2 + # - Deployed contracts + # - EIP-7702 authorities + account = get_account(state, address) + track_nonce_change(block_env, address, account.nonce) def set_code( state: State, address: Address, code: Bytes, - block_env: "BlockEnvironment" = None, + block_env: "BlockEnvironment", ) -> None: """ Sets Account code. @@ -649,7 +637,7 @@ def set_code( The bytecode that needs to be set. block_env: - Optional block environment for tracking changes. + Block environment for tracking changes. """ @@ -658,14 +646,12 @@ def write_code(sender: Account) -> None: modify_state(state, address, write_code) - # Only track if block_env is provided (EIP-7928 tracking) - if block_env is not None: - # Only track code changes if it's not setting empty code on a - # newly created address. For newly created addresses, setting - # code to b"" is not a meaningful state change since the address - # had no code to begin with. - if not (code == b"" and address in state.created_accounts): - track_code_change(block_env, address, code) + # Only track code changes if it's not setting empty code on a + # newly created address (EIP-7928). For newly created addresses, setting + # code to b"" is not a meaningful state change since the address + # had no code to begin with. 
+ if not (code == b"" and address in state.created_accounts): + track_code_change(block_env, address, code) def get_storage_original(state: State, address: Address, key: Bytes32) -> U256: diff --git a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py index 649027cb43..c831f9d337 100644 --- a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py +++ b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py @@ -255,9 +255,9 @@ def set_delegation(message: Message) -> U256: code_to_set = b"" else: code_to_set = EOA_DELEGATION_MARKER + auth.address - set_code(state, authority, code_to_set) + set_code(state, authority, code_to_set, message.block_env) - increment_nonce(state, authority) + increment_nonce(state, authority, message.block_env) if message.code_address is None: raise InvalidBlock("Invalid type 4 transaction: no target") diff --git a/src/ethereum/forks/amsterdam/vm/instructions/storage.py b/src/ethereum/forks/amsterdam/vm/instructions/storage.py index 65a6a38455..f8bf08ca29 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/storage.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/storage.py @@ -100,6 +100,11 @@ def sstore(evm: Evm) -> None: state, evm.message.current_target, key ) current_value = get_storage(state, evm.message.current_target, key) + track_storage_read( + evm.message.block_env, + evm.message.current_target, + key, + ) # GAS gas_cost = Uint(0) @@ -124,18 +129,6 @@ def sstore(evm: Evm) -> None: if is_cold_access: evm.accessed_storage_keys.add((evm.message.current_target, key)) - track_storage_read( - evm.message.block_env, - evm.message.current_target, - key, - ) - track_storage_write( - evm.message.block_env, - evm.message.current_target, - key, - new_value, - ) - charge_gas(evm, gas_cost) if evm.message.is_static: raise WriteInStaticContext @@ -163,6 +156,12 @@ def sstore(evm: Evm) -> None: # OPERATION set_storage(state, evm.message.current_target, key, new_value) + track_storage_write( + evm.message.block_env, + evm.message.current_target, + key, + new_value, + ) # PROGRAM COUNTER evm.pc += Uint(1) diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index 8c1babdcd1..873e1cd4d8 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -122,6 +122,7 @@ def generic_create( increment_nonce( state, evm.message.current_target, + evm.message.block_env, ) push(evm.stack, U256(0)) return @@ -129,6 +130,7 @@ def generic_create( increment_nonce( state, evm.message.current_target, + evm.message.block_env, ) child_message = Message( @@ -613,6 +615,7 @@ def selfdestruct(evm: Evm) -> None: originator, beneficiary, originator_balance, + evm.message.block_env, ) # register account for deletion only if it was created @@ -624,6 +627,7 @@ def selfdestruct(evm: Evm) -> None: evm.message.block_env.state, originator, U256(0), + evm.message.block_env, ) evm.accounts_to_delete.add(originator) diff --git a/src/ethereum/forks/amsterdam/vm/interpreter.py b/src/ethereum/forks/amsterdam/vm/interpreter.py index 5b33e48dd0..8cd40cbce1 100644 --- a/src/ethereum/forks/amsterdam/vm/interpreter.py +++ b/src/ethereum/forks/amsterdam/vm/interpreter.py @@ -205,7 +205,7 @@ def process_create_message(message: Message) -> Evm: # added to SELFDESTRUCT by EIP-6780. 
mark_account_created(state, message.current_target) - increment_nonce(state, message.current_target) + increment_nonce(state, message.current_target, message.block_env) evm = process_message(message) if not evm.error: contract_code = evm.output @@ -223,7 +223,9 @@ def process_create_message(message: Message) -> Evm: evm.output = b"" evm.error = error else: - set_code(state, message.current_target, contract_code) + set_code( + state, message.current_target, contract_code, message.block_env + ) commit_transaction(state, transient_storage) else: rollback_transaction(state, transient_storage) diff --git a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py index 6ef8d72d3b..3c07ec1d20 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py @@ -412,7 +412,6 @@ def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: ) if self.fork.is_after_fork("amsterdam"): - assert block_env.state.change_tracker is not None num_transactions = ulen( [ tx_idx @@ -423,9 +422,7 @@ def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: # post-execution use n + 1 post_execution_index = num_transactions + Uint(1) - self.fork.set_block_access_index( - block_env.state.change_tracker, post_execution_index - ) + self.fork.set_block_access_index(block_env, post_execution_index) if not self.fork.proof_of_stake: if self.options.state_reward is None: @@ -445,7 +442,7 @@ def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: if self.fork.is_after_fork("amsterdam"): block_output.block_access_list = self.fork.build_block_access_list( - block_env.state.change_tracker.block_access_list_builder + block_env.change_tracker.block_access_list_builder ) def run_blockchain_test(self) -> None: From 873710c505d5d738826c93d12d96e5b38259f3eb Mon Sep 17 00:00:00 2001 From: fselmo Date: Wed, 5 Nov 2025 18:08:56 -0700 Subject: [PATCH 017/154] refactor(spec-specs): track BAL changes via frames --- .../amsterdam/block_access_lists/__init__.py | 30 - .../amsterdam/block_access_lists/builder.py | 71 +- .../amsterdam/block_access_lists/rlp_utils.py | 4 +- .../amsterdam/block_access_lists/tracker.py | 698 ------------------ src/ethereum/forks/amsterdam/fork.py | 129 ++-- src/ethereum/forks/amsterdam/state.py | 72 +- src/ethereum/forks/amsterdam/state_tracker.py | 360 +++++++++ src/ethereum/forks/amsterdam/vm/__init__.py | 16 +- .../forks/amsterdam/vm/eoa_delegation.py | 17 +- .../amsterdam/vm/instructions/environment.py | 10 +- .../amsterdam/vm/instructions/storage.py | 24 +- .../forks/amsterdam/vm/instructions/system.py | 34 +- .../forks/amsterdam/vm/interpreter.py | 107 ++- .../evm_tools/loaders/fork_loader.py | 5 - .../evm_tools/t8n/__init__.py | 18 +- 15 files changed, 691 insertions(+), 904 deletions(-) delete mode 100644 src/ethereum/forks/amsterdam/block_access_lists/tracker.py create mode 100644 src/ethereum/forks/amsterdam/state_tracker.py diff --git a/src/ethereum/forks/amsterdam/block_access_lists/__init__.py b/src/ethereum/forks/amsterdam/block_access_lists/__init__.py index ebcda46e98..a83523861a 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/__init__.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/__init__.py @@ -17,47 +17,17 @@ rlp_encode_block_access_list, validate_block_access_list_against_execution, ) -from .tracker import ( - StateChangeTracker, - begin_call_frame, - commit_call_frame, - handle_in_transaction_selfdestruct, - 
normalize_balance_changes, - prepare_balance_tracking, - rollback_call_frame, - set_block_access_index, - track_address_access, - track_balance_change, - track_code_change, - track_nonce_change, - track_storage_read, - track_storage_write, -) __all__ = [ "BlockAccessListBuilder", - "StateChangeTracker", "add_balance_change", "add_code_change", "add_nonce_change", "add_storage_read", "add_storage_write", "add_touched_account", - "begin_call_frame", "build_block_access_list", - "commit_call_frame", "compute_block_access_list_hash", - "handle_in_transaction_selfdestruct", - "normalize_balance_changes", - "prepare_balance_tracking", - "rollback_call_frame", - "set_block_access_index", "rlp_encode_block_access_list", - "track_address_access", - "track_balance_change", - "track_code_change", - "track_nonce_change", - "track_storage_read", - "track_storage_write", "validate_block_access_list_against_execution", ] diff --git a/src/ethereum/forks/amsterdam/block_access_lists/builder.py b/src/ethereum/forks/amsterdam/block_access_lists/builder.py index f27e26c377..07a0ffe7c5 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/builder.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/builder.py @@ -14,7 +14,7 @@ """ from dataclasses import dataclass, field -from typing import Dict, List, Set +from typing import TYPE_CHECKING, Dict, List, Set from ethereum_types.bytes import Bytes, Bytes32 from ethereum_types.numeric import U64, U256 @@ -31,6 +31,9 @@ StorageChange, ) +if TYPE_CHECKING: + from ..state_tracker import StateChanges + @dataclass class AccountData: @@ -374,11 +377,11 @@ def add_touched_account( ensure_account(builder, address) -def build_block_access_list( +def _build_from_builder( builder: BlockAccessListBuilder, ) -> BlockAccessList: """ - Build the final [`BlockAccessList`] from accumulated changes. + Build the final [`BlockAccessList`] from a builder (internal helper). Constructs a deterministic block access list by sorting all accumulated changes. The resulting list is ordered by: @@ -445,3 +448,65 @@ def build_block_access_list( account_changes_list.sort(key=lambda x: x.address) return BlockAccessList(account_changes=tuple(account_changes_list)) + + +def build_block_access_list( + state_changes: "StateChanges", +) -> BlockAccessList: + """ + Build a [`BlockAccessList`] from a StateChanges frame. + + Converts the accumulated state changes from the frame-based architecture + into the final deterministic BlockAccessList format. + + Parameters + ---------- + state_changes : + The block-level StateChanges frame containing all changes from the block. + + Returns + ------- + block_access_list : + The final sorted and encoded block access list. 
+ + [`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList # noqa: E501 + [`StateChanges`]: ref:ethereum.forks.amsterdam.state_tracker.StateChanges + + """ + builder = BlockAccessListBuilder() + + # Add all touched addresses + for address in state_changes.touched_addresses: + add_touched_account(builder, address) + + # Add all storage reads + for address, slot in state_changes.storage_reads: + add_storage_read(builder, address, slot) + + # Add all storage writes + for (address, slot), ( + block_access_index, + value, + ) in state_changes.storage_writes.items(): + # Convert U256 to Bytes32 for storage + value_bytes = Bytes32(value.to_bytes(U256(32), "big")) + add_storage_write( + builder, address, slot, block_access_index, value_bytes + ) + + # Add all balance changes (balance_changes is keyed by (address, index)) + for ( + address, + block_access_index, + ), new_balance in state_changes.balance_changes.items(): + add_balance_change(builder, address, block_access_index, new_balance) + + # Add all nonce changes + for address, block_access_index, new_nonce in state_changes.nonce_changes: + add_nonce_change(builder, address, block_access_index, new_nonce) + + # Add all code changes + for address, block_access_index, new_code in state_changes.code_changes: + add_code_change(builder, address, block_access_index, new_code) + + return _build_from_builder(builder) diff --git a/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py b/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py index bbcf4a3d21..738abce181 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py @@ -216,10 +216,10 @@ def validate_block_access_list_against_execution( # 4. If Block Access List builder provided, validate against it # by comparing hashes if block_access_list_builder is not None: - from .builder import build_block_access_list + from .builder import _build_from_builder # Build a Block Access List from the builder - expected_block_access_list = build_block_access_list( + expected_block_access_list = _build_from_builder( block_access_list_builder ) diff --git a/src/ethereum/forks/amsterdam/block_access_lists/tracker.py b/src/ethereum/forks/amsterdam/block_access_lists/tracker.py deleted file mode 100644 index 9008a20878..0000000000 --- a/src/ethereum/forks/amsterdam/block_access_lists/tracker.py +++ /dev/null @@ -1,698 +0,0 @@ -""" -Provides state change tracking functionality for building Block -Access Lists during transaction execution. - -The tracker integrates with the EVM execution to capture all state accesses -and modifications, distinguishing between actual changes and no-op operations. -It maintains a cache of pre-state values to enable accurate change detection -throughout block execution. 
- -See [EIP-7928] for the full specification -[EIP-7928]: https://eips.ethereum.org/EIPS/eip-7928 -""" - -from dataclasses import dataclass, field -from typing import TYPE_CHECKING, Dict, List, Set, Tuple - -from ethereum_types.bytes import Bytes, Bytes32 -from ethereum_types.numeric import U64, U256, Uint - -from ..fork_types import Address -from .builder import ( - BlockAccessListBuilder, - add_balance_change, - add_code_change, - add_nonce_change, - add_storage_read, - add_storage_write, - add_touched_account, -) -from .rlp_types import BlockAccessIndex - -if TYPE_CHECKING: - from ..state import State # noqa: F401 - from ..vm import BlockEnvironment # noqa: F401 - - -@dataclass -class CallFrameSnapshot: - """ - Snapshot of block access list state for a single call frame. - - Used to track changes within a call frame to enable proper handling - of reverts as specified in EIP-7928. - """ - - touched_addresses: Set[Address] = field(default_factory=set) - """Addresses touched during this call frame.""" - - storage_writes: Dict[Tuple[Address, Bytes32], U256] = field( - default_factory=dict - ) - """Storage writes made during this call frame.""" - - balance_changes: Set[Tuple[Address, BlockAccessIndex, U256]] = field( - default_factory=set - ) - """Balance changes made during this call frame.""" - - nonce_changes: Set[Tuple[Address, BlockAccessIndex, U64]] = field( - default_factory=set - ) - """Nonce changes made during this call frame.""" - - code_changes: Set[Tuple[Address, BlockAccessIndex, Bytes]] = field( - default_factory=set - ) - """Code changes made during this call frame.""" - - -@dataclass -class StateChangeTracker: - """ - Tracks state changes during transaction execution for Block Access List - construction. - - This tracker maintains a cache of pre-state values and coordinates with - the [`BlockAccessListBuilder`] to record all state changes made during - block execution. It ensures that only actual changes (not no-op writes) - are recorded in the access list. - - [`BlockAccessListBuilder`]: - ref:ethereum.forks.amsterdam.block_access_lists.builder.BlockAccessListBuilder - """ - - block_access_list_builder: BlockAccessListBuilder - """ - The builder instance that accumulates all tracked changes. - """ - - pre_storage_cache: Dict[tuple, U256] = field(default_factory=dict) - """ - Cache of pre-transaction storage values, keyed by (address, slot) tuples. - This cache is cleared at the start of each transaction to track values - from the beginning of the current transaction. - """ - - pre_balance_cache: Dict[Address, U256] = field(default_factory=dict) - """ - Cache of pre-transaction balance values, keyed by address. - This cache is cleared at the start of each transaction and used by - normalize_balance_changes to filter out balance changes where - the final balance equals the initial balance. - """ - - current_block_access_index: Uint = Uint(0) - """ - The current block access index (0 for pre-execution, - 1..n for transactions, n+1 for post-execution). - """ - - call_frame_snapshots: List[CallFrameSnapshot] = field(default_factory=list) - """ - Stack of snapshots for nested call frames to handle reverts properly. - """ - - -def set_block_access_index( - block_env: "BlockEnvironment", block_access_index: Uint -) -> None: - """ - Set the current block access index for tracking changes. - - Must be called before processing each transaction/system contract - to ensure changes are associated with the correct block access index. 
- - Note: Block access indices differ from transaction indices: - - 0: Pre-execution (system contracts like beacon roots, block hashes) - - 1..n: Transactions (tx at index i gets block_access_index i+1) - - n+1: Post-execution (withdrawals, requests) - - Parameters - ---------- - block_env : - The block execution environment. - block_access_index : - The block access index (0 for pre-execution, - 1..n for transactions, n+1 for post-execution). - - """ - tracker = block_env.change_tracker - tracker.current_block_access_index = block_access_index - # Clear the pre-storage cache for each new transaction to ensure - # no-op writes are detected relative to the transaction start - tracker.pre_storage_cache.clear() - # Clear the pre-balance cache for each new transaction - tracker.pre_balance_cache.clear() - - -def capture_pre_state( - tracker: StateChangeTracker, address: Address, key: Bytes32, state: "State" -) -> U256: - """ - Capture and cache the pre-transaction value for a storage location. - - Retrieves the storage value from the beginning of the current transaction. - The value is cached within the transaction to avoid repeated lookups and - to maintain consistency across multiple accesses within the same - transaction. - - Parameters - ---------- - tracker : - The state change tracker instance. - address : - The account address containing the storage. - key : - The storage slot to read. - state : - The current execution state. - - Returns - ------- - value : - The storage value at the beginning of the current transaction. - - """ - cache_key = (address, key) - if cache_key not in tracker.pre_storage_cache: - # Import locally to avoid circular import - from ..state import get_storage - - tracker.pre_storage_cache[cache_key] = get_storage(state, address, key) - return tracker.pre_storage_cache[cache_key] - - -def track_address_access( - block_env: "BlockEnvironment", address: Address -) -> None: - """ - Track that an address was accessed. - - Records account access even when no state changes occur. This is - important for operations that read account data without modifying it. - - Parameters - ---------- - block_env : - The block execution environment. - address : - The account address that was accessed. - - """ - add_touched_account( - block_env.change_tracker.block_access_list_builder, address - ) - - -def track_storage_read( - block_env: "BlockEnvironment", address: Address, key: Bytes32 -) -> None: - """ - Track a storage read operation. - - Records that a storage slot was read and captures its pre-state value. - The slot will only appear in the final access list if it wasn't also - written to during block execution. - - Parameters - ---------- - block_env : - The block execution environment. - address : - The account address whose storage is being read. - key : - The storage slot being read. - - """ - track_address_access(block_env, address) - - capture_pre_state(block_env.change_tracker, address, key, block_env.state) - - add_storage_read( - block_env.change_tracker.block_access_list_builder, address, key - ) - - -def track_storage_write( - block_env: "BlockEnvironment", - address: Address, - key: Bytes32, - new_value: U256, -) -> None: - """ - Track a storage write operation. - - Records storage modifications, but only if the new value differs from - the pre-state value. No-op writes (where the value doesn't change) are - tracked as reads instead, as specified in [EIP-7928]. - - Parameters - ---------- - block_env : - The block execution environment. 
- address : - The account address whose storage is being modified. - key : - The storage slot being written to. - new_value : - The new value to write. - - [EIP-7928]: https://eips.ethereum.org/EIPS/eip-7928 - - """ - track_address_access(block_env, address) - - tracker = block_env.change_tracker - pre_value = capture_pre_state(tracker, address, key, block_env.state) - - value_bytes = new_value.to_be_bytes32() - - if pre_value != new_value: - add_storage_write( - tracker.block_access_list_builder, - address, - key, - BlockAccessIndex(tracker.current_block_access_index), - value_bytes, - ) - # Record in current call frame snapshot if exists - if tracker.call_frame_snapshots: - snapshot = tracker.call_frame_snapshots[-1] - snapshot.storage_writes[(address, key)] = new_value - else: - add_storage_read(tracker.block_access_list_builder, address, key) - - -def capture_pre_balance( - tracker: StateChangeTracker, address: Address, state: "State" -) -> U256: - """ - Capture and cache the pre-transaction balance for an account. - - This function caches the balance on first access for each address during - a transaction. It must be called before any balance modifications are made - to ensure we capture the pre-transaction balance correctly. The cache is - cleared at the beginning of each transaction. - - This is used by normalize_balance_changes to determine which balance - changes should be filtered out. - - Parameters - ---------- - tracker : - The state change tracker instance. - address : - The account address. - state : - The current execution state. - - Returns - ------- - value : - The balance at the beginning of the current transaction. - - """ - if address not in tracker.pre_balance_cache: - # Import locally to avoid circular import - from ..state import get_account - - # Cache the current balance on first access - # This should be called before any balance modifications - account = get_account(state, address) - tracker.pre_balance_cache[address] = account.balance - return tracker.pre_balance_cache[address] - - -def prepare_balance_tracking( - block_env: "BlockEnvironment", address: Address -) -> None: - """ - Prepare for tracking balance changes by caching the pre-transaction - balance. - - This should be called before any balance modifications when you need to - ensure the pre-balance is captured for later normalization. This is - particularly important for operations like withdrawals where the balance - might not actually change. - - Parameters - ---------- - block_env : - The block execution environment. - address : - The account address whose balance will be tracked. - - - """ - # Ensure the address is tracked - track_address_access(block_env, address) - - # Cache the pre-balance for later normalization - capture_pre_balance(block_env.change_tracker, address, block_env.state) - - -def track_balance_change( - block_env: "BlockEnvironment", - address: Address, - new_balance: U256, -) -> None: - """ - Track a balance change for an account. - - Records the new balance after any balance-affecting operation, including - transfers, gas payments, block rewards, and withdrawals. - - Parameters - ---------- - block_env : - The block execution environment. - address : - The account address whose balance changed. - new_balance : - The new balance value. 
- - """ - track_address_access(block_env, address) - - tracker = block_env.change_tracker - block_access_index = BlockAccessIndex(tracker.current_block_access_index) - add_balance_change( - tracker.block_access_list_builder, - address, - block_access_index, - new_balance, - ) - - # Record in current call frame snapshot if exists - if tracker.call_frame_snapshots: - snapshot = tracker.call_frame_snapshots[-1] - snapshot.balance_changes.add( - (address, block_access_index, new_balance) - ) - - -def track_nonce_change( - block_env: "BlockEnvironment", address: Address, new_nonce: Uint -) -> None: - """ - Track a nonce change for an account. - - Records nonce increments for both EOAs (when sending transactions) and - contracts (when performing [`CREATE`] or [`CREATE2`] operations). Deployed - contracts also have their initial nonce tracked. - - Parameters - ---------- - block_env : - The block execution environment. - address : - The account address whose nonce changed. - new_nonce : - The new nonce value. - - [`CREATE`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create - [`CREATE2`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create2 - - """ - track_address_access(block_env, address) - tracker = block_env.change_tracker - block_access_index = BlockAccessIndex(tracker.current_block_access_index) - nonce_u64 = U64(new_nonce) - add_nonce_change( - tracker.block_access_list_builder, - address, - block_access_index, - nonce_u64, - ) - - # Record in current call frame snapshot if exists - if tracker.call_frame_snapshots: - snapshot = tracker.call_frame_snapshots[-1] - snapshot.nonce_changes.add((address, block_access_index, nonce_u64)) - - -def track_code_change( - block_env: "BlockEnvironment", address: Address, new_code: Bytes -) -> None: - """ - Track a code change for contract deployment. - - Records new contract code deployments via [`CREATE`], [`CREATE2`], or - [`SETCODE`] operations. This function is called when contract bytecode - is deployed to an address. - - Parameters - ---------- - block_env : - The block execution environment. - address : - The address receiving the contract code. - new_code : - The deployed contract bytecode. - - [`CREATE`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create - [`CREATE2`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create2 - - """ - track_address_access(block_env, address) - tracker = block_env.change_tracker - block_access_index = BlockAccessIndex(tracker.current_block_access_index) - add_code_change( - tracker.block_access_list_builder, - address, - block_access_index, - new_code, - ) - - # Record in current call frame snapshot if exists - if tracker.call_frame_snapshots: - snapshot = tracker.call_frame_snapshots[-1] - snapshot.code_changes.add((address, block_access_index, new_code)) - - -def handle_in_transaction_selfdestruct( - block_env: "BlockEnvironment", address: Address -) -> None: - """ - Handle an account that self-destructed in the same transaction it was - created. - - Per EIP-7928, accounts destroyed within their creation transaction must be - included as read-only with storage writes converted to reads. Nonce and - code changes from the current transaction are also removed. - - Note: Balance changes are handled separately by - normalize_balance_changes. - - Parameters - ---------- - block_env : - The block execution environment. - address : - The address that self-destructed. 
- - """ - tracker = block_env.change_tracker - builder = tracker.block_access_list_builder - if address not in builder.accounts: - return - - account_data = builder.accounts[address] - current_index = tracker.current_block_access_index - - # Convert storage writes from current tx to reads - for slot in list(account_data.storage_changes.keys()): - account_data.storage_changes[slot] = [ - c - for c in account_data.storage_changes[slot] - if c.block_access_index != current_index - ] - if not account_data.storage_changes[slot]: - del account_data.storage_changes[slot] - account_data.storage_reads.add(slot) - - # Remove nonce and code changes from current transaction - account_data.nonce_changes = [ - c - for c in account_data.nonce_changes - if c.block_access_index != current_index - ] - account_data.code_changes = [ - c - for c in account_data.code_changes - if c.block_access_index != current_index - ] - - -def normalize_balance_changes(block_env: "BlockEnvironment") -> None: - """ - Normalize balance changes for the current block access index. - - This method filters out spurious balance changes by removing all balance - changes for addresses where the post-execution balance equals the - pre-execution balance. - - This is crucial for handling cases like: - - In-transaction self-destructs where an account with 0 balance is created - and destroyed, resulting in no net balance change - - Round-trip transfers where an account receives and sends equal amounts - - Zero-amount withdrawals where the balance doesn't actually change - - This should be called at the end of any operation that tracks balance - changes (transactions, withdrawals, etc.). Only actual state changes are - recorded in the Block Access List. - - Parameters - ---------- - block_env : - The block execution environment. - - """ - # Import locally to avoid circular import - from ..state import get_account - - tracker = block_env.change_tracker - builder = tracker.block_access_list_builder - current_index = tracker.current_block_access_index - - # Check each address that had balance changes in this transaction - for address in list(builder.accounts.keys()): - account_data = builder.accounts[address] - - # Get the pre-transaction balance - pre_balance = capture_pre_balance(tracker, address, block_env.state) - - # Get the current (post-transaction) balance - post_balance = get_account(block_env.state, address).balance - - # If pre-tx balance equals post-tx balance, remove all balance changes - # for this address in the current transaction - if pre_balance == post_balance: - # Filter out balance changes from the current transaction - account_data.balance_changes = [ - change - for change in account_data.balance_changes - if change.block_access_index != current_index - ] - - -def begin_call_frame(block_env: "BlockEnvironment") -> None: - """ - Begin a new call frame for tracking reverts. - - Creates a new snapshot to track changes within this call frame. - This allows proper handling of reverts as specified in EIP-7928. - - Parameters - ---------- - block_env : - The block execution environment. - - """ - block_env.change_tracker.call_frame_snapshots.append(CallFrameSnapshot()) - - -def rollback_call_frame(block_env: "BlockEnvironment") -> None: - """ - Rollback changes from the current call frame. 
- - When a call reverts, this function: - - Converts storage writes to reads - - Removes balance, nonce, and code changes - - Preserves touched addresses - - This implements EIP-7928 revert handling where reverted writes - become reads and addresses remain in the access list. - - Parameters - ---------- - block_env : - The block execution environment. - - """ - tracker = block_env.change_tracker - if not tracker.call_frame_snapshots: - return - - snapshot = tracker.call_frame_snapshots.pop() - builder = tracker.block_access_list_builder - - # Convert storage writes to reads - for (address, slot), _ in snapshot.storage_writes.items(): - # Remove the write from storage_changes - if address in builder.accounts: - account_data = builder.accounts[address] - if slot in account_data.storage_changes: - # Filter out changes from this call frame - account_data.storage_changes[slot] = [ - change - for change in account_data.storage_changes[slot] - if change.block_access_index - != tracker.current_block_access_index - ] - if not account_data.storage_changes[slot]: - del account_data.storage_changes[slot] - # Add as a read instead - account_data.storage_reads.add(slot) - - # Remove balance changes from this call frame - for address, block_access_index, new_balance in snapshot.balance_changes: - if address in builder.accounts: - account_data = builder.accounts[address] - # Filter out balance changes from this call frame - account_data.balance_changes = [ - change - for change in account_data.balance_changes - if not ( - change.block_access_index == block_access_index - and change.post_balance == new_balance - ) - ] - - # Remove nonce changes from this call frame - for address, block_access_index, new_nonce in snapshot.nonce_changes: - if address in builder.accounts: - account_data = builder.accounts[address] - # Filter out nonce changes from this call frame - account_data.nonce_changes = [ - change - for change in account_data.nonce_changes - if not ( - change.block_access_index == block_access_index - and change.new_nonce == new_nonce - ) - ] - - # Remove code changes from this call frame - for address, block_access_index, new_code in snapshot.code_changes: - if address in builder.accounts: - account_data = builder.accounts[address] - # Filter out code changes from this call frame - account_data.code_changes = [ - change - for change in account_data.code_changes - if not ( - change.block_access_index == block_access_index - and change.new_code == new_code - ) - ] - - # All touched addresses remain in the access list (already tracked) - - -def commit_call_frame(block_env: "BlockEnvironment") -> None: - """ - Commit changes from the current call frame. - - Removes the current call frame snapshot without rolling back changes. - Called when a call completes successfully. - - Parameters - ---------- - block_env : - The block execution environment. - - """ - if block_env.change_tracker.call_frame_snapshots: - block_env.change_tracker.call_frame_snapshots.pop() diff --git a/src/ethereum/forks/amsterdam/fork.py b/src/ethereum/forks/amsterdam/fork.py index d74c8eeb80..86a7089a30 100644 --- a/src/ethereum/forks/amsterdam/fork.py +++ b/src/ethereum/forks/amsterdam/fork.py @@ -16,7 +16,7 @@ from ethereum_rlp import rlp from ethereum_types.bytes import Bytes -from ethereum_types.numeric import U64, U256, Uint, ulen +from ethereum_types.numeric import U64, U256, Uint from ethereum.crypto.hash import Hash32, keccak256 from ethereum.exceptions import ( @@ -30,14 +30,8 @@ from . 
import vm from .block_access_lists.builder import build_block_access_list +from .block_access_lists.rlp_types import BlockAccessIndex from .block_access_lists.rlp_utils import compute_block_access_list_hash -from .block_access_lists.tracker import ( - handle_in_transaction_selfdestruct, - normalize_balance_changes, - prepare_balance_tracking, - set_block_access_index, - track_balance_change, -) from .blocks import Block, Header, Log, Receipt, Withdrawal, encode_receipt from .bloom import logs_bloom from .exceptions import ( @@ -70,6 +64,11 @@ set_account_balance, state_root, ) +from .state_tracker import ( + create_child_frame, + handle_in_transaction_selfdestruct, + normalize_balance_changes_for_transaction, +) from .transactions import ( AccessListTransaction, BlobTransaction, @@ -779,9 +778,9 @@ def apply_body( """ block_output = vm.BlockOutput() - # Set block access index for pre-execution system contracts # EIP-7928: System contracts use block_access_index 0 - set_block_access_index(block_env, Uint(0)) + # The block frame already starts at index 0, so system transactions + # naturally use that index through the block frame process_unchecked_system_transaction( block_env=block_env, @@ -798,9 +797,10 @@ def apply_body( for i, tx in enumerate(map(decode_transaction, transactions)): process_transaction(block_env, block_output, tx, Uint(i)) - # EIP-7928: Post-execution uses block_access_index len(transactions) + 1 - post_execution_index = ulen(transactions) + Uint(1) - set_block_access_index(block_env, post_execution_index) + # EIP-7928: Increment block frame to post-execution index + # After N transactions, block frame is at index N + # Post-execution operations (withdrawals, etc.) use index N+1 + block_env.block_state_changes.increment_index() process_withdrawals(block_env, block_output, withdrawals) @@ -808,8 +808,9 @@ def apply_body( block_env=block_env, block_output=block_output, ) + # Build block access list from block_env.block_state_changes block_output.block_access_list = build_block_access_list( - block_env.change_tracker.block_access_list_builder + block_env.block_state_changes ) return block_output @@ -890,9 +891,19 @@ def process_transaction( Index of the transaction in the block. 
""" - # EIP-7928: Transactions use block_access_index 1 to len(transactions) - # Transaction at index i gets block_access_index i+1 - set_block_access_index(block_env, index + Uint(1)) + # EIP-7928: Create a transaction-level StateChanges frame + # The frame will read the current block_access_index from the block frame + # Before transaction starts, increment block index so it's ready + block_env.block_state_changes.increment_index() + tx_state_changes = create_child_frame(block_env.block_state_changes) + + coinbase_pre_balance = get_account( + block_env.state, block_env.coinbase + ).balance + tx_state_changes.track_address(block_env.coinbase) + tx_state_changes.capture_pre_balance( + block_env.coinbase, coinbase_pre_balance + ) trie_set( block_output.transactions_trie, @@ -923,13 +934,16 @@ def process_transaction( effective_gas_fee = tx.gas * effective_gas_price gas = tx.gas - intrinsic_gas - increment_nonce(block_env.state, sender, block_env) + increment_nonce(block_env.state, sender, tx_state_changes) sender_balance_after_gas_fee = ( Uint(sender_account.balance) - effective_gas_fee - blob_gas_fee ) set_account_balance( - block_env.state, sender, U256(sender_balance_after_gas_fee), block_env + block_env.state, + sender, + U256(sender_balance_after_gas_fee), + tx_state_changes, ) access_list_addresses = set() @@ -967,6 +981,8 @@ def process_transaction( ) message = prepare_message(block_env, tx_env, tx) + # Set transaction frame so call frames become children of it + message.transaction_state_changes = tx_state_changes tx_output = process_message_call(message) @@ -996,7 +1012,7 @@ def process_transaction( block_env.state, sender ).balance + U256(gas_refund_amount) set_account_balance( - block_env.state, sender, sender_balance_after_refund, block_env + block_env.state, sender, sender_balance_after_refund, tx_state_changes ) # transfer miner fees @@ -1009,7 +1025,7 @@ def process_transaction( block_env.state, block_env.coinbase, coinbase_balance_after_mining_fee, - block_env, + tx_state_changes, ) if coinbase_balance_after_mining_fee == 0 and account_exists_and_is_empty( @@ -1017,17 +1033,6 @@ def process_transaction( ): destroy_account(block_env.state, block_env.coinbase) - for address in tx_output.accounts_to_delete: - # EIP-7928: In-transaction self-destruct - convert storage writes to - # reads and remove nonce/code changes. Only accounts created in same - # tx are in accounts_to_delete per EIP-6780. 
- handle_in_transaction_selfdestruct(block_env, address) - destroy_account(block_env.state, address) - - # EIP-7928: Normalize balance changes for this transaction - # Remove balance changes where post-tx balance equals pre-tx balance - normalize_balance_changes(block_env) - block_output.block_gas_used += tx_gas_used_after_refund block_output.blob_gas_used += tx_blob_gas_used @@ -1046,6 +1051,34 @@ def process_transaction( block_output.block_logs += tx_output.logs + # Merge transaction frame into block frame + tx_state_changes.merge_on_success() + + # EIP-7928: Handle in-transaction self-destruct AFTER merge + # Convert storage writes to reads and remove nonce/code changes + # Only accounts created in same tx are in accounts_to_delete per EIP-6780 + + for address in tx_output.accounts_to_delete: + handle_in_transaction_selfdestruct( + block_env.block_state_changes, + address, + BlockAccessIndex( + block_env.block_state_changes.get_block_access_index() + ), + ) + destroy_account(block_env.state, address) + + # EIP-7928: Normalize balance changes for this transaction + # Remove balance changes where post-tx balance equals pre-tx balance + + normalize_balance_changes_for_transaction( + block_env.block_state_changes, + BlockAccessIndex( + block_env.block_state_changes.get_block_access_index() + ), + block_env.state, + ) + def process_withdrawals( block_env: vm.BlockEnvironment, @@ -1055,6 +1088,11 @@ def process_withdrawals( """ Increase the balance of the withdrawing account. """ + withdrawal_addresses = {wd.address for wd in withdrawals} + for address in withdrawal_addresses: + pre_balance = get_account(block_env.state, address).balance + block_env.block_state_changes.track_address(address) + block_env.block_state_changes.capture_pre_balance(address, pre_balance) def increase_recipient_balance(recipient: Account) -> None: recipient.balance += wd.amount * U256(10**9) @@ -1066,25 +1104,28 @@ def increase_recipient_balance(recipient: Account) -> None: rlp.encode(wd), ) - # Prepare for balance tracking (ensures address appears in BAL and - # pre-balance is cached for normalization) - prepare_balance_tracking(block_env, wd.address) - modify_state(block_env.state, wd.address, increase_recipient_balance) - # Track balance change for BAL - # (withdrawals are tracked as system contract changes) + # Track balance change for BAL (withdrawals use post-execution index) new_balance = get_account(block_env.state, wd.address).balance - track_balance_change(block_env, wd.address, U256(new_balance)) - - # EIP-7928: Normalize balance changes for this withdrawal - # Remove balance changes where post-withdrawal balance - # equals pre-withdrawal balance - normalize_balance_changes(block_env) + block_env.block_state_changes.track_balance_change( + wd.address, new_balance + ) if account_exists_and_is_empty(block_env.state, wd.address): destroy_account(block_env.state, wd.address) + # EIP-7928: Normalize balance changes after all withdrawals + # Filters out net-zero changes + + normalize_balance_changes_for_transaction( + block_env.block_state_changes, + BlockAccessIndex( + block_env.block_state_changes.get_block_access_index() + ), + block_env.state, + ) + def check_gas_limit(gas_limit: Uint, parent_gas_limit: Uint) -> bool: """ diff --git a/src/ethereum/forks/amsterdam/state.py b/src/ethereum/forks/amsterdam/state.py index 3656a386c7..326595ac93 100644 --- a/src/ethereum/forks/amsterdam/state.py +++ b/src/ethereum/forks/amsterdam/state.py @@ -21,15 +21,10 @@ from ethereum_types.bytes import Bytes, Bytes32 from 
ethereum_types.frozen import modify -from ethereum_types.numeric import U256, Uint - -from .block_access_lists.tracker import ( - prepare_balance_tracking, - track_balance_change, - track_code_change, - track_nonce_change, -) +from ethereum_types.numeric import U64, U256, Uint + from .fork_types import EMPTY_ACCOUNT, Account, Address, Root +from .state_tracker import StateChanges from .trie import EMPTY_TRIE_ROOT, Trie, copy_trie, root, trie_get, trie_set if TYPE_CHECKING: @@ -515,15 +510,18 @@ def move_ether( sender_address: Address, recipient_address: Address, amount: U256, - block_env: "BlockEnvironment", + state_changes: StateChanges, ) -> None: """ Move funds between accounts. """ - # Prepare for balance tracking (captures pre-balance and ensures - # addresses are tracked) - prepare_balance_tracking(block_env, sender_address) - prepare_balance_tracking(block_env, recipient_address) + sender_balance = get_account(state, sender_address).balance + recipient_balance = get_account(state, recipient_address).balance + + state_changes.track_address(sender_address) + state_changes.capture_pre_balance(sender_address, sender_balance) + state_changes.track_address(recipient_address) + state_changes.capture_pre_balance(recipient_address, recipient_balance) def reduce_sender_balance(sender: Account) -> None: if sender.balance < amount: @@ -539,9 +537,11 @@ def increase_recipient_balance(recipient: Account) -> None: sender_new_balance = get_account(state, sender_address).balance recipient_new_balance = get_account(state, recipient_address).balance - track_balance_change(block_env, sender_address, U256(sender_new_balance)) - track_balance_change( - block_env, recipient_address, U256(recipient_new_balance) + state_changes.track_balance_change( + sender_address, U256(sender_new_balance) + ) + state_changes.track_balance_change( + recipient_address, U256(recipient_new_balance) ) @@ -549,7 +549,7 @@ def set_account_balance( state: State, address: Address, amount: U256, - block_env: "BlockEnvironment", + state_changes: StateChanges, ) -> None: """ Sets the balance of an account. @@ -565,24 +565,26 @@ def set_account_balance( amount: The amount that needs to set in balance. - block_env: - Block environment for tracking changes. + state_changes: + State changes frame for tracking (EIP-7928). """ - # Prepare for balance tracking (captures pre-balance and ensures - # address is tracked) - prepare_balance_tracking(block_env, address) + current_balance = get_account(state, address).balance + + state_changes.track_address(address) + state_changes.capture_pre_balance(address, current_balance) def set_balance(account: Account) -> None: account.balance = amount modify_state(state, address, set_balance) - - track_balance_change(block_env, address, amount) + state_changes.track_balance_change(address, amount) def increment_nonce( - state: State, address: Address, block_env: "BlockEnvironment" + state: State, + address: Address, + state_changes: "StateChanges", ) -> None: """ Increments the nonce of an account. @@ -595,8 +597,8 @@ def increment_nonce( address: Address of the account whose nonce needs to be incremented. - block_env: - Block environment for tracking changes. + state_changes: + State changes frame for tracking (EIP-7928). 
""" @@ -606,21 +608,15 @@ def increase_nonce(sender: Account) -> None: modify_state(state, address, increase_nonce) # Track nonce change for Block Access List (EIP-7928) - # (for ALL accounts and ALL nonce changes) - # This includes: - # - EOA senders (transaction nonce increments) - # - Contracts performing CREATE/CREATE2 - # - Deployed contracts - # - EIP-7702 authorities account = get_account(state, address) - track_nonce_change(block_env, address, account.nonce) + state_changes.track_nonce_change(address, U64(account.nonce)) def set_code( state: State, address: Address, code: Bytes, - block_env: "BlockEnvironment", + state_changes: StateChanges, ) -> None: """ Sets Account code. @@ -636,8 +632,8 @@ def set_code( code: The bytecode that needs to be set. - block_env: - Block environment for tracking changes. + state_changes: + State changes frame for tracking (EIP-7928). """ @@ -651,7 +647,7 @@ def write_code(sender: Account) -> None: # code to b"" is not a meaningful state change since the address # had no code to begin with. if not (code == b"" and address in state.created_accounts): - track_code_change(block_env, address, code) + state_changes.track_code_change(address, code) def get_storage_original(state: State, address: Address, key: Bytes32) -> U256: diff --git a/src/ethereum/forks/amsterdam/state_tracker.py b/src/ethereum/forks/amsterdam/state_tracker.py new file mode 100644 index 0000000000..7ee77259b1 --- /dev/null +++ b/src/ethereum/forks/amsterdam/state_tracker.py @@ -0,0 +1,360 @@ +""" +Hierarchical state change tracking for EIP-7928 Block Access Lists. + +Implements a frame-based hierarchy: Block → Transaction → Call frames. +Each frame tracks state changes and merges upward on completion: +- Success: merge all changes (reads + writes) +- Failure: merge only reads (writes discarded) + +Frame Hierarchy: + Block Frame: Root, lifetime = entire block, index 0..N+1 + Transaction Frame: Child of block, lifetime = single transaction + Call Frame: Child of transaction/call, lifetime = single message + +Block Access Index: 0=pre-exec, 1..N=transactions, N+1=post-exec +Stored in root frame, accessed by walking parent chain. + +Pre-State Tracking: Values captured before modifications to enable +net-zero filtering. + +[EIP-7928]: https://eips.ethereum.org/EIPS/eip-7928 +""" + +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Dict, Optional, Set, Tuple + +from ethereum_types.bytes import Bytes, Bytes32 +from ethereum_types.numeric import U64, U256, Uint + +from .block_access_lists.rlp_types import BlockAccessIndex +from .fork_types import Address + +if TYPE_CHECKING: + from .state import State + + +@dataclass +class StateChanges: + """ + Tracks state changes within a single execution frame. + + Frames form a hierarchy and merge changes upward on completion. 
+ """ + + parent: Optional["StateChanges"] = None + _block_access_index: BlockAccessIndex = BlockAccessIndex(0) + + touched_addresses: Set[Address] = field(default_factory=set) + storage_reads: Set[Tuple[Address, Bytes32]] = field(default_factory=set) + storage_writes: Dict[ + Tuple[Address, Bytes32], Tuple[BlockAccessIndex, U256] + ] = field(default_factory=dict) + + balance_changes: Dict[Tuple[Address, BlockAccessIndex], U256] = field( + default_factory=dict + ) + nonce_changes: Set[Tuple[Address, BlockAccessIndex, U64]] = field( + default_factory=set + ) + code_changes: Set[Tuple[Address, BlockAccessIndex, Bytes]] = field( + default_factory=set + ) + + # Pre-state captures for net-zero filtering + pre_balances: Dict[Address, U256] = field(default_factory=dict) + pre_nonces: Dict[Address, U64] = field(default_factory=dict) + pre_storage: Dict[Tuple[Address, Bytes32], U256] = field( + default_factory=dict + ) + + def get_block_access_index(self) -> BlockAccessIndex: + """Get current block access index by walking to root.""" + current = self + while current.parent is not None: + current = current.parent + return current._block_access_index + + def capture_pre_balance(self, address: Address, balance: U256) -> None: + """Capture pre-balance (first-write-wins for net-zero filtering).""" + if address not in self.pre_balances: + self.pre_balances[address] = balance + + def capture_pre_nonce(self, address: Address, nonce: U64) -> None: + """Capture pre-nonce (first-write-wins).""" + if address not in self.pre_nonces: + self.pre_nonces[address] = nonce + + def capture_pre_storage( + self, address: Address, key: Bytes32, value: U256 + ) -> None: + """Capture pre-storage (first-write-wins for noop filtering).""" + slot = (address, key) + if slot not in self.pre_storage: + self.pre_storage[slot] = value + + def track_address(self, address: Address) -> None: + """Track that an address was accessed.""" + self.touched_addresses.add(address) + + def track_storage_read(self, address: Address, key: Bytes32) -> None: + """Track a storage read operation.""" + self.storage_reads.add((address, key)) + + def track_storage_write( + self, address: Address, key: Bytes32, value: U256 + ) -> None: + """Track a storage write operation with block access index.""" + self.storage_writes[(address, key)] = ( + self.get_block_access_index(), + value, + ) + + def track_balance_change( + self, address: Address, new_balance: U256 + ) -> None: + """Track balance change keyed by (address, index).""" + self.balance_changes[(address, self.get_block_access_index())] = ( + new_balance + ) + + def track_nonce_change(self, address: Address, new_nonce: U64) -> None: + """Track a nonce change.""" + self.nonce_changes.add( + (address, self.get_block_access_index(), new_nonce) + ) + + def track_code_change(self, address: Address, new_code: Bytes) -> None: + """Track a code change.""" + self.code_changes.add( + (address, self.get_block_access_index(), new_code) + ) + + def increment_index(self) -> None: + """Increment block access index by walking to root.""" + root = self + while root.parent is not None: + root = root.parent + root._block_access_index = BlockAccessIndex( + root._block_access_index + Uint(1) + ) + + def merge_on_success(self) -> None: + """ + Merge this frame's changes into parent on successful completion. + + Merges all tracked changes (reads and writes) from this frame + into the parent frame. Filters out net-zero changes based on + captured pre-state values by comparing initial vs final values. 
+ """ + if self.parent is None: + return + + # Merge address accesses + self.parent.touched_addresses.update(self.touched_addresses) + + # Merge pre-state captures for transaction-level normalization + # Only if parent doesn't have value (first capture wins) + for addr, balance in self.pre_balances.items(): + if addr not in self.parent.pre_balances: + self.parent.pre_balances[addr] = balance + for addr, nonce in self.pre_nonces.items(): + if addr not in self.parent.pre_nonces: + self.parent.pre_nonces[addr] = nonce + for slot, value in self.pre_storage.items(): + if slot not in self.parent.pre_storage: + self.parent.pre_storage[slot] = value + + # Merge storage operations, filtering noop writes + self.parent.storage_reads.update(self.storage_reads) + for (addr, key), (idx, value) in self.storage_writes.items(): + # Only merge if value actually changed from pre-state + if (addr, key) in self.pre_storage: + if self.pre_storage[(addr, key)] != value: + self.parent.storage_writes[(addr, key)] = (idx, value) + # If equal, it's a noop write - convert to read only + else: + self.parent.storage_reads.add((addr, key)) + else: + # No pre-state captured, merge as-is + self.parent.storage_writes[(addr, key)] = (idx, value) + + # Merge balance changes - filter net-zero changes + # balance_changes keyed by (address, index) + for (addr, idx), final_balance in self.balance_changes.items(): + if addr in self.pre_balances: + if self.pre_balances[addr] != final_balance: + # Net change occurred - merge the final balance + self.parent.balance_changes[(addr, idx)] = final_balance + # else: Net-zero change - skip entirely + else: + # No pre-balance captured, merge as-is + self.parent.balance_changes[(addr, idx)] = final_balance + + # Merge nonce changes - keep only highest nonce per address + # Nonces are monotonically increasing, so just keep the max + address_final_nonces: Dict[Address, Tuple[BlockAccessIndex, U64]] = {} + for addr, idx, nonce in self.nonce_changes: + # Keep the highest nonce value for each address + if ( + addr not in address_final_nonces + or nonce > address_final_nonces[addr][1] + ): + address_final_nonces[addr] = (idx, nonce) + + # Merge final nonces (no net-zero filtering - nonces never decrease) + for addr, (idx, final_nonce) in address_final_nonces.items(): + self.parent.nonce_changes.add((addr, idx, final_nonce)) + + # Merge code changes - keep only latest code per address + address_final_code: Dict[Address, Tuple[BlockAccessIndex, Bytes]] = {} + for addr, idx, code in self.code_changes: + # Keep the change with highest index (most recent) + if ( + addr not in address_final_code + or idx >= address_final_code[addr][0] + ): + address_final_code[addr] = (idx, code) + + # Merge final code changes + for addr, (idx, final_code) in address_final_code.items(): + self.parent.code_changes.add((addr, idx, final_code)) + + def merge_on_failure(self) -> None: + """ + Merge this frame's changes into parent on failed completion. + + Merges only read operations from this frame into the parent. + Write operations are discarded since the frame reverted. + This is called when a call frame fails/reverts. 
+ """ + if self.parent is None: + return + + # Only merge reads and address accesses on failure + self.parent.touched_addresses.update(self.touched_addresses) + self.parent.storage_reads.update(self.storage_reads) + + # Convert writes to reads (failed writes still accessed the slots) + for address, key in self.storage_writes.keys(): + self.parent.storage_reads.add((address, key)) + + # Note: balance_changes, nonce_changes, and code_changes are NOT + # merged on failure - they are discarded + + +def handle_in_transaction_selfdestruct( + state_changes: StateChanges, + address: Address, + current_block_access_index: BlockAccessIndex, +) -> None: + """ + Handle account self-destructed in same transaction as creation. + + Per EIP-7928 and EIP-6780, accounts destroyed within their creation + transaction must have: + - Nonce changes from current transaction removed + - Code changes from current transaction removed + - Storage writes from current transaction converted to reads + - Balance changes handled by net-zero filtering + + Parameters + ---------- + state_changes : StateChanges + The state changes tracker (typically the block-level frame). + address : Address + The address that self-destructed. + current_block_access_index : BlockAccessIndex + The current block access index (transaction index). + + """ + # Remove nonce changes from current transaction + state_changes.nonce_changes = { + (addr, idx, nonce) + for addr, idx, nonce in state_changes.nonce_changes + if not (addr == address and idx == current_block_access_index) + } + + # Remove code changes from current transaction + state_changes.code_changes = { + (addr, idx, code) + for addr, idx, code in state_changes.code_changes + if not (addr == address and idx == current_block_access_index) + } + + # Convert storage writes from current transaction to reads + for (addr, key), (idx, _value) in list( + state_changes.storage_writes.items() + ): + if addr == address and idx == current_block_access_index: + del state_changes.storage_writes[(addr, key)] + state_changes.storage_reads.add((addr, key)) + + +def normalize_balance_changes_for_transaction( + block_frame: StateChanges, + current_block_access_index: BlockAccessIndex, + state: "State", +) -> None: + """ + Normalize balance changes for the current transaction. + + Removes balance changes where post-transaction balance equals + pre-transaction balance. This handles net-zero transfers across + the entire transaction. + + This function should be called after merging transaction frames + into the block frame to filter out addresses where balance didn't + actually change from transaction start to transaction end. + + Parameters + ---------- + block_frame : StateChanges + The block-level state changes frame. + current_block_access_index : BlockAccessIndex + The current transaction's block access index. + state : State + The current state to read final balances from. 
+ + """ + # Import locally to avoid circular import + from .state import get_account + + # Collect addresses that have balance changes in this transaction + addresses_to_check = [ + addr + for (addr, idx) in block_frame.balance_changes.keys() + if idx == current_block_access_index + ] + + # For each address, compare pre vs post balance + for addr in addresses_to_check: + if addr in block_frame.pre_balances: + pre_balance = block_frame.pre_balances[addr] + post_balance = get_account(state, addr).balance + + if pre_balance == post_balance: + # Remove balance change for this address - net-zero transfer + del block_frame.balance_changes[ + (addr, current_block_access_index) + ] + + +def create_child_frame(parent: StateChanges) -> StateChanges: + """ + Create a child frame for nested execution. + + The child frame will dynamically read the block_access_index from + the root (block) frame, ensuring all frames see the same current index. + + Parameters + ---------- + parent : StateChanges + The parent frame. + + Returns + ------- + child : StateChanges + A new child frame with parent link. + + """ + return StateChanges(parent=parent) diff --git a/src/ethereum/forks/amsterdam/vm/__init__.py b/src/ethereum/forks/amsterdam/vm/__init__.py index c10df4897b..04b74eee9e 100644 --- a/src/ethereum/forks/amsterdam/vm/__init__.py +++ b/src/ethereum/forks/amsterdam/vm/__init__.py @@ -21,12 +21,11 @@ from ethereum.crypto.hash import Hash32 from ethereum.exceptions import EthereumException -from ..block_access_lists.builder import BlockAccessListBuilder from ..block_access_lists.rlp_types import BlockAccessList -from ..block_access_lists.tracker import StateChangeTracker from ..blocks import Log, Receipt, Withdrawal from ..fork_types import Address, Authorization, VersionedHash from ..state import State, TransientStorage +from ..state_tracker import StateChanges from ..transactions import LegacyTransaction from ..trie import Trie @@ -50,8 +49,8 @@ class BlockEnvironment: prev_randao: Bytes32 excess_blob_gas: U64 parent_beacon_block_root: Hash32 - change_tracker: StateChangeTracker = field( - default_factory=lambda: StateChangeTracker(BlockAccessListBuilder()) + block_state_changes: StateChanges = field( + default_factory=lambda: StateChanges() ) @@ -143,6 +142,7 @@ class Message: accessed_storage_keys: Set[Tuple[Address, Bytes32]] disable_precompiles: bool parent_evm: Optional["Evm"] + transaction_state_changes: Optional[StateChanges] = None @dataclass @@ -165,6 +165,7 @@ class Evm: error: Optional[EthereumException] accessed_addresses: Set[Address] accessed_storage_keys: Set[Tuple[Address, Bytes32]] + state_changes: StateChanges def incorporate_child_on_success(evm: Evm, child_evm: Evm) -> None: @@ -186,6 +187,9 @@ def incorporate_child_on_success(evm: Evm, child_evm: Evm) -> None: evm.accessed_addresses.update(child_evm.accessed_addresses) evm.accessed_storage_keys.update(child_evm.accessed_storage_keys) + # Merge state changes from successful child frame (EIP-7928) + child_evm.state_changes.merge_on_success() + def incorporate_child_on_error(evm: Evm, child_evm: Evm) -> None: """ @@ -200,3 +204,7 @@ def incorporate_child_on_error(evm: Evm, child_evm: Evm) -> None: """ evm.gas_left += child_evm.gas_left + + # Merge state changes from failed child frame (EIP-7928) + # Only reads are merged, writes are discarded + child_evm.state_changes.merge_on_failure() diff --git a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py index c831f9d337..4f2d5d5f1c 100644 --- 
a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py +++ b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py @@ -12,7 +12,7 @@ from ethereum.crypto.hash import keccak256 from ethereum.exceptions import InvalidBlock, InvalidSignatureError -from ..block_access_lists.tracker import track_address_access +# track_address_access removed - now using state_changes.track_address() from ..fork_types import Address, Authorization from ..state import account_exists, get_account, increment_nonce, set_code from ..utils.hexadecimal import hex_to_address @@ -175,12 +175,12 @@ def apply_delegation_tracking( The address delegated to. """ - track_address_access(evm.message.block_env, original_address) + evm.state_changes.track_address(original_address) if delegated_address not in evm.accessed_addresses: evm.accessed_addresses.add(delegated_address) - track_address_access(evm.message.block_env, delegated_address) + evm.state_changes.track_address(delegated_address) def access_delegation( @@ -239,7 +239,7 @@ def set_delegation(message: Message) -> U256: authority_account = get_account(state, authority) authority_code = authority_account.code - track_address_access(message.block_env, authority) + message.block_env.block_state_changes.track_address(authority) if authority_code and not is_valid_delegation(authority_code): continue @@ -255,9 +255,14 @@ def set_delegation(message: Message) -> U256: code_to_set = b"" else: code_to_set = EOA_DELEGATION_MARKER + auth.address - set_code(state, authority, code_to_set, message.block_env) - increment_nonce(state, authority, message.block_env) + # Use transaction frame, not block frame (EIP-7928) + state_changes = ( + message.transaction_state_changes + or message.block_env.block_state_changes + ) + set_code(state, authority, code_to_set, state_changes) + increment_nonce(state, authority, state_changes) if message.code_address is None: raise InvalidBlock("Invalid type 4 transaction: no target") diff --git a/src/ethereum/forks/amsterdam/vm/instructions/environment.py b/src/ethereum/forks/amsterdam/vm/instructions/environment.py index e984d8030f..dae8c20280 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/environment.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/environment.py @@ -17,7 +17,7 @@ from ethereum.crypto.hash import keccak256 from ethereum.utils.numeric import ceil32 -from ...block_access_lists.tracker import track_address_access +# track_address_access removed - now using state_changes.track_address() from ...fork_types import EMPTY_ACCOUNT from ...state import get_account from ...utils.address import to_address_masked @@ -83,7 +83,7 @@ def balance(evm: Evm) -> None: check_gas(evm, gas_cost) if is_cold_access: evm.accessed_addresses.add(address) - track_address_access(evm.message.block_env, address) + evm.state_changes.track_address(address) charge_gas(evm, gas_cost) # OPERATION @@ -353,7 +353,7 @@ def extcodesize(evm: Evm) -> None: check_gas(evm, access_gas_cost) if is_cold_access: evm.accessed_addresses.add(address) - track_address_access(evm.message.block_env, address) + evm.state_changes.track_address(address) charge_gas(evm, access_gas_cost) # OPERATION @@ -399,7 +399,7 @@ def extcodecopy(evm: Evm) -> None: check_gas(evm, total_gas_cost) if is_cold_access: evm.accessed_addresses.add(address) - track_address_access(evm.message.block_env, address) + evm.state_changes.track_address(address) charge_gas(evm, total_gas_cost) # OPERATION @@ -493,7 +493,7 @@ def extcodehash(evm: Evm) -> None: check_gas(evm, access_gas_cost) if is_cold_access: 
evm.accessed_addresses.add(address) - track_address_access(evm.message.block_env, address) + evm.state_changes.track_address(address) charge_gas(evm, access_gas_cost) # OPERATION diff --git a/src/ethereum/forks/amsterdam/vm/instructions/storage.py b/src/ethereum/forks/amsterdam/vm/instructions/storage.py index f8bf08ca29..1709dab5d7 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/storage.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/storage.py @@ -13,10 +13,6 @@ from ethereum_types.numeric import Uint -from ...block_access_lists.tracker import ( - track_storage_read, - track_storage_write, -) from ...state import ( get_storage, get_storage_original, @@ -62,8 +58,7 @@ def sload(evm: Evm) -> None: check_gas(evm, gas_cost) if (evm.message.current_target, key) not in evm.accessed_storage_keys: evm.accessed_storage_keys.add((evm.message.current_target, key)) - track_storage_read( - evm.message.block_env, + evm.state_changes.track_storage_read( evm.message.current_target, key, ) @@ -100,11 +95,6 @@ def sstore(evm: Evm) -> None: state, evm.message.current_target, key ) current_value = get_storage(state, evm.message.current_target, key) - track_storage_read( - evm.message.block_env, - evm.message.current_target, - key, - ) # GAS gas_cost = Uint(0) @@ -124,6 +114,15 @@ def sstore(evm: Evm) -> None: else: gas_cost += GAS_WARM_ACCESS + # Track storage access BEFORE checking gas (EIP-7928) + # Even if we run out of gas, the access attempt should be tracked + evm.state_changes.capture_pre_storage( + evm.message.current_target, key, current_value + ) + evm.state_changes.track_storage_read( + evm.message.current_target, + key, + ) check_gas(evm, gas_cost) if is_cold_access: @@ -156,8 +155,7 @@ def sstore(evm: Evm) -> None: # OPERATION set_storage(state, evm.message.current_target, key, new_value) - track_storage_write( - evm.message.block_env, + evm.state_changes.track_storage_write( evm.message.current_target, key, new_value, diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index 873e1cd4d8..e193b539de 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -16,7 +16,7 @@ from ethereum.utils.numeric import ceil32 -from ...block_access_lists.tracker import track_address_access +# track_address_access removed - now using state_changes.track_address() from ...fork_types import Address from ...state import ( account_has_code_or_nonce, @@ -114,7 +114,7 @@ def generic_create( evm.accessed_addresses.add(contract_address) - track_address_access(evm.message.block_env, contract_address) + evm.state_changes.track_address(contract_address) if account_has_code_or_nonce( state, contract_address @@ -122,7 +122,7 @@ def generic_create( increment_nonce( state, evm.message.current_target, - evm.message.block_env, + evm.state_changes, ) push(evm.stack, U256(0)) return @@ -130,7 +130,7 @@ def generic_create( increment_nonce( state, evm.message.current_target, - evm.message.block_env, + evm.state_changes, ) child_message = Message( @@ -327,6 +327,8 @@ def generic_call( evm.memory, memory_input_start_position, memory_input_size ) + # EIP-7928: Child message inherits transaction_state_changes from parent + # The actual child frame will be created automatically in process_message child_message = Message( block_env=evm.message.block_env, tx_env=evm.message.tx_env, @@ -345,6 +347,7 @@ def generic_call( accessed_storage_keys=evm.accessed_storage_keys.copy(), 
disable_precompiles=disable_precompiles, parent_evm=evm, + transaction_state_changes=evm.message.transaction_state_changes, ) child_evm = process_message(child_message) @@ -428,7 +431,7 @@ def call(evm: Evm) -> None: if is_cold_access: evm.accessed_addresses.add(to) - track_address_access(evm.message.block_env, to) + evm.state_changes.track_address(to) if is_delegated: apply_delegation_tracking(evm, original_address, final_address) @@ -527,7 +530,7 @@ def callcode(evm: Evm) -> None: if is_cold_access: evm.accessed_addresses.add(original_address) - track_address_access(evm.message.block_env, original_address) + evm.state_changes.track_address(original_address) if is_delegated: apply_delegation_tracking(evm, original_address, final_address) @@ -539,6 +542,15 @@ def callcode(evm: Evm) -> None: sender_balance = get_account( evm.message.block_env.state, evm.message.current_target ).balance + + # EIP-7928: For CALLCODE with value transfer, capture pre-balance + # in parent frame. CALLCODE transfers value from/to current_target + # (same address), affecting current storage context, not child frame + if value != 0 and sender_balance >= value: + evm.state_changes.capture_pre_balance( + evm.message.current_target, sender_balance + ) + if sender_balance < value: push(evm.stack, U256(0)) evm.return_data = b"" @@ -601,7 +613,7 @@ def selfdestruct(evm: Evm) -> None: if is_cold_access: evm.accessed_addresses.add(beneficiary) - track_address_access(evm.message.block_env, beneficiary) + evm.state_changes.track_address(beneficiary) charge_gas(evm, gas_cost) @@ -615,7 +627,7 @@ def selfdestruct(evm: Evm) -> None: originator, beneficiary, originator_balance, - evm.message.block_env, + evm.state_changes, ) # register account for deletion only if it was created @@ -627,7 +639,7 @@ def selfdestruct(evm: Evm) -> None: evm.message.block_env.state, originator, U256(0), - evm.message.block_env, + evm.state_changes, ) evm.accounts_to_delete.add(originator) @@ -691,7 +703,7 @@ def delegatecall(evm: Evm) -> None: if is_cold_access: evm.accessed_addresses.add(original_address) - track_address_access(evm.message.block_env, original_address) + evm.state_changes.track_address(original_address) if is_delegated: apply_delegation_tracking(evm, original_address, final_address) @@ -778,7 +790,7 @@ def staticcall(evm: Evm) -> None: if is_cold_access: evm.accessed_addresses.add(to) - track_address_access(evm.message.block_env, to) + evm.state_changes.track_address(to) if is_delegated: apply_delegation_tracking(evm, original_address, final_address) diff --git a/src/ethereum/forks/amsterdam/vm/interpreter.py b/src/ethereum/forks/amsterdam/vm/interpreter.py index 8cd40cbce1..ab07912389 100644 --- a/src/ethereum/forks/amsterdam/vm/interpreter.py +++ b/src/ethereum/forks/amsterdam/vm/interpreter.py @@ -29,12 +29,6 @@ evm_trace, ) -from ..block_access_lists.tracker import ( - begin_call_frame, - commit_call_frame, - rollback_call_frame, - track_address_access, -) from ..blocks import Log from ..fork_types import Address from ..state import ( @@ -50,6 +44,7 @@ rollback_transaction, set_code, ) +from ..state_tracker import StateChanges, create_child_frame from ..vm import Message from ..vm.eoa_delegation import get_delegated_code_address, set_delegation from ..vm.gas import GAS_CODE_DEPOSIT, charge_gas @@ -72,6 +67,59 @@ MAX_INIT_CODE_SIZE = 2 * MAX_CODE_SIZE +def create_call_frame(parent_frame: StateChanges) -> StateChanges: + """ + Create a child frame for call-level state tracking. 
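The pre-balance capture added for CALLCODE self-transfers above exists so that the per-transaction balance normalisation shown earlier in this series can drop entries whose balance ends the transaction where it started. A minimal sketch of that filter, assuming a simplified frame that tracks only balances (the names below are illustrative stand-ins, not the spec's API):

    from dataclasses import dataclass, field
    from typing import Dict, Tuple

    Address = bytes
    TxIndex = int


    @dataclass
    class MiniFrame:
        # post-values recorded during execution, keyed by (address, tx_index)
        balance_changes: Dict[Tuple[Address, TxIndex], int] = field(default_factory=dict)
        # first-write-wins snapshot of the balance seen before the first change
        pre_balances: Dict[Address, int] = field(default_factory=dict)

        def capture_pre_balance(self, address: Address, balance: int) -> None:
            # only the first capture per address is kept
            self.pre_balances.setdefault(address, balance)

        def track_balance_change(self, address: Address, tx_index: TxIndex, new_balance: int) -> None:
            self.balance_changes[(address, tx_index)] = new_balance


    def normalize(frame: MiniFrame, tx_index: TxIndex, post_balances: Dict[Address, int]) -> None:
        # drop this transaction's balance changes that are net-zero overall
        for addr in [a for (a, i) in frame.balance_changes if i == tx_index]:
            pre = frame.pre_balances.get(addr)
            if pre is not None and pre == post_balances[addr]:
                del frame.balance_changes[(addr, tx_index)]


    frame = MiniFrame()
    frame.capture_pre_balance(b"\x01" * 20, 100)
    frame.track_balance_change(b"\x01" * 20, 1, 100)   # e.g. CALLCODE value sent to self
    normalize(frame, 1, {b"\x01" * 20: 100})
    assert frame.balance_changes == {}                  # net-zero change filtered out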
+ + Used for contract calls (CALL, DELEGATECALL, STATICCALL, etc.) where + state changes need to be isolated and potentially reverted. + + Parameters + ---------- + parent_frame : + The parent frame (transaction or another call frame). + + Returns + ------- + call_frame : StateChanges + A new child frame linked to the parent. + + """ + return create_child_frame(parent_frame) + + +def get_message_state_frame(message: Message) -> StateChanges: + """ + Determine and create the appropriate state tracking frame for a message. + + Frame selection logic: + - Nested calls: Create child of parent EVM's frame + - Top-level calls: Create child of transaction frame + - System transactions: Use block frame directly (no isolation needed) + + Parameters + ---------- + message : + The message being processed. + + Returns + ------- + state_frame : StateChanges + The state tracking frame to use for this message execution. + + """ + if message.parent_evm is not None: + # Nested call - create child of parent EVM's frame + return create_call_frame(message.parent_evm.state_changes) + elif message.transaction_state_changes is not None: + # Top-level transaction call - create child of transaction frame + # This ensures contract execution is isolated and can be reverted + return create_call_frame(message.transaction_state_changes) + else: + # System transaction - use block frame directly + return message.block_env.block_state_changes + + @dataclass class MessageCallOutput: """ @@ -140,9 +188,8 @@ def process_message_call(message: Message) -> MessageCallOutput: message.code_address = delegated_address # EIP-7928: Track delegation target when loaded as call target - track_address_access( - block_env, - delegated_address, + message.block_env.block_state_changes.track_address( + delegated_address ) evm = process_message(message) @@ -205,7 +252,9 @@ def process_create_message(message: Message) -> Evm: # added to SELFDESTRUCT by EIP-6780. 
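The selection rules documented in `get_message_state_frame` above can be summarised independently of the real `Message`/`Evm` types; a rough sketch with stand-in classes (not part of the spec):

    from dataclasses import dataclass
    from typing import Optional


    @dataclass
    class Frame:
        parent: Optional["Frame"] = None


    @dataclass
    class Ctx:
        block_frame: Frame
        tx_frame: Optional[Frame] = None       # None for system transactions
        parent_frame: Optional[Frame] = None    # set for nested calls


    def select_frame(ctx: Ctx) -> Frame:
        if ctx.parent_frame is not None:
            # nested call: isolate under the caller's frame
            return Frame(parent=ctx.parent_frame)
        if ctx.tx_frame is not None:
            # top-level call of a regular transaction: child of the tx frame
            return Frame(parent=ctx.tx_frame)
        # system transaction: write straight into the block frame
        return ctx.block_frame


    block = Frame()
    tx = Frame(parent=block)
    top = select_frame(Ctx(block_frame=block, tx_frame=tx))
    nested = select_frame(Ctx(block_frame=block, tx_frame=tx, parent_frame=top))
    assert nested.parent is top and top.parent is tx
    assert select_frame(Ctx(block_frame=block)) is block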
mark_account_created(state, message.current_target) - increment_nonce(state, message.current_target, message.block_env) + increment_nonce( + state, message.current_target, message.block_env.block_state_changes + ) evm = process_message(message) if not evm.error: contract_code = evm.output @@ -224,7 +273,10 @@ def process_create_message(message: Message) -> Evm: evm.error = error else: set_code( - state, message.current_target, contract_code, message.block_env + state, + message.current_target, + contract_code, + message.block_env.block_state_changes, ) commit_transaction(state, transient_storage) else: @@ -252,16 +304,10 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message begin_transaction(state, transient_storage) - if ( - hasattr(message.block_env, "change_tracker") - and message.block_env.change_tracker - ): - begin_call_frame(message.block_env) - # Track target address access when processing a message - track_address_access(message.block_env, message.current_target) + state_changes = get_message_state_frame(message) + state_changes.track_address(message.current_target) if message.should_transfer_value and message.value != 0: move_ether( @@ -269,30 +315,24 @@ def process_message(message: Message) -> Evm: message.caller, message.current_target, message.value, - message.block_env, + state_changes, ) - evm = execute_code(message) + evm = execute_code(message, state_changes) if evm.error: # revert state to the last saved checkpoint # since the message call resulted in an error rollback_transaction(state, transient_storage) - if ( - hasattr(message.block_env, "change_tracker") - and message.block_env.change_tracker - ): - rollback_call_frame(message.block_env) + # Merge call frame state changes into parent + evm.state_changes.merge_on_failure() else: commit_transaction(state, transient_storage) - if ( - hasattr(message.block_env, "change_tracker") - and message.block_env.change_tracker - ): - commit_call_frame(message.block_env) + # Merge call frame state changes into parent + evm.state_changes.merge_on_success() return evm -def execute_code(message: Message) -> Evm: +def execute_code(message: Message, state_changes: StateChanges) -> Evm: """ Executes bytecode present in the `message`. @@ -300,6 +340,8 @@ def execute_code(message: Message) -> Evm: ---------- message : Transaction specific items. + state_changes : + The state changes frame to use for tracking. 
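The commit/rollback handling in `process_message` above leans on the two merge operations of `StateChanges`: a committed frame contributes its writes, while a reverted frame contributes the slots it touched only as reads and discards the written values. A minimal sketch of those semantics, reduced to storage tracking with a simplified stand-in class:

    from dataclasses import dataclass, field
    from typing import Dict, Optional, Set, Tuple

    Slot = Tuple[bytes, bytes]  # (address, storage key)


    @dataclass
    class StorageFrame:
        parent: Optional["StorageFrame"] = None
        reads: Set[Slot] = field(default_factory=set)
        writes: Dict[Slot, int] = field(default_factory=dict)

        def merge_on_success(self) -> None:
            # a committed call keeps both its reads and its writes
            if self.parent is None:
                return
            self.parent.reads.update(self.reads)
            self.parent.writes.update(self.writes)

        def merge_on_failure(self) -> None:
            # a reverted call still touched its slots, so failed writes are
            # kept only as reads; the written values are dropped
            if self.parent is None:
                return
            self.parent.reads.update(self.reads)
            self.parent.reads.update(self.writes.keys())


    tx = StorageFrame()
    call = StorageFrame(parent=tx)
    call.writes[(b"\xaa" * 20, b"\x00" * 32)] = 7
    call.merge_on_failure()
    assert not tx.writes and (b"\xaa" * 20, b"\x00" * 32) in tx.reads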
Returns ------- @@ -327,6 +369,7 @@ def execute_code(message: Message) -> Evm: error=None, accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, + state_changes=state_changes, ) try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: diff --git a/src/ethereum_spec_tools/evm_tools/loaders/fork_loader.py b/src/ethereum_spec_tools/evm_tools/loaders/fork_loader.py index 5aebe681f3..002bde2e84 100644 --- a/src/ethereum_spec_tools/evm_tools/loaders/fork_loader.py +++ b/src/ethereum_spec_tools/evm_tools/loaders/fork_loader.py @@ -132,11 +132,6 @@ def compute_block_access_list_hash(self) -> Any: "block_access_lists" ).compute_block_access_list_hash - @property - def set_block_access_index(self) -> Any: - """set_block_access_index function of the fork.""" - return self._module("block_access_lists").set_block_access_index - @property def signing_hash_2930(self) -> Any: """signing_hash_2930 function of the fork.""" diff --git a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py index 3c07ec1d20..2100983144 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py @@ -10,7 +10,7 @@ from typing import Any, Final, TextIO, Tuple, Type, TypeVar from ethereum_rlp import rlp -from ethereum_types.numeric import U64, U256, Uint, ulen +from ethereum_types.numeric import U64, U256, Uint from typing_extensions import override from ethereum import trace @@ -411,18 +411,9 @@ def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: f"Transaction {original_idx} failed: {e!r}" ) + # Post-execution operations use index N+1 if self.fork.is_after_fork("amsterdam"): - num_transactions = ulen( - [ - tx_idx - for tx_idx in self.txs.successfully_parsed - if tx_idx is not None - ] - ) - - # post-execution use n + 1 - post_execution_index = num_transactions + Uint(1) - self.fork.set_block_access_index(block_env, post_execution_index) + block_env.block_state_changes.increment_index() if not self.fork.proof_of_stake: if self.options.state_reward is None: @@ -441,8 +432,9 @@ def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: self.fork.process_general_purpose_requests(block_env, block_output) if self.fork.is_after_fork("amsterdam"): + # Build block access list from block_env.block_state_changes block_output.block_access_list = self.fork.build_block_access_list( - block_env.change_tracker.block_access_list_builder + block_env.block_state_changes ) def run_blockchain_test(self) -> None: From 2ec75b29e92e28246da108e37105e4dc72f474b7 Mon Sep 17 00:00:00 2001 From: fselmo Date: Tue, 11 Nov 2025 09:26:14 -0700 Subject: [PATCH 018/154] fix(spec-specs): Mark original addr warm before delegation Co-authored-by: spencer spencer.taylor-brown@ethereum.org --- .../forks/amsterdam/vm/instructions/system.py | 24 +++++++------------ 1 file changed, 8 insertions(+), 16 deletions(-) diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index e193b539de..136f194655 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -401,6 +401,8 @@ def call(evm: Evm) -> None: access_gas_cost = ( GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS ) + if is_cold_access: + evm.accessed_addresses.add(to) ( is_delegated, @@ -428,11 +430,7 @@ def call(evm: Evm) -> None: check_gas(evm, message_call_gas.cost + 
extend_memory.cost) - if is_cold_access: - evm.accessed_addresses.add(to) - evm.state_changes.track_address(to) - if is_delegated: apply_delegation_tracking(evm, original_address, final_address) @@ -503,6 +501,8 @@ def callcode(evm: Evm) -> None: access_gas_cost = ( GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS ) + if is_cold_access: + evm.accessed_addresses.add(code_address) ( is_delegated, @@ -527,11 +527,7 @@ def callcode(evm: Evm) -> None: check_gas(evm, message_call_gas.cost + extend_memory.cost) - if is_cold_access: - evm.accessed_addresses.add(original_address) - evm.state_changes.track_address(original_address) - if is_delegated: apply_delegation_tracking(evm, original_address, final_address) @@ -681,6 +677,8 @@ def delegatecall(evm: Evm) -> None: access_gas_cost = ( GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS ) + if is_cold_access: + evm.accessed_addresses.add(code_address) ( is_delegated, @@ -700,11 +698,7 @@ def delegatecall(evm: Evm) -> None: check_gas(evm, message_call_gas.cost + extend_memory.cost) - if is_cold_access: - evm.accessed_addresses.add(original_address) - evm.state_changes.track_address(original_address) - if is_delegated: apply_delegation_tracking(evm, original_address, final_address) @@ -764,6 +758,8 @@ def staticcall(evm: Evm) -> None: access_gas_cost = ( GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS ) + if is_cold_access: + evm.accessed_addresses.add(to) ( is_delegated, @@ -787,11 +783,7 @@ def staticcall(evm: Evm) -> None: check_gas(evm, message_call_gas.cost + extend_memory.cost) - if is_cold_access: - evm.accessed_addresses.add(to) - evm.state_changes.track_address(to) - if is_delegated: apply_delegation_tracking(evm, original_address, final_address) From 18472bfd0e7aec274f715ad6036050d5473eff2f Mon Sep 17 00:00:00 2001 From: fselmo Date: Tue, 11 Nov 2025 15:56:19 -0700 Subject: [PATCH 019/154] fix(spec-specs): Make sure we account for no changes --- .../forks/amsterdam/block_access_lists/builder.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/ethereum/forks/amsterdam/block_access_lists/builder.py b/src/ethereum/forks/amsterdam/block_access_lists/builder.py index 07a0ffe7c5..843cc42c4e 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/builder.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/builder.py @@ -483,11 +483,18 @@ def build_block_access_list( for address, slot in state_changes.storage_reads: add_storage_read(builder, address, slot) - # Add all storage writes + # Add all storage writes, filtering net-zero changes for (address, slot), ( block_access_index, value, ) in state_changes.storage_writes.items(): + # Check if this is a net-zero change by comparing with pre-state + if (address, slot) in state_changes.pre_storage: + if state_changes.pre_storage[(address, slot)] == value: + # Net-zero change - convert to read only + add_storage_read(builder, address, slot) + continue + # Convert U256 to Bytes32 for storage value_bytes = Bytes32(value.to_bytes(U256(32), "big")) add_storage_write( From 0f8bc3bbf15fe0c10012cf0d7239e351519794b6 Mon Sep 17 00:00:00 2001 From: fselmo Date: Wed, 12 Nov 2025 11:40:18 -0700 Subject: [PATCH 020/154] fix(spec-specs): Better tracking for code changes; ensure with BAL test --- .../amsterdam/block_access_lists/builder.py | 5 +- src/ethereum/forks/amsterdam/state.py | 8 +-- src/ethereum/forks/amsterdam/state_tracker.py | 48 ++++++++-------- .../forks/amsterdam/vm/eoa_delegation.py | 3 + 
.../test_block_access_lists_eip7702.py | 55 +++++++++++++++++++ .../test_cases.md | 1 + 6 files changed, 89 insertions(+), 31 deletions(-) diff --git a/src/ethereum/forks/amsterdam/block_access_lists/builder.py b/src/ethereum/forks/amsterdam/block_access_lists/builder.py index 843cc42c4e..3e1870b0b5 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/builder.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/builder.py @@ -513,7 +513,10 @@ def build_block_access_list( add_nonce_change(builder, address, block_access_index, new_nonce) # Add all code changes - for address, block_access_index, new_code in state_changes.code_changes: + for ( + address, + block_access_index, + ), new_code in state_changes.code_changes.items(): add_code_change(builder, address, block_access_index, new_code) return _build_from_builder(builder) diff --git a/src/ethereum/forks/amsterdam/state.py b/src/ethereum/forks/amsterdam/state.py index 326595ac93..8f58b6e815 100644 --- a/src/ethereum/forks/amsterdam/state.py +++ b/src/ethereum/forks/amsterdam/state.py @@ -641,13 +641,7 @@ def write_code(sender: Account) -> None: sender.code = code modify_state(state, address, write_code) - - # Only track code changes if it's not setting empty code on a - # newly created address (EIP-7928). For newly created addresses, setting - # code to b"" is not a meaningful state change since the address - # had no code to begin with. - if not (code == b"" and address in state.created_accounts): - state_changes.track_code_change(address, code) + state_changes.track_code_change(address, code) def get_storage_original(state: State, address: Address, key: Bytes32) -> U256: diff --git a/src/ethereum/forks/amsterdam/state_tracker.py b/src/ethereum/forks/amsterdam/state_tracker.py index 7ee77259b1..42203d5e8d 100644 --- a/src/ethereum/forks/amsterdam/state_tracker.py +++ b/src/ethereum/forks/amsterdam/state_tracker.py @@ -56,8 +56,8 @@ class StateChanges: nonce_changes: Set[Tuple[Address, BlockAccessIndex, U64]] = field( default_factory=set ) - code_changes: Set[Tuple[Address, BlockAccessIndex, Bytes]] = field( - default_factory=set + code_changes: Dict[Tuple[Address, BlockAccessIndex], Bytes] = field( + default_factory=dict ) # Pre-state captures for net-zero filtering @@ -66,6 +66,7 @@ class StateChanges: pre_storage: Dict[Tuple[Address, Bytes32], U256] = field( default_factory=dict ) + pre_code: Dict[Address, Bytes] = field(default_factory=dict) def get_block_access_index(self) -> BlockAccessIndex: """Get current block access index by walking to root.""" @@ -92,6 +93,11 @@ def capture_pre_storage( if slot not in self.pre_storage: self.pre_storage[slot] = value + def capture_pre_code(self, address: Address, code: Bytes) -> None: + """Capture pre-code (first-write-wins).""" + if address not in self.pre_code: + self.pre_code[address] = code + def track_address(self, address: Address) -> None: """Track that an address was accessed.""" self.touched_addresses.add(address) @@ -125,9 +131,7 @@ def track_nonce_change(self, address: Address, new_nonce: U64) -> None: def track_code_change(self, address: Address, new_code: Bytes) -> None: """Track a code change.""" - self.code_changes.add( - (address, self.get_block_access_index(), new_code) - ) + self.code_changes[(address, self.get_block_access_index())] = new_code def increment_index(self) -> None: """Increment block access index by walking to root.""" @@ -163,6 +167,9 @@ def merge_on_success(self) -> None: for slot, value in self.pre_storage.items(): if slot not in 
self.parent.pre_storage: self.parent.pre_storage[slot] = value + for addr, code in self.pre_code.items(): + if addr not in self.parent.pre_code: + self.parent.pre_code[addr] = code # Merge storage operations, filtering noop writes self.parent.storage_reads.update(self.storage_reads) @@ -205,19 +212,17 @@ def merge_on_success(self) -> None: for addr, (idx, final_nonce) in address_final_nonces.items(): self.parent.nonce_changes.add((addr, idx, final_nonce)) - # Merge code changes - keep only latest code per address - address_final_code: Dict[Address, Tuple[BlockAccessIndex, Bytes]] = {} - for addr, idx, code in self.code_changes: - # Keep the change with highest index (most recent) - if ( - addr not in address_final_code - or idx >= address_final_code[addr][0] - ): - address_final_code[addr] = (idx, code) - - # Merge final code changes - for addr, (idx, final_code) in address_final_code.items(): - self.parent.code_changes.add((addr, idx, final_code)) + # Merge code changes - filter net-zero changes + # code_changes keyed by (address, index) + for (addr, idx), final_code in self.code_changes.items(): + if addr in self.pre_code: + if self.pre_code[addr] != final_code: + # Net change occurred - merge the final code + self.parent.code_changes[(addr, idx)] = final_code + # else: Net-zero change - skip entirely + else: + # No pre-code captured, merge as-is + self.parent.code_changes[(addr, idx)] = final_code def merge_on_failure(self) -> None: """ @@ -275,11 +280,8 @@ def handle_in_transaction_selfdestruct( } # Remove code changes from current transaction - state_changes.code_changes = { - (addr, idx, code) - for addr, idx, code in state_changes.code_changes - if not (addr == address and idx == current_block_access_index) - } + if (address, current_block_access_index) in state_changes.code_changes: + del state_changes.code_changes[(address, current_block_access_index)] # Convert storage writes from current transaction to reads for (addr, key), (idx, _value) in list( diff --git a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py index 4f2d5d5f1c..bce49462f2 100644 --- a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py +++ b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py @@ -261,6 +261,9 @@ def set_delegation(message: Message) -> U256: message.transaction_state_changes or message.block_env.block_state_changes ) + + # Capture pre-code just before setting to enable no-op filtering + state_changes.capture_pre_code(authority, authority_code) set_code(state, authority, code_to_set, state_changes) increment_nonce(state, authority, state_changes) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py index 1e14bb5a1b..4616d2cbe4 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py @@ -608,3 +608,58 @@ def test_bal_7702_delegated_via_call_opcode( blocks=[block], post=post, ) + + +def test_bal_7702_null_address_delegation_no_code_change( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL does not record spurious code changes when delegating to + NULL_ADDRESS (sets code to empty on an account that already has + empty code). 
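The null-address delegation case described here works because `capture_pre_code` is first-write-wins and the merge drops code changes whose final value equals that snapshot. A small stand-alone illustration under those assumptions, using plain dicts rather than the spec's frame class:

    from typing import Dict, Tuple

    pre_code: Dict[bytes, bytes] = {}
    code_changes: Dict[Tuple[bytes, int], bytes] = {}


    def capture_pre_code(address: bytes, code: bytes) -> None:
        # first-write-wins: later captures never overwrite the original snapshot
        pre_code.setdefault(address, code)


    def track_code_change(address: bytes, tx_index: int, new_code: bytes) -> None:
        code_changes[(address, tx_index)] = new_code


    def filtered_changes() -> Dict[Tuple[bytes, int], bytes]:
        # drop changes whose final code equals the pre-transaction snapshot
        return {
            key: code
            for key, code in code_changes.items()
            if pre_code.get(key[0]) != code
        }


    authority = b"\x01" * 20
    capture_pre_code(authority, b"")      # plain EOA: empty code before delegation
    track_code_change(authority, 1, b"")  # delegation to the null address clears code
    assert filtered_changes() == {}       # net-zero, so no code change is reported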
+ """ + alice = pre.fund_eoa() + bob = pre.fund_eoa(amount=0) + + tx = Transaction( + sender=alice, + to=bob, + value=10, + gas_limit=1_000_000, + authorization_list=[ + AuthorizationTuple( + address=0, + nonce=1, + signer=alice, + ) + ], + ) + + # `alice` should appear in BAL with nonce change only, NOT code change + # because setting code from b"" to b"" is a net-zero change + account_expectations = { + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=2)], + code_changes=[], # explicit check for no code changes + ), + bob: BalAccountExpectation( + balance_changes=[BalBalanceChange(tx_index=1, post_balance=10)] + ), + } + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + ], + post={ + alice: Account(nonce=2, code=b""), + bob: Account(balance=10), + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 351395479d..34fa49528d 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -39,6 +39,7 @@ | `test_bal_7702_invalid_nonce_authorization` | Ensure BAL handles failed authorization due to wrong nonce | `Relayer` sends sponsored transaction to Bob (10 wei transfer succeeds) but Alice's authorization to delegate to `Oracle` uses incorrect nonce, causing silent authorization failure | BAL **MUST** include Alice with empty changes (account access), Bob with `balance_changes` (receives 10 wei), Relayer with `nonce_changes`. **MUST NOT** include `Oracle` (authorization failed, no delegation) | ✅ Completed | | `test_bal_7702_invalid_chain_id_authorization` | Ensure BAL handles failed authorization due to wrong chain id | `Relayer` sends sponsored transaction to Bob (10 wei transfer succeeds) but Alice's authorization to delegate to `Oracle` uses incorrect chain id, causing authorization failure before account access | BAL **MUST** include Bob with `balance_changes` (receives 10 wei), Relayer with `nonce_changes`. **MUST NOT** include Alice (authorization fails before loading account) or `Oracle` (authorization failed, no delegation) | ✅ Completed | | `test_bal_7702_delegated_via_call_opcode` | Ensure BAL captures delegation target when a contract uses *CALL opcodes to call a delegated account | Pre-deployed contract `Alice` delegated to `Oracle`. `Caller` contract uses CALL/CALLCODE/DELEGATECALL/STATICCALL to call `Alice`. Bob sends transaction to `Caller`. | BAL **MUST** include Bob: `nonce_changes`. `Caller`: empty changes (account access). `Alice`: empty changes (account access - delegated account being called). `Oracle`: empty changes (delegation target access). | ✅ Completed | +| `test_bal_7702_null_address_delegation` | Ensure BAL does not record spurious code changes for net-zero code operations | Alice sends transaction with authorization delegating to NULL_ADDRESS (0x0), which sets code to `b""` on an account that already has `b""` code. Transaction sends 10 wei to Bob. | BAL **MUST** include Alice with `nonce_changes` (tx nonce + auth nonce increment) but **MUST NOT** include `code_changes` (setting `b"" -> b""` is net-zero and filtered out). Bob: `balance_changes` (receives 10 wei). This ensures net-zero code change is not recorded. 
| `test_bal_sstore_and_oog` | Ensure BAL handles OOG during SSTORE execution at various gas boundaries (EIP-2200 stipend and implicit SLOAD) | Alice calls contract that attempts `SSTORE` to cold slot `0x01`. Parameterized: (1) OOG at EIP-2200 stipend check (2300 gas after PUSH opcodes) - fails before implicit SLOAD, (2) OOG at stipend + 1 (2301 gas) - passes stipend check but fails after implicit SLOAD, (3) OOG at exact gas - 1, (4) Successful SSTORE with exact gas. | For case (1): BAL **MUST NOT** include slot `0x01` in `storage_reads` or `storage_changes` (fails before implicit SLOAD). For cases (2) and (3): BAL **MUST** include slot `0x01` in `storage_reads` (implicit SLOAD occurred) but **MUST NOT** include in `storage_changes` (write didn't complete). For case (4): BAL **MUST** include slot `0x01` in `storage_changes` only (successful write; read is filtered by builder). | ✅ Completed | | `test_bal_sload_and_oog` | Ensure BAL handles OOG during SLOAD execution correctly | Alice calls contract that attempts `SLOAD` from cold slot `0x01`. Parameterized: (1) OOG at SLOAD opcode (insufficient gas), (2) Successful SLOAD execution. | For OOG case: BAL **MUST NOT** contain slot `0x01` in `storage_reads` since storage wasn't accessed. For success case: BAL **MUST** contain slot `0x01` in `storage_reads`. | ✅ Completed | | `test_bal_balance_and_oog` | Ensure BAL handles OOG during BALANCE opcode execution correctly | Alice calls contract that attempts `BALANCE` opcode on cold target account. Parameterized: (1) OOG at BALANCE opcode (insufficient gas), (2) Successful BALANCE execution. | For OOG case: BAL **MUST NOT** include target account (wasn't accessed). For success case: BAL **MUST** include target account in `account_changes`. | ✅ Completed | From ab4f8f71ea6a353b0007318f521bdd42bc48f67b Mon Sep 17 00:00:00 2001 From: fselmo Date: Wed, 12 Nov 2025 13:57:12 -0700 Subject: [PATCH 021/154] fix(spec-specs): Use child frame for create message --- .../forks/amsterdam/vm/interpreter.py | 15 +++- .../test_block_access_lists_opcodes.py | 83 +++++++++++++++++++ .../test_cases.md | 1 + 3 files changed, 95 insertions(+), 4 deletions(-) diff --git a/src/ethereum/forks/amsterdam/vm/interpreter.py b/src/ethereum/forks/amsterdam/vm/interpreter.py index ab07912389..9d6190b0e8 100644 --- a/src/ethereum/forks/amsterdam/vm/interpreter.py +++ b/src/ethereum/forks/amsterdam/vm/interpreter.py @@ -252,9 +252,11 @@ def process_create_message(message: Message) -> Evm: # added to SELFDESTRUCT by EIP-6780. mark_account_created(state, message.current_target) - increment_nonce( - state, message.current_target, message.block_env.block_state_changes - ) + # Create a temporary child frame for tracking changes that may be rolled + # back on OOG during code deposit. This frame is merged only on success. 
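The temporary child frame described in this comment records its nonce and code changes under the current block access index, which every frame resolves by walking its parent chain to the root (block) frame; only the root holds the counter, and `increment_index` bumps it there. A compact sketch of that lookup with a simplified stand-in class:

    from dataclasses import dataclass
    from typing import Optional


    @dataclass
    class IndexedFrame:
        parent: Optional["IndexedFrame"] = None
        # only the root frame's value is meaningful
        block_access_index: int = 0

        def _root(self) -> "IndexedFrame":
            frame = self
            while frame.parent is not None:
                frame = frame.parent
            return frame

        def get_block_access_index(self) -> int:
            # every frame reads the single counter kept on the root (block) frame
            return self._root().block_access_index

        def increment_index(self) -> int:
            # bumped on the root, e.g. so post-execution work lands at index N+1
            root = self._root()
            root.block_access_index += 1
            return root.block_access_index


    block = IndexedFrame()
    tx = IndexedFrame(parent=block)
    call = IndexedFrame(parent=tx)
    block.increment_index()
    assert call.get_block_access_index() == 1   # all frames observe the same counter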
+ create_frame = create_child_frame(message.block_env.block_state_changes) + + increment_nonce(state, message.current_target, create_frame) evm = process_message(message) if not evm.error: contract_code = evm.output @@ -268,6 +270,9 @@ def process_create_message(message: Message) -> Evm: raise OutOfGasError except ExceptionalHalt as error: rollback_transaction(state, transient_storage) + # Merge create_frame on failure - keeps reads, discards writes + # (address access is preserved, nonce change is discarded) + create_frame.merge_on_failure() evm.gas_left = Uint(0) evm.output = b"" evm.error = error @@ -276,9 +281,11 @@ def process_create_message(message: Message) -> Evm: state, message.current_target, contract_code, - message.block_env.block_state_changes, + create_frame, ) commit_transaction(state, transient_storage) + # Merge create_frame on success - includes nonce and code changes + create_frame.merge_on_success() else: rollback_transaction(state, transient_storage) return evm diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py index 17799d3655..207a87d9a8 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py @@ -32,6 +32,7 @@ Fork, Op, Transaction, + compute_create_address, ) from .spec import ref_spec_7928 @@ -754,3 +755,85 @@ def test_bal_storage_write_read_cross_frame( oracle: Account(storage={0x01: 0x42}), }, ) + + +def test_bal_create_oog_code_deposit( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, +) -> None: + """ + Ensure BAL correctly handles CREATE that runs out of gas during code + deposit. The contract address should appear with empty changes (read + during collision check) but no nonce or code changes (rolled back). 
+ """ + alice = pre.fund_eoa() + + # create init code that returns a very large contract to force OOG + deposited_len = 10_000 + initcode = Op.RETURN(0, deposited_len) + + factory = pre.deploy_contract( + code=Op.MSTORE(0, Op.PUSH32(bytes(initcode))) + + Op.SSTORE( + 1, Op.CREATE(offset=32 - len(initcode), size=len(initcode)) + ) + + Op.STOP, + storage={1: 0xDEADBEEF}, + ) + + contract_address = compute_create_address(address=factory, nonce=1) + + intrinsic_gas_calculator = fork.transaction_intrinsic_cost_calculator() + intrinsic_gas = intrinsic_gas_calculator( + calldata=b"", + contract_creation=False, + access_list=[], + ) + + tx = Transaction( + sender=alice, + to=factory, + gas_limit=intrinsic_gas + 500_000, # insufficient for deposit + ) + + # BAL expectations: + # - Alice: nonce change (tx sender) + # - Factory: nonce change (CREATE increments factory nonce) + # - Contract address: empty changes (read during collision check, + # nonce/code changes rolled back on OOG) + account_expectations = { + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + factory: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=2)], + storage_changes=[ + BalStorageSlot( + slot=1, + slot_changes=[ + # SSTORE saves 0 (CREATE failed) + BalStorageChange(tx_index=1, post_value=0), + ], + ) + ], + ), + contract_address: BalAccountExpectation.empty(), + } + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + ], + post={ + alice: Account(nonce=1), + factory: Account(nonce=2, storage={1: 0}), + contract_address: Account.NONEXISTENT, + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 34fa49528d..ceea2b0842 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -49,6 +49,7 @@ | `test_bal_extcodecopy_and_oog` | Ensure BAL handles OOG during EXTCODECOPY opcode execution correctly | Alice calls contract that attempts `EXTCODECOPY` from cold target contract. Parameterized: (1) OOG at EXTCODECOPY opcode (insufficient gas), (2) Successful EXTCODECOPY execution. | For OOG case: BAL **MUST NOT** include target contract (wasn't accessed). For success case: BAL **MUST** include target contract in `account_changes`. 
| ✅ Completed | | `test_bal_oog_7702_delegated_cold_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when both accounts are cold | Alice calls cold delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (first cold load succeeds) but **MUST NOT** include `TargetContract` (second cold load fails due to OOG) | 🟡 Planned | | `test_bal_oog_7702_delegated_warm_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when first account is warm, second is cold | Alice calls warm delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (warm load succeeds) but **MUST NOT** include `TargetContract` (cold load fails due to OOG) | 🟡 Planned | +| `test_bal_create_oog_code_deposit` | Ensure BAL correctly handles CREATE OOG during code deposit | Alice calls factory contract that executes CREATE with init code returning 10,000 bytes. Transaction has insufficient gas for code deposit. Factory nonce increments, CREATE returns 0 and stores in slot 1. | BAL **MUST** include Alice with `nonce_changes`. Factory with `nonce_changes` (incremented by CREATE) and `storage_changes` (slot 1 = 0). Contract address with empty changes (read during collision check). **MUST NOT** include nonce or code changes for contract address (rolled back on OOG). Contract address **MUST NOT** exist in post-state. | ✅ Completed | | `test_bal_invalid_missing_nonce` | Verify clients reject blocks with BAL missing required nonce changes | Alice sends transaction to Bob; BAL modifier removes Alice's nonce change entry | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate that all sender accounts have nonce changes recorded. | ✅ Completed | | `test_bal_invalid_nonce_value` | Verify clients reject blocks with incorrect nonce values in BAL | Alice sends transaction to Bob; BAL modifier changes Alice's nonce to incorrect value | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate nonce values match actual state transitions. | ✅ Completed | | `test_bal_invalid_storage_value` | Verify clients reject blocks with incorrect storage values in BAL | Alice calls contract that writes to storage; BAL modifier changes storage value to incorrect value | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate storage change values match actual state transitions. | ✅ Completed | From 1c119fbdeb0d8aceec61f7805074ff1d6719fc6d Mon Sep 17 00:00:00 2001 From: fselmo Date: Wed, 12 Nov 2025 15:07:17 -0700 Subject: [PATCH 022/154] fix(spec-specs): Normalize transaction before merging to block frame --- src/ethereum/forks/amsterdam/fork.py | 20 ++--- .../test_block_access_lists.py | 89 +++++++++++++++++++ .../test_cases.md | 1 + 3 files changed, 98 insertions(+), 12 deletions(-) diff --git a/src/ethereum/forks/amsterdam/fork.py b/src/ethereum/forks/amsterdam/fork.py index 86a7089a30..25968e4598 100644 --- a/src/ethereum/forks/amsterdam/fork.py +++ b/src/ethereum/forks/amsterdam/fork.py @@ -1051,13 +1051,20 @@ def process_transaction( block_output.block_logs += tx_output.logs + # EIP-7928: Normalize balance changes for this transaction before merging + # into block frame. 
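The ordering spelled out in this comment matters because each transaction frame snapshots a balance as first seen within that transaction, whereas the block frame ends up holding the earliest snapshot taken anywhere in the block. The `test_bal_multiple_balance_changes_same_account` test added in this patch (Bob funded in tx 1, emptied in tx 2) illustrates the difference; a sketch with illustrative names and amounts, not the spec's API:

    from typing import Dict, Tuple

    BalanceChanges = Dict[Tuple[str, int], int]   # (account, tx_index) -> post balance


    def normalize(changes: BalanceChanges, tx_index: int,
                  pre: Dict[str, int], post: Dict[str, int]) -> None:
        # drop this transaction's entries whose balance ended where `pre` says it began
        for account in [a for (a, i) in changes if i == tx_index]:
            if pre.get(account) == post[account]:
                del changes[(account, tx_index)]


    changes: BalanceChanges = {("bob", 1): 150, ("bob", 2): 0}

    # tx 2: Bob spends his whole balance, ending at 0. Normalising against the
    # transaction frame's snapshot (150 at the start of tx 2) keeps the entry,
    # because the balance really did change within this transaction.
    normalize(changes, 2, pre={"bob": 150}, post={"bob": 0})
    assert ("bob", 2) in changes

    # Normalising against a block-lifetime snapshot (0 before tx 1) would have
    # compared 0 with 0 and wrongly discarded the tx-2 entry.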
+ normalize_balance_changes_for_transaction( + tx_state_changes, + BlockAccessIndex(tx_state_changes.get_block_access_index()), + block_env.state, + ) + # Merge transaction frame into block frame tx_state_changes.merge_on_success() # EIP-7928: Handle in-transaction self-destruct AFTER merge # Convert storage writes to reads and remove nonce/code changes # Only accounts created in same tx are in accounts_to_delete per EIP-6780 - for address in tx_output.accounts_to_delete: handle_in_transaction_selfdestruct( block_env.block_state_changes, @@ -1068,17 +1075,6 @@ def process_transaction( ) destroy_account(block_env.state, address) - # EIP-7928: Normalize balance changes for this transaction - # Remove balance changes where post-tx balance equals pre-tx balance - - normalize_balance_changes_for_transaction( - block_env.block_state_changes, - BlockAccessIndex( - block_env.block_state_changes.get_block_access_index() - ), - block_env.state, - ) - def process_withdrawals( block_env: vm.BlockEnvironment, diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py index 9b6793a87c..865f79827f 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py @@ -1925,3 +1925,92 @@ def test_bal_nonexistent_account_access_value_transfer( else Account.NONEXISTENT, }, ) + + +def test_bal_multiple_balance_changes_same_account( + pre: Alloc, + fork: Fork, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL correctly tracks multiple balance changes to same account + across multiple transactions. + + An account that receives funds in TX0 and spends them in TX1 should + have TWO balance change entries in the BAL, one for each transaction. 
+ """ + alice = pre.fund_eoa() + bob = pre.fund_eoa(amount=0) + charlie = pre.fund_eoa(amount=0) + + intrinsic_gas_calculator = fork.transaction_intrinsic_cost_calculator() + tx_intrinsic_gas = intrinsic_gas_calculator(calldata=b"", access_list=[]) + + # bob receives funds in tx0, then spends everything in tx1 + gas_price = 10 + tx1_gas_cost = tx_intrinsic_gas * gas_price + spend_amount = 100 + funding_amount = tx1_gas_cost + spend_amount + + tx0 = Transaction( + sender=alice, + to=bob, + value=funding_amount, + gas_limit=tx_intrinsic_gas, + gas_price=gas_price, + ) + + tx1 = Transaction( + sender=bob, + to=charlie, + value=spend_amount, + gas_limit=tx_intrinsic_gas, + gas_price=gas_price, + ) + + bob_balance_after_tx0 = funding_amount + bob_balance_after_tx1 = 0 + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx0, tx1], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(tx_index=1, post_nonce=1) + ], + ), + bob: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(tx_index=2, post_nonce=1) + ], + balance_changes=[ + BalBalanceChange( + tx_index=1, + post_balance=bob_balance_after_tx0, + ), + BalBalanceChange( + tx_index=2, + post_balance=bob_balance_after_tx1, + ), + ], + ), + charlie: BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + tx_index=2, post_balance=spend_amount + ) + ], + ), + } + ), + ) + ], + post={ + bob: Account(nonce=1, balance=bob_balance_after_tx1), + charlie: Account(balance=spend_amount), + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index ceea2b0842..c33f56eb43 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -49,6 +49,7 @@ | `test_bal_extcodecopy_and_oog` | Ensure BAL handles OOG during EXTCODECOPY opcode execution correctly | Alice calls contract that attempts `EXTCODECOPY` from cold target contract. Parameterized: (1) OOG at EXTCODECOPY opcode (insufficient gas), (2) Successful EXTCODECOPY execution. | For OOG case: BAL **MUST NOT** include target contract (wasn't accessed). For success case: BAL **MUST** include target contract in `account_changes`. | ✅ Completed | | `test_bal_oog_7702_delegated_cold_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when both accounts are cold | Alice calls cold delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (first cold load succeeds) but **MUST NOT** include `TargetContract` (second cold load fails due to OOG) | 🟡 Planned | | `test_bal_oog_7702_delegated_warm_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when first account is warm, second is cold | Alice calls warm delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (warm load succeeds) but **MUST NOT** include `TargetContract` (cold load fails due to OOG) | 🟡 Planned | +| `test_bal_multiple_balance_changes_same_account` | Ensure BAL tracks multiple balance changes to same account across transactions | Alice funds Bob (starts at 0) in tx0 with exact amount needed. Bob spends everything in tx1 to Charlie. 
Bob's balance: 0 → funding_amount → 0 | BAL **MUST** include Bob with two `balance_changes`: one at txIndex=1 (receives funds) and one at txIndex=2 (balance returns to 0). This tests balance tracking across two transactions. | ✅ Completed | | `test_bal_create_oog_code_deposit` | Ensure BAL correctly handles CREATE OOG during code deposit | Alice calls factory contract that executes CREATE with init code returning 10,000 bytes. Transaction has insufficient gas for code deposit. Factory nonce increments, CREATE returns 0 and stores in slot 1. | BAL **MUST** include Alice with `nonce_changes`. Factory with `nonce_changes` (incremented by CREATE) and `storage_changes` (slot 1 = 0). Contract address with empty changes (read during collision check). **MUST NOT** include nonce or code changes for contract address (rolled back on OOG). Contract address **MUST NOT** exist in post-state. | ✅ Completed | | `test_bal_invalid_missing_nonce` | Verify clients reject blocks with BAL missing required nonce changes | Alice sends transaction to Bob; BAL modifier removes Alice's nonce change entry | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate that all sender accounts have nonce changes recorded. | ✅ Completed | | `test_bal_invalid_nonce_value` | Verify clients reject blocks with incorrect nonce values in BAL | Alice sends transaction to Bob; BAL modifier changes Alice's nonce to incorrect value | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate nonce values match actual state transitions. | ✅ Completed | From 4e8cbccf7f554051a161d28893794ea1ba670f57 Mon Sep 17 00:00:00 2001 From: fselmo Date: Wed, 12 Nov 2025 15:43:40 -0700 Subject: [PATCH 023/154] fix(spec-specs): Early static check for SSTORE before any reads --- .../amsterdam/vm/instructions/storage.py | 6 +- .../test_block_access_lists_opcodes.py | 73 +++++++++++++++++++ .../test_cases.md | 1 + 3 files changed, 78 insertions(+), 2 deletions(-) diff --git a/src/ethereum/forks/amsterdam/vm/instructions/storage.py b/src/ethereum/forks/amsterdam/vm/instructions/storage.py index 1709dab5d7..db1536a707 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/storage.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/storage.py @@ -90,6 +90,10 @@ def sstore(evm: Evm) -> None: if evm.gas_left <= GAS_CALL_STIPEND: raise OutOfGasError + # Check static context before accessing storage + if evm.message.is_static: + raise WriteInStaticContext + state = evm.message.block_env.state original_value = get_storage_original( state, evm.message.current_target, key @@ -129,8 +133,6 @@ def sstore(evm: Evm) -> None: evm.accessed_storage_keys.add((evm.message.current_target, key)) charge_gas(evm, gas_cost) - if evm.message.is_static: - raise WriteInStaticContext # REFUND COUNTER if current_value != new_value: diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py index 207a87d9a8..de354bbeb1 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py @@ -837,3 +837,76 @@ def test_bal_create_oog_code_deposit( contract_address: Account.NONEXISTENT, }, ) + + +def test_bal_sstore_static_context( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL does not record storage reads when SSTORE fails in 
static + context. + + Contract A makes STATICCALL to Contract B. Contract B attempts SSTORE, + which should fail immediately without recording any storage reads. + """ + alice = pre.fund_eoa() + + contract_b = pre.deploy_contract(code=Op.SSTORE(0, 5)) + + # Contract A makes STATICCALL to Contract B + # The STATICCALL will fail because B tries SSTORE in static context + # But contract_a continues and writes to its own storage + contract_a = pre.deploy_contract( + code=Op.STATICCALL( + gas=1_000_000, + address=contract_b, + args_offset=0, + args_size=0, + ret_offset=0, + ret_size=0, + ) + + Op.POP # pop the return value (0 = failure) + + Op.SSTORE(0, 1) # this should succeed (non-static context) + ) + + tx = Transaction( + sender=alice, + to=contract_a, + gas_limit=2_000_000, + ) + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(tx_index=1, post_nonce=1) + ], + ), + contract_a: BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0x00, + slot_changes=[ + BalStorageChange( + tx_index=1, post_value=1 + ), + ], + ), + ], + ), + contract_b: BalAccountExpectation.empty(), + } + ), + ) + ], + post={ + contract_a: Account(storage={0: 1}), + contract_b: Account(storage={0: 0}), # SSTORE failed + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index c33f56eb43..1f329b9a3b 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -41,6 +41,7 @@ | `test_bal_7702_delegated_via_call_opcode` | Ensure BAL captures delegation target when a contract uses *CALL opcodes to call a delegated account | Pre-deployed contract `Alice` delegated to `Oracle`. `Caller` contract uses CALL/CALLCODE/DELEGATECALL/STATICCALL to call `Alice`. Bob sends transaction to `Caller`. | BAL **MUST** include Bob: `nonce_changes`. `Caller`: empty changes (account access). `Alice`: empty changes (account access - delegated account being called). `Oracle`: empty changes (delegation target access). | ✅ Completed | | `test_bal_7702_null_address_delegation` | Ensure BAL does not record spurious code changes for net-zero code operations | Alice sends transaction with authorization delegating to NULL_ADDRESS (0x0), which sets code to `b""` on an account that already has `b""` code. Transaction sends 10 wei to Bob. | BAL **MUST** include Alice with `nonce_changes` (tx nonce + auth nonce increment) but **MUST NOT** include `code_changes` (setting `b"" -> b""` is net-zero and filtered out). Bob: `balance_changes` (receives 10 wei). This ensures net-zero code change is not recorded. | `test_bal_sstore_and_oog` | Ensure BAL handles OOG during SSTORE execution at various gas boundaries (EIP-2200 stipend and implicit SLOAD) | Alice calls contract that attempts `SSTORE` to cold slot `0x01`. Parameterized: (1) OOG at EIP-2200 stipend check (2300 gas after PUSH opcodes) - fails before implicit SLOAD, (2) OOG at stipend + 1 (2301 gas) - passes stipend check but fails after implicit SLOAD, (3) OOG at exact gas - 1, (4) Successful SSTORE with exact gas. | For case (1): BAL **MUST NOT** include slot `0x01` in `storage_reads` or `storage_changes` (fails before implicit SLOAD). 
For cases (2) and (3): BAL **MUST** include slot `0x01` in `storage_reads` (implicit SLOAD occurred) but **MUST NOT** include in `storage_changes` (write didn't complete). For case (4): BAL **MUST** include slot `0x01` in `storage_changes` only (successful write; read is filtered by builder). | ✅ Completed | +| `test_bal_sstore_static_context` | Ensure BAL does not capture spurious storage access when SSTORE fails in static context | Alice calls contract with `STATICCALL` which attempts `SSTORE` to slot `0x01`. SSTORE must fail before any storage access occurs. | BAL **MUST NOT** include slot `0x01` in `storage_reads` or `storage_changes`. Static context check happens before storage access, preventing spurious reads. Alice has `nonce_changes` and `balance_changes` (gas cost). Target contract included with empty changes. | ✅ Completed | | `test_bal_sload_and_oog` | Ensure BAL handles OOG during SLOAD execution correctly | Alice calls contract that attempts `SLOAD` from cold slot `0x01`. Parameterized: (1) OOG at SLOAD opcode (insufficient gas), (2) Successful SLOAD execution. | For OOG case: BAL **MUST NOT** contain slot `0x01` in `storage_reads` since storage wasn't accessed. For success case: BAL **MUST** contain slot `0x01` in `storage_reads`. | ✅ Completed | | `test_bal_balance_and_oog` | Ensure BAL handles OOG during BALANCE opcode execution correctly | Alice calls contract that attempts `BALANCE` opcode on cold target account. Parameterized: (1) OOG at BALANCE opcode (insufficient gas), (2) Successful BALANCE execution. | For OOG case: BAL **MUST NOT** include target account (wasn't accessed). For success case: BAL **MUST** include target account in `account_changes`. | ✅ Completed | | `test_bal_extcodesize_and_oog` | Ensure BAL handles OOG during EXTCODESIZE opcode execution correctly | Alice calls contract that attempts `EXTCODESIZE` opcode on cold target contract. Parameterized: (1) OOG at EXTCODESIZE opcode (insufficient gas), (2) Successful EXTCODESIZE execution. | For OOG case: BAL **MUST NOT** include target contract (wasn't accessed). For success case: BAL **MUST** include target contract in `account_changes`. 
| ✅ Completed | From eb3ac579c3d677d8be860a79d01b845d502be774 Mon Sep 17 00:00:00 2001 From: fselmo Date: Wed, 12 Nov 2025 18:50:27 -0700 Subject: [PATCH 024/154] fix(spec-specs): Track storage writes more appropriately wrt index --- .../amsterdam/block_access_lists/builder.py | 7 +- src/ethereum/forks/amsterdam/state_tracker.py | 25 +++---- .../test_block_access_lists.py | 74 +++++++++++++++++++ .../test_cases.md | 1 + 4 files changed, 90 insertions(+), 17 deletions(-) diff --git a/src/ethereum/forks/amsterdam/block_access_lists/builder.py b/src/ethereum/forks/amsterdam/block_access_lists/builder.py index 3e1870b0b5..4ed7aa767c 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/builder.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/builder.py @@ -484,10 +484,11 @@ def build_block_access_list( add_storage_read(builder, address, slot) # Add all storage writes, filtering net-zero changes - for (address, slot), ( + for ( + address, + slot, block_access_index, - value, - ) in state_changes.storage_writes.items(): + ), value in state_changes.storage_writes.items(): # Check if this is a net-zero change by comparing with pre-state if (address, slot) in state_changes.pre_storage: if state_changes.pre_storage[(address, slot)] == value: diff --git a/src/ethereum/forks/amsterdam/state_tracker.py b/src/ethereum/forks/amsterdam/state_tracker.py index 42203d5e8d..0d875be3c0 100644 --- a/src/ethereum/forks/amsterdam/state_tracker.py +++ b/src/ethereum/forks/amsterdam/state_tracker.py @@ -46,9 +46,9 @@ class StateChanges: touched_addresses: Set[Address] = field(default_factory=set) storage_reads: Set[Tuple[Address, Bytes32]] = field(default_factory=set) - storage_writes: Dict[ - Tuple[Address, Bytes32], Tuple[BlockAccessIndex, U256] - ] = field(default_factory=dict) + storage_writes: Dict[Tuple[Address, Bytes32, BlockAccessIndex], U256] = ( + field(default_factory=dict) + ) balance_changes: Dict[Tuple[Address, BlockAccessIndex], U256] = field( default_factory=dict @@ -110,9 +110,8 @@ def track_storage_write( self, address: Address, key: Bytes32, value: U256 ) -> None: """Track a storage write operation with block access index.""" - self.storage_writes[(address, key)] = ( - self.get_block_access_index(), - value, + self.storage_writes[(address, key, self.get_block_access_index())] = ( + value ) def track_balance_change( @@ -173,17 +172,17 @@ def merge_on_success(self) -> None: # Merge storage operations, filtering noop writes self.parent.storage_reads.update(self.storage_reads) - for (addr, key), (idx, value) in self.storage_writes.items(): + for (addr, key, idx), value in self.storage_writes.items(): # Only merge if value actually changed from pre-state if (addr, key) in self.pre_storage: if self.pre_storage[(addr, key)] != value: - self.parent.storage_writes[(addr, key)] = (idx, value) + self.parent.storage_writes[(addr, key, idx)] = value # If equal, it's a noop write - convert to read only else: self.parent.storage_reads.add((addr, key)) else: # No pre-state captured, merge as-is - self.parent.storage_writes[(addr, key)] = (idx, value) + self.parent.storage_writes[(addr, key, idx)] = value # Merge balance changes - filter net-zero changes # balance_changes keyed by (address, index) @@ -240,7 +239,7 @@ def merge_on_failure(self) -> None: self.parent.storage_reads.update(self.storage_reads) # Convert writes to reads (failed writes still accessed the slots) - for address, key in self.storage_writes.keys(): + for address, key, _idx in self.storage_writes.keys(): 
self.parent.storage_reads.add((address, key)) # Note: balance_changes, nonce_changes, and code_changes are NOT @@ -284,11 +283,9 @@ def handle_in_transaction_selfdestruct( del state_changes.code_changes[(address, current_block_access_index)] # Convert storage writes from current transaction to reads - for (addr, key), (idx, _value) in list( - state_changes.storage_writes.items() - ): + for addr, key, idx in list(state_changes.storage_writes.keys()): if addr == address and idx == current_block_access_index: - del state_changes.storage_writes[(addr, key)] + del state_changes.storage_writes[(addr, key, idx)] state_changes.storage_reads.add((addr, key)) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py index 865f79827f..2fda529643 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py @@ -2014,3 +2014,77 @@ def test_bal_multiple_balance_changes_same_account( charlie: Account(balance=spend_amount), }, ) + + +def test_bal_multiple_storage_writes_same_slot( + blockchain_test: BlockchainTestFiller, + pre: Alloc, +) -> None: + """ + Test that BAL tracks multiple writes to the same storage slot across + transactions in the same block. + + Setup: + - Deploy a contract that increments storage slot 1 on each call + - Alice calls the contract 3 times in the same block + - Each call increments slot 1: 0 -> 1 -> 2 -> 3 + + Expected BAL: + - Contract should have 3 storage_changes for slot 1: + * txIndex 1: postValue = 1 + * txIndex 2: postValue = 2 + * txIndex 3: postValue = 3 + """ + alice = pre.fund_eoa(amount=10**18) + + increment_code = Op.SSTORE(1, Op.ADD(Op.SLOAD(1), 1)) + contract = pre.deploy_contract(code=increment_code) + + tx1 = Transaction(sender=alice, to=contract, gas_limit=200_000) + tx2 = Transaction(sender=alice, to=contract, gas_limit=200_000) + tx3 = Transaction(sender=alice, to=contract, gas_limit=200_000) + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx1, tx2, tx3], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(tx_index=1, post_nonce=1), + BalNonceChange(tx_index=2, post_nonce=2), + BalNonceChange(tx_index=3, post_nonce=3), + ], + ), + contract: BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=1, + slot_changes=[ + BalStorageChange( + tx_index=1, post_value=1 + ), + BalStorageChange( + tx_index=2, post_value=2 + ), + BalStorageChange( + tx_index=3, post_value=3 + ), + ], + ), + ], + storage_reads=[], + balance_changes=[], + code_changes=[], + ), + } + ), + ) + ], + post={ + alice: Account(nonce=3), + contract: Account(storage={1: 3}), + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 1f329b9a3b..bc45ef2d21 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -51,6 +51,7 @@ | `test_bal_oog_7702_delegated_cold_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when both accounts are cold | Alice calls cold delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (first cold load succeeds) but **MUST NOT** 
include `TargetContract` (second cold load fails due to OOG) | 🟡 Planned | | `test_bal_oog_7702_delegated_warm_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when first account is warm, second is cold | Alice calls warm delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (warm load succeeds) but **MUST NOT** include `TargetContract` (cold load fails due to OOG) | 🟡 Planned | | `test_bal_multiple_balance_changes_same_account` | Ensure BAL tracks multiple balance changes to same account across transactions | Alice funds Bob (starts at 0) in tx0 with exact amount needed. Bob spends everything in tx1 to Charlie. Bob's balance: 0 → funding_amount → 0 | BAL **MUST** include Bob with two `balance_changes`: one at txIndex=1 (receives funds) and one at txIndex=2 (balance returns to 0). This tests balance tracking across two transactions. | ✅ Completed | +| `test_bal_multiple_storage_writes_same_slot` | Ensure BAL tracks multiple writes to same storage slot across transactions | Alice calls contract 3 times in same block. Contract increments slot 1 on each call: 0 → 1 → 2 → 3 | BAL **MUST** include contract with slot 1 having three `slot_changes`: txIndex=1 (value 1), txIndex=2 (value 2), txIndex=3 (value 3). Each transaction's write must be recorded separately. | ✅ Completed | | `test_bal_create_oog_code_deposit` | Ensure BAL correctly handles CREATE OOG during code deposit | Alice calls factory contract that executes CREATE with init code returning 10,000 bytes. Transaction has insufficient gas for code deposit. Factory nonce increments, CREATE returns 0 and stores in slot 1. | BAL **MUST** include Alice with `nonce_changes`. Factory with `nonce_changes` (incremented by CREATE) and `storage_changes` (slot 1 = 0). Contract address with empty changes (read during collision check). **MUST NOT** include nonce or code changes for contract address (rolled back on OOG). Contract address **MUST NOT** exist in post-state. | ✅ Completed | | `test_bal_invalid_missing_nonce` | Verify clients reject blocks with BAL missing required nonce changes | Alice sends transaction to Bob; BAL modifier removes Alice's nonce change entry | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate that all sender accounts have nonce changes recorded. | ✅ Completed | | `test_bal_invalid_nonce_value` | Verify clients reject blocks with incorrect nonce values in BAL | Alice sends transaction to Bob; BAL modifier changes Alice's nonce to incorrect value | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate nonce values match actual state transitions. 
| ✅ Completed | From e668cc363f16543077668cb6154769f0f27354ba Mon Sep 17 00:00:00 2001 From: fselmo Date: Thu, 13 Nov 2025 15:09:39 -0600 Subject: [PATCH 025/154] fix(spec-specs): Use functions, not methods; fix create revert --- src/ethereum/forks/amsterdam/fork.py | 46 +- src/ethereum/forks/amsterdam/state.py | 36 +- src/ethereum/forks/amsterdam/state_tracker.py | 593 ++++++++++++------ src/ethereum/forks/amsterdam/vm/__init__.py | 9 +- .../forks/amsterdam/vm/eoa_delegation.py | 11 +- .../amsterdam/vm/instructions/environment.py | 9 +- .../amsterdam/vm/instructions/storage.py | 21 +- .../forks/amsterdam/vm/instructions/system.py | 36 +- .../forks/amsterdam/vm/interpreter.py | 90 +-- .../evm_tools/t8n/__init__.py | 6 +- .../test_block_access_lists_opcodes.py | 59 ++ .../test_cases.md | 1 + 12 files changed, 597 insertions(+), 320 deletions(-) diff --git a/src/ethereum/forks/amsterdam/fork.py b/src/ethereum/forks/amsterdam/fork.py index 25968e4598..dcc46d1c4f 100644 --- a/src/ethereum/forks/amsterdam/fork.py +++ b/src/ethereum/forks/amsterdam/fork.py @@ -65,9 +65,15 @@ state_root, ) from .state_tracker import ( + capture_pre_balance, create_child_frame, + get_block_access_index, handle_in_transaction_selfdestruct, + increment_block_access_index, + merge_on_success, normalize_balance_changes_for_transaction, + track_address, + track_balance_change, ) from .transactions import ( AccessListTransaction, @@ -800,7 +806,7 @@ def apply_body( # EIP-7928: Increment block frame to post-execution index # After N transactions, block frame is at index N # Post-execution operations (withdrawals, etc.) use index N+1 - block_env.block_state_changes.increment_index() + increment_block_access_index(block_env.block_state_changes) process_withdrawals(block_env, block_output, withdrawals) @@ -893,16 +899,15 @@ def process_transaction( """ # EIP-7928: Create a transaction-level StateChanges frame # The frame will read the current block_access_index from the block frame - # Before transaction starts, increment block index so it's ready - block_env.block_state_changes.increment_index() + increment_block_access_index(block_env.block_state_changes) tx_state_changes = create_child_frame(block_env.block_state_changes) coinbase_pre_balance = get_account( block_env.state, block_env.coinbase ).balance - tx_state_changes.track_address(block_env.coinbase) - tx_state_changes.capture_pre_balance( - block_env.coinbase, coinbase_pre_balance + track_address(tx_state_changes, block_env.coinbase) + capture_pre_balance( + tx_state_changes, block_env.coinbase, coinbase_pre_balance ) trie_set( @@ -1012,15 +1017,16 @@ def process_transaction( block_env.state, sender ).balance + U256(gas_refund_amount) set_account_balance( - block_env.state, sender, sender_balance_after_refund, tx_state_changes + block_env.state, + sender, + sender_balance_after_refund, + tx_state_changes, ) - # transfer miner fees coinbase_balance_after_mining_fee = get_account( block_env.state, block_env.coinbase ).balance + U256(transaction_fee) - # Always set coinbase balance to ensure proper tracking set_account_balance( block_env.state, block_env.coinbase, @@ -1055,12 +1061,13 @@ def process_transaction( # into block frame. 
normalize_balance_changes_for_transaction( tx_state_changes, - BlockAccessIndex(tx_state_changes.get_block_access_index()), + BlockAccessIndex( + get_block_access_index(block_env.block_state_changes) + ), block_env.state, ) - # Merge transaction frame into block frame - tx_state_changes.merge_on_success() + merge_on_success(tx_state_changes) # EIP-7928: Handle in-transaction self-destruct AFTER merge # Convert storage writes to reads and remove nonce/code changes @@ -1070,7 +1077,7 @@ def process_transaction( block_env.block_state_changes, address, BlockAccessIndex( - block_env.block_state_changes.get_block_access_index() + get_block_access_index(block_env.block_state_changes) ), ) destroy_account(block_env.state, address) @@ -1087,8 +1094,10 @@ def process_withdrawals( withdrawal_addresses = {wd.address for wd in withdrawals} for address in withdrawal_addresses: pre_balance = get_account(block_env.state, address).balance - block_env.block_state_changes.track_address(address) - block_env.block_state_changes.capture_pre_balance(address, pre_balance) + track_address(block_env.block_state_changes, address) + capture_pre_balance( + block_env.block_state_changes, address, pre_balance + ) def increase_recipient_balance(recipient: Account) -> None: recipient.balance += wd.amount * U256(10**9) @@ -1102,10 +1111,9 @@ def increase_recipient_balance(recipient: Account) -> None: modify_state(block_env.state, wd.address, increase_recipient_balance) - # Track balance change for BAL (withdrawals use post-execution index) new_balance = get_account(block_env.state, wd.address).balance - block_env.block_state_changes.track_balance_change( - wd.address, new_balance + track_balance_change( + block_env.block_state_changes, wd.address, new_balance ) if account_exists_and_is_empty(block_env.state, wd.address): @@ -1117,7 +1125,7 @@ def increase_recipient_balance(recipient: Account) -> None: normalize_balance_changes_for_transaction( block_env.block_state_changes, BlockAccessIndex( - block_env.block_state_changes.get_block_access_index() + get_block_access_index(block_env.block_state_changes) ), block_env.state, ) diff --git a/src/ethereum/forks/amsterdam/state.py b/src/ethereum/forks/amsterdam/state.py index 8f58b6e815..af384ec4df 100644 --- a/src/ethereum/forks/amsterdam/state.py +++ b/src/ethereum/forks/amsterdam/state.py @@ -24,7 +24,14 @@ from ethereum_types.numeric import U64, U256, Uint from .fork_types import EMPTY_ACCOUNT, Account, Address, Root -from .state_tracker import StateChanges +from .state_tracker import ( + StateChanges, + capture_pre_balance, + track_address, + track_balance_change, + track_code_change, + track_nonce_change, +) from .trie import EMPTY_TRIE_ROOT, Trie, copy_trie, root, trie_get, trie_set if TYPE_CHECKING: @@ -518,10 +525,10 @@ def move_ether( sender_balance = get_account(state, sender_address).balance recipient_balance = get_account(state, recipient_address).balance - state_changes.track_address(sender_address) - state_changes.capture_pre_balance(sender_address, sender_balance) - state_changes.track_address(recipient_address) - state_changes.capture_pre_balance(recipient_address, recipient_balance) + track_address(state_changes, sender_address) + capture_pre_balance(state_changes, sender_address, sender_balance) + track_address(state_changes, recipient_address) + capture_pre_balance(state_changes, recipient_address, recipient_balance) def reduce_sender_balance(sender: Account) -> None: if sender.balance < amount: @@ -537,11 +544,11 @@ def increase_recipient_balance(recipient: 
Account) -> None: sender_new_balance = get_account(state, sender_address).balance recipient_new_balance = get_account(state, recipient_address).balance - state_changes.track_balance_change( - sender_address, U256(sender_new_balance) + track_balance_change( + state_changes, sender_address, U256(sender_new_balance) ) - state_changes.track_balance_change( - recipient_address, U256(recipient_new_balance) + track_balance_change( + state_changes, recipient_address, U256(recipient_new_balance) ) @@ -571,14 +578,14 @@ def set_account_balance( """ current_balance = get_account(state, address).balance - state_changes.track_address(address) - state_changes.capture_pre_balance(address, current_balance) + track_address(state_changes, address) + capture_pre_balance(state_changes, address, current_balance) def set_balance(account: Account) -> None: account.balance = amount modify_state(state, address, set_balance) - state_changes.track_balance_change(address, amount) + track_balance_change(state_changes, address, amount) def increment_nonce( @@ -607,9 +614,8 @@ def increase_nonce(sender: Account) -> None: modify_state(state, address, increase_nonce) - # Track nonce change for Block Access List (EIP-7928) account = get_account(state, address) - state_changes.track_nonce_change(address, U64(account.nonce)) + track_nonce_change(state_changes, address, U64(account.nonce)) def set_code( @@ -641,7 +647,7 @@ def write_code(sender: Account) -> None: sender.code = code modify_state(state, address, write_code) - state_changes.track_code_change(address, code) + track_code_change(state_changes, address, code) def get_storage_original(state: State, address: Address, key: Bytes32) -> U256: diff --git a/src/ethereum/forks/amsterdam/state_tracker.py b/src/ethereum/forks/amsterdam/state_tracker.py index 0d875be3c0..2e9385d272 100644 --- a/src/ethereum/forks/amsterdam/state_tracker.py +++ b/src/ethereum/forks/amsterdam/state_tracker.py @@ -12,7 +12,7 @@ Call Frame: Child of transaction/call, lifetime = single message Block Access Index: 0=pre-exec, 1..N=transactions, N+1=post-exec -Stored in root frame, accessed by walking parent chain. +Stored in root frame, passed explicitly to operations. Pre-State Tracking: Values captured before modifications to enable net-zero filtering. @@ -38,7 +38,8 @@ class StateChanges: """ Tracks state changes within a single execution frame. - Frames form a hierarchy and merge changes upward on completion. + Frames form a hierarchy: Block → Transaction → Call frames. + Each frame holds a reference to its parent for upward traversal. 
""" parent: Optional["StateChanges"] = None @@ -68,182 +69,409 @@ class StateChanges: ) pre_code: Dict[Address, Bytes] = field(default_factory=dict) - def get_block_access_index(self) -> BlockAccessIndex: - """Get current block access index by walking to root.""" - current = self - while current.parent is not None: - current = current.parent - return current._block_access_index - - def capture_pre_balance(self, address: Address, balance: U256) -> None: - """Capture pre-balance (first-write-wins for net-zero filtering).""" - if address not in self.pre_balances: - self.pre_balances[address] = balance - - def capture_pre_nonce(self, address: Address, nonce: U64) -> None: - """Capture pre-nonce (first-write-wins).""" - if address not in self.pre_nonces: - self.pre_nonces[address] = nonce - - def capture_pre_storage( - self, address: Address, key: Bytes32, value: U256 - ) -> None: - """Capture pre-storage (first-write-wins for noop filtering).""" - slot = (address, key) - if slot not in self.pre_storage: - self.pre_storage[slot] = value - - def capture_pre_code(self, address: Address, code: Bytes) -> None: - """Capture pre-code (first-write-wins).""" - if address not in self.pre_code: - self.pre_code[address] = code - - def track_address(self, address: Address) -> None: - """Track that an address was accessed.""" - self.touched_addresses.add(address) - - def track_storage_read(self, address: Address, key: Bytes32) -> None: - """Track a storage read operation.""" - self.storage_reads.add((address, key)) - - def track_storage_write( - self, address: Address, key: Bytes32, value: U256 - ) -> None: - """Track a storage write operation with block access index.""" - self.storage_writes[(address, key, self.get_block_access_index())] = ( - value - ) - - def track_balance_change( - self, address: Address, new_balance: U256 - ) -> None: - """Track balance change keyed by (address, index).""" - self.balance_changes[(address, self.get_block_access_index())] = ( - new_balance - ) - - def track_nonce_change(self, address: Address, new_nonce: U64) -> None: - """Track a nonce change.""" - self.nonce_changes.add( - (address, self.get_block_access_index(), new_nonce) - ) - - def track_code_change(self, address: Address, new_code: Bytes) -> None: - """Track a code change.""" - self.code_changes[(address, self.get_block_access_index())] = new_code - - def increment_index(self) -> None: - """Increment block access index by walking to root.""" - root = self - while root.parent is not None: - root = root.parent - root._block_access_index = BlockAccessIndex( - root._block_access_index + Uint(1) - ) - - def merge_on_success(self) -> None: - """ - Merge this frame's changes into parent on successful completion. - - Merges all tracked changes (reads and writes) from this frame - into the parent frame. Filters out net-zero changes based on - captured pre-state values by comparing initial vs final values. 
- """ - if self.parent is None: - return - - # Merge address accesses - self.parent.touched_addresses.update(self.touched_addresses) - - # Merge pre-state captures for transaction-level normalization - # Only if parent doesn't have value (first capture wins) - for addr, balance in self.pre_balances.items(): - if addr not in self.parent.pre_balances: - self.parent.pre_balances[addr] = balance - for addr, nonce in self.pre_nonces.items(): - if addr not in self.parent.pre_nonces: - self.parent.pre_nonces[addr] = nonce - for slot, value in self.pre_storage.items(): - if slot not in self.parent.pre_storage: - self.parent.pre_storage[slot] = value - for addr, code in self.pre_code.items(): - if addr not in self.parent.pre_code: - self.parent.pre_code[addr] = code - - # Merge storage operations, filtering noop writes - self.parent.storage_reads.update(self.storage_reads) - for (addr, key, idx), value in self.storage_writes.items(): - # Only merge if value actually changed from pre-state - if (addr, key) in self.pre_storage: - if self.pre_storage[(addr, key)] != value: - self.parent.storage_writes[(addr, key, idx)] = value - # If equal, it's a noop write - convert to read only - else: - self.parent.storage_reads.add((addr, key)) - else: - # No pre-state captured, merge as-is - self.parent.storage_writes[(addr, key, idx)] = value - - # Merge balance changes - filter net-zero changes - # balance_changes keyed by (address, index) - for (addr, idx), final_balance in self.balance_changes.items(): - if addr in self.pre_balances: - if self.pre_balances[addr] != final_balance: - # Net change occurred - merge the final balance - self.parent.balance_changes[(addr, idx)] = final_balance - # else: Net-zero change - skip entirely - else: - # No pre-balance captured, merge as-is - self.parent.balance_changes[(addr, idx)] = final_balance - - # Merge nonce changes - keep only highest nonce per address - # Nonces are monotonically increasing, so just keep the max - address_final_nonces: Dict[Address, Tuple[BlockAccessIndex, U64]] = {} - for addr, idx, nonce in self.nonce_changes: - # Keep the highest nonce value for each address - if ( - addr not in address_final_nonces - or nonce > address_final_nonces[addr][1] - ): - address_final_nonces[addr] = (idx, nonce) - - # Merge final nonces (no net-zero filtering - nonces never decrease) - for addr, (idx, final_nonce) in address_final_nonces.items(): - self.parent.nonce_changes.add((addr, idx, final_nonce)) - - # Merge code changes - filter net-zero changes - # code_changes keyed by (address, index) - for (addr, idx), final_code in self.code_changes.items(): - if addr in self.pre_code: - if self.pre_code[addr] != final_code: - # Net change occurred - merge the final code - self.parent.code_changes[(addr, idx)] = final_code - # else: Net-zero change - skip entirely + +def get_block_frame(state_changes: StateChanges) -> StateChanges: + """ + Walk to block-level frame. + + Parameters + ---------- + state_changes : + Any state changes frame. + + Returns + ------- + block_frame : StateChanges + The block-level frame. + + """ + block_frame = state_changes + while block_frame.parent is not None: + block_frame = block_frame.parent + return block_frame + + +def get_block_access_index(root_frame: StateChanges) -> BlockAccessIndex: + """ + Get current block access index from root frame. + + Parameters + ---------- + root_frame : + The root (block-level) state changes frame. + + Returns + ------- + index : BlockAccessIndex + The current block access index. 
+ + """ + return root_frame._block_access_index + + +def increment_block_access_index(root_frame: StateChanges) -> None: + """ + Increment block access index in root frame. + + Parameters + ---------- + root_frame : + The root (block-level) state changes frame to increment. + + """ + root_frame._block_access_index = BlockAccessIndex( + root_frame._block_access_index + Uint(1) + ) + + +def capture_pre_balance( + state_changes: StateChanges, address: Address, balance: U256 +) -> None: + """ + Capture pre-balance (first-write-wins for net-zero filtering). + + Parameters + ---------- + state_changes : + The state changes frame. + address : + The address whose balance is being captured. + balance : + The balance value before modification. + + """ + if address not in state_changes.pre_balances: + state_changes.pre_balances[address] = balance + + +def capture_pre_nonce( + state_changes: StateChanges, address: Address, nonce: U64 +) -> None: + """ + Capture pre-nonce (first-write-wins). + + Parameters + ---------- + state_changes : + The state changes frame. + address : + The address whose nonce is being captured. + nonce : + The nonce value before modification. + + """ + if address not in state_changes.pre_nonces: + state_changes.pre_nonces[address] = nonce + + +def capture_pre_storage( + state_changes: StateChanges, address: Address, key: Bytes32, value: U256 +) -> None: + """ + Capture pre-storage (first-write-wins for noop filtering). + + Parameters + ---------- + state_changes : + The state changes frame. + address : + The address whose storage is being captured. + key : + The storage key. + value : + The storage value before modification. + + """ + slot = (address, key) + if slot not in state_changes.pre_storage: + state_changes.pre_storage[slot] = value + + +def capture_pre_code( + state_changes: StateChanges, address: Address, code: Bytes +) -> None: + """ + Capture pre-code (first-write-wins). + + Parameters + ---------- + state_changes : + The state changes frame. + address : + The address whose code is being captured. + code : + The code value before modification. + + """ + if address not in state_changes.pre_code: + state_changes.pre_code[address] = code + + +def track_address(state_changes: StateChanges, address: Address) -> None: + """ + Track that an address was accessed. + + Parameters + ---------- + state_changes : + The state changes frame. + address : + The address that was accessed. + + """ + state_changes.touched_addresses.add(address) + + +def track_storage_read( + state_changes: StateChanges, address: Address, key: Bytes32 +) -> None: + """ + Track a storage read operation. + + Parameters + ---------- + state_changes : + The state changes frame. + address : + The address whose storage was read. + key : + The storage key that was read. + + """ + state_changes.storage_reads.add((address, key)) + + +def track_storage_write( + state_changes: StateChanges, + address: Address, + key: Bytes32, + value: U256, +) -> None: + """ + Track a storage write operation with block access index. + + Parameters + ---------- + state_changes : + The state changes frame. + address : + The address whose storage was written. + key : + The storage key that was written. + value : + The new storage value. 
+ + """ + block_frame = get_block_frame(state_changes) + state_changes.storage_writes[ + (address, key, get_block_access_index(block_frame)) + ] = value + + +def track_balance_change( + state_changes: StateChanges, + address: Address, + new_balance: U256, +) -> None: + """ + Track balance change keyed by (address, index). + + Parameters + ---------- + state_changes : + The state changes frame. + address : + The address whose balance changed. + new_balance : + The new balance value. + + """ + block_frame = get_block_frame(state_changes) + state_changes.balance_changes[ + (address, get_block_access_index(block_frame)) + ] = new_balance + + +def track_nonce_change( + state_changes: StateChanges, + address: Address, + new_nonce: U64, +) -> None: + """ + Track a nonce change. + + Parameters + ---------- + state_changes : + The state changes frame. + address : + The address whose nonce changed. + new_nonce : + The new nonce value. + + """ + block_frame = get_block_frame(state_changes) + state_changes.nonce_changes.add( + (address, get_block_access_index(block_frame), new_nonce) + ) + + +def track_code_change( + state_changes: StateChanges, + address: Address, + new_code: Bytes, +) -> None: + """ + Track a code change. + + Parameters + ---------- + state_changes : + The state changes frame. + address : + The address whose code changed. + new_code : + The new code value. + + """ + block_frame = get_block_frame(state_changes) + state_changes.code_changes[ + (address, get_block_access_index(block_frame)) + ] = new_code + + +def merge_on_success(child_frame: StateChanges) -> None: + """ + Merge child frame's changes into parent on successful completion. + + Merges all tracked changes (reads and writes) from the child frame + into the parent frame. Filters out net-zero changes based on + captured pre-state values by comparing initial vs final values. + + Parameters + ---------- + child_frame : + The child frame being merged. 
+ + """ + assert child_frame.parent is not None + parent_frame = child_frame.parent + # Merge address accesses + parent_frame.touched_addresses.update(child_frame.touched_addresses) + + # Merge pre-state captures for transaction-level normalization + # Only if parent doesn't have value (first capture wins) + for addr, balance in child_frame.pre_balances.items(): + if addr not in parent_frame.pre_balances: + parent_frame.pre_balances[addr] = balance + for addr, nonce in child_frame.pre_nonces.items(): + if addr not in parent_frame.pre_nonces: + parent_frame.pre_nonces[addr] = nonce + for slot, value in child_frame.pre_storage.items(): + if slot not in parent_frame.pre_storage: + parent_frame.pre_storage[slot] = value + for addr, code in child_frame.pre_code.items(): + if addr not in parent_frame.pre_code: + parent_frame.pre_code[addr] = code + + # Merge storage operations, filtering noop writes + parent_frame.storage_reads.update(child_frame.storage_reads) + for (addr, key, idx), value in child_frame.storage_writes.items(): + # Only merge if value actually changed from pre-state + if (addr, key) in child_frame.pre_storage: + if child_frame.pre_storage[(addr, key)] != value: + parent_frame.storage_writes[(addr, key, idx)] = value + # If equal, it's a noop write - convert to read only else: - # No pre-code captured, merge as-is - self.parent.code_changes[(addr, idx)] = final_code + parent_frame.storage_reads.add((addr, key)) + else: + # No pre-state captured, merge as-is + parent_frame.storage_writes[(addr, key, idx)] = value + + # Merge balance changes - filter net-zero changes + # balance_changes keyed by (address, index) + for (addr, idx), final_balance in child_frame.balance_changes.items(): + if addr in child_frame.pre_balances: + if child_frame.pre_balances[addr] != final_balance: + # Net change occurred - merge the final balance + parent_frame.balance_changes[(addr, idx)] = final_balance + # else: Net-zero change - skip entirely + else: + # No pre-balance captured, merge as-is + parent_frame.balance_changes[(addr, idx)] = final_balance + + # Merge nonce changes - keep only highest nonce per address + # Nonces are monotonically increasing, so just keep the max + address_final_nonces: Dict[Address, Tuple[BlockAccessIndex, U64]] = {} + for addr, idx, nonce in child_frame.nonce_changes: + # Keep the highest nonce value for each address + if ( + addr not in address_final_nonces + or nonce > address_final_nonces[addr][1] + ): + address_final_nonces[addr] = (idx, nonce) + + # Merge final nonces (no net-zero filtering - nonces never decrease) + for addr, (idx, final_nonce) in address_final_nonces.items(): + parent_frame.nonce_changes.add((addr, idx, final_nonce)) + + # Merge code changes - filter net-zero changes + # code_changes keyed by (address, index) + for (addr, idx), final_code in child_frame.code_changes.items(): + if addr in child_frame.pre_code: + if child_frame.pre_code[addr] != final_code: + # Net change occurred - merge the final code + parent_frame.code_changes[(addr, idx)] = final_code + # else: Net-zero change - skip entirely + else: + # No pre-code captured, merge as-is + parent_frame.code_changes[(addr, idx)] = final_code + + +def merge_on_failure(child_frame: StateChanges) -> None: + """ + Merge child frame's changes into parent on failed completion. + + Merges only read operations from the child frame into the parent. + Write operations are discarded since the frame reverted. + This is called when a call frame fails/reverts. 
+ + Parameters + ---------- + child_frame : + The failed child frame. + + """ + assert child_frame.parent is not None + parent_frame = child_frame.parent + # Only merge reads and address accesses on failure + parent_frame.touched_addresses.update(child_frame.touched_addresses) + parent_frame.storage_reads.update(child_frame.storage_reads) - def merge_on_failure(self) -> None: - """ - Merge this frame's changes into parent on failed completion. + # Convert writes to reads (failed writes still accessed the slots) + for address, key, _idx in child_frame.storage_writes.keys(): + parent_frame.storage_reads.add((address, key)) - Merges only read operations from this frame into the parent. - Write operations are discarded since the frame reverted. - This is called when a call frame fails/reverts. - """ - if self.parent is None: - return + # Note: balance_changes, nonce_changes, and code_changes are NOT + # merged on failure - they are discarded - # Only merge reads and address accesses on failure - self.parent.touched_addresses.update(self.touched_addresses) - self.parent.storage_reads.update(self.storage_reads) - # Convert writes to reads (failed writes still accessed the slots) - for address, key, _idx in self.storage_writes.keys(): - self.parent.storage_reads.add((address, key)) +def create_child_frame(parent: StateChanges) -> StateChanges: + """ + Create a child frame for nested execution. + + Parameters + ---------- + parent : + The parent frame. + + Returns + ------- + child : StateChanges + A new child frame with parent reference set. - # Note: balance_changes, nonce_changes, and code_changes are NOT - # merged on failure - they are discarded + """ + return StateChanges(parent=parent) def handle_in_transaction_selfdestruct( @@ -336,24 +564,3 @@ def normalize_balance_changes_for_transaction( del block_frame.balance_changes[ (addr, current_block_access_index) ] - - -def create_child_frame(parent: StateChanges) -> StateChanges: - """ - Create a child frame for nested execution. - - The child frame will dynamically read the block_access_index from - the root (block) frame, ensuring all frames see the same current index. - - Parameters - ---------- - parent : StateChanges - The parent frame. - - Returns - ------- - child : StateChanges - A new child frame with parent link. 
- - """ - return StateChanges(parent=parent) diff --git a/src/ethereum/forks/amsterdam/vm/__init__.py b/src/ethereum/forks/amsterdam/vm/__init__.py index 04b74eee9e..26b7e99e45 100644 --- a/src/ethereum/forks/amsterdam/vm/__init__.py +++ b/src/ethereum/forks/amsterdam/vm/__init__.py @@ -25,7 +25,7 @@ from ..blocks import Log, Receipt, Withdrawal from ..fork_types import Address, Authorization, VersionedHash from ..state import State, TransientStorage -from ..state_tracker import StateChanges +from ..state_tracker import StateChanges, merge_on_failure, merge_on_success from ..transactions import LegacyTransaction from ..trie import Trie @@ -187,8 +187,7 @@ def incorporate_child_on_success(evm: Evm, child_evm: Evm) -> None: evm.accessed_addresses.update(child_evm.accessed_addresses) evm.accessed_storage_keys.update(child_evm.accessed_storage_keys) - # Merge state changes from successful child frame (EIP-7928) - child_evm.state_changes.merge_on_success() + merge_on_success(child_evm.state_changes) def incorporate_child_on_error(evm: Evm, child_evm: Evm) -> None: @@ -205,6 +204,4 @@ def incorporate_child_on_error(evm: Evm, child_evm: Evm) -> None: """ evm.gas_left += child_evm.gas_left - # Merge state changes from failed child frame (EIP-7928) - # Only reads are merged, writes are discarded - child_evm.state_changes.merge_on_failure() + merge_on_failure(child_evm.state_changes) diff --git a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py index bce49462f2..cd1f24a70c 100644 --- a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py +++ b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py @@ -15,6 +15,7 @@ # track_address_access removed - now using state_changes.track_address() from ..fork_types import Address, Authorization from ..state import account_exists, get_account, increment_nonce, set_code +from ..state_tracker import capture_pre_code, track_address from ..utils.hexadecimal import hex_to_address from ..vm.gas import GAS_COLD_ACCOUNT_ACCESS, GAS_WARM_ACCESS from . import Evm, Message @@ -175,12 +176,12 @@ def apply_delegation_tracking( The address delegated to. 
""" - evm.state_changes.track_address(original_address) + track_address(evm.state_changes, original_address) if delegated_address not in evm.accessed_addresses: evm.accessed_addresses.add(delegated_address) - evm.state_changes.track_address(delegated_address) + track_address(evm.state_changes, delegated_address) def access_delegation( @@ -239,7 +240,7 @@ def set_delegation(message: Message) -> U256: authority_account = get_account(state, authority) authority_code = authority_account.code - message.block_env.block_state_changes.track_address(authority) + track_address(message.block_env.block_state_changes, authority) if authority_code and not is_valid_delegation(authority_code): continue @@ -256,14 +257,12 @@ def set_delegation(message: Message) -> U256: else: code_to_set = EOA_DELEGATION_MARKER + auth.address - # Use transaction frame, not block frame (EIP-7928) state_changes = ( message.transaction_state_changes or message.block_env.block_state_changes ) - # Capture pre-code just before setting to enable no-op filtering - state_changes.capture_pre_code(authority, authority_code) + capture_pre_code(state_changes, authority, authority_code) set_code(state, authority, code_to_set, state_changes) increment_nonce(state, authority, state_changes) diff --git a/src/ethereum/forks/amsterdam/vm/instructions/environment.py b/src/ethereum/forks/amsterdam/vm/instructions/environment.py index dae8c20280..3d23b8f136 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/environment.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/environment.py @@ -20,6 +20,7 @@ # track_address_access removed - now using state_changes.track_address() from ...fork_types import EMPTY_ACCOUNT from ...state import get_account +from ...state_tracker import track_address from ...utils.address import to_address_masked from ...vm.memory import buffer_read, memory_write from .. import Evm @@ -83,7 +84,7 @@ def balance(evm: Evm) -> None: check_gas(evm, gas_cost) if is_cold_access: evm.accessed_addresses.add(address) - evm.state_changes.track_address(address) + track_address(evm.state_changes, address) charge_gas(evm, gas_cost) # OPERATION @@ -353,7 +354,7 @@ def extcodesize(evm: Evm) -> None: check_gas(evm, access_gas_cost) if is_cold_access: evm.accessed_addresses.add(address) - evm.state_changes.track_address(address) + track_address(evm.state_changes, address) charge_gas(evm, access_gas_cost) # OPERATION @@ -399,7 +400,7 @@ def extcodecopy(evm: Evm) -> None: check_gas(evm, total_gas_cost) if is_cold_access: evm.accessed_addresses.add(address) - evm.state_changes.track_address(address) + track_address(evm.state_changes, address) charge_gas(evm, total_gas_cost) # OPERATION @@ -493,7 +494,7 @@ def extcodehash(evm: Evm) -> None: check_gas(evm, access_gas_cost) if is_cold_access: evm.accessed_addresses.add(address) - evm.state_changes.track_address(address) + track_address(evm.state_changes, address) charge_gas(evm, access_gas_cost) # OPERATION diff --git a/src/ethereum/forks/amsterdam/vm/instructions/storage.py b/src/ethereum/forks/amsterdam/vm/instructions/storage.py index db1536a707..8edff23534 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/storage.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/storage.py @@ -20,6 +20,11 @@ set_storage, set_transient_storage, ) +from ...state_tracker import ( + capture_pre_storage, + track_storage_read, + track_storage_write, +) from .. 
import Evm from ..exceptions import OutOfGasError, WriteInStaticContext from ..gas import ( @@ -58,7 +63,8 @@ def sload(evm: Evm) -> None: check_gas(evm, gas_cost) if (evm.message.current_target, key) not in evm.accessed_storage_keys: evm.accessed_storage_keys.add((evm.message.current_target, key)) - evm.state_changes.track_storage_read( + track_storage_read( + evm.state_changes, evm.message.current_target, key, ) @@ -120,10 +126,11 @@ def sstore(evm: Evm) -> None: # Track storage access BEFORE checking gas (EIP-7928) # Even if we run out of gas, the access attempt should be tracked - evm.state_changes.capture_pre_storage( - evm.message.current_target, key, current_value + capture_pre_storage( + evm.state_changes, evm.message.current_target, key, current_value ) - evm.state_changes.track_storage_read( + track_storage_read( + evm.state_changes, evm.message.current_target, key, ) @@ -157,10 +164,8 @@ def sstore(evm: Evm) -> None: # OPERATION set_storage(state, evm.message.current_target, key, new_value) - evm.state_changes.track_storage_write( - evm.message.current_target, - key, - new_value, + track_storage_write( + evm.state_changes, evm.message.current_target, key, new_value ) # PROGRAM COUNTER diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index 136f194655..389adf6f54 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -27,6 +27,7 @@ move_ether, set_account_balance, ) +from ...state_tracker import capture_pre_balance, track_address from ...utils.address import ( compute_contract_address, compute_create2_contract_address, @@ -114,24 +115,16 @@ def generic_create( evm.accessed_addresses.add(contract_address) - evm.state_changes.track_address(contract_address) + track_address(evm.state_changes, contract_address) if account_has_code_or_nonce( state, contract_address ) or account_has_storage(state, contract_address): - increment_nonce( - state, - evm.message.current_target, - evm.state_changes, - ) + increment_nonce(state, evm.message.current_target, evm.state_changes) push(evm.stack, U256(0)) return - increment_nonce( - state, - evm.message.current_target, - evm.state_changes, - ) + increment_nonce(state, evm.message.current_target, evm.state_changes) child_message = Message( block_env=evm.message.block_env, @@ -430,7 +423,7 @@ def call(evm: Evm) -> None: check_gas(evm, message_call_gas.cost + extend_memory.cost) - evm.state_changes.track_address(to) + track_address(evm.state_changes, to) if is_delegated: apply_delegation_tracking(evm, original_address, final_address) @@ -527,7 +520,7 @@ def callcode(evm: Evm) -> None: check_gas(evm, message_call_gas.cost + extend_memory.cost) - evm.state_changes.track_address(original_address) + track_address(evm.state_changes, original_address) if is_delegated: apply_delegation_tracking(evm, original_address, final_address) @@ -543,8 +536,8 @@ def callcode(evm: Evm) -> None: # in parent frame. 
CALLCODE transfers value from/to current_target # (same address), affecting current storage context, not child frame if value != 0 and sender_balance >= value: - evm.state_changes.capture_pre_balance( - evm.message.current_target, sender_balance + capture_pre_balance( + evm.state_changes, evm.message.current_target, sender_balance ) if sender_balance < value: @@ -609,7 +602,7 @@ def selfdestruct(evm: Evm) -> None: if is_cold_access: evm.accessed_addresses.add(beneficiary) - evm.state_changes.track_address(beneficiary) + track_address(evm.state_changes, beneficiary) charge_gas(evm, gas_cost) @@ -629,13 +622,8 @@ def selfdestruct(evm: Evm) -> None: # register account for deletion only if it was created # in the same transaction if originator in evm.message.block_env.state.created_accounts: - # If beneficiary is the same as originator, then - # the ether is burnt. set_account_balance( - evm.message.block_env.state, - originator, - U256(0), - evm.state_changes, + evm.message.block_env.state, originator, U256(0), evm.state_changes ) evm.accounts_to_delete.add(originator) @@ -698,7 +686,7 @@ def delegatecall(evm: Evm) -> None: check_gas(evm, message_call_gas.cost + extend_memory.cost) - evm.state_changes.track_address(original_address) + track_address(evm.state_changes, original_address) if is_delegated: apply_delegation_tracking(evm, original_address, final_address) @@ -783,7 +771,7 @@ def staticcall(evm: Evm) -> None: check_gas(evm, message_call_gas.cost + extend_memory.cost) - evm.state_changes.track_address(to) + track_address(evm.state_changes, to) if is_delegated: apply_delegation_tracking(evm, original_address, final_address) diff --git a/src/ethereum/forks/amsterdam/vm/interpreter.py b/src/ethereum/forks/amsterdam/vm/interpreter.py index 9d6190b0e8..a63b745624 100644 --- a/src/ethereum/forks/amsterdam/vm/interpreter.py +++ b/src/ethereum/forks/amsterdam/vm/interpreter.py @@ -44,7 +44,13 @@ rollback_transaction, set_code, ) -from ..state_tracker import StateChanges, create_child_frame +from ..state_tracker import ( + StateChanges, + create_child_frame, + merge_on_failure, + merge_on_success, + track_address, +) from ..vm import Message from ..vm.eoa_delegation import get_delegated_code_address, set_delegation from ..vm.gas import GAS_CODE_DEPOSIT, charge_gas @@ -67,35 +73,39 @@ MAX_INIT_CODE_SIZE = 2 * MAX_CODE_SIZE -def create_call_frame(parent_frame: StateChanges) -> StateChanges: +def get_parent_frame(message: Message) -> StateChanges: """ - Create a child frame for call-level state tracking. + Get the appropriate parent frame for a message's state changes. - Used for contract calls (CALL, DELEGATECALL, STATICCALL, etc.) where - state changes need to be isolated and potentially reverted. + Frame selection logic: + - Nested calls: Parent EVM's frame + - Top-level calls: Transaction frame + - System transactions: Block frame Parameters ---------- - parent_frame : - The parent frame (transaction or another call frame). + message : + The message being processed. Returns ------- - call_frame : StateChanges - A new child frame linked to the parent. + parent_frame : StateChanges + The parent frame to use for creating child frames. 
""" - return create_child_frame(parent_frame) + if message.parent_evm is not None: + return message.parent_evm.state_changes + elif message.transaction_state_changes is not None: + return message.transaction_state_changes + else: + return message.block_env.block_state_changes def get_message_state_frame(message: Message) -> StateChanges: """ Determine and create the appropriate state tracking frame for a message. - Frame selection logic: - - Nested calls: Create child of parent EVM's frame - - Top-level calls: Create child of transaction frame - - System transactions: Use block frame directly (no isolation needed) + Creates a call frame as a child of the appropriate parent frame. Parameters ---------- @@ -108,16 +118,14 @@ def get_message_state_frame(message: Message) -> StateChanges: The state tracking frame to use for this message execution. """ - if message.parent_evm is not None: - # Nested call - create child of parent EVM's frame - return create_call_frame(message.parent_evm.state_changes) - elif message.transaction_state_changes is not None: - # Top-level transaction call - create child of transaction frame - # This ensures contract execution is isolated and can be reverted - return create_call_frame(message.transaction_state_changes) + parent_frame = get_parent_frame(message) + if ( + message.parent_evm is not None + or message.transaction_state_changes is not None + ): + return create_child_frame(parent_frame) else: - # System transaction - use block frame directly - return message.block_env.block_state_changes + return parent_frame @dataclass @@ -188,8 +196,8 @@ def process_message_call(message: Message) -> MessageCallOutput: message.code_address = delegated_address # EIP-7928: Track delegation target when loaded as call target - message.block_env.block_state_changes.track_address( - delegated_address + track_address( + message.block_env.block_state_changes, delegated_address ) evm = process_message(message) @@ -252,9 +260,8 @@ def process_create_message(message: Message) -> Evm: # added to SELFDESTRUCT by EIP-6780. mark_account_created(state, message.current_target) - # Create a temporary child frame for tracking changes that may be rolled - # back on OOG during code deposit. This frame is merged only on success. 
- create_frame = create_child_frame(message.block_env.block_state_changes) + parent_frame = get_parent_frame(message) + create_frame = create_child_frame(parent_frame) increment_nonce(state, message.current_target, create_frame) evm = process_message(message) @@ -270,24 +277,19 @@ def process_create_message(message: Message) -> Evm: raise OutOfGasError except ExceptionalHalt as error: rollback_transaction(state, transient_storage) - # Merge create_frame on failure - keeps reads, discards writes - # (address access is preserved, nonce change is discarded) - create_frame.merge_on_failure() + merge_on_failure(create_frame) evm.gas_left = Uint(0) evm.output = b"" evm.error = error else: set_code( - state, - message.current_target, - contract_code, - create_frame, + state, message.current_target, contract_code, create_frame ) commit_transaction(state, transient_storage) - # Merge create_frame on success - includes nonce and code changes - create_frame.merge_on_success() + merge_on_success(create_frame) else: rollback_transaction(state, transient_storage) + merge_on_failure(create_frame) return evm @@ -313,8 +315,10 @@ def process_message(message: Message) -> Evm: begin_transaction(state, transient_storage) + parent_frame = get_parent_frame(message) state_changes = get_message_state_frame(message) - state_changes.track_address(message.current_target) + + track_address(state_changes, message.current_target) if message.should_transfer_value and message.value != 0: move_ether( @@ -327,15 +331,13 @@ def process_message(message: Message) -> Evm: evm = execute_code(message, state_changes) if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error rollback_transaction(state, transient_storage) - # Merge call frame state changes into parent - evm.state_changes.merge_on_failure() + if state_changes != parent_frame: + merge_on_failure(evm.state_changes) else: commit_transaction(state, transient_storage) - # Merge call frame state changes into parent - evm.state_changes.merge_on_success() + if state_changes != parent_frame: + merge_on_success(evm.state_changes) return evm diff --git a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py index 2100983144..2d3f8b9e3c 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py @@ -413,7 +413,11 @@ def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: # Post-execution operations use index N+1 if self.fork.is_after_fork("amsterdam"): - block_env.block_state_changes.increment_index() + from ethereum.forks.amsterdam.state_tracker import ( + increment_block_access_index, + ) + + increment_block_access_index(block_env.block_state_changes) if not self.fork.proof_of_stake: if self.options.state_reward is None: diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py index de354bbeb1..b849137c58 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py @@ -910,3 +910,62 @@ def test_bal_sstore_static_context( contract_b: Account(storage={0: 0}), # SSTORE failed }, ) + + +def test_bal_create_contract_init_revert( + blockchain_test: BlockchainTestFiller, + pre: Alloc, +) -> None: + """ + Test that BAL does not include nonce/code changes when 
CREATE happens + in a call that then REVERTs. + """ + alice = pre.fund_eoa(amount=10**18) + + # Simple init code that returns STOP as deployed code + init_code_bytes = bytes(Op.RETURN(0, 1) + Op.STOP) + + # Factory that does CREATE then REVERTs + factory = pre.deploy_contract( + code=Op.MSTORE(0, Op.PUSH32(init_code_bytes)) + + Op.POP(Op.CREATE(0, 32 - len(init_code_bytes), len(init_code_bytes))) + + Op.REVERT(0, 0) + ) + + # A caller that CALLs factory to CREATE then REVERT + caller = pre.deploy_contract(code=Op.CALL(address=factory)) + + created_address = compute_create_address(address=factory, nonce=1) + + tx = Transaction( + sender=alice, + to=caller, + gas_limit=500_000, + ) + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(tx_index=1, post_nonce=1) + ], + ), + caller: BalAccountExpectation.empty(), + factory: BalAccountExpectation.empty(), + created_address: BalAccountExpectation.empty(), + } + ), + ) + ], + post={ + alice: Account(nonce=1), + caller: Account(nonce=1), + factory: Account(nonce=1), + created_address: Account.NONEXISTENT, + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index bc45ef2d21..4ca8a63ecd 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -52,6 +52,7 @@ | `test_bal_oog_7702_delegated_warm_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when first account is warm, second is cold | Alice calls warm delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (warm load succeeds) but **MUST NOT** include `TargetContract` (cold load fails due to OOG) | 🟡 Planned | | `test_bal_multiple_balance_changes_same_account` | Ensure BAL tracks multiple balance changes to same account across transactions | Alice funds Bob (starts at 0) in tx0 with exact amount needed. Bob spends everything in tx1 to Charlie. Bob's balance: 0 → funding_amount → 0 | BAL **MUST** include Bob with two `balance_changes`: one at txIndex=1 (receives funds) and one at txIndex=2 (balance returns to 0). This tests balance tracking across two transactions. | ✅ Completed | | `test_bal_multiple_storage_writes_same_slot` | Ensure BAL tracks multiple writes to same storage slot across transactions | Alice calls contract 3 times in same block. Contract increments slot 1 on each call: 0 → 1 → 2 → 3 | BAL **MUST** include contract with slot 1 having three `slot_changes`: txIndex=1 (value 1), txIndex=2 (value 2), txIndex=3 (value 3). Each transaction's write must be recorded separately. | ✅ Completed | +| `test_bal_create_contract_init_revert` | Ensure BAL correctly handles CREATE when parent call reverts | Caller calls factory, factory executes CREATE (succeeds), then factory REVERTs rolling back the CREATE | BAL **MUST** include Alice with `nonce_changes`. Caller and factory with no changes (reverted). Created contract address appears in BAL but **MUST NOT** have `nonce_changes` or `code_changes` (CREATE was rolled back). Contract address **MUST NOT** exist in post-state. 
| ✅ Completed | | `test_bal_create_oog_code_deposit` | Ensure BAL correctly handles CREATE OOG during code deposit | Alice calls factory contract that executes CREATE with init code returning 10,000 bytes. Transaction has insufficient gas for code deposit. Factory nonce increments, CREATE returns 0 and stores in slot 1. | BAL **MUST** include Alice with `nonce_changes`. Factory with `nonce_changes` (incremented by CREATE) and `storage_changes` (slot 1 = 0). Contract address with empty changes (read during collision check). **MUST NOT** include nonce or code changes for contract address (rolled back on OOG). Contract address **MUST NOT** exist in post-state. | ✅ Completed | | `test_bal_invalid_missing_nonce` | Verify clients reject blocks with BAL missing required nonce changes | Alice sends transaction to Bob; BAL modifier removes Alice's nonce change entry | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate that all sender accounts have nonce changes recorded. | ✅ Completed | | `test_bal_invalid_nonce_value` | Verify clients reject blocks with incorrect nonce values in BAL | Alice sends transaction to Bob; BAL modifier changes Alice's nonce to incorrect value | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate nonce values match actual state transitions. | ✅ Completed | From 37edacd6e2ec4a210d92f7f72f643696e7aacd7b Mon Sep 17 00:00:00 2001 From: fselmo Date: Fri, 14 Nov 2025 10:08:31 -0300 Subject: [PATCH 026/154] fix(spec-specs): Default code to b"" in tracker, skip empty setting --- src/ethereum/forks/amsterdam/state_tracker.py | 11 ++--- .../test_block_access_lists.py | 45 +++++++++++++++++++ .../test_cases.md | 1 + 3 files changed, 49 insertions(+), 8 deletions(-) diff --git a/src/ethereum/forks/amsterdam/state_tracker.py b/src/ethereum/forks/amsterdam/state_tracker.py index 2e9385d272..e157588dac 100644 --- a/src/ethereum/forks/amsterdam/state_tracker.py +++ b/src/ethereum/forks/amsterdam/state_tracker.py @@ -393,7 +393,6 @@ def merge_on_success(child_frame: StateChanges) -> None: for (addr, idx), final_balance in child_frame.balance_changes.items(): if addr in child_frame.pre_balances: if child_frame.pre_balances[addr] != final_balance: - # Net change occurred - merge the final balance parent_frame.balance_changes[(addr, idx)] = final_balance # else: Net-zero change - skip entirely else: @@ -418,14 +417,10 @@ def merge_on_success(child_frame: StateChanges) -> None: # Merge code changes - filter net-zero changes # code_changes keyed by (address, index) for (addr, idx), final_code in child_frame.code_changes.items(): - if addr in child_frame.pre_code: - if child_frame.pre_code[addr] != final_code: - # Net change occurred - merge the final code - parent_frame.code_changes[(addr, idx)] = final_code - # else: Net-zero change - skip entirely - else: - # No pre-code captured, merge as-is + pre_code = child_frame.pre_code.get(addr, b"") + if pre_code != final_code: parent_frame.code_changes[(addr, idx)] = final_code + # else: Net-zero change - skip entirely def merge_on_failure(child_frame: StateChanges) -> None: diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py index 2fda529643..336623c26c 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py @@ -2088,3 +2088,48 @@ 
def test_bal_multiple_storage_writes_same_slot( contract: Account(storage={1: 3}), }, ) + + +def test_bal_create_transaction_empty_code( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL does not record spurious code changes when a CREATE transaction + deploys empty code. + """ + alice = pre.fund_eoa() + contract_address = compute_create_address(address=alice, nonce=0) + + tx = Transaction( + sender=alice, + to=None, + data=b"", + gas_limit=100_000, + ) + + account_expectations = { + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + contract_address: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + code_changes=[], # ensure no code_changes recorded + ), + } + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + ], + post={ + alice: Account(nonce=1), + contract_address: Account(nonce=1, code=b""), + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 4ca8a63ecd..44211095a0 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -85,3 +85,4 @@ | `test_bal_nonexistent_account_access_value_transfer` | Ensure BAL captures non-existent account accessed via CALL/CALLCODE with value transfers | Alice calls `Oracle` contract which uses `CALL` or `CALLCODE` on non-existent account Bob. Tests both zero and positive value transfers. | BAL **MUST** include Alice with `nonce_changes`. For CALL with positive value: `Oracle` with `balance_changes` (loses value), Bob with `balance_changes` (receives value). For CALLCODE with value or zero value transfers: `Oracle` and Bob with empty changes (CALLCODE self-transfer = net zero). | ✅ Completed | | `test_bal_storage_write_read_same_frame` | Ensure BAL captures write precedence over read in same call frame (writes shadow reads) | Alice calls `Oracle` which writes (`SSTORE`) value `0x42` to slot `0x01`, then reads (`SLOAD`) from slot `0x01` in the same call frame | BAL **MUST** include `Oracle` with slot `0x01` in `storage_changes` showing final value `0x42`. Slot `0x01` **MUST NOT** appear in `storage_reads` (write shadows the subsequent read in same frame). | ✅ Completed | | `test_bal_storage_write_read_cross_frame` | Ensure BAL captures write precedence over read across call frames (writes shadow reads cross-frame) | Alice calls `Oracle`. First call reads slot `0x01` (sees initial value), writes `0x42` to slot `0x01`, then calls itself (via `CALL`, `DELEGATECALL`, or `CALLCODE`). Second call reads slot `0x01` (sees `0x42`) and exits. | BAL **MUST** include `Oracle` with slot `0x01` in `storage_changes` showing final value `0x42`. Slot `0x01` **MUST NOT** appear in `storage_reads` (write shadows both the read before it in same frame and the read in the recursive call). | ✅ Completed | +| `test_bal_create_transaction_empty_code` | Ensure BAL does not record spurious code changes for CREATE transaction deploying empty code | Alice sends CREATE transaction with empty initcode (deploys code `b""`). Contract address gets nonce = 1 and code = `b""`. | BAL **MUST** include Alice with `nonce_changes` and created contract with `nonce_changes` but **MUST NOT** include `code_changes` for contract. 
| ✅ Completed | From f9c58f32ad3fd5a74f1f2661e0b4f5dc6b4769c4 Mon Sep 17 00:00:00 2001 From: fselmo Date: Fri, 14 Nov 2025 12:06:16 -0300 Subject: [PATCH 027/154] fix(spec-specs): Fix BAL cross-transaction tracking and nonce dedup - add commit_transaction_frame() - no net-zero filtering for cross-tx changes - keep max nonce per transaction when building BAL, remove block-level code filtering - filter net-zero code changes at tracking time (for 7702 txs) - use commit_transaction_frame() instead of merge_on_success() for tx->block commits --- .../amsterdam/block_access_lists/builder.py | 13 ++-- src/ethereum/forks/amsterdam/fork.py | 4 +- src/ethereum/forks/amsterdam/state.py | 7 +- src/ethereum/forks/amsterdam/state_tracker.py | 44 ++++++++++- .../test_block_access_lists.py | 73 +++++++++++++++++++ .../test_cases.md | 2 + 6 files changed, 133 insertions(+), 10 deletions(-) diff --git a/src/ethereum/forks/amsterdam/block_access_lists/builder.py b/src/ethereum/forks/amsterdam/block_access_lists/builder.py index 4ed7aa767c..9af8d2a24c 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/builder.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/builder.py @@ -278,14 +278,15 @@ def add_nonce_change( ensure_account(builder, address) # Check if we already have a nonce change for this tx_index and update it - # This ensures we only track the final nonce per transaction + # This ensures we only track the final (highest) nonce per transaction existing_changes = builder.accounts[address].nonce_changes for i, existing in enumerate(existing_changes): if existing.block_access_index == block_access_index: - # Update the existing nonce change with the new nonce - existing_changes[i] = NonceChange( - block_access_index=block_access_index, new_nonce=new_nonce - ) + # Keep the highest nonce value + if new_nonce > existing.new_nonce: + existing_changes[i] = NonceChange( + block_access_index=block_access_index, new_nonce=new_nonce + ) return # No existing change for this tx_index, add a new one @@ -514,6 +515,8 @@ def build_block_access_list( add_nonce_change(builder, address, block_access_index, new_nonce) # Add all code changes + # Net-zero filtering for code changes should happen at the + # transaction level (in merge_on_success), not at the block level. 
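+    # (e.g. a change made by one transaction and undone by a later one in
+    # the same block must still appear at both block access indices, so no
+    # pre-state comparison is performed here)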
for ( address, block_access_index, diff --git a/src/ethereum/forks/amsterdam/fork.py b/src/ethereum/forks/amsterdam/fork.py index dcc46d1c4f..fa4af90fd8 100644 --- a/src/ethereum/forks/amsterdam/fork.py +++ b/src/ethereum/forks/amsterdam/fork.py @@ -66,11 +66,11 @@ ) from .state_tracker import ( capture_pre_balance, + commit_transaction_frame, create_child_frame, get_block_access_index, handle_in_transaction_selfdestruct, increment_block_access_index, - merge_on_success, normalize_balance_changes_for_transaction, track_address, track_balance_change, @@ -1067,7 +1067,7 @@ def process_transaction( block_env.state, ) - merge_on_success(tx_state_changes) + commit_transaction_frame(tx_state_changes) # EIP-7928: Handle in-transaction self-destruct AFTER merge # Convert storage writes to reads and remove nonce/code changes diff --git a/src/ethereum/forks/amsterdam/state.py b/src/ethereum/forks/amsterdam/state.py index af384ec4df..d16c9d9d69 100644 --- a/src/ethereum/forks/amsterdam/state.py +++ b/src/ethereum/forks/amsterdam/state.py @@ -647,7 +647,12 @@ def write_code(sender: Account) -> None: sender.code = code modify_state(state, address, write_code) - track_code_change(state_changes, address, code) + + # Only track code change if it's not net-zero within this frame + # Compare against pre-code captured in this frame, default to b"" + pre_code = state_changes.pre_code.get(address, b"") + if pre_code != code: + track_code_change(state_changes, address, code) def get_storage_original(state: State, address: Address, key: Bytes32) -> U256: diff --git a/src/ethereum/forks/amsterdam/state_tracker.py b/src/ethereum/forks/amsterdam/state_tracker.py index e157588dac..58ca854bf8 100644 --- a/src/ethereum/forks/amsterdam/state_tracker.py +++ b/src/ethereum/forks/amsterdam/state_tracker.py @@ -400,10 +400,8 @@ def merge_on_success(child_frame: StateChanges) -> None: parent_frame.balance_changes[(addr, idx)] = final_balance # Merge nonce changes - keep only highest nonce per address - # Nonces are monotonically increasing, so just keep the max address_final_nonces: Dict[Address, Tuple[BlockAccessIndex, U64]] = {} for addr, idx, nonce in child_frame.nonce_changes: - # Keep the highest nonce value for each address if ( addr not in address_final_nonces or nonce > address_final_nonces[addr][1] @@ -423,6 +421,48 @@ def merge_on_success(child_frame: StateChanges) -> None: # else: Net-zero change - skip entirely +def commit_transaction_frame(tx_frame: StateChanges) -> None: + """ + Commit a transaction frame's changes to the block frame. + + Merges ALL changes from the transaction frame into the block frame + without net-zero filtering. Each transaction's changes are recorded + at their respective transaction index, even if a later transaction + reverts a change back to its original value. + + This is different from merge_on_success() which filters net-zero + changes within a single transaction's execution. + + Parameters + ---------- + tx_frame : + The transaction frame to commit. 
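+
+    For example, if tx1 writes a slot from 0x0 to 0xABCD and tx2 writes it
+    back to 0x0, both writes are committed, each at its own block access
+    index (see test_bal_cross_tx_storage_revert_to_zero).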
+ + """ + assert tx_frame.parent is not None + block_frame = tx_frame.parent + + # Merge address accesses + block_frame.touched_addresses.update(tx_frame.touched_addresses) + + # Merge storage operations + block_frame.storage_reads.update(tx_frame.storage_reads) + for (addr, key, idx), value in tx_frame.storage_writes.items(): + block_frame.storage_writes[(addr, key, idx)] = value + + # Merge balance changes + for (addr, idx), final_balance in tx_frame.balance_changes.items(): + block_frame.balance_changes[(addr, idx)] = final_balance + + # Merge nonce changes + for addr, idx, nonce in tx_frame.nonce_changes: + block_frame.nonce_changes.add((addr, idx, nonce)) + + # Merge code changes + for (addr, idx), final_code in tx_frame.code_changes.items(): + block_frame.code_changes[(addr, idx)] = final_code + + def merge_on_failure(child_frame: StateChanges) -> None: """ Merge child frame's changes into parent on failed completion. diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py index 336623c26c..53ae688410 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py @@ -2133,3 +2133,76 @@ def test_bal_create_transaction_empty_code( contract_address: Account(nonce=1, code=b""), }, ) + + +def test_bal_cross_tx_storage_revert_to_zero( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures storage changes when tx1 writes a non-zero value + and tx2 reverts it back to zero. This is a regression test for the + blobhash scenario where slot changes were being incorrectly filtered + as net-zero across transaction boundaries. 
+ + Tx1: slot 0 = 0x0 -> 0xABCD (change recorded at tx_index=1) + Tx2: slot 0 = 0xABCD -> 0x0 (change MUST be recorded at tx_index=2) + """ + alice = pre.fund_eoa() + + # Contract that writes to slot 0 based on calldata + contract = pre.deploy_contract(code=Op.SSTORE(0, Op.CALLDATALOAD(0))) + + # Tx1: Write slot 0 = 0xABCD + tx1 = Transaction( + sender=alice, + to=contract, + data=Hash(0xABCD), + gas_limit=100_000, + ) + + # Tx2: Write slot 0 = 0x0 (revert to zero) + tx2 = Transaction( + sender=alice, + to=contract, + data=Hash(0x0), + gas_limit=100_000, + ) + + account_expectations = { + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(tx_index=1, post_nonce=1), + BalNonceChange(tx_index=2, post_nonce=2), + ], + ), + contract: BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=0xABCD), + # CRITICAL: tx2's write to 0x0 MUST appear + # even though it returns slot to original value + BalStorageChange(tx_index=2, post_value=0x0), + ], + ), + ], + ), + } + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx1, tx2], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + ], + post={ + alice: Account(nonce=2), + contract: Account(storage={0: 0x0}), + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 44211095a0..5f80be1fbc 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -52,6 +52,8 @@ | `test_bal_oog_7702_delegated_warm_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when first account is warm, second is cold | Alice calls warm delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (warm load succeeds) but **MUST NOT** include `TargetContract` (cold load fails due to OOG) | 🟡 Planned | | `test_bal_multiple_balance_changes_same_account` | Ensure BAL tracks multiple balance changes to same account across transactions | Alice funds Bob (starts at 0) in tx0 with exact amount needed. Bob spends everything in tx1 to Charlie. Bob's balance: 0 → funding_amount → 0 | BAL **MUST** include Bob with two `balance_changes`: one at txIndex=1 (receives funds) and one at txIndex=2 (balance returns to 0). This tests balance tracking across two transactions. | ✅ Completed | | `test_bal_multiple_storage_writes_same_slot` | Ensure BAL tracks multiple writes to same storage slot across transactions | Alice calls contract 3 times in same block. Contract increments slot 1 on each call: 0 → 1 → 2 → 3 | BAL **MUST** include contract with slot 1 having three `slot_changes`: txIndex=1 (value 1), txIndex=2 (value 2), txIndex=3 (value 3). Each transaction's write must be recorded separately. | ✅ Completed | +| `test_bal_create_transaction_empty_code` | Ensure BAL does not record spurious code changes for CREATE transaction deploying empty code | Alice sends CREATE transaction with empty initcode (deploys code `b""`). Contract address gets nonce = 1 and code = `b""`. | BAL **MUST** include Alice with `nonce_changes` and created contract with `nonce_changes` but **MUST NOT** include `code_changes` for contract (setting `b"" -> b""` is net-zero). 
| ✅ Completed | +| `test_bal_cross_tx_storage_revert_to_zero` | Ensure BAL captures storage changes when tx2 reverts slot back to original value (blobhash regression test) | Alice sends tx1 writing slot 0=0xABCD (from 0x0), then tx2 writing slot 0=0x0 (back to original) | BAL **MUST** include contract with slot 0 having two `slot_changes`: txIndex=1 (0xABCD) and txIndex=2 (0x0). Cross-transaction net-zero **MUST NOT** be filtered. | ✅ Completed | | `test_bal_create_contract_init_revert` | Ensure BAL correctly handles CREATE when parent call reverts | Caller calls factory, factory executes CREATE (succeeds), then factory REVERTs rolling back the CREATE | BAL **MUST** include Alice with `nonce_changes`. Caller and factory with no changes (reverted). Created contract address appears in BAL but **MUST NOT** have `nonce_changes` or `code_changes` (CREATE was rolled back). Contract address **MUST NOT** exist in post-state. | ✅ Completed | | `test_bal_create_oog_code_deposit` | Ensure BAL correctly handles CREATE OOG during code deposit | Alice calls factory contract that executes CREATE with init code returning 10,000 bytes. Transaction has insufficient gas for code deposit. Factory nonce increments, CREATE returns 0 and stores in slot 1. | BAL **MUST** include Alice with `nonce_changes`. Factory with `nonce_changes` (incremented by CREATE) and `storage_changes` (slot 1 = 0). Contract address with empty changes (read during collision check). **MUST NOT** include nonce or code changes for contract address (rolled back on OOG). Contract address **MUST NOT** exist in post-state. | ✅ Completed | | `test_bal_invalid_missing_nonce` | Verify clients reject blocks with BAL missing required nonce changes | Alice sends transaction to Bob; BAL modifier removes Alice's nonce change entry | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate that all sender accounts have nonce changes recorded. | ✅ Completed | From 54b574fe140b3562476ebe9c6a15e9b512f86f66 Mon Sep 17 00:00:00 2001 From: fselmo Date: Fri, 14 Nov 2025 14:29:24 -0300 Subject: [PATCH 028/154] fix(spec-specs): Move destroy_account before BAL normalization --- src/ethereum/forks/amsterdam/fork.py | 13 ++++-- .../test_selfdestruct_revert.py | 41 ++++++++++++++++++- 2 files changed, 49 insertions(+), 5 deletions(-) diff --git a/src/ethereum/forks/amsterdam/fork.py b/src/ethereum/forks/amsterdam/fork.py index fa4af90fd8..9abb065f0e 100644 --- a/src/ethereum/forks/amsterdam/fork.py +++ b/src/ethereum/forks/amsterdam/fork.py @@ -1057,8 +1057,15 @@ def process_transaction( block_output.block_logs += tx_output.logs + # EIP-7928: Handle in-transaction self-destruct BEFORE normalization + # Destroy accounts first so normalization sees correct post-tx state + # Only accounts created in same tx are in accounts_to_delete per EIP-6780 + for address in tx_output.accounts_to_delete: + destroy_account(block_env.state, address) + # EIP-7928: Normalize balance changes for this transaction before merging - # into block frame. + # into block frame. Must happen AFTER destroy_account so net-zero filtering + # sees the correct post-transaction balance (0 for destroyed accounts). 
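+    # (e.g. an account created and self-destructed within this transaction
+    # ends the transaction with balance 0, so its funding and sweep net out
+    # and no balance change is reported for it)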
normalize_balance_changes_for_transaction( tx_state_changes, BlockAccessIndex( @@ -1069,9 +1076,8 @@ def process_transaction( commit_transaction_frame(tx_state_changes) - # EIP-7928: Handle in-transaction self-destruct AFTER merge + # EIP-7928: Handle in-transaction self-destruct normalization AFTER merge # Convert storage writes to reads and remove nonce/code changes - # Only accounts created in same tx are in accounts_to_delete per EIP-6780 for address in tx_output.accounts_to_delete: handle_in_transaction_selfdestruct( block_env.block_state_changes, @@ -1080,7 +1086,6 @@ def process_transaction( get_block_access_index(block_env.block_state_changes) ), ) - destroy_account(block_env.state, address) def process_withdrawals( diff --git a/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py b/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py index e0bfa59ec2..2c7bbaea3e 100644 --- a/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py +++ b/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py @@ -8,8 +8,12 @@ Account, Address, Alloc, + BalAccountExpectation, + BalBalanceChange, + BlockAccessListExpectation, Bytecode, Environment, + Fork, Initcode, Op, StateTestFiller, @@ -343,6 +347,7 @@ def test_selfdestruct_created_in_same_tx_with_revert( # noqa SC200 selfdestruct_with_transfer_initcode_copy_from_address: Address, recursive_revert_contract_address: Address, recursive_revert_contract_code: Bytecode, + fork: Fork, ) -> None: """ Given: @@ -427,7 +432,41 @@ def test_selfdestruct_created_in_same_tx_with_revert( # noqa SC200 gas_limit=500_000, ) - state_test(env=env, pre=pre, post=post, tx=tx) + expected_block_access_list = None + if fork.header_bal_hash_required(): + account_expectations = {} + + if selfdestruct_on_outer_call > 0: + account_expectations[ + selfdestruct_with_transfer_contract_address + ] = BalAccountExpectation( + storage_reads=[0, 1], # Storage was accessed + balance_changes=[], # No net balance change + ) + account_expectations[selfdestruct_recipient_address] = ( + BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + tx_index=1, + post_balance=1 + if selfdestruct_on_outer_call == 1 + else 2, + ) + ], + ) + ) + + expected_block_access_list = BlockAccessListExpectation( + account_expectations=account_expectations + ) + + state_test( + env=env, + pre=pre, + post=post, + tx=tx, + expected_block_access_list=expected_block_access_list, + ) @pytest.mark.parametrize( From 7c3994818ba9009d213cdc29e79563adee6e8b08 Mon Sep 17 00:00:00 2001 From: fselmo Date: Fri, 14 Nov 2025 17:52:41 -0300 Subject: [PATCH 029/154] fix(spec-specs): Check delegation access gas before reading --- .../forks/amsterdam/vm/eoa_delegation.py | 81 +++++------- .../forks/amsterdam/vm/instructions/system.py | 123 ++++++++++++------ tests/prague/eip7702_set_code_tx/test_gas.py | 23 ++++ 3 files changed, 144 insertions(+), 83 deletions(-) diff --git a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py index cd1f24a70c..72d6d32215 100644 --- a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py +++ b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py @@ -116,11 +116,15 @@ def recover_authority(authorization: Authorization) -> Address: return Address(keccak256(public_key)[12:32]) -def check_delegation( +def calculate_delegation_cost( evm: Evm, address: Address -) -> Tuple[bool, Address, Address, Bytes, Uint]: +) -> Tuple[bool, Address, Optional[Address], Uint]: """ - Check delegation info without modifying state or 
tracking. + Check if address has delegation and calculate delegation target gas cost. + + This function reads the original account's code to check for delegation + and tracks it in state_changes. It calculates the delegation target's + gas cost but does NOT read the delegation target yet. Parameters ---------- @@ -131,77 +135,64 @@ def check_delegation( Returns ------- - delegation : `Tuple[bool, Address, Address, Bytes, Uint]` - (is_delegated, original_address, final_address, code, - additional_gas_cost) + delegation_info : `Tuple[bool, Address, Optional[Address], Uint]` + (is_delegated, original_address, delegated_address_or_none, + delegation_gas_cost) """ state = evm.message.block_env.state code = get_account(state, address).code + track_address(evm.state_changes, address) + if not is_valid_delegation(code): - return False, address, address, code, Uint(0) + return False, address, None, Uint(0) delegated_address = Address(code[EOA_DELEGATION_MARKER_LENGTH:]) + # Calculate gas cost for delegation target access if delegated_address in evm.accessed_addresses: - additional_gas_cost = GAS_WARM_ACCESS + delegation_gas_cost = GAS_WARM_ACCESS else: - additional_gas_cost = GAS_COLD_ACCOUNT_ACCESS + delegation_gas_cost = GAS_COLD_ACCOUNT_ACCESS - delegated_code = get_account(state, delegated_address).code + return True, address, delegated_address, delegation_gas_cost - return ( - True, - address, - delegated_address, - delegated_code, - additional_gas_cost, - ) - -def apply_delegation_tracking( - evm: Evm, original_address: Address, delegated_address: Address -) -> None: +def read_delegation_target(evm: Evm, delegated_address: Address) -> Bytes: """ - Apply delegation tracking after gas check passes. + Read the delegation target's code and track the access. + + Should ONLY be called AFTER verifying we have gas for the access. + + This function: + 1. Reads the delegation target's code from state + 2. Adds it to accessed_addresses (if not already there) + 3. Tracks it in state_changes for BAL Parameters ---------- evm : `Evm` The execution frame. - original_address : `Address` - The original address that was called. delegated_address : `Address` - The address delegated to. + The delegation target address. + + Returns + ------- + code : `Bytes` + The delegation target's code. """ - track_address(evm.state_changes, original_address) + state = evm.message.block_env.state + # Add to accessed addresses for warm/cold gas accounting if delegated_address not in evm.accessed_addresses: evm.accessed_addresses.add(delegated_address) + # Track the address for BAL track_address(evm.state_changes, delegated_address) - -def access_delegation( - evm: Evm, address: Address -) -> Tuple[bool, Address, Bytes, Uint]: - """ - Access delegation info and track state changes. - - DEPRECATED: Use check_delegation and apply_delegation_tracking - for proper gas check ordering. 
- - """ - is_delegated, orig_addr, final_addr, code, gas_cost = check_delegation( - evm, address - ) - - if is_delegated: - apply_delegation_tracking(evm, orig_addr, final_addr) - - return is_delegated, final_addr, code, gas_cost + return get_account(state, delegated_address).code def set_delegation(message: Message) -> U256: diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index 389adf6f54..3513e09a58 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -34,8 +34,8 @@ to_address_masked, ) from ...vm.eoa_delegation import ( - apply_delegation_tracking, - check_delegation, + calculate_delegation_cost, + read_delegation_target, ) from .. import ( Evm, @@ -397,13 +397,30 @@ def call(evm: Evm) -> None: if is_cold_access: evm.accessed_addresses.add(to) + # check gas for base access before reading `to` account + base_gas_cost = extend_memory.cost + access_gas_cost + check_gas(evm, base_gas_cost) + + # read `to` account and assess delegation cost ( is_delegated, original_address, - final_address, - code, + delegated_address, delegation_gas_cost, - ) = check_delegation(evm, to) + ) = calculate_delegation_cost(evm, to) + + # check gas again for delegation target access before reading it + if is_delegated and delegation_gas_cost > Uint(0): + check_gas(evm, base_gas_cost + delegation_gas_cost) + + if is_delegated: + assert delegated_address is not None + code = read_delegation_target(evm, delegated_address) + final_address = delegated_address + else: + code = get_account(evm.message.block_env.state, to).code + final_address = to + access_gas_cost += delegation_gas_cost code_address = final_address @@ -421,12 +438,6 @@ def call(evm: Evm) -> None: access_gas_cost + create_gas_cost + transfer_gas_cost, ) - check_gas(evm, message_call_gas.cost + extend_memory.cost) - - track_address(evm.state_changes, to) - if is_delegated: - apply_delegation_tracking(evm, original_address, final_address) - charge_gas(evm, message_call_gas.cost + extend_memory.cost) if evm.message.is_static and value != U256(0): raise WriteInStaticContext @@ -497,13 +508,30 @@ def callcode(evm: Evm) -> None: if is_cold_access: evm.accessed_addresses.add(code_address) + # check gas for base access before reading `code_address` account + base_gas_cost = extend_memory.cost + access_gas_cost + check_gas(evm, base_gas_cost) + + # read code_address account and assess delegation cost ( is_delegated, original_address, - final_address, - code, + delegated_address, delegation_gas_cost, - ) = check_delegation(evm, code_address) + ) = calculate_delegation_cost(evm, code_address) + + # check gas again for delegation target access before reading it + if is_delegated and delegation_gas_cost > Uint(0): + check_gas(evm, base_gas_cost + delegation_gas_cost) + + if is_delegated: + assert delegated_address is not None + code = read_delegation_target(evm, delegated_address) + final_address = delegated_address + else: + code = get_account(evm.message.block_env.state, code_address).code + final_address = code_address + access_gas_cost += delegation_gas_cost code_address = final_address @@ -518,12 +546,6 @@ def callcode(evm: Evm) -> None: access_gas_cost + transfer_gas_cost, ) - check_gas(evm, message_call_gas.cost + extend_memory.cost) - - track_address(evm.state_changes, original_address) - if is_delegated: - apply_delegation_tracking(evm, original_address, final_address) - charge_gas(evm, 
message_call_gas.cost + extend_memory.cost) # OPERATION @@ -665,16 +687,35 @@ def delegatecall(evm: Evm) -> None: access_gas_cost = ( GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS ) + + # check gas for base access before reading `code_address` account + base_gas_cost = extend_memory.cost + access_gas_cost + check_gas(evm, base_gas_cost) + if is_cold_access: evm.accessed_addresses.add(code_address) + # read `code_address` account and assess delegation cost ( is_delegated, original_address, - final_address, - code, + delegated_address, delegation_gas_cost, - ) = check_delegation(evm, code_address) + ) = calculate_delegation_cost(evm, code_address) + + # check gas again for delegation target access before reading it + if is_delegated and delegation_gas_cost > Uint(0): + check_gas(evm, base_gas_cost + delegation_gas_cost) + + # Now safe to read delegation target since we verified gas + if is_delegated: + assert delegated_address is not None + code = read_delegation_target(evm, delegated_address) + final_address = delegated_address + else: + code = get_account(evm.message.block_env.state, code_address).code + final_address = code_address + access_gas_cost += delegation_gas_cost code_address = final_address @@ -684,12 +725,6 @@ def delegatecall(evm: Evm) -> None: U256(0), gas, Uint(evm.gas_left), extend_memory.cost, access_gas_cost ) - check_gas(evm, message_call_gas.cost + extend_memory.cost) - - track_address(evm.state_changes, original_address) - if is_delegated: - apply_delegation_tracking(evm, original_address, final_address) - charge_gas(evm, message_call_gas.cost + extend_memory.cost) # OPERATION @@ -749,13 +784,31 @@ def staticcall(evm: Evm) -> None: if is_cold_access: evm.accessed_addresses.add(to) + # check gas for base access before reading `to` account + base_gas_cost = extend_memory.cost + access_gas_cost + check_gas(evm, base_gas_cost) + + # read `to` account and assess delegation cost ( is_delegated, original_address, - final_address, - code, + delegated_address, delegation_gas_cost, - ) = check_delegation(evm, to) + ) = calculate_delegation_cost(evm, to) + + # check gas again for delegation target access before reading it + if is_delegated and delegation_gas_cost > Uint(0): + check_gas(evm, base_gas_cost + delegation_gas_cost) + + # Now safe to read delegation target since we verified gas + if is_delegated: + assert delegated_address is not None + code = read_delegation_target(evm, delegated_address) + final_address = delegated_address + else: + code = get_account(evm.message.block_env.state, to).code + final_address = to + access_gas_cost += delegation_gas_cost code_address = final_address @@ -769,12 +822,6 @@ def staticcall(evm: Evm) -> None: access_gas_cost, ) - check_gas(evm, message_call_gas.cost + extend_memory.cost) - - track_address(evm.state_changes, to) - if is_delegated: - apply_delegation_tracking(evm, original_address, final_address) - charge_gas(evm, message_call_gas.cost + extend_memory.cost) # OPERATION diff --git a/tests/prague/eip7702_set_code_tx/test_gas.py b/tests/prague/eip7702_set_code_tx/test_gas.py index 048a84c44b..b415684bf7 100644 --- a/tests/prague/eip7702_set_code_tx/test_gas.py +++ b/tests/prague/eip7702_set_code_tx/test_gas.py @@ -18,6 +18,9 @@ Address, Alloc, AuthorizationTuple, + BalAccountExpectation, + BalNonceChange, + BlockAccessListExpectation, Bytecode, Bytes, ChainConfig, @@ -1269,6 +1272,25 @@ def test_call_to_pre_authorized_oog( sender=pre.fund_eoa(), ) + expected_block_access_list = None + if 
fork.header_bal_hash_required(): + # Sender nonce changes, callee is accessed but storage unchanged (OOG) + # auth_signer is tracked (we read its code to check delegation) + # delegation is NOT tracked (OOG before reading it) + account_expectations = { + tx.sender: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + callee_address: BalAccountExpectation.empty(), + # read for calculating delegation access cost: + auth_signer: BalAccountExpectation.empty(), + # OOG - not enough gas for delegation access: + delegation: None, + } + expected_block_access_list = BlockAccessListExpectation( + account_expectations=account_expectations + ) + state_test( pre=pre, tx=tx, @@ -1277,4 +1299,5 @@ def test_call_to_pre_authorized_oog( auth_signer: Account(code=Spec.delegation_designation(delegation)), delegation: Account(storage=Storage()), }, + expected_block_access_list=expected_block_access_list, ) From ad4c8eef43a0051ca99cc14f56c68053dfe1cbb1 Mon Sep 17 00:00:00 2001 From: fselmo Date: Sat, 15 Nov 2025 19:22:46 -0300 Subject: [PATCH 030/154] fix(spec-specs): Track code per auth; filter pre at tx frame --- .../amsterdam/block_access_lists/builder.py | 3 +- src/ethereum/forks/amsterdam/state.py | 47 +++++ src/ethereum/forks/amsterdam/state_tracker.py | 10 +- .../forks/amsterdam/vm/eoa_delegation.py | 17 +- .../test_block_access_lists_eip7702.py | 165 ++++++++++++++++++ .../test_cases.md | 4 +- 6 files changed, 238 insertions(+), 8 deletions(-) diff --git a/src/ethereum/forks/amsterdam/block_access_lists/builder.py b/src/ethereum/forks/amsterdam/block_access_lists/builder.py index 9af8d2a24c..ae05445b66 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/builder.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/builder.py @@ -515,8 +515,7 @@ def build_block_access_list( add_nonce_change(builder, address, block_access_index, new_nonce) # Add all code changes - # Net-zero filtering for code changes should happen at the - # transaction level (in merge_on_success), not at the block level. + # Filtering happens at transaction level in eoa_delegation.py for ( address, block_access_index, diff --git a/src/ethereum/forks/amsterdam/state.py b/src/ethereum/forks/amsterdam/state.py index d16c9d9d69..c1d331942a 100644 --- a/src/ethereum/forks/amsterdam/state.py +++ b/src/ethereum/forks/amsterdam/state.py @@ -655,6 +655,53 @@ def write_code(sender: Account) -> None: track_code_change(state_changes, address, code) +def set_authority_code( + state: State, + address: Address, + code: Bytes, + state_changes: StateChanges, + current_code: Bytes, +) -> None: + """ + Sets authority account code for EIP-7702 delegation. + + This function is used specifically for setting authority code within + EIP-7702 Set Code Transactions. Unlike set_code(), it tracks changes based + on the current code rather than pre_code to handle multiple authorizations + to the same address within a single transaction correctly. + + Parameters + ---------- + state: + The current state. + + address: + Address of the authority account whose code needs to be set. + + code: + The delegation designation bytecode to set. + + state_changes: + State changes frame for tracking (EIP-7928). + + current_code: + The current code before this change. Used to determine if tracking + is needed (only track if code actually changes from current value). 
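+
+    For example, when one transaction carries two authorizations from the
+    same authority, the second write is compared against the code set by
+    the first; a set-then-reset pair then nets out to no code change when
+    the transaction frame is committed, while a set-then-swap reports only
+    the final delegation target.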
+ + """ + + def write_code(sender: Account) -> None: + sender.code = code + + modify_state(state, address, write_code) + + # Only track if code is actually changing from current value + # This allows multiple auths to same address to be tracked individually + # Net-zero filtering happens in commit_transaction_frame + if current_code != code: + track_code_change(state_changes, address, code) + + def get_storage_original(state: State, address: Address, key: Bytes32) -> U256: """ Get the original value in a storage slot i.e. the value before the current diff --git a/src/ethereum/forks/amsterdam/state_tracker.py b/src/ethereum/forks/amsterdam/state_tracker.py index 58ca854bf8..7b98396318 100644 --- a/src/ethereum/forks/amsterdam/state_tracker.py +++ b/src/ethereum/forks/amsterdam/state_tracker.py @@ -372,7 +372,7 @@ def merge_on_success(child_frame: StateChanges) -> None: parent_frame.pre_storage[slot] = value for addr, code in child_frame.pre_code.items(): if addr not in parent_frame.pre_code: - parent_frame.pre_code[addr] = code + capture_pre_code(parent_frame, addr, code) # Merge storage operations, filtering noop writes parent_frame.storage_reads.update(child_frame.storage_reads) @@ -458,9 +458,13 @@ def commit_transaction_frame(tx_frame: StateChanges) -> None: for addr, idx, nonce in tx_frame.nonce_changes: block_frame.nonce_changes.add((addr, idx, nonce)) - # Merge code changes + # Merge code changes - filter net-zero changes within the transaction + # Compare final code against transaction's pre-code for (addr, idx), final_code in tx_frame.code_changes.items(): - block_frame.code_changes[(addr, idx)] = final_code + pre_code = tx_frame.pre_code.get(addr, b"") + if pre_code != final_code: + block_frame.code_changes[(addr, idx)] = final_code + # else: Net-zero change within this transaction - skip def merge_on_failure(child_frame: StateChanges) -> None: diff --git a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py index 72d6d32215..ec95fd1a47 100644 --- a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py +++ b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py @@ -14,7 +14,12 @@ # track_address_access removed - now using state_changes.track_address() from ..fork_types import Address, Authorization -from ..state import account_exists, get_account, increment_nonce, set_code +from ..state import ( + account_exists, + get_account, + increment_nonce, + set_authority_code, +) from ..state_tracker import capture_pre_code, track_address from ..utils.hexadecimal import hex_to_address from ..vm.gas import GAS_COLD_ACCOUNT_ACCESS, GAS_WARM_ACCESS @@ -253,8 +258,16 @@ def set_delegation(message: Message) -> U256: or message.block_env.block_state_changes ) + # Capture pre-code before any changes (first-write-wins) capture_pre_code(state_changes, authority, authority_code) - set_code(state, authority, code_to_set, state_changes) + + # Set delegation code + # Uses authority_code (current) for tracking to handle multiple auths + # Net-zero filtering happens in commit_transaction_frame + set_authority_code( + state, authority, code_to_set, state_changes, authority_code + ) + increment_nonce(state, authority, state_changes) if message.code_address is None: diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py index 4616d2cbe4..8182c1992c 100644 --- 
a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py @@ -663,3 +663,168 @@ def test_bal_7702_null_address_delegation_no_code_change( bob: Account(balance=10), }, ) + + +def test_bal_7702_double_auth_reset( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures the net code change when multiple authorizations + occur in the same transaction (double auth). + + This test verifies that when: + 1. First auth sets delegation to CONTRACT_A + 2. Second auth resets delegation to empty (address 0) + + The BAL should show the NET change (empty -> empty), not intermediate + states. This is a regression test for the bug where the BAL showed + the first auth's code but the final state was empty. + """ + alice = pre.fund_eoa() + bob = pre.fund_eoa(amount=0) + relayer = pre.fund_eoa() + + contract_a = pre.deploy_contract(code=Op.STOP) + + # Transaction with double auth: + # 1. First sets delegation to contract_a + # 2. Second resets to empty + tx = Transaction( + sender=relayer, + to=bob, + value=10, + gas_limit=1_000_000, + gas_price=0xA, + authorization_list=[ + AuthorizationTuple( + address=contract_a, + nonce=0, + signer=alice, + ), + AuthorizationTuple( + address=0, # Reset to empty + nonce=1, + signer=alice, + ), + ], + ) + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(tx_index=1, post_nonce=2) + ], + code_changes=[], + ), + bob: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=10) + ] + ), + relayer: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(tx_index=1, post_nonce=1) + ], + ), + contract_a: None, + } + ), + ) + ], + post={ + alice: Account(nonce=2, code=b""), # Final code is empty + bob: Account(balance=10), + relayer: Account(nonce=1), + }, + ) + + +def test_bal_7702_double_auth_swap( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures the net code change when double auth swaps + delegation targets. + + This test verifies that when: + 1. First auth sets delegation to CONTRACT_A + 2. Second auth changes delegation to CONTRACT_B + + The BAL should show the final code change (empty -> CONTRACT_B), + not the intermediate CONTRACT_A. 
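+
+    (A delegation designation is the 3-byte 0xef0100 marker followed by the
+    20-byte target address, so the expected code change is the designation
+    for CONTRACT_B.)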
+ """ + alice = pre.fund_eoa() + bob = pre.fund_eoa(amount=0) + relayer = pre.fund_eoa() + + contract_a = pre.deploy_contract(code=Op.STOP) + contract_b = pre.deploy_contract(code=Op.STOP) + + tx = Transaction( + sender=relayer, + to=bob, + value=10, + gas_limit=1_000_000, + gas_price=0xA, + authorization_list=[ + AuthorizationTuple( + address=contract_a, + nonce=0, + signer=alice, + ), + AuthorizationTuple( + address=contract_b, # Override to contract_b + nonce=1, + signer=alice, + ), + ], + ) + + account_expectations = { + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=2)], + code_changes=[ + # Should show final code (CONTRACT_B), not CONTRACT_A + BalCodeChange( + tx_index=1, + new_code=Spec7702.delegation_designation(contract_b), + ) + ], + ), + bob: BalAccountExpectation( + balance_changes=[BalBalanceChange(tx_index=1, post_balance=10)] + ), + relayer: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + # Neither contract appears in BAL during delegation setup + contract_a: None, + contract_b: None, + } + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + ], + post={ + alice: Account( + nonce=2, code=Spec7702.delegation_designation(contract_b) + ), + bob: Account(balance=10), + relayer: Account(nonce=1), + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 5f80be1fbc..39d38a4d27 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -39,7 +39,9 @@ | `test_bal_7702_invalid_nonce_authorization` | Ensure BAL handles failed authorization due to wrong nonce | `Relayer` sends sponsored transaction to Bob (10 wei transfer succeeds) but Alice's authorization to delegate to `Oracle` uses incorrect nonce, causing silent authorization failure | BAL **MUST** include Alice with empty changes (account access), Bob with `balance_changes` (receives 10 wei), Relayer with `nonce_changes`. **MUST NOT** include `Oracle` (authorization failed, no delegation) | ✅ Completed | | `test_bal_7702_invalid_chain_id_authorization` | Ensure BAL handles failed authorization due to wrong chain id | `Relayer` sends sponsored transaction to Bob (10 wei transfer succeeds) but Alice's authorization to delegate to `Oracle` uses incorrect chain id, causing authorization failure before account access | BAL **MUST** include Bob with `balance_changes` (receives 10 wei), Relayer with `nonce_changes`. **MUST NOT** include Alice (authorization fails before loading account) or `Oracle` (authorization failed, no delegation) | ✅ Completed | | `test_bal_7702_delegated_via_call_opcode` | Ensure BAL captures delegation target when a contract uses *CALL opcodes to call a delegated account | Pre-deployed contract `Alice` delegated to `Oracle`. `Caller` contract uses CALL/CALLCODE/DELEGATECALL/STATICCALL to call `Alice`. Bob sends transaction to `Caller`. | BAL **MUST** include Bob: `nonce_changes`. `Caller`: empty changes (account access). `Alice`: empty changes (account access - delegated account being called). `Oracle`: empty changes (delegation target access). 
| ✅ Completed | -| `test_bal_7702_null_address_delegation` | Ensure BAL does not record spurious code changes for net-zero code operations | Alice sends transaction with authorization delegating to NULL_ADDRESS (0x0), which sets code to `b""` on an account that already has `b""` code. Transaction sends 10 wei to Bob. | BAL **MUST** include Alice with `nonce_changes` (tx nonce + auth nonce increment) but **MUST NOT** include `code_changes` (setting `b"" -> b""` is net-zero and filtered out). Bob: `balance_changes` (receives 10 wei). This ensures net-zero code change is not recorded. +| `test_bal_7702_null_address_delegation` | Ensure BAL does not record spurious code changes for net-zero code operations | Alice sends transaction with authorization delegating to NULL_ADDRESS (0x0), which sets code to `b""` on an account that already has `b""` code. Transaction sends 10 wei to Bob. | BAL **MUST** include Alice with `nonce_changes` (tx nonce + auth nonce increment) but **MUST NOT** include `code_changes` (setting `b"" -> b""` is net-zero and filtered out). Bob: `balance_changes` (receives 10 wei). This ensures net-zero code change is not recorded. | ✅ Completed | +| `test_bal_7702_double_auth_reset` | Ensure BAL captures net code change when double auth resets delegation | `Relayer` sends transaction with two authorizations for Alice: (1) First auth sets delegation to `CONTRACT_A` at nonce=0, (2) Second auth resets delegation to empty (address 0) at nonce=1. Transaction sends 10 wei to Bob. Per EIP-7702, only the last authorization takes effect. | BAL **MUST** include Alice with `nonce_changes` (both auths increment nonce to 2) but **MUST NOT** include `code_changes` (net change is empty → empty). Bob: `balance_changes` (receives 10 wei). Relayer: `nonce_changes`. `CONTRACT_A` **MUST NOT** be in BAL (never accessed). This is a regression test for the bug where BAL showed first auth's code despite final state being empty. | ✅ Completed | +| `test_bal_7702_double_auth_swap` | Ensure BAL captures final code when double auth swaps delegation targets | `Relayer` sends transaction with two authorizations for Alice: (1) First auth sets delegation to `CONTRACT_A` at nonce=0, (2) Second auth changes delegation to `CONTRACT_B` at nonce=1. Transaction sends 10 wei to Bob. Per EIP-7702, only the last authorization takes effect. | BAL **MUST** include Alice with `nonce_changes` (both auths increment nonce to 2) and `code_changes` (final code is delegation designation for `CONTRACT_B`, not `CONTRACT_A`). Bob: `balance_changes` (receives 10 wei). Relayer: `nonce_changes`. Neither `CONTRACT_A` nor `CONTRACT_B` appear in BAL during delegation setup (never accessed). This ensures BAL shows final state, not intermediate changes. | ✅ Completed | | `test_bal_sstore_and_oog` | Ensure BAL handles OOG during SSTORE execution at various gas boundaries (EIP-2200 stipend and implicit SLOAD) | Alice calls contract that attempts `SSTORE` to cold slot `0x01`. Parameterized: (1) OOG at EIP-2200 stipend check (2300 gas after PUSH opcodes) - fails before implicit SLOAD, (2) OOG at stipend + 1 (2301 gas) - passes stipend check but fails after implicit SLOAD, (3) OOG at exact gas - 1, (4) Successful SSTORE with exact gas. | For case (1): BAL **MUST NOT** include slot `0x01` in `storage_reads` or `storage_changes` (fails before implicit SLOAD). For cases (2) and (3): BAL **MUST** include slot `0x01` in `storage_reads` (implicit SLOAD occurred) but **MUST NOT** include in `storage_changes` (write didn't complete). 
For case (4): BAL **MUST** include slot `0x01` in `storage_changes` only (successful write; read is filtered by builder). | ✅ Completed | | `test_bal_sstore_static_context` | Ensure BAL does not capture spurious storage access when SSTORE fails in static context | Alice calls contract with `STATICCALL` which attempts `SSTORE` to slot `0x01`. SSTORE must fail before any storage access occurs. | BAL **MUST NOT** include slot `0x01` in `storage_reads` or `storage_changes`. Static context check happens before storage access, preventing spurious reads. Alice has `nonce_changes` and `balance_changes` (gas cost). Target contract included with empty changes. | ✅ Completed | | `test_bal_sload_and_oog` | Ensure BAL handles OOG during SLOAD execution correctly | Alice calls contract that attempts `SLOAD` from cold slot `0x01`. Parameterized: (1) OOG at SLOAD opcode (insufficient gas), (2) Successful SLOAD execution. | For OOG case: BAL **MUST NOT** contain slot `0x01` in `storage_reads` since storage wasn't accessed. For success case: BAL **MUST** contain slot `0x01` in `storage_reads`. | ✅ Completed | From 0bf53c61c7b1006d02fcf5f27e6b38ae5ece6fd4 Mon Sep 17 00:00:00 2001 From: fselmo Date: Sun, 16 Nov 2025 18:05:39 -0300 Subject: [PATCH 031/154] fix(spec-specs): Use proper frames for system transactions --- .../amsterdam/block_access_lists/builder.py | 11 +- src/ethereum/forks/amsterdam/fork.py | 10 + .../test_block_access_lists_cross_index.py | 260 ++++++++++++++++++ 3 files changed, 273 insertions(+), 8 deletions(-) create mode 100644 tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_cross_index.py diff --git a/src/ethereum/forks/amsterdam/block_access_lists/builder.py b/src/ethereum/forks/amsterdam/block_access_lists/builder.py index ae05445b66..c1dbf98222 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/builder.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/builder.py @@ -484,19 +484,14 @@ def build_block_access_list( for address, slot in state_changes.storage_reads: add_storage_read(builder, address, slot) - # Add all storage writes, filtering net-zero changes + # Add all storage writes + # Net-zero filtering happens at transaction commit time, not here. + # At block level, we track ALL writes at their respective indices. for ( address, slot, block_access_index, ), value in state_changes.storage_writes.items(): - # Check if this is a net-zero change by comparing with pre-state - if (address, slot) in state_changes.pre_storage: - if state_changes.pre_storage[(address, slot)] == value: - # Net-zero change - convert to read only - add_storage_read(builder, address, slot) - continue - # Convert U256 to Bytes32 for storage value_bytes = Bytes32(value.to_bytes(U256(32), "big")) add_storage_write( diff --git a/src/ethereum/forks/amsterdam/fork.py b/src/ethereum/forks/amsterdam/fork.py index 9abb065f0e..f5a7081fd9 100644 --- a/src/ethereum/forks/amsterdam/fork.py +++ b/src/ethereum/forks/amsterdam/fork.py @@ -71,6 +71,7 @@ get_block_access_index, handle_in_transaction_selfdestruct, increment_block_access_index, + merge_on_success, normalize_balance_changes_for_transaction, track_address, track_balance_change, @@ -633,6 +634,10 @@ def process_system_transaction( Output of processing the system transaction. 
""" + # EIP-7928: Create a child frame for system transaction + # This allows proper pre-state capture for net-zero filtering + system_tx_state_changes = create_child_frame(block_env.block_state_changes) + tx_env = vm.TransactionEnvironment( origin=SYSTEM_ADDRESS, gas_price=block_env.base_fee_per_gas, @@ -664,10 +669,15 @@ def process_system_transaction( accessed_storage_keys=set(), disable_precompiles=False, parent_evm=None, + transaction_state_changes=system_tx_state_changes, ) system_tx_output = process_message_call(system_tx_message) + # Merge system transaction changes back to block frame + # System transactions always succeed (or block is invalid) + merge_on_success(system_tx_state_changes) + return system_tx_output diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_cross_index.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_cross_index.py new file mode 100644 index 0000000000..920b8bc344 --- /dev/null +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_cross_index.py @@ -0,0 +1,260 @@ +""" +Tests for EIP-7928 BAL cross-index tracking. + +Tests that state changes are correctly tracked across different block indices: +- Index 1..N: Regular transactions +- Index N+1: Post-execution system operations + +Includes tests for system contracts (withdrawal/consolidation) cross-index +tracking and NOOP filtering behavior. +""" + +import pytest +from execution_testing import ( + Account, + Address, + Alloc, + BalAccountExpectation, + BalStorageChange, + BalStorageSlot, + Block, + BlockAccessListExpectation, + BlockchainTestFiller, + Bytecode, + Op, + Transaction, +) + +from .spec import ref_spec_7928 + +REFERENCE_SPEC_GIT_PATH = ref_spec_7928.git_path +REFERENCE_SPEC_VERSION = ref_spec_7928.version + +pytestmark = pytest.mark.valid_from("Amsterdam") + +WITHDRAWAL_REQUEST_ADDRESS = Address( + 0x00000961EF480EB55E80D19AD83579A64C007002 +) +CONSOLIDATION_REQUEST_ADDRESS = Address( + 0x0000BBDDC7CE488642FB579F8B00F3A590007251 +) + + +def test_bal_withdrawal_contract_cross_index( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test that the withdrawal system contract shows storage changes at both + index 1 (during transaction) and index 2 (during post-execution). + + This verifies that slots 0x01 and 0x03 are: + 1. Incremented during the transaction (index 1) + 2. 
Reset during post-execution (index 2) + """ + sender = pre.fund_eoa() + + withdrawal_calldata = ( + (b"\x01" + b"\x00" * 47) # validator pubkey + + (b"\x00" * 8) # amount + ) + + tx = Transaction( + sender=sender, + to=WITHDRAWAL_REQUEST_ADDRESS, + value=1, + data=withdrawal_calldata, + gas_limit=1_000_000, + ) + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + WITHDRAWAL_REQUEST_ADDRESS: BalAccountExpectation( + # slots 0x01 and 0x03 change at BOTH indices + storage_changes=[ + BalStorageSlot( + slot=0x01, # Request count + slot_changes=[ + BalStorageChange( + # Incremented during tx + tx_index=1, + post_value=1, + ), + BalStorageChange( + # Reset during post-exec + tx_index=2, + post_value=0, + ), + ], + ), + BalStorageSlot( + slot=0x03, # Target count + slot_changes=[ + BalStorageChange( + # Incremented during tx + tx_index=1, + post_value=1, + ), + BalStorageChange( + # Reset during post-exec + tx_index=2, + post_value=0, + ), + ], + ), + ], + ), + } + ), + ) + ], + post={}, + ) + + +def test_bal_consolidation_contract_cross_index( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test that the consolidation system contract shows storage changes at both + index 1 (during transaction) and index 2 (during post-execution). + """ + sender = pre.fund_eoa() + + consolidation_calldata = ( + (b"\x01" + b"\x00" * 47) # source pubkey + + (b"\x02" + b"\x00" * 47) # target pubkey + ) + + tx = Transaction( + sender=sender, + to=CONSOLIDATION_REQUEST_ADDRESS, + value=1, + data=consolidation_calldata, + gas_limit=1_000_000, + ) + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + CONSOLIDATION_REQUEST_ADDRESS: BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0x01, + slot_changes=[ + BalStorageChange( + # Incremented during tx + tx_index=1, + post_value=1, + ), + BalStorageChange( + # Reset during post-exec + tx_index=2, + post_value=0, + ), + ], + ), + BalStorageSlot( + slot=0x03, + slot_changes=[ + BalStorageChange( + # Incremented during tx + tx_index=1, + post_value=1, + ), + BalStorageChange( + # Reset during post-exec + tx_index=2, + post_value=0, + ), + ], + ), + ], + ), + } + ), + ) + ], + post={}, + ) + + +def test_bal_noop_write_filtering( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test that NOOP writes (writing same value or 0 to empty) are filtered. + + This verifies that: + 1. Writing 0 to an uninitialized slot doesn't appear in BAL + 2. Writing the same value to a slot doesn't appear in BAL + 3. 
Only actual changes are tracked + """ + test_code = Bytecode( + # Write 0 to uninitialized slot 1 (noop) + Op.SSTORE(1, 0) + # Write 42 to slot 2 + + Op.SSTORE(2, 42) + # Write 100 to slot 3 (will be same as pre-state, should be filtered) + + Op.SSTORE(3, 100) + # Write 200 to slot 4 (different from pre-state 150, should appear) + + Op.SSTORE(4, 200) + ) + + sender = pre.fund_eoa() + test_address = pre.deploy_contract( + code=test_code, + storage={3: 100, 4: 150}, + ) + + tx = Transaction( + sender=sender, + to=test_address, + gas_limit=100_000, + ) + + # Expected BAL should only show actual changes + expected_block_access_list = BlockAccessListExpectation( + account_expectations={ + test_address: BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=2, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=42), + ], + ), + BalStorageSlot( + slot=4, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=200), + ], + ), + ], + ), + } + ) + + block = Block( + txs=[tx], + expected_block_access_list=expected_block_access_list, + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + test_address: Account(storage={2: 42, 3: 100, 4: 200}), + }, + ) From 20321c8e4f1efeb047e53ada4a9608b9bf3a9d67 Mon Sep 17 00:00:00 2001 From: fselmo Date: Thu, 20 Nov 2025 22:58:11 -0300 Subject: [PATCH 032/154] fix(spec-specs): Track address at init collision --- src/ethereum/forks/amsterdam/vm/interpreter.py | 3 +++ .../eip7610_create_collision/test_initcollision.py | 13 +++++++++++++ 2 files changed, 16 insertions(+) diff --git a/src/ethereum/forks/amsterdam/vm/interpreter.py b/src/ethereum/forks/amsterdam/vm/interpreter.py index a63b745624..57f890a12e 100644 --- a/src/ethereum/forks/amsterdam/vm/interpreter.py +++ b/src/ethereum/forks/amsterdam/vm/interpreter.py @@ -173,6 +173,9 @@ def process_message_call(message: Message) -> MessageCallOutput: is_collision = account_has_code_or_nonce( block_env.state, message.current_target ) or account_has_storage(block_env.state, message.current_target) + track_address( + message.transaction_state_changes, message.current_target + ) if is_collision: return MessageCallOutput( Uint(0), diff --git a/tests/paris/eip7610_create_collision/test_initcollision.py b/tests/paris/eip7610_create_collision/test_initcollision.py index 2f5c42789f..8c25f64674 100644 --- a/tests/paris/eip7610_create_collision/test_initcollision.py +++ b/tests/paris/eip7610_create_collision/test_initcollision.py @@ -7,7 +7,10 @@ from execution_testing import ( Account, Alloc, + BalAccountExpectation, + BlockAccessListExpectation, Bytecode, + Fork, Initcode, Op, StateTestFiller, @@ -66,6 +69,7 @@ def test_init_collision_create_tx( collision_balance: int, collision_code: bytes, initcode: Bytecode, + fork: Fork, ) -> None: """ Test that a contract creation transaction exceptionally aborts when @@ -89,6 +93,14 @@ def test_init_collision_create_tx( code=collision_code, ) + expected_block_access_list = None + if fork.header_bal_hash_required(): + expected_block_access_list = BlockAccessListExpectation( + account_expectations={ + created_contract_address: BalAccountExpectation.empty() + } + ) + state_test( pre=pre, post={ @@ -97,6 +109,7 @@ def test_init_collision_create_tx( ), }, tx=tx, + expected_block_access_list=expected_block_access_list, ) From 771ad192fb6d02dd1b384ba1de37db10435ca2b0 Mon Sep 17 00:00:00 2001 From: fselmo Date: Thu, 20 Nov 2025 22:59:11 -0300 Subject: [PATCH 033/154] chore(spec-specs): Add Amsterdam docstring; update prepare msg --- 
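A minimal sketch of the transaction-processing call shape this patch settles
on, assuming the frame helpers introduced earlier in this series and the
package layout implied by the file paths (ethereum.forks.amsterdam.*);
balance normalization and self-destruct handling from process_transaction
are omitted:

    from ethereum.forks.amsterdam.state_tracker import (
        commit_transaction_frame,
        create_child_frame,
    )
    from ethereum.forks.amsterdam.utils.message import prepare_message
    from ethereum.forks.amsterdam.vm.interpreter import process_message_call


    def run_transaction(block_env, tx_env, tx):
        # Each transaction executes against its own child frame of the
        # block frame; prepare_message now takes that frame as a required
        # argument instead of having it set on the message afterwards.
        tx_state_changes = create_child_frame(block_env.block_state_changes)
        message = prepare_message(block_env, tx_env, tx, tx_state_changes)
        tx_output = process_message_call(message)
        # Per-transaction changes are then committed to the block frame.
        commit_transaction_frame(tx_state_changes)
        return tx_output

The system-call path mirrors this with its own child frame, merged back via
merge_on_success (see PATCH 031).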
src/ethereum/forks/amsterdam/__init__.py | 7 ++++++- src/ethereum/forks/amsterdam/fork.py | 9 ++++++--- src/ethereum/forks/amsterdam/utils/message.py | 5 +++++ src/ethereum/forks/amsterdam/vm/__init__.py | 2 +- src/ethereum/forks/amsterdam/vm/instructions/system.py | 1 + 5 files changed, 19 insertions(+), 5 deletions(-) diff --git a/src/ethereum/forks/amsterdam/__init__.py b/src/ethereum/forks/amsterdam/__init__.py index ed77d4700c..e6f3e9476a 100644 --- a/src/ethereum/forks/amsterdam/__init__.py +++ b/src/ethereum/forks/amsterdam/__init__.py @@ -1,9 +1,14 @@ """ -The Amsterdam fork ([EIP-7773]). +The Amsterdam fork ([EIP-7773]) includes block-level access lists. + +### Changes + +- [EIP-7928: Block-Level Access Lists][EIP-7928] ### Releases [EIP-7773]: https://eips.ethereum.org/EIPS/eip-7773 +[EIP-7928]: https://eips.ethereum.org/EIPS/eip-7928 """ from ethereum.fork_criteria import ForkCriteria, Unscheduled diff --git a/src/ethereum/forks/amsterdam/fork.py b/src/ethereum/forks/amsterdam/fork.py index f5a7081fd9..358f194f6e 100644 --- a/src/ethereum/forks/amsterdam/fork.py +++ b/src/ethereum/forks/amsterdam/fork.py @@ -995,9 +995,12 @@ def process_transaction( tx_hash=get_transaction_hash(encode_transaction(tx)), ) - message = prepare_message(block_env, tx_env, tx) - # Set transaction frame so call frames become children of it - message.transaction_state_changes = tx_state_changes + message = prepare_message( + block_env, + tx_env, + tx, + tx_state_changes, + ) tx_output = process_message_call(message) diff --git a/src/ethereum/forks/amsterdam/utils/message.py b/src/ethereum/forks/amsterdam/utils/message.py index 107cdcaf7a..def5b36e20 100644 --- a/src/ethereum/forks/amsterdam/utils/message.py +++ b/src/ethereum/forks/amsterdam/utils/message.py @@ -17,6 +17,7 @@ from ..fork_types import Address from ..state import get_account +from ..state_tracker import StateChanges from ..transactions import Transaction from ..vm import BlockEnvironment, Message, TransactionEnvironment from ..vm.precompiled_contracts.mapping import PRE_COMPILED_CONTRACTS @@ -27,6 +28,7 @@ def prepare_message( block_env: BlockEnvironment, tx_env: TransactionEnvironment, tx: Transaction, + transaction_state_changes: StateChanges, ) -> Message: """ Execute a transaction against the provided environment. @@ -39,6 +41,8 @@ def prepare_message( Environment for the transaction. tx : Transaction to be executed. + transaction_state_changes : + State changes specific to this transaction. 
Returns ------- @@ -87,4 +91,5 @@ def prepare_message( accessed_storage_keys=set(tx_env.access_list_storage_keys), disable_precompiles=False, parent_evm=None, + transaction_state_changes=transaction_state_changes, ) diff --git a/src/ethereum/forks/amsterdam/vm/__init__.py b/src/ethereum/forks/amsterdam/vm/__init__.py index 26b7e99e45..d414aa50f9 100644 --- a/src/ethereum/forks/amsterdam/vm/__init__.py +++ b/src/ethereum/forks/amsterdam/vm/__init__.py @@ -142,7 +142,7 @@ class Message: accessed_storage_keys: Set[Tuple[Address, Bytes32]] disable_precompiles: bool parent_evm: Optional["Evm"] - transaction_state_changes: Optional[StateChanges] = None + transaction_state_changes: StateChanges @dataclass diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index 3513e09a58..67666014e2 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -144,6 +144,7 @@ def generic_create( accessed_storage_keys=evm.accessed_storage_keys.copy(), disable_precompiles=False, parent_evm=evm, + transaction_state_changes=evm.message.transaction_state_changes, ) child_evm = process_create_message(child_message) From 080df168bc386029ccf41e8a8dc035d6e8edefb2 Mon Sep 17 00:00:00 2001 From: fselmo Date: Thu, 20 Nov 2025 23:30:18 -0300 Subject: [PATCH 034/154] chore: Add pre-amsterdam BAL tests to doc for tracking --- .../amsterdam/eip7928_block_level_access_lists/test_cases.md | 3 +++ .../cancun/eip6780_selfdestruct/test_selfdestruct_revert.py | 5 ++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 39d38a4d27..be92e54608 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -90,3 +90,6 @@ | `test_bal_storage_write_read_same_frame` | Ensure BAL captures write precedence over read in same call frame (writes shadow reads) | Alice calls `Oracle` which writes (`SSTORE`) value `0x42` to slot `0x01`, then reads (`SLOAD`) from slot `0x01` in the same call frame | BAL **MUST** include `Oracle` with slot `0x01` in `storage_changes` showing final value `0x42`. Slot `0x01` **MUST NOT** appear in `storage_reads` (write shadows the subsequent read in same frame). | ✅ Completed | | `test_bal_storage_write_read_cross_frame` | Ensure BAL captures write precedence over read across call frames (writes shadow reads cross-frame) | Alice calls `Oracle`. First call reads slot `0x01` (sees initial value), writes `0x42` to slot `0x01`, then calls itself (via `CALL`, `DELEGATECALL`, or `CALLCODE`). Second call reads slot `0x01` (sees `0x42`) and exits. | BAL **MUST** include `Oracle` with slot `0x01` in `storage_changes` showing final value `0x42`. Slot `0x01` **MUST NOT** appear in `storage_reads` (write shadows both the read before it in same frame and the read in the recursive call). | ✅ Completed | | `test_bal_create_transaction_empty_code` | Ensure BAL does not record spurious code changes for CREATE transaction deploying empty code | Alice sends CREATE transaction with empty initcode (deploys code `b""`). Contract address gets nonce = 1 and code = `b""`. | BAL **MUST** include Alice with `nonce_changes` and created contract with `nonce_changes` but **MUST NOT** include `code_changes` for contract. 
| ✅ Completed | +| `test_init_collision_create_tx` | Ensure BAL tracks CREATE collisions correctly (pre-Amsterdam test with BAL) | CREATE transaction targeting address with existing storage aborts | BAL **MUST** show empty expectations for collision address (no changes occur due to abort) | ✅ Completed | +| `test_call_to_pre_authorized_oog` | Ensure BAL handles OOG during EIP-7702 delegation access (pre-Amsterdam test with BAL) | Call to delegated account that OOGs before accessing delegation contract | BAL **MUST** include auth_signer (code read for delegation check) but **MUST NOT** include delegation contract (OOG before access) | ✅ Completed | +| `test_selfdestruct_created_in_same_tx_with_revert` | Ensure BAL tracks selfdestruct with revert correctly (pre-Amsterdam test with BAL) | Contract created and selfdestructed in same tx with nested revert | BAL **MUST** track storage reads and balance changes for selfdestruct even with reverts | ✅ Completed | diff --git a/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py b/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py index 2c7bbaea3e..59812174ef 100644 --- a/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py +++ b/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py @@ -441,7 +441,10 @@ def test_selfdestruct_created_in_same_tx_with_revert( # noqa SC200 selfdestruct_with_transfer_contract_address ] = BalAccountExpectation( storage_reads=[0, 1], # Storage was accessed - balance_changes=[], # No net balance change + nonce_changes=[], + balance_changes=[], + code_changes=[], + storage_changes=[], ) account_expectations[selfdestruct_recipient_address] = ( BalAccountExpectation( From 325b33849b4f700aeac646757840b85cd9be906f Mon Sep 17 00:00:00 2001 From: fselmo Date: Mon, 24 Nov 2025 13:50:04 -0700 Subject: [PATCH 035/154] fix(spec-specs): Calculate all gas we can before accessing state - Calculate all gas that we can without state access and check this gas before ever accessing state. This is the most sensible way for an implementation to behave and indeed was revealed to be the way clients are behaving, which differed from the specs. - Use fork.gas_costs to calculate gas costs, NOT hard-coded values. - Create a BAL expectation for the test that yielded discrepancies between clients and specs so this doesn't slip through again. Document this test in `test_cases.md`. 
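The ordering can be summarized with a small, self-contained sketch (illustrative names and a simplified cost model only; the real logic lives in the `system.py` hunks below and the constants come from `fork.gas_costs()`):

    from typing import Callable

    class OutOfGasError(Exception):
        pass

    # Illustrative post-Berlin values; the real code reads them from the
    # fork's gas schedule rather than hard-coding literals.
    GAS_COLD_ACCOUNT_ACCESS = 2_600
    GAS_CALL_VALUE = 9_000
    GAS_NEW_ACCOUNT = 25_000

    def check_gas(gas_left: int, cost: int) -> None:
        # Check-only: raises without consuming gas or touching state.
        if cost > gas_left:
            raise OutOfGasError

    def call_gas(gas_left: int, value: int, memory_cost: int,
                 account_is_alive: Callable[[], bool]) -> int:
        access_cost = GAS_COLD_ACCOUNT_ACCESS
        transfer_cost = 0 if value == 0 else GAS_CALL_VALUE
        # 1) Check every component computable *without* state access.
        static_cost = access_cost + transfer_cost + memory_cost
        check_gas(gas_left, static_cost)
        # 2) Only now read the target account; for CALL this is the
        #    aliveness check, so the target is recorded in the BAL even
        #    if the final check below still runs out of gas.
        new_account_cost = (
            0 if value == 0 or account_is_alive() else GAS_NEW_ACCOUNT
        )
        # 3) Re-check with the state-dependent component before charging.
        check_gas(gas_left, static_cost + new_account_cost)
        return static_cost + new_account_cost

    # Example: cold CALL with value to a dead account, with plenty of gas.
    assert call_gas(100_000, 1, 0, lambda: False) == 36_600

CALLCODE/DELEGATECALL/STATICCALL have no new-account component, so in the OOG scenarios of the test referenced above they fail at step 1 before the target account is ever read, which is why the target stays out of the BAL there while CALL still records it.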
--- .../forks/amsterdam/vm/instructions/system.py | 171 ++++++++++-------- .../test_cases.md | 1 + .../test_call_and_callcode_gas_calculation.py | 104 +++++++++-- 3 files changed, 187 insertions(+), 89 deletions(-) diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index 67666014e2..1fca8b1459 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -395,14 +395,22 @@ def call(evm: Evm) -> None: access_gas_cost = ( GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS ) + + transfer_gas_cost = Uint(0) if value == 0 else GAS_CALL_VALUE + + check_gas( + evm, + access_gas_cost + transfer_gas_cost + extend_memory.cost, + ) + + # need to access account to check if account is alive, check gas before + create_gas_cost = GAS_NEW_ACCOUNT + if value == 0 or is_account_alive(evm.message.block_env.state, to): + create_gas_cost = Uint(0) + if is_cold_access: evm.accessed_addresses.add(to) - # check gas for base access before reading `to` account - base_gas_cost = extend_memory.cost + access_gas_cost - check_gas(evm, base_gas_cost) - - # read `to` account and assess delegation cost ( is_delegated, original_address, @@ -410,35 +418,36 @@ def call(evm: Evm) -> None: delegation_gas_cost, ) = calculate_delegation_cost(evm, to) - # check gas again for delegation target access before reading it if is_delegated and delegation_gas_cost > Uint(0): - check_gas(evm, base_gas_cost + delegation_gas_cost) - - if is_delegated: assert delegated_address is not None + message_call_gas = calculate_message_call_gas( + value, + gas, + Uint(evm.gas_left), + extend_memory.cost, + access_gas_cost + + transfer_gas_cost + + create_gas_cost + + delegation_gas_cost, + ) + check_gas(evm, message_call_gas.cost + extend_memory.cost) code = read_delegation_target(evm, delegated_address) final_address = delegated_address else: + message_call_gas = calculate_message_call_gas( + value, + gas, + Uint(evm.gas_left), + extend_memory.cost, + access_gas_cost + create_gas_cost + transfer_gas_cost, + ) + check_gas(evm, message_call_gas.cost + extend_memory.cost) code = get_account(evm.message.block_env.state, to).code final_address = to - access_gas_cost += delegation_gas_cost - code_address = final_address disable_precompiles = is_delegated - create_gas_cost = GAS_NEW_ACCOUNT - if value == 0 or is_account_alive(evm.message.block_env.state, to): - create_gas_cost = Uint(0) - transfer_gas_cost = Uint(0) if value == 0 else GAS_CALL_VALUE - message_call_gas = calculate_message_call_gas( - value, - gas, - Uint(evm.gas_left), - extend_memory.cost, - access_gas_cost + create_gas_cost + transfer_gas_cost, - ) - charge_gas(evm, message_call_gas.cost + extend_memory.cost) if evm.message.is_static and value != U256(0): raise WriteInStaticContext @@ -509,11 +518,14 @@ def callcode(evm: Evm) -> None: if is_cold_access: evm.accessed_addresses.add(code_address) - # check gas for base access before reading `code_address` account - base_gas_cost = extend_memory.cost + access_gas_cost - check_gas(evm, base_gas_cost) + transfer_gas_cost = Uint(0) if value == 0 else GAS_CALL_VALUE + + check_gas( + evm, + access_gas_cost + extend_memory.cost + transfer_gas_cost, + ) - # read code_address account and assess delegation cost + # need to access account to get delegation code, check gas before ( is_delegated, original_address, @@ -521,32 +533,34 @@ def callcode(evm: Evm) -> None: delegation_gas_cost, ) = 
calculate_delegation_cost(evm, code_address) - # check gas again for delegation target access before reading it if is_delegated and delegation_gas_cost > Uint(0): - check_gas(evm, base_gas_cost + delegation_gas_cost) - - if is_delegated: assert delegated_address is not None + # Recalculate with delegation cost and check gas + message_call_gas = calculate_message_call_gas( + value, + gas, + Uint(evm.gas_left), + extend_memory.cost, + access_gas_cost + transfer_gas_cost + delegation_gas_cost, + ) + check_gas(evm, message_call_gas.cost + extend_memory.cost) code = read_delegation_target(evm, delegated_address) final_address = delegated_address else: + message_call_gas = calculate_message_call_gas( + value, + gas, + Uint(evm.gas_left), + extend_memory.cost, + access_gas_cost + transfer_gas_cost, + ) + check_gas(evm, message_call_gas.cost + extend_memory.cost) code = get_account(evm.message.block_env.state, code_address).code final_address = code_address - access_gas_cost += delegation_gas_cost - code_address = final_address disable_precompiles = is_delegated - transfer_gas_cost = Uint(0) if value == 0 else GAS_CALL_VALUE - message_call_gas = calculate_message_call_gas( - value, - gas, - Uint(evm.gas_left), - extend_memory.cost, - access_gas_cost + transfer_gas_cost, - ) - charge_gas(evm, message_call_gas.cost + extend_memory.cost) # OPERATION @@ -688,15 +702,12 @@ def delegatecall(evm: Evm) -> None: access_gas_cost = ( GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS ) - - # check gas for base access before reading `code_address` account - base_gas_cost = extend_memory.cost + access_gas_cost - check_gas(evm, base_gas_cost) - if is_cold_access: evm.accessed_addresses.add(code_address) - # read `code_address` account and assess delegation cost + check_gas(evm, access_gas_cost + extend_memory.cost) + + # need to access account to get delegation code, check gas before ( is_delegated, original_address, @@ -704,28 +715,33 @@ def delegatecall(evm: Evm) -> None: delegation_gas_cost, ) = calculate_delegation_cost(evm, code_address) - # check gas again for delegation target access before reading it if is_delegated and delegation_gas_cost > Uint(0): - check_gas(evm, base_gas_cost + delegation_gas_cost) - - # Now safe to read delegation target since we verified gas - if is_delegated: assert delegated_address is not None + message_call_gas = calculate_message_call_gas( + U256(0), + gas, + Uint(evm.gas_left), + extend_memory.cost, + access_gas_cost + delegation_gas_cost, + ) + check_gas(evm, message_call_gas.cost + extend_memory.cost) code = read_delegation_target(evm, delegated_address) final_address = delegated_address else: + message_call_gas = calculate_message_call_gas( + U256(0), + gas, + Uint(evm.gas_left), + extend_memory.cost, + access_gas_cost, + ) + check_gas(evm, message_call_gas.cost + extend_memory.cost) code = get_account(evm.message.block_env.state, code_address).code final_address = code_address - access_gas_cost += delegation_gas_cost - code_address = final_address disable_precompiles = is_delegated - message_call_gas = calculate_message_call_gas( - U256(0), gas, Uint(evm.gas_left), extend_memory.cost, access_gas_cost - ) - charge_gas(evm, message_call_gas.cost + extend_memory.cost) # OPERATION @@ -785,11 +801,9 @@ def staticcall(evm: Evm) -> None: if is_cold_access: evm.accessed_addresses.add(to) - # check gas for base access before reading `to` account - base_gas_cost = extend_memory.cost + access_gas_cost - check_gas(evm, base_gas_cost) + check_gas(evm, access_gas_cost + 
extend_memory.cost) - # read `to` account and assess delegation cost + # need to access account to get delegation code, check gas before ( is_delegated, original_address, @@ -797,32 +811,33 @@ def staticcall(evm: Evm) -> None: delegation_gas_cost, ) = calculate_delegation_cost(evm, to) - # check gas again for delegation target access before reading it if is_delegated and delegation_gas_cost > Uint(0): - check_gas(evm, base_gas_cost + delegation_gas_cost) - - # Now safe to read delegation target since we verified gas - if is_delegated: assert delegated_address is not None + message_call_gas = calculate_message_call_gas( + U256(0), + gas, + Uint(evm.gas_left), + extend_memory.cost, + access_gas_cost + delegation_gas_cost, + ) + check_gas(evm, message_call_gas.cost + extend_memory.cost) code = read_delegation_target(evm, delegated_address) final_address = delegated_address else: + message_call_gas = calculate_message_call_gas( + U256(0), + gas, + Uint(evm.gas_left), + extend_memory.cost, + access_gas_cost, + ) + check_gas(evm, message_call_gas.cost + extend_memory.cost) code = get_account(evm.message.block_env.state, to).code final_address = to - access_gas_cost += delegation_gas_cost - code_address = final_address disable_precompiles = is_delegated - message_call_gas = calculate_message_call_gas( - U256(0), - gas, - Uint(evm.gas_left), - extend_memory.cost, - access_gas_cost, - ) - charge_gas(evm, message_call_gas.cost + extend_memory.cost) # OPERATION diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index be92e54608..fd843a4123 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -93,3 +93,4 @@ | `test_init_collision_create_tx` | Ensure BAL tracks CREATE collisions correctly (pre-Amsterdam test with BAL) | CREATE transaction targeting address with existing storage aborts | BAL **MUST** show empty expectations for collision address (no changes occur due to abort) | ✅ Completed | | `test_call_to_pre_authorized_oog` | Ensure BAL handles OOG during EIP-7702 delegation access (pre-Amsterdam test with BAL) | Call to delegated account that OOGs before accessing delegation contract | BAL **MUST** include auth_signer (code read for delegation check) but **MUST NOT** include delegation contract (OOG before access) | ✅ Completed | | `test_selfdestruct_created_in_same_tx_with_revert` | Ensure BAL tracks selfdestruct with revert correctly (pre-Amsterdam test with BAL) | Contract created and selfdestructed in same tx with nested revert | BAL **MUST** track storage reads and balance changes for selfdestruct even with reverts | ✅ Completed | +| `test_value_transfer_gas_calculation` | Ensure BAL correctly tracks OOG scenarios for CALL/CALLCODE/DELEGATECALL/STATICCALL (pre-Amsterdam test with BAL) | Nested calls with precise gas limits to test OOG behavior. For CALL with OOG: target account read for `is_account_alive` check. For CALLCODE/DELEGATECALL/STATICCALL with OOG: target account **NOT** read (OOG before state access) | For CALL: target in BAL even with OOG. 
For CALLCODE/DELEGATECALL/STATICCALL: target **NOT** in BAL when OOG (state access deferred until after gas check) | ✅ Completed | diff --git a/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py b/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py index abc9d9a28c..015217521d 100644 --- a/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py +++ b/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py @@ -40,6 +40,11 @@ Account, Address, Alloc, + BalAccountExpectation, + BalBalanceChange, + BalStorageChange, + BalStorageSlot, + BlockAccessListExpectation, Bytecode, Environment, Op, @@ -71,33 +76,42 @@ def sufficient_gas( Calculate the sufficient gas for the nested call opcode with positive value transfer. """ - # memory_exp_cost is zero for our case. + gas_costs = fork.gas_costs() + cost = 0 if fork >= Berlin: - cost += 2600 # call and address_access_cost + cost += gas_costs.G_COLD_ACCOUNT_ACCESS elif Byzantium <= fork < Berlin: - cost += 700 # call + cost += 700 # Pre-Berlin warm call cost elif fork == Homestead: - cost += 40 # call + cost += 40 # Homestead call cost cost += 1 # mandatory callee gas allowance else: raise Exception("Only forks Homestead and >=Byzantium supported") is_value_call = callee_opcode in [Op.CALL, Op.CALLCODE] if is_value_call: - cost += 9000 # positive_value_cost + cost += gas_costs.G_CALL_VALUE if callee_opcode == Op.CALL: - cost += 25000 # empty_account_cost + cost += gas_costs.G_NEW_ACCOUNT - cost += callee_init_stack_gas + sufficient = callee_init_stack_gas + cost - return cost + return sufficient @pytest.fixture -def callee_code(pre: Alloc, callee_opcode: Op, fork: Fork) -> Bytecode: +def empty_account(pre: Alloc) -> Address: + """A guaranteed-to-be-empty account.""" + return pre.empty_account() + + +@pytest.fixture +def callee_code( + callee_opcode: Op, fork: Fork, empty_account: Address +) -> Bytecode: """ Code called by the caller contract: PUSH1 0x00 * 4 @@ -119,7 +133,7 @@ def callee_code(pre: Alloc, callee_opcode: Op, fork: Fork) -> Bytecode: return callee_opcode( unchecked=False, gas=1 if fork < Byzantium else Op.GAS, - address=pre.empty_account(), + address=empty_account, args_offset=0, args_size=0, ret_offset=0, @@ -197,6 +211,67 @@ def post( # noqa: D103 } +@pytest.fixture +def expected_block_access_list( + fork: Fork, + caller_address: Address, + callee_address: Address, + callee_opcode: Bytecode, + empty_account: Account, + gas_shortage: int, +) -> None | BlockAccessListExpectation: + """The expected block access list for >=Amsterdam cases.""" + if fork.header_bal_hash_required(): + if callee_opcode == Op.CALL: + if gas_shortage: + # call runs OOG after state access due to `is_account_alive` in + # `create_gas_cost` check + empty_account_expectation = BalAccountExpectation.empty() + else: + empty_account_expectation = BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=1) + ] + ) + else: + if gas_shortage: + # runs OOG before accessing empty acct (not read) + empty_account_expectation = None + else: + # if successful, only read is recorded + empty_account_expectation = BalAccountExpectation.empty() + + return BlockAccessListExpectation( + account_expectations={ + empty_account: empty_account_expectation, + caller_address: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=4) + ], + storage_reads=[0] if gas_shortage else [], + storage_changes=[ + BalStorageSlot( + slot=0x00, + slot_changes=[ + BalStorageChange(tx_index=1, 
post_value=1), + ], + ), + ] + if not gas_shortage + else [], + ), + callee_address: BalAccountExpectation( + balance_changes=( + [BalBalanceChange(tx_index=1, post_balance=2)] + if not gas_shortage and callee_opcode == Op.CALL + else [] + ), + ), + } + ) + return None + + @pytest.mark.parametrize( "callee_opcode", [Op.CALL, Op.CALLCODE, Op.DELEGATECALL, Op.STATICCALL] ) @@ -207,12 +282,19 @@ def test_value_transfer_gas_calculation( pre: Alloc, caller_tx: Transaction, post: Dict[str, Account], + expected_block_access_list: BlockAccessListExpectation, ) -> None: """ Tests the nested CALL/CALLCODE/DELEGATECALL/STATICCALL opcode gas consumption with a positive value transfer. """ - state_test(env=Environment(), pre=pre, post=post, tx=caller_tx) + state_test( + env=Environment(), + pre=pre, + post=post, + tx=caller_tx, + expected_block_access_list=expected_block_access_list, + ) @pytest.mark.parametrize( From 09b152472305819eec71a91abb1b00e01832557e Mon Sep 17 00:00:00 2001 From: felipe Date: Tue, 25 Nov 2025 20:36:27 +0000 Subject: [PATCH 036/154] fix(test-tools): Remove named forks from blobSchedule; turn off BPOs --- .../plugins/consume/simulators/helpers/ruleset.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/ruleset.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/ruleset.py index 87d3783ce9..bcd8cd891d 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/ruleset.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/ruleset.py @@ -296,7 +296,6 @@ def get_blob_schedule_entries(fork: Fork) -> Dict[str, int]: "HIVE_CANCUN_TIMESTAMP": 0, "HIVE_PRAGUE_TIMESTAMP": 0, "HIVE_OSAKA_TIMESTAMP": 0, - **get_blob_schedule_entries(Osaka), }, PragueToOsakaAtTime15k: { "HIVE_FORK_HOMESTEAD": 0, @@ -314,7 +313,6 @@ def get_blob_schedule_entries(fork: Fork) -> Dict[str, int]: "HIVE_CANCUN_TIMESTAMP": 0, "HIVE_PRAGUE_TIMESTAMP": 0, "HIVE_OSAKA_TIMESTAMP": 15000, - **get_blob_schedule_entries(Osaka), }, BPO1: { "HIVE_FORK_HOMESTEAD": 0, @@ -504,6 +502,5 @@ def get_blob_schedule_entries(fork: Fork) -> Dict[str, int]: # "HIVE_BPO3_TIMESTAMP": 0, # "HIVE_BPO4_TIMESTAMP": 0, "HIVE_AMSTERDAM_TIMESTAMP": 0, - **get_blob_schedule_entries(Amsterdam), }, } From 4d36e86f28cb9439a8262f93a35c5a50c7b7e0db Mon Sep 17 00:00:00 2001 From: felix Date: Wed, 26 Nov 2025 10:24:46 +0000 Subject: [PATCH 037/154] fix(cli): add bal exception for erigon (#1809) --- .../testing/src/execution_testing/client_clis/clis/erigon.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/testing/src/execution_testing/client_clis/clis/erigon.py b/packages/testing/src/execution_testing/client_clis/clis/erigon.py index f99462d1d0..8e1f51b851 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/erigon.py +++ b/packages/testing/src/execution_testing/client_clis/clis/erigon.py @@ -57,6 +57,7 @@ class ErigonExceptionMapper(ExceptionMapper): BlockException.INVALID_LOG_BLOOM: "invalid bloom", } mapping_regex = { + BlockException.INVALID_BLOCK_ACCESS_LIST: r"invalid block access list|block access list mismatch", TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: ( r"invalid block, txnIdx=\d+,.*gas limit too high" ), From 05ca48fb91f14de7a416a7f7a8b979ac4c34733f Mon Sep 17 00:00:00 2001 From: fselmo Date: Wed, 26 Nov 2025 09:46:27 -0700 Subject: [PATCH 038/154] feat(test): Better 
describe the BAL for selfdestruct revert --- .../test_selfdestruct_revert.py | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py b/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py index 59812174ef..ab2d22f112 100644 --- a/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py +++ b/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py @@ -10,6 +10,10 @@ Alloc, BalAccountExpectation, BalBalanceChange, + BalCodeChange, + BalNonceChange, + BalStorageChange, + BalStorageSlot, BlockAccessListExpectation, Bytecode, Environment, @@ -458,6 +462,31 @@ def test_selfdestruct_created_in_same_tx_with_revert( # noqa SC200 ], ) ) + else: + account_expectations[ + selfdestruct_with_transfer_contract_address + ] = BalAccountExpectation( + storage_reads=[1], + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + balance_changes=[BalBalanceChange(tx_index=1, post_balance=1)], + code_changes=[ + BalCodeChange( + tx_index=1, + new_code=selfdestruct_with_transfer_contract_code, + ), + ], + storage_changes=[ + BalStorageSlot( + slot=0, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=1), + ], + ), + ], + ) + account_expectations[selfdestruct_recipient_address] = ( + BalAccountExpectation.empty() + ) expected_block_access_list = BlockAccessListExpectation( account_expectations=account_expectations From b5d922e36e5f2ee1092349d19ccb8cf65ebbb4fa Mon Sep 17 00:00:00 2001 From: Stefan Date: Sun, 30 Nov 2025 15:50:31 +0100 Subject: [PATCH 039/154] test(eip7928): add EXTCODECOPY OOG memory expansion BAL test --- .../test_block_access_lists_opcodes.py | 90 +++++++++++++++++++ .../test_cases.md | 1 + 2 files changed, 91 insertions(+) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py index b849137c58..4df25a0869 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py @@ -611,6 +611,96 @@ def test_bal_extcodecopy_and_oog( ) +@pytest.mark.parametrize( + "memory_offset,copy_size,gas_shortfall", + [ + pytest.param(0x10000, 32, "large", id="large_offset"), + pytest.param(256, 32, "boundary", id="boundary"), + ], +) +def test_bal_extcodecopy_oog_at_memory_expansion( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + memory_offset: int, + copy_size: int, + gas_shortfall: str, +) -> None: + """ + Test EXTCODECOPY OOG at memory expansion - target should NOT appear in BAL. + + Gas for all components (cold access + copy + memory expansion) must be + checked BEFORE recording account access. 
+ """ + alice = pre.fund_eoa() + gas_costs = fork.gas_costs() + + target_contract = pre.deploy_contract(code=Bytecode(Op.STOP)) + + # Build EXTCODECOPY contract with appropriate PUSH sizes + if memory_offset <= 0xFF: + dest_push = Op.PUSH1(memory_offset) + elif memory_offset <= 0xFFFF: + dest_push = Op.PUSH2(memory_offset) + else: + dest_push = Op.PUSH3(memory_offset) + + extcodecopy_contract_code = Bytecode( + Op.PUSH1(copy_size) + + Op.PUSH1(0) + + dest_push + + Op.PUSH20(target_contract) + + Op.EXTCODECOPY + + Op.STOP + ) + + extcodecopy_contract = pre.deploy_contract(code=extcodecopy_contract_code) + + intrinsic_gas_calculator = fork.transaction_intrinsic_cost_calculator() + intrinsic_gas_cost = intrinsic_gas_calculator() + + push_cost = gas_costs.G_VERY_LOW * 4 + cold_access_cost = gas_costs.G_COLD_ACCOUNT_ACCESS + copy_cost = gas_costs.G_COPY * ((copy_size + 31) // 32) + + if gas_shortfall == "large": + # Provide gas for push + cold access + copy, but NOT memory expansion + execution_cost = push_cost + cold_access_cost + copy_cost + tx_gas_limit = intrinsic_gas_cost + execution_cost + else: + # Calculate memory cost and provide exactly 1 less than needed + words = (memory_offset + copy_size + 31) // 32 + memory_cost = (words * gas_costs.G_MEMORY) + (words * words // 512) + execution_cost = push_cost + cold_access_cost + copy_cost + memory_cost + tx_gas_limit = intrinsic_gas_cost + execution_cost - 1 + + tx = Transaction( + sender=alice, + to=extcodecopy_contract, + gas_limit=tx_gas_limit, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + extcodecopy_contract: BalAccountExpectation.empty(), + target_contract: None, + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + extcodecopy_contract: Account(), + target_contract: Account(), + }, + ) + + def test_bal_storage_write_read_same_frame( pre: Alloc, blockchain_test: BlockchainTestFiller, diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index fd843a4123..9613a8f2c7 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -94,3 +94,4 @@ | `test_call_to_pre_authorized_oog` | Ensure BAL handles OOG during EIP-7702 delegation access (pre-Amsterdam test with BAL) | Call to delegated account that OOGs before accessing delegation contract | BAL **MUST** include auth_signer (code read for delegation check) but **MUST NOT** include delegation contract (OOG before access) | ✅ Completed | | `test_selfdestruct_created_in_same_tx_with_revert` | Ensure BAL tracks selfdestruct with revert correctly (pre-Amsterdam test with BAL) | Contract created and selfdestructed in same tx with nested revert | BAL **MUST** track storage reads and balance changes for selfdestruct even with reverts | ✅ Completed | | `test_value_transfer_gas_calculation` | Ensure BAL correctly tracks OOG scenarios for CALL/CALLCODE/DELEGATECALL/STATICCALL (pre-Amsterdam test with BAL) | Nested calls with precise gas limits to test OOG behavior. For CALL with OOG: target account read for `is_account_alive` check. For CALLCODE/DELEGATECALL/STATICCALL with OOG: target account **NOT** read (OOG before state access) | For CALL: target in BAL even with OOG. 
For CALLCODE/DELEGATECALL/STATICCALL: target **NOT** in BAL when OOG (state access deferred until after gas check) | ✅ Completed | +| `test_bal_extcodecopy_oog_at_memory_expansion` | Ensure BAL excludes target when EXTCODECOPY OOGs at memory expansion | Parameterized: (1) large_offset: 64KB memory offset, gas covers cold access + copy but NOT memory expansion. (2) boundary: 256 byte offset, gas is exactly 1 less than needed. | BAL **MUST NOT** include target contract. Gas must be checked for ALL components (cold access + copy + memory expansion) BEFORE recording account access. | ✅ Completed | From 854ee190900ef5652d07df5fcf80deb26d5e4eb4 Mon Sep 17 00:00:00 2001 From: fselmo Date: Mon, 1 Dec 2025 09:58:44 -0700 Subject: [PATCH 040/154] refactor(test-tests): parametrize existing test oog case instead --- .../test_block_access_lists_opcodes.py | 151 +++++++----------- .../test_cases.md | 3 +- 2 files changed, 57 insertions(+), 97 deletions(-) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py index 4df25a0869..99ad5dcddb 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py @@ -529,105 +529,42 @@ def test_bal_delegatecall_and_oog( @pytest.mark.parametrize( - "fails_at_extcodecopy", - [True, False], - ids=["oog_at_extcodecopy", "successful_extcodecopy"], -) -def test_bal_extcodecopy_and_oog( - pre: Alloc, - blockchain_test: BlockchainTestFiller, - fork: Fork, - fails_at_extcodecopy: bool, -) -> None: - """ - Ensure BAL handles EXTCODECOPY and OOG during EXTCODECOPY appropriately. - """ - alice = pre.fund_eoa() - gas_costs = fork.gas_costs() - - # Create target contract with some code - target_contract = pre.deploy_contract( - code=Bytecode(Op.PUSH1(0x42) + Op.STOP) - ) - - # Create contract that attempts to copy code from target - extcodecopy_contract_code = Bytecode( - Op.PUSH1(0) # size - copy 0 bytes to minimize memory expansion cost - + Op.PUSH1(0) # codeOffset - + Op.PUSH1(0) # destOffset - + Op.PUSH20(target_contract) # address - + Op.EXTCODECOPY # Copy code (cold access + base cost) - + Op.STOP - ) - - extcodecopy_contract = pre.deploy_contract(code=extcodecopy_contract_code) - - intrinsic_gas_calculator = fork.transaction_intrinsic_cost_calculator() - intrinsic_gas_cost = intrinsic_gas_calculator() - - # Costs: - # - 4 PUSH operations = G_VERY_LOW * 4 - # - EXTCODECOPY cold = G_COLD_ACCOUNT_ACCESS + (G_COPY * words) - # where words = ceil32(size) // 32 = ceil32(0) // 32 = 0 - push_cost = gas_costs.G_VERY_LOW * 4 - extcodecopy_cold_cost = ( - gas_costs.G_COLD_ACCOUNT_ACCESS - ) # + (G_COPY * 0) = 0 - tx_gas_limit = intrinsic_gas_cost + push_cost + extcodecopy_cold_cost - - if fails_at_extcodecopy: - # subtract 1 gas to ensure OOG at EXTCODECOPY - tx_gas_limit -= 1 - - tx = Transaction( - sender=alice, - to=extcodecopy_contract, - gas_limit=tx_gas_limit, - ) - - block = Block( - txs=[tx], - expected_block_access_list=BlockAccessListExpectation( - account_expectations={ - extcodecopy_contract: BalAccountExpectation.empty(), - # Target should only appear if EXTCODECOPY succeeded - **( - {target_contract: None} - if fails_at_extcodecopy - else {target_contract: BalAccountExpectation.empty()} - ), - } - ), - ) - - blockchain_test( - pre=pre, - blocks=[block], - post={ - alice: Account(nonce=1), - extcodecopy_contract: Account(), 
- target_contract: Account(), - }, - ) - - -@pytest.mark.parametrize( - "memory_offset,copy_size,gas_shortfall", + "oog_scenario,memory_offset,copy_size", [ - pytest.param(0x10000, 32, "large", id="large_offset"), - pytest.param(256, 32, "boundary", id="boundary"), + pytest.param("success", 0, 0, id="successful_extcodecopy"), + pytest.param("oog_at_cold_access", 0, 0, id="oog_at_cold_access"), + pytest.param( + "oog_at_memory_large_offset", + 0x10000, + 32, + id="oog_at_memory_large_offset", + ), + pytest.param( + "oog_at_memory_boundary", + 256, + 32, + id="oog_at_memory_boundary", + ), ], ) -def test_bal_extcodecopy_oog_at_memory_expansion( +def test_bal_extcodecopy_and_oog( pre: Alloc, blockchain_test: BlockchainTestFiller, fork: Fork, + oog_scenario: str, memory_offset: int, copy_size: int, - gas_shortfall: str, ) -> None: """ - Test EXTCODECOPY OOG at memory expansion - target should NOT appear in BAL. + Ensure BAL handles EXTCODECOPY and OOG during EXTCODECOPY appropriately. + + Tests various OOG scenarios: + - success: EXTCODECOPY completes, target appears in BAL + - oog_at_cold_access: OOG before cold access, target NOT in BAL + - oog_at_memory_large_offset: OOG at memory expansion (large offset), + target NOT in BAL + - oog_at_memory_boundary: OOG at memory expansion (boundary case), + target NOT in BAL Gas for all components (cold access + copy + memory expansion) must be checked BEFORE recording account access. @@ -635,7 +572,10 @@ def test_bal_extcodecopy_oog_at_memory_expansion( alice = pre.fund_eoa() gas_costs = fork.gas_costs() - target_contract = pre.deploy_contract(code=Bytecode(Op.STOP)) + # Create target contract with some code + target_contract = pre.deploy_contract( + code=Bytecode(Op.PUSH1(0x42) + Op.STOP) + ) # Build EXTCODECOPY contract with appropriate PUSH sizes if memory_offset <= 0xFF: @@ -647,8 +587,8 @@ def test_bal_extcodecopy_oog_at_memory_expansion( extcodecopy_contract_code = Bytecode( Op.PUSH1(copy_size) - + Op.PUSH1(0) - + dest_push + + Op.PUSH1(0) # codeOffset + + dest_push # destOffset + Op.PUSH20(target_contract) + Op.EXTCODECOPY + Op.STOP @@ -659,20 +599,37 @@ def test_bal_extcodecopy_oog_at_memory_expansion( intrinsic_gas_calculator = fork.transaction_intrinsic_cost_calculator() intrinsic_gas_cost = intrinsic_gas_calculator() + # Calculate costs push_cost = gas_costs.G_VERY_LOW * 4 cold_access_cost = gas_costs.G_COLD_ACCOUNT_ACCESS copy_cost = gas_costs.G_COPY * ((copy_size + 31) // 32) - if gas_shortfall == "large": + if oog_scenario == "success": + # Provide enough gas for everything including memory expansion + words = (memory_offset + copy_size + 31) // 32 + memory_cost = (words * gas_costs.G_MEMORY) + (words * words // 512) + execution_cost = push_cost + cold_access_cost + copy_cost + memory_cost + tx_gas_limit = intrinsic_gas_cost + execution_cost + target_in_bal = True + elif oog_scenario == "oog_at_cold_access": + # Provide gas for pushes but 1 less than cold access cost + execution_cost = push_cost + cold_access_cost + tx_gas_limit = intrinsic_gas_cost + execution_cost - 1 + target_in_bal = False + elif oog_scenario == "oog_at_memory_large_offset": # Provide gas for push + cold access + copy, but NOT memory expansion execution_cost = push_cost + cold_access_cost + copy_cost tx_gas_limit = intrinsic_gas_cost + execution_cost - else: + target_in_bal = False + elif oog_scenario == "oog_at_memory_boundary": # Calculate memory cost and provide exactly 1 less than needed words = (memory_offset + copy_size + 31) // 32 memory_cost = (words * 
gas_costs.G_MEMORY) + (words * words // 512) execution_cost = push_cost + cold_access_cost + copy_cost + memory_cost tx_gas_limit = intrinsic_gas_cost + execution_cost - 1 + target_in_bal = False + else: + raise ValueError(f"Invariant: unknown oog_scenario {oog_scenario}") tx = Transaction( sender=alice, @@ -685,7 +642,11 @@ def test_bal_extcodecopy_oog_at_memory_expansion( expected_block_access_list=BlockAccessListExpectation( account_expectations={ extcodecopy_contract: BalAccountExpectation.empty(), - target_contract: None, + **( + {target_contract: BalAccountExpectation.empty()} + if target_in_bal + else {target_contract: None} + ), } ), ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 9613a8f2c7..5c7bcb42b4 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -49,7 +49,7 @@ | `test_bal_extcodesize_and_oog` | Ensure BAL handles OOG during EXTCODESIZE opcode execution correctly | Alice calls contract that attempts `EXTCODESIZE` opcode on cold target contract. Parameterized: (1) OOG at EXTCODESIZE opcode (insufficient gas), (2) Successful EXTCODESIZE execution. | For OOG case: BAL **MUST NOT** include target contract (wasn't accessed). For success case: BAL **MUST** include target contract in `account_changes`. | ✅ Completed | | `test_bal_call_and_oog` | Ensure BAL handles OOG during CALL opcode execution correctly | Alice calls contract that attempts `CALL` to cold target contract. Parameterized: (1) OOG at CALL opcode (insufficient gas), (2) Successful CALL execution. | For OOG case: BAL **MUST NOT** include target contract (wasn't accessed). For success case: BAL **MUST** include target contract in `account_changes`. | ✅ Completed | | `test_bal_delegatecall_and_oog` | Ensure BAL handles OOG during DELEGATECALL opcode execution correctly | Alice calls contract that attempts `DELEGATECALL` to cold target contract. Parameterized: (1) OOG at DELEGATECALL opcode (insufficient gas), (2) Successful DELEGATECALL execution. | For OOG case: BAL **MUST NOT** include target contract (wasn't accessed). For success case: BAL **MUST** include target contract in `account_changes`. | ✅ Completed | -| `test_bal_extcodecopy_and_oog` | Ensure BAL handles OOG during EXTCODECOPY opcode execution correctly | Alice calls contract that attempts `EXTCODECOPY` from cold target contract. Parameterized: (1) OOG at EXTCODECOPY opcode (insufficient gas), (2) Successful EXTCODECOPY execution. | For OOG case: BAL **MUST NOT** include target contract (wasn't accessed). For success case: BAL **MUST** include target contract in `account_changes`. | ✅ Completed | +| `test_bal_extcodecopy_and_oog` | Ensure BAL handles OOG during EXTCODECOPY at various failure points | Alice calls contract that attempts `EXTCODECOPY` from cold target contract. Parameterized: (1) Successful EXTCODECOPY, (2) OOG at cold access (insufficient gas for account access), (3) OOG at memory expansion with large offset (64KB offset, gas covers cold access + copy but NOT memory expansion), (4) OOG at memory expansion boundary (256 byte offset, gas is exactly 1 less than needed). | For success case: BAL **MUST** include target contract. For all OOG cases: BAL **MUST NOT** include target contract. Gas for ALL components (cold access + copy + memory expansion) must be checked BEFORE recording account access. 
| ✅ Completed | | `test_bal_oog_7702_delegated_cold_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when both accounts are cold | Alice calls cold delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (first cold load succeeds) but **MUST NOT** include `TargetContract` (second cold load fails due to OOG) | 🟡 Planned | | `test_bal_oog_7702_delegated_warm_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when first account is warm, second is cold | Alice calls warm delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (warm load succeeds) but **MUST NOT** include `TargetContract` (cold load fails due to OOG) | 🟡 Planned | | `test_bal_multiple_balance_changes_same_account` | Ensure BAL tracks multiple balance changes to same account across transactions | Alice funds Bob (starts at 0) in tx0 with exact amount needed. Bob spends everything in tx1 to Charlie. Bob's balance: 0 → funding_amount → 0 | BAL **MUST** include Bob with two `balance_changes`: one at txIndex=1 (receives funds) and one at txIndex=2 (balance returns to 0). This tests balance tracking across two transactions. | ✅ Completed | @@ -94,4 +94,3 @@ | `test_call_to_pre_authorized_oog` | Ensure BAL handles OOG during EIP-7702 delegation access (pre-Amsterdam test with BAL) | Call to delegated account that OOGs before accessing delegation contract | BAL **MUST** include auth_signer (code read for delegation check) but **MUST NOT** include delegation contract (OOG before access) | ✅ Completed | | `test_selfdestruct_created_in_same_tx_with_revert` | Ensure BAL tracks selfdestruct with revert correctly (pre-Amsterdam test with BAL) | Contract created and selfdestructed in same tx with nested revert | BAL **MUST** track storage reads and balance changes for selfdestruct even with reverts | ✅ Completed | | `test_value_transfer_gas_calculation` | Ensure BAL correctly tracks OOG scenarios for CALL/CALLCODE/DELEGATECALL/STATICCALL (pre-Amsterdam test with BAL) | Nested calls with precise gas limits to test OOG behavior. For CALL with OOG: target account read for `is_account_alive` check. For CALLCODE/DELEGATECALL/STATICCALL with OOG: target account **NOT** read (OOG before state access) | For CALL: target in BAL even with OOG. For CALLCODE/DELEGATECALL/STATICCALL: target **NOT** in BAL when OOG (state access deferred until after gas check) | ✅ Completed | -| `test_bal_extcodecopy_oog_at_memory_expansion` | Ensure BAL excludes target when EXTCODECOPY OOGs at memory expansion | Parameterized: (1) large_offset: 64KB memory offset, gas covers cold access + copy but NOT memory expansion. (2) boundary: 256 byte offset, gas is exactly 1 less than needed. | BAL **MUST NOT** include target contract. Gas must be checked for ALL components (cold access + copy + memory expansion) BEFORE recording account access. 
| ✅ Completed | From 355c575c5e4cd24c8d3ef7e62f95108b5aff3490 Mon Sep 17 00:00:00 2001 From: Stefan Date: Mon, 1 Dec 2025 17:09:31 +0100 Subject: [PATCH 041/154] test(eip7928): add cross-block precompile state leak test --- .../test_block_access_lists.py | 77 +++++++++++++++++++ .../test_cases.md | 1 + 2 files changed, 78 insertions(+) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py index 53ae688410..9b525b661d 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py @@ -2206,3 +2206,80 @@ def test_bal_cross_tx_storage_revert_to_zero( contract: Account(storage={0: 0x0}), }, ) + + +# RIPEMD-160 precompile address (used in Parity Touch Bug test) +RIPEMD_160 = Address(0x03) + + +def test_bal_cross_block_precompile_state_leak( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure internal EVM state for precompile handling does not leak between blocks. + + The EVM may track internal state related to the Parity Touch Bug (EIP-161) + when calling RIPEMD-160 (0x03) with zero value. If this state is not properly + reset between blocks, it can cause incorrect BAL entries in subsequent blocks. + + Prerequisites for triggering the bug: + 1. RIPEMD-160 (0x03) must already exist in state before the call. + 2. Block 1 must call RIPEMD-160 with zero value and complete successfully. + 3. Block 2 must have a TX that triggers an exception (not REVERT). + + Expected behavior: + - Block 1: RIPEMD-160 in BAL (legitimate access) + - Block 2: RIPEMD-160 NOT in BAL (never touched in this block) + + Bug behavior: + - Block 2 incorrectly has RIPEMD-160 in its BAL due to leaked internal state. + """ + alice = pre.fund_eoa() + bob = pre.fund_eoa() + + # Pre-fund RIPEMD-160 so it exists before the call. + # This is required to trigger the internal state tracking. + pre[RIPEMD_160] = Account(balance=1) + + # Contract that calls RIPEMD-160 with zero value + ripemd_caller = pre.deploy_contract( + code=Op.CALL(50_000, RIPEMD_160, 0, 0, 0, 0, 0) + Op.STOP + ) + + # Contract that triggers an exception (stack underflow from ADD on empty stack) + exception_contract = pre.deploy_contract(code=Op.ADD) + + # Block 1: Call RIPEMD-160 successfully + block1 = Block( + txs=[ + Transaction( + sender=alice, + to=ripemd_caller, + gas_limit=100_000, + ) + ], + ) + + # Block 2: Exception triggers internal exception handling. + # If internal state leaked from Block 1, RIPEMD-160 would incorrectly + # appear in Block 2's BAL. 
+ block2 = Block( + txs=[ + Transaction( + sender=bob, + to=exception_contract, + gas_limit=100_000, + ) + ], + ) + + blockchain_test( + pre=pre, + blocks=[block1, block2], + post={ + alice: Account(nonce=1), + bob: Account(nonce=1), + RIPEMD_160: Account(balance=1), + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 5c7bcb42b4..dbcf8ce31a 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -94,3 +94,4 @@ | `test_call_to_pre_authorized_oog` | Ensure BAL handles OOG during EIP-7702 delegation access (pre-Amsterdam test with BAL) | Call to delegated account that OOGs before accessing delegation contract | BAL **MUST** include auth_signer (code read for delegation check) but **MUST NOT** include delegation contract (OOG before access) | ✅ Completed | | `test_selfdestruct_created_in_same_tx_with_revert` | Ensure BAL tracks selfdestruct with revert correctly (pre-Amsterdam test with BAL) | Contract created and selfdestructed in same tx with nested revert | BAL **MUST** track storage reads and balance changes for selfdestruct even with reverts | ✅ Completed | | `test_value_transfer_gas_calculation` | Ensure BAL correctly tracks OOG scenarios for CALL/CALLCODE/DELEGATECALL/STATICCALL (pre-Amsterdam test with BAL) | Nested calls with precise gas limits to test OOG behavior. For CALL with OOG: target account read for `is_account_alive` check. For CALLCODE/DELEGATECALL/STATICCALL with OOG: target account **NOT** read (OOG before state access) | For CALL: target in BAL even with OOG. For CALLCODE/DELEGATECALL/STATICCALL: target **NOT** in BAL when OOG (state access deferred until after gas check) | ✅ Completed | +| `test_bal_cross_block_precompile_state_leak` | Ensure internal EVM state for precompile handling does not leak between blocks | Block 1: Alice calls RIPEMD-160 (0x03) with zero value (RIPEMD-160 must be pre-funded). Block 2: Bob's transaction triggers an exception (stack underflow). | BAL for Block 1 **MUST** include RIPEMD-160. BAL for Block 2 **MUST NOT** include RIPEMD-160 (never accessed in Block 2). Internal state from Parity Touch Bug (EIP-161) handling must be reset between blocks. | ✅ Completed | From 6606aff593f59cdf76f5f56c3041049e3d07a6e1 Mon Sep 17 00:00:00 2001 From: fselmo Date: Mon, 1 Dec 2025 10:49:02 -0700 Subject: [PATCH 042/154] refactor(test-tests): Add BAL expectation to state leak test; fix lint --- .../test_block_access_lists.py | 49 +++++++++++++------ 1 file changed, 34 insertions(+), 15 deletions(-) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py index 9b525b661d..5a1c873d88 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py @@ -2208,20 +2208,18 @@ def test_bal_cross_tx_storage_revert_to_zero( ) -# RIPEMD-160 precompile address (used in Parity Touch Bug test) -RIPEMD_160 = Address(0x03) - - -def test_bal_cross_block_precompile_state_leak( +def test_bal_cross_block_ripemd160_state_leak( pre: Alloc, blockchain_test: BlockchainTestFiller, ) -> None: """ - Ensure internal EVM state for precompile handling does not leak between blocks. + Ensure internal EVM state for RIMPEMD-160 precompile handling does not + leak between blocks. 
The EVM may track internal state related to the Parity Touch Bug (EIP-161) - when calling RIPEMD-160 (0x03) with zero value. If this state is not properly - reset between blocks, it can cause incorrect BAL entries in subsequent blocks. + when calling RIPEMD-160 (0x03) with zero value. If this state is not + properly reset between blocks, it can cause incorrect BAL entries in + subsequent blocks. Prerequisites for triggering the bug: 1. RIPEMD-160 (0x03) must already exist in state before the call. @@ -2233,21 +2231,22 @@ def test_bal_cross_block_precompile_state_leak( - Block 2: RIPEMD-160 NOT in BAL (never touched in this block) Bug behavior: - - Block 2 incorrectly has RIPEMD-160 in its BAL due to leaked internal state. + - Block 2 incorrectly has RIPEMD-160 in its BAL due to leaked + internal state. """ alice = pre.fund_eoa() bob = pre.fund_eoa() - # Pre-fund RIPEMD-160 so it exists before the call. # This is required to trigger the internal state tracking. - pre[RIPEMD_160] = Account(balance=1) + ripemd160_addr = Address(0x03) + pre.fund_address(ripemd160_addr, amount=1) # Contract that calls RIPEMD-160 with zero value ripemd_caller = pre.deploy_contract( - code=Op.CALL(50_000, RIPEMD_160, 0, 0, 0, 0, 0) + Op.STOP + code=Op.CALL(50_000, ripemd160_addr, 0, 0, 0, 0, 0) + Op.STOP ) - - # Contract that triggers an exception (stack underflow from ADD on empty stack) + # Contract that triggers an exception + # (stack underflow from ADD on empty stack) exception_contract = pre.deploy_contract(code=Op.ADD) # Block 1: Call RIPEMD-160 successfully @@ -2259,6 +2258,16 @@ def test_bal_cross_block_precompile_state_leak( gas_limit=100_000, ) ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)] + ), + bob: None, + ripemd_caller: BalAccountExpectation.empty(), + ripemd160_addr: BalAccountExpectation.empty(), + } + ), ) # Block 2: Exception triggers internal exception handling. @@ -2272,6 +2281,16 @@ def test_bal_cross_block_precompile_state_leak( gas_limit=100_000, ) ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: None, + bob: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)] + ), + # this is the important check + ripemd160_addr: None, + } + ), ) blockchain_test( @@ -2280,6 +2299,6 @@ def test_bal_cross_block_precompile_state_leak( post={ alice: Account(nonce=1), bob: Account(nonce=1), - RIPEMD_160: Account(balance=1), + ripemd160_addr: Account(balance=1), }, ) From 0f9ef2d08a48c3124e5e5e759e1af6c0c93b3b09 Mon Sep 17 00:00:00 2001 From: Stefan Date: Thu, 4 Dec 2025 18:23:25 +0100 Subject: [PATCH 043/154] feat(test): add SELFDESTRUCT OOG BAL test --- .../test_block_access_lists.py | 70 +++++++++++++++++++ .../test_cases.md | 1 + 2 files changed, 71 insertions(+) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py index 5a1c873d88..592697d64e 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py @@ -373,6 +373,76 @@ def test_bal_self_destruct( ) +def test_bal_self_destruct_oog( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test that SELFDESTRUCT beneficiary is NOT included in BAL when OOG. 
+ + When SELFDESTRUCT runs out of gas, the operation fails and the beneficiary + address should NOT be added to the Block Access List. + + This test: + 1. Deploys a contract with SELFDESTRUCT bytecode + 2. Calls the contract with limited gas so SELFDESTRUCT fails OOG + 3. Verifies beneficiary is NOT in BAL (the CALL reverts, undoing BAL changes) + + SELFDESTRUCT gas cost to cold new account: 5000 + 2600 + 25000 = 32600 gas + """ + alice = pre.fund_eoa() + + # Beneficiary address for SELFDESTRUCT + beneficiary = Address(0xBEEF) + + # Contract: PUSH20 SELFDESTRUCT + selfdestruct_code = Op.SELFDESTRUCT(beneficiary) + selfdestruct_contract = pre.deploy_contract(code=selfdestruct_code, balance=1000) + + # Caller contract: CALL with limited gas to cause OOG on SELFDESTRUCT + # SELFDESTRUCT needs 32600 gas, we give it only 100 + caller_code = ( + Op.CALL(gas=100, address=selfdestruct_contract, value=0, + args_offset=0, args_size=0, ret_offset=0, ret_size=0) + + Op.STOP + ) + caller_contract = pre.deploy_contract(code=caller_code) + + tx = Transaction( + sender=alice, + to=caller_contract, + gas_limit=100_000, + gas_price=0xA, + ) + + # The inner CALL fails OOG, so SELFDESTRUCT doesn't complete. + # Beneficiary should NOT be in BAL. + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + caller_contract: BalAccountExpectation.empty(), + selfdestruct_contract: BalAccountExpectation.empty(), + # beneficiary should NOT appear - SELFDESTRUCT failed OOG + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + caller_contract: Account(code=caller_code), + # Contract still exists - SELFDESTRUCT failed + selfdestruct_contract: Account(balance=1000, code=selfdestruct_code), + }, + ) + + @pytest.mark.parametrize( "account_access_opcode", [ diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index dbcf8ce31a..06287528ef 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -95,3 +95,4 @@ | `test_selfdestruct_created_in_same_tx_with_revert` | Ensure BAL tracks selfdestruct with revert correctly (pre-Amsterdam test with BAL) | Contract created and selfdestructed in same tx with nested revert | BAL **MUST** track storage reads and balance changes for selfdestruct even with reverts | ✅ Completed | | `test_value_transfer_gas_calculation` | Ensure BAL correctly tracks OOG scenarios for CALL/CALLCODE/DELEGATECALL/STATICCALL (pre-Amsterdam test with BAL) | Nested calls with precise gas limits to test OOG behavior. For CALL with OOG: target account read for `is_account_alive` check. For CALLCODE/DELEGATECALL/STATICCALL with OOG: target account **NOT** read (OOG before state access) | For CALL: target in BAL even with OOG. For CALLCODE/DELEGATECALL/STATICCALL: target **NOT** in BAL when OOG (state access deferred until after gas check) | ✅ Completed | | `test_bal_cross_block_precompile_state_leak` | Ensure internal EVM state for precompile handling does not leak between blocks | Block 1: Alice calls RIPEMD-160 (0x03) with zero value (RIPEMD-160 must be pre-funded). Block 2: Bob's transaction triggers an exception (stack underflow). | BAL for Block 1 **MUST** include RIPEMD-160. 
BAL for Block 2 **MUST NOT** include RIPEMD-160 (never accessed in Block 2). Internal state from Parity Touch Bug (EIP-161) handling must be reset between blocks. | ✅ Completed | +| `test_bal_self_destruct_oog` | Ensure BAL does not include SELFDESTRUCT beneficiary when operation fails due to OOG | Alice calls `Caller` contract which CALLs `SelfDestructContract` with limited gas (100). `SelfDestructContract` attempts SELFDESTRUCT to `Beneficiary`. SELFDESTRUCT requires 32600 gas (5000 base + 2600 cold + 25000 new account). | BAL **MUST** include Alice with `nonce_changes`, `Caller` with empty changes, `SelfDestructContract` with empty changes. BAL **MUST NOT** include `Beneficiary` (SELFDESTRUCT failed OOG, CALL reverted, BAL changes rolled back). Contract balance unchanged. | ✅ Completed | From 89232f272170e4125b4eeb4ada1b5553f9f05a3a Mon Sep 17 00:00:00 2001 From: Felipe Selmo Date: Thu, 4 Dec 2025 23:48:41 +0000 Subject: [PATCH 044/154] refactor(tests): move selfdestruct bal tests to oog file; add gas boundaries - more updates to test_cases.md --- .../forks/amsterdam/vm/instructions/system.py | 6 +- .../test_block_access_lists.py | 229 +--------------- .../test_block_access_lists_opcodes.py | 248 +++++++++++++++++- .../test_cases.md | 4 +- 4 files changed, 253 insertions(+), 234 deletions(-) diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index 1fca8b1459..288594bfe7 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -625,6 +625,9 @@ def selfdestruct(evm: Evm) -> None: if is_cold_access: gas_cost += GAS_COLD_ACCOUNT_ACCESS + check_gas(evm, gas_cost) + + # is_account_alive requires account to be accessed, check gas before if ( not is_account_alive(evm.message.block_env.state, beneficiary) and get_account( @@ -634,11 +637,8 @@ def selfdestruct(evm: Evm) -> None: ): gas_cost += GAS_SELF_DESTRUCT_NEW_ACCOUNT - check_gas(evm, gas_cost) - if is_cold_access: evm.accessed_addresses.add(beneficiary) - track_address(evm.state_changes, beneficiary) charge_gas(evm, gas_cost) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py index 592697d64e..50fb9e0ad2 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py @@ -1,6 +1,6 @@ """Tests for EIP-7928 using the consistent data class pattern.""" -from typing import Callable, Dict +from typing import Callable import pytest from execution_testing import ( @@ -21,7 +21,6 @@ Fork, Hash, Header, - Initcode, Op, Transaction, compute_create_address, @@ -217,232 +216,6 @@ def test_bal_code_changes( ) -@pytest.mark.parametrize( - "self_destruct_in_same_tx", [True, False], ids=["same_tx", "new_tx"] -) -@pytest.mark.parametrize( - "pre_funded", [True, False], ids=["pre_funded", "not_pre_funded"] -) -def test_bal_self_destruct( - pre: Alloc, - blockchain_test: BlockchainTestFiller, - self_destruct_in_same_tx: bool, - pre_funded: bool, -) -> None: - """Ensure BAL captures balance changes caused by `SELFDESTRUCT`.""" - alice = pre.fund_eoa() - bob = pre.fund_eoa(amount=0) - - selfdestruct_code = ( - Op.SLOAD(0x01) # Read from storage slot 0x01 - + Op.SSTORE(0x02, 0x42) # Write to storage slot 0x02 - + Op.SELFDESTRUCT(bob) - ) - # A pre existing self-destruct contract with initial storage 
- kaboom = pre.deploy_contract(code=selfdestruct_code, storage={0x01: 0x123}) - - # A template for self-destruct contract - self_destruct_init_code = Initcode(deploy_code=selfdestruct_code) - template = pre.deploy_contract(code=self_destruct_init_code) - - transfer_amount = expected_recipient_balance = 100 - pre_fund_amount = 10 - - if self_destruct_in_same_tx: - # The goal is to create a self-destructing contract in the same - # transaction to trigger deletion of code as per EIP-6780. - # The factory contract below creates a new self-destructing - # contract and calls it in this transaction. - - bytecode_size = len(self_destruct_init_code) - factory_bytecode = ( - # Clone template memory - Op.EXTCODECOPY(template, 0, 0, bytecode_size) - # Fund 100 wei and deploy the clone - + Op.CREATE(transfer_amount, 0, bytecode_size) - # Call the clone, which self-destructs - + Op.CALL(100_000, Op.DUP6, 0, 0, 0, 0, 0) - + Op.STOP - ) - - factory = pre.deploy_contract(code=factory_bytecode) - kaboom_same_tx = compute_create_address(address=factory, nonce=1) - - # Determine which account will be self-destructed - self_destructed_account = ( - kaboom_same_tx if self_destruct_in_same_tx else kaboom - ) - - if pre_funded: - expected_recipient_balance += pre_fund_amount - pre.fund_address( - address=self_destructed_account, amount=pre_fund_amount - ) - - tx = Transaction( - sender=alice, - to=factory if self_destruct_in_same_tx else kaboom, - value=transfer_amount, - gas_limit=1_000_000, - gas_price=0xA, - ) - - block = Block( - txs=[tx], - expected_block_access_list=BlockAccessListExpectation( - account_expectations={ - alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], - ), - bob: BalAccountExpectation( - balance_changes=[ - BalBalanceChange( - tx_index=1, post_balance=expected_recipient_balance - ) - ] - ), - self_destructed_account: BalAccountExpectation( - balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=0) - ] - if pre_funded - else [], - # Accessed slots for same-tx are recorded as reads (0x02) - storage_reads=[0x01, 0x02] - if self_destruct_in_same_tx - else [0x01], - # Storage changes are recorded for non-same-tx - # self-destructs - storage_changes=[ - BalStorageSlot( - slot=0x02, - slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x42) - ], - ) - ] - if not self_destruct_in_same_tx - else [], - code_changes=[], # should not be present - nonce_changes=[], # should not be present - ), - } - ), - ) - - post: Dict[Address, Account] = { - alice: Account(nonce=1), - bob: Account(balance=expected_recipient_balance), - } - - # If the account was self-destructed in the same transaction, - # we expect the account to be non-existent and its balance to be 0. - if self_destruct_in_same_tx: - post.update( - { - factory: Account( - nonce=2, # incremented after CREATE - balance=0, # spent on CREATE - code=factory_bytecode, - ), - kaboom_same_tx: Account.NONEXISTENT, # type: ignore - # The pre-existing contract remains unaffected - kaboom: Account( - balance=0, code=selfdestruct_code, storage={0x01: 0x123} - ), - } - ) - else: - post.update( - { - # This contract was self-destructed in a separate tx. - # From EIP 6780: `SELFDESTRUCT` does not delete any data - # (including storage keys, code, or the account itself). 
- kaboom: Account( - balance=0, - code=selfdestruct_code, - storage={0x01: 0x123, 0x2: 0x42}, - ), - } - ) - - blockchain_test( - pre=pre, - blocks=[block], - post=post, - ) - - -def test_bal_self_destruct_oog( - pre: Alloc, - blockchain_test: BlockchainTestFiller, -) -> None: - """ - Test that SELFDESTRUCT beneficiary is NOT included in BAL when OOG. - - When SELFDESTRUCT runs out of gas, the operation fails and the beneficiary - address should NOT be added to the Block Access List. - - This test: - 1. Deploys a contract with SELFDESTRUCT bytecode - 2. Calls the contract with limited gas so SELFDESTRUCT fails OOG - 3. Verifies beneficiary is NOT in BAL (the CALL reverts, undoing BAL changes) - - SELFDESTRUCT gas cost to cold new account: 5000 + 2600 + 25000 = 32600 gas - """ - alice = pre.fund_eoa() - - # Beneficiary address for SELFDESTRUCT - beneficiary = Address(0xBEEF) - - # Contract: PUSH20 SELFDESTRUCT - selfdestruct_code = Op.SELFDESTRUCT(beneficiary) - selfdestruct_contract = pre.deploy_contract(code=selfdestruct_code, balance=1000) - - # Caller contract: CALL with limited gas to cause OOG on SELFDESTRUCT - # SELFDESTRUCT needs 32600 gas, we give it only 100 - caller_code = ( - Op.CALL(gas=100, address=selfdestruct_contract, value=0, - args_offset=0, args_size=0, ret_offset=0, ret_size=0) - + Op.STOP - ) - caller_contract = pre.deploy_contract(code=caller_code) - - tx = Transaction( - sender=alice, - to=caller_contract, - gas_limit=100_000, - gas_price=0xA, - ) - - # The inner CALL fails OOG, so SELFDESTRUCT doesn't complete. - # Beneficiary should NOT be in BAL. - block = Block( - txs=[tx], - expected_block_access_list=BlockAccessListExpectation( - account_expectations={ - alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], - ), - caller_contract: BalAccountExpectation.empty(), - selfdestruct_contract: BalAccountExpectation.empty(), - # beneficiary should NOT appear - SELFDESTRUCT failed OOG - } - ), - ) - - blockchain_test( - pre=pre, - blocks=[block], - post={ - alice: Account(nonce=1), - caller_contract: Account(code=caller_code), - # Contract still exists - SELFDESTRUCT failed - selfdestruct_contract: Account(balance=1000, code=selfdestruct_code), - }, - ) - - @pytest.mark.parametrize( "account_access_opcode", [ diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py index 99ad5dcddb..bae1544eee 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py @@ -15,13 +15,15 @@ """ from enum import Enum -from typing import Callable +from typing import Callable, Dict import pytest from execution_testing import ( Account, + Address, Alloc, BalAccountExpectation, + BalBalanceChange, BalNonceChange, BalStorageChange, BalStorageSlot, @@ -30,6 +32,7 @@ BlockchainTestFiller, Bytecode, Fork, + Initcode, Op, Transaction, compute_create_address, @@ -662,6 +665,249 @@ def test_bal_extcodecopy_and_oog( ) +@pytest.mark.parametrize( + "self_destruct_in_same_tx", [True, False], ids=["same_tx", "new_tx"] +) +@pytest.mark.parametrize( + "pre_funded", [True, False], ids=["pre_funded", "not_pre_funded"] +) +def test_bal_self_destruct( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + self_destruct_in_same_tx: bool, + pre_funded: bool, +) -> None: + """Ensure BAL captures balance changes caused by 
`SELFDESTRUCT`.""" + alice = pre.fund_eoa() + bob = pre.fund_eoa(amount=0) + + selfdestruct_code = ( + Op.SLOAD(0x01) # Read from storage slot 0x01 + + Op.SSTORE(0x02, 0x42) # Write to storage slot 0x02 + + Op.SELFDESTRUCT(bob) + ) + # A pre existing self-destruct contract with initial storage + kaboom = pre.deploy_contract(code=selfdestruct_code, storage={0x01: 0x123}) + + # A template for self-destruct contract + self_destruct_init_code = Initcode(deploy_code=selfdestruct_code) + template = pre.deploy_contract(code=self_destruct_init_code) + + transfer_amount = expected_recipient_balance = 100 + pre_fund_amount = 10 + + if self_destruct_in_same_tx: + # The goal is to create a self-destructing contract in the same + # transaction to trigger deletion of code as per EIP-6780. + # The factory contract below creates a new self-destructing + # contract and calls it in this transaction. + + bytecode_size = len(self_destruct_init_code) + factory_bytecode = ( + # Clone template memory + Op.EXTCODECOPY(template, 0, 0, bytecode_size) + # Fund 100 wei and deploy the clone + + Op.CREATE(transfer_amount, 0, bytecode_size) + # Call the clone, which self-destructs + + Op.CALL(1_000_000, Op.DUP6, 0, 0, 0, 0, 0) + + Op.STOP + ) + + factory = pre.deploy_contract(code=factory_bytecode) + kaboom_same_tx = compute_create_address(address=factory, nonce=1) + + # Determine which account will be self-destructed + self_destructed_account = ( + kaboom_same_tx if self_destruct_in_same_tx else kaboom + ) + + if pre_funded: + expected_recipient_balance += pre_fund_amount + pre.fund_address( + address=self_destructed_account, amount=pre_fund_amount + ) + + tx = Transaction( + sender=alice, + to=factory if self_destruct_in_same_tx else kaboom, + value=transfer_amount, + gas_limit=1_000_000, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + bob: BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + tx_index=1, post_balance=expected_recipient_balance + ) + ] + ), + self_destructed_account: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=0) + ] + if pre_funded + else [], + # Accessed slots for same-tx are recorded as reads (0x02) + storage_reads=[0x01, 0x02] + if self_destruct_in_same_tx + else [0x01], + # Storage changes are recorded for non-same-tx + # self-destructs + storage_changes=[ + BalStorageSlot( + slot=0x02, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=0x42) + ], + ) + ] + if not self_destruct_in_same_tx + else [], + code_changes=[], # should not be present + nonce_changes=[], # should not be present + ), + } + ), + ) + + post: Dict[Address, Account] = { + alice: Account(nonce=1), + bob: Account(balance=expected_recipient_balance), + } + + # If the account was self-destructed in the same transaction, + # we expect the account to non-existent and its balance to be 0. + if self_destruct_in_same_tx: + post.update( + { + factory: Account( + nonce=2, # incremented after CREATE + balance=0, # spent on CREATE + code=factory_bytecode, + ), + kaboom_same_tx: Account.NONEXISTENT, # type: ignore + # The pre-existing contract remains unaffected + kaboom: Account( + balance=0, code=selfdestruct_code, storage={0x01: 0x123} + ), + } + ) + else: + post.update( + { + # This contract was self-destructed in a separate tx. 
+ # From EIP 6780: `SELFDESTRUCT` does not delete any data + # (including storage keys, code, or the account itself). + kaboom: Account( + balance=0, + code=selfdestruct_code, + storage={0x01: 0x123, 0x2: 0x42}, + ), + } + ) + + blockchain_test( + pre=pre, + blocks=[block], + post=post, + ) + + +@pytest.mark.parametrize("oog_before_state_access", [True, False]) +def test_bal_self_destruct_oog( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + oog_before_state_access: bool, +) -> None: + """ + Test SELFDESTRUCT BAL behavior at gas boundaries. + + SELFDESTRUCT has two gas checkpoints: + 1. static checks: G_SELF_DESTRUCT + G_COLD_ACCOUNT_ACCESS + OOG here = no state access, beneficiary NOT in BAL + 2. state access: same as static checks, plus G_NEW_ACCOUNT for new account + OOG here = enough gas to access state but not enough for new account, + beneficiary IS in BAL + """ + alice = pre.fund_eoa() + # always use new account so we incur extra G_NEW_ACCOUNT cost + # there is no other gas boundary to test between cold access + # and new account + beneficiary = pre.empty_account() + + # selfdestruct_contract: PUSH20 SELFDESTRUCT + selfdestruct_code = Op.SELFDESTRUCT(beneficiary) + selfdestruct_contract = pre.deploy_contract( + code=selfdestruct_code, balance=1000 + ) + + # Gas needed inside the CALL for SELFDESTRUCT: + # - PUSH20: G_VERY_LOW = 3 + # - SELFDESTRUCT: G_SELF_DESTRUCT + # - G_COLD_ACCOUNT_ACCESS (beneficiary cold access) + gas_costs = fork.gas_costs() + exact_static_gas = ( + gas_costs.G_VERY_LOW + + gas_costs.G_SELF_DESTRUCT + + gas_costs.G_COLD_ACCOUNT_ACCESS + ) + + # subtract one from the exact gas to trigger OOG before state access + oog_gas = ( + exact_static_gas - 1 if oog_before_state_access else exact_static_gas + ) + + # caller_contract: CALL with oog_gas + caller_code = Op.CALL(gas=oog_gas, address=selfdestruct_contract) + caller_contract = pre.deploy_contract(code=caller_code) + + tx = Transaction( + sender=alice, + to=caller_contract, + gas_limit=100_000, + ) + + account_expectations: Dict[Address, BalAccountExpectation | None] = { + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + caller_contract: BalAccountExpectation.empty(), + selfdestruct_contract: BalAccountExpectation.empty(), + # beneficiary only in BAL if we passed check_gas (state accessed) + beneficiary: None + if oog_before_state_access + else BalAccountExpectation.empty(), + } + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + caller_contract: Account(code=caller_code), + # selfdestruct_contract still exists - SELFDESTRUCT failed + selfdestruct_contract: Account( + balance=1000, code=selfdestruct_code + ), + }, + ) + + def test_bal_storage_write_read_same_frame( pre: Alloc, blockchain_test: BlockchainTestFiller, diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 06287528ef..9ddca8a362 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -93,6 +93,6 @@ | `test_init_collision_create_tx` | Ensure BAL tracks CREATE collisions correctly (pre-Amsterdam test with BAL) | CREATE transaction targeting address with existing storage aborts | BAL **MUST** show empty expectations for collision 
address (no changes occur due to abort) | ✅ Completed | | `test_call_to_pre_authorized_oog` | Ensure BAL handles OOG during EIP-7702 delegation access (pre-Amsterdam test with BAL) | Call to delegated account that OOGs before accessing delegation contract | BAL **MUST** include auth_signer (code read for delegation check) but **MUST NOT** include delegation contract (OOG before access) | ✅ Completed | | `test_selfdestruct_created_in_same_tx_with_revert` | Ensure BAL tracks selfdestruct with revert correctly (pre-Amsterdam test with BAL) | Contract created and selfdestructed in same tx with nested revert | BAL **MUST** track storage reads and balance changes for selfdestruct even with reverts | ✅ Completed | -| `test_value_transfer_gas_calculation` | Ensure BAL correctly tracks OOG scenarios for CALL/CALLCODE/DELEGATECALL/STATICCALL (pre-Amsterdam test with BAL) | Nested calls with precise gas limits to test OOG behavior. For CALL with OOG: target account read for `is_account_alive` check. For CALLCODE/DELEGATECALL/STATICCALL with OOG: target account **NOT** read (OOG before state access) | For CALL: target in BAL even with OOG. For CALLCODE/DELEGATECALL/STATICCALL: target **NOT** in BAL when OOG (state access deferred until after gas check) | ✅ Completed | +| `test_value_transfer_gas_calculation` | Ensure BAL correctly tracks OOG scenarios for CALL/CALLCODE/DELEGATECALL/STATICCALL (pre-Amsterdam test with BAL) | Nested calls with precise gas limits to test OOG behavior. For CALL with OOG: target account is read. For CALLCODE/DELEGATECALL/STATICCALL with OOG: target account **NOT** read (OOG before state access) | For CALL: target in BAL even with OOG. For CALLCODE/DELEGATECALL/STATICCALL: target **NOT** in BAL when OOG (state access deferred until after gas check) | ✅ Completed | | `test_bal_cross_block_precompile_state_leak` | Ensure internal EVM state for precompile handling does not leak between blocks | Block 1: Alice calls RIPEMD-160 (0x03) with zero value (RIPEMD-160 must be pre-funded). Block 2: Bob's transaction triggers an exception (stack underflow). | BAL for Block 1 **MUST** include RIPEMD-160. BAL for Block 2 **MUST NOT** include RIPEMD-160 (never accessed in Block 2). Internal state from Parity Touch Bug (EIP-161) handling must be reset between blocks. | ✅ Completed | -| `test_bal_self_destruct_oog` | Ensure BAL does not include SELFDESTRUCT beneficiary when operation fails due to OOG | Alice calls `Caller` contract which CALLs `SelfDestructContract` with limited gas (100). `SelfDestructContract` attempts SELFDESTRUCT to `Beneficiary`. SELFDESTRUCT requires 32600 gas (5000 base + 2600 cold + 25000 new account). | BAL **MUST** include Alice with `nonce_changes`, `Caller` with empty changes, `SelfDestructContract` with empty changes. BAL **MUST NOT** include `Beneficiary` (SELFDESTRUCT failed OOG, CALL reverted, BAL changes rolled back). Contract balance unchanged. | ✅ Completed | +| `test_bal_self_destruct_oog` | Ensure BAL correctly tracks SELFDESTRUCT beneficiary based on gas boundaries | Alice calls `Caller` contract which CALLs `SelfDestructContract` with precisely controlled gas. `SelfDestructContract` attempts SELFDESTRUCT to new account `Beneficiary`. Static gas = G_VERY_LOW + G_SELF_DESTRUCT + G_COLD_ACCOUNT_ACCESS. Parameterized: (1) OOG before state access (gas = static - 1), (2) OOG after state access (gas = static, but insufficient for G_NEW_ACCOUNT). | For OOG before state access: BAL **MUST NOT** include `Beneficiary` (no state access occurred). 
For OOG after state access: BAL **MUST** include `Beneficiary` with empty changes (state was accessed before G_NEW_ACCOUNT check failed). Both cases: Alice with `nonce_changes`, `Caller` and `SelfDestructContract` with empty changes. Contract balance unchanged. | ✅ Completed | From 09ec902a79266982ad085425c84b7c93128430c9 Mon Sep 17 00:00:00 2001 From: felipe Date: Mon, 1 Dec 2025 23:25:51 +0000 Subject: [PATCH 045/154] feat(tests): Port oog create refund test; add BAL >= Amsterdam --- .../forks/amsterdam/vm/interpreter.py | 28 +- .../test_create_oog_from_eoa_refunds.py | 417 +++++++++++++++ .../CreateOOGFromEOARefundsFiller.yml | 483 ------------------ 3 files changed, 439 insertions(+), 489 deletions(-) create mode 100644 tests/cancun/create/test_create_oog_from_eoa_refunds.py delete mode 100644 tests/static/state_tests/stCreateTest/CreateOOGFromEOARefundsFiller.yml diff --git a/src/ethereum/forks/amsterdam/vm/interpreter.py b/src/ethereum/forks/amsterdam/vm/interpreter.py index 57f890a12e..893a0d8833 100644 --- a/src/ethereum/forks/amsterdam/vm/interpreter.py +++ b/src/ethereum/forks/amsterdam/vm/interpreter.py @@ -267,7 +267,7 @@ def process_create_message(message: Message) -> Evm: create_frame = create_child_frame(parent_frame) increment_nonce(state, message.current_target, create_frame) - evm = process_message(message) + evm = process_message(message, parent_state_frame=create_frame) if not evm.error: contract_code = evm.output contract_code_gas = Uint(len(contract_code)) * GAS_CODE_DEPOSIT @@ -296,7 +296,10 @@ def process_create_message(message: Message) -> Evm: return evm -def process_message(message: Message) -> Evm: +def process_message( + message: Message, + parent_state_frame: Optional[StateChanges] = None, +) -> Evm: """ Move ether and execute the relevant code. @@ -304,6 +307,12 @@ def process_message(message: Message) -> Evm: ---------- message : Transaction specific items. + parent_state_frame : + Optional parent frame for state tracking. When provided (e.g., for + CREATE's init code), state changes are tracked as a child of this + frame instead of the default parent determined by the message. + This ensures proper frame hierarchy for CREATE operations where + init code changes must be children of the CREATE frame. Returns ------- @@ -318,8 +327,15 @@ def process_message(message: Message) -> Evm: begin_transaction(state, transient_storage) - parent_frame = get_parent_frame(message) - state_changes = get_message_state_frame(message) + if parent_state_frame is not None: + # Use provided parent for CREATE's init code execution. + # This ensures init code state changes are children of create_frame, + # so they are properly converted to reads if code deposit fails. 
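        # Sketch of the resulting frame hierarchy (names as used in this
        # module; illustrative only, no additional behaviour implied):
        #
        #     parent_frame -> create_frame -> init-code frame (built below)
        #
        # The init-code frame is opened just below via
        # create_child_frame(parent_state_frame); because it hangs off
        # create_frame, a later code-deposit failure can demote the init
        # code's writes to reads instead of dropping the accesses entirely.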
+ parent_changes = parent_state_frame + state_changes = create_child_frame(parent_state_frame) + else: + parent_changes = get_parent_frame(message) + state_changes = get_message_state_frame(message) track_address(state_changes, message.current_target) @@ -335,11 +351,11 @@ def process_message(message: Message) -> Evm: evm = execute_code(message, state_changes) if evm.error: rollback_transaction(state, transient_storage) - if state_changes != parent_frame: + if state_changes != parent_changes: merge_on_failure(evm.state_changes) else: commit_transaction(state, transient_storage) - if state_changes != parent_frame: + if state_changes != parent_changes: merge_on_success(evm.state_changes) return evm diff --git a/tests/cancun/create/test_create_oog_from_eoa_refunds.py b/tests/cancun/create/test_create_oog_from_eoa_refunds.py new file mode 100644 index 0000000000..ee7f14571a --- /dev/null +++ b/tests/cancun/create/test_create_oog_from_eoa_refunds.py @@ -0,0 +1,417 @@ +""" +Tests for CREATE OOG scenarios from EOA refunds. + +Tests that verify refunds are not applied on contract creation +when the creation runs out of gas. +""" + +from dataclasses import dataclass +from enum import Enum +from typing import Dict + +import pytest +from execution_testing import ( + Account, + Address, + Alloc, + BalAccountExpectation, + BalNonceChange, + BalStorageChange, + BalStorageSlot, + Block, + BlockAccessListExpectation, + BlockchainTestFiller, + Fork, + Op, + Transaction, + compute_create2_address, + compute_create_address, +) + +pytestmark = pytest.mark.valid_from("Cancun") + + +class OogScenario(Enum): + """Different ways a CREATE can run out of gas or succeed.""" + + NO_OOG = "no_oog" + OOG_CODE_DEPOSIT = "oog_code_deposit" # OOG due to code deposit cost + OOG_INVALID = "oog_invalid_opcode" # OOG due to INVALID opcode + + +class RefundType(Enum): + """Different refund mechanisms tested.""" + + SSTORE_DIRECT = "sstore_in_init_code" + SSTORE_CALL = "sstore_via_call" + SSTORE_DELEGATECALL = "sstore_via_delegatecall" + SSTORE_CALLCODE = "sstore_via_callcode" + SELFDESTRUCT = "selfdestruct_via_call" + LOG_OP = "log_operations" + NESTED_CREATE = "nested_create_in_init_code" + NESTED_CREATE2 = "nested_create2_in_init_code" + + +@dataclass +class HelperContracts: + """Container for deployed helper contract addresses.""" + + sstore_refund: Address + selfdestruct: Address + log_op: Address + init_code: Address + + +def deploy_helper_contracts(pre: Alloc) -> HelperContracts: + """Deploy all helper contracts needed for the tests.""" + # Simple contract to reset sstore and get refund: sstore(1, 0) + sstore_refund_code = Op.SSTORE(1, 0) + Op.STOP + sstore_refund = pre.deploy_contract( + code=sstore_refund_code, + storage={1: 1}, + ) + + # Simple contract that self-destructs to refund + selfdestruct_code = Op.SELFDESTRUCT(Op.ORIGIN) + Op.STOP + selfdestruct = pre.deploy_contract( + code=selfdestruct_code, + storage={1: 1}, + ) + + # Simple contract that performs log operations + log_op_code = ( + Op.MSTORE(0, 0xFF) + + Op.LOG0(0, 32) + + Op.LOG1(0, 32, 0xFA) + + Op.LOG2(0, 32, 0xFA, 0xFB) + + Op.LOG3(0, 32, 0xFA, 0xFB, 0xFC) + + Op.LOG4(0, 32, 0xFA, 0xFB, 0xFC, 0xFD) + + Op.STOP + ) + log_op = pre.deploy_contract( + code=log_op_code, + storage={1: 1}, + ) + + # Init code that successfully creates contract but contains a refund + # sstore(0, 1); sstore(0, 0); return(0, 1) + init_code_with_refund = Op.SSTORE(0, 1) + Op.SSTORE(0, 0) + Op.RETURN(0, 1) + init_code = pre.deploy_contract( + code=init_code_with_refund, + ) + + 
return HelperContracts( + sstore_refund=sstore_refund, + selfdestruct=selfdestruct, + log_op=log_op, + init_code=init_code, + ) + + +def build_init_code( + refund_type: RefundType, + oog_scenario: OogScenario, + helpers: HelperContracts, +) -> bytes: + """ + Build init code based on refund type and OOG scenario. + + All init codes: + - Write to storage slot 0 + - Optionally trigger refund mechanism + - End with either small return (success) or large return/INVALID (OOG) + """ + # Common prefix: sstore(0, 1) to mark storage access + prefix = Op.SSTORE(0, 1) + + # Build the refund-triggering portion based on type + if refund_type == RefundType.SSTORE_DIRECT: + # Direct sstore refund: sstore(1, 1); sstore(1, 0) + refund_code = Op.SSTORE(1, 1) + Op.SSTORE(1, 0) + + elif refund_type == RefundType.SSTORE_CALL: + # Call to sstore refund helper + refund_code = Op.POP( + Op.CALL(Op.GAS, helpers.sstore_refund, 0, 0, 0, 0, 0) + ) + + elif refund_type == RefundType.SSTORE_DELEGATECALL: + # Delegatecall to sstore refund helper (needs local storage setup) + refund_code = Op.SSTORE(1, 1) + Op.POP( + Op.DELEGATECALL(Op.GAS, helpers.sstore_refund, 0, 0, 0, 0) + ) + + elif refund_type == RefundType.SSTORE_CALLCODE: + refund_code = Op.SSTORE(1, 1) + Op.POP( + Op.CALLCODE(Op.GAS, helpers.sstore_refund, 0, 0, 0, 0, 0) + ) + + elif refund_type == RefundType.SELFDESTRUCT: + refund_code = Op.POP( + Op.CALL(Op.GAS, helpers.selfdestruct, 0, 0, 0, 0, 0) + ) + + elif refund_type == RefundType.LOG_OP: + # call to log op helper + refund_code = Op.POP(Op.CALL(Op.GAS, helpers.log_op, 0, 0, 0, 0, 0)) + + elif refund_type == RefundType.NESTED_CREATE: + # Nested CREATE with refund in init code + # extcodecopy the init code helper and CREATE from it + refund_code = ( + Op.SSTORE(1, 1) + + Op.SSTORE(1, 0) + + Op.EXTCODECOPY( + helpers.init_code, 0, 0, Op.EXTCODESIZE(helpers.init_code) + ) + + Op.POP(Op.CREATE(0, 0, Op.EXTCODESIZE(helpers.init_code))) + ) + + elif refund_type == RefundType.NESTED_CREATE2: + # Nested CREATE2 with refund in init code + refund_code = ( + Op.SSTORE(1, 1) + + Op.SSTORE(1, 0) + + Op.EXTCODECOPY( + helpers.init_code, 0, 0, Op.EXTCODESIZE(helpers.init_code) + ) + + Op.POP(Op.CREATE2(0, 0, Op.EXTCODESIZE(helpers.init_code), 0)) + ) + else: + refund_code = Op.STOP + + # Build the ending based on OOG scenario + if oog_scenario == OogScenario.NO_OOG: + # Return 1 byte of code (cheap code deposit) + if refund_type in ( + RefundType.NESTED_CREATE, + RefundType.NESTED_CREATE2, + ): + # For nested creates, return after init code length + ending = Op.RETURN(Op.ADD(Op.EXTCODESIZE(helpers.init_code), 1), 1) + else: + ending = Op.RETURN(0, 1) + + elif oog_scenario == OogScenario.OOG_CODE_DEPOSIT: + # Return 5000 bytes of code - code deposit cost exceeds available gas + if refund_type in ( + RefundType.NESTED_CREATE, + RefundType.NESTED_CREATE2, + ): + ending = Op.RETURN( + Op.ADD(Op.EXTCODESIZE(helpers.init_code), 1), 5000 + ) + else: + ending = Op.RETURN(0, 5000) + + elif oog_scenario == OogScenario.OOG_INVALID: + # INVALID opcode causes OOG (all gas consumed, no refund) + ending = Op.INVALID + + else: + ending = Op.STOP + + return bytes(prefix + refund_code + ending) + + +@pytest.mark.parametrize( + "oog_scenario", + [ + pytest.param(OogScenario.NO_OOG, id="no_oog"), + pytest.param(OogScenario.OOG_CODE_DEPOSIT, id="oog_code_deposit"), + pytest.param(OogScenario.OOG_INVALID, id="oog_invalid_opcode"), + ], +) +@pytest.mark.parametrize( + "refund_type", + [ + pytest.param(RefundType.SSTORE_DIRECT, 
id="sstore_direct"), + pytest.param(RefundType.SSTORE_CALL, id="sstore_call"), + pytest.param(RefundType.SSTORE_DELEGATECALL, id="sstore_delegatecall"), + pytest.param(RefundType.SSTORE_CALLCODE, id="sstore_callcode"), + pytest.param(RefundType.SELFDESTRUCT, id="selfdestruct"), + pytest.param(RefundType.LOG_OP, id="log_op"), + pytest.param(RefundType.NESTED_CREATE, id="nested_create"), + pytest.param(RefundType.NESTED_CREATE2, id="nested_create2"), + ], +) +@pytest.mark.ported_from( + [ + "https://github.com/ethereum/tests/blob/v13.3/src/GeneralStateTestsFiller/stCreateTest/CreateOOGFromEOARefundsFiller.yml", + ], + pr=["https://github.com/ethereum/execution-specs/pull/1831"], +) +def test_create_oog_from_eoa_refunds( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + refund_type: RefundType, + oog_scenario: OogScenario, + fork: Fork, +) -> None: + """ + Test CREATE from EOA with various refund mechanisms and OOG scenarios. + + Verifies that: + 1. Refunds are not applied when contract creation runs Out of Gas + 2. When OOG occurs, the sender's balance is fully consumed (no refund) + 3. When OOG occurs, the contract is not created + + For BAL (Block Access List) tracking: + - NoOoG: Storage writes should be recorded as `storage_changes` + - OoG: Storage writes should be converted to `storage_reads` since + the CREATE failed and all state changes were reverted + """ + helpers = deploy_helper_contracts(pre) + sender = pre.fund_eoa(amount=4_000_000) + init_code = build_init_code(refund_type, oog_scenario, helpers) + created_address = compute_create_address(address=sender, nonce=0) + + tx = Transaction( + sender=sender, + to=None, + data=init_code, + gas_limit=400_000, + ) + + post: Dict[Address, Account | None] = { + sender: Account(nonce=1), + } + + if oog_scenario == OogScenario.NO_OOG: + # contract created with code 0x00 (1 byte from memory) + if refund_type == RefundType.NESTED_CREATE: + # Nested CREATE increments the created contract's nonce to 2 + post[created_address] = Account( + nonce=2, + code=b"\x00", + storage={0: 1}, # successful write + ) + + nested_created = compute_create_address( + address=created_address, nonce=1 + ) + post[nested_created] = Account( + nonce=1, + code=b"\x00", + storage={}, + ) + elif refund_type == RefundType.NESTED_CREATE2: + # nested create2 increments the created contract's nonce to 2 + post[created_address] = Account( + nonce=2, + code=b"\x00", + storage={0: 1}, + ) + + nested_created = compute_create2_address( + address=created_address, + salt=0, + initcode=Op.SSTORE(0, 1) + Op.SSTORE(0, 0) + Op.RETURN(0, 1), + ) + post[nested_created] = Account( + nonce=1, + code=b"\x00", + storage={}, + ) + else: + post[created_address] = Account( + nonce=1, + code=b"\x00", + storage={0: 1}, + ) + post[sender] = Account(nonce=1) + else: + # OOG case: contract not created, sender balance is fully consumed + post[created_address] = Account.NONEXISTENT + post[sender] = Account( + nonce=1, + balance=0, + ) + + if refund_type == RefundType.SELFDESTRUCT: + selfdestruct_code = Op.SELFDESTRUCT(Op.ORIGIN) + Op.STOP + if oog_scenario == OogScenario.NO_OOG: + # selfdestruct succeeded, balance is 0 + post[helpers.selfdestruct] = Account( + balance=0, + nonce=1, + ) + else: + # OOG: selfdestruct reverted, helper unchanged + post[helpers.selfdestruct] = Account( + code=bytes(selfdestruct_code), + nonce=1, + storage={1: 1}, + ) + + bal_expectation = None + if fork.header_bal_hash_required(): + if oog_scenario == OogScenario.NO_OOG: + # Success: storage write to slot 0 persists 
+ expected_nonce = ( + 2 + if refund_type + in (RefundType.NESTED_CREATE, RefundType.NESTED_CREATE2) + else 1 + ) + created_bal = BalAccountExpectation( + nonce_changes=[ + BalNonceChange(tx_index=1, post_nonce=expected_nonce) + ], + storage_changes=[ + BalStorageSlot( + slot=0, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=1) + ], + ), + ], + storage_reads=( + # noop write 0 -> 1 -> 0 + [1] + if refund_type + in ( + RefundType.SSTORE_DIRECT, + RefundType.SSTORE_DELEGATECALL, + RefundType.SSTORE_CALLCODE, + RefundType.NESTED_CREATE, + RefundType.NESTED_CREATE2, + ) + else [] + ), + ) + else: + # OOG case: storage writes converted to reads + # All refund types write to slot 0, most also write to slot 1 + if refund_type in ( + RefundType.SSTORE_DIRECT, + RefundType.SSTORE_DELEGATECALL, + RefundType.SSTORE_CALLCODE, + RefundType.NESTED_CREATE, + RefundType.NESTED_CREATE2, + ): + # write to both slot 0 and slot 1 (noop write 0 -> 1 -> 0) + created_bal = BalAccountExpectation( + storage_changes=[], + storage_reads=[0, 1], + ) + else: + # SSTORE_CALL, SELFDESTRUCT, LOG_OP only write to slot 0 + created_bal = BalAccountExpectation( + storage_changes=[], + storage_reads=[0], + ) + bal_expectation = BlockAccessListExpectation( + account_expectations={ + sender: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + created_address: created_bal, + } + ) + + blockchain_test( + pre=pre, + blocks=[Block(txs=[tx], expected_block_access_list=bal_expectation)], + post=post, + ) diff --git a/tests/static/state_tests/stCreateTest/CreateOOGFromEOARefundsFiller.yml b/tests/static/state_tests/stCreateTest/CreateOOGFromEOARefundsFiller.yml deleted file mode 100644 index 4ce14cc9fe..0000000000 --- a/tests/static/state_tests/stCreateTest/CreateOOGFromEOARefundsFiller.yml +++ /dev/null @@ -1,483 +0,0 @@ -CreateOOGFromEOARefunds: - # Test that verifies the refunds are not applied on contract creation when the creation runs Out of Gas - env: - currentCoinbase: 2adc25665018aa1fe0e6bc666dac8fc2697ff9ba - currentDifficulty: '0x20000' - currentGasLimit: 0x100000000 - currentNumber: "1" - currentTimestamp: "1000" - - pre: - #### MAIN CALLER - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - balance: '0x3d0900' - code: '0x' - nonce: '1' - storage: {} - - ### HELPER CONTRACTS - # Simple contract to reset sstore and refund - 00000000000000000000000000000000000c0deA: - balance: '0' - code: | - :yul berlin - { - // Simple SSTORE to zero to get a refund - sstore(1, 0) - } - nonce: '1' - storage: { - '1': '1' - } - - # Simple contract that self-destructs to refund - 00000000000000000000000000000000000c0deD: - balance: '0' - code: | - :yul berlin - { - selfdestruct(origin()) - } - nonce: '1' - storage: { - '1': '1' - } - - # Simple contract that performs log operations - 00000000000000000000000000000000000c0de0: - balance: '0' - code: | - :yul berlin - { - mstore(0, 0xff) - log0(0, 32) - log1(0, 32, 0xfa) - log2(0, 32, 0xfa, 0xfb) - log3(0, 32, 0xfa, 0xfb, 0xfc) - log4(0, 32, 0xfa, 0xfb, 0xfc, 0xfd) - } - nonce: '1' - storage: { - '1': '1' - } - - # Init code that successfully creates contract but contains a refund - 00000000000000000000000000000000000c0de1: - balance: '0' - code: | - :yul berlin - { - sstore(0, 1) - sstore(0, 0) - return(0, 1) - } - nonce: '1' - storage: {} - - - transaction: - data: - # Create from EOA, Sstore Refund in Init Code, no OoG - - :label SStore_Refund_NoOoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - sstore(1, 0) - return(0, 1) - } - - # Create from EOA, 
Sstore Refund in Init Code, OoG on Code Deposit - - :label SStore_Refund_OoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - sstore(1, 0) - return(0, 5000) - } - - # Create from EOA, Sstore Refund in Init Code, OoG on Invalid opcode - - :label SStore_Refund_OoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - sstore(1, 0) - invalid() - } - - # Create from EOA, Sstore Refund in Call, no OoG - - :label SStore_Refund_NoOoG :yul berlin { - sstore(0, 1) - pop(call(gas(), 0x00000000000000000000000000000000000c0deA, 0, 0, 0, 0, 0)) - return(0, 1) - } - - # Create from EOA, Sstore Refund in Call, OoG on Code Deposit - - :label SStore_Refund_OoG :yul berlin { - sstore(0, 1) - pop(call(gas(), 0x00000000000000000000000000000000000c0deA, 0, 0, 0, 0, 0)) - return(0, 5000) - } - - # Create from EOA, Sstore Refund in Call, OoG on Invalid opcode - - :label SStore_Refund_OoG :yul berlin { - sstore(0, 1) - pop(call(gas(), 0x00000000000000000000000000000000000c0deA, 0, 0, 0, 0, 0)) - invalid() - } - - # Create from EOA, Sstore Refund in DelegateCall, no OoG - - :label SStore_Refund_NoOoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - pop(delegatecall(gas(), 0x00000000000000000000000000000000000c0deA, 0, 0, 0, 0)) - return(0, 1) - } - - # Create from EOA, Sstore Refund in DelegateCall, OoG on Code Deposit - - :label SStore_Refund_OoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - pop(delegatecall(gas(), 0x00000000000000000000000000000000000c0deA, 0, 0, 0, 0)) - return(0, 5000) - } - - # Create from EOA, Sstore Refund in DelegateCall, OoG on Invalid opcode - - :label SStore_Refund_OoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - pop(delegatecall(gas(), 0x00000000000000000000000000000000000c0deA, 0, 0, 0, 0)) - invalid() - } - - # Create from EOA, Sstore Refund in CallCode, no OoG - - :label SStore_Refund_NoOoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - pop(callcode(gas(), 0x00000000000000000000000000000000000c0deA, 0, 0, 0, 0, 0)) - return(0, 1) - } - - # Create from EOA, Sstore Refund in CallCode, OoG on Code Deposit - - :label SStore_Refund_OoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - pop(callcode(gas(), 0x00000000000000000000000000000000000c0deA, 0, 0, 0, 0, 0)) - return(0, 5000) - } - - # Create from EOA, Sstore Refund in CallCode, OoG on Invalid opcode - - :label SStore_Refund_OoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - pop(callcode(gas(), 0x00000000000000000000000000000000000c0deA, 0, 0, 0, 0, 0)) - invalid() - } - - # Create from EOA, Refund Self-destruct call, no OoG - - :label SelfDestruct_Refund_NoOoG :yul berlin { - sstore(0, 1) - pop(call(gas(), 0x00000000000000000000000000000000000c0deD, 0, 0, 0, 0, 0)) - return(0, 1) - } - - # Create from EOA, Refund Self-destruct call, OoG on Code Deposit - - :label SelfDestruct_Refund_OoG :yul berlin { - sstore(0, 1) - pop(call(gas(), 0x00000000000000000000000000000000000c0deD, 0, 0, 0, 0, 0)) - return(0, 5000) - } - - # Create from EOA, Refund Self-destruct call, OoG on Invalid opcode - - :label SelfDestruct_Refund_OoG :yul berlin { - sstore(0, 1) - pop(call(gas(), 0x00000000000000000000000000000000000c0deD, 0, 0, 0, 0, 0)) - invalid() - } - - # Create from EOA, Log operation in call, no OoG - - :label LogOp_NoOoG :yul berlin { - sstore(0, 1) - pop(call(gas(), 0x00000000000000000000000000000000000c0de0, 0, 0, 0, 0, 0)) - return(0, 1) - } - - # Create from EOA, Log operation in call, OoG on Code Deposit - - :label LogOp_OoG :yul berlin { - sstore(0, 1) - pop(call(gas(), 0x00000000000000000000000000000000000c0de0, 0, 0, 0, 0, 0)) - return(0, 5000) - } - 
- # Create from EOA, Log operation in call, OoG on Invalid opcode - - :label LogOp_OoG :yul berlin { - sstore(0, 1) - pop(call(gas(), 0x00000000000000000000000000000000000c0de0, 0, 0, 0, 0, 0)) - invalid() - } - - # Create from EOA, Refund within CREATE, no OoG - - :label SStore_Create_Refund_NoOoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - sstore(1, 0) - let initcodeaddr := 0x00000000000000000000000000000000000c0de1 - let initcodelength := extcodesize(initcodeaddr) - extcodecopy(initcodeaddr, 0, 0, initcodelength) - pop(create(0, 0, initcodelength)) - return(add(initcodelength, 1), 1) - } - - # Create from EOA, Refund within CREATE, OoG on Code Deposit - - :label SStore_Create_Refund_OoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - sstore(1, 0) - let initcodeaddr := 0x00000000000000000000000000000000000c0de1 - let initcodelength := extcodesize(initcodeaddr) - extcodecopy(initcodeaddr, 0, 0, initcodelength) - pop(create(0, 0, initcodelength)) - return(add(initcodelength, 1), 5000) - } - - # Create from EOA, Refund within CREATE, OoG on Invalid opcode - - :label SStore_Create_Refund_OoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - sstore(1, 0) - let initcodeaddr := 0x00000000000000000000000000000000000c0de1 - let initcodelength := extcodesize(initcodeaddr) - extcodecopy(initcodeaddr, 0, 0, initcodelength) - pop(create(0, 0, initcodelength)) - invalid() - } - - # Create2 from EOA, Refund within CREATE, no OoG - - :label SStore_Create2_Refund_NoOoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - sstore(1, 0) - let initcodeaddr := 0x00000000000000000000000000000000000c0de1 - //let initcodelength := extcodesize(initcodeaddr) - //extcodecopy(initcodeaddr, 0, 0, initcodelength) - - //protection from solc version changing the init code - - let initcodelength := 15 - mstore(0, 0x6001600055600060005560016000f30000000000000000000000000000000000) - - pop(create2(0, 0, initcodelength, 0)) - return(add(initcodelength, 1), 1) - } - - # Create2 from EOA, Refund within CREATE, OoG on Code Deposit - - :label SStore_Create2_Refund_OoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - sstore(1, 0) - let initcodeaddr := 0x00000000000000000000000000000000000c0de1 - let initcodelength := extcodesize(initcodeaddr) - extcodecopy(initcodeaddr, 0, 0, initcodelength) - pop(create2(0, 0, initcodelength, 0)) - return(add(initcodelength, 1), 5000) - } - - # Create2 from EOA, Refund within CREATE, OoG on Invalid opcode - - :label SStore_Create2_Refund_OoG :yul berlin { - sstore(0, 1) - sstore(1, 1) - sstore(1, 0) - let initcodeaddr := 0x00000000000000000000000000000000000c0de1 - let initcodelength := extcodesize(initcodeaddr) - extcodecopy(initcodeaddr, 0, 0, initcodelength) - pop(create2(0, 0, initcodelength, 0)) - invalid() - } - - gasLimit: - - 0x61a80 - gasPrice: '10' - nonce: '1' - to: "" - secretKey: "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8" - value: - - 0 - - expect: - - - indexes: - data: - - :label SStore_Refund_NoOoG - network: - - '>=Cancun' - result: - - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - nonce: 2 - - ec0e71ad0a90ffe1909d27dac207f7680abba42d: - nonce: 1 - code: '0x00' - storage: { - '0': 1 - } - - indexes: - data: - - :label SStore_Refund_OoG - network: - - '>=Cancun' - result: - - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - # When we OoG, we use up all the gas regardless of the refunds - balance: 0 - nonce: 2 - - ec0e71ad0a90ffe1909d27dac207f7680abba42d: - shouldnotexist: 1 - - - - indexes: - data: - - :label SelfDestruct_Refund_NoOoG - network: - - '>=Cancun' - result: - - 
a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - nonce: 2 - - ec0e71ad0a90ffe1909d27dac207f7680abba42d: - nonce: 1 - code: '0x00' - storage: { - '0': 1 - } - 00000000000000000000000000000000000c0deD: - balance: 0 - nonce: 1 - - - indexes: - data: - - :label SelfDestruct_Refund_OoG - network: - - '>=Cancun' - result: - - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - # When we OoG, we use up all the gas regardless of the refunds - balance: 0 - nonce: 2 - - ec0e71ad0a90ffe1909d27dac207f7680abba42d: - shouldnotexist: 1 - - 00000000000000000000000000000000000c0deD: - code: '0x32FF' - nonce: '1' - storage: { - '1': '1' - } - - - indexes: - data: - - :label LogOp_NoOoG - network: - - '>=Cancun' - result: - - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - nonce: 2 - - ec0e71ad0a90ffe1909d27dac207f7680abba42d: - nonce: 1 - code: '0x00' - storage: { - '0': 1 - } - - indexes: - data: - - :label LogOp_OoG - network: - - '>=Cancun' - result: - - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - # When we OoG, we use up all the gas regardless of the refunds - balance: 0 - nonce: 2 - - ec0e71ad0a90ffe1909d27dac207f7680abba42d: - shouldnotexist: 1 - - - indexes: - data: - - :label SStore_Create_Refund_NoOoG - network: - - '>=Cancun' - result: - - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - nonce: 2 - - ec0e71ad0a90ffe1909d27dac207f7680abba42d: - nonce: 2 - code: '0x00' - storage: { - '0': 1 - } - e3476106159f87477ad639e3ddcbb6b240efe459: - nonce: 1 - code: '0x00' - storage: {} - - - indexes: - data: - - :label SStore_Create_Refund_OoG - network: - - '>=Cancun' - result: - - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - # When we OoG, we use up all the gas regardless of the refunds - balance: 0 - nonce: 2 - - ec0e71ad0a90ffe1909d27dac207f7680abba42d: - shouldnotexist: 1 - e3476106159f87477ad639e3ddcbb6b240efe459: - shouldnotexist: 1 - - - indexes: - data: - - :label SStore_Create2_Refund_NoOoG - network: - - '>=Cancun' - result: - - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - nonce: 2 - - ec0e71ad0a90ffe1909d27dac207f7680abba42d: - nonce: 2 - code: '0x00' - storage: { - '0': 1 - } - 1eeb9ca3824a07c140fc01aa562a3a896f44e790: - nonce: 1 - code: '0x00' - storage: {} - - - indexes: - data: - - :label SStore_Create2_Refund_OoG - network: - - '>=Cancun' - result: - - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - # When we OoG, we use up all the gas regardless of the refunds - balance: 0 - nonce: 2 - - ec0e71ad0a90ffe1909d27dac207f7680abba42d: - shouldnotexist: 1 - 1eeb9ca3824a07c140fc01aa562a3a896f44e790: - shouldnotexist: 1 From c40db8381f40e5eef715463ca90bd8138b06d812 Mon Sep 17 00:00:00 2001 From: felipe Date: Mon, 8 Dec 2025 12:38:20 -0700 Subject: [PATCH 046/154] refactor(spec-specs): Refactor state changes and frame hierarchy (#1841) * refactor(spec-specs): Refactor state changes and their frames * chore(spec-specs): cleanup BAL logic; organize gas check for SSTORE * refactor(spec-specs): Changes from comments on PR #1841 * enhance: don't set defaults for state_changes --- src/ethereum/forks/amsterdam/fork.py | 116 ++--- src/ethereum/forks/amsterdam/state.py | 110 +--- src/ethereum/forks/amsterdam/state_tracker.py | 477 ++++++++---------- src/ethereum/forks/amsterdam/utils/message.py | 11 +- src/ethereum/forks/amsterdam/vm/__init__.py | 8 +- .../forks/amsterdam/vm/eoa_delegation.py | 35 +- .../amsterdam/vm/instructions/storage.py | 75 +-- .../forks/amsterdam/vm/instructions/system.py | 85 +++- .../forks/amsterdam/vm/interpreter.py | 163 +++--- .../evm_tools/t8n/__init__.py | 13 +- 10 files changed, 490 insertions(+), 603 
deletions(-) diff --git a/src/ethereum/forks/amsterdam/fork.py b/src/ethereum/forks/amsterdam/fork.py index 358f194f6e..697086e2b4 100644 --- a/src/ethereum/forks/amsterdam/fork.py +++ b/src/ethereum/forks/amsterdam/fork.py @@ -30,7 +30,6 @@ from . import vm from .block_access_lists.builder import build_block_access_list -from .block_access_lists.rlp_types import BlockAccessIndex from .block_access_lists.rlp_utils import compute_block_access_list_hash from .blocks import Block, Header, Log, Receipt, Withdrawal, encode_receipt from .bloom import logs_bloom @@ -65,16 +64,16 @@ state_root, ) from .state_tracker import ( + StateChanges, capture_pre_balance, commit_transaction_frame, create_child_frame, - get_block_access_index, - handle_in_transaction_selfdestruct, + filter_net_zero_frame_changes, increment_block_access_index, - merge_on_success, - normalize_balance_changes_for_transaction, track_address, track_balance_change, + track_nonce_change, + track_selfdestruct, ) from .transactions import ( AccessListTransaction, @@ -249,6 +248,7 @@ def state_transition(chain: BlockChain, block: Block) -> None: prev_randao=block.header.prev_randao, excess_blob_gas=block.header.excess_blob_gas, parent_beacon_block_root=block.header.parent_beacon_block_root, + state_changes=StateChanges(), ) block_output = apply_body( @@ -636,7 +636,7 @@ def process_system_transaction( """ # EIP-7928: Create a child frame for system transaction # This allows proper pre-state capture for net-zero filtering - system_tx_state_changes = create_child_frame(block_env.block_state_changes) + system_tx_state_changes = create_child_frame(block_env.state_changes) tx_env = vm.TransactionEnvironment( origin=SYSTEM_ADDRESS, @@ -649,8 +649,12 @@ def process_system_transaction( authorizations=(), index_in_block=None, tx_hash=None, + state_changes=system_tx_state_changes, ) + # Create call frame as child of tx frame + call_frame = create_child_frame(tx_env.state_changes) + system_tx_message = Message( block_env=block_env, tx_env=tx_env, @@ -669,14 +673,15 @@ def process_system_transaction( accessed_storage_keys=set(), disable_precompiles=False, parent_evm=None, - transaction_state_changes=system_tx_state_changes, + is_create=False, + state_changes=call_frame, ) system_tx_output = process_message_call(system_tx_message) - # Merge system transaction changes back to block frame + # Commit system transaction changes to block frame # System transactions always succeed (or block is invalid) - merge_on_success(system_tx_state_changes) + commit_transaction_frame(tx_env.state_changes) return system_tx_output @@ -816,7 +821,7 @@ def apply_body( # EIP-7928: Increment block frame to post-execution index # After N transactions, block frame is at index N # Post-execution operations (withdrawals, etc.) 
use index N+1 - increment_block_access_index(block_env.block_state_changes) + increment_block_access_index(block_env.state_changes) process_withdrawals(block_env, block_output, withdrawals) @@ -824,9 +829,9 @@ def apply_body( block_env=block_env, block_output=block_output, ) - # Build block access list from block_env.block_state_changes + # Build block access list from block_env.state_changes block_output.block_access_list = build_block_access_list( - block_env.block_state_changes + block_env.state_changes ) return block_output @@ -909,9 +914,10 @@ def process_transaction( """ # EIP-7928: Create a transaction-level StateChanges frame # The frame will read the current block_access_index from the block frame - increment_block_access_index(block_env.block_state_changes) - tx_state_changes = create_child_frame(block_env.block_state_changes) + increment_block_access_index(block_env.state_changes) + tx_state_changes = create_child_frame(block_env.state_changes) + # Capture coinbase pre-balance for net-zero filtering coinbase_pre_balance = get_account( block_env.state, block_env.coinbase ).balance @@ -949,16 +955,27 @@ def process_transaction( effective_gas_fee = tx.gas * effective_gas_price gas = tx.gas - intrinsic_gas - increment_nonce(block_env.state, sender, tx_state_changes) + + # Track sender nonce increment + increment_nonce(block_env.state, sender) + sender_nonce_after = get_account(block_env.state, sender).nonce + track_nonce_change(tx_state_changes, sender, U64(sender_nonce_after)) + + # Track sender balance deduction for gas fee + sender_balance_before = get_account(block_env.state, sender).balance + track_address(tx_state_changes, sender) + capture_pre_balance(tx_state_changes, sender, sender_balance_before) sender_balance_after_gas_fee = ( Uint(sender_account.balance) - effective_gas_fee - blob_gas_fee ) set_account_balance( - block_env.state, + block_env.state, sender, U256(sender_balance_after_gas_fee) + ) + track_balance_change( + tx_state_changes, sender, U256(sender_balance_after_gas_fee), - tx_state_changes, ) access_list_addresses = set() @@ -993,13 +1010,13 @@ def process_transaction( authorizations=authorizations, index_in_block=index, tx_hash=get_transaction_hash(encode_transaction(tx)), + state_changes=tx_state_changes, ) message = prepare_message( block_env, tx_env, tx, - tx_state_changes, ) tx_output = process_message_call(message) @@ -1029,11 +1046,11 @@ def process_transaction( sender_balance_after_refund = get_account( block_env.state, sender ).balance + U256(gas_refund_amount) - set_account_balance( - block_env.state, + set_account_balance(block_env.state, sender, sender_balance_after_refund) + track_balance_change( + tx_env.state_changes, sender, sender_balance_after_refund, - tx_state_changes, ) coinbase_balance_after_mining_fee = get_account( @@ -1041,10 +1058,12 @@ def process_transaction( ).balance + U256(transaction_fee) set_account_balance( - block_env.state, + block_env.state, block_env.coinbase, coinbase_balance_after_mining_fee + ) + track_balance_change( + tx_env.state_changes, block_env.coinbase, coinbase_balance_after_mining_fee, - tx_state_changes, ) if coinbase_balance_after_mining_fee == 0 and account_exists_and_is_empty( @@ -1070,35 +1089,19 @@ def process_transaction( block_output.block_logs += tx_output.logs - # EIP-7928: Handle in-transaction self-destruct BEFORE normalization - # Destroy accounts first so normalization sees correct post-tx state - # Only accounts created in same tx are in accounts_to_delete per EIP-6780 for address in 
tx_output.accounts_to_delete: destroy_account(block_env.state, address) - # EIP-7928: Normalize balance changes for this transaction before merging - # into block frame. Must happen AFTER destroy_account so net-zero filtering - # sees the correct post-transaction balance (0 for destroyed accounts). - normalize_balance_changes_for_transaction( - tx_state_changes, - BlockAccessIndex( - get_block_access_index(block_env.block_state_changes) - ), - block_env.state, - ) + # EIP-7928: Filter net-zero changes before committing to block frame. + # Must happen AFTER destroy_account so filtering sees correct state. + filter_net_zero_frame_changes(tx_env.state_changes, block_env.state) - commit_transaction_frame(tx_state_changes) + commit_transaction_frame(tx_env.state_changes) - # EIP-7928: Handle in-transaction self-destruct normalization AFTER merge + # EIP-7928: Track in-transaction self-destruct normalization AFTER merge # Convert storage writes to reads and remove nonce/code changes for address in tx_output.accounts_to_delete: - handle_in_transaction_selfdestruct( - block_env.block_state_changes, - address, - BlockAccessIndex( - get_block_access_index(block_env.block_state_changes) - ), - ) + track_selfdestruct(block_env.state_changes, address) def process_withdrawals( @@ -1109,13 +1112,12 @@ def process_withdrawals( """ Increase the balance of the withdrawing account. """ + # Capture pre-state for withdrawal balance filtering withdrawal_addresses = {wd.address for wd in withdrawals} for address in withdrawal_addresses: pre_balance = get_account(block_env.state, address).balance - track_address(block_env.block_state_changes, address) - capture_pre_balance( - block_env.block_state_changes, address, pre_balance - ) + track_address(block_env.state_changes, address) + capture_pre_balance(block_env.state_changes, address, pre_balance) def increase_recipient_balance(recipient: Account) -> None: recipient.balance += wd.amount * U256(10**9) @@ -1131,22 +1133,16 @@ def increase_recipient_balance(recipient: Account) -> None: new_balance = get_account(block_env.state, wd.address).balance track_balance_change( - block_env.block_state_changes, wd.address, new_balance + block_env.state_changes, + wd.address, + new_balance, ) if account_exists_and_is_empty(block_env.state, wd.address): destroy_account(block_env.state, wd.address) - # EIP-7928: Normalize balance changes after all withdrawals - # Filters out net-zero changes - - normalize_balance_changes_for_transaction( - block_env.block_state_changes, - BlockAccessIndex( - get_block_access_index(block_env.block_state_changes) - ), - block_env.state, - ) + # EIP-7928: Filter net-zero balance changes for withdrawals + filter_net_zero_frame_changes(block_env.state_changes, block_env.state) def check_gas_limit(gas_limit: Uint, parent_gas_limit: Uint) -> bool: diff --git a/src/ethereum/forks/amsterdam/state.py b/src/ethereum/forks/amsterdam/state.py index c1d331942a..fcf12e971b 100644 --- a/src/ethereum/forks/amsterdam/state.py +++ b/src/ethereum/forks/amsterdam/state.py @@ -21,17 +21,9 @@ from ethereum_types.bytes import Bytes, Bytes32 from ethereum_types.frozen import modify -from ethereum_types.numeric import U64, U256, Uint +from ethereum_types.numeric import U256, Uint from .fork_types import EMPTY_ACCOUNT, Account, Address, Root -from .state_tracker import ( - StateChanges, - capture_pre_balance, - track_address, - track_balance_change, - track_code_change, - track_nonce_change, -) from .trie import EMPTY_TRIE_ROOT, Trie, copy_trie, root, trie_get, trie_set 
if TYPE_CHECKING: @@ -517,18 +509,22 @@ def move_ether( sender_address: Address, recipient_address: Address, amount: U256, - state_changes: StateChanges, ) -> None: """ Move funds between accounts. - """ - sender_balance = get_account(state, sender_address).balance - recipient_balance = get_account(state, recipient_address).balance - track_address(state_changes, sender_address) - capture_pre_balance(state_changes, sender_address, sender_balance) - track_address(state_changes, recipient_address) - capture_pre_balance(state_changes, recipient_address, recipient_balance) + Parameters + ---------- + state: + The current state. + sender_address: + Address of the sender. + recipient_address: + Address of the recipient. + amount: + The amount to transfer. + + """ def reduce_sender_balance(sender: Account) -> None: if sender.balance < amount: @@ -541,23 +537,8 @@ def increase_recipient_balance(recipient: Account) -> None: modify_state(state, sender_address, reduce_sender_balance) modify_state(state, recipient_address, increase_recipient_balance) - sender_new_balance = get_account(state, sender_address).balance - recipient_new_balance = get_account(state, recipient_address).balance - - track_balance_change( - state_changes, sender_address, U256(sender_new_balance) - ) - track_balance_change( - state_changes, recipient_address, U256(recipient_new_balance) - ) - -def set_account_balance( - state: State, - address: Address, - amount: U256, - state_changes: StateChanges, -) -> None: +def set_account_balance(state: State, address: Address, amount: U256) -> None: """ Sets the balance of an account. @@ -567,32 +548,20 @@ def set_account_balance( The current state. address: - Address of the account whose nonce needs to be incremented. + Address of the account whose balance needs to be set. amount: The amount that needs to set in balance. - state_changes: - State changes frame for tracking (EIP-7928). - """ - current_balance = get_account(state, address).balance - - track_address(state_changes, address) - capture_pre_balance(state_changes, address, current_balance) def set_balance(account: Account) -> None: account.balance = amount modify_state(state, address, set_balance) - track_balance_change(state_changes, address, amount) -def increment_nonce( - state: State, - address: Address, - state_changes: "StateChanges", -) -> None: +def increment_nonce(state: State, address: Address) -> None: """ Increments the nonce of an account. @@ -604,9 +573,6 @@ def increment_nonce( address: Address of the account whose nonce needs to be incremented. - state_changes: - State changes frame for tracking (EIP-7928). - """ def increase_nonce(sender: Account) -> None: @@ -614,16 +580,8 @@ def increase_nonce(sender: Account) -> None: modify_state(state, address, increase_nonce) - account = get_account(state, address) - track_nonce_change(state_changes, address, U64(account.nonce)) - -def set_code( - state: State, - address: Address, - code: Bytes, - state_changes: StateChanges, -) -> None: +def set_code(state: State, address: Address, code: Bytes) -> None: """ Sets Account code. @@ -638,9 +596,6 @@ def set_code( code: The bytecode that needs to be set. - state_changes: - State changes frame for tracking (EIP-7928). 
- """ def write_code(sender: Account) -> None: @@ -648,27 +603,13 @@ def write_code(sender: Account) -> None: modify_state(state, address, write_code) - # Only track code change if it's not net-zero within this frame - # Compare against pre-code captured in this frame, default to b"" - pre_code = state_changes.pre_code.get(address, b"") - if pre_code != code: - track_code_change(state_changes, address, code) - -def set_authority_code( - state: State, - address: Address, - code: Bytes, - state_changes: StateChanges, - current_code: Bytes, -) -> None: +def set_authority_code(state: State, address: Address, code: Bytes) -> None: """ Sets authority account code for EIP-7702 delegation. This function is used specifically for setting authority code within - EIP-7702 Set Code Transactions. Unlike set_code(), it tracks changes based - on the current code rather than pre_code to handle multiple authorizations - to the same address within a single transaction correctly. + EIP-7702 Set Code Transactions. Parameters ---------- @@ -681,13 +622,6 @@ def set_authority_code( code: The delegation designation bytecode to set. - state_changes: - State changes frame for tracking (EIP-7928). - - current_code: - The current code before this change. Used to determine if tracking - is needed (only track if code actually changes from current value). - """ def write_code(sender: Account) -> None: @@ -695,12 +629,6 @@ def write_code(sender: Account) -> None: modify_state(state, address, write_code) - # Only track if code is actually changing from current value - # This allows multiple auths to same address to be tracked individually - # Net-zero filtering happens in commit_transaction_frame - if current_code != code: - track_code_change(state_changes, address, code) - def get_storage_original(state: State, address: Address, key: Bytes32) -> U256: """ diff --git a/src/ethereum/forks/amsterdam/state_tracker.py b/src/ethereum/forks/amsterdam/state_tracker.py index 7b98396318..19a929d0dd 100644 --- a/src/ethereum/forks/amsterdam/state_tracker.py +++ b/src/ethereum/forks/amsterdam/state_tracker.py @@ -1,21 +1,12 @@ """ -Hierarchical state change tracking for EIP-7928 Block Access Lists. +EIP-7928 Block Access Lists: Hierarchical State Change Tracking. -Implements a frame-based hierarchy: Block → Transaction → Call frames. -Each frame tracks state changes and merges upward on completion: -- Success: merge all changes (reads + writes) -- Failure: merge only reads (writes discarded) +Frame hierarchy mirrors EVM execution: Block -> Transaction -> Call frames. +Each frame tracks state accesses and merges to parent on completion. -Frame Hierarchy: - Block Frame: Root, lifetime = entire block, index 0..N+1 - Transaction Frame: Child of block, lifetime = single transaction - Call Frame: Child of transaction/call, lifetime = single message - -Block Access Index: 0=pre-exec, 1..N=transactions, N+1=post-exec -Stored in root frame, passed explicitly to operations. - -Pre-State Tracking: Values captured before modifications to enable -net-zero filtering. +On success, changes merge upward with net-zero filtering (pre-state vs final). +On failure, only reads merge (writes discarded). Pre-state captures use +first-write-wins semantics and are stored at the transaction frame level. [EIP-7928]: https://eips.ethereum.org/EIPS/eip-7928 """ @@ -38,12 +29,13 @@ class StateChanges: """ Tracks state changes within a single execution frame. - Frames form a hierarchy: Block → Transaction → Call frames. 
- Each frame holds a reference to its parent for upward traversal. + Frames form a hierarchy (Block -> Transaction -> Call) linked by parent + references. The block_access_index is stored at the root frame. Pre-state + captures (pre_balances, etc.) are only populated at the transaction level. """ parent: Optional["StateChanges"] = None - _block_access_index: BlockAccessIndex = BlockAccessIndex(0) + block_access_index: BlockAccessIndex = BlockAccessIndex(0) touched_addresses: Set[Address] = field(default_factory=set) storage_reads: Set[Tuple[Address, Bytes32]] = field(default_factory=set) @@ -61,7 +53,7 @@ class StateChanges: default_factory=dict ) - # Pre-state captures for net-zero filtering + # Pre-state captures (transaction-scoped, only populated at tx frame) pre_balances: Dict[Address, U256] = field(default_factory=dict) pre_nonces: Dict[Address, U64] = field(default_factory=dict) pre_storage: Dict[Tuple[Address, Bytes32], U256] = field( @@ -72,17 +64,17 @@ class StateChanges: def get_block_frame(state_changes: StateChanges) -> StateChanges: """ - Walk to block-level frame. + Walk to the root (block-level) frame. Parameters ---------- state_changes : - Any state changes frame. + Any frame in the hierarchy. Returns ------- block_frame : StateChanges - The block-level frame. + The root block-level frame. """ block_frame = state_changes @@ -91,125 +83,128 @@ def get_block_frame(state_changes: StateChanges) -> StateChanges: return block_frame -def get_block_access_index(root_frame: StateChanges) -> BlockAccessIndex: +def increment_block_access_index(root_frame: StateChanges) -> None: """ - Get current block access index from root frame. + Increment the block access index in the root frame. Parameters ---------- root_frame : - The root (block-level) state changes frame. - - Returns - ------- - index : BlockAccessIndex - The current block access index. + The root block-level frame. """ - return root_frame._block_access_index + root_frame.block_access_index = BlockAccessIndex( + root_frame.block_access_index + Uint(1) + ) -def increment_block_access_index(root_frame: StateChanges) -> None: +def get_transaction_frame(state_changes: StateChanges) -> StateChanges: """ - Increment block access index in root frame. + Walk to the transaction-level frame (child of block frame). Parameters ---------- - root_frame : - The root (block-level) state changes frame to increment. + state_changes : + Any frame in the hierarchy. + + Returns + ------- + tx_frame : StateChanges + The transaction-level frame. """ - root_frame._block_access_index = BlockAccessIndex( - root_frame._block_access_index + Uint(1) - ) + tx_frame = state_changes + while tx_frame.parent is not None and tx_frame.parent.parent is not None: + tx_frame = tx_frame.parent + return tx_frame def capture_pre_balance( - state_changes: StateChanges, address: Address, balance: U256 + tx_frame: StateChanges, address: Address, balance: U256 ) -> None: """ - Capture pre-balance (first-write-wins for net-zero filtering). + Capture pre-balance if not already captured (first-write-wins). Parameters ---------- - state_changes : - The state changes frame. + tx_frame : + The transaction-level frame. address : - The address whose balance is being captured. + The address whose balance to capture. balance : - The balance value before modification. + The current balance value. 
""" - if address not in state_changes.pre_balances: - state_changes.pre_balances[address] = balance + if address not in tx_frame.pre_balances: + tx_frame.pre_balances[address] = balance def capture_pre_nonce( - state_changes: StateChanges, address: Address, nonce: U64 + tx_frame: StateChanges, address: Address, nonce: U64 ) -> None: """ - Capture pre-nonce (first-write-wins). + Capture pre-nonce if not already captured (first-write-wins). Parameters ---------- - state_changes : - The state changes frame. + tx_frame : + The transaction-level frame. address : - The address whose nonce is being captured. + The address whose nonce to capture. nonce : - The nonce value before modification. + The current nonce value. """ - if address not in state_changes.pre_nonces: - state_changes.pre_nonces[address] = nonce + if address not in tx_frame.pre_nonces: + tx_frame.pre_nonces[address] = nonce def capture_pre_storage( - state_changes: StateChanges, address: Address, key: Bytes32, value: U256 + tx_frame: StateChanges, address: Address, key: Bytes32, value: U256 ) -> None: """ - Capture pre-storage (first-write-wins for noop filtering). + Capture pre-storage value if not already captured (first-write-wins). Parameters ---------- - state_changes : - The state changes frame. + tx_frame : + The transaction-level frame. address : - The address whose storage is being captured. + The address whose storage to capture. key : The storage key. value : - The storage value before modification. + The current storage value. """ slot = (address, key) - if slot not in state_changes.pre_storage: - state_changes.pre_storage[slot] = value + if slot not in tx_frame.pre_storage: + tx_frame.pre_storage[slot] = value def capture_pre_code( - state_changes: StateChanges, address: Address, code: Bytes + tx_frame: StateChanges, address: Address, code: Bytes ) -> None: """ - Capture pre-code (first-write-wins). + Capture pre-code if not already captured (first-write-wins). Parameters ---------- - state_changes : - The state changes frame. + tx_frame : + The transaction-level frame. address : - The address whose code is being captured. + The address whose code to capture. code : - The code value before modification. + The current code value. """ - if address not in state_changes.pre_code: - state_changes.pre_code[address] = code + if address not in tx_frame.pre_code: + tx_frame.pre_code[address] = code def track_address(state_changes: StateChanges, address: Address) -> None: """ - Track that an address was accessed. + Record that an address was accessed. Parameters ---------- @@ -226,7 +221,7 @@ def track_storage_read( state_changes: StateChanges, address: Address, key: Bytes32 ) -> None: """ - Track a storage read operation. + Record a storage read operation. Parameters ---------- @@ -248,7 +243,7 @@ def track_storage_write( value: U256, ) -> None: """ - Track a storage write operation with block access index. + Record a storage write keyed by (address, key, block_access_index). Parameters ---------- @@ -262,10 +257,8 @@ def track_storage_write( The new storage value. """ - block_frame = get_block_frame(state_changes) - state_changes.storage_writes[ - (address, key, get_block_access_index(block_frame)) - ] = value + idx = state_changes.block_access_index + state_changes.storage_writes[(address, key, idx)] = value def track_balance_change( @@ -274,7 +267,7 @@ def track_balance_change( new_balance: U256, ) -> None: """ - Track balance change keyed by (address, index). + Record a balance change keyed by (address, block_access_index). 
Parameters ---------- @@ -286,10 +279,8 @@ def track_balance_change( The new balance value. """ - block_frame = get_block_frame(state_changes) - state_changes.balance_changes[ - (address, get_block_access_index(block_frame)) - ] = new_balance + idx = state_changes.block_access_index + state_changes.balance_changes[(address, idx)] = new_balance def track_nonce_change( @@ -298,7 +289,7 @@ def track_nonce_change( new_nonce: U64, ) -> None: """ - Track a nonce change. + Record a nonce change as (address, block_access_index, new_nonce). Parameters ---------- @@ -310,10 +301,8 @@ def track_nonce_change( The new nonce value. """ - block_frame = get_block_frame(state_changes) - state_changes.nonce_changes.add( - (address, get_block_access_index(block_frame), new_nonce) - ) + idx = state_changes.block_access_index + state_changes.nonce_changes.add((address, idx, new_nonce)) def track_code_change( @@ -322,7 +311,7 @@ def track_code_change( new_code: Bytes, ) -> None: """ - Track a code change. + Record a code change keyed by (address, block_access_index). Parameters ---------- @@ -334,19 +323,55 @@ def track_code_change( The new code value. """ - block_frame = get_block_frame(state_changes) - state_changes.code_changes[ - (address, get_block_access_index(block_frame)) - ] = new_code + idx = state_changes.block_access_index + state_changes.code_changes[(address, idx)] = new_code + + +def track_selfdestruct( + state_changes: StateChanges, + address: Address, +) -> None: + """ + Handle selfdestruct of account created in same transaction. + + Per EIP-7928/EIP-6780: removes nonce/code changes, converts storage + writes to reads. Balance changes handled by net-zero filtering. + + Parameters + ---------- + state_changes : + The state changes tracker. + address : + The address that self-destructed. + + """ + idx = state_changes.block_access_index + + # Remove nonce changes from current transaction + state_changes.nonce_changes = { + (addr, i, nonce) + for addr, i, nonce in state_changes.nonce_changes + if not (addr == address and i == idx) + } + + # Remove code changes from current transaction + if (address, idx) in state_changes.code_changes: + del state_changes.code_changes[(address, idx)] + + # Convert storage writes from current transaction to reads + for addr, key, i in list(state_changes.storage_writes.keys()): + if addr == address and i == idx: + del state_changes.storage_writes[(addr, key, i)] + state_changes.storage_reads.add((addr, key)) def merge_on_success(child_frame: StateChanges) -> None: """ - Merge child frame's changes into parent on successful completion. + Merge child frame into parent on success. - Merges all tracked changes (reads and writes) from the child frame - into the parent frame. Filters out net-zero changes based on - captured pre-state values by comparing initial vs final values. + Child values overwrite parent values (most recent wins). No net-zero + filtering here - that happens once at transaction commit via + normalize_transaction(). 
Parameters ---------- @@ -356,50 +381,20 @@ def merge_on_success(child_frame: StateChanges) -> None: """ assert child_frame.parent is not None parent_frame = child_frame.parent + # Merge address accesses parent_frame.touched_addresses.update(child_frame.touched_addresses) - # Merge pre-state captures for transaction-level normalization - # Only if parent doesn't have value (first capture wins) - for addr, balance in child_frame.pre_balances.items(): - if addr not in parent_frame.pre_balances: - parent_frame.pre_balances[addr] = balance - for addr, nonce in child_frame.pre_nonces.items(): - if addr not in parent_frame.pre_nonces: - parent_frame.pre_nonces[addr] = nonce - for slot, value in child_frame.pre_storage.items(): - if slot not in parent_frame.pre_storage: - parent_frame.pre_storage[slot] = value - for addr, code in child_frame.pre_code.items(): - if addr not in parent_frame.pre_code: - capture_pre_code(parent_frame, addr, code) - - # Merge storage operations, filtering noop writes + # Merge storage: reads union, writes overwrite (child supersedes parent) parent_frame.storage_reads.update(child_frame.storage_reads) - for (addr, key, idx), value in child_frame.storage_writes.items(): - # Only merge if value actually changed from pre-state - if (addr, key) in child_frame.pre_storage: - if child_frame.pre_storage[(addr, key)] != value: - parent_frame.storage_writes[(addr, key, idx)] = value - # If equal, it's a noop write - convert to read only - else: - parent_frame.storage_reads.add((addr, key)) - else: - # No pre-state captured, merge as-is - parent_frame.storage_writes[(addr, key, idx)] = value - - # Merge balance changes - filter net-zero changes - # balance_changes keyed by (address, index) - for (addr, idx), final_balance in child_frame.balance_changes.items(): - if addr in child_frame.pre_balances: - if child_frame.pre_balances[addr] != final_balance: - parent_frame.balance_changes[(addr, idx)] = final_balance - # else: Net-zero change - skip entirely - else: - # No pre-balance captured, merge as-is - parent_frame.balance_changes[(addr, idx)] = final_balance - - # Merge nonce changes - keep only highest nonce per address + for storage_key, storage_value in child_frame.storage_writes.items(): + parent_frame.storage_writes[storage_key] = storage_value + + # Merge balance changes: child overwrites parent for same key + for balance_key, balance_value in child_frame.balance_changes.items(): + parent_frame.balance_changes[balance_key] = balance_value + + # Merge nonce changes: keep highest nonce per address address_final_nonces: Dict[Address, Tuple[BlockAccessIndex, U64]] = {} for addr, idx, nonce in child_frame.nonce_changes: if ( @@ -407,31 +402,46 @@ def merge_on_success(child_frame: StateChanges) -> None: or nonce > address_final_nonces[addr][1] ): address_final_nonces[addr] = (idx, nonce) - - # Merge final nonces (no net-zero filtering - nonces never decrease) for addr, (idx, final_nonce) in address_final_nonces.items(): parent_frame.nonce_changes.add((addr, idx, final_nonce)) - # Merge code changes - filter net-zero changes - # code_changes keyed by (address, index) - for (addr, idx), final_code in child_frame.code_changes.items(): - pre_code = child_frame.pre_code.get(addr, b"") - if pre_code != final_code: - parent_frame.code_changes[(addr, idx)] = final_code - # else: Net-zero change - skip entirely + # Merge code changes: child overwrites parent for same key + for code_key, code_value in child_frame.code_changes.items(): + parent_frame.code_changes[code_key] = code_value 
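# Illustrative sketch of the merge semantics above, as a minimal stand-alone
# model: only storage reads/writes are tracked, and ToyFrame / toy_merge_*
# are hypothetical stand-ins for StateChanges, merge_on_success and
# merge_on_failure. On success the child's writes overwrite the parent's
# (most recent wins); on failure the writes are discarded and the touched
# slots survive only as reads.
from dataclasses import dataclass, field
from typing import Dict, Optional, Set, Tuple

Slot = Tuple[str, int]  # (address, storage key), simplified for the sketch


@dataclass
class ToyFrame:
    parent: Optional["ToyFrame"] = None
    reads: Set[Slot] = field(default_factory=set)
    writes: Dict[Slot, int] = field(default_factory=dict)


def toy_merge_on_success(child: ToyFrame) -> None:
    assert child.parent is not None
    child.parent.reads |= child.reads
    child.parent.writes.update(child.writes)  # child supersedes parent


def toy_merge_on_failure(child: ToyFrame) -> None:
    assert child.parent is not None
    child.parent.reads |= child.reads
    # Failed writes still accessed the slots, so they merge as reads only.
    child.parent.reads |= set(child.writes)


tx_frame = ToyFrame()
call_frame = ToyFrame(parent=tx_frame)
call_frame.writes[("0xaa", 1)] = 42
toy_merge_on_failure(call_frame)  # reverted call: the write becomes a read
assert tx_frame.writes == {} and ("0xaa", 1) in tx_frame.reads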
-def commit_transaction_frame(tx_frame: StateChanges) -> None: +def merge_on_failure(child_frame: StateChanges) -> None: """ - Commit a transaction frame's changes to the block frame. + Merge child frame into parent on failure/revert. + + Only reads merge; writes are discarded (converted to reads). + + Parameters + ---------- + child_frame : + The failed child frame. + + """ + assert child_frame.parent is not None + parent_frame = child_frame.parent + # Only merge reads and address accesses on failure + parent_frame.touched_addresses.update(child_frame.touched_addresses) + parent_frame.storage_reads.update(child_frame.storage_reads) + + # Convert writes to reads (failed writes still accessed the slots) + for address, key, _idx in child_frame.storage_writes.keys(): + parent_frame.storage_reads.add((address, key)) + + # Note: balance_changes, nonce_changes, and code_changes are NOT + # merged on failure - they are discarded + - Merges ALL changes from the transaction frame into the block frame - without net-zero filtering. Each transaction's changes are recorded - at their respective transaction index, even if a later transaction - reverts a change back to its original value. +def commit_transaction_frame(tx_frame: StateChanges) -> None: + """ + Commit transaction frame to block frame. - This is different from merge_on_success() which filters net-zero - changes within a single transaction's execution. + Unlike merge_on_success(), this merges ALL changes without net-zero + filtering (each tx's changes recorded at their respective index). Parameters ---------- @@ -458,46 +468,17 @@ def commit_transaction_frame(tx_frame: StateChanges) -> None: for addr, idx, nonce in tx_frame.nonce_changes: block_frame.nonce_changes.add((addr, idx, nonce)) - # Merge code changes - filter net-zero changes within the transaction - # Compare final code against transaction's pre-code + # Merge code changes (net-zero filtering done in normalize_transaction) for (addr, idx), final_code in tx_frame.code_changes.items(): - pre_code = tx_frame.pre_code.get(addr, b"") - if pre_code != final_code: - block_frame.code_changes[(addr, idx)] = final_code - # else: Net-zero change within this transaction - skip - - -def merge_on_failure(child_frame: StateChanges) -> None: - """ - Merge child frame's changes into parent on failed completion. - - Merges only read operations from the child frame into the parent. - Write operations are discarded since the frame reverted. - This is called when a call frame fails/reverts. - - Parameters - ---------- - child_frame : - The failed child frame. - - """ - assert child_frame.parent is not None - parent_frame = child_frame.parent - # Only merge reads and address accesses on failure - parent_frame.touched_addresses.update(child_frame.touched_addresses) - parent_frame.storage_reads.update(child_frame.storage_reads) - - # Convert writes to reads (failed writes still accessed the slots) - for address, key, _idx in child_frame.storage_writes.keys(): - parent_frame.storage_reads.add((address, key)) - - # Note: balance_changes, nonce_changes, and code_changes are NOT - # merged on failure - they are discarded + block_frame.code_changes[(addr, idx)] = final_code def create_child_frame(parent: StateChanges) -> StateChanges: """ - Create a child frame for nested execution. + Create a child frame linked to the given parent. + + Inherits block_access_index from parent so track functions can + access it directly without walking up the frame hierarchy. 
Parameters ---------- @@ -507,99 +488,69 @@ def create_child_frame(parent: StateChanges) -> StateChanges: Returns ------- child : StateChanges - A new child frame with parent reference set. + A new child frame with parent reference and inherited + block_access_index. """ - return StateChanges(parent=parent) - - -def handle_in_transaction_selfdestruct( - state_changes: StateChanges, - address: Address, - current_block_access_index: BlockAccessIndex, -) -> None: - """ - Handle account self-destructed in same transaction as creation. - - Per EIP-7928 and EIP-6780, accounts destroyed within their creation - transaction must have: - - Nonce changes from current transaction removed - - Code changes from current transaction removed - - Storage writes from current transaction converted to reads - - Balance changes handled by net-zero filtering - - Parameters - ---------- - state_changes : StateChanges - The state changes tracker (typically the block-level frame). - address : Address - The address that self-destructed. - current_block_access_index : BlockAccessIndex - The current block access index (transaction index). - - """ - # Remove nonce changes from current transaction - state_changes.nonce_changes = { - (addr, idx, nonce) - for addr, idx, nonce in state_changes.nonce_changes - if not (addr == address and idx == current_block_access_index) - } - - # Remove code changes from current transaction - if (address, current_block_access_index) in state_changes.code_changes: - del state_changes.code_changes[(address, current_block_access_index)] - - # Convert storage writes from current transaction to reads - for addr, key, idx in list(state_changes.storage_writes.keys()): - if addr == address and idx == current_block_access_index: - del state_changes.storage_writes[(addr, key, idx)] - state_changes.storage_reads.add((addr, key)) + return StateChanges( + parent=parent, + block_access_index=parent.block_access_index, + ) -def normalize_balance_changes_for_transaction( - block_frame: StateChanges, - current_block_access_index: BlockAccessIndex, +def filter_net_zero_frame_changes( + tx_frame: StateChanges, state: "State", ) -> None: """ - Normalize balance changes for the current transaction. + Filter net-zero changes from transaction frame before commit. - Removes balance changes where post-transaction balance equals - pre-transaction balance. This handles net-zero transfers across - the entire transaction. - - This function should be called after merging transaction frames - into the block frame to filter out addresses where balance didn't - actually change from transaction start to transaction end. + Compares final values against pre-tx state for storage, balance, and code. + Net-zero storage writes are converted to reads. Net-zero balance/code + changes are removed entirely. Nonces are not filtered (only increment). Parameters ---------- - block_frame : StateChanges - The block-level state changes frame. - current_block_access_index : BlockAccessIndex - The current transaction's block access index. - state : State - The current state to read final balances from. + tx_frame : + The transaction-level state changes frame. + state : + The current state to read final values from. 
""" # Import locally to avoid circular import from .state import get_account - # Collect addresses that have balance changes in this transaction + idx = tx_frame.block_access_index + + # Filter storage: compare against pre_storage, convert net-zero to reads + for addr, key, i in list(tx_frame.storage_writes.keys()): + if i != idx: + continue + final_value = tx_frame.storage_writes[(addr, key, i)] + if (addr, key) in tx_frame.pre_storage: + if tx_frame.pre_storage[(addr, key)] == final_value: + # Net-zero write - convert to read + del tx_frame.storage_writes[(addr, key, i)] + tx_frame.storage_reads.add((addr, key)) + + # Filter balance: compare pre vs post, remove if equal addresses_to_check = [ - addr - for (addr, idx) in block_frame.balance_changes.keys() - if idx == current_block_access_index + addr for (addr, i) in tx_frame.balance_changes.keys() if i == idx ] - - # For each address, compare pre vs post balance for addr in addresses_to_check: - if addr in block_frame.pre_balances: - pre_balance = block_frame.pre_balances[addr] + if addr in tx_frame.pre_balances: + pre_balance = tx_frame.pre_balances[addr] post_balance = get_account(state, addr).balance - if pre_balance == post_balance: - # Remove balance change for this address - net-zero transfer - del block_frame.balance_changes[ - (addr, current_block_access_index) - ] + del tx_frame.balance_changes[(addr, idx)] + + # Filter code: compare pre vs post, remove if equal + for addr, i in list(tx_frame.code_changes.keys()): + if i != idx: + continue + final_code = tx_frame.code_changes[(addr, i)] + pre_code = tx_frame.pre_code.get(addr, b"") + if pre_code == final_code: + del tx_frame.code_changes[(addr, i)] + + # Nonces: no filtering needed (nonces only increment, never net-zero) diff --git a/src/ethereum/forks/amsterdam/utils/message.py b/src/ethereum/forks/amsterdam/utils/message.py index def5b36e20..130532fef6 100644 --- a/src/ethereum/forks/amsterdam/utils/message.py +++ b/src/ethereum/forks/amsterdam/utils/message.py @@ -17,7 +17,7 @@ from ..fork_types import Address from ..state import get_account -from ..state_tracker import StateChanges +from ..state_tracker import create_child_frame from ..transactions import Transaction from ..vm import BlockEnvironment, Message, TransactionEnvironment from ..vm.precompiled_contracts.mapping import PRE_COMPILED_CONTRACTS @@ -28,7 +28,6 @@ def prepare_message( block_env: BlockEnvironment, tx_env: TransactionEnvironment, tx: Transaction, - transaction_state_changes: StateChanges, ) -> Message: """ Execute a transaction against the provided environment. @@ -41,8 +40,6 @@ def prepare_message( Environment for the transaction. tx : Transaction to be executed. - transaction_state_changes : - State changes specific to this transaction. 
Returns ------- @@ -73,6 +70,9 @@ def prepare_message( accessed_addresses.add(current_target) + # Create call frame as child of transaction frame + call_frame = create_child_frame(tx_env.state_changes) + return Message( block_env=block_env, tx_env=tx_env, @@ -91,5 +91,6 @@ def prepare_message( accessed_storage_keys=set(tx_env.access_list_storage_keys), disable_precompiles=False, parent_evm=None, - transaction_state_changes=transaction_state_changes, + is_create=isinstance(tx.to, Bytes0), + state_changes=call_frame, ) diff --git a/src/ethereum/forks/amsterdam/vm/__init__.py b/src/ethereum/forks/amsterdam/vm/__init__.py index d414aa50f9..6726880fcb 100644 --- a/src/ethereum/forks/amsterdam/vm/__init__.py +++ b/src/ethereum/forks/amsterdam/vm/__init__.py @@ -49,9 +49,7 @@ class BlockEnvironment: prev_randao: Bytes32 excess_blob_gas: U64 parent_beacon_block_root: Hash32 - block_state_changes: StateChanges = field( - default_factory=lambda: StateChanges() - ) + state_changes: StateChanges @dataclass @@ -117,6 +115,7 @@ class TransactionEnvironment: authorizations: Tuple[Authorization, ...] index_in_block: Optional[Uint] tx_hash: Optional[Hash32] + state_changes: "StateChanges" = field(default_factory=StateChanges) @dataclass @@ -142,7 +141,8 @@ class Message: accessed_storage_keys: Set[Tuple[Address, Bytes32]] disable_precompiles: bool parent_evm: Optional["Evm"] - transaction_state_changes: StateChanges + is_create: bool + state_changes: "StateChanges" = field(default_factory=StateChanges) @dataclass diff --git a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py index ec95fd1a47..1f1aac9d97 100644 --- a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py +++ b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py @@ -12,7 +12,6 @@ from ethereum.crypto.hash import keccak256 from ethereum.exceptions import InvalidBlock, InvalidSignatureError -# track_address_access removed - now using state_changes.track_address() from ..fork_types import Address, Authorization from ..state import ( account_exists, @@ -20,7 +19,12 @@ increment_nonce, set_authority_code, ) -from ..state_tracker import capture_pre_code, track_address +from ..state_tracker import ( + capture_pre_code, + track_address, + track_code_change, + track_nonce_change, +) from ..utils.hexadecimal import hex_to_address from ..vm.gas import GAS_COLD_ACCOUNT_ACCESS, GAS_WARM_ACCESS from . 
import Evm, Message @@ -190,11 +194,9 @@ def read_delegation_target(evm: Evm, delegated_address: Address) -> Bytes: """ state = evm.message.block_env.state - # Add to accessed addresses for warm/cold gas accounting if delegated_address not in evm.accessed_addresses: evm.accessed_addresses.add(delegated_address) - # Track the address for BAL track_address(evm.state_changes, delegated_address) return get_account(state, delegated_address).code @@ -236,7 +238,7 @@ def set_delegation(message: Message) -> U256: authority_account = get_account(state, authority) authority_code = authority_account.code - track_address(message.block_env.block_state_changes, authority) + track_address(message.tx_env.state_changes, authority) if authority_code and not is_valid_delegation(authority_code): continue @@ -253,22 +255,19 @@ def set_delegation(message: Message) -> U256: else: code_to_set = EOA_DELEGATION_MARKER + auth.address - state_changes = ( - message.transaction_state_changes - or message.block_env.block_state_changes - ) + tx_frame = message.tx_env.state_changes + # EIP-7928: Capture pre-code before any changes + capture_pre_code(tx_frame, authority, authority_code) - # Capture pre-code before any changes (first-write-wins) - capture_pre_code(state_changes, authority, authority_code) + set_authority_code(state, authority, code_to_set) - # Set delegation code - # Uses authority_code (current) for tracking to handle multiple auths - # Net-zero filtering happens in commit_transaction_frame - set_authority_code( - state, authority, code_to_set, state_changes, authority_code - ) + if authority_code != code_to_set: + # Track code change if different from current + track_code_change(tx_frame, authority, code_to_set) - increment_nonce(state, authority, state_changes) + increment_nonce(state, authority) + nonce_after = get_account(state, authority).nonce + track_nonce_change(tx_frame, authority, U64(nonce_after)) if message.code_address is None: raise InvalidBlock("Invalid type 4 transaction: no target") diff --git a/src/ethereum/forks/amsterdam/vm/instructions/storage.py b/src/ethereum/forks/amsterdam/vm/instructions/storage.py index 8edff23534..de7ef935f5 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/storage.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/storage.py @@ -26,7 +26,7 @@ track_storage_write, ) from .. 
import Evm -from ..exceptions import OutOfGasError, WriteInStaticContext +from ..exceptions import WriteInStaticContext from ..gas import ( GAS_CALL_STIPEND, GAS_COLD_SLOAD, @@ -55,25 +55,26 @@ def sload(evm: Evm) -> None: key = pop(evm.stack).to_be_bytes32() # GAS - gas_cost = ( - GAS_WARM_ACCESS - if (evm.message.current_target, key) in evm.accessed_storage_keys - else GAS_COLD_SLOAD - ) - check_gas(evm, gas_cost) - if (evm.message.current_target, key) not in evm.accessed_storage_keys: - evm.accessed_storage_keys.add((evm.message.current_target, key)) - track_storage_read( - evm.state_changes, + is_cold_access = ( evm.message.current_target, key, - ) + ) not in evm.accessed_storage_keys + gas_cost = GAS_COLD_SLOAD if is_cold_access else GAS_WARM_ACCESS + charge_gas(evm, gas_cost) # OPERATION state = evm.message.block_env.state value = get_storage(state, evm.message.current_target, key) + if is_cold_access: + evm.accessed_storage_keys.add((evm.message.current_target, key)) + track_storage_read( + evm.state_changes, + evm.message.current_target, + key, + ) + push(evm.stack, value) # PROGRAM COUNTER @@ -93,19 +94,14 @@ def sstore(evm: Evm) -> None: # STACK key = pop(evm.stack).to_be_bytes32() new_value = pop(evm.stack) - if evm.gas_left <= GAS_CALL_STIPEND: - raise OutOfGasError - # Check static context before accessing storage + # check we have at least the stipend gas + check_gas(evm, GAS_CALL_STIPEND + Uint(1)) + + # check static context before accessing storage if evm.message.is_static: raise WriteInStaticContext - state = evm.message.block_env.state - original_value = get_storage_original( - state, evm.message.current_target, key - ) - current_value = get_storage(state, evm.message.current_target, key) - # GAS gas_cost = Uint(0) is_cold_access = ( @@ -116,28 +112,34 @@ def sstore(evm: Evm) -> None: if is_cold_access: gas_cost += GAS_COLD_SLOAD - if original_value == current_value and current_value != new_value: - if original_value == 0: - gas_cost += GAS_STORAGE_SET - else: - gas_cost += GAS_STORAGE_UPDATE - GAS_COLD_SLOAD - else: - gas_cost += GAS_WARM_ACCESS + state = evm.message.block_env.state + original_value = get_storage_original( + state, evm.message.current_target, key + ) + current_value = get_storage(state, evm.message.current_target, key) + + if is_cold_access: + evm.accessed_storage_keys.add((evm.message.current_target, key)) - # Track storage access BEFORE checking gas (EIP-7928) - # Even if we run out of gas, the access attempt should be tracked capture_pre_storage( - evm.state_changes, evm.message.current_target, key, current_value + evm.message.tx_env.state_changes, + evm.message.current_target, + key, + current_value, ) track_storage_read( evm.state_changes, evm.message.current_target, key, ) - check_gas(evm, gas_cost) - if is_cold_access: - evm.accessed_storage_keys.add((evm.message.current_target, key)) + if original_value == current_value and current_value != new_value: + if original_value == 0: + gas_cost += GAS_STORAGE_SET + else: + gas_cost += GAS_STORAGE_UPDATE - GAS_COLD_SLOAD + else: + gas_cost += GAS_WARM_ACCESS charge_gas(evm, gas_cost) @@ -165,7 +167,10 @@ def sstore(evm: Evm) -> None: # OPERATION set_storage(state, evm.message.current_target, key, new_value) track_storage_write( - evm.state_changes, evm.message.current_target, key, new_value + evm.state_changes, + evm.message.current_target, + key, + new_value, ) # PROGRAM COUNTER diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py 
index 288594bfe7..e5e2ec306f 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -12,7 +12,7 @@ """ from ethereum_types.bytes import Bytes, Bytes0 -from ethereum_types.numeric import U256, Uint +from ethereum_types.numeric import U64, U256, Uint from ethereum.utils.numeric import ceil32 @@ -27,7 +27,13 @@ move_ether, set_account_balance, ) -from ...state_tracker import capture_pre_balance, track_address +from ...state_tracker import ( + capture_pre_balance, + create_child_frame, + track_address, + track_balance_change, + track_nonce_change, +) from ...utils.address import ( compute_contract_address, compute_create2_contract_address, @@ -120,11 +126,27 @@ def generic_create( if account_has_code_or_nonce( state, contract_address ) or account_has_storage(state, contract_address): - increment_nonce(state, evm.message.current_target, evm.state_changes) + increment_nonce(state, evm.message.current_target) + nonce_after = get_account(state, evm.message.current_target).nonce + track_nonce_change( + evm.state_changes, + evm.message.current_target, + U64(nonce_after), + ) push(evm.stack, U256(0)) return - increment_nonce(state, evm.message.current_target, evm.state_changes) + # Track nonce increment for CREATE + increment_nonce(state, evm.message.current_target) + nonce_after = get_account(state, evm.message.current_target).nonce + track_nonce_change( + evm.state_changes, + evm.message.current_target, + U64(nonce_after), + ) + + # Create call frame as child of parent EVM's frame + child_state_changes = create_child_frame(evm.state_changes) child_message = Message( block_env=evm.message.block_env, @@ -144,7 +166,8 @@ def generic_create( accessed_storage_keys=evm.accessed_storage_keys.copy(), disable_precompiles=False, parent_evm=evm, - transaction_state_changes=evm.message.transaction_state_changes, + is_create=True, + state_changes=child_state_changes, ) child_evm = process_create_message(child_message) @@ -321,8 +344,9 @@ def generic_call( evm.memory, memory_input_start_position, memory_input_size ) - # EIP-7928: Child message inherits transaction_state_changes from parent - # The actual child frame will be created automatically in process_message + # Create call frame as child of parent EVM's frame + child_state_changes = create_child_frame(evm.state_changes) + child_message = Message( block_env=evm.message.block_env, tx_env=evm.message.tx_env, @@ -341,7 +365,8 @@ def generic_call( accessed_storage_keys=evm.accessed_storage_keys.copy(), disable_precompiles=disable_precompiles, parent_evm=evm, - transaction_state_changes=evm.message.transaction_state_changes, + is_create=False, + state_changes=child_state_changes, ) child_evm = process_message(child_message) @@ -570,11 +595,13 @@ def callcode(evm: Evm) -> None: ).balance # EIP-7928: For CALLCODE with value transfer, capture pre-balance - # in parent frame. CALLCODE transfers value from/to current_target + # in transaction frame. 
CALLCODE transfers value from/to current_target # (same address), affecting current storage context, not child frame if value != 0 and sender_balance >= value: capture_pre_balance( - evm.state_changes, evm.message.current_target, sender_balance + evm.message.tx_env.state_changes, + evm.message.current_target, + sender_balance, ) if sender_balance < value: @@ -643,25 +670,41 @@ def selfdestruct(evm: Evm) -> None: charge_gas(evm, gas_cost) + state = evm.message.block_env.state originator = evm.message.current_target - originator_balance = get_account( - evm.message.block_env.state, originator - ).balance + originator_balance = get_account(state, originator).balance + beneficiary_balance = get_account(state, beneficiary).balance + + # Get tracking context + tx_frame = evm.message.tx_env.state_changes + + # Capture pre-balances for net-zero filtering + track_address(evm.state_changes, originator) + capture_pre_balance(tx_frame, originator, originator_balance) + capture_pre_balance(tx_frame, beneficiary, beneficiary_balance) - move_ether( - evm.message.block_env.state, + # Transfer balance + move_ether(state, originator, beneficiary, originator_balance) + + # Track balance changes + originator_new_balance = get_account(state, originator).balance + beneficiary_new_balance = get_account(state, beneficiary).balance + track_balance_change( + evm.state_changes, originator, - beneficiary, - originator_balance, + originator_new_balance, + ) + track_balance_change( evm.state_changes, + beneficiary, + beneficiary_new_balance, ) # register account for deletion only if it was created # in the same transaction - if originator in evm.message.block_env.state.created_accounts: - set_account_balance( - evm.message.block_env.state, originator, U256(0), evm.state_changes - ) + if originator in state.created_accounts: + set_account_balance(state, originator, U256(0)) + track_balance_change(evm.state_changes, originator, U256(0)) evm.accounts_to_delete.add(originator) # HALT the execution diff --git a/src/ethereum/forks/amsterdam/vm/interpreter.py b/src/ethereum/forks/amsterdam/vm/interpreter.py index 893a0d8833..154c56de11 100644 --- a/src/ethereum/forks/amsterdam/vm/interpreter.py +++ b/src/ethereum/forks/amsterdam/vm/interpreter.py @@ -15,7 +15,7 @@ from typing import Optional, Set, Tuple from ethereum_types.bytes import Bytes, Bytes0 -from ethereum_types.numeric import U256, Uint, ulen +from ethereum_types.numeric import U64, U256, Uint, ulen from ethereum.exceptions import EthereumException from ethereum.trace import ( @@ -46,10 +46,13 @@ ) from ..state_tracker import ( StateChanges, - create_child_frame, + capture_pre_balance, merge_on_failure, merge_on_success, track_address, + track_balance_change, + track_code_change, + track_nonce_change, ) from ..vm import Message from ..vm.eoa_delegation import get_delegated_code_address, set_delegation @@ -73,61 +76,6 @@ MAX_INIT_CODE_SIZE = 2 * MAX_CODE_SIZE -def get_parent_frame(message: Message) -> StateChanges: - """ - Get the appropriate parent frame for a message's state changes. - - Frame selection logic: - - Nested calls: Parent EVM's frame - - Top-level calls: Transaction frame - - System transactions: Block frame - - Parameters - ---------- - message : - The message being processed. - - Returns - ------- - parent_frame : StateChanges - The parent frame to use for creating child frames. 
- - """ - if message.parent_evm is not None: - return message.parent_evm.state_changes - elif message.transaction_state_changes is not None: - return message.transaction_state_changes - else: - return message.block_env.block_state_changes - - -def get_message_state_frame(message: Message) -> StateChanges: - """ - Determine and create the appropriate state tracking frame for a message. - - Creates a call frame as a child of the appropriate parent frame. - - Parameters - ---------- - message : - The message being processed. - - Returns - ------- - state_frame : StateChanges - The state tracking frame to use for this message execution. - - """ - parent_frame = get_parent_frame(message) - if ( - message.parent_evm is not None - or message.transaction_state_changes is not None - ): - return create_child_frame(parent_frame) - else: - return parent_frame - - @dataclass class MessageCallOutput: """ @@ -173,9 +121,7 @@ def process_message_call(message: Message) -> MessageCallOutput: is_collision = account_has_code_or_nonce( block_env.state, message.current_target ) or account_has_storage(block_env.state, message.current_target) - track_address( - message.transaction_state_changes, message.current_target - ) + track_address(message.tx_env.state_changes, message.current_target) if is_collision: return MessageCallOutput( Uint(0), @@ -197,11 +143,7 @@ def process_message_call(message: Message) -> MessageCallOutput: message.accessed_addresses.add(delegated_address) message.code = get_account(block_env.state, delegated_address).code message.code_address = delegated_address - - # EIP-7928: Track delegation target when loaded as call target - track_address( - message.block_env.block_state_changes, delegated_address - ) + track_address(message.block_env.state_changes, delegated_address) evm = process_message(message) @@ -263,11 +205,15 @@ def process_create_message(message: Message) -> Evm: # added to SELFDESTRUCT by EIP-6780. 
mark_account_created(state, message.current_target) - parent_frame = get_parent_frame(message) - create_frame = create_child_frame(parent_frame) + increment_nonce(state, message.current_target) + nonce_after = get_account(state, message.current_target).nonce + track_nonce_change( + message.state_changes, + message.current_target, + U64(nonce_after), + ) - increment_nonce(state, message.current_target, create_frame) - evm = process_message(message, parent_state_frame=create_frame) + evm = process_message(message) if not evm.error: contract_code = evm.output contract_code_gas = Uint(len(contract_code)) * GAS_CODE_DEPOSIT @@ -280,26 +226,28 @@ def process_create_message(message: Message) -> Evm: raise OutOfGasError except ExceptionalHalt as error: rollback_transaction(state, transient_storage) - merge_on_failure(create_frame) + merge_on_failure(message.state_changes) evm.gas_left = Uint(0) evm.output = b"" evm.error = error else: - set_code( - state, message.current_target, contract_code, create_frame - ) + # Note: No need to capture pre code since it's always b"" here + set_code(state, message.current_target, contract_code) + if contract_code != b"": + track_code_change( + message.state_changes, + message.current_target, + contract_code, + ) commit_transaction(state, transient_storage) - merge_on_success(create_frame) + merge_on_success(message.state_changes) else: rollback_transaction(state, transient_storage) - merge_on_failure(create_frame) + merge_on_failure(message.state_changes) return evm -def process_message( - message: Message, - parent_state_frame: Optional[StateChanges] = None, -) -> Evm: +def process_message(message: Message) -> Evm: """ Move ether and execute the relevant code. @@ -307,12 +255,6 @@ def process_message( ---------- message : Transaction specific items. - parent_state_frame : - Optional parent frame for state tracking. When provided (e.g., for - CREATE's init code), state changes are tracked as a child of this - frame instead of the default parent determined by the message. - This ensures proper frame hierarchy for CREATE operations where - init code changes must be children of the CREATE frame. Returns ------- @@ -325,37 +267,54 @@ def process_message( if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") + # take snapshot of state before processing the message begin_transaction(state, transient_storage) - if parent_state_frame is not None: - # Use provided parent for CREATE's init code execution. - # This ensures init code state changes are children of create_frame, - # so they are properly converted to reads if code deposit fails. 
- parent_changes = parent_state_frame - state_changes = create_child_frame(parent_state_frame) - else: - parent_changes = get_parent_frame(message) - state_changes = get_message_state_frame(message) - - track_address(state_changes, message.current_target) + track_address(message.state_changes, message.current_target) if message.should_transfer_value and message.value != 0: + # Track value transfer + sender_balance = get_account(state, message.caller).balance + recipient_balance = get_account(state, message.current_target).balance + + track_address(message.state_changes, message.caller) + capture_pre_balance( + message.tx_env.state_changes, message.caller, sender_balance + ) + capture_pre_balance( + message.tx_env.state_changes, + message.current_target, + recipient_balance, + ) + move_ether( - state, + state, message.caller, message.current_target, message.value + ) + + sender_new_balance = get_account(state, message.caller).balance + recipient_new_balance = get_account( + state, message.current_target + ).balance + + track_balance_change( + message.state_changes, message.caller, + U256(sender_new_balance), + ) + track_balance_change( + message.state_changes, message.current_target, - message.value, - state_changes, + U256(recipient_new_balance), ) - evm = execute_code(message, state_changes) + evm = execute_code(message, message.state_changes) if evm.error: rollback_transaction(state, transient_storage) - if state_changes != parent_changes: + if not message.is_create: merge_on_failure(evm.state_changes) else: commit_transaction(state, transient_storage) - if state_changes != parent_changes: + if not message.is_create: merge_on_success(evm.state_changes) return evm diff --git a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py index 2d3f8b9e3c..c029091467 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py @@ -16,7 +16,9 @@ from ethereum import trace from ethereum.exceptions import EthereumException, InvalidBlock from ethereum.fork_criteria import ByBlockNumber, ByTimestamp, Unscheduled -from ethereum_spec_tools.forks import Hardfork, TemporaryHardfork +from ethereum_spec_tools.forks import TemporaryHardfork +from ethereum.forks.amsterdam.state_tracker import StateChanges +from ethereum_spec_tools.forks import Hardfork from ..loaders.fixture_loader import Load from ..utils import ( @@ -308,6 +310,9 @@ def block_environment(self) -> Any: ) kw_arguments["excess_blob_gas"] = self.env.excess_blob_gas + if self.fork.is_after_fork("amsterdam"): + kw_arguments["state_changes"] = StateChanges() + return block_environment(**kw_arguments) def backup_state(self) -> None: @@ -417,7 +422,7 @@ def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: increment_block_access_index, ) - increment_block_access_index(block_env.block_state_changes) + increment_block_access_index(block_env.state_changes) if not self.fork.proof_of_stake: if self.options.state_reward is None: @@ -436,9 +441,9 @@ def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: self.fork.process_general_purpose_requests(block_env, block_output) if self.fork.is_after_fork("amsterdam"): - # Build block access list from block_env.block_state_changes + # Build block access list from block_env.state_changes block_output.block_access_list = self.fork.build_block_access_list( - block_env.block_state_changes + block_env.state_changes ) def run_blockchain_test(self) -> None: From 
cfa66a2f75ef13fb8d6956560a3875bd443ace45 Mon Sep 17 00:00:00 2001 From: Guruprasad Kamath <48196632+gurukamath@users.noreply.github.com> Date: Mon, 8 Dec 2025 20:40:24 +0100 Subject: [PATCH 047/154] fix(spec,tests): Change BAL to List[AccountChange] (#1844) * fix(spec): update BAL type * fix(tests): run amsterdam jsons * fix(test-specs): Add BAL to genesis beyond Amsterdam fork --------- Co-authored-by: fselmo --- .../src/execution_testing/specs/blockchain.py | 5 +++++ .../amsterdam/block_access_lists/builder.py | 8 ++++---- .../amsterdam/block_access_lists/rlp_types.py | 13 ++----------- .../amsterdam/block_access_lists/rlp_utils.py | 10 ++++------ src/ethereum/forks/amsterdam/vm/__init__.py | 4 +--- .../evm_tools/loaders/fixture_loader.py | 4 ++++ .../evm_tools/t8n/t8n_types.py | 18 ++++++------------ tests/json_infra/__init__.py | 6 +++++- .../helpers/load_blockchain_tests.py | 3 +++ 9 files changed, 34 insertions(+), 37 deletions(-) diff --git a/packages/testing/src/execution_testing/specs/blockchain.py b/packages/testing/src/execution_testing/specs/blockchain.py index a508a78ad6..aef020049b 100644 --- a/packages/testing/src/execution_testing/specs/blockchain.py +++ b/packages/testing/src/execution_testing/specs/blockchain.py @@ -563,11 +563,16 @@ def make_genesis( state_root = pre_alloc.state_root() genesis = FixtureHeader.genesis(self.fork, env, state_root) + genesis_bal = None + if self.fork.header_bal_hash_required(block_number=0, timestamp=0): + genesis_bal = BlockAccessList() + return ( pre_alloc, FixtureBlockBase( header=genesis, withdrawals=None if env.withdrawals is None else [], + block_access_list=genesis_bal, ).with_rlp(txs=[]), ) diff --git a/src/ethereum/forks/amsterdam/block_access_lists/builder.py b/src/ethereum/forks/amsterdam/block_access_lists/builder.py index c1dbf98222..e860c84068 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/builder.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/builder.py @@ -405,7 +405,7 @@ def _build_from_builder( [`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList # noqa: E501 """ - account_changes_list = [] + block_access_list: BlockAccessList = [] for address, changes in builder.accounts.items(): storage_changes = [] @@ -444,11 +444,11 @@ def _build_from_builder( code_changes=code_changes, ) - account_changes_list.append(account_change) + block_access_list.append(account_change) - account_changes_list.sort(key=lambda x: x.address) + block_access_list.sort(key=lambda x: x.address) - return BlockAccessList(account_changes=tuple(account_changes_list)) + return block_access_list def build_block_access_list( diff --git a/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py b/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py index e4d37d6a74..c4f49ff4aa 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py @@ -8,7 +8,7 @@ """ from dataclasses import dataclass -from typing import Tuple +from typing import List, Tuple from ethereum_types.bytes import Bytes, Bytes20, Bytes32 from ethereum_types.frozen import slotted_freezable @@ -118,13 +118,4 @@ class AccountChanges: code_changes: Tuple[CodeChange, ...] -@slotted_freezable -@dataclass -class BlockAccessList: - """ - Block-Level Access List for EIP-7928. - Contains all addresses accessed during block execution. - RLP encoded as a list of AccountChanges. - """ - - account_changes: Tuple[AccountChanges, ...] 
+BlockAccessList = List[AccountChanges] diff --git a/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py b/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py index 738abce181..2cd5b827f3 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py @@ -71,7 +71,7 @@ def rlp_encode_block_access_list(block_access_list: BlockAccessList) -> Bytes: """ # Encode as a list of AccountChanges directly (not wrapped) account_changes_list = [] - for account in block_access_list.account_changes: + for account in block_access_list: # Each account is encoded as: # [address, storage_changes, storage_reads, # balance_changes, nonce_changes, code_changes] @@ -146,7 +146,7 @@ def validate_block_access_list_against_execution( # 1. Validate structural constraints # Check that storage changes and reads don't overlap for the same slot - for account in block_access_list.account_changes: + for account in block_access_list: changed_slots = {sc.slot for sc in account.storage_changes} read_slots = set(account.storage_reads) @@ -155,9 +155,7 @@ def validate_block_access_list_against_execution( return False # 2. Validate ordering (addresses should be sorted lexicographically) - addresses = [ - account.address for account in block_access_list.account_changes - ] + addresses = [account.address for account in block_access_list] if addresses != sorted(addresses): return False @@ -165,7 +163,7 @@ def validate_block_access_list_against_execution( max_block_access_index = ( MAX_TXS + 1 ) # 0 for pre-exec, 1..MAX_TXS for txs, MAX_TXS+1 for post-exec - for account in block_access_list.account_changes: + for account in block_access_list: # Validate storage slots are sorted within each account storage_slots = [sc.slot for sc in account.storage_changes] if storage_slots != sorted(storage_slots): diff --git a/src/ethereum/forks/amsterdam/vm/__init__.py b/src/ethereum/forks/amsterdam/vm/__init__.py index 6726880fcb..6c47b50acf 100644 --- a/src/ethereum/forks/amsterdam/vm/__init__.py +++ b/src/ethereum/forks/amsterdam/vm/__init__.py @@ -94,9 +94,7 @@ class BlockOutput: ) blob_gas_used: U64 = U64(0) requests: List[Bytes] = field(default_factory=list) - block_access_list: BlockAccessList = field( - default_factory=lambda: BlockAccessList(account_changes=()) - ) + block_access_list: BlockAccessList = field(default_factory=list) @dataclass diff --git a/src/ethereum_spec_tools/evm_tools/loaders/fixture_loader.py b/src/ethereum_spec_tools/evm_tools/loaders/fixture_loader.py index d01eda47c9..ab9e1b99d9 100644 --- a/src/ethereum_spec_tools/evm_tools/loaders/fixture_loader.py +++ b/src/ethereum_spec_tools/evm_tools/loaders/fixture_loader.py @@ -196,4 +196,8 @@ def json_to_header(self, raw: Any) -> Any: requests_hash = hex_to_bytes32(raw.get("requestsHash")) parameters.append(requests_hash) + if "blockAccessListHash" in raw: + bal_hash = hex_to_bytes32(raw.get("blockAccessListHash")) + parameters.append(bal_hash) + return self.fork.Header(*parameters) diff --git a/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py b/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py index 1aac5e4b3f..cb13727f0d 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py @@ -333,18 +333,17 @@ def update(self, t8n: "T8N", block_env: Any, block_output: Any) -> None: ) ) - def _block_access_list_to_json(self, bal: Any) -> Any: + @staticmethod + def _block_access_list_to_json(account_changes: Any) -> Any: 
""" Convert BlockAccessList to JSON format matching the Pydantic models. """ - account_changes = [] - - for account in bal.account_changes: + json_account_changes = [] + for account in account_changes: account_data: Dict[str, Any] = { "address": "0x" + account.address.hex() } - # Add storage changes if present if account.storage_changes: storage_changes = [] for slot_change in account.storage_changes: @@ -364,14 +363,12 @@ def _block_access_list_to_json(self, bal: Any) -> Any: storage_changes.append(slot_data) account_data["storageChanges"] = storage_changes - # Add storage reads if present if account.storage_reads: account_data["storageReads"] = [ int.from_bytes(slot, "big") for slot in account.storage_reads ] - # Add balance changes if present if account.balance_changes: account_data["balanceChanges"] = [ { @@ -381,7 +378,6 @@ def _block_access_list_to_json(self, bal: Any) -> Any: for change in account.balance_changes ] - # Add nonce changes if present if account.nonce_changes: account_data["nonceChanges"] = [ { @@ -391,7 +387,6 @@ def _block_access_list_to_json(self, bal: Any) -> Any: for change in account.nonce_changes ] - # Add code changes if present if account.code_changes: account_data["codeChanges"] = [ { @@ -401,10 +396,9 @@ def _block_access_list_to_json(self, bal: Any) -> Any: for change in account.code_changes ] - account_changes.append(account_data) + json_account_changes.append(account_data) - # return as list directly - return account_changes + return json_account_changes def json_encode_receipts(self) -> Any: """ diff --git a/tests/json_infra/__init__.py b/tests/json_infra/__init__.py index fd2db5553f..6fa7c5dbbe 100644 --- a/tests/json_infra/__init__.py +++ b/tests/json_infra/__init__.py @@ -27,9 +27,13 @@ class _FixtureSource(TypedDict): "fixture_path": "tests/json_infra/fixtures/ethereum_tests", }, "latest_fork_tests": { - "url": "https://github.com/ethereum/execution-spec-tests/releases/download/v5.0.0/fixtures_develop.tar.gz", + "url": "https://github.com/ethereum/execution-spec-tests/releases/download/v5.3.0/fixtures_develop.tar.gz", "fixture_path": "tests/json_infra/fixtures/latest_fork_tests", }, + "amsterdam_tests": { + "url": "https://github.com/ethereum/execution-spec-tests/releases/download/bal%40v1.8.0/fixtures_bal.tar.gz", + "fixture_path": "tests/json_infra/fixtures/amsterdam_tests", + }, } diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py index ec82b42f47..73cf2c662d 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -138,6 +138,9 @@ def runtest(self) -> None: if hasattr(genesis_header, "requests_root"): parameters.append(()) + if hasattr(genesis_header, "block_access_list_hash"): + parameters.append([]) + genesis_block = load.fork.Block(*parameters) genesis_header_hash = hex_to_bytes( From a6745faee42cdc58e9103a9a2547d74811501afb Mon Sep 17 00:00:00 2001 From: fselmo Date: Mon, 8 Dec 2025 12:52:56 -0700 Subject: [PATCH 048/154] fix(spec-specs): Fix issues after rebasing with forks/osaka - fix mkdocs --- .../evm_tools/loaders/fork_loader.py | 9 +++++++++ src/ethereum_spec_tools/evm_tools/t8n/__init__.py | 13 ++++--------- src/ethereum_spec_tools/evm_tools/t8n/env.py | 2 +- tests/cancun/create/__init__.py | 1 + 4 files changed, 15 insertions(+), 10 deletions(-) create mode 100644 tests/cancun/create/__init__.py diff --git a/src/ethereum_spec_tools/evm_tools/loaders/fork_loader.py 
b/src/ethereum_spec_tools/evm_tools/loaders/fork_loader.py index 002bde2e84..9a14efa54c 100644 --- a/src/ethereum_spec_tools/evm_tools/loaders/fork_loader.py +++ b/src/ethereum_spec_tools/evm_tools/loaders/fork_loader.py @@ -132,6 +132,15 @@ def compute_block_access_list_hash(self) -> Any: "block_access_lists" ).compute_block_access_list_hash + @property + def has_block_access_list_hash(self) -> bool: + """Check if the fork has a `block_access_list_hash` function.""" + try: + module = self._module("block_access_lists") + except ModuleNotFoundError: + return False + return hasattr(module, "compute_block_access_list_hash") + @property def signing_hash_2930(self) -> Any: """signing_hash_2930 function of the fork.""" diff --git a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py index c029091467..4988ef25bc 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py @@ -16,9 +16,8 @@ from ethereum import trace from ethereum.exceptions import EthereumException, InvalidBlock from ethereum.fork_criteria import ByBlockNumber, ByTimestamp, Unscheduled -from ethereum_spec_tools.forks import TemporaryHardfork from ethereum.forks.amsterdam.state_tracker import StateChanges -from ethereum_spec_tools.forks import Hardfork +from ethereum_spec_tools.forks import Hardfork, TemporaryHardfork from ..loaders.fixture_loader import Load from ..utils import ( @@ -310,7 +309,7 @@ def block_environment(self) -> Any: ) kw_arguments["excess_blob_gas"] = self.env.excess_blob_gas - if self.fork.is_after_fork("amsterdam"): + if self.fork.has_block_access_list_hash: kw_arguments["state_changes"] = StateChanges() return block_environment(**kw_arguments) @@ -377,10 +376,6 @@ def run_state_test(self) -> Any: self.result.rejected = self.txs.rejected_txs def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: - if self.fork.is_after_fork("amsterdam"): - self.fork.set_block_access_index( - block_env.state.change_tracker, Uint(0) - ) if self.fork.has_compute_requests_hash: self.fork.process_unchecked_system_transaction( block_env=block_env, @@ -417,7 +412,7 @@ def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: ) # Post-execution operations use index N+1 - if self.fork.is_after_fork("amsterdam"): + if self.fork.has_block_access_list_hash: from ethereum.forks.amsterdam.state_tracker import ( increment_block_access_index, ) @@ -440,7 +435,7 @@ def _run_blockchain_test(self, block_env: Any, block_output: Any) -> None: if self.fork.has_compute_requests_hash: self.fork.process_general_purpose_requests(block_env, block_output) - if self.fork.is_after_fork("amsterdam"): + if self.fork.has_block_access_list_hash: # Build block access list from block_env.state_changes block_output.block_access_list = self.fork.build_block_access_list( block_env.state_changes diff --git a/src/ethereum_spec_tools/evm_tools/t8n/env.py b/src/ethereum_spec_tools/evm_tools/t8n/env.py index 8cadd58c9f..be719ba7af 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/env.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/env.py @@ -145,7 +145,7 @@ def read_excess_blob_gas(self, data: Any, t8n: "T8N") -> None: if t8n.fork.has_compute_requests_hash: arguments["requests_hash"] = Hash32(b"\0" * 32) - if t8n.fork.is_after_fork("amsterdam"): + if t8n.fork.has_block_access_list_hash: arguments["block_access_list_hash"] = Hash32(b"\0" * 32) parent_header = t8n.fork.Header(**arguments) diff --git 
a/tests/cancun/create/__init__.py b/tests/cancun/create/__init__.py new file mode 100644 index 0000000000..5297fcc089 --- /dev/null +++ b/tests/cancun/create/__init__.py @@ -0,0 +1 @@ +"""Create tests starting at Cancun.""" From 2eb8a61a6d6621a05e5bdf1f0e6b1e415da5ab32 Mon Sep 17 00:00:00 2001 From: Stefan <22667037+qu0b@users.noreply.github.com> Date: Mon, 8 Dec 2025 22:14:15 +0100 Subject: [PATCH 049/154] feat(test): add more bal test cases (#1812) * rebase onto upstream * chore(fix) balance check in call before target access * merge test cases with usptream * chore(fix) format with ruff * chore(fix) revert call changes and add target to bal * merge test cases with usptream * improve wording * chore(formate) fix formatting and line length * refactor(test-tests): Use pre API where possible; explicit check for none in BAL * refactor(test-tests): Refactor opcode tests to bal opcodes test file - Qu0b/fix pre alloc group enginex (#1911) --------- Co-authored-by: fselmo --- .../test_block_access_lists.py | 342 ++++++++++ .../test_block_access_lists_eip4895.py | 4 + .../test_block_access_lists_eip7702.py | 243 +++++++ .../test_block_access_lists_opcodes.py | 618 ++++++++++++++++++ .../test_cases.md | 17 +- 5 files changed, 1220 insertions(+), 4 deletions(-) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py index 50fb9e0ad2..832a8d8e46 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py @@ -8,6 +8,7 @@ Account, Address, Alloc, + AuthorizationTuple, BalAccountExpectation, BalBalanceChange, BalCodeChange, @@ -23,6 +24,7 @@ Header, Op, Transaction, + add_kzg_version, compute_create_address, ) @@ -1365,6 +1367,10 @@ def test_bal_coinbase_zero_tip( ) +@pytest.mark.pre_alloc_group( + "precompile_funded", + reason="Expects clean precompile balances, isolate in EngineX", +) @pytest.mark.parametrize( "value", [ @@ -2051,6 +2057,10 @@ def test_bal_cross_tx_storage_revert_to_zero( ) +@pytest.mark.pre_alloc_group( + "ripemd160_state_leak", + reason="Pre-funds RIPEMD-160, must be isolated in EngineX format", +) def test_bal_cross_block_ripemd160_state_leak( pre: Alloc, blockchain_test: BlockchainTestFiller, @@ -2145,3 +2155,335 @@ def test_bal_cross_block_ripemd160_state_leak( ripemd160_addr: Account(balance=1), }, ) + + +def test_bal_all_transaction_types( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test BAL with all 5 tx types in single block. + + Types: Legacy, EIP-2930, EIP-1559, Blob, EIP-7702. + Each tx writes to contract storage. Access list addresses are pre-warmed + but NOT in BAL. 
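A note on the `add_kzg_version` helper imported above and used for the blob transaction further down: per EIP-4844 a blob versioned hash is the 0x01 version byte followed by the tail of sha256 of the KZG commitment; in this test there is no real commitment, so the helper only needs to stamp the version byte onto a placeholder hash. A hedged sketch (the `kzg_to_versioned_hash` name here is illustrative, not the framework's API):

    import hashlib

    VERSIONED_HASH_VERSION_KZG = b"\x01"

    def kzg_to_versioned_hash(commitment: bytes) -> bytes:
        # EIP-4844 derivation for a real KZG commitment.
        return VERSIONED_HASH_VERSION_KZG + hashlib.sha256(commitment).digest()[1:]

    # The test merely needs a well-formed 32-byte hash with the correct
    # version byte, e.g. stamped onto a placeholder value:
    placeholder = (0xBEEF).to_bytes(32, "big")
    versioned = VERSIONED_HASH_VERSION_KZG + placeholder[1:]
    assert len(versioned) == 32 and versioned[0] == 0x01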
+ + Expected BAL: + - All 5 senders: nonce_changes + - Contracts 0-3: storage_changes + - Alice (7702): nonce_changes, code_changes (delegation), storage_changes + - Oracle: empty (delegation target, accessed) + """ + from tests.prague.eip7702_set_code_tx.spec import Spec as Spec7702 + + # Create senders for each transaction type + sender_0 = pre.fund_eoa() # Type 0 - Legacy + sender_1 = pre.fund_eoa() # Type 1 - Access List + sender_2 = pre.fund_eoa() # Type 2 - EIP-1559 + sender_3 = pre.fund_eoa() # Type 3 - Blob + sender_4 = pre.fund_eoa() # Type 4 - EIP-7702 + + # Create contracts for each tx type (except 7702 which uses delegation) + contract_code = Op.SSTORE(0x01, Op.CALLDATALOAD(0)) + Op.STOP + contract_0 = pre.deploy_contract(code=contract_code) + contract_1 = pre.deploy_contract(code=contract_code) + contract_2 = pre.deploy_contract(code=contract_code) + contract_3 = pre.deploy_contract(code=contract_code) + + # For Type 4 (EIP-7702): Alice delegates to Oracle + alice = pre.fund_eoa() + oracle = pre.deploy_contract(code=Op.SSTORE(0x01, 0x05) + Op.STOP) + + # Dummy address to warm in access list + warmed_address = pre.fund_eoa(amount=1) + + # TX1: Type 0 - Legacy transaction + tx_type_0 = Transaction( + ty=0, + sender=sender_0, + to=contract_0, + gas_limit=100_000, + gas_price=10, + data=Hash(0x01), # Value to store + ) + + # TX2: Type 1 - Access List transaction (EIP-2930) + tx_type_1 = Transaction( + ty=1, + sender=sender_1, + to=contract_1, + gas_limit=100_000, + gas_price=10, + data=Hash(0x02), + access_list=[ + AccessList( + address=warmed_address, + storage_keys=[], + ) + ], + ) + + # TX3: Type 2 - EIP-1559 Dynamic fee transaction + tx_type_2 = Transaction( + ty=2, + sender=sender_2, + to=contract_2, + gas_limit=100_000, + max_fee_per_gas=50, + max_priority_fee_per_gas=5, + data=Hash(0x03), + ) + + # TX4: Type 3 - Blob transaction (EIP-4844) + # Blob versioned hashes need KZG version prefix (0x01) + blob_hashes = add_kzg_version([Hash(0xBEEF)], 1) + tx_type_3 = Transaction( + ty=3, + sender=sender_3, + to=contract_3, + gas_limit=100_000, + max_fee_per_gas=50, + max_priority_fee_per_gas=5, + max_fee_per_blob_gas=10, + blob_versioned_hashes=blob_hashes, + data=Hash(0x04), + ) + + # TX5: Type 4 - EIP-7702 Set Code transaction + tx_type_4 = Transaction( + ty=4, + sender=sender_4, + to=alice, + gas_limit=100_000, + max_fee_per_gas=50, + max_priority_fee_per_gas=5, + authorization_list=[ + AuthorizationTuple( + address=oracle, + nonce=0, + signer=alice, + ) + ], + ) + + block = Block( + txs=[tx_type_0, tx_type_1, tx_type_2, tx_type_3, tx_type_4], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + # Type 0 sender + sender_0: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + # Type 1 sender + sender_1: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=2, post_nonce=1)], + ), + # Type 2 sender + sender_2: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=3, post_nonce=1)], + ), + # Type 3 sender + sender_3: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=4, post_nonce=1)], + ), + # Type 4 sender + sender_4: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=5, post_nonce=1)], + ), + # Contract touched by Type 0 + contract_0: BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0x01, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=0x01) + ], + ) + ], + ), + # Contract touched by Type 1 + contract_1: BalAccountExpectation( + 
storage_changes=[ + BalStorageSlot( + slot=0x01, + slot_changes=[ + BalStorageChange(tx_index=2, post_value=0x02) + ], + ) + ], + ), + # Note: warmed_address from access_list is NOT in BAL + # because access lists pre-warm but don't record in BAL + # Contract touched by Type 2 + warmed_address: None, # explicit check + contract_2: BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0x01, + slot_changes=[ + BalStorageChange(tx_index=3, post_value=0x03) + ], + ) + ], + ), + # Contract touched by Type 3 + contract_3: BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0x01, + slot_changes=[ + BalStorageChange(tx_index=4, post_value=0x04) + ], + ) + ], + ), + # Alice (Type 4 delegation target, executes oracle code) + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=5, post_nonce=1)], + code_changes=[ + BalCodeChange( + tx_index=5, + new_code=Spec7702.delegation_designation(oracle), + ) + ], + storage_changes=[ + BalStorageSlot( + slot=0x01, + slot_changes=[ + BalStorageChange(tx_index=5, post_value=0x05) + ], + ) + ], + ), + # Oracle (accessed via delegation) + oracle: BalAccountExpectation.empty(), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + sender_0: Account(nonce=1), + sender_1: Account(nonce=1), + sender_2: Account(nonce=1), + sender_3: Account(nonce=1), + sender_4: Account(nonce=1), + contract_0: Account(storage={0x01: 0x01}), + contract_1: Account(storage={0x01: 0x02}), + contract_2: Account(storage={0x01: 0x03}), + contract_3: Account(storage={0x01: 0x04}), + alice: Account( + nonce=1, + code=Spec7702.delegation_designation(oracle), + storage={0x01: 0x05}, + ), + }, + ) + + +def test_bal_lexicographic_address_ordering( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test BAL enforces strict lexicographic byte-wise address ordering. + + Addresses: addr_low (0x...01), addr_mid (0x...0100), addr_high (0x01...00). + Endian-trap: addr_endian_low (0x01...02), addr_endian_high (0x02...01). + Contract touches them in reverse order to verify sorting. + + Expected BAL order: low < mid < high < endian_low < endian_high. + Catches endianness bugs in address comparison. 
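The endian trap described above can be made concrete in a few lines: plain byte-wise comparison (what BAL ordering requires) and a hypothetical little-endian integer comparison disagree on exactly this pair of addresses. Sketch only, using raw bytes in place of the framework's `Address` type:

    addr_endian_low = bytes.fromhex("0100000000000000000000000000000000000002")
    addr_endian_high = bytes.fromhex("0200000000000000000000000000000000000001")

    # Correct BAL ordering: lexicographic comparison of the raw 20 bytes.
    assert sorted([addr_endian_high, addr_endian_low]) == [
        addr_endian_low,
        addr_endian_high,
    ]

    def as_little_endian_int(addr: bytes) -> int:
        return int.from_bytes(addr, "little")

    # A buggy implementation comparing addresses as little-endian integers
    # would order this pair the other way round, which the test catches.
    assert as_little_endian_int(addr_endian_low) > as_little_endian_int(
        addr_endian_high
    )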
+ """ + alice = pre.fund_eoa() + + # Create addresses with specific byte patterns for lexicographic testing + # In lexicographic (byte-wise) order: low < mid < high + # addr_low: 0x00...01 (rightmost byte = 0x01) + # addr_mid: 0x00...0100 (second-rightmost byte = 0x01) + # addr_high: 0x01...00 (leftmost byte = 0x01) + addr_low = Address("0x0000000000000000000000000000000000000001") + addr_mid = Address("0x0000000000000000000000000000000000000100") + addr_high = Address("0x0100000000000000000000000000000000000000") + + # Endian-trap addresses: byte-reversals to catch byte-order bugs + # addr_endian_low: 0x01...02 (0x01 at byte 0, 0x02 at byte 19) + # addr_endian_high: 0x02...01 (0x02 at byte 0, 0x01 at byte 19) + # Note: reverse(addr_endian_low) = addr_endian_high + # Correct order: endian_low < endian_high (0x01 < 0x02 at byte 0) + # Reversed bytes would incorrectly get opposite order + addr_endian_low = Address("0x0100000000000000000000000000000000000002") + addr_endian_high = Address("0x0200000000000000000000000000000000000001") + + # Give each address a balance so they exist + addr_balance = 100 + pre[addr_low] = Account(balance=addr_balance) + pre[addr_mid] = Account(balance=addr_balance) + pre[addr_high] = Account(balance=addr_balance) + pre[addr_endian_low] = Account(balance=addr_balance) + pre[addr_endian_high] = Account(balance=addr_balance) + + # Contract that accesses addresses in REVERSE lexicographic order + # to verify sorting is applied correctly + contract_code = ( + Op.BALANCE(addr_high) # Access high first + + Op.POP + + Op.BALANCE(addr_low) # Access low second + + Op.POP + + Op.BALANCE(addr_mid) # Access mid third + + Op.POP + # Access endian-trap addresses in reverse order + + Op.BALANCE(addr_endian_high) # Access endian_high before endian_low + + Op.POP + + Op.BALANCE(addr_endian_low) + + Op.POP + + Op.STOP + ) + + contract = pre.deploy_contract(code=contract_code) + + tx = Transaction( + sender=alice, + to=contract, + gas_limit=1_000_000, + ) + + # BAL must be sorted lexicographically by address bytes + # Order: low < mid < high < endian_low < endian_high + # (sorted by raw address bytes, regardless of access order) + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + contract: BalAccountExpectation.empty(), + # These addresses appear in BAL due to BALANCE access + # The expectation framework verifies correct order + addr_low: BalAccountExpectation.empty(), + addr_mid: BalAccountExpectation.empty(), + addr_high: BalAccountExpectation.empty(), + # Endian-trap addresses: must be sorted correctly despite being + # byte-reversals of each other + addr_endian_low: BalAccountExpectation.empty(), + addr_endian_high: BalAccountExpectation.empty(), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + contract: Account(), + addr_low: Account(balance=addr_balance), + addr_mid: Account(balance=addr_balance), + addr_high: Account(balance=addr_balance), + addr_endian_low: Account(balance=addr_balance), + addr_endian_high: Account(balance=addr_balance), + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4895.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4895.py index edb8295c17..16ce09ff90 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4895.py +++ 
b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4895.py @@ -585,6 +585,10 @@ def test_bal_zero_withdrawal( ) +@pytest.mark.pre_alloc_group( + "withdrawal_to_precompiles", + reason="Expects clean precompile balances, isolate in EngineX", +) @pytest.mark.parametrize_by_fork( "precompile", lambda fork: [ diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py index 8182c1992c..2c4890611b 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py @@ -16,6 +16,7 @@ BlockchainTestFiller, Op, Transaction, + Withdrawal, ) from ...prague.eip7702_set_code_tx.spec import Spec as Spec7702 @@ -828,3 +829,245 @@ def test_bal_7702_double_auth_swap( relayer: Account(nonce=1), }, ) + + +def test_bal_selfdestruct_to_7702_delegation( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test BAL with SELFDESTRUCT to 7702 delegated account. + + Tx1: Alice delegates to Oracle. + Tx2: Victim (balance=100) selfdestructs to Alice. + SELFDESTRUCT transfers balance without executing recipient code. + + Expected BAL: + - Alice tx1: code_changes (delegation), nonce_changes + - Alice tx2: balance_changes (+100) + - Victim tx2: balance_changes (100→0) + - Oracle: MUST NOT appear (SELFDESTRUCT doesn't execute recipient code) + """ + # Alice (EOA) will receive delegation then receive selfdestruct balance + # Use explicit initial balance for clarity + alice_initial_balance = 10**18 # 1 ETH default + alice = pre.fund_eoa(amount=alice_initial_balance) + bob = pre.fund_eoa(amount=0) # Just to be the recipient of tx + + # Oracle contract that Alice will delegate to + oracle = pre.deploy_contract(code=Op.SSTORE(0x01, 0x42) + Op.STOP) + + victim_balance = 100 + + # Victim contract that selfdestructs to Alice + victim = pre.deploy_contract( + code=Op.SELFDESTRUCT(alice), + balance=victim_balance, + ) + + # Relayer for tx1 (delegation) + relayer = pre.fund_eoa() + + # Tx1: Alice authorizes delegation to Oracle + tx1 = Transaction( + sender=relayer, + to=bob, + value=10, + gas_limit=1_000_000, + gas_price=0xA, + authorization_list=[ + AuthorizationTuple( + address=oracle, + nonce=0, + signer=alice, + ) + ], + ) + + # Caller contract that triggers selfdestruct on victim + caller = pre.deploy_contract(code=Op.CALL(100_000, victim, 0, 0, 0, 0, 0)) + + # Tx2: Trigger selfdestruct on victim (victim sends balance to Alice) + tx2 = Transaction( + nonce=1, + sender=relayer, + to=caller, + gas_limit=1_000_000, + gas_price=0xA, + ) + + alice_final_balance = alice_initial_balance + victim_balance + + account_expectations = { + alice: BalAccountExpectation( + # tx1: nonce change for auth, code change for delegation + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + code_changes=[ + BalCodeChange( + tx_index=1, + new_code=Spec7702.delegation_designation(oracle), + ) + ], + # tx2: balance change from selfdestruct + balance_changes=[ + BalBalanceChange(tx_index=2, post_balance=alice_final_balance) + ], + ), + bob: BalAccountExpectation( + balance_changes=[BalBalanceChange(tx_index=1, post_balance=10)] + ), + relayer: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(tx_index=1, post_nonce=1), + BalNonceChange(tx_index=2, post_nonce=2), + ], + ), + caller: BalAccountExpectation.empty(), + # Victim (selfdestructing contract): 
balance changes to 0 + # Explicitly verify ALL fields to avoid false positives + victim: BalAccountExpectation( + nonce_changes=[], # Contract nonce unchanged + balance_changes=[BalBalanceChange(tx_index=2, post_balance=0)], + code_changes=[], # Code unchanged (post-Cancun SELFDESTRUCT) + storage_changes=[], # No storage changes + storage_reads=[], # No storage reads + ), + # Oracle MUST NOT appear in tx2 - SELFDESTRUCT doesn't execute + # recipient code, so delegation target is never accessed + oracle: None, + } + + block = Block( + txs=[tx1, tx2], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + + post = { + alice: Account( + nonce=1, + code=Spec7702.delegation_designation(oracle), + balance=alice_final_balance, + ), + bob: Account(balance=10), + relayer: Account(nonce=2), + # Victim still exists but with 0 balance (post-Cancun SELFDESTRUCT) + victim: Account(balance=0), + } + + blockchain_test( + pre=pre, + blocks=[block], + post=post, + ) + + +GWEI = 10**9 + + +def test_bal_withdrawal_to_7702_delegation( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test BAL with withdrawal to 7702 delegated account. + + Tx1: Alice delegates to Oracle. Withdrawal: 10 gwei to Alice. + Withdrawals credit balance without executing code. + + Expected BAL: + - Alice tx1: code_changes (delegation), nonce_changes + - Alice tx2: balance_changes (+10 gwei) + - Oracle: MUST NOT appear (withdrawals don't execute recipient code) + """ + # Alice (EOA) will receive delegation then receive withdrawal + alice_initial_balance = 10**18 # 1 ETH default + alice = pre.fund_eoa(amount=alice_initial_balance) + bob = pre.fund_eoa(amount=0) # Recipient of tx value + + # Oracle contract that Alice will delegate to + # If delegation were followed, this would write to storage + oracle = pre.deploy_contract(code=Op.SSTORE(0x01, 0x42) + Op.STOP) + + # Relayer for the delegation tx + relayer = pre.fund_eoa() + + withdrawal_amount_gwei = 10 + + # Tx1: Alice authorizes delegation to Oracle + tx1 = Transaction( + sender=relayer, + to=bob, + value=10, + gas_limit=1_000_000, + gas_price=0xA, + authorization_list=[ + AuthorizationTuple( + address=oracle, + nonce=0, + signer=alice, + ) + ], + ) + + alice_final_balance = alice_initial_balance + ( + withdrawal_amount_gwei * GWEI + ) + + account_expectations = { + alice: BalAccountExpectation( + # tx1: nonce change for auth, code change for delegation + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + code_changes=[ + BalCodeChange( + tx_index=1, + new_code=Spec7702.delegation_designation(oracle), + ) + ], + # tx2 (withdrawal): balance change + balance_changes=[ + BalBalanceChange(tx_index=2, post_balance=alice_final_balance) + ], + ), + bob: BalAccountExpectation( + balance_changes=[BalBalanceChange(tx_index=1, post_balance=10)] + ), + relayer: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + # Oracle MUST NOT appear - withdrawals don't execute recipient code, + # so delegation target is never accessed + oracle: None, + } + + block = Block( + txs=[tx1], + withdrawals=[ + Withdrawal( + index=0, + validator_index=0, + address=alice, + amount=withdrawal_amount_gwei, + ) + ], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + + post = { + alice: Account( + nonce=1, + code=Spec7702.delegation_designation(oracle), + balance=alice_final_balance, + ), + bob: Account(balance=10), + relayer: 
Account(nonce=1), + } + + blockchain_test( + pre=pre, + blocks=[block], + post=post, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py index bae1544eee..448936448e 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py @@ -1266,3 +1266,621 @@ def test_bal_create_contract_init_revert( created_address: Account.NONEXISTENT, }, ) + + +def test_bal_call_revert_insufficient_funds( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test BAL with CALL failure due to insufficient balance (not OOG). + + Contract (balance=100): SLOAD(0x01)→CALL(target, value=1000)→SSTORE(0x02). + CALL fails because 1000 > 100. Target is 0xDEAD. + + Expected BAL: + - Contract: storage_reads [0x01], storage_changes slot 0x02 (value=0) + - Target: appears in BAL (accessed before balance check fails) + """ + alice = pre.fund_eoa() + + contract_balance = 100 + transfer_amount = 1000 # More than contract has + + # Target address that should be warmed but not receive funds + # Give it a small balance so it's not considered "empty" and pruned + target_balance = 1 + target_address = pre.fund_eoa(amount=target_balance) + + # Contract that: + # 1. SLOAD slot 0x01 + # 2. CALL target with value=1000 (will fail - insufficient funds) + # 3. SSTORE slot 0x02 with CALL result (0 = failure) + contract_code = ( + Op.SLOAD(0x01) # Read from slot 0x01, push to stack + + Op.POP # Discard value + # CALL(gas, addr, value, argsOffset, argsSize, retOffset, retSize) + + Op.CALL(100_000, target_address, transfer_amount, 0, 0, 0, 0) + # CALL result is on stack (0 = failure, 1 = success) + # Stack: [result] + + Op.PUSH1(0x02) # Push slot number + # Stack: [0x02, result] + + Op.SSTORE # SSTORE pops slot (0x02), then value (result) + + Op.STOP + ) + + contract = pre.deploy_contract( + code=contract_code, + balance=contract_balance, + storage={ + 0x02: 0xDEAD + }, # Non-zero initial value so SSTORE(0) is a change + ) + + tx = Transaction( + sender=alice, + to=contract, + gas_limit=1_000_000, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + contract: BalAccountExpectation( + # Storage read for slot 0x01 + storage_reads=[0x01], + # Storage change for slot 0x02 (CALL result = 0) + storage_changes=[ + BalStorageSlot( + slot=0x02, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=0) + ], + ) + ], + ), + # Target appears in BAL - accessed before balance check fails + target_address: BalAccountExpectation.empty(), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + contract: Account( + balance=contract_balance, # Unchanged - transfer failed + storage={0x02: 0}, # CALL returned 0 (failure) + ), + target_address: Account(balance=target_balance), # Unchanged + }, + ) + + +def test_bal_create_selfdestruct_to_self_with_call( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test BAL with init code that CALLs Oracle, writes storage, then + SELFDESTRUCTs to self. + + Factory CREATE2(endowment=100). + Init: CALL(Oracle)→SSTORE(0x01)→SELFDESTRUCT(SELF). 
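For readers of the two EIP-7702 delegation tests earlier in this patch: the `Spec7702.delegation_designation(oracle)` code they expect in the BAL is the EIP-7702 delegation indicator, i.e. the 3-byte prefix 0xef0100 followed by the 20-byte delegate address (the `0xef0100||Oracle` wording used in test_cases.md). A minimal sketch with a hypothetical helper name:

    def delegation_designation(delegate: bytes) -> bytes:
        # EIP-7702 delegation indicator: 0xef0100 || 20-byte address.
        assert len(delegate) == 20
        return bytes.fromhex("ef0100") + delegate

    assert len(delegation_designation(b"\x11" * 20)) == 23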
+ + Expected BAL: + - Factory: nonce_changes, balance_changes (loses 100) + - Oracle: storage_changes slot 0x01 + - Created address: storage_reads [0x01] (aborted write→read), + MUST NOT have nonce/code/storage/balance changes (ephemeral) + """ + alice = pre.fund_eoa() + factory_balance = 1000 + + # Oracle contract that writes to slot 0x01 when called + oracle_code = Op.SSTORE(0x01, 0x42) + Op.STOP + oracle = pre.deploy_contract(code=oracle_code) + + endowment = 100 + + # Init code that: + # 1. Calls Oracle (which writes to its slot 0x01) + # 2. Writes 0x42 to own slot 0x01 + # 3. Selfdestructs to self + initcode_runtime = ( + # CALL(gas, Oracle, value=0, ...) + Op.CALL(100_000, oracle, 0, 0, 0, 0, 0) + + Op.POP + # Write to own storage slot 0x01 + + Op.SSTORE(0x01, 0x42) + # SELFDESTRUCT to self (ADDRESS returns own address) + + Op.SELFDESTRUCT(Op.ADDRESS) + ) + init_code = Initcode(deploy_code=Op.STOP, initcode_prefix=initcode_runtime) + init_code_bytes = bytes(init_code) + init_code_size = len(init_code_bytes) + + # Factory code with embedded initcode (no template contract needed) + # Structure: [execution code] [initcode bytes] + # CODECOPY copies initcode from factory's own code to memory + # + # Two-pass approach: build with placeholder, measure, rebuild + placeholder_offset = 0xFF # Placeholder (same byte size as final value) + factory_execution_template = ( + Op.CODECOPY(0, placeholder_offset, init_code_size) + + Op.SSTORE( + 0x00, + Op.CREATE2( + value=endowment, + offset=0, + size=init_code_size, + salt=0, + ), + ) + + Op.STOP + ) + # Measure execution code size + execution_code_size = len(bytes(factory_execution_template)) + + # Rebuild with actual offset value + factory_execution = ( + Op.CODECOPY(0, execution_code_size, init_code_size) + + Op.SSTORE( + 0x00, + Op.CREATE2( + value=endowment, + offset=0, + size=init_code_size, + salt=0, + ), + ) + + Op.STOP + ) + # Combine execution code with embedded initcode + factory_code = bytes(factory_execution) + init_code_bytes + + factory = pre.deploy_contract(code=factory_code, balance=factory_balance) + + # Calculate the CREATE2 target address + created_address = compute_create_address( + address=factory, + nonce=1, + salt=0, + initcode=init_code_bytes, + opcode=Op.CREATE2, + ) + + tx = Transaction( + sender=alice, + to=factory, + gas_limit=1_000_000, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + factory: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=2)], + # Balance changes: loses endowment (100) + balance_changes=[ + BalBalanceChange( + tx_index=1, + post_balance=factory_balance - endowment, + ) + ], + ), + # Oracle: storage changes for slot 0x01 + oracle: BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0x01, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=0x42) + ], + ) + ], + ), + # Created address: ephemeral (created and destroyed same tx) + # - storage_reads for slot 0x01 (aborted write becomes read) + # - NO nonce/code/storage/balance changes + created_address: BalAccountExpectation( + storage_reads=[0x01], + storage_changes=[], + nonce_changes=[], + code_changes=[], + balance_changes=[], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + factory: Account(nonce=2, balance=factory_balance - endowment), + oracle: Account(storage={0x01: 0x42}), + # 
Created address doesn't exist - destroyed in same tx + created_address: Account.NONEXISTENT, + }, + ) + + +def test_bal_create2_collision( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test BAL with CREATE2 collision against pre-existing contract. + + Pre-existing contract has code=STOP, nonce=1. + Factory (nonce=1, slot[0]=0xDEAD) executes CREATE2 targeting it. + + Expected BAL: + - Factory: nonce_changes (1→2), storage_changes slot 0 (0xDEAD→0) + - Collision address: empty (accessed during collision check) + - Collision address MUST NOT have nonce_changes or code_changes + """ + alice = pre.fund_eoa() + + # Init code that deploys simple STOP contract + init_code = Initcode(deploy_code=Op.STOP) + init_code_bytes = bytes(init_code) + + # Factory code: CREATE2 and store result in slot 0 + factory_code = ( + # Push init code to memory + Op.MSTORE(0, Op.PUSH32(init_code_bytes)) + # SSTORE(0, CREATE2(...)) - stores CREATE2 result in slot 0 + + Op.SSTORE( + 0x00, + Op.CREATE2( + value=0, + offset=32 - len(init_code_bytes), + size=len(init_code_bytes), + salt=0, + ), + ) + + Op.STOP + ) + + # Deploy factory - it starts with nonce=1 by default + factory = pre.deploy_contract( + code=factory_code, + storage={0x00: 0xDEAD}, # Initial value to prove SSTORE works + ) + + # Calculate the CREATE2 target address + collision_address = compute_create_address( + address=factory, + nonce=1, + salt=0, + initcode=init_code_bytes, + opcode=Op.CREATE2, + ) + + # Set up the collision by pre-populating the target address + # This contract has code (STOP) and nonce=1, causing collision + pre[collision_address] = Account( + code=Op.STOP, + nonce=1, + ) + + tx = Transaction( + sender=alice, + to=factory, + gas_limit=1_000_000, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + factory: BalAccountExpectation( + # Nonce incremented 1→2 even on failed CREATE2 + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=2)], + # Storage changes: slot 0 = 0xDEAD → 0 (CREATE2 returned 0) + storage_changes=[ + BalStorageSlot( + slot=0x00, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=0) + ], + ) + ], + ), + # Collision address: empty (accessed but no state changes) + # Explicitly verify ALL fields are empty + collision_address: BalAccountExpectation( + nonce_changes=[], # MUST NOT have nonce changes + balance_changes=[], # MUST NOT have balance changes + code_changes=[], # MUST NOT have code changes + storage_changes=[], # MUST NOT have storage changes + storage_reads=[], # MUST NOT have storage reads + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + factory: Account(nonce=2, storage={0x00: 0}), + # Collision address unchanged - contract still exists + collision_address: Account(code=bytes(Op.STOP), nonce=1), + }, + ) + + +def test_bal_transient_storage_not_tracked( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test BAL excludes EIP-1153 transient storage (TSTORE/TLOAD). + + Contract: TSTORE(0x01, 0x42)→TLOAD(0x01)→SSTORE(0x02, result). 
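The `collision_address` precomputed for the CREATE2 collision test above follows the EIP-1014 derivation, which `compute_create_address(..., opcode=Op.CREATE2)` presumably wraps. A self-contained sketch; since hashlib has no keccak-256, the hash function is passed in as a callable rather than naming a specific library:

    from typing import Callable

    def create2_address(
        deployer: bytes,
        salt: int,
        init_code: bytes,
        keccak256: Callable[[bytes], bytes],
    ) -> bytes:
        # EIP-1014: keccak256(0xff ++ deployer ++ salt ++ keccak256(init_code)),
        # keeping the low 20 bytes of the digest.
        preimage = (
            b"\xff" + deployer + salt.to_bytes(32, "big") + keccak256(init_code)
        )
        return keccak256(preimage)[-20:]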
+ + Expected BAL: + - storage_changes: slot 0x02 (persistent) + - MUST NOT include slot 0x01 (transient storage not persisted) + """ + alice = pre.fund_eoa() + + # Contract that uses transient storage then persists to regular storage + contract_code = ( + # TSTORE slot 0x01 with value 0x42 (transient storage) + Op.TSTORE(0x01, 0x42) + # TLOAD slot 0x01 (transient storage read) + + Op.TLOAD(0x01) + # Result (0x42) is on stack, store it in persistent slot 0x02 + + Op.PUSH1(0x02) + + Op.SSTORE # SSTORE pops slot (0x02), then value (0x42) + + Op.STOP + ) + + contract = pre.deploy_contract(code=contract_code) + + tx = Transaction( + sender=alice, + to=contract, + gas_limit=1_000_000, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + contract: BalAccountExpectation( + # Persistent storage change for slot 0x02 + storage_changes=[ + BalStorageSlot( + slot=0x02, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=0x42) + ], + ) + ], + # MUST NOT include slot 0x01 in storage_reads + # Transient storage operations don't pollute BAL + storage_reads=[], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + contract: Account(storage={0x02: 0x42}), + }, + ) + + +@pytest.mark.pre_alloc_group( + "selfdestruct_to_precompile", + reason="Modifies precompile balance, must be isolated in EngineX format", +) +def test_bal_selfdestruct_to_precompile( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test BAL with SELFDESTRUCT to precompile (ecrecover 0x01). + + Victim (balance=100) selfdestructs to precompile 0x01. + + Expected BAL: + - Victim: balance_changes (100→0) + - Precompile 0x01: balance_changes (0→100), no code/nonce changes + """ + alice = pre.fund_eoa() + + contract_balance = 100 + ecrecover_precompile = Address(1) # 0x0000...0001 + + # Contract that selfdestructs to ecrecover precompile + victim_code = Op.SELFDESTRUCT(ecrecover_precompile) + + victim = pre.deploy_contract(code=victim_code, balance=contract_balance) + + # Caller that triggers the selfdestruct + caller_code = Op.CALL(100_000, victim, 0, 0, 0, 0, 0) + Op.STOP + caller = pre.deploy_contract(code=caller_code) + + tx = Transaction( + sender=alice, + to=caller, + gas_limit=1_000_000, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + caller: BalAccountExpectation.empty(), + # Victim (selfdestructing contract): balance changes 100→0 + # Explicitly verify ALL fields to avoid false positives + victim: BalAccountExpectation( + nonce_changes=[], # Contract nonce unchanged + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=0) + ], + code_changes=[], # Code unchanged (post-Cancun) + storage_changes=[], # No storage changes + storage_reads=[], # No storage reads + ), + # Precompile receives selfdestruct balance + # Explicitly verify ALL fields to avoid false positives + ecrecover_precompile: BalAccountExpectation( + nonce_changes=[], # MUST NOT have nonce changes + balance_changes=[ + BalBalanceChange( + tx_index=1, post_balance=contract_balance + ) + ], + code_changes=[], # MUST NOT have code changes + storage_changes=[], # MUST NOT have storage changes + storage_reads=[], # MUST NOT have storage reads + ), + } + ), + ) + + 
blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + caller: Account(), + # Victim still exists with 0 balance (post-Cancun SELFDESTRUCT) + victim: Account(balance=0), + # Precompile has received the balance + ecrecover_precompile: Account(balance=contract_balance), + }, + ) + + +def test_bal_create_early_failure( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test BAL with CREATE failure due to insufficient endowment. + + Factory (balance=50) attempts CREATE(value=100). + Fails before nonce increment (before track_address). + Distinct from collision where address IS accessed. + + Expected BAL: + - Alice: nonce_changes + - Factory: storage_changes slot 0 (0xDEAD→0), NO nonce_changes + - Contract address: MUST NOT appear (never accessed) + """ + alice = pre.fund_eoa() + + factory_balance = 50 + endowment = 100 # More than factory has + + # Simple init code that deploys STOP + init_code = Initcode(deploy_code=Op.STOP) + init_code_bytes = bytes(init_code) + + # Factory code: CREATE(value=endowment) and store result in slot 0 + factory_code = ( + # Push init code to memory + Op.MSTORE(0, Op.PUSH32(init_code_bytes)) + # SSTORE(0, CREATE(value, offset, size)) + + Op.SSTORE( + 0x00, + Op.CREATE( + value=endowment, # 100 > 50, will fail + offset=32 - len(init_code_bytes), + size=len(init_code_bytes), + ), + ) + + Op.STOP + ) + + # Deploy factory with insufficient balance for the CREATE endowment + factory = pre.deploy_contract( + code=factory_code, + balance=factory_balance, + storage={0x00: 0xDEAD}, # Initial value to prove SSTORE works + ) + + # Calculate what the contract address WOULD be (but it won't be created) + would_be_contract_address = compute_create_address( + address=factory, nonce=1 + ) + + tx = Transaction( + sender=alice, + to=factory, + gas_limit=1_000_000, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + factory: BalAccountExpectation( + # NO nonce_changes - CREATE failed before increment_nonce + nonce_changes=[], + # Storage changes: slot 0 = 0xDEAD → 0 (CREATE returned 0) + storage_changes=[ + BalStorageSlot( + slot=0x00, + slot_changes=[ + BalStorageChange(tx_index=1, post_value=0) + ], + ) + ], + ), + # Contract address MUST NOT appear in BAL - never accessed + # (CREATE failed before track_address was called) + would_be_contract_address: None, + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + # Factory nonce unchanged (still 1), balance unchanged + factory: Account( + nonce=1, balance=factory_balance, storage={0x00: 0} + ), + # Contract was never created + would_be_contract_address: Account.NONEXISTENT, + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 9ddca8a362..07d184d957 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -54,10 +54,10 @@ | `test_bal_oog_7702_delegated_warm_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when first account is warm, second is cold | Alice calls warm delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (warm load succeeds) but **MUST NOT** 
include `TargetContract` (cold load fails due to OOG) | 🟡 Planned | | `test_bal_multiple_balance_changes_same_account` | Ensure BAL tracks multiple balance changes to same account across transactions | Alice funds Bob (starts at 0) in tx0 with exact amount needed. Bob spends everything in tx1 to Charlie. Bob's balance: 0 → funding_amount → 0 | BAL **MUST** include Bob with two `balance_changes`: one at txIndex=1 (receives funds) and one at txIndex=2 (balance returns to 0). This tests balance tracking across two transactions. | ✅ Completed | | `test_bal_multiple_storage_writes_same_slot` | Ensure BAL tracks multiple writes to same storage slot across transactions | Alice calls contract 3 times in same block. Contract increments slot 1 on each call: 0 → 1 → 2 → 3 | BAL **MUST** include contract with slot 1 having three `slot_changes`: txIndex=1 (value 1), txIndex=2 (value 2), txIndex=3 (value 3). Each transaction's write must be recorded separately. | ✅ Completed | -| `test_bal_create_transaction_empty_code` | Ensure BAL does not record spurious code changes for CREATE transaction deploying empty code | Alice sends CREATE transaction with empty initcode (deploys code `b""`). Contract address gets nonce = 1 and code = `b""`. | BAL **MUST** include Alice with `nonce_changes` and created contract with `nonce_changes` but **MUST NOT** include `code_changes` for contract (setting `b"" -> b""` is net-zero). | ✅ Completed | | `test_bal_cross_tx_storage_revert_to_zero` | Ensure BAL captures storage changes when tx2 reverts slot back to original value (blobhash regression test) | Alice sends tx1 writing slot 0=0xABCD (from 0x0), then tx2 writing slot 0=0x0 (back to original) | BAL **MUST** include contract with slot 0 having two `slot_changes`: txIndex=1 (0xABCD) and txIndex=2 (0x0). Cross-transaction net-zero **MUST NOT** be filtered. | ✅ Completed | | `test_bal_create_contract_init_revert` | Ensure BAL correctly handles CREATE when parent call reverts | Caller calls factory, factory executes CREATE (succeeds), then factory REVERTs rolling back the CREATE | BAL **MUST** include Alice with `nonce_changes`. Caller and factory with no changes (reverted). Created contract address appears in BAL but **MUST NOT** have `nonce_changes` or `code_changes` (CREATE was rolled back). Contract address **MUST NOT** exist in post-state. | ✅ Completed | | `test_bal_create_oog_code_deposit` | Ensure BAL correctly handles CREATE OOG during code deposit | Alice calls factory contract that executes CREATE with init code returning 10,000 bytes. Transaction has insufficient gas for code deposit. Factory nonce increments, CREATE returns 0 and stores in slot 1. | BAL **MUST** include Alice with `nonce_changes`. Factory with `nonce_changes` (incremented by CREATE) and `storage_changes` (slot 1 = 0). Contract address with empty changes (read during collision check). **MUST NOT** include nonce or code changes for contract address (rolled back on OOG). Contract address **MUST NOT** exist in post-state. | ✅ Completed | +| `test_bal_create_early_failure` | Ensure BAL correctly handles CREATE that fails before accessing contract address | Factory (balance=50) attempts CREATE(value=100). CREATE fails due to insufficient endowment (100 > 50). Factory stores CREATE result (0) in slot 0. | BAL **MUST** include Alice with `nonce_changes`. Factory with `storage_changes` (slot 0 = 0) but **MUST NOT** have `nonce_changes` (CREATE failed before nonce increment). 
Contract address **MUST NOT** appear in BAL (never accessed - CREATE failed before `track_address`). This is distinct from collision/OOG failures where contract address IS in BAL. | ✅ Completed | | `test_bal_invalid_missing_nonce` | Verify clients reject blocks with BAL missing required nonce changes | Alice sends transaction to Bob; BAL modifier removes Alice's nonce change entry | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate that all sender accounts have nonce changes recorded. | ✅ Completed | | `test_bal_invalid_nonce_value` | Verify clients reject blocks with incorrect nonce values in BAL | Alice sends transaction to Bob; BAL modifier changes Alice's nonce to incorrect value | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate nonce values match actual state transitions. | ✅ Completed | | `test_bal_invalid_storage_value` | Verify clients reject blocks with incorrect storage values in BAL | Alice calls contract that writes to storage; BAL modifier changes storage value to incorrect value | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate storage change values match actual state transitions. | ✅ Completed | @@ -89,10 +89,19 @@ | `test_bal_nonexistent_account_access_value_transfer` | Ensure BAL captures non-existent account accessed via CALL/CALLCODE with value transfers | Alice calls `Oracle` contract which uses `CALL` or `CALLCODE` on non-existent account Bob. Tests both zero and positive value transfers. | BAL **MUST** include Alice with `nonce_changes`. For CALL with positive value: `Oracle` with `balance_changes` (loses value), Bob with `balance_changes` (receives value). For CALLCODE with value or zero value transfers: `Oracle` and Bob with empty changes (CALLCODE self-transfer = net zero). | ✅ Completed | | `test_bal_storage_write_read_same_frame` | Ensure BAL captures write precedence over read in same call frame (writes shadow reads) | Alice calls `Oracle` which writes (`SSTORE`) value `0x42` to slot `0x01`, then reads (`SLOAD`) from slot `0x01` in the same call frame | BAL **MUST** include `Oracle` with slot `0x01` in `storage_changes` showing final value `0x42`. Slot `0x01` **MUST NOT** appear in `storage_reads` (write shadows the subsequent read in same frame). | ✅ Completed | | `test_bal_storage_write_read_cross_frame` | Ensure BAL captures write precedence over read across call frames (writes shadow reads cross-frame) | Alice calls `Oracle`. First call reads slot `0x01` (sees initial value), writes `0x42` to slot `0x01`, then calls itself (via `CALL`, `DELEGATECALL`, or `CALLCODE`). Second call reads slot `0x01` (sees `0x42`) and exits. | BAL **MUST** include `Oracle` with slot `0x01` in `storage_changes` showing final value `0x42`. Slot `0x01` **MUST NOT** appear in `storage_reads` (write shadows both the read before it in same frame and the read in the recursive call). | ✅ Completed | -| `test_bal_create_transaction_empty_code` | Ensure BAL does not record spurious code changes for CREATE transaction deploying empty code | Alice sends CREATE transaction with empty initcode (deploys code `b""`). Contract address gets nonce = 1 and code = `b""`. | BAL **MUST** include Alice with `nonce_changes` and created contract with `nonce_changes` but **MUST NOT** include `code_changes` for contract. 
| ✅ Completed | +| `test_bal_create_transaction_empty_code` | Ensure BAL does not record spurious code changes for CREATE transaction deploying empty code | Alice sends CREATE transaction with empty initcode (deploys code `b""`). Contract address gets nonce = 1 and code = `b""`. | BAL **MUST** include Alice with `nonce_changes` and created contract with `nonce_changes` but **MUST NOT** include `code_changes` for contract (setting `b"" -> b""` is net-zero). | ✅ Completed | +| `test_bal_cross_block_precompile_state_leak` | Ensure internal EVM state for precompile handling does not leak between blocks | Block 1: Alice calls RIPEMD-160 (0x03) with zero value (RIPEMD-160 must be pre-funded). Block 2: Bob's transaction triggers an exception (stack underflow). | BAL for Block 1 **MUST** include RIPEMD-160. BAL for Block 2 **MUST NOT** include RIPEMD-160 (never accessed in Block 2). Internal state from Parity Touch Bug (EIP-161) handling must be reset between blocks. | ✅ Completed | +| `test_bal_all_transaction_types` | Ensure BAL correctly captures state changes from all transaction types in a single block | Single block with 5 transactions: Type 0 (Legacy), Type 1 (EIP-2930 Access List), Type 2 (EIP-1559), Type 3 (EIP-4844 Blob), Type 4 (EIP-7702 Set Code). Each tx writes to contract storage. Note: Access list addresses are pre-warmed but NOT recorded in BAL (no state access). | BAL **MUST** include: (1) All 5 senders with `nonce_changes`. (2) Contracts 0-3 with `storage_changes`. (3) Alice (7702 target) with `nonce_changes`, `code_changes` (delegation), `storage_changes`. (4) Oracle (delegation source) with empty changes. | ✅ Completed | +| `test_bal_create2_collision` | Ensure BAL handles CREATE2 address collision correctly | Factory contract (nonce=1, storage slot 0=0xDEAD) executes `CREATE2(salt=0, initcode)` targeting address that already has `code=STOP, nonce=1`. Pre-deploy contract at calculated CREATE2 target address before factory deployment. | BAL **MUST** include: (1) Factory with `nonce_changes` (1→2, incremented even on failed CREATE2), `storage_changes` for slot 0 (0xDEAD→0, stores failure). (2) Collision address with empty changes (accessed during collision check, no state changes). CREATE2 returns 0. Collision address **MUST NOT** have `nonce_changes` or `code_changes`. | ✅ Completed | +| `test_bal_create_selfdestruct_to_self_with_call` | Ensure BAL handles init code that calls external contract then selfdestructs to itself | Factory executes `CREATE2` with endowment=100. Init code (embedded in factory via CODECOPY): (1) `CALL(Oracle, 0)` - Oracle writes to its storage slot 0x01. (2) `SSTORE(0x01, 0x42)` - write to own storage. (3) `SELFDESTRUCT(SELF)` - selfdestruct to own address. Contract created and destroyed in same tx. | BAL **MUST** include: (1) Factory with `nonce_changes`, `balance_changes` (loses 100). (2) Oracle with `storage_changes` for slot 0x01 (external call succeeded). (3) Created address with `storage_reads` for slot 0x01 (aborted write becomes read) - **MUST NOT** have `nonce_changes`, `code_changes`, `storage_changes`, or `balance_changes` (ephemeral contract, balance burned via SELFDESTRUCT to self). | ✅ Completed | +| `test_bal_selfdestruct_to_7702_delegation` | Ensure BAL correctly handles SELFDESTRUCT to a 7702 delegated account (no code execution on recipient) | Tx1: Alice authorizes delegation to Oracle (sets code to `0xef0100\|\|Oracle`). Tx2: Victim contract (balance=100) executes `SELFDESTRUCT(Alice)`. Two separate transactions in same block. 
Note: Alice starts with initial balance which accumulates with selfdestruct. | BAL **MUST** include: (1) Alice at tx_index=1 with `code_changes` (delegation), `nonce_changes`. (2) Alice at tx_index=2 with `balance_changes` (receives selfdestruct). (3) Victim at tx_index=2 with `balance_changes` (100→0). **Oracle MUST NOT appear in tx2** - per EVM spec, SELFDESTRUCT transfers balance without executing recipient code, so delegation target is never accessed. | ✅ Completed | +| `test_bal_call_revert_insufficient_funds` | Ensure BAL handles CALL failure due to insufficient balance (not OOG) | Contract (balance=100, storage slot 0x02=0xDEAD) executes: `SLOAD(0x01), CALL(target, value=1000), SSTORE(0x02, result)`. CALL fails because 1000 > 100. Target address 0xDEAD (pre-existing with non-zero balance to avoid pruning). Note: slot 0x02 must start non-zero so SSTORE(0) is a change. | BAL **MUST** include: (1) Contract with `storage_reads` for slot 0x01, `storage_changes` for slot 0x02 (value=0, CALL returned failure). (2) Target (0xDEAD) **MUST** appear in BAL with empty changes - target is accessed before balance check fails. | ✅ Completed | +| `test_bal_lexicographic_address_ordering` | Ensure BAL enforces strict lexicographic byte-wise ordering | Pre-fund three addresses with specific byte patterns: `addr_low = 0x0000...0001`, `addr_mid = 0x0000...0100`, `addr_high = 0x0100...0000`. Contract touches them in reverse order: `BALANCE(addr_high), BALANCE(addr_low), BALANCE(addr_mid)`. Additionally, include two endian-trap addresses that are byte-reversals of each other: `addr_endian_low = 0x0100000000000000000000000000000000000002`, `addr_endian_high = 0x0200000000000000000000000000000000000001`. Note: `reverse(addr_endian_low) = addr_endian_high`. Correct lexicographic order: `addr_endian_low < addr_endian_high` (0x01 < 0x02 at byte 0). If implementation incorrectly reverses bytes before comparing, it would get `addr_endian_low > addr_endian_high` (wrong). | BAL account list **MUST** be sorted lexicographically by address bytes: `addr_low` < `addr_mid` < `addr_high` < `addr_endian_low` < `addr_endian_high`, regardless of access order. The endian-trap addresses specifically catch byte-reversal bugs where addresses are compared with wrong byte order. Complements `test_bal_invalid_account_order` which tests rejection; this tests correct generation. | ✅ Completed | +| `test_bal_transient_storage_not_tracked` | Ensure BAL excludes EIP-1153 transient storage operations | Contract executes: `TSTORE(0x01, 0x42)` (transient write), `TLOAD(0x01)` (transient read), `SSTORE(0x02, result)` (persistent write using transient value). | BAL **MUST** include slot 0x02 in `storage_changes` (persistent storage was modified). BAL **MUST NOT** include slot 0x01 in `storage_reads` or `storage_changes` (transient storage is not persisted, not needed for stateless execution). This verifies TSTORE/TLOAD don't pollute BAL. | ✅ Completed | +| `test_bal_selfdestruct_to_precompile` | Ensure BAL captures SELFDESTRUCT with precompile as beneficiary | Caller triggers victim contract (balance=100) to execute `SELFDESTRUCT(0x0000...0001)` (ecrecover precompile). Precompile starts with balance=0. | BAL **MUST** include: (1) Contract with `balance_changes` (100→0, loses balance to selfdestruct). (2) Precompile address 0x01 with `balance_changes` (0→100, receives selfdestruct balance). Precompile **MUST NOT** have `code_changes` or `nonce_changes`. 
This complements `test_bal_withdrawal_to_precompiles` (withdrawal) and `test_bal_precompile_funded` (tx value). | ✅ Completed | +| `test_bal_self_destruct_oog` | Ensure BAL correctly tracks SELFDESTRUCT beneficiary based on gas boundaries | Alice calls `Caller` contract which CALLs `SelfDestructContract` with precisely controlled gas. `SelfDestructContract` attempts SELFDESTRUCT to new account `Beneficiary`. Static gas = G_VERY_LOW + G_SELF_DESTRUCT + G_COLD_ACCOUNT_ACCESS. Parameterized: (1) OOG before state access (gas = static - 1), (2) OOG after state access (gas = static, but insufficient for G_NEW_ACCOUNT). | For OOG before state access: BAL **MUST NOT** include `Beneficiary` (no state access occurred). For OOG after state access: BAL **MUST** include `Beneficiary` with empty changes (state was accessed before G_NEW_ACCOUNT check failed). Both cases: Alice with `nonce_changes`, `Caller` and `SelfDestructContract` with empty changes. Contract balance unchanged. | ✅ Completed | +| `test_bal_withdrawal_to_7702_delegation` | Ensure BAL correctly handles withdrawal to a 7702 delegated account (no code execution on recipient) | Tx1: Alice authorizes delegation to Oracle (sets code to `0xef0100\|\|Oracle`). Withdrawal: 10 gwei sent to Alice. Single block with tx + withdrawal. | BAL **MUST** include: (1) Alice at tx_index=1 with `code_changes` (delegation), `nonce_changes`. (2) Alice at tx_index=2 with `balance_changes` (receives withdrawal). **Oracle MUST NOT appear** - withdrawals credit balance without executing recipient code, so delegation target is never accessed. This complements `test_bal_selfdestruct_to_7702_delegation` (selfdestruct) and `test_bal_withdrawal_no_evm_execution` (withdrawal to contract). | ✅ Completed | | `test_init_collision_create_tx` | Ensure BAL tracks CREATE collisions correctly (pre-Amsterdam test with BAL) | CREATE transaction targeting address with existing storage aborts | BAL **MUST** show empty expectations for collision address (no changes occur due to abort) | ✅ Completed | | `test_call_to_pre_authorized_oog` | Ensure BAL handles OOG during EIP-7702 delegation access (pre-Amsterdam test with BAL) | Call to delegated account that OOGs before accessing delegation contract | BAL **MUST** include auth_signer (code read for delegation check) but **MUST NOT** include delegation contract (OOG before access) | ✅ Completed | | `test_selfdestruct_created_in_same_tx_with_revert` | Ensure BAL tracks selfdestruct with revert correctly (pre-Amsterdam test with BAL) | Contract created and selfdestructed in same tx with nested revert | BAL **MUST** track storage reads and balance changes for selfdestruct even with reverts | ✅ Completed | | `test_value_transfer_gas_calculation` | Ensure BAL correctly tracks OOG scenarios for CALL/CALLCODE/DELEGATECALL/STATICCALL (pre-Amsterdam test with BAL) | Nested calls with precise gas limits to test OOG behavior. For CALL with OOG: target account is read. For CALLCODE/DELEGATECALL/STATICCALL with OOG: target account **NOT** read (OOG before state access) | For CALL: target in BAL even with OOG. For CALLCODE/DELEGATECALL/STATICCALL: target **NOT** in BAL when OOG (state access deferred until after gas check) | ✅ Completed | -| `test_bal_cross_block_precompile_state_leak` | Ensure internal EVM state for precompile handling does not leak between blocks | Block 1: Alice calls RIPEMD-160 (0x03) with zero value (RIPEMD-160 must be pre-funded). Block 2: Bob's transaction triggers an exception (stack underflow). 
| BAL for Block 1 **MUST** include RIPEMD-160. BAL for Block 2 **MUST NOT** include RIPEMD-160 (never accessed in Block 2). Internal state from Parity Touch Bug (EIP-161) handling must be reset between blocks. | ✅ Completed | -| `test_bal_self_destruct_oog` | Ensure BAL correctly tracks SELFDESTRUCT beneficiary based on gas boundaries | Alice calls `Caller` contract which CALLs `SelfDestructContract` with precisely controlled gas. `SelfDestructContract` attempts SELFDESTRUCT to new account `Beneficiary`. Static gas = G_VERY_LOW + G_SELF_DESTRUCT + G_COLD_ACCOUNT_ACCESS. Parameterized: (1) OOG before state access (gas = static - 1), (2) OOG after state access (gas = static, but insufficient for G_NEW_ACCOUNT). | For OOG before state access: BAL **MUST NOT** include `Beneficiary` (no state access occurred). For OOG after state access: BAL **MUST** include `Beneficiary` with empty changes (state was accessed before G_NEW_ACCOUNT check failed). Both cases: Alice with `nonce_changes`, `Caller` and `SelfDestructContract` with empty changes. Contract balance unchanged. | ✅ Completed | From f6d51e8f4f0dd0361dbebefaa6de754de7b24796 Mon Sep 17 00:00:00 2001 From: felipe Date: Mon, 8 Dec 2025 14:40:18 -0700 Subject: [PATCH 050/154] feat(test-tests): BAL test for nested storage write reset same tx (#1854) --- .../test_block_access_lists.py | 92 +++++++++++++++++++ .../test_cases.md | 1 + 2 files changed, 93 insertions(+) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py index 832a8d8e46..f7a4ca2830 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py @@ -1939,6 +1939,98 @@ def test_bal_multiple_storage_writes_same_slot( ) +@pytest.mark.parametrize( + "intermediate_values", + [ + pytest.param([2], id="depth_1"), + pytest.param([2, 3], id="depth_2"), + pytest.param([2, 3, 4], id="depth_3"), + ], +) +def test_bal_nested_delegatecall_storage_writes_net_zero( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + intermediate_values: list, +) -> None: + """ + Test BAL correctly handles nested DELEGATECALL frames where intermediate + frames write different values but the deepest frame reverts to original. + + Each nesting level writes a different intermediate value, and the deepest + frame writes back the original value, resulting in net-zero change. + + Example for depth=2 (intermediate_values=[2, 3]): + - Pre-state: slot 0 = 1 + - Root frame writes: slot 0 = 2 + - Child frame writes: slot 0 = 3 + - Grandchild frame writes: slot 0 = 1 (back to original) + - Expected: No storage_changes (net-zero overall) + """ + alice = pre.fund_eoa() + starting_value = 1 + + # deepest contract writes back to starting_value + deepest_code = Op.SSTORE(0, starting_value) + Op.STOP + next_contract = pre.deploy_contract(code=deepest_code) + delegate_contracts = [next_contract] + + # Build intermediate contracts (in reverse order) that write then + # DELEGATECALL. 
Skip the first value since that's for the root contract + for value in reversed(intermediate_values[1:]): + code = ( + Op.SSTORE(0, value) + + Op.DELEGATECALL(100_000, next_contract, 0, 0, 0, 0) + + Op.STOP + ) + next_contract = pre.deploy_contract(code=code) + delegate_contracts.append(next_contract) + + # root_contract writes first intermediate value, then DELEGATECALLs + root_contract = pre.deploy_contract( + code=( + Op.SSTORE(0, intermediate_values[0]) + + Op.DELEGATECALL(100_000, next_contract, 0, 0, 0, 0) + + Op.STOP + ), + storage={0: starting_value}, + ) + + tx = Transaction( + sender=alice, + to=root_contract, + gas_limit=500_000, + ) + + account_expectations = { + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + ), + root_contract: BalAccountExpectation( + storage_reads=[0], + storage_changes=[], # validate no changes + ), + } + # All delegate contracts accessed but no changes + for contract in delegate_contracts: + account_expectations[contract] = BalAccountExpectation.empty() + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + ], + post={ + alice: Account(nonce=1), + root_contract: Account(storage={0: starting_value}), + }, + ) + + def test_bal_create_transaction_empty_code( pre: Alloc, blockchain_test: BlockchainTestFiller, diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 07d184d957..2a318f5b63 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -54,6 +54,7 @@ | `test_bal_oog_7702_delegated_warm_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when first account is warm, second is cold | Alice calls warm delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (warm load succeeds) but **MUST NOT** include `TargetContract` (cold load fails due to OOG) | 🟡 Planned | | `test_bal_multiple_balance_changes_same_account` | Ensure BAL tracks multiple balance changes to same account across transactions | Alice funds Bob (starts at 0) in tx0 with exact amount needed. Bob spends everything in tx1 to Charlie. Bob's balance: 0 → funding_amount → 0 | BAL **MUST** include Bob with two `balance_changes`: one at txIndex=1 (receives funds) and one at txIndex=2 (balance returns to 0). This tests balance tracking across two transactions. | ✅ Completed | | `test_bal_multiple_storage_writes_same_slot` | Ensure BAL tracks multiple writes to same storage slot across transactions | Alice calls contract 3 times in same block. Contract increments slot 1 on each call: 0 → 1 → 2 → 3 | BAL **MUST** include contract with slot 1 having three `slot_changes`: txIndex=1 (value 1), txIndex=2 (value 2), txIndex=3 (value 3). Each transaction's write must be recorded separately. | ✅ Completed | +| `test_bal_nested_delegatecall_storage_writes_net_zero` | Ensure BAL correctly filters net-zero storage changes across nested DELEGATECALL frames | Parametrized by nesting depth (1-3). Root contract has slot 0 = 1. Each frame writes a different intermediate value via DELEGATECALL chain, deepest frame writes back to original value (1). 
Example depth=2: 1 → 2 → 3 → 1 | BAL **MUST** include root contract with `storage_reads` for slot 0 but **MUST NOT** include `storage_changes` (net-zero). All delegate contracts **MUST** have empty changes. Tests that frame merging correctly removes parent's intermediate writes when child reverts to pre-tx value. | ✅ Completed | | `test_bal_cross_tx_storage_revert_to_zero` | Ensure BAL captures storage changes when tx2 reverts slot back to original value (blobhash regression test) | Alice sends tx1 writing slot 0=0xABCD (from 0x0), then tx2 writing slot 0=0x0 (back to original) | BAL **MUST** include contract with slot 0 having two `slot_changes`: txIndex=1 (0xABCD) and txIndex=2 (0x0). Cross-transaction net-zero **MUST NOT** be filtered. | ✅ Completed | | `test_bal_create_contract_init_revert` | Ensure BAL correctly handles CREATE when parent call reverts | Caller calls factory, factory executes CREATE (succeeds), then factory REVERTs rolling back the CREATE | BAL **MUST** include Alice with `nonce_changes`. Caller and factory with no changes (reverted). Created contract address appears in BAL but **MUST NOT** have `nonce_changes` or `code_changes` (CREATE was rolled back). Contract address **MUST NOT** exist in post-state. | ✅ Completed | | `test_bal_create_oog_code_deposit` | Ensure BAL correctly handles CREATE OOG during code deposit | Alice calls factory contract that executes CREATE with init code returning 10,000 bytes. Transaction has insufficient gas for code deposit. Factory nonce increments, CREATE returns 0 and stores in slot 1. | BAL **MUST** include Alice with `nonce_changes`. Factory with `nonce_changes` (incremented by CREATE) and `storage_changes` (slot 1 = 0). Contract address with empty changes (read during collision check). **MUST NOT** include nonce or code changes for contract address (rolled back on OOG). Contract address **MUST NOT** exist in post-state. | ✅ Completed | From acaca8dc67024a1e3323cabbb92286bfb6f451a7 Mon Sep 17 00:00:00 2001 From: Felipe Selmo Date: Tue, 9 Dec 2025 16:07:17 +0000 Subject: [PATCH 051/154] fix(spec-specs): post-exec net-zero filtering post specs refactor - refactor(spec-specs): Move net-zero filtering inside commit tx frame --- src/ethereum/forks/amsterdam/fork.py | 8 ++- src/ethereum/forks/amsterdam/state_tracker.py | 16 ++++-- .../test_block_access_lists_cross_index.py | 54 +++++++++++++++++++ 3 files changed, 69 insertions(+), 9 deletions(-) diff --git a/src/ethereum/forks/amsterdam/fork.py b/src/ethereum/forks/amsterdam/fork.py index 697086e2b4..7da3ed03ce 100644 --- a/src/ethereum/forks/amsterdam/fork.py +++ b/src/ethereum/forks/amsterdam/fork.py @@ -681,7 +681,7 @@ def process_system_transaction( # Commit system transaction changes to block frame # System transactions always succeed (or block is invalid) - commit_transaction_frame(tx_env.state_changes) + commit_transaction_frame(tx_env.state_changes, block_env.state) return system_tx_output @@ -1092,11 +1092,9 @@ def process_transaction( for address in tx_output.accounts_to_delete: destroy_account(block_env.state, address) - # EIP-7928: Filter net-zero changes before committing to block frame. + # EIP-7928: Commit transaction frame (includes net-zero filtering). # Must happen AFTER destroy_account so filtering sees correct state. 
- filter_net_zero_frame_changes(tx_env.state_changes, block_env.state) - - commit_transaction_frame(tx_env.state_changes) + commit_transaction_frame(tx_env.state_changes, block_env.state) # EIP-7928: Track in-transaction self-destruct normalization AFTER merge # Convert storage writes to reads and remove nonce/code changes diff --git a/src/ethereum/forks/amsterdam/state_tracker.py b/src/ethereum/forks/amsterdam/state_tracker.py index 19a929d0dd..05461ea89b 100644 --- a/src/ethereum/forks/amsterdam/state_tracker.py +++ b/src/ethereum/forks/amsterdam/state_tracker.py @@ -436,22 +436,30 @@ def merge_on_failure(child_frame: StateChanges) -> None: # merged on failure - they are discarded -def commit_transaction_frame(tx_frame: StateChanges) -> None: +def commit_transaction_frame( + tx_frame: StateChanges, + state: "State", +) -> None: """ Commit transaction frame to block frame. - Unlike merge_on_success(), this merges ALL changes without net-zero - filtering (each tx's changes recorded at their respective index). + Filters net-zero changes before merging to ensure only actual state + modifications are recorded in the block access list. Parameters ---------- tx_frame : The transaction frame to commit. + state : + The current state (used for net-zero filtering). """ assert tx_frame.parent is not None block_frame = tx_frame.parent + # Filter net-zero changes before committing + filter_net_zero_frame_changes(tx_frame, state) + # Merge address accesses block_frame.touched_addresses.update(tx_frame.touched_addresses) @@ -468,7 +476,7 @@ def commit_transaction_frame(tx_frame: StateChanges) -> None: for addr, idx, nonce in tx_frame.nonce_changes: block_frame.nonce_changes.add((addr, idx, nonce)) - # Merge code changes (net-zero filtering done in normalize_transaction) + # Merge code changes for (addr, idx), final_code in tx_frame.code_changes.items(): block_frame.code_changes[(addr, idx)] = final_code diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_cross_index.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_cross_index.py index 920b8bc344..72ac91c923 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_cross_index.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_cross_index.py @@ -258,3 +258,57 @@ def test_bal_noop_write_filtering( test_address: Account(storage={2: 42, 3: 100, 4: 200}), }, ) + + +def test_bal_system_contract_noop_filtering( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Test that system contract post-execution calls filter net-zero + storage writes. + + When no transaction interacts with withdrawal/consolidation contracts + during a block, the post-execution system calls read storage slots + 0-3 but don't modify them. These should appear as storage READS, + not storage CHANGES. 
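+
+    An explicit empty storage_changes list is used below on purpose: a
+    client that records these post-execution reads as writes fails the
+    check instead of passing silently.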
+ """ + sender = pre.fund_eoa() + receiver = pre.fund_eoa(amount=0) + + # simple transfer that doesn't interact with system contracts + tx = Transaction( + sender=sender, + to=receiver, + value=100, + gas_limit=21_000, + ) + + # withdrawal and consolidation contracts should NOT have any storage + # changes since they weren't modified - only reads occurred during + # post-execution system calls + expected_block_access_list = BlockAccessListExpectation( + account_expectations={ + WITHDRAWAL_REQUEST_ADDRESS: BalAccountExpectation( + storage_changes=[], + storage_reads=[0x00, 0x01, 0x02, 0x03], + ), + CONSOLIDATION_REQUEST_ADDRESS: BalAccountExpectation( + storage_changes=[], + storage_reads=[0x00, 0x01, 0x02, 0x03], + ), + } + ) + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=expected_block_access_list, + ) + ], + post={ + receiver: Account(balance=100), + }, + ) From 7eb798b8b4586ad8e99c979b47b7426ea6a97091 Mon Sep 17 00:00:00 2001 From: felipe Date: Fri, 12 Dec 2025 11:40:03 -0700 Subject: [PATCH 052/154] feat(test-tests): Expand BAL CALL opcode OOG boundary test cases (#1882) * feat(test-tests): expand bal call opcode oog boundary tests Parametrize for: - value / no value - COLD / WARM target - 7702 delegation / no delegation - WARM / COLD delegation - mem expansion / no mem expansion Include tests both before and after state access to ensure BAL expectations at these gas boundaries are met. * refactor(tests): Use `fork.memory_expansion_gas_calculator()` * feat(test-tests): Refactor; Add second 7702 boundary at success minus 1 * refactor: changes from comments on PR #1882 * fix: update test names and descriptions after refactor --- .../test_block_access_lists.py | 49 +- .../test_block_access_lists_opcodes.py | 1264 +++++++++++++++-- .../test_cases.md | 14 +- whitelist.txt | 1 + 4 files changed, 1175 insertions(+), 153 deletions(-) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py index f7a4ca2830..32a1052841 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py @@ -286,56 +286,13 @@ def test_bal_account_access_target( blockchain_test(pre=pre, blocks=[block], post={}) -def test_bal_call_with_value_transfer( +def test_bal_callcode_nested_value_transfer( pre: Alloc, blockchain_test: BlockchainTestFiller, ) -> None: """ - Ensure BAL captures balance changes from CALL opcode with - value transfer. 
- """ - alice = pre.fund_eoa() - bob = pre.fund_eoa(amount=0) - - # Oracle contract that uses CALL to transfer 100 wei to Bob - oracle_code = Op.CALL(0, bob, 100, 0, 0, 0, 0) - oracle_contract = pre.deploy_contract(code=oracle_code, balance=200) - - tx = Transaction( - sender=alice, to=oracle_contract, gas_limit=1_000_000, gas_price=0xA - ) - - block = Block( - txs=[tx], - expected_block_access_list=BlockAccessListExpectation( - account_expectations={ - alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], - ), - oracle_contract: BalAccountExpectation( - balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=100) - ], - ), - bob: BalAccountExpectation( - balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=100) - ], - ), - } - ), - ) - - blockchain_test(pre=pre, blocks=[block], post={}) - - -def test_bal_callcode_with_value_transfer( - pre: Alloc, - blockchain_test: BlockchainTestFiller, -) -> None: - """ - Ensure BAL captures balance changes from CALLCODE opcode with - value transfer. + Ensure BAL captures balance changes from nested value transfers + when CALLCODE executes target code that itself makes CALL with value. """ alice = pre.fund_eoa() bob = pre.fund_eoa(amount=0) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py index 448936448e..9e2355b93e 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py @@ -19,6 +19,7 @@ import pytest from execution_testing import ( + AccessList, Account, Address, Alloc, @@ -57,6 +58,36 @@ class OutOfGasAt(Enum): EXACT_GAS_MINUS_1 = "oog_at_exact_gas_minus_1" +class OutOfGasBoundary(Enum): + """ + OOG boundary scenarios for call-type opcodes with 7702 delegation. + + For 7702 targets, there's ALWAYS a gap between static gas check and + second check (delegation_cost). All 4 scenarios test + distinct boundaries. + + Gas check order: + 1. oog_before_target_access: access + transfer (if applicable) + memory. + OOG with not enough for this check - no state access. + 2. oog_after_target_access: only enough for static check, state access + reads target into BAL, not enough for anything else. + 3. oog_success_minus_1: exact gas minus 1. OOG here means target is in + BAL, but we have enough information to calculate delegation cost + AND the message call gas and not read if we don't have enough for + both - delegation target NOT in BAL. + 4. success: target and delegation target both in BAL. + + OOG_SUCCESS_MINUS_1 tests that even when we have enough for delegation + access cost, if we don't have enough for the total (missing subcall_gas), + we don't read the delegation. 
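+
+    As a rough sketch, the tests below derive their gas limits from
+    these boundaries as follows (local variable names, not spec terms):
+
+        oog_before_target_access: intrinsic + bytecode + static_gas - 1
+        oog_after_target_access:  intrinsic + bytecode + static_gas
+        oog_success_minus_1:      intrinsic + bytecode + second_check - 1
+        success:                  intrinsic + bytecode + second_check
+
+    where second_check adds the delegation (or new-account) cost on top
+    of static_gas.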
+ """ + + OOG_BEFORE_TARGET_ACCESS = "oog_before_target_access" + OOG_AFTER_TARGET_ACCESS = "oog_after_target_access" + OOG_SUCCESS_MINUS_1 = "oog_success_minus_1" + SUCCESS = "success" + + @pytest.mark.parametrize( "out_of_gas_at", [ @@ -375,159 +406,1184 @@ def test_bal_extcodesize_and_oog( @pytest.mark.parametrize( - "fails_at_call", [True, False], ids=["oog_at_call", "successful_call"] + "oog_boundary", + [OutOfGasBoundary.SUCCESS, OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS], + ids=lambda x: x.value, +) +@pytest.mark.parametrize( + "target_is_warm", [False, True], ids=["cold_target", "warm_target"] +) +@pytest.mark.parametrize( + "target_is_empty", [False, True], ids=["existing_target", "empty_target"] +) +@pytest.mark.parametrize("value", [0, 1], ids=["no_value", "with_value"]) +@pytest.mark.parametrize( + "memory_expansion", [False, True], ids=["no_memory", "with_memory"] ) -def test_bal_call_and_oog( +def test_bal_call_no_delegation_and_oog_before_target_access( pre: Alloc, blockchain_test: BlockchainTestFiller, fork: Fork, - fails_at_call: bool, + oog_boundary: OutOfGasBoundary, + target_is_warm: bool, + target_is_empty: bool, + value: int, + memory_expansion: bool, ) -> None: - """Ensure BAL handles CALL and OOG during CALL appropriately.""" + """ + CALL without 7702 delegation - test SUCCESS and OOG before target access. + + When target_is_warm=True, we use EIP-2930 tx access list to warm the + target. Access list warming does NOT add to BAL - only EVM access does. + """ + gas_costs = fork.gas_costs() alice = pre.fund_eoa() - bob = pre.fund_eoa() + + target = ( + pre.empty_account() + if target_is_empty + else pre.deploy_contract(code=Op.STOP) + ) + + ret_size = 32 if memory_expansion else 0 + + call_code = Op.CALL( + gas=0, address=target, value=value, ret_size=ret_size, ret_offset=0 + ) + caller = pre.deploy_contract(code=call_code, balance=value) + + access_list = ( + [AccessList(address=target, storage_keys=[])] + if target_is_warm + else None + ) + + intrinsic_cost = fork.transaction_intrinsic_cost_calculator()( + access_list=access_list + ) + + bytecode_cost = gas_costs.G_VERY_LOW * 7 + + access_cost = ( + gas_costs.G_WARM_ACCOUNT_ACCESS + if target_is_warm + else gas_costs.G_COLD_ACCOUNT_ACCESS + ) + transfer_cost = gas_costs.G_CALL_VALUE if value > 0 else 0 + memory_cost = fork.memory_expansion_gas_calculator()(new_bytes=ret_size) + + # Create cost: only if value > 0 AND target is empty + create_cost = ( + gas_costs.G_NEW_ACCOUNT if (value > 0 and target_is_empty) else 0 + ) + + # static gas (before state access): access + transfer + memory + static_gas_cost = access_cost + transfer_cost + memory_cost + # second check includes create_cost + second_check_cost = static_gas_cost + create_cost + + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost - 1 + else: # SUCCESS + gas_limit = intrinsic_cost + bytecode_cost + second_check_cost + + tx = Transaction( + sender=alice, + to=caller, + gas_limit=gas_limit, + access_list=access_list, + ) + + # BAL expectations + account_expectations: Dict[Address, BalAccountExpectation | None] + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + # Target NOT in BAL - we OOG before state access + account_expectations = { + caller: BalAccountExpectation.empty(), + target: None, + } + elif value > 0: + account_expectations = { + caller: BalAccountExpectation( + balance_changes=[BalBalanceChange(tx_index=1, post_balance=0)] + ), + target: BalAccountExpectation( + 
balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=value) + ] + ), + } + else: + account_expectations = { + caller: BalAccountExpectation.empty(), + target: BalAccountExpectation.empty(), + } + + value_transferred = value > 0 and oog_boundary == OutOfGasBoundary.SUCCESS + + post_state: Dict[Address, Account | None] = {alice: Account(nonce=1)} + + if value_transferred: + post_state[target] = Account(balance=value) + post_state[caller] = Account(balance=0) + else: + post_state[caller] = Account(balance=value) + post_state[target] = ( + Account.NONEXISTENT + if target_is_empty + else Account(balance=0, code=Op.STOP) + ) + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + ], + post=post_state, + ) + + +@pytest.mark.parametrize( + "target_is_warm", [False, True], ids=["cold_target", "warm_target"] +) +@pytest.mark.parametrize( + "memory_expansion", [False, True], ids=["no_memory", "with_memory"] +) +def test_bal_call_no_delegation_oog_after_target_access( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + target_is_warm: bool, + memory_expansion: bool, +) -> None: + """ + CALL without 7702 delegation - OOG after state access. + + When target_is_warm=True, uses EIP-2930 tx access list to warm the target. + Access list warming does NOT add targets to BAL - only EVM access does. + + This test is only meaningful when there's a gap between gas check before + state access and after state access. This only happens if create cost + (empty target) and value transfer cost are both non-zero. + + Note: + - target is always empty - required for create cost + - value=1 (greater than 0) - required for create cost + + The create_cost (G_NEW_ACCOUNT = 25000) is charged only for value transfers + to empty accounts, creating the gap tested here. 
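+
+    Concretely, in the local variable names used below, the gas limit is
+    intrinsic + bytecode + access + G_CALL_VALUE + memory: enough for the
+    upfront charge, but nothing left for the extra G_NEW_ACCOUNT, so the
+    target is read into the BAL and the value transfer then fails.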
+ + """ gas_costs = fork.gas_costs() + alice = pre.fund_eoa() - # Create contract that attempts to call Bob - call_contract_code = Bytecode( - Op.PUSH1(0) # retSize - + Op.PUSH1(0) # retOffset - + Op.PUSH1(0) # argsSize - + Op.PUSH1(0) # argsOffset - + Op.PUSH1(0) # value - + Op.PUSH20(bob) # address - + Op.PUSH2(0xFFFF) # gas (provide enough for the call) - + Op.CALL # Call (cold account access) - + Op.STOP + # empty target required for create_cost gap + target = pre.empty_account() + # value > 0 required for create_cost + value = 1 + + # memory expansion / no expansion + ret_size = 32 if memory_expansion else 0 + + # caller contract - no warmup code, we use tx access list instead + call_code = Op.CALL( + gas=0, address=target, value=value, ret_size=ret_size, ret_offset=0 ) + caller = pre.deploy_contract(code=call_code, balance=value) - call_contract = pre.deploy_contract(code=call_contract_code) + # Access list for warming target (if needed) + access_list = ( + [AccessList(address=target, storage_keys=[])] + if target_is_warm + else None + ) - intrinsic_gas_calculator = fork.transaction_intrinsic_cost_calculator() - intrinsic_gas_cost = intrinsic_gas_calculator() + intrinsic_cost = fork.transaction_intrinsic_cost_calculator()( + access_list=access_list + ) - # Costs: - # - 7 PUSH operations = G_VERY_LOW * 7 - # - CALL cold = G_COLD_ACCOUNT_ACCESS (minimum for account access) - push_cost = gas_costs.G_VERY_LOW * 7 - call_cold_cost = gas_costs.G_COLD_ACCOUNT_ACCESS - tx_gas_limit = intrinsic_gas_cost + push_cost + call_cold_cost - - if fails_at_call: - # subtract 1 gas to ensure OOG at CALL - tx_gas_limit -= 1 + # Bytecode cost: 7 pushes for Op.CALL (no warmup code) + bytecode_cost = gas_costs.G_VERY_LOW * 7 + + # Access cost for CALL - warm if in tx access list + access_cost = ( + gas_costs.G_WARM_ACCOUNT_ACCESS + if target_is_warm + else gas_costs.G_COLD_ACCOUNT_ACCESS + ) + transfer_cost = gas_costs.G_CALL_VALUE # value > 0, so always charged + memory_cost = fork.memory_expansion_gas_calculator()(new_bytes=ret_size) + + # static gas cost (before state access): access + transfer + memory + static_gas_cost = access_cost + transfer_cost + memory_cost + + # Pass static check, fail at second check due to create cost + # (create_cost = G_NEW_ACCOUNT = 25000 for empty target + value > 0) + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost tx = Transaction( sender=alice, - to=call_contract, - gas_limit=tx_gas_limit, + to=caller, + gas_limit=gas_limit, + access_list=access_list, ) - block = Block( - txs=[tx], - expected_block_access_list=BlockAccessListExpectation( - account_expectations={ - call_contract: BalAccountExpectation.empty(), - # Bob should only appear if CALL succeeded - **( - {bob: None} - if fails_at_call - else {bob: BalAccountExpectation.empty()} + # Target is always in BAL after state access but value transfer fails + # (no balance changes) + account_expectations: Dict[Address, BalAccountExpectation | None] = { + caller: BalAccountExpectation.empty(), + target: BalAccountExpectation.empty(), + } + + post_state = { + alice: Account(nonce=1), + caller: Account(balance=value), + target: Account.NONEXISTENT, + } + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations ), - } + ) + ], + post=post_state, + ) + + +@pytest.mark.parametrize( + "oog_boundary", + list(OutOfGasBoundary), + ids=lambda x: x.value, +) +@pytest.mark.parametrize( + "target_is_warm", [False, 
True], ids=["cold_target", "warm_target"] +) +@pytest.mark.parametrize( + "delegation_is_warm", + [False, True], + ids=["cold_delegation", "warm_delegation"], +) +@pytest.mark.parametrize("value", [0, 1], ids=["no_value", "with_value"]) +@pytest.mark.parametrize( + "memory_expansion", [False, True], ids=["no_memory", "with_memory"] +) +def test_bal_call_7702_delegation_and_oog( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + oog_boundary: OutOfGasBoundary, + target_is_warm: bool, + delegation_is_warm: bool, + value: int, + memory_expansion: bool, +) -> None: + """ + CALL with 7702 delegation - test all OOG boundaries. + + When target_is_warm or delegation_is_warm, we use EIP-2930 tx access list. + Access list warming does NOT add targets to BAL - only EVM access does. + """ + gas_costs = fork.gas_costs() + alice = pre.fund_eoa() + + delegation_target = pre.deploy_contract(code=Op.STOP) + target = pre.fund_eoa(amount=0, delegation=delegation_target) + + # memory expansion / no expansion + ret_size = 32 if memory_expansion else 0 + + call_code = Op.CALL( + gas=0, + address=target, + value=value, + ret_size=ret_size, + ret_offset=0, + ) + caller = pre.deploy_contract(code=call_code, balance=value) + + # Build access list for warming + access_list: list[AccessList] = [] + if target_is_warm: + access_list.append(AccessList(address=target, storage_keys=[])) + if delegation_is_warm: + access_list.append( + AccessList(address=delegation_target, storage_keys=[]) + ) + + intrinsic_cost = fork.transaction_intrinsic_cost_calculator()( + access_list=access_list + ) + + bytecode_cost = gas_costs.G_VERY_LOW * 7 + + access_cost = ( + gas_costs.G_WARM_ACCOUNT_ACCESS + if target_is_warm + else gas_costs.G_COLD_ACCOUNT_ACCESS + ) + transfer_cost = gas_costs.G_CALL_VALUE if value > 0 else 0 + memory_cost = fork.memory_expansion_gas_calculator()(new_bytes=ret_size) + delegation_cost = ( + gas_costs.G_WARM_ACCOUNT_ACCESS + if delegation_is_warm + else gas_costs.G_COLD_ACCOUNT_ACCESS + ) + + static_gas_cost = access_cost + transfer_cost + memory_cost + + # The EVM's second check cost is static_gas + delegation_cost. 
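+    # For example, with a cold target, cold delegation, no value and no
+    # memory expansion this is 2600 + 2600; a warm access drops either
+    # term to 100 (the exact constants come from fork.gas_costs()).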
+ second_check_cost = static_gas_cost + delegation_cost + + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost - 1 + elif oog_boundary == OutOfGasBoundary.OOG_AFTER_TARGET_ACCESS: + # Enough for static_gas only - not enough for delegation_cost + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost + elif oog_boundary == OutOfGasBoundary.OOG_SUCCESS_MINUS_1: + # One less than second_check_cost - not enough for full call + gas_limit = intrinsic_cost + bytecode_cost + second_check_cost - 1 + else: + gas_limit = intrinsic_cost + bytecode_cost + second_check_cost + + tx = Transaction( + sender=alice, + to=caller, + gas_limit=gas_limit, + access_list=access_list, + ) + + # Access list warming does NOT add to BAL - only EVM execution does + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + target_in_bal = False + delegation_in_bal = False + elif oog_boundary in ( + OutOfGasBoundary.OOG_AFTER_TARGET_ACCESS, + OutOfGasBoundary.OOG_SUCCESS_MINUS_1, + ): + # Both cases: target accessed but not enough gas for full call + # so delegation is NOT read (static check optimization) + target_in_bal = True + delegation_in_bal = False + else: + target_in_bal = True + delegation_in_bal = True + + value_transferred = value > 0 and oog_boundary == OutOfGasBoundary.SUCCESS + + account_expectations: Dict[Address, BalAccountExpectation | None] = { + caller: ( + BalAccountExpectation( + balance_changes=[BalBalanceChange(tx_index=1, post_balance=0)] + ) + if value_transferred + else BalAccountExpectation.empty() ), + delegation_target: ( + BalAccountExpectation.empty() if delegation_in_bal else None + ), + } + + if target_in_bal: + if value_transferred: + account_expectations[target] = BalAccountExpectation( + balance_changes=[ + BalBalanceChange(tx_index=1, post_balance=value) + ] + ) + else: + account_expectations[target] = BalAccountExpectation.empty() + else: + account_expectations[target] = None + + # Post-state balance checks verify value transfer only happened on success + post_state: Dict[Address, Account] = {alice: Account(nonce=1)} + if value > 0: + post_state[target] = Account(balance=value if value_transferred else 0) + post_state[caller] = Account(balance=0 if value_transferred else value) + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + ], + post=post_state, ) + +@pytest.mark.parametrize( + "oog_boundary", + [OutOfGasBoundary.SUCCESS, OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS], + ids=lambda x: x.value, +) +@pytest.mark.parametrize( + "target_is_warm", [False, True], ids=["cold_target", "warm_target"] +) +@pytest.mark.parametrize( + "memory_expansion", [False, True], ids=["no_memory", "with_memory"] +) +def test_bal_delegatecall_no_delegation_and_oog_before_target_access( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + oog_boundary: OutOfGasBoundary, + target_is_warm: bool, + memory_expansion: bool, +) -> None: + """ + DELEGATECALL without 7702 delegation - test SUCCESS and OOG boundaries. + + When target_is_warm=True, we use EIP-2930 tx access list to warm the + target. Access list warming does NOT add to BAL - only EVM access does. 
+ """ + alice = pre.fund_eoa() + gas_costs = fork.gas_costs() + + target = pre.deploy_contract(code=Op.STOP) + + ret_size = 32 if memory_expansion else 0 + ret_offset = 0 + + delegatecall_code = Op.DELEGATECALL( + address=target, + gas=0, + ret_size=ret_size, + ret_offset=ret_offset, + ) + + caller = pre.deploy_contract(code=delegatecall_code) + + access_list = ( + [AccessList(address=target, storage_keys=[])] + if target_is_warm + else None + ) + + intrinsic_cost = fork.transaction_intrinsic_cost_calculator()( + access_list=access_list + ) + + # 6 pushes: retSize, retOffset, argsSize, argsOffset, address, gas + bytecode_cost = gas_costs.G_VERY_LOW * 6 + + access_cost = ( + gas_costs.G_WARM_ACCOUNT_ACCESS + if target_is_warm + else gas_costs.G_COLD_ACCOUNT_ACCESS + ) + + memory_cost = fork.memory_expansion_gas_calculator()(new_bytes=ret_size) + + # static gas (before state access) == second check (no delegation cost) + static_gas_cost = access_cost + memory_cost + + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost - 1 + else: # SUCCESS + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost + + tx = Transaction( + sender=alice, + to=caller, + gas_limit=gas_limit, + access_list=access_list, + ) + + # BAL expectations + account_expectations: Dict[Address, BalAccountExpectation | None] + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + # Target NOT in BAL - we OOG before state access + account_expectations = { + caller: BalAccountExpectation.empty(), + target: None, + } + else: # SUCCESS - target in BAL + account_expectations = { + caller: BalAccountExpectation.empty(), + target: BalAccountExpectation.empty(), + } + blockchain_test( pre=pre, - blocks=[block], - post={ - alice: Account(nonce=1), - call_contract: Account(), - }, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + ], + post={alice: Account(nonce=1)}, ) @pytest.mark.parametrize( - "fails_at_delegatecall", - [True, False], - ids=["oog_at_delegatecall", "successful_delegatecall"], + "oog_boundary", + list(OutOfGasBoundary), + ids=lambda x: x.value, +) +@pytest.mark.parametrize( + "target_is_warm", [False, True], ids=["cold_target", "warm_target"] +) +@pytest.mark.parametrize( + "delegation_is_warm", + [False, True], + ids=["cold_delegation", "warm_delegation"], ) -def test_bal_delegatecall_and_oog( +@pytest.mark.parametrize( + "memory_expansion", [False, True], ids=["no_memory", "with_memory"] +) +def test_bal_delegatecall_7702_delegation_and_oog( pre: Alloc, blockchain_test: BlockchainTestFiller, fork: Fork, - fails_at_delegatecall: bool, + oog_boundary: OutOfGasBoundary, + target_is_warm: bool, + delegation_is_warm: bool, + memory_expansion: bool, ) -> None: """ - Ensure BAL handles DELEGATECALL and OOG during DELEGATECALL - appropriately. + DELEGATECALL with 7702 delegation - test all OOG boundaries. + + When target_is_warm or delegation_is_warm, we use EIP-2930 tx access list. + Access list warming does NOT add targets to BAL - only EVM access does. + + For 7702 delegation, there's ALWAYS a gap between static gas and + second check (delegation_cost) - all 3 scenarios produce distinct + behaviors. 
""" alice = pre.fund_eoa() gas_costs = fork.gas_costs() - # Create target contract - target_contract = pre.deploy_contract(code=Bytecode(Op.STOP)) + delegation_target = pre.deploy_contract(code=Op.STOP) + target = pre.fund_eoa(amount=0, delegation=delegation_target) - # Create contract that attempts delegatecall to target - delegatecall_contract_code = Bytecode( - Op.PUSH1(0) # retSize - + Op.PUSH1(0) # retOffset - + Op.PUSH1(0) # argsSize - + Op.PUSH1(0) # argsOffset - + Op.PUSH20(target_contract) # address - + Op.PUSH2(0xFFFF) # gas (provide enough for the call) - + Op.DELEGATECALL # Delegatecall (cold account access) - + Op.STOP + # memory expansion / no expansion + ret_size = 32 if memory_expansion else 0 + ret_offset = 0 + + delegatecall_code = Op.DELEGATECALL( + gas=0, + address=target, + ret_size=ret_size, + ret_offset=ret_offset, ) - delegatecall_contract = pre.deploy_contract( - code=delegatecall_contract_code + caller = pre.deploy_contract(code=delegatecall_code) + + # Build access list for warming + access_list: list[AccessList] = [] + if target_is_warm: + access_list.append(AccessList(address=target, storage_keys=[])) + if delegation_is_warm: + access_list.append( + AccessList(address=delegation_target, storage_keys=[]) + ) + + intrinsic_cost = fork.transaction_intrinsic_cost_calculator()( + access_list=access_list ) - intrinsic_gas_calculator = fork.transaction_intrinsic_cost_calculator() - intrinsic_gas_cost = intrinsic_gas_calculator() + bytecode_cost = gas_costs.G_VERY_LOW * 6 - # Costs: - # - 6 PUSH operations = G_VERY_LOW * 6 - # - DELEGATECALL cold = G_COLD_ACCOUNT_ACCESS - push_cost = gas_costs.G_VERY_LOW * 6 - delegatecall_cold_cost = gas_costs.G_COLD_ACCOUNT_ACCESS - tx_gas_limit = intrinsic_gas_cost + push_cost + delegatecall_cold_cost - - if fails_at_delegatecall: - # subtract 1 gas to ensure OOG at DELEGATECALL - tx_gas_limit -= 1 + access_cost = ( + gas_costs.G_WARM_ACCOUNT_ACCESS + if target_is_warm + else gas_costs.G_COLD_ACCOUNT_ACCESS + ) + memory_cost = fork.memory_expansion_gas_calculator()(new_bytes=ret_size) + delegation_cost = ( + gas_costs.G_WARM_ACCOUNT_ACCESS + if delegation_is_warm + else gas_costs.G_COLD_ACCOUNT_ACCESS + ) + + static_gas_cost = access_cost + memory_cost + + # The EVM's second check cost is static_gas + delegation_cost. 
+ second_check_cost = static_gas_cost + delegation_cost + + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost - 1 + elif oog_boundary == OutOfGasBoundary.OOG_AFTER_TARGET_ACCESS: + # Enough for static_gas only - not enough for delegation_cost + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost + elif oog_boundary == OutOfGasBoundary.OOG_SUCCESS_MINUS_1: + # One less than second_check_cost - not enough for full call + gas_limit = intrinsic_cost + bytecode_cost + second_check_cost - 1 + else: + gas_limit = intrinsic_cost + bytecode_cost + second_check_cost tx = Transaction( sender=alice, - to=delegatecall_contract, - gas_limit=tx_gas_limit, + to=caller, + gas_limit=gas_limit, + access_list=access_list, ) - block = Block( - txs=[tx], - expected_block_access_list=BlockAccessListExpectation( - account_expectations={ - delegatecall_contract: BalAccountExpectation.empty(), - # Target should only appear if DELEGATECALL succeeded - **( - {target_contract: None} - if fails_at_delegatecall - else {target_contract: BalAccountExpectation.empty()} + # Access list warming does NOT add to BAL - only EVM execution does + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + target_in_bal = False + delegation_in_bal = False + elif oog_boundary in ( + OutOfGasBoundary.OOG_AFTER_TARGET_ACCESS, + OutOfGasBoundary.OOG_SUCCESS_MINUS_1, + ): + # Both cases: target accessed but not enough gas for full call + # so delegation is NOT read (static check optimization) + target_in_bal = True + delegation_in_bal = False + else: + target_in_bal = True + delegation_in_bal = True + + account_expectations: Dict[Address, BalAccountExpectation | None] = { + caller: BalAccountExpectation.empty(), + delegation_target: ( + BalAccountExpectation.empty() if delegation_in_bal else None + ), + } + + if target_in_bal: + account_expectations[target] = BalAccountExpectation.empty() + else: + account_expectations[target] = None + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations ), - } + ) + ], + post={alice: Account(nonce=1)}, + ) + + +@pytest.mark.parametrize( + "oog_boundary", + [OutOfGasBoundary.SUCCESS, OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS], + ids=lambda x: x.value, +) +@pytest.mark.parametrize( + "target_is_warm", [False, True], ids=["cold_target", "warm_target"] +) +@pytest.mark.parametrize("value", [0, 1], ids=["no_value", "with_value"]) +@pytest.mark.parametrize( + "memory_expansion", [False, True], ids=["no_memory", "with_memory"] +) +def test_bal_callcode_no_delegation_and_oog_before_target_access( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + oog_boundary: OutOfGasBoundary, + target_is_warm: bool, + value: int, + memory_expansion: bool, +) -> None: + """ + CALLCODE without 7702 delegation - test SUCCESS and OOG boundaries. + + When target_is_warm=True, we use EIP-2930 tx access list to warm the + target. Access list warming does NOT add to BAL - only EVM access does. + CALLCODE has no balance transfer to target (runs in caller's context). 
+ """ + gas_costs = fork.gas_costs() + alice = pre.fund_eoa() + + target = pre.deploy_contract(code=Op.STOP) + + ret_size = 32 if memory_expansion else 0 + + callcode_code = Op.CALLCODE( + gas=0, address=target, value=value, ret_size=ret_size, ret_offset=0 + ) + caller = pre.deploy_contract(code=callcode_code, balance=value) + + access_list = ( + [AccessList(address=target, storage_keys=[])] + if target_is_warm + else None + ) + + intrinsic_cost = fork.transaction_intrinsic_cost_calculator()( + access_list=access_list + ) + + bytecode_cost = gas_costs.G_VERY_LOW * 7 + + access_cost = ( + gas_costs.G_WARM_ACCOUNT_ACCESS + if target_is_warm + else gas_costs.G_COLD_ACCOUNT_ACCESS + ) + transfer_cost = gas_costs.G_CALL_VALUE if value > 0 else 0 + memory_cost = fork.memory_expansion_gas_calculator()(new_bytes=ret_size) + + # static gas: access + transfer + memory (== second check, no delegation) + static_gas_cost = access_cost + transfer_cost + memory_cost + + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost - 1 + else: # SUCCESS + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost + + tx = Transaction( + sender=alice, + to=caller, + gas_limit=gas_limit, + access_list=access_list, + ) + + # BAL expectations + account_expectations: Dict[Address, BalAccountExpectation | None] + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + # Target NOT in BAL - we OOG before state access + account_expectations = { + caller: BalAccountExpectation.empty(), + target: None, + } + else: # SUCCESS - target in BAL (no balance changes, CALLCODE no transfer) + account_expectations = { + caller: BalAccountExpectation.empty(), + target: BalAccountExpectation.empty(), + } + + # Post-state: CALLCODE runs in caller's context, so value transfer is + # caller-to-caller (net-zero). Caller keeps its balance regardless. + post_state: Dict[Address, Account] = { + alice: Account(nonce=1), + caller: Account(balance=value), + } + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + ], + post=post_state, + ) + + +@pytest.mark.parametrize( + "oog_boundary", + list(OutOfGasBoundary), + ids=lambda x: x.value, +) +@pytest.mark.parametrize( + "target_is_warm", [False, True], ids=["cold_target", "warm_target"] +) +@pytest.mark.parametrize( + "delegation_is_warm", + [False, True], + ids=["cold_delegation", "warm_delegation"], +) +@pytest.mark.parametrize("value", [0, 1], ids=["no_value", "with_value"]) +@pytest.mark.parametrize( + "memory_expansion", [False, True], ids=["no_memory", "with_memory"] +) +def test_bal_callcode_7702_delegation_and_oog( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + oog_boundary: OutOfGasBoundary, + target_is_warm: bool, + delegation_is_warm: bool, + value: int, + memory_expansion: bool, +) -> None: + """ + CALLCODE with 7702 delegation - test all OOG boundaries. + + When target_is_warm or delegation_is_warm, we use EIP-2930 tx access list. + Access list warming does NOT add targets to BAL - only EVM access does. + + For 7702 delegation, there's ALWAYS a gap between static gas and + second check (delegation_cost) - all 3 scenarios produce distinct + behaviors. 
+ """ + gas_costs = fork.gas_costs() + alice = pre.fund_eoa() + + delegation_target = pre.deploy_contract(code=Op.STOP) + target = pre.fund_eoa(amount=0, delegation=delegation_target) + + # memory expansion / no expansion + ret_size = 32 if memory_expansion else 0 + + callcode_code = Op.CALLCODE( + gas=0, + address=target, + value=value, + ret_size=ret_size, + ret_offset=0, + ) + caller = pre.deploy_contract(code=callcode_code, balance=value) + + # Build access list for warming + access_list: list[AccessList] = [] + if target_is_warm: + access_list.append(AccessList(address=target, storage_keys=[])) + if delegation_is_warm: + access_list.append( + AccessList(address=delegation_target, storage_keys=[]) + ) + + intrinsic_cost = fork.transaction_intrinsic_cost_calculator()( + access_list=access_list + ) + + bytecode_cost = gas_costs.G_VERY_LOW * 7 + + access_cost = ( + gas_costs.G_WARM_ACCOUNT_ACCESS + if target_is_warm + else gas_costs.G_COLD_ACCOUNT_ACCESS + ) + transfer_cost = gas_costs.G_CALL_VALUE if value > 0 else 0 + memory_cost = fork.memory_expansion_gas_calculator()(new_bytes=ret_size) + delegation_cost = ( + gas_costs.G_WARM_ACCOUNT_ACCESS + if delegation_is_warm + else gas_costs.G_COLD_ACCOUNT_ACCESS + ) + + static_gas_cost = access_cost + transfer_cost + memory_cost + + # The EVM's second check cost is static_gas + delegation_cost. + second_check_cost = static_gas_cost + delegation_cost + + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost - 1 + elif oog_boundary == OutOfGasBoundary.OOG_AFTER_TARGET_ACCESS: + # Enough for static_gas only - not enough for delegation_cost + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost + elif oog_boundary == OutOfGasBoundary.OOG_SUCCESS_MINUS_1: + # One less than second_check_cost - not enough for full call + gas_limit = intrinsic_cost + bytecode_cost + second_check_cost - 1 + else: + gas_limit = intrinsic_cost + bytecode_cost + second_check_cost + + tx = Transaction( + sender=alice, + to=caller, + gas_limit=gas_limit, + access_list=access_list, + ) + + # Access list warming does NOT add to BAL - only EVM execution does + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + target_in_bal = False + delegation_in_bal = False + elif oog_boundary in ( + OutOfGasBoundary.OOG_AFTER_TARGET_ACCESS, + OutOfGasBoundary.OOG_SUCCESS_MINUS_1, + ): + # Both cases: target accessed but not enough gas for full call + # so delegation is NOT read (static check optimization) + target_in_bal = True + delegation_in_bal = False + else: + target_in_bal = True + delegation_in_bal = True + + account_expectations: Dict[Address, BalAccountExpectation | None] = { + caller: BalAccountExpectation.empty(), + delegation_target: ( + BalAccountExpectation.empty() if delegation_in_bal else None ), + } + + if target_in_bal: + account_expectations[target] = BalAccountExpectation.empty() + else: + account_expectations[target] = None + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + ], + post={alice: Account(nonce=1)}, + ) + + +@pytest.mark.parametrize( + "oog_boundary", + [OutOfGasBoundary.SUCCESS, OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS], + ids=lambda x: x.value, +) +@pytest.mark.parametrize( + "target_is_warm", [False, True], ids=["cold_target", "warm_target"] +) +@pytest.mark.parametrize( + "memory_expansion", [False, True], ids=["no_memory", "with_memory"] 
+) +def test_bal_staticcall_no_delegation_and_oog_before_target_access( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + oog_boundary: OutOfGasBoundary, + target_is_warm: bool, + memory_expansion: bool, +) -> None: + """ + STATICCALL without 7702 delegation - test SUCCESS and OOG boundaries. + + When target_is_warm=True, we use EIP-2930 tx access list to warm the + target. Access list warming does NOT add to BAL - only EVM access does. + """ + alice = pre.fund_eoa() + gas_costs = fork.gas_costs() + + target = pre.deploy_contract(code=Op.STOP) + + ret_size = 32 if memory_expansion else 0 + ret_offset = 0 + + staticcall_code = Op.STATICCALL( + address=target, + gas=0, + ret_size=ret_size, + ret_offset=ret_offset, ) + caller = pre.deploy_contract(code=staticcall_code) + + access_list = ( + [AccessList(address=target, storage_keys=[])] + if target_is_warm + else None + ) + + intrinsic_cost = fork.transaction_intrinsic_cost_calculator()( + access_list=access_list + ) + + # 6 pushes: retSize, retOffset, argsSize, argsOffset, address, gas + bytecode_cost = gas_costs.G_VERY_LOW * 6 + + access_cost = ( + gas_costs.G_WARM_ACCOUNT_ACCESS + if target_is_warm + else gas_costs.G_COLD_ACCOUNT_ACCESS + ) + + memory_cost = fork.memory_expansion_gas_calculator()(new_bytes=ret_size) + + # static gas (before state access) == second check (no delegation cost) + static_gas_cost = access_cost + memory_cost + + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost - 1 + else: # SUCCESS + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost + + tx = Transaction( + sender=alice, + to=caller, + gas_limit=gas_limit, + access_list=access_list, + ) + + # BAL expectations + account_expectations: Dict[Address, BalAccountExpectation | None] + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + # Target NOT in BAL - we OOG before state access + account_expectations = { + caller: BalAccountExpectation.empty(), + target: None, + } + else: # SUCCESS - target in BAL + account_expectations = { + caller: BalAccountExpectation.empty(), + target: BalAccountExpectation.empty(), + } + blockchain_test( pre=pre, - blocks=[block], - post={ - alice: Account(nonce=1), - delegatecall_contract: Account(), - target_contract: Account(), - }, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + ], + post={alice: Account(nonce=1)}, + ) + + +@pytest.mark.parametrize( + "oog_boundary", + list(OutOfGasBoundary), + ids=lambda x: x.value, +) +@pytest.mark.parametrize( + "target_is_warm", [False, True], ids=["cold_target", "warm_target"] +) +@pytest.mark.parametrize( + "delegation_is_warm", + [False, True], + ids=["cold_delegation", "warm_delegation"], +) +@pytest.mark.parametrize( + "memory_expansion", [False, True], ids=["no_memory", "with_memory"] +) +def test_bal_staticcall_7702_delegation_and_oog( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + oog_boundary: OutOfGasBoundary, + target_is_warm: bool, + delegation_is_warm: bool, + memory_expansion: bool, +) -> None: + """ + STATICCALL with 7702 delegation - test all OOG boundaries. + + When target_is_warm or delegation_is_warm, we use EIP-2930 tx access list. + Access list warming does NOT add targets to BAL - only EVM access does. 
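+
+    STATICCALL forwards no value, so as with DELEGATECALL the static gas
+    here is just the account access cost plus any memory expansion.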
+ + For 7702 delegation, there's ALWAYS a gap between static gas and + second check (delegation_cost) - all 3 scenarios produce distinct + behaviors. + """ + alice = pre.fund_eoa() + gas_costs = fork.gas_costs() + + delegation_target = pre.deploy_contract(code=Op.STOP) + target = pre.fund_eoa(amount=0, delegation=delegation_target) + + # memory expansion / no expansion + ret_size = 32 if memory_expansion else 0 + ret_offset = 0 + + staticcall_code = Op.STATICCALL( + gas=0, + address=target, + ret_size=ret_size, + ret_offset=ret_offset, + ) + + caller = pre.deploy_contract(code=staticcall_code) + + # Build access list for warming + access_list: list[AccessList] = [] + if target_is_warm: + access_list.append(AccessList(address=target, storage_keys=[])) + if delegation_is_warm: + access_list.append( + AccessList(address=delegation_target, storage_keys=[]) + ) + + intrinsic_cost = fork.transaction_intrinsic_cost_calculator()( + access_list=access_list + ) + + bytecode_cost = gas_costs.G_VERY_LOW * 6 + + access_cost = ( + gas_costs.G_WARM_ACCOUNT_ACCESS + if target_is_warm + else gas_costs.G_COLD_ACCOUNT_ACCESS + ) + memory_cost = fork.memory_expansion_gas_calculator()(new_bytes=ret_size) + delegation_cost = ( + gas_costs.G_WARM_ACCOUNT_ACCESS + if delegation_is_warm + else gas_costs.G_COLD_ACCOUNT_ACCESS + ) + + static_gas_cost = access_cost + memory_cost + + # The EVM's second check cost is static_gas + delegation_cost + second_check_cost = static_gas_cost + delegation_cost + + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost - 1 + elif oog_boundary == OutOfGasBoundary.OOG_AFTER_TARGET_ACCESS: + # Enough for static_gas only - not enough for delegation_cost + gas_limit = intrinsic_cost + bytecode_cost + static_gas_cost + elif oog_boundary == OutOfGasBoundary.OOG_SUCCESS_MINUS_1: + # One less than second_check_cost - not enough for full call + gas_limit = intrinsic_cost + bytecode_cost + second_check_cost - 1 + else: + gas_limit = intrinsic_cost + bytecode_cost + second_check_cost + + tx = Transaction( + sender=alice, + to=caller, + gas_limit=gas_limit, + access_list=access_list, + ) + + # Access list warming does NOT add to BAL - only EVM execution does + if oog_boundary == OutOfGasBoundary.OOG_BEFORE_TARGET_ACCESS: + target_in_bal = False + delegation_in_bal = False + elif oog_boundary in ( + OutOfGasBoundary.OOG_AFTER_TARGET_ACCESS, + OutOfGasBoundary.OOG_SUCCESS_MINUS_1, + ): + # Both cases: target accessed but not enough gas for full call + # so delegation is NOT read (static check optimization) + target_in_bal = True + delegation_in_bal = False + else: + target_in_bal = True + delegation_in_bal = True + + account_expectations: Dict[Address, BalAccountExpectation | None] = { + caller: BalAccountExpectation.empty(), + delegation_target: ( + BalAccountExpectation.empty() if delegation_in_bal else None + ), + } + + if target_in_bal: + account_expectations[target] = BalAccountExpectation.empty() + else: + account_expectations[target] = None + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + ], + post={alice: Account(nonce=1)}, ) @@ -609,8 +1665,9 @@ def test_bal_extcodecopy_and_oog( if oog_scenario == "success": # Provide enough gas for everything including memory expansion - words = (memory_offset + copy_size + 31) // 32 - memory_cost = (words * gas_costs.G_MEMORY) + (words * words // 512) + 
memory_cost = fork.memory_expansion_gas_calculator()( + new_bytes=memory_offset + copy_size + ) execution_cost = push_cost + cold_access_cost + copy_cost + memory_cost tx_gas_limit = intrinsic_gas_cost + execution_cost target_in_bal = True @@ -626,8 +1683,9 @@ def test_bal_extcodecopy_and_oog( target_in_bal = False elif oog_scenario == "oog_at_memory_boundary": # Calculate memory cost and provide exactly 1 less than needed - words = (memory_offset + copy_size + 31) // 32 - memory_cost = (words * gas_costs.G_MEMORY) + (words * words // 512) + memory_cost = fork.memory_expansion_gas_calculator()( + new_bytes=memory_offset + copy_size + ) execution_cost = push_cost + cold_access_cost + copy_cost + memory_cost tx_gas_limit = intrinsic_gas_cost + execution_cost - 1 target_in_bal = False diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 2a318f5b63..77c298110b 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -7,8 +7,10 @@ | `test_bal_code_changes` | Ensure BAL captures changes to account code | Alice deploys factory contract that creates new contract | BAL MUST include code changes for newly deployed contract | ✅ Completed | | `test_bal_self_destruct` | Ensure BAL captures storage access and balance changes caused by `SELFDESTRUCT` | Parameterized test: Alice interacts with a contract (either existing or created same-tx) that reads from storage slot 0x01, writes to storage slot 0x02, then executes `SELFDESTRUCT` with Bob as recipient. Contract may be pre-funded with 10 wei | BAL MUST include Alice's nonce change (increment) and Bob's balance change (100 or 110 depending on pre-funding). For the self-destructing contract: storage_reads=[0x01], empty storage_changes=[], and if pre-funded, balance_changes with post_balance=0; if not pre-funded, no balance change recorded. MUST NOT have code_changes or nonce_changes entries | ✅ Completed | | `test_bal_account_access_target` | Ensure BAL captures target addresses of account access opcodes | Alice calls `Oracle` contract which uses account access opcodes (`BALANCE`, `EXTCODESIZE`, `EXTCODECOPY`, `EXTCODEHASH`, `CALL`, `CALLCODE`, `DELEGATECALL`, `STATICCALL`) on `TargetContract`. | BAL MUST include Alice, `Oracle`, and `TargetContract` with empty changes for `TargetContract` and nonce changes for Alice. | ✅ Completed | -| `test_bal_call_with_value_transfer` | Ensure BAL captures balance changes from `CALL` opcode with value transfer | Alice calls `Oracle` contract (200 wei balance) which uses `CALL` opcode to transfer 100 wei to Bob (0 wei balance). | BAL MUST include Alice (nonce changes), Oracle (balance change to 100 wei), and Bob (balance change to 100 wei). | ✅ Completed | -| `test_bal_callcode_with_value_transfer` | Ensure BAL captures balance changes from `CALLCODE` opcode with value transfer | Alice calls `Oracle` contract (200 wei balance) which uses `CALLCODE` opcode to execute `TargetContract`'s code with 100 wei value transfer to Bob (0 wei balance). | BAL MUST include Alice (nonce changes), `Oracle` (balance change to 100 wei), Bob (balance change to 100 wei), and `TargetContract` (empty changes). 
| ✅ Completed | +| `test_bal_call_no_delegation_and_oog_before_target_access` | Ensure BAL handles OOG before target access and success for non-delegated CALL | Parametrized: target warm/cold, target empty/existing, value 0/1, memory expansion, OOG boundary (before_target_access/success). | OOG: target in BAL ONLY if pre-warmed. Success: target always in BAL with balance changes when value > 0. | ✅ Completed | +| `test_bal_call_no_delegation_oog_after_target_access` | Ensure BAL includes target but excludes value transfer when OOG after target access | Hardcoded: empty target, value=1 (required for create_cost gap). Parametrized: warm/cold, memory expansion. | Target always in BAL. No balance changes (value transfer fails after G_NEW_ACCOUNT check). | ✅ Completed | +| `test_bal_call_7702_delegation_and_oog` | Ensure BAL handles OOG at all 4 boundaries for CALL to 7702 delegated accounts | Parametrized: target warm/cold, delegation warm/cold, value 0/1, memory expansion, OOG boundary (before_target_access/after_target_access/success_minus_1/success). | OOG before: neither in BAL. OOG after & success_minus_1: target in BAL, delegation NOT in BAL (static check optimization). Success: all in BAL. | ✅ Completed | +| `test_bal_callcode_nested_value_transfer` | Ensure BAL captures balance changes from nested value transfers when CALLCODE executes target code that itself makes CALL with value | Alice calls `Oracle` contract (200 wei balance) which uses `CALLCODE` to execute `TargetContract`'s code; that code makes a nested CALL transferring 100 wei to Bob. | BAL MUST include Alice (nonce changes), `Oracle` (balance change to 100 wei), Bob (balance change to 100 wei), and `TargetContract` (empty changes). | ✅ Completed | | `test_bal_delegated_storage_writes` | Ensure BAL captures delegated storage writes via `DELEGATECALL` and `CALLCODE` | Alice calls `Oracle` contract which uses `DELEGATECALL`/`CALLCODE` to `TargetContract` that writes `0x42` to slot `0x01`. | BAL MUST include Alice (nonce changes), `Oracle` (storage changes for slot `0x01` = `0x42`), and `TargetContract` (empty changes). | ✅ Completed | | `test_bal_delegated_storage_reads` | Ensure BAL captures delegated storage reads via `DELEGATECALL` and `CALLCODE` | Alice calls `Oracle` contract (with slot `0x01` = `0x42`) which uses `DELEGATECALL`/`CALLCODE` to `TargetContract` that reads from slot `0x01`. | BAL MUST include Alice (nonce changes), `Oracle` (storage reads for slot `0x01`), and `TargetContract` (empty changes). | ✅ Completed | | `test_bal_block_rewards` | BAL tracks fee recipient balance changes from block rewards | Alice sends 100 wei to Bob with Charlie as fee recipient | BAL MUST include fee recipient Charlie with `balance_changes` reflecting transaction fees collected from the block. | ✅ Completed | @@ -47,8 +49,12 @@ | `test_bal_sload_and_oog` | Ensure BAL handles OOG during SLOAD execution correctly | Alice calls contract that attempts `SLOAD` from cold slot `0x01`. Parameterized: (1) OOG at SLOAD opcode (insufficient gas), (2) Successful SLOAD execution. | For OOG case: BAL **MUST NOT** contain slot `0x01` in `storage_reads` since storage wasn't accessed. For success case: BAL **MUST** contain slot `0x01` in `storage_reads`. | ✅ Completed | | `test_bal_balance_and_oog` | Ensure BAL handles OOG during BALANCE opcode execution correctly | Alice calls contract that attempts `BALANCE` opcode on cold target account. Parameterized: (1) OOG at BALANCE opcode (insufficient gas), (2) Successful BALANCE execution. 
| For OOG case: BAL **MUST NOT** include target account (wasn't accessed). For success case: BAL **MUST** include target account in `account_changes`. | ✅ Completed | | `test_bal_extcodesize_and_oog` | Ensure BAL handles OOG during EXTCODESIZE opcode execution correctly | Alice calls contract that attempts `EXTCODESIZE` opcode on cold target contract. Parameterized: (1) OOG at EXTCODESIZE opcode (insufficient gas), (2) Successful EXTCODESIZE execution. | For OOG case: BAL **MUST NOT** include target contract (wasn't accessed). For success case: BAL **MUST** include target contract in `account_changes`. | ✅ Completed | -| `test_bal_call_and_oog` | Ensure BAL handles OOG during CALL opcode execution correctly | Alice calls contract that attempts `CALL` to cold target contract. Parameterized: (1) OOG at CALL opcode (insufficient gas), (2) Successful CALL execution. | For OOG case: BAL **MUST NOT** include target contract (wasn't accessed). For success case: BAL **MUST** include target contract in `account_changes`. | ✅ Completed | -| `test_bal_delegatecall_and_oog` | Ensure BAL handles OOG during DELEGATECALL opcode execution correctly | Alice calls contract that attempts `DELEGATECALL` to cold target contract. Parameterized: (1) OOG at DELEGATECALL opcode (insufficient gas), (2) Successful DELEGATECALL execution. | For OOG case: BAL **MUST NOT** include target contract (wasn't accessed). For success case: BAL **MUST** include target contract in `account_changes`. | ✅ Completed | +| `test_bal_delegatecall_no_delegation_and_oog_before_target_access` | Ensure BAL handles OOG before target access and success for non-delegated DELEGATECALL | Parametrized: target warm/cold, memory expansion, OOG boundary (before_target_access/success). | OOG: target in BAL ONLY if pre-warmed. Success: target always in BAL. | ✅ Completed | +| `test_bal_delegatecall_7702_delegation_and_oog` | Ensure BAL handles OOG at all 4 boundaries for DELEGATECALL to 7702 delegated accounts | Parametrized: target warm/cold, delegation warm/cold, memory expansion, OOG boundary (before_target_access/after_target_access/success_minus_1/success). | OOG before: neither in BAL. OOG after & success_minus_1: target in BAL, delegation NOT in BAL (static check optimization). Success: all in BAL. | ✅ Completed | +| `test_bal_callcode_no_delegation_and_oog_before_target_access` | Ensure BAL handles OOG before target access and success for non-delegated CALLCODE | Parametrized: target warm/cold, value 0/1, memory expansion, OOG boundary (before_target_access/success). | OOG: target in BAL ONLY if pre-warmed. Success: target always in BAL. | ✅ Completed | +| `test_bal_callcode_7702_delegation_and_oog` | Ensure BAL handles OOG at all 4 boundaries for CALLCODE to 7702 delegated accounts | Parametrized: target warm/cold, delegation warm/cold, value 0/1, memory expansion, OOG boundary (before_target_access/after_target_access/success_minus_1/success). | OOG before: neither in BAL. OOG after & success_minus_1: target in BAL, delegation NOT in BAL (static check optimization). Success: all in BAL. | ✅ Completed | +| `test_bal_staticcall_no_delegation_and_oog_before_target_access` | Ensure BAL handles OOG before target access and success for non-delegated STATICCALL | Parametrized: target warm/cold, memory expansion, OOG boundary (before_target_access/success). | OOG: target in BAL ONLY if pre-warmed. Success: target always in BAL. 
| ✅ Completed | +| `test_bal_staticcall_7702_delegation_and_oog` | Ensure BAL handles OOG at all 4 boundaries for STATICCALL to 7702 delegated accounts | Parametrized: target warm/cold, delegation warm/cold, memory expansion, OOG boundary (before_target_access/after_target_access/success_minus_1/success). | OOG before: neither in BAL. OOG after & success_minus_1: target in BAL, delegation NOT in BAL (static check optimization). Success: all in BAL. | ✅ Completed | | `test_bal_extcodecopy_and_oog` | Ensure BAL handles OOG during EXTCODECOPY at various failure points | Alice calls contract that attempts `EXTCODECOPY` from cold target contract. Parameterized: (1) Successful EXTCODECOPY, (2) OOG at cold access (insufficient gas for account access), (3) OOG at memory expansion with large offset (64KB offset, gas covers cold access + copy but NOT memory expansion), (4) OOG at memory expansion boundary (256 byte offset, gas is exactly 1 less than needed). | For success case: BAL **MUST** include target contract. For all OOG cases: BAL **MUST NOT** include target contract. Gas for ALL components (cold access + copy + memory expansion) must be checked BEFORE recording account access. | ✅ Completed | | `test_bal_oog_7702_delegated_cold_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when both accounts are cold | Alice calls cold delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (first cold load succeeds) but **MUST NOT** include `TargetContract` (second cold load fails due to OOG) | 🟡 Planned | | `test_bal_oog_7702_delegated_warm_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when first account is warm, second is cold | Alice calls warm delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (warm load succeeds) but **MUST NOT** include `TargetContract` (cold load fails due to OOG) | 🟡 Planned | diff --git a/whitelist.txt b/whitelist.txt index 0d4f493ffc..171afdf61b 100644 --- a/whitelist.txt +++ b/whitelist.txt @@ -1293,6 +1293,7 @@ VRS vscode vv +warmup Watcherfall wd wds From 28ef367bbd4e1c1ffc18864d712c36e5961bab86 Mon Sep 17 00:00:00 2001 From: felipe Date: Fri, 12 Dec 2025 12:20:44 -0700 Subject: [PATCH 053/154] feat(spec): update eip7928 to latest rlp specs wrt storage; rename tx_index (#1912) * feat(spec-test): Change storage RLP encoding to U256; tx_index rename * fix: unit tests exception message --- .../account_absent_values.py | 21 +- .../block_access_list/account_changes.py | 23 +- .../block_access_list/expectations.py | 23 +- .../test_types/block_access_list/modifiers.py | 73 +++-- .../test_types/block_access_list/t8n.py | 30 +- .../test_block_access_list_expectation.py | 190 ++++++++---- .../tests/test_block_access_list_t8n.py | 107 ++++--- .../amsterdam/block_access_lists/builder.py | 19 +- .../amsterdam/block_access_lists/rlp_types.py | 6 +- .../evm_tools/t8n/t8n_types.py | 17 +- .../test_block_access_lists.py | 286 +++++++++++++----- .../test_block_access_lists_cross_index.py | 24 +- .../test_block_access_lists_eip4895.py | 102 +++++-- .../test_block_access_lists_eip7702.py | 140 ++++++--- .../test_block_access_lists_invalid.py | 88 ++++-- .../test_block_access_lists_opcodes.py | 121 +++++--- .../test_cases.md | 12 +- .../test_create_oog_from_eoa_refunds.py | 12 +- .../test_selfdestruct_revert.py | 16 +- 
.../test_call_and_callcode_gas_calculation.py | 14 +- tests/prague/eip7702_set_code_tx/test_gas.py | 4 +- 21 files changed, 875 insertions(+), 453 deletions(-) diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/account_absent_values.py b/packages/testing/src/execution_testing/test_types/block_access_list/account_absent_values.py index 735a56e036..aca89076ac 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/account_absent_values.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/account_absent_values.py @@ -52,14 +52,14 @@ class BalAccountAbsentValues(CamelModel): absent_values = BalAccountAbsentValues( nonce_changes=[ # Forbid exact nonce change at this tx - BalNonceChange(tx_index=1, post_nonce=5), + BalNonceChange(block_access_index=1, post_nonce=5), ], storage_changes=[ BalStorageSlot( slot=0x42, slot_changes=[ # Forbid exact storage change at this slot and tx - BalStorageChange(tx_index=2, post_value=0x99) + BalStorageChange(block_access_index=2, post_value=0x99) ], ) ], @@ -171,22 +171,23 @@ def validate_against(self, account: BalAccountChange) -> None: self._validate_forbidden_changes( account.nonce_changes, self.nonce_changes, - lambda a, f: a.tx_index == f.tx_index + lambda a, f: a.block_access_index == f.block_access_index and a.post_nonce == f.post_nonce, - lambda a: f"Unexpected nonce change found at tx {a.tx_index}", + lambda a: f"Unexpected nonce change found at tx {a.block_access_index}", ) self._validate_forbidden_changes( account.balance_changes, self.balance_changes, - lambda a, f: a.tx_index == f.tx_index + lambda a, f: a.block_access_index == f.block_access_index and a.post_balance == f.post_balance, - lambda a: f"Unexpected balance change found at tx {a.tx_index}", + lambda a: f"Unexpected balance change found at tx {a.block_access_index}", ) self._validate_forbidden_changes( account.code_changes, self.code_changes, - lambda a, f: a.tx_index == f.tx_index and a.new_code == f.new_code, - lambda a: f"Unexpected code change found at tx {a.tx_index}", + lambda a, f: a.block_access_index == f.block_access_index + and a.new_code == f.new_code, + lambda a: f"Unexpected code change found at tx {a.block_access_index}", ) for forbidden_storage_slot in self.storage_changes: @@ -197,11 +198,11 @@ def validate_against(self, account: BalAccountChange) -> None: actual_storage_slot.slot_changes, forbidden_storage_slot.slot_changes, lambda a, f: ( - a.tx_index == f.tx_index + a.block_access_index == f.block_access_index and a.post_value == f.post_value ), lambda a, slot=slot_id: ( - f"Unexpected storage change found at slot {slot} in tx {a.tx_index}" + f"Unexpected storage change found at slot {slot} in tx {a.block_access_index}" ), ) diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/account_changes.py b/packages/testing/src/execution_testing/test_types/block_access_list/account_changes.py index 330731fd10..4794c77d69 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/account_changes.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/account_changes.py @@ -15,14 +15,13 @@ CamelModel, HexNumber, RLPSerializable, - StorageKey, ) class BalNonceChange(CamelModel, RLPSerializable): """Represents a nonce change in the block access list.""" - tx_index: HexNumber = Field( + block_access_index: HexNumber = Field( HexNumber(1), description="Transaction index where the change occurred", ) @@ -30,13 +29,13 @@ class 
BalNonceChange(CamelModel, RLPSerializable): ..., description="Nonce value after the transaction" ) - rlp_fields: ClassVar[List[str]] = ["tx_index", "post_nonce"] + rlp_fields: ClassVar[List[str]] = ["block_access_index", "post_nonce"] class BalBalanceChange(CamelModel, RLPSerializable): """Represents a balance change in the block access list.""" - tx_index: HexNumber = Field( + block_access_index: HexNumber = Field( HexNumber(1), description="Transaction index where the change occurred", ) @@ -44,39 +43,39 @@ class BalBalanceChange(CamelModel, RLPSerializable): ..., description="Balance after the transaction" ) - rlp_fields: ClassVar[List[str]] = ["tx_index", "post_balance"] + rlp_fields: ClassVar[List[str]] = ["block_access_index", "post_balance"] class BalCodeChange(CamelModel, RLPSerializable): """Represents a code change in the block access list.""" - tx_index: HexNumber = Field( + block_access_index: HexNumber = Field( HexNumber(1), description="Transaction index where the change occurred", ) new_code: Bytes = Field(..., description="New code bytes") - rlp_fields: ClassVar[List[str]] = ["tx_index", "new_code"] + rlp_fields: ClassVar[List[str]] = ["block_access_index", "new_code"] class BalStorageChange(CamelModel, RLPSerializable): """Represents a change to a specific storage slot.""" - tx_index: HexNumber = Field( + block_access_index: HexNumber = Field( HexNumber(1), description="Transaction index where the change occurred", ) - post_value: StorageKey = Field( + post_value: HexNumber = Field( ..., description="Value after the transaction" ) - rlp_fields: ClassVar[List[str]] = ["tx_index", "post_value"] + rlp_fields: ClassVar[List[str]] = ["block_access_index", "post_value"] class BalStorageSlot(CamelModel, RLPSerializable): """Represents all changes to a specific storage slot.""" - slot: StorageKey = Field(..., description="Storage slot key") + slot: HexNumber = Field(..., description="Storage slot key") slot_changes: List[BalStorageChange] = Field( default_factory=list, description="List of changes to this slot" ) @@ -100,7 +99,7 @@ class BalAccountChange(CamelModel, RLPSerializable): storage_changes: List[BalStorageSlot] = Field( default_factory=list, description="List of storage changes" ) - storage_reads: List[StorageKey] = Field( + storage_reads: List[HexNumber] = Field( default_factory=list, description="List of storage slots that were read", ) diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/expectations.py b/packages/testing/src/execution_testing/test_types/block_access_list/expectations.py index 9030471549..c700eec77b 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/expectations.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/expectations.py @@ -108,7 +108,7 @@ class BlockAccessListExpectation(CamelModel): expected_block_access_list = BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)] + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)] ), bob: None, # Bob should NOT be in the BAL } @@ -322,8 +322,8 @@ def _compare_account_expectations( slot_actual_idx ] if ( - actual_change.tx_index - == expected_change.tx_index + actual_change.block_access_index + == expected_change.block_access_index and actual_change.post_value == expected_change.post_value ): @@ -357,27 +357,32 @@ def _compare_account_expectations( # Create tuples for comparison (ordering already validated) if field_name == 
"nonce_changes": expected_tuples = [ - (c.tx_index, c.post_nonce) for c in expected_list + (c.block_access_index, c.post_nonce) + for c in expected_list ] actual_tuples = [ - (c.tx_index, c.post_nonce) for c in actual_list + (c.block_access_index, c.post_nonce) + for c in actual_list ] item_type = "nonce" elif field_name == "balance_changes": expected_tuples = [ - (c.tx_index, int(c.post_balance)) + (c.block_access_index, int(c.post_balance)) for c in expected_list ] actual_tuples = [ - (c.tx_index, int(c.post_balance)) for c in actual_list + (c.block_access_index, int(c.post_balance)) + for c in actual_list ] item_type = "balance" elif field_name == "code_changes": expected_tuples = [ - (c.tx_index, bytes(c.new_code)) for c in expected_list + (c.block_access_index, bytes(c.new_code)) + for c in expected_list ] actual_tuples = [ - (c.tx_index, bytes(c.new_code)) for c in actual_list + (c.block_access_index, bytes(c.new_code)) + for c in actual_list ] item_type = "code" else: diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py b/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py index 1763970631..d28f7099ba 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py @@ -54,7 +54,7 @@ def transform(bal: BlockAccessList) -> BlockAccessList: def _modify_field_value( address: Address, - tx_index: int, + block_access_index: int, field_name: str, change_class: type, new_value: Any, @@ -85,9 +85,12 @@ def transform(bal: BlockAccessList) -> BlockAccessList: for j, change in enumerate( storage_slot.slot_changes ): - if change.tx_index == tx_index: + if ( + change.block_access_index + == block_access_index + ): kwargs = { - "tx_index": tx_index, + "block_access_index": block_access_index, value_field: new_value, } storage_slot.slot_changes[j] = ( @@ -98,9 +101,9 @@ def transform(bal: BlockAccessList) -> BlockAccessList: else: # flat structure (nonce, balance, code) for i, change in enumerate(changes): - if change.tx_index == tx_index: + if change.block_access_index == block_access_index: kwargs = { - "tx_index": tx_index, + "block_access_index": block_access_index, value_field: new_value, } changes[i] = change_class(**kwargs) @@ -172,23 +175,28 @@ def remove_code( def modify_nonce( - address: Address, tx_index: int, nonce: int + address: Address, block_access_index: int, nonce: int ) -> Callable[[BlockAccessList], BlockAccessList]: """Set an incorrect nonce value for a specific account and transaction.""" return _modify_field_value( - address, tx_index, "nonce_changes", BalNonceChange, nonce, "post_nonce" + address, + block_access_index, + "nonce_changes", + BalNonceChange, + nonce, + "post_nonce", ) def modify_balance( - address: Address, tx_index: int, balance: int + address: Address, block_access_index: int, balance: int ) -> Callable[[BlockAccessList], BlockAccessList]: """ Set an incorrect balance value for a specific account and transaction. 
""" return _modify_field_value( address, - tx_index, + block_access_index, "balance_changes", BalBalanceChange, balance, @@ -197,7 +205,7 @@ def modify_balance( def modify_storage( - address: Address, tx_index: int, slot: int, value: int + address: Address, block_access_index: int, slot: int, value: int ) -> Callable[[BlockAccessList], BlockAccessList]: """ Set an incorrect storage value for a specific account, transaction, and @@ -205,7 +213,7 @@ def modify_storage( """ return _modify_field_value( address, - tx_index, + block_access_index, "storage_changes", BalStorageChange, value, @@ -216,11 +224,16 @@ def modify_storage( def modify_code( - address: Address, tx_index: int, code: bytes + address: Address, block_access_index: int, code: bytes ) -> Callable[[BlockAccessList], BlockAccessList]: """Set an incorrect code value for a specific account and transaction.""" return _modify_field_value( - address, tx_index, "code_changes", BalCodeChange, code, "post_code" + address, + block_access_index, + "code_changes", + BalCodeChange, + code, + "post_code", ) @@ -242,46 +255,46 @@ def transform(bal: BlockAccessList) -> BlockAccessList: # Swap in nonce changes if new_account.nonce_changes: for nonce_change in new_account.nonce_changes: - if nonce_change.tx_index == tx1: + if nonce_change.block_access_index == tx1: nonce_indices[tx1] = True - nonce_change.tx_index = HexNumber(tx2) - elif nonce_change.tx_index == tx2: + nonce_change.block_access_index = HexNumber(tx2) + elif nonce_change.block_access_index == tx2: nonce_indices[tx2] = True - nonce_change.tx_index = HexNumber(tx1) + nonce_change.block_access_index = HexNumber(tx1) # Swap in balance changes if new_account.balance_changes: for balance_change in new_account.balance_changes: - if balance_change.tx_index == tx1: + if balance_change.block_access_index == tx1: balance_indices[tx1] = True - balance_change.tx_index = HexNumber(tx2) - elif balance_change.tx_index == tx2: + balance_change.block_access_index = HexNumber(tx2) + elif balance_change.block_access_index == tx2: balance_indices[tx2] = True - balance_change.tx_index = HexNumber(tx1) + balance_change.block_access_index = HexNumber(tx1) # Swap in storage changes (nested structure) if new_account.storage_changes: for storage_slot in new_account.storage_changes: for storage_change in storage_slot.slot_changes: - if storage_change.tx_index == tx1: + if storage_change.block_access_index == tx1: balance_indices[tx1] = True - storage_change.tx_index = HexNumber(tx2) - elif storage_change.tx_index == tx2: + storage_change.block_access_index = HexNumber(tx2) + elif storage_change.block_access_index == tx2: balance_indices[tx2] = True - storage_change.tx_index = HexNumber(tx1) + storage_change.block_access_index = HexNumber(tx1) - # Note: storage_reads is just a list of StorageKey, no tx_index to + # Note: storage_reads is just a list of StorageKey, no block_access_index to # swap # Swap in code changes if new_account.code_changes: for code_change in new_account.code_changes: - if code_change.tx_index == tx1: + if code_change.block_access_index == tx1: code_indices[tx1] = True - code_change.tx_index = HexNumber(tx2) - elif code_change.tx_index == tx2: + code_change.block_access_index = HexNumber(tx2) + elif code_change.block_access_index == tx2: code_indices[tx2] = True - code_change.tx_index = HexNumber(tx1) + code_change.block_access_index = HexNumber(tx1) new_root.append(new_account) diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/t8n.py 
b/packages/testing/src/execution_testing/test_types/block_access_list/t8n.py index 03b8224bbf..19733a240f 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/t8n.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/t8n.py @@ -84,21 +84,21 @@ def validate_structure(self) -> None: if not change_list: continue - tx_indices = [c.tx_index for c in change_list] + bal_indices = [c.block_access_index for c in change_list] # Check both ordering and duplicates - if tx_indices != sorted(tx_indices): + if bal_indices != sorted(bal_indices): raise BlockAccessListValidationError( - f"Transaction indices not in ascending order in {field_name} of account " - f"{account.address}. Got: {tx_indices}, Expected: {sorted(tx_indices)}" + f"Block access indices not in ascending order in {field_name} of account " + f"{account.address}. Got: {bal_indices}, Expected: {sorted(bal_indices)}" ) - if len(tx_indices) != len(set(tx_indices)): + if len(bal_indices) != len(set(bal_indices)): duplicates = sorted( { idx - for idx in tx_indices - if tx_indices.count(idx) > 1 + for idx in bal_indices + if bal_indices.count(idx) > 1 } ) raise BlockAccessListValidationError( @@ -118,27 +118,29 @@ def validate_structure(self) -> None: f"{account.storage_changes[i].slot}" ) - # Check transaction index ordering and uniqueness within storage slots + # Check bal index ordering and uniqueness within storage slots for storage_slot in account.storage_changes: if not storage_slot.slot_changes: continue - tx_indices = [c.tx_index for c in storage_slot.slot_changes] + bal_indices = [ + c.block_access_index for c in storage_slot.slot_changes + ] # Check both ordering and duplicates - if tx_indices != sorted(tx_indices): + if bal_indices != sorted(bal_indices): raise BlockAccessListValidationError( f"Transaction indices not in ascending order in storage slot " f"{storage_slot.slot} of account {account.address}. 
" - f"Got: {tx_indices}, Expected: {sorted(tx_indices)}" + f"Got: {bal_indices}, Expected: {sorted(bal_indices)}" ) - if len(tx_indices) != len(set(tx_indices)): + if len(bal_indices) != len(set(bal_indices)): duplicates = sorted( { idx - for idx in tx_indices - if tx_indices.count(idx) > 1 + for idx in bal_indices + if bal_indices.count(idx) > 1 } ) raise BlockAccessListValidationError( diff --git a/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_expectation.py b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_expectation.py index 899d9647e4..f8b16d4474 100644 --- a/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_expectation.py +++ b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_expectation.py @@ -29,7 +29,9 @@ def test_address_exclusion_validation_passes() -> None: [ BalAccountChange( address=alice, - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), ] ) @@ -37,7 +39,9 @@ def test_address_exclusion_validation_passes() -> None: expectation = BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)] + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ] ), bob: None, # expect Bob is not in BAL (correctly) } @@ -55,12 +59,14 @@ def test_address_exclusion_validation_raises_when_address_is_present() -> None: [ BalAccountChange( address=alice, - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), BalAccountChange( address=bob, balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=100) + BalBalanceChange(block_access_index=1, post_balance=100) ], ), ] @@ -103,7 +109,9 @@ def test_empty_account_changes_definitions( [ BalAccountChange( address=alice, - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), ] ) @@ -153,14 +161,22 @@ def test_empty_list_validation() -> None: @pytest.mark.parametrize( "field,value", [ - ["nonce_changes", BalNonceChange(tx_index=1, post_nonce=1)], - ["balance_changes", BalBalanceChange(tx_index=1, post_balance=100)], - ["code_changes", BalCodeChange(tx_index=1, new_code=b"code")], + ["nonce_changes", BalNonceChange(block_access_index=1, post_nonce=1)], + [ + "balance_changes", + BalBalanceChange(block_access_index=1, post_balance=100), + ], + [ + "code_changes", + BalCodeChange(block_access_index=1, new_code=b"code"), + ], [ "storage_changes", BalStorageSlot( slot=0x01, - slot_changes=[BalStorageChange(tx_index=1, post_value=0x42)], + slot_changes=[ + BalStorageChange(block_access_index=1, post_value=0x42) + ], ), ], ["storage_reads", 0x01], @@ -179,7 +195,7 @@ def test_empty_list_validation_fails(field: str, value: Any) -> None: alice_acct_change.storage_reads = [value] # set another field to non-empty to avoid all-empty account change alice_acct_change.nonce_changes = [ - BalNonceChange(tx_index=1, post_nonce=1) + BalNonceChange(block_access_index=1, post_nonce=1) ] else: @@ -194,7 +210,7 @@ def test_empty_list_validation_fails(field: str, value: Any) -> None: # match the filled field in actual to avoid all-empty # account expectation alice_acct_expectation.nonce_changes = [ - BalNonceChange(tx_index=1, post_nonce=1) + BalNonceChange(block_access_index=1, post_nonce=1) ] else: 
setattr(alice_acct_expectation, field, []) @@ -219,9 +235,11 @@ def test_partial_validation() -> None: [ BalAccountChange( address=alice, - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=100) + BalBalanceChange(block_access_index=1, post_balance=100) ], storage_reads=[0x01, 0x02], ), @@ -232,7 +250,9 @@ def test_partial_validation() -> None: expectation = BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], # balance_changes and storage_reads not set and won't be # validated ), @@ -255,7 +275,9 @@ def test_storage_changes_validation() -> None: BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x42) + BalStorageChange( + block_access_index=1, post_value=0x42 + ) ], ) ], @@ -271,7 +293,9 @@ def test_storage_changes_validation() -> None: BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x42) + BalStorageChange( + block_access_index=1, post_value=0x42 + ) ], ) ], @@ -291,7 +315,9 @@ def test_missing_expected_address() -> None: [ BalAccountChange( address=alice, - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), ] ) @@ -300,7 +326,9 @@ def test_missing_expected_address() -> None: account_expectations={ # wrongly expect Bob to be present bob: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), } ) @@ -474,9 +502,9 @@ def test_expected_tx_indices_ordering( BalAccountChange( address=addr, nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1), - BalNonceChange(tx_index=2, post_nonce=2), - BalNonceChange(tx_index=3, post_nonce=3), + BalNonceChange(block_access_index=1, post_nonce=1), + BalNonceChange(block_access_index=2, post_nonce=2), + BalNonceChange(block_access_index=3, post_nonce=3), ], ) ] @@ -486,7 +514,7 @@ def test_expected_tx_indices_ordering( account_expectations={ addr: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=idx, post_nonce=idx) + BalNonceChange(block_access_index=idx, post_nonce=idx) for idx in expected_tx_indices ], ), @@ -508,10 +536,12 @@ def test_absent_values_nonce_changes(has_change_should_raise: bool) -> None: """Test nonce_changes_at_tx validator with present/absent changes.""" alice = Address(0xA) - nonce_changes = [BalNonceChange(tx_index=1, post_nonce=1)] + nonce_changes = [BalNonceChange(block_access_index=1, post_nonce=1)] if has_change_should_raise: # add nonce change at tx 2 which should trigger failure - nonce_changes.append(BalNonceChange(tx_index=2, post_nonce=2)) + nonce_changes.append( + BalNonceChange(block_access_index=2, post_nonce=2) + ) actual_bal = BlockAccessList( [ @@ -527,7 +557,9 @@ def test_absent_values_nonce_changes(has_change_should_raise: bool) -> None: # no nonce changes at tx 2 alice: BalAccountExpectation( absent_values=BalAccountAbsentValues( - nonce_changes=[BalNonceChange(tx_index=2, post_nonce=2)] + nonce_changes=[ + BalNonceChange(block_access_index=2, post_nonce=2) + ] ) ) } @@ -547,10 +579,14 @@ def test_absent_values_balance_changes(has_change_should_raise: bool) -> None: """Test balance_changes_at_tx validator with present/absent 
changes.""" alice = Address(0xA) - balance_changes = [BalBalanceChange(tx_index=1, post_balance=100)] + balance_changes = [ + BalBalanceChange(block_access_index=1, post_balance=100) + ] if has_change_should_raise: # add balance change at tx 2 which should trigger failure - balance_changes.append(BalBalanceChange(tx_index=2, post_balance=200)) + balance_changes.append( + BalBalanceChange(block_access_index=2, post_balance=200) + ) actual_bal = BlockAccessList( [ @@ -566,7 +602,9 @@ def test_absent_values_balance_changes(has_change_should_raise: bool) -> None: alice: BalAccountExpectation( absent_values=BalAccountAbsentValues( balance_changes=[ - BalBalanceChange(tx_index=2, post_balance=200) + BalBalanceChange( + block_access_index=2, post_balance=200 + ) ] ) ), @@ -591,14 +629,18 @@ def test_absent_values_storage_changes(has_change_should_raise: bool) -> None: storage_changes = [ BalStorageSlot( slot=0x01, - slot_changes=[BalStorageChange(tx_index=1, post_value=0x99)], + slot_changes=[ + BalStorageChange(block_access_index=1, post_value=0x99) + ], ) ] if has_change_should_raise: storage_changes.append( BalStorageSlot( slot=0x42, - slot_changes=[BalStorageChange(tx_index=1, post_value=0xBEEF)], + slot_changes=[ + BalStorageChange(block_access_index=1, post_value=0xBEEF) + ], ) ) @@ -620,7 +662,9 @@ def test_absent_values_storage_changes(has_change_should_raise: bool) -> None: BalStorageSlot( slot=0x42, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0xBEEF) + BalStorageChange( + block_access_index=1, post_value=0xBEEF + ) ], ) ] @@ -682,10 +726,12 @@ def test_absent_values_code_changes(has_change_should_raise: bool) -> None: """Test code_changes_at_tx validator with present/absent changes.""" alice = Address(0xA) - code_changes = [BalCodeChange(tx_index=1, new_code=b"\x00")] + code_changes = [BalCodeChange(block_access_index=1, new_code=b"\x00")] if has_change_should_raise: # add code change at tx 2 which should trigger failure - code_changes.append(BalCodeChange(tx_index=2, new_code=b"\x60\x00")) + code_changes.append( + BalCodeChange(block_access_index=2, new_code=b"\x60\x00") + ) actual_bal = BlockAccessList( [ @@ -702,7 +748,9 @@ def test_absent_values_code_changes(has_change_should_raise: bool) -> None: alice: BalAccountExpectation( absent_values=BalAccountAbsentValues( code_changes=[ - BalCodeChange(tx_index=2, new_code=b"\x60\x00") + BalCodeChange( + block_access_index=2, new_code=b"\x60\x00" + ) ] ) ), @@ -732,7 +780,9 @@ def test_multiple_absent_valuess() -> None: BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x99) + BalStorageChange( + block_access_index=1, post_value=0x99 + ) ], ) ], @@ -750,37 +800,43 @@ def test_multiple_absent_valuess() -> None: BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x99) + BalStorageChange( + block_access_index=1, post_value=0x99 + ) ], ) ], absent_values=BalAccountAbsentValues( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=0), - BalNonceChange(tx_index=2, post_nonce=0), + BalNonceChange(block_access_index=1, post_nonce=0), + BalNonceChange(block_access_index=2, post_nonce=0), ], balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=0), - BalBalanceChange(tx_index=2, post_balance=0), + BalBalanceChange(block_access_index=1, post_balance=0), + BalBalanceChange(block_access_index=2, post_balance=0), ], storage_changes=[ BalStorageSlot( slot=0x42, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0) + BalStorageChange( + block_access_index=1, 
post_value=0 + ) ], ), BalStorageSlot( slot=0x43, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0) + BalStorageChange( + block_access_index=1, post_value=0 + ) ], ), ], storage_reads=[StorageKey(0x42), StorageKey(0x43)], code_changes=[ - BalCodeChange(tx_index=1, new_code=b""), - BalCodeChange(tx_index=2, new_code=b""), + BalCodeChange(block_access_index=1, new_code=b""), + BalCodeChange(block_access_index=2, new_code=b""), ], ), ), @@ -800,8 +856,8 @@ def test_absent_values_with_multiple_tx_indices() -> None: address=alice, nonce_changes=[ # nonce changes at tx 1 and 3 - BalNonceChange(tx_index=1, post_nonce=1), - BalNonceChange(tx_index=3, post_nonce=2), + BalNonceChange(block_access_index=1, post_nonce=1), + BalNonceChange(block_access_index=3, post_nonce=2), ], ), ] @@ -811,13 +867,13 @@ def test_absent_values_with_multiple_tx_indices() -> None: account_expectations={ alice: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1), - BalNonceChange(tx_index=3, post_nonce=2), + BalNonceChange(block_access_index=1, post_nonce=1), + BalNonceChange(block_access_index=3, post_nonce=2), ], absent_values=BalAccountAbsentValues( nonce_changes=[ - BalNonceChange(tx_index=2, post_nonce=0), - BalNonceChange(tx_index=4, post_nonce=0), + BalNonceChange(block_access_index=2, post_nonce=0), + BalNonceChange(block_access_index=4, post_nonce=0), ] ), ), @@ -833,8 +889,8 @@ def test_absent_values_with_multiple_tx_indices() -> None: nonce_changes=[ # wrongly forbid change at txs 1 and 2 # (1 exists, so should fail) - BalNonceChange(tx_index=1, post_nonce=1), - BalNonceChange(tx_index=2, post_nonce=0), + BalNonceChange(block_access_index=1, post_nonce=1), + BalNonceChange(block_access_index=2, post_nonce=0), ] ), ), @@ -856,7 +912,9 @@ def test_bal_account_absent_values_comprehensive() -> None: [ BalAccountChange( address=addr, - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ) ] ) @@ -865,7 +923,9 @@ def test_bal_account_absent_values_comprehensive() -> None: account_expectations={ addr: BalAccountExpectation( absent_values=BalAccountAbsentValues( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)] + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ] ) ), } @@ -883,7 +943,7 @@ def test_bal_account_absent_values_comprehensive() -> None: BalAccountChange( address=addr, balance_changes=[ - BalBalanceChange(tx_index=2, post_balance=100) + BalBalanceChange(block_access_index=2, post_balance=100) ], ) ] @@ -894,7 +954,9 @@ def test_bal_account_absent_values_comprehensive() -> None: addr: BalAccountExpectation( absent_values=BalAccountAbsentValues( balance_changes=[ - BalBalanceChange(tx_index=2, post_balance=100) + BalBalanceChange( + block_access_index=2, post_balance=100 + ) ] ) ), @@ -912,7 +974,9 @@ def test_bal_account_absent_values_comprehensive() -> None: [ BalAccountChange( address=addr, - code_changes=[BalCodeChange(tx_index=3, new_code=b"\x60\x00")], + code_changes=[ + BalCodeChange(block_access_index=3, new_code=b"\x60\x00") + ], ) ] ) @@ -922,7 +986,9 @@ def test_bal_account_absent_values_comprehensive() -> None: addr: BalAccountExpectation( absent_values=BalAccountAbsentValues( code_changes=[ - BalCodeChange(tx_index=3, new_code=b"\x60\x00") + BalCodeChange( + block_access_index=3, new_code=b"\x60\x00" + ) ] ) ), @@ -965,7 +1031,9 @@ def test_bal_account_absent_values_comprehensive() -> None: BalStorageSlot( slot=0x01, slot_changes=[ - 
BalStorageChange(tx_index=1, post_value=99) + BalStorageChange( + block_access_index=1, post_value=99 + ) ], ) ], @@ -981,7 +1049,9 @@ def test_bal_account_absent_values_comprehensive() -> None: BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=1, post_value=99) + BalStorageChange( + block_access_index=1, post_value=99 + ) ], ) ] diff --git a/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py index 3c884cf2f4..c33cf8a2c7 100644 --- a/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py +++ b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py @@ -127,7 +127,7 @@ def test_bal_storage_reads_ordering() -> None: "field_name", ["nonce_changes", "balance_changes", "code_changes"], ) -def test_bal_tx_indices_ordering(field_name: str) -> None: +def test_bal_block_access_indices_ordering(field_name: str) -> None: """ Test that transaction indices must be in ascending order within change lists. """ @@ -138,51 +138,63 @@ def test_bal_tx_indices_ordering(field_name: str) -> None: Union[BalNonceChange, BalBalanceChange, BalCodeChange] ] - # Correct order: tx_index 1, 2, 3 + # Correct order: block_access_index 1, 2, 3 if field_name == "nonce_changes": changes_valid = [ - BalNonceChange(tx_index=HexNumber(1), post_nonce=HexNumber(1)), - BalNonceChange(tx_index=HexNumber(2), post_nonce=HexNumber(2)), - BalNonceChange(tx_index=HexNumber(3), post_nonce=HexNumber(3)), + BalNonceChange( + block_access_index=HexNumber(1), post_nonce=HexNumber(1) + ), + BalNonceChange( + block_access_index=HexNumber(2), post_nonce=HexNumber(2) + ), + BalNonceChange( + block_access_index=HexNumber(3), post_nonce=HexNumber(3) + ), ] changes_invalid = [ - BalNonceChange(tx_index=HexNumber(1), post_nonce=HexNumber(1)), - BalNonceChange(tx_index=HexNumber(3), post_nonce=HexNumber(3)), - BalNonceChange(tx_index=HexNumber(2), post_nonce=HexNumber(2)), + BalNonceChange( + block_access_index=HexNumber(1), post_nonce=HexNumber(1) + ), + BalNonceChange( + block_access_index=HexNumber(3), post_nonce=HexNumber(3) + ), + BalNonceChange( + block_access_index=HexNumber(2), post_nonce=HexNumber(2) + ), ] elif field_name == "balance_changes": changes_valid = [ BalBalanceChange( - tx_index=HexNumber(1), post_balance=HexNumber(100) + block_access_index=HexNumber(1), post_balance=HexNumber(100) ), BalBalanceChange( - tx_index=HexNumber(2), post_balance=HexNumber(200) + block_access_index=HexNumber(2), post_balance=HexNumber(200) ), BalBalanceChange( - tx_index=HexNumber(3), post_balance=HexNumber(300) + block_access_index=HexNumber(3), post_balance=HexNumber(300) ), ] changes_invalid = [ BalBalanceChange( - tx_index=HexNumber(1), post_balance=HexNumber(100) + block_access_index=HexNumber(1), post_balance=HexNumber(100) ), BalBalanceChange( - tx_index=HexNumber(3), post_balance=HexNumber(300) + block_access_index=HexNumber(3), post_balance=HexNumber(300) ), BalBalanceChange( - tx_index=HexNumber(2), post_balance=HexNumber(200) + block_access_index=HexNumber(2), post_balance=HexNumber(200) ), ] elif field_name == "code_changes": changes_valid = [ - BalCodeChange(tx_index=HexNumber(1), new_code=b"code1"), - BalCodeChange(tx_index=HexNumber(2), new_code=b"code2"), - BalCodeChange(tx_index=HexNumber(3), new_code=b"code3"), + BalCodeChange(block_access_index=HexNumber(1), new_code=b"code1"), + BalCodeChange(block_access_index=HexNumber(2), 
new_code=b"code2"), + BalCodeChange(block_access_index=HexNumber(3), new_code=b"code3"), ] changes_invalid = [ - BalCodeChange(tx_index=HexNumber(1), new_code=b"code1"), - BalCodeChange(tx_index=HexNumber(3), new_code=b"code3"), - BalCodeChange(tx_index=HexNumber(2), new_code=b"code2"), + BalCodeChange(block_access_index=HexNumber(1), new_code=b"code1"), + BalCodeChange(block_access_index=HexNumber(3), new_code=b"code3"), + BalCodeChange(block_access_index=HexNumber(2), new_code=b"code2"), ] bal_valid = BlockAccessList( @@ -196,7 +208,7 @@ def test_bal_tx_indices_ordering(field_name: str) -> None: with pytest.raises( BlockAccessListValidationError, - match=f"Transaction indices not in ascending order in {field_name}", + match=f"Block access indices not in ascending order in {field_name}", ): bal_invalid.validate_structure() @@ -205,7 +217,7 @@ def test_bal_tx_indices_ordering(field_name: str) -> None: "field_name", ["nonce_changes", "balance_changes", "code_changes"], ) -def test_bal_duplicate_tx_indices(field_name: str) -> None: +def test_bal_duplicate_block_access_indices(field_name: str) -> None: """ Test that BAL must not have duplicate tx indices in change lists. """ @@ -213,34 +225,38 @@ def test_bal_duplicate_tx_indices(field_name: str) -> None: changes: List[Union[BalNonceChange, BalBalanceChange, BalCodeChange]] - # Duplicate tx_index=1 + # Duplicate block_access_index=1 if field_name == "nonce_changes": changes = [ - BalNonceChange(tx_index=HexNumber(1), post_nonce=HexNumber(1)), BalNonceChange( - tx_index=HexNumber(1), post_nonce=HexNumber(2) - ), # duplicate tx_index - BalNonceChange(tx_index=HexNumber(2), post_nonce=HexNumber(3)), + block_access_index=HexNumber(1), post_nonce=HexNumber(1) + ), + BalNonceChange( + block_access_index=HexNumber(1), post_nonce=HexNumber(2) + ), # duplicate block_access_index + BalNonceChange( + block_access_index=HexNumber(2), post_nonce=HexNumber(3) + ), ] elif field_name == "balance_changes": changes = [ BalBalanceChange( - tx_index=HexNumber(1), post_balance=HexNumber(100) + block_access_index=HexNumber(1), post_balance=HexNumber(100) ), BalBalanceChange( - tx_index=HexNumber(1), post_balance=HexNumber(200) - ), # duplicate tx_index + block_access_index=HexNumber(1), post_balance=HexNumber(200) + ), # duplicate block_access_index BalBalanceChange( - tx_index=HexNumber(2), post_balance=HexNumber(300) + block_access_index=HexNumber(2), post_balance=HexNumber(300) ), ] elif field_name == "code_changes": changes = [ - BalCodeChange(tx_index=HexNumber(1), new_code=b"code1"), + BalCodeChange(block_access_index=HexNumber(1), new_code=b"code1"), BalCodeChange( - tx_index=HexNumber(1), new_code=b"" - ), # duplicate tx_index - BalCodeChange(tx_index=HexNumber(2), new_code=b"code2"), + block_access_index=HexNumber(1), new_code=b"" + ), # duplicate block_access_index + BalCodeChange(block_access_index=HexNumber(2), new_code=b"code2"), ] bal = BlockAccessList( @@ -254,13 +270,13 @@ def test_bal_duplicate_tx_indices(field_name: str) -> None: bal.validate_structure() -def test_bal_storage_duplicate_tx_indices() -> None: +def test_bal_storage_duplicate_block_access_indices() -> None: """ Test that storage changes must not have duplicate tx indices within same slot. 
""" addr = Address(0xA) - # Create storage changes with duplicate tx_index within the same slot + # Create storage changes with duplicate block_access_index within the same slot bal = BlockAccessList( [ BalAccountChange( @@ -270,15 +286,15 @@ def test_bal_storage_duplicate_tx_indices() -> None: slot=StorageKey(0), slot_changes=[ BalStorageChange( - tx_index=HexNumber(1), + block_access_index=HexNumber(1), post_value=StorageKey(100), ), BalStorageChange( - tx_index=HexNumber(1), + block_access_index=HexNumber(1), post_value=StorageKey(200), - ), # duplicate tx_index + ), # duplicate block_access_index BalStorageChange( - tx_index=HexNumber(2), + block_access_index=HexNumber(2), post_value=StorageKey(300), ), ], @@ -309,10 +325,12 @@ def test_bal_multiple_violations() -> None: address=bob, # Should come after alice nonce_changes=[ BalNonceChange( - tx_index=HexNumber(1), post_nonce=HexNumber(1) + block_access_index=HexNumber(1), + post_nonce=HexNumber(1), ), BalNonceChange( - tx_index=HexNumber(1), post_nonce=HexNumber(2) + block_access_index=HexNumber(1), + post_nonce=HexNumber(2), ), # duplicate ], ), @@ -342,7 +360,8 @@ def test_bal_single_account_valid() -> None: address=Address(0xA), nonce_changes=[ BalNonceChange( - tx_index=HexNumber(1), post_nonce=HexNumber(1) + block_access_index=HexNumber(1), + post_nonce=HexNumber(1), ) ], ) diff --git a/src/ethereum/forks/amsterdam/block_access_lists/builder.py b/src/ethereum/forks/amsterdam/block_access_lists/builder.py index e860c84068..ff5426746a 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/builder.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/builder.py @@ -16,7 +16,7 @@ from dataclasses import dataclass, field from typing import TYPE_CHECKING, Dict, List, Set -from ethereum_types.bytes import Bytes, Bytes32 +from ethereum_types.bytes import Bytes from ethereum_types.numeric import U64, U256 from ..fork_types import Address @@ -45,7 +45,7 @@ class AccountData: transaction index where it occurred. """ - storage_changes: Dict[Bytes32, List[StorageChange]] = field( + storage_changes: Dict[U256, List[StorageChange]] = field( default_factory=dict ) """ @@ -53,7 +53,7 @@ class AccountData: Each change includes the transaction index and new value. """ - storage_reads: Set[Bytes32] = field(default_factory=set) + storage_reads: Set[U256] = field(default_factory=set) """ Set of storage slots that were read but not modified. """ @@ -121,9 +121,9 @@ def ensure_account(builder: BlockAccessListBuilder, address: Address) -> None: def add_storage_write( builder: BlockAccessListBuilder, address: Address, - slot: Bytes32, + slot: U256, block_access_index: BlockAccessIndex, - new_value: Bytes32, + new_value: U256, ) -> None: """ Add a storage write operation to the block access list. @@ -171,7 +171,7 @@ def add_storage_write( def add_storage_read( - builder: BlockAccessListBuilder, address: Address, slot: Bytes32 + builder: BlockAccessListBuilder, address: Address, slot: U256 ) -> None: """ Add a storage read operation to the block access list. @@ -482,7 +482,7 @@ def build_block_access_list( # Add all storage reads for address, slot in state_changes.storage_reads: - add_storage_read(builder, address, slot) + add_storage_read(builder, address, U256(int.from_bytes(slot))) # Add all storage writes # Net-zero filtering happens at transaction commit time, not here. 
@@ -492,10 +492,9 @@ def build_block_access_list( slot, block_access_index, ), value in state_changes.storage_writes.items(): - # Convert U256 to Bytes32 for storage - value_bytes = Bytes32(value.to_bytes(U256(32), "big")) + u256_slot = U256(int.from_bytes(slot)) add_storage_write( - builder, address, slot, block_access_index, value_bytes + builder, address, u256_slot, block_access_index, value ) # Add all balance changes (balance_changes is keyed by (address, index)) diff --git a/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py b/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py index c4f49ff4aa..e604d43da1 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py @@ -10,14 +10,14 @@ from dataclasses import dataclass from typing import List, Tuple -from ethereum_types.bytes import Bytes, Bytes20, Bytes32 +from ethereum_types.bytes import Bytes, Bytes20 from ethereum_types.frozen import slotted_freezable from ethereum_types.numeric import U64, U256, Uint # Type aliases for clarity (matching EIP-7928 specification) Address = Bytes20 -StorageKey = Bytes32 -StorageValue = Bytes32 +StorageKey = U256 +StorageValue = U256 CodeData = Bytes BlockAccessIndex = Uint # uint16 in the spec, but using Uint for compatibility Balance = U256 # Post-transaction balance in wei diff --git a/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py b/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py index cb13727f0d..0e84189598 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py @@ -348,16 +348,16 @@ def _block_access_list_to_json(account_changes: Any) -> Any: storage_changes = [] for slot_change in account.storage_changes: slot_data: Dict[str, Any] = { - "slot": int.from_bytes(slot_change.slot, "big"), + "slot": int(slot_change.slot), "slotChanges": [], } for change in slot_change.changes: slot_data["slotChanges"].append( { - "txIndex": int(change.block_access_index), - "postValue": int.from_bytes( - change.new_value, "big" + "blockAccessIndex": int( + change.block_access_index ), + "postValue": int(change.new_value), } ) storage_changes.append(slot_data) @@ -365,14 +365,13 @@ def _block_access_list_to_json(account_changes: Any) -> Any: if account.storage_reads: account_data["storageReads"] = [ - int.from_bytes(slot, "big") - for slot in account.storage_reads + int(slot) for slot in account.storage_reads ] if account.balance_changes: account_data["balanceChanges"] = [ { - "txIndex": int(change.block_access_index), + "blockAccessIndex": int(change.block_access_index), "postBalance": int(change.post_balance), } for change in account.balance_changes @@ -381,7 +380,7 @@ def _block_access_list_to_json(account_changes: Any) -> Any: if account.nonce_changes: account_data["nonceChanges"] = [ { - "txIndex": int(change.block_access_index), + "blockAccessIndex": int(change.block_access_index), "postNonce": int(change.new_nonce), } for change in account.nonce_changes @@ -390,7 +389,7 @@ def _block_access_list_to_json(account_changes: Any) -> Any: if account.code_changes: account_data["codeChanges"] = [ { - "txIndex": int(change.block_access_index), + "blockAccessIndex": int(change.block_access_index), "newCode": "0x" + change.new_code.hex(), } for change in account.code_changes diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py index 
32a1052841..f56a142c8b 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py @@ -55,7 +55,9 @@ def test_bal_nonce_changes( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), } ), @@ -110,16 +112,21 @@ def test_bal_balance_changes( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=alice_final_balance + block_access_index=1, + post_balance=alice_final_balance, ) ], ), bob: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=100) + BalBalanceChange( + block_access_index=1, post_balance=100 + ) ], ), } @@ -190,14 +197,20 @@ def test_bal_code_changes( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), factory_contract: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=2)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=2) + ], ), created_contract: BalAccountExpectation( code_changes=[ - BalCodeChange(tx_index=1, new_code=runtime_code_bytes) + BalCodeChange( + block_access_index=1, new_code=runtime_code_bytes + ) ], ), } @@ -275,7 +288,9 @@ def test_bal_account_access_target( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)] + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ] ), target_contract: BalAccountExpectation.empty(), oracle_contract: BalAccountExpectation.empty(), @@ -314,16 +329,22 @@ def test_bal_callcode_nested_value_transfer( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), oracle_contract: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=100) + BalBalanceChange( + block_access_index=1, post_balance=100 + ) ], ), bob: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=100) + BalBalanceChange( + block_access_index=1, post_balance=100 + ) ], ), target_contract: BalAccountExpectation.empty(), @@ -380,14 +401,18 @@ def test_bal_delegated_storage_writes( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), oracle_contract: BalAccountExpectation( storage_changes=[ BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x42) + BalStorageChange( + block_access_index=1, post_value=0x42 + ) ], ) ], @@ -448,7 +473,9 @@ def test_bal_delegated_storage_reads( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: 
BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), oracle_contract: BalAccountExpectation( storage_reads=[0x01], @@ -511,22 +538,27 @@ def test_bal_block_rewards( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=alice_final_balance + block_access_index=1, + post_balance=alice_final_balance, ) ], ), bob: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=100) + BalBalanceChange( + block_access_index=1, post_balance=100 + ) ], ), charlie: BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=tip_to_charlie + block_access_index=1, post_balance=tip_to_charlie ) ], ), @@ -571,7 +603,9 @@ def test_bal_2930_account_listed_but_untouched( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), # The address excluded from BAL since state is not accessed oracle: None, @@ -628,7 +662,9 @@ def test_bal_2930_slot_listed_but_untouched( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), # The account was loaded. pure_calculator: BalAccountExpectation.empty(), @@ -688,20 +724,26 @@ def test_bal_2930_slot_listed_and_unlisted_writes( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), storage_writer: BalAccountExpectation( storage_changes=[ BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x42) + BalStorageChange( + block_access_index=1, post_value=0x42 + ) ], ), BalStorageSlot( slot=0x02, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x43) + BalStorageChange( + block_access_index=1, post_value=0x43 + ) ], ), ], @@ -762,7 +804,9 @@ def test_bal_2930_slot_listed_and_unlisted_reads( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), storage_reader: BalAccountExpectation( storage_reads=[0x01, 0x02], @@ -806,10 +850,12 @@ def test_bal_self_transfer( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], balance_changes=[ BalBalanceChange( - tx_index=1, + block_access_index=1, post_balance=start_balance - intrinsic_gas_cost * int(tx.gas_price or 0), ) @@ -848,10 +894,12 @@ def test_bal_zero_value_transfer( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + 
BalNonceChange(block_access_index=1, post_nonce=1) + ], balance_changes=[ BalBalanceChange( - tx_index=1, + block_access_index=1, post_balance=start_balance - intrinsic_gas_cost * int(tx.gas_price or 0), ) @@ -928,7 +976,9 @@ def test_bal_net_zero_balance_transfer( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), net_zero_bal_contract: BalAccountExpectation( # receives transfer_amount and sends transfer_amount away @@ -942,7 +992,7 @@ def test_bal_net_zero_balance_transfer( slot=0x00, slot_changes=[ BalStorageChange( - tx_index=1, + block_access_index=1, post_value=expected_balance_in_slot, ) ], @@ -955,7 +1005,7 @@ def test_bal_net_zero_balance_transfer( recipient: BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=transfer_amount + block_access_index=1, post_balance=transfer_amount ) ] if transfer_amount > 0 @@ -1003,7 +1053,9 @@ def test_bal_pure_contract_call( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), # Ensure called contract is tracked pure_contract: BalAccountExpectation.empty(), @@ -1044,7 +1096,9 @@ def test_bal_noop_storage_write( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), storage_contract: BalAccountExpectation( storage_reads=[0x01], @@ -1083,7 +1137,9 @@ def test_bal_aborted_storage_access( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)] + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ] ), storage_contract: BalAccountExpectation( storage_changes=[], @@ -1165,7 +1221,9 @@ def test_bal_aborted_account_access( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)] + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ] ), target_contract: BalAccountExpectation.empty(), abort_contract: BalAccountExpectation.empty(), @@ -1207,7 +1265,9 @@ def test_bal_fully_unmutated_account( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), oracle: BalAccountExpectation( storage_changes=[], # No net storage changes @@ -1295,16 +1355,19 @@ def test_bal_coinbase_zero_tip( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=alice_final_balance + block_access_index=1, + post_balance=alice_final_balance, ) ], ), bob: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=5) + BalBalanceChange(block_access_index=1, post_balance=5) ] 
), # Coinbase must be included even with zero tip @@ -1418,11 +1481,15 @@ def test_bal_precompile_funded( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), precompile: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=value) + BalBalanceChange( + block_access_index=1, post_balance=value + ) ] if value > 0 else [], @@ -1480,7 +1547,9 @@ def test_bal_precompile_call( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), oracle: BalAccountExpectation.empty(), precompile: BalAccountExpectation.empty(), @@ -1529,11 +1598,15 @@ def test_bal_nonexistent_value_transfer( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), bob: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=value) + BalBalanceChange( + block_access_index=1, post_balance=value + ) ] if value > 0 else [], @@ -1611,7 +1684,9 @@ def test_bal_nonexistent_account_access_read_only( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), oracle: BalAccountExpectation.empty(), bob: BalAccountExpectation.empty(), @@ -1696,12 +1771,15 @@ def test_bal_nonexistent_account_access_value_transfer( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), oracle: BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=oracle_final_balance + block_access_index=1, + post_balance=oracle_final_balance, ) ] if oracle_has_balance_change @@ -1710,7 +1788,8 @@ def test_bal_nonexistent_account_access_value_transfer( bob: BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=bob_final_balance + block_access_index=1, + post_balance=bob_final_balance, ) ] if bob_has_balance_change @@ -1786,20 +1865,24 @@ def test_bal_multiple_balance_changes_same_account( account_expectations={ alice: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1) + BalNonceChange( + block_access_index=1, post_nonce=1 + ) ], ), bob: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=2, post_nonce=1) + BalNonceChange( + block_access_index=2, post_nonce=1 + ) ], balance_changes=[ BalBalanceChange( - tx_index=1, + block_access_index=1, post_balance=bob_balance_after_tx0, ), BalBalanceChange( - tx_index=2, + block_access_index=2, post_balance=bob_balance_after_tx1, ), ], @@ -1807,7 +1890,8 @@ def test_bal_multiple_balance_changes_same_account( charlie: BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=2, post_balance=spend_amount + block_access_index=2, + post_balance=spend_amount, ) ], ), @@ -1859,9 +1943,15 @@ def 
test_bal_multiple_storage_writes_same_slot( account_expectations={ alice: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1), - BalNonceChange(tx_index=2, post_nonce=2), - BalNonceChange(tx_index=3, post_nonce=3), + BalNonceChange( + block_access_index=1, post_nonce=1 + ), + BalNonceChange( + block_access_index=2, post_nonce=2 + ), + BalNonceChange( + block_access_index=3, post_nonce=3 + ), ], ), contract: BalAccountExpectation( @@ -1870,13 +1960,13 @@ def test_bal_multiple_storage_writes_same_slot( slot=1, slot_changes=[ BalStorageChange( - tx_index=1, post_value=1 + block_access_index=1, post_value=1 ), BalStorageChange( - tx_index=2, post_value=2 + block_access_index=2, post_value=2 ), BalStorageChange( - tx_index=3, post_value=3 + block_access_index=3, post_value=3 ), ], ), @@ -1960,7 +2050,7 @@ def test_bal_nested_delegatecall_storage_writes_net_zero( account_expectations = { alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], ), root_contract: BalAccountExpectation( storage_reads=[0], @@ -2008,10 +2098,10 @@ def test_bal_create_transaction_empty_code( account_expectations = { alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], ), contract_address: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], code_changes=[], # ensure no code_changes recorded ), } @@ -2043,8 +2133,8 @@ def test_bal_cross_tx_storage_revert_to_zero( blobhash scenario where slot changes were being incorrectly filtered as net-zero across transaction boundaries. 
- Tx1: slot 0 = 0x0 -> 0xABCD (change recorded at tx_index=1) - Tx2: slot 0 = 0xABCD -> 0x0 (change MUST be recorded at tx_index=2) + Tx1: slot 0 = 0x0 -> 0xABCD (change at block_access_index=1) + Tx2: slot 0 = 0xABCD -> 0x0 (change MUST be at block_access_index=2) """ alice = pre.fund_eoa() @@ -2070,8 +2160,8 @@ def test_bal_cross_tx_storage_revert_to_zero( account_expectations = { alice: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1), - BalNonceChange(tx_index=2, post_nonce=2), + BalNonceChange(block_access_index=1, post_nonce=1), + BalNonceChange(block_access_index=2, post_nonce=2), ], ), contract: BalAccountExpectation( @@ -2079,10 +2169,12 @@ def test_bal_cross_tx_storage_revert_to_zero( BalStorageSlot( slot=0, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0xABCD), + BalStorageChange( + block_access_index=1, post_value=0xABCD + ), # CRITICAL: tx2's write to 0x0 MUST appear # even though it returns slot to original value - BalStorageChange(tx_index=2, post_value=0x0), + BalStorageChange(block_access_index=2, post_value=0x0), ], ), ], @@ -2163,7 +2255,9 @@ def test_bal_cross_block_ripemd160_state_leak( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)] + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ] ), bob: None, ripemd_caller: BalAccountExpectation.empty(), @@ -2187,7 +2281,9 @@ def test_bal_cross_block_ripemd160_state_leak( account_expectations={ alice: None, bob: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)] + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ] ), # this is the important check ripemd160_addr: None, @@ -2321,23 +2417,33 @@ def test_bal_all_transaction_types( account_expectations={ # Type 0 sender sender_0: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), # Type 1 sender sender_1: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=2, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=2, post_nonce=1) + ], ), # Type 2 sender sender_2: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=3, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=3, post_nonce=1) + ], ), # Type 3 sender sender_3: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=4, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=4, post_nonce=1) + ], ), # Type 4 sender sender_4: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=5, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=5, post_nonce=1) + ], ), # Contract touched by Type 0 contract_0: BalAccountExpectation( @@ -2345,7 +2451,9 @@ def test_bal_all_transaction_types( BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x01) + BalStorageChange( + block_access_index=1, post_value=0x01 + ) ], ) ], @@ -2356,7 +2464,9 @@ def test_bal_all_transaction_types( BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=2, post_value=0x02) + BalStorageChange( + block_access_index=2, post_value=0x02 + ) ], ) ], @@ -2370,7 +2480,9 @@ def test_bal_all_transaction_types( BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=3, post_value=0x03) + BalStorageChange( + block_access_index=3, post_value=0x03 + ) ], ) ], @@ -2381,17 
+2493,21 @@ def test_bal_all_transaction_types( BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=4, post_value=0x04) + BalStorageChange( + block_access_index=4, post_value=0x04 + ) ], ) ], ), # Alice (Type 4 delegation target, executes oracle code) alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=5, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=5, post_nonce=1) + ], code_changes=[ BalCodeChange( - tx_index=5, + block_access_index=5, new_code=Spec7702.delegation_designation(oracle), ) ], @@ -2399,7 +2515,9 @@ def test_bal_all_transaction_types( BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=5, post_value=0x05) + BalStorageChange( + block_access_index=5, post_value=0x05 + ) ], ) ], @@ -2507,7 +2625,9 @@ def test_bal_lexicographic_address_ordering( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), contract: BalAccountExpectation.empty(), # These addresses appear in BAL due to BALANCE access diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_cross_index.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_cross_index.py index 72ac91c923..9d32fdf123 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_cross_index.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_cross_index.py @@ -82,12 +82,12 @@ def test_bal_withdrawal_contract_cross_index( slot_changes=[ BalStorageChange( # Incremented during tx - tx_index=1, + block_access_index=1, post_value=1, ), BalStorageChange( # Reset during post-exec - tx_index=2, + block_access_index=2, post_value=0, ), ], @@ -97,12 +97,12 @@ def test_bal_withdrawal_contract_cross_index( slot_changes=[ BalStorageChange( # Incremented during tx - tx_index=1, + block_access_index=1, post_value=1, ), BalStorageChange( # Reset during post-exec - tx_index=2, + block_access_index=2, post_value=0, ), ], @@ -154,12 +154,12 @@ def test_bal_consolidation_contract_cross_index( slot_changes=[ BalStorageChange( # Incremented during tx - tx_index=1, + block_access_index=1, post_value=1, ), BalStorageChange( # Reset during post-exec - tx_index=2, + block_access_index=2, post_value=0, ), ], @@ -169,12 +169,12 @@ def test_bal_consolidation_contract_cross_index( slot_changes=[ BalStorageChange( # Incremented during tx - tx_index=1, + block_access_index=1, post_value=1, ), BalStorageChange( # Reset during post-exec - tx_index=2, + block_access_index=2, post_value=0, ), ], @@ -232,13 +232,17 @@ def test_bal_noop_write_filtering( BalStorageSlot( slot=2, slot_changes=[ - BalStorageChange(tx_index=1, post_value=42), + BalStorageChange( + block_access_index=1, post_value=42 + ), ], ), BalStorageSlot( slot=4, slot_changes=[ - BalStorageChange(tx_index=1, post_value=200), + BalStorageChange( + block_access_index=1, post_value=200 + ), ], ), ], diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4895.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4895.py index 16ce09ff90..df35ba66ef 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4895.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4895.py @@ -62,7 +62,9 @@ def test_bal_withdrawal_empty_block( 
account_expectations={ charlie: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=11 * GWEI) + BalBalanceChange( + block_access_index=1, post_balance=11 * GWEI + ) ], ), } @@ -115,16 +117,20 @@ def test_bal_withdrawal_and_transaction( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), bob: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=5) + BalBalanceChange(block_access_index=1, post_balance=5) ], ), charlie: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=2, post_balance=10 * GWEI) + BalBalanceChange( + block_access_index=2, post_balance=10 * GWEI + ) ], ), } @@ -169,7 +175,9 @@ def test_bal_withdrawal_to_nonexistent_account( account_expectations={ charlie: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=10 * GWEI) + BalBalanceChange( + block_access_index=1, post_balance=10 * GWEI + ) ], ), } @@ -216,7 +224,9 @@ def test_bal_withdrawal_no_evm_execution( account_expectations={ oracle: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=10 * GWEI) + BalBalanceChange( + block_access_index=1, post_balance=10 * GWEI + ) ], storage_reads=[], storage_changes=[], @@ -275,7 +285,9 @@ def test_bal_withdrawal_and_state_access_same_account( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), oracle: BalAccountExpectation( storage_reads=[0x01], @@ -283,12 +295,16 @@ def test_bal_withdrawal_and_state_access_same_account( BalStorageSlot( slot=0x02, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x99) + BalStorageChange( + block_access_index=1, post_value=0x99 + ) ], ) ], balance_changes=[ - BalBalanceChange(tx_index=2, post_balance=10 * GWEI) + BalBalanceChange( + block_access_index=2, post_balance=10 * GWEI + ) ], ), } @@ -343,12 +359,18 @@ def test_bal_withdrawal_and_value_transfer_same_address( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), bob: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=5 * GWEI), - BalBalanceChange(tx_index=2, post_balance=15 * GWEI), + BalBalanceChange( + block_access_index=1, post_balance=5 * GWEI + ), + BalBalanceChange( + block_access_index=2, post_balance=15 * GWEI + ), ], ), } @@ -388,7 +410,9 @@ def test_bal_multiple_withdrawals_same_address( account_expectations={ charlie: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=30 * GWEI) + BalBalanceChange( + block_access_index=1, post_balance=30 * GWEI + ) ], ), } @@ -443,17 +467,23 @@ def test_bal_withdrawal_and_selfdestruct( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), bob: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=100 * GWEI) + BalBalanceChange( + 
block_access_index=1, post_balance=100 * GWEI + ) ], ), oracle: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=0), - BalBalanceChange(tx_index=2, post_balance=50 * GWEI), + BalBalanceChange(block_access_index=1, post_balance=0), + BalBalanceChange( + block_access_index=2, post_balance=50 * GWEI + ), ], ), } @@ -510,13 +540,21 @@ def test_bal_withdrawal_and_new_contract( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), oracle: BalAccountExpectation( - code_changes=[BalCodeChange(tx_index=1, new_code=code)], + code_changes=[ + BalCodeChange(block_access_index=1, new_code=code) + ], balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=5 * GWEI), - BalBalanceChange(tx_index=2, post_balance=15 * GWEI), + BalBalanceChange( + block_access_index=1, post_balance=5 * GWEI + ), + BalBalanceChange( + block_access_index=2, post_balance=15 * GWEI + ), ], ), } @@ -621,7 +659,9 @@ def test_bal_withdrawal_to_precompiles( account_expectations={ precompile: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=10 * GWEI) + BalBalanceChange( + block_access_index=1, post_balance=10 * GWEI + ) ], storage_reads=[], storage_changes=[], @@ -668,7 +708,8 @@ def test_bal_withdrawal_largest_amount( charlie: BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=max_amount * GWEI + block_access_index=1, + post_balance=max_amount * GWEI, ) ], ), @@ -738,20 +779,23 @@ def test_bal_withdrawal_to_coinbase( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), bob: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=5) + BalBalanceChange(block_access_index=1, post_balance=5) ], ), coinbase: BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=tip_to_coinbase + block_access_index=1, post_balance=tip_to_coinbase ), BalBalanceChange( - tx_index=2, post_balance=coinbase_final_balance + block_access_index=2, + post_balance=coinbase_final_balance, ), ], ), @@ -798,7 +842,9 @@ def test_bal_withdrawal_to_coinbase_empty_block( account_expectations={ coinbase: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=10 * GWEI) + BalBalanceChange( + block_access_index=1, post_balance=10 * GWEI + ) ], ), } diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py index 2c4890611b..60676948a5 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py @@ -70,17 +70,21 @@ def test_bal_7702_delegation_create( account_expectations = { alice: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=2 if self_funded else 1) + BalNonceChange( + block_access_index=1, post_nonce=2 if self_funded else 1 + ) ], code_changes=[ BalCodeChange( - tx_index=1, + block_access_index=1, new_code=Spec7702.delegation_designation(oracle), ) ], ), bob: BalAccountExpectation( - 
balance_changes=[BalBalanceChange(tx_index=1, post_balance=10)] + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=10) + ] ), # Oracle must not be present in BAL - the account is never accessed oracle: None, @@ -89,7 +93,7 @@ def test_bal_7702_delegation_create( # For sponsored variant, relayer must also be included in BAL if not self_funded: account_expectations[relayer] = BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], ) block = Block( @@ -181,24 +185,28 @@ def test_bal_7702_delegation_update( account_expectations = { alice: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=2 if self_funded else 1), - BalNonceChange(tx_index=2, post_nonce=4 if self_funded else 2), + BalNonceChange( + block_access_index=1, post_nonce=2 if self_funded else 1 + ), + BalNonceChange( + block_access_index=2, post_nonce=4 if self_funded else 2 + ), ], code_changes=[ BalCodeChange( - tx_index=1, + block_access_index=1, new_code=Spec7702.delegation_designation(oracle1), ), BalCodeChange( - tx_index=2, + block_access_index=2, new_code=Spec7702.delegation_designation(oracle2), ), ], ), bob: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=10), - BalBalanceChange(tx_index=2, post_balance=20), + BalBalanceChange(block_access_index=1, post_balance=10), + BalBalanceChange(block_access_index=2, post_balance=20), ] ), # Both delegation targets must not be present in BAL @@ -211,8 +219,8 @@ def test_bal_7702_delegation_update( if not self_funded: account_expectations[relayer] = BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1), - BalNonceChange(tx_index=2, post_nonce=2), + BalNonceChange(block_access_index=1, post_nonce=1), + BalNonceChange(block_access_index=2, post_nonce=2), ], ) @@ -306,21 +314,25 @@ def test_bal_7702_delegation_clear( account_expectations = { alice: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=2 if self_funded else 1), - BalNonceChange(tx_index=2, post_nonce=4 if self_funded else 2), + BalNonceChange( + block_access_index=1, post_nonce=2 if self_funded else 1 + ), + BalNonceChange( + block_access_index=2, post_nonce=4 if self_funded else 2 + ), ], code_changes=[ BalCodeChange( - tx_index=1, + block_access_index=1, new_code=Spec7702.delegation_designation(oracle), ), - BalCodeChange(tx_index=2, new_code=""), + BalCodeChange(block_access_index=2, new_code=""), ], ), bob: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=10), - BalBalanceChange(tx_index=2, post_balance=20), + BalBalanceChange(block_access_index=1, post_balance=10), + BalBalanceChange(block_access_index=2, post_balance=20), ] ), # Both delegation targets must not be present in BAL @@ -333,8 +345,8 @@ def test_bal_7702_delegation_clear( if not self_funded: account_expectations[relayer] = BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1), - BalNonceChange(tx_index=2, post_nonce=2), + BalNonceChange(block_access_index=1, post_nonce=1), + BalNonceChange(block_access_index=2, post_nonce=2), ], ) @@ -395,20 +407,24 @@ def test_bal_7702_delegated_storage_access( account_expectations={ alice: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=10) + BalBalanceChange(block_access_index=1, post_balance=10) ], storage_changes=[ BalStorageSlot( slot=0x02, slot_changes=[ - 
BalStorageChange(tx_index=1, post_value=0x42) + BalStorageChange( + block_access_index=1, post_value=0x42 + ) ], ) ], storage_reads=[0x01], ), bob: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), # Oracle appears in BAL due to account access # (delegation target) @@ -464,11 +480,13 @@ def test_bal_7702_invalid_nonce_authorization( # Ensuring silent fail bob: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=10) + BalBalanceChange(block_access_index=1, post_balance=10) ] ), relayer: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), # Alice's account was marked warm but no changes were made alice: BalAccountExpectation.empty(), @@ -527,11 +545,13 @@ def test_bal_7702_invalid_chain_id_authorization( # Ensuring silent fail bob: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=10) + BalBalanceChange(block_access_index=1, post_balance=10) ] ), relayer: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), # Oracle must NOT be present - authorization failed so # account never accessed @@ -592,7 +612,9 @@ def test_bal_7702_delegated_via_call_opcode( expected_block_access_list=BlockAccessListExpectation( account_expectations={ bob: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), caller: BalAccountExpectation.empty(), # `alice` is accessed due to being the call target @@ -641,11 +663,13 @@ def test_bal_7702_null_address_delegation_no_code_change( # because setting code from b"" to b"" is a net-zero change account_expectations = { alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=2)], + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=2)], code_changes=[], # explicit check for no code changes ), bob: BalAccountExpectation( - balance_changes=[BalBalanceChange(tx_index=1, post_balance=10)] + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=10) + ] ), } @@ -720,18 +744,24 @@ def test_bal_7702_double_auth_reset( account_expectations={ alice: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=2) + BalNonceChange( + block_access_index=1, post_nonce=2 + ) ], code_changes=[], ), bob: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=10) + BalBalanceChange( + block_access_index=1, post_balance=10 + ) ] ), relayer: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1) + BalNonceChange( + block_access_index=1, post_nonce=1 + ) ], ), contract_a: None, @@ -791,20 +821,22 @@ def test_bal_7702_double_auth_swap( account_expectations = { alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=2)], + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=2)], code_changes=[ # Should show final code (CONTRACT_B), not CONTRACT_A BalCodeChange( - tx_index=1, + block_access_index=1, new_code=Spec7702.delegation_designation(contract_b), ) ], ), bob: BalAccountExpectation( - balance_changes=[BalBalanceChange(tx_index=1, post_balance=10)] + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=10) + ] 
), relayer: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], ), # Neither contract appears in BAL during delegation setup contract_a: None, @@ -901,25 +933,29 @@ def test_bal_selfdestruct_to_7702_delegation( account_expectations = { alice: BalAccountExpectation( # tx1: nonce change for auth, code change for delegation - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], code_changes=[ BalCodeChange( - tx_index=1, + block_access_index=1, new_code=Spec7702.delegation_designation(oracle), ) ], # tx2: balance change from selfdestruct balance_changes=[ - BalBalanceChange(tx_index=2, post_balance=alice_final_balance) + BalBalanceChange( + block_access_index=2, post_balance=alice_final_balance + ) ], ), bob: BalAccountExpectation( - balance_changes=[BalBalanceChange(tx_index=1, post_balance=10)] + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=10) + ] ), relayer: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1), - BalNonceChange(tx_index=2, post_nonce=2), + BalNonceChange(block_access_index=1, post_nonce=1), + BalNonceChange(block_access_index=2, post_nonce=2), ], ), caller: BalAccountExpectation.empty(), @@ -927,7 +963,9 @@ def test_bal_selfdestruct_to_7702_delegation( # Explicitly verify ALL fields to avoid false positives victim: BalAccountExpectation( nonce_changes=[], # Contract nonce unchanged - balance_changes=[BalBalanceChange(tx_index=2, post_balance=0)], + balance_changes=[ + BalBalanceChange(block_access_index=2, post_balance=0) + ], code_changes=[], # Code unchanged (post-Cancun SELFDESTRUCT) storage_changes=[], # No storage changes storage_reads=[], # No storage reads @@ -1018,23 +1056,27 @@ def test_bal_withdrawal_to_7702_delegation( account_expectations = { alice: BalAccountExpectation( # tx1: nonce change for auth, code change for delegation - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], code_changes=[ BalCodeChange( - tx_index=1, + block_access_index=1, new_code=Spec7702.delegation_designation(oracle), ) ], # tx2 (withdrawal): balance change balance_changes=[ - BalBalanceChange(tx_index=2, post_balance=alice_final_balance) + BalBalanceChange( + block_access_index=2, post_balance=alice_final_balance + ) ], ), bob: BalAccountExpectation( - balance_changes=[BalBalanceChange(tx_index=1, post_balance=10)] + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=10) + ] ), relayer: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], ), # Oracle MUST NOT appear - withdrawals don't execute recipient code, # so delegation target is never accessed diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_invalid.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_invalid.py index d94f7eaed4..13d18c7fef 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_invalid.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_invalid.py @@ -75,7 +75,9 @@ def test_bal_invalid_missing_nonce( account_expectations={ sender: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1) + BalNonceChange( + block_access_index=1, post_nonce=1 + 
) ], ), } @@ -118,11 +120,13 @@ def test_bal_invalid_nonce_value( account_expectations={ sender: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1) + BalNonceChange( + block_access_index=1, post_nonce=1 + ) ], ), } - ).modify(modify_nonce(sender, tx_index=1, nonce=42)), + ).modify(modify_nonce(sender, block_access_index=1, nonce=42)), ) ], ) @@ -171,7 +175,8 @@ def test_bal_invalid_storage_value( slot=0x01, slot_changes=[ BalStorageChange( - tx_index=1, post_value=0x01 + block_access_index=1, + post_value=0x01, ) ], ), @@ -179,7 +184,8 @@ def test_bal_invalid_storage_value( slot=0x02, slot_changes=[ BalStorageChange( - tx_index=1, post_value=0x02 + block_access_index=1, + post_value=0x02, ) ], ), @@ -187,7 +193,8 @@ def test_bal_invalid_storage_value( slot=0x03, slot_changes=[ BalStorageChange( - tx_index=1, post_value=0x03 + block_access_index=1, + post_value=0x03, ) ], ), @@ -196,7 +203,9 @@ def test_bal_invalid_storage_value( } ).modify( # Corrupt storage value for slot 0x02 - modify_storage(contract, tx_index=1, slot=0x02, value=0xFF) + modify_storage( + contract, block_access_index=1, slot=0x02, value=0xFF + ) ), ) ], @@ -246,21 +255,26 @@ def test_bal_invalid_tx_order( account_expectations={ sender1: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1) + BalNonceChange( + block_access_index=1, post_nonce=1 + ) ], ), sender2: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=2, post_nonce=1) + BalNonceChange( + block_access_index=2, post_nonce=1 + ) ], ), receiver: BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=10**15 + block_access_index=1, post_balance=10**15 ), BalBalanceChange( - tx_index=2, post_balance=3 * 10**15 + block_access_index=2, + post_balance=3 * 10**15, ), ], ), @@ -307,7 +321,9 @@ def test_bal_invalid_account( account_expectations={ sender: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1) + BalNonceChange( + block_access_index=1, post_nonce=1 + ) ], ), } @@ -316,7 +332,9 @@ def test_bal_invalid_account( BalAccountChange( address=phantom, nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1) + BalNonceChange( + block_access_index=1, post_nonce=1 + ) ], ) ) @@ -360,13 +378,15 @@ def test_bal_invalid_duplicate_account( account_expectations={ sender: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1) + BalNonceChange( + block_access_index=1, post_nonce=1 + ) ], ), receiver: BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=10**15 + block_access_index=1, post_balance=10**15 ) ], ), @@ -410,13 +430,15 @@ def test_bal_invalid_account_order( account_expectations={ sender: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1) + BalNonceChange( + block_access_index=1, post_nonce=1 + ) ], ), receiver: BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=10**15 + block_access_index=1, post_balance=10**15 ) ], ), @@ -471,8 +493,12 @@ def test_bal_invalid_complex_corruption( account_expectations={ sender: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1), - BalNonceChange(tx_index=2, post_nonce=2), + BalNonceChange( + block_access_index=1, post_nonce=1 + ), + BalNonceChange( + block_access_index=2, post_nonce=2 + ), ], ), contract: BalAccountExpectation( @@ -481,7 +507,8 @@ def test_bal_invalid_complex_corruption( slot=0x01, slot_changes=[ BalStorageChange( - tx_index=1, 
post_value=0x01 + block_access_index=1, + post_value=0x01, ) ], ), @@ -489,7 +516,8 @@ def test_bal_invalid_complex_corruption( slot=0x02, slot_changes=[ BalStorageChange( - tx_index=1, post_value=0x02 + block_access_index=1, + post_value=0x02, ) ], ), @@ -498,7 +526,7 @@ def test_bal_invalid_complex_corruption( receiver: BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=2, post_balance=10**15 + block_access_index=2, post_balance=10**15 ) ], ), @@ -506,7 +534,7 @@ def test_bal_invalid_complex_corruption( ).modify( remove_nonces(sender), modify_storage( - contract, tx_index=1, slot=0x01, value=0xFF + contract, block_access_index=1, slot=0x01, value=0xFF ), remove_balances(receiver), swap_tx_indices(1, 2), @@ -549,13 +577,15 @@ def test_bal_invalid_missing_account( account_expectations={ sender: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1) + BalNonceChange( + block_access_index=1, post_nonce=1 + ) ], ), receiver: BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=10**15 + block_access_index=1, post_balance=10**15 ) ], ), @@ -600,12 +630,16 @@ def test_bal_invalid_balance_value( receiver: BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=10**15 + block_access_index=1, post_balance=10**15 ) ], ), } - ).modify(modify_balance(receiver, tx_index=1, balance=999999)), + ).modify( + modify_balance( + receiver, block_access_index=1, balance=999999 + ) + ), ) ], ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py index 9e2355b93e..7a07299407 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py @@ -168,7 +168,9 @@ def test_bal_sstore_and_oog( BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x42) + BalStorageChange( + block_access_index=1, post_value=0x42 + ) ], ), ] @@ -505,11 +507,13 @@ def test_bal_call_no_delegation_and_oog_before_target_access( elif value > 0: account_expectations = { caller: BalAccountExpectation( - balance_changes=[BalBalanceChange(tx_index=1, post_balance=0)] + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=0) + ] ), target: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=value) + BalBalanceChange(block_access_index=1, post_balance=value) ] ), } @@ -783,7 +787,9 @@ def test_bal_call_7702_delegation_and_oog( account_expectations: Dict[Address, BalAccountExpectation | None] = { caller: ( BalAccountExpectation( - balance_changes=[BalBalanceChange(tx_index=1, post_balance=0)] + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=0) + ] ) if value_transferred else BalAccountExpectation.empty() @@ -797,7 +803,7 @@ def test_bal_call_7702_delegation_and_oog( if value_transferred: account_expectations[target] = BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=value) + BalBalanceChange(block_access_index=1, post_balance=value) ] ) else: @@ -1797,18 +1803,21 @@ def test_bal_self_destruct( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), bob: 
BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=expected_recipient_balance + block_access_index=1, + post_balance=expected_recipient_balance, ) ] ), self_destructed_account: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=0) + BalBalanceChange(block_access_index=1, post_balance=0) ] if pre_funded else [], @@ -1822,7 +1831,9 @@ def test_bal_self_destruct( BalStorageSlot( slot=0x02, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x42) + BalStorageChange( + block_access_index=1, post_value=0x42 + ) ], ) ] @@ -1935,7 +1946,7 @@ def test_bal_self_destruct_oog( account_expectations: Dict[Address, BalAccountExpectation | None] = { alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], ), caller_contract: BalAccountExpectation.empty(), selfdestruct_contract: BalAccountExpectation.empty(), @@ -1996,14 +2007,18 @@ def test_bal_storage_write_read_same_frame( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), oracle: BalAccountExpectation( storage_changes=[ BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x42) + BalStorageChange( + block_access_index=1, post_value=0x42 + ) ], ) ], @@ -2085,14 +2100,18 @@ def test_bal_storage_write_read_cross_frame( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), oracle: BalAccountExpectation( storage_changes=[ BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x42) + BalStorageChange( + block_access_index=1, post_value=0x42 + ) ], ) ], @@ -2159,16 +2178,16 @@ def test_bal_create_oog_code_deposit( # nonce/code changes rolled back on OOG) account_expectations = { alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], ), factory: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=2)], + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=2)], storage_changes=[ BalStorageSlot( slot=1, slot_changes=[ # SSTORE saves 0 (CREATE failed) - BalStorageChange(tx_index=1, post_value=0), + BalStorageChange(block_access_index=1, post_value=0), ], ) ], @@ -2240,7 +2259,9 @@ def test_bal_sstore_static_context( account_expectations={ alice: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1) + BalNonceChange( + block_access_index=1, post_nonce=1 + ) ], ), contract_a: BalAccountExpectation( @@ -2249,7 +2270,7 @@ def test_bal_sstore_static_context( slot=0x00, slot_changes=[ BalStorageChange( - tx_index=1, post_value=1 + block_access_index=1, post_value=1 ), ], ), @@ -2307,7 +2328,9 @@ def test_bal_create_contract_init_revert( account_expectations={ alice: BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=1) + BalNonceChange( + block_access_index=1, post_nonce=1 + ) ], ), caller: BalAccountExpectation.empty(), @@ -2386,7 +2409,9 @@ def test_bal_call_revert_insufficient_funds( expected_block_access_list=BlockAccessListExpectation( 
account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), contract: BalAccountExpectation( # Storage read for slot 0x01 @@ -2396,7 +2421,9 @@ def test_bal_call_revert_insufficient_funds( BalStorageSlot( slot=0x02, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0) + BalStorageChange( + block_access_index=1, post_value=0 + ) ], ) ], @@ -2525,14 +2552,18 @@ def test_bal_create_selfdestruct_to_self_with_call( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), factory: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=2)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=2) + ], # Balance changes: loses endowment (100) balance_changes=[ BalBalanceChange( - tx_index=1, + block_access_index=1, post_balance=factory_balance - endowment, ) ], @@ -2543,7 +2574,9 @@ def test_bal_create_selfdestruct_to_self_with_call( BalStorageSlot( slot=0x01, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x42) + BalStorageChange( + block_access_index=1, post_value=0x42 + ) ], ) ], @@ -2646,17 +2679,23 @@ def test_bal_create2_collision( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), factory: BalAccountExpectation( # Nonce incremented 1→2 even on failed CREATE2 - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=2)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=2) + ], # Storage changes: slot 0 = 0xDEAD → 0 (CREATE2 returned 0) storage_changes=[ BalStorageSlot( slot=0x00, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0) + BalStorageChange( + block_access_index=1, post_value=0 + ) ], ) ], @@ -2726,7 +2765,9 @@ def test_bal_transient_storage_not_tracked( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), contract: BalAccountExpectation( # Persistent storage change for slot 0x02 @@ -2734,7 +2775,9 @@ def test_bal_transient_storage_not_tracked( BalStorageSlot( slot=0x02, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0x42) + BalStorageChange( + block_access_index=1, post_value=0x42 + ) ], ) ], @@ -2798,7 +2841,9 @@ def test_bal_selfdestruct_to_precompile( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), caller: BalAccountExpectation.empty(), # Victim (selfdestructing contract): balance changes 100→0 @@ -2806,7 +2851,7 @@ def test_bal_selfdestruct_to_precompile( victim: BalAccountExpectation( nonce_changes=[], # Contract nonce unchanged balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=0) + BalBalanceChange(block_access_index=1, post_balance=0) ], code_changes=[], # Code unchanged (post-Cancun) storage_changes=[], # No storage changes @@ -2818,7 +2863,7 @@ def 
test_bal_selfdestruct_to_precompile( nonce_changes=[], # MUST NOT have nonce changes balance_changes=[ BalBalanceChange( - tx_index=1, post_balance=contract_balance + block_access_index=1, post_balance=contract_balance ) ], code_changes=[], # MUST NOT have code changes @@ -2907,7 +2952,9 @@ def test_bal_create_early_failure( expected_block_access_list=BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), factory: BalAccountExpectation( # NO nonce_changes - CREATE failed before increment_nonce @@ -2917,7 +2964,9 @@ def test_bal_create_early_failure( BalStorageSlot( slot=0x00, slot_changes=[ - BalStorageChange(tx_index=1, post_value=0) + BalStorageChange( + block_access_index=1, post_value=0 + ) ], ) ], diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 77c298110b..0432339ead 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -23,9 +23,9 @@ | `test_bal_noop_storage_write` | Ensure BAL includes storage read but not write for no-op writes where pre-state equals post-state | Contract with pre-existing storage value `0x42` in slot `0x01`; transaction executes `SSTORE(0x01, 0x42)` (writing same value) | BAL **MUST** include the contract address with `storage_reads` for slot `0x01` since it was accessed, but **MUST NOT** include it in `storage_changes` (no actual state change). | ✅ Completed | | `test_bal_fully_unmutated_account` | Ensure BAL captures account that has zero net mutations | Alice sends 0 wei to `Oracle` which writes same pre-existing value to storage | BAL MUST include Alice with `nonce_changes` and balance changes (gas), `Oracle` with `storage_reads` for accessed slot but empty `storage_changes`. | ✅ Completed | | `test_bal_net_zero_balance_transfer` | BAL includes accounts with net-zero balance change but excludes them from balance changes | Contract receives and sends same amount to recipient using CALL or SELFDESTRUCT | BAL **MUST** include contract in `account_changes` without `balance_changes` (net zero). BAL **MUST** record non-zero `balance_changes` for recipient. | ✅ Completed | -| `test_bal_system_contracts_2935_4788` | BAL includes pre-exec system writes for parent hash & beacon root | Build a block with `N` normal txs; 2935 & 4788 active | BAL MUST include `HISTORY_STORAGE_ADDRESS` (EIP-2935) and `BEACON_ROOTS_ADDRESS` (EIP-4788) with `storage_changes` to ring-buffer slots; each write uses `tx_index = N` (system op). | 🟡 Planned | -| `test_bal_system_dequeue_withdrawals_eip7002` | BAL tracks post-exec system dequeues for withdrawals | Pre-populate EIP-7002 withdrawal requests; produce a block where dequeues occur | BAL MUST include the 7002 system contract with `storage_changes` (queue head/tail slots 0–3) using `tx_index = len(txs)` and balance changes for withdrawal recipients. | 🟡 Planned | -| `test_bal_system_dequeue_consolidations_eip7251` | BAL tracks post-exec system dequeues for consolidations | Pre-populate EIP-7251 consolidation requests; produce a block where dequeues occur | BAL MUST include the 7251 system contract with `storage_changes` (queue slots 0–3) using `tx_index = len(txs)`. 
| 🟡 Planned | +| `test_bal_system_contracts_2935_4788` | BAL includes pre-exec system writes for parent hash & beacon root | Build a block with `N` normal txs; 2935 & 4788 active | BAL MUST include `HISTORY_STORAGE_ADDRESS` (EIP-2935) and `BEACON_ROOTS_ADDRESS` (EIP-4788) with `storage_changes` to ring-buffer slots; each write uses `block_access_index = N` (system op). | 🟡 Planned | +| `test_bal_system_dequeue_withdrawals_eip7002` | BAL tracks post-exec system dequeues for withdrawals | Pre-populate EIP-7002 withdrawal requests; produce a block where dequeues occur | BAL MUST include the 7002 system contract with `storage_changes` (queue head/tail slots 0–3) using `block_access_index = len(txs)` and balance changes for withdrawal recipients. | 🟡 Planned | +| `test_bal_system_dequeue_consolidations_eip7251` | BAL tracks post-exec system dequeues for consolidations | Pre-populate EIP-7251 consolidation requests; produce a block where dequeues occur | BAL MUST include the 7251 system contract with `storage_changes` (queue slots 0–3) using `block_access_index = len(txs)`. | 🟡 Planned | | `test_bal_aborted_storage_access` | Ensure BAL captures storage access in aborted transactions correctly | Alice calls contract that reads storage slot `0x01`, writes to slot `0x02`, then aborts with `REVERT`/`INVALID` | BAL MUST include storage_reads for slots `0x01` and `0x02` (aborted writes become reads), empty storage_changes. Only nonce changes for Alice. | ✅ Completed | | `test_bal_aborted_account_access` | Ensure BAL captures account access in aborted transactions for all account accessing opcodes | Alice calls `AbortContract` that performs account access operations (`BALANCE`, `EXTCODESIZE`, `EXTCODECOPY`, `EXTCODEHASH`, `CALL`, `CALLCODE`, `DELEGATECALL`, `STATICCALL`) on `TargetContract` and aborts via `REVERT`/`INVALID` | BAL MUST include Alice, `TargetContract`, and `AbortContract` in account_changes and nonce changes for Alice. | ✅ Completed | | `test_bal_pure_contract_call` | Ensure BAL captures contract access for pure computation calls | Alice calls `PureContract` that performs pure arithmetic (ADD operation) without storage or balance changes | BAL MUST include Alice and `PureContract` in `account_changes`, and `nonce_changes` for Alice. | ✅ Completed | @@ -75,7 +75,7 @@ | `test_bal_invalid_complex_corruption` | Verify clients reject blocks with multiple BAL corruptions | Alice calls contract with storage writes; BAL has multiple issues: wrong account, missing nonce, wrong storage value | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** detect any corruption regardless of other issues. | ✅ Completed | | `test_bal_invalid_missing_account` | Verify clients reject blocks with missing required account entries in BAL | Alice sends transaction to Bob; BAL modifier removes Bob's account entry (recipient should be included) | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate all accessed accounts are present. | ✅ Completed | | `test_bal_invalid_balance_value` | Verify clients reject blocks with incorrect balance values in BAL | Alice sends value to Bob; BAL modifier changes balance to incorrect value | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate balance change values match actual state transitions. 
| ✅ Completed | -| `test_bal_empty_block_no_coinbase` | Ensure BAL correctly handles empty blocks without including coinbase | Block with 0 transactions, no withdrawals. System contracts may perform operations (EIP-2935 parent hash, EIP-4788 beacon root if active). | BAL **MUST NOT** include the coinbase/fee recipient (receives no fees). BAL **MAY** include system contract addresses (EIP-2935 `HISTORY_STORAGE_ADDRESS`, EIP-4788 `BEACON_ROOTS_ADDRESS`) with `storage_changes` at `tx_index=0` (pre-execution system operations). | ✅ Completed | +| `test_bal_empty_block_no_coinbase` | Ensure BAL correctly handles empty blocks without including coinbase | Block with 0 transactions, no withdrawals. System contracts may perform operations (EIP-2935 parent hash, EIP-4788 beacon root if active). | BAL **MUST NOT** include the coinbase/fee recipient (receives no fees). BAL **MAY** include system contract addresses (EIP-2935 `HISTORY_STORAGE_ADDRESS`, EIP-4788 `BEACON_ROOTS_ADDRESS`) with `storage_changes` at `block_access_index=0` (pre-execution system operations). | ✅ Completed | | `test_bal_coinbase_zero_tip` | Ensure BAL includes coinbase even when priority fee is zero | Block with 1 transaction: Alice sends 5 wei to Bob with priority fee = 0 (base fee burned post-EIP-1559) | BAL **MUST** include Alice with `balance_changes` (gas cost) and `nonce_changes`. BAL **MUST** include Bob with `balance_changes`. BAL **MUST** include coinbase with empty changes. | ✅ Completed | | `test_bal_withdrawal_empty_block` | Ensure BAL captures withdrawal balance changes in empty block | Charlie starts with 1 gwei. Block with 0 transactions and 1 withdrawal of 10 gwei to Charlie | BAL **MUST** include Charlie with `balance_changes` at `block_access_index = 1`. Charlie's `balance_changes` **MUST** show final balance of 11 gwei. All other fields (storage_reads, storage_changes, nonce_changes, code_changes) **MUST** be empty. | ✅ Completed | | `test_bal_withdrawal_and_transaction` | Ensure BAL captures both transaction and withdrawal balance changes | Block with 1 transaction: Alice sends 5 wei to Bob. 1 withdrawal of 10 gwei to Charlie | BAL **MUST** include Alice with `nonce_changes` and `balance_changes` at `block_access_index = 1`. BAL **MUST** include Bob with `balance_changes` at `block_access_index = 1`. BAL **MUST** include Charlie with `balance_changes` at `block_access_index = 2` showing final balance after receiving 10 gwei. All other fields for Charlie **MUST** be empty. | ✅ Completed | @@ -101,13 +101,13 @@ | `test_bal_all_transaction_types` | Ensure BAL correctly captures state changes from all transaction types in a single block | Single block with 5 transactions: Type 0 (Legacy), Type 1 (EIP-2930 Access List), Type 2 (EIP-1559), Type 3 (EIP-4844 Blob), Type 4 (EIP-7702 Set Code). Each tx writes to contract storage. Note: Access list addresses are pre-warmed but NOT recorded in BAL (no state access). | BAL **MUST** include: (1) All 5 senders with `nonce_changes`. (2) Contracts 0-3 with `storage_changes`. (3) Alice (7702 target) with `nonce_changes`, `code_changes` (delegation), `storage_changes`. (4) Oracle (delegation source) with empty changes. | ✅ Completed | | `test_bal_create2_collision` | Ensure BAL handles CREATE2 address collision correctly | Factory contract (nonce=1, storage slot 0=0xDEAD) executes `CREATE2(salt=0, initcode)` targeting address that already has `code=STOP, nonce=1`. Pre-deploy contract at calculated CREATE2 target address before factory deployment. 
| BAL **MUST** include: (1) Factory with `nonce_changes` (1→2, incremented even on failed CREATE2), `storage_changes` for slot 0 (0xDEAD→0, stores failure). (2) Collision address with empty changes (accessed during collision check, no state changes). CREATE2 returns 0. Collision address **MUST NOT** have `nonce_changes` or `code_changes`. | ✅ Completed | | `test_bal_create_selfdestruct_to_self_with_call` | Ensure BAL handles init code that calls external contract then selfdestructs to itself | Factory executes `CREATE2` with endowment=100. Init code (embedded in factory via CODECOPY): (1) `CALL(Oracle, 0)` - Oracle writes to its storage slot 0x01. (2) `SSTORE(0x01, 0x42)` - write to own storage. (3) `SELFDESTRUCT(SELF)` - selfdestruct to own address. Contract created and destroyed in same tx. | BAL **MUST** include: (1) Factory with `nonce_changes`, `balance_changes` (loses 100). (2) Oracle with `storage_changes` for slot 0x01 (external call succeeded). (3) Created address with `storage_reads` for slot 0x01 (aborted write becomes read) - **MUST NOT** have `nonce_changes`, `code_changes`, `storage_changes`, or `balance_changes` (ephemeral contract, balance burned via SELFDESTRUCT to self). | ✅ Completed | -| `test_bal_selfdestruct_to_7702_delegation` | Ensure BAL correctly handles SELFDESTRUCT to a 7702 delegated account (no code execution on recipient) | Tx1: Alice authorizes delegation to Oracle (sets code to `0xef0100\|\|Oracle`). Tx2: Victim contract (balance=100) executes `SELFDESTRUCT(Alice)`. Two separate transactions in same block. Note: Alice starts with initial balance which accumulates with selfdestruct. | BAL **MUST** include: (1) Alice at tx_index=1 with `code_changes` (delegation), `nonce_changes`. (2) Alice at tx_index=2 with `balance_changes` (receives selfdestruct). (3) Victim at tx_index=2 with `balance_changes` (100→0). **Oracle MUST NOT appear in tx2** - per EVM spec, SELFDESTRUCT transfers balance without executing recipient code, so delegation target is never accessed. | ✅ Completed | +| `test_bal_selfdestruct_to_7702_delegation` | Ensure BAL correctly handles SELFDESTRUCT to a 7702 delegated account (no code execution on recipient) | Tx1: Alice authorizes delegation to Oracle (sets code to `0xef0100\|\|Oracle`). Tx2: Victim contract (balance=100) executes `SELFDESTRUCT(Alice)`. Two separate transactions in same block. Note: Alice starts with initial balance which accumulates with selfdestruct. | BAL **MUST** include: (1) Alice at block_access_index=1 with `code_changes` (delegation), `nonce_changes`. (2) Alice at block_access_index=2 with `balance_changes` (receives selfdestruct). (3) Victim at block_access_index=2 with `balance_changes` (100→0). **Oracle MUST NOT appear in tx2** - per EVM spec, SELFDESTRUCT transfers balance without executing recipient code, so delegation target is never accessed. | ✅ Completed | | `test_bal_call_revert_insufficient_funds` | Ensure BAL handles CALL failure due to insufficient balance (not OOG) | Contract (balance=100, storage slot 0x02=0xDEAD) executes: `SLOAD(0x01), CALL(target, value=1000), SSTORE(0x02, result)`. CALL fails because 1000 > 100. Target address 0xDEAD (pre-existing with non-zero balance to avoid pruning). Note: slot 0x02 must start non-zero so SSTORE(0) is a change. | BAL **MUST** include: (1) Contract with `storage_reads` for slot 0x01, `storage_changes` for slot 0x02 (value=0, CALL returned failure). (2) Target (0xDEAD) **MUST** appear in BAL with empty changes - target is accessed before balance check fails. 
| ✅ Completed | | `test_bal_lexicographic_address_ordering` | Ensure BAL enforces strict lexicographic byte-wise ordering | Pre-fund three addresses with specific byte patterns: `addr_low = 0x0000...0001`, `addr_mid = 0x0000...0100`, `addr_high = 0x0100...0000`. Contract touches them in reverse order: `BALANCE(addr_high), BALANCE(addr_low), BALANCE(addr_mid)`. Additionally, include two endian-trap addresses that are byte-reversals of each other: `addr_endian_low = 0x0100000000000000000000000000000000000002`, `addr_endian_high = 0x0200000000000000000000000000000000000001`. Note: `reverse(addr_endian_low) = addr_endian_high`. Correct lexicographic order: `addr_endian_low < addr_endian_high` (0x01 < 0x02 at byte 0). If implementation incorrectly reverses bytes before comparing, it would get `addr_endian_low > addr_endian_high` (wrong). | BAL account list **MUST** be sorted lexicographically by address bytes: `addr_low` < `addr_mid` < `addr_high` < `addr_endian_low` < `addr_endian_high`, regardless of access order. The endian-trap addresses specifically catch byte-reversal bugs where addresses are compared with wrong byte order. Complements `test_bal_invalid_account_order` which tests rejection; this tests correct generation. | ✅ Completed | | `test_bal_transient_storage_not_tracked` | Ensure BAL excludes EIP-1153 transient storage operations | Contract executes: `TSTORE(0x01, 0x42)` (transient write), `TLOAD(0x01)` (transient read), `SSTORE(0x02, result)` (persistent write using transient value). | BAL **MUST** include slot 0x02 in `storage_changes` (persistent storage was modified). BAL **MUST NOT** include slot 0x01 in `storage_reads` or `storage_changes` (transient storage is not persisted, not needed for stateless execution). This verifies TSTORE/TLOAD don't pollute BAL. | ✅ Completed | | `test_bal_selfdestruct_to_precompile` | Ensure BAL captures SELFDESTRUCT with precompile as beneficiary | Caller triggers victim contract (balance=100) to execute `SELFDESTRUCT(0x0000...0001)` (ecrecover precompile). Precompile starts with balance=0. | BAL **MUST** include: (1) Contract with `balance_changes` (100→0, loses balance to selfdestruct). (2) Precompile address 0x01 with `balance_changes` (0→100, receives selfdestruct balance). Precompile **MUST NOT** have `code_changes` or `nonce_changes`. This complements `test_bal_withdrawal_to_precompiles` (withdrawal) and `test_bal_precompile_funded` (tx value). | ✅ Completed | | `test_bal_self_destruct_oog` | Ensure BAL correctly tracks SELFDESTRUCT beneficiary based on gas boundaries | Alice calls `Caller` contract which CALLs `SelfDestructContract` with precisely controlled gas. `SelfDestructContract` attempts SELFDESTRUCT to new account `Beneficiary`. Static gas = G_VERY_LOW + G_SELF_DESTRUCT + G_COLD_ACCOUNT_ACCESS. Parameterized: (1) OOG before state access (gas = static - 1), (2) OOG after state access (gas = static, but insufficient for G_NEW_ACCOUNT). | For OOG before state access: BAL **MUST NOT** include `Beneficiary` (no state access occurred). For OOG after state access: BAL **MUST** include `Beneficiary` with empty changes (state was accessed before G_NEW_ACCOUNT check failed). Both cases: Alice with `nonce_changes`, `Caller` and `SelfDestructContract` with empty changes. Contract balance unchanged. | ✅ Completed | -| `test_bal_withdrawal_to_7702_delegation` | Ensure BAL correctly handles withdrawal to a 7702 delegated account (no code execution on recipient) | Tx1: Alice authorizes delegation to Oracle (sets code to `0xef0100\|\|Oracle`). 
Withdrawal: 10 gwei sent to Alice. Single block with tx + withdrawal. | BAL **MUST** include: (1) Alice at tx_index=1 with `code_changes` (delegation), `nonce_changes`. (2) Alice at tx_index=2 with `balance_changes` (receives withdrawal). **Oracle MUST NOT appear** - withdrawals credit balance without executing recipient code, so delegation target is never accessed. This complements `test_bal_selfdestruct_to_7702_delegation` (selfdestruct) and `test_bal_withdrawal_no_evm_execution` (withdrawal to contract). | ✅ Completed | +| `test_bal_withdrawal_to_7702_delegation` | Ensure BAL correctly handles withdrawal to a 7702 delegated account (no code execution on recipient) | Tx1: Alice authorizes delegation to Oracle (sets code to `0xef0100\|\|Oracle`). Withdrawal: 10 gwei sent to Alice. Single block with tx + withdrawal. | BAL **MUST** include: (1) Alice at block_access_index=1 with `code_changes` (delegation), `nonce_changes`. (2) Alice at block_access_index=2 with `balance_changes` (receives withdrawal). **Oracle MUST NOT appear** - withdrawals credit balance without executing recipient code, so delegation target is never accessed. This complements `test_bal_selfdestruct_to_7702_delegation` (selfdestruct) and `test_bal_withdrawal_no_evm_execution` (withdrawal to contract). | ✅ Completed | | `test_init_collision_create_tx` | Ensure BAL tracks CREATE collisions correctly (pre-Amsterdam test with BAL) | CREATE transaction targeting address with existing storage aborts | BAL **MUST** show empty expectations for collision address (no changes occur due to abort) | ✅ Completed | | `test_call_to_pre_authorized_oog` | Ensure BAL handles OOG during EIP-7702 delegation access (pre-Amsterdam test with BAL) | Call to delegated account that OOGs before accessing delegation contract | BAL **MUST** include auth_signer (code read for delegation check) but **MUST NOT** include delegation contract (OOG before access) | ✅ Completed | | `test_selfdestruct_created_in_same_tx_with_revert` | Ensure BAL tracks selfdestruct with revert correctly (pre-Amsterdam test with BAL) | Contract created and selfdestructed in same tx with nested revert | BAL **MUST** track storage reads and balance changes for selfdestruct even with reverts | ✅ Completed | diff --git a/tests/cancun/create/test_create_oog_from_eoa_refunds.py b/tests/cancun/create/test_create_oog_from_eoa_refunds.py index ee7f14571a..050f54c91e 100644 --- a/tests/cancun/create/test_create_oog_from_eoa_refunds.py +++ b/tests/cancun/create/test_create_oog_from_eoa_refunds.py @@ -356,13 +356,17 @@ def test_create_oog_from_eoa_refunds( ) created_bal = BalAccountExpectation( nonce_changes=[ - BalNonceChange(tx_index=1, post_nonce=expected_nonce) + BalNonceChange( + block_access_index=1, post_nonce=expected_nonce + ) ], storage_changes=[ BalStorageSlot( slot=0, slot_changes=[ - BalStorageChange(tx_index=1, post_value=1) + BalStorageChange( + block_access_index=1, post_value=1 + ) ], ), ], @@ -404,7 +408,9 @@ def test_create_oog_from_eoa_refunds( bal_expectation = BlockAccessListExpectation( account_expectations={ sender: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), created_address: created_bal, } diff --git a/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py b/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py index ab2d22f112..634bfac822 100644 --- a/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py +++ 
b/tests/cancun/eip6780_selfdestruct/test_selfdestruct_revert.py @@ -454,7 +454,7 @@ def test_selfdestruct_created_in_same_tx_with_revert( # noqa SC200 BalAccountExpectation( balance_changes=[ BalBalanceChange( - tx_index=1, + block_access_index=1, post_balance=1 if selfdestruct_on_outer_call == 1 else 2, @@ -467,11 +467,15 @@ def test_selfdestruct_created_in_same_tx_with_revert( # noqa SC200 selfdestruct_with_transfer_contract_address ] = BalAccountExpectation( storage_reads=[1], - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], - balance_changes=[BalBalanceChange(tx_index=1, post_balance=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=1) + ], code_changes=[ BalCodeChange( - tx_index=1, + block_access_index=1, new_code=selfdestruct_with_transfer_contract_code, ), ], @@ -479,7 +483,9 @@ def test_selfdestruct_created_in_same_tx_with_revert( # noqa SC200 BalStorageSlot( slot=0, slot_changes=[ - BalStorageChange(tx_index=1, post_value=1), + BalStorageChange( + block_access_index=1, post_value=1 + ), ], ), ], diff --git a/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py b/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py index 015217521d..ae3dd53df2 100644 --- a/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py +++ b/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py @@ -230,7 +230,7 @@ def expected_block_access_list( else: empty_account_expectation = BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=1) + BalBalanceChange(block_access_index=1, post_balance=1) ] ) else: @@ -246,14 +246,16 @@ def expected_block_access_list( empty_account: empty_account_expectation, caller_address: BalAccountExpectation( balance_changes=[ - BalBalanceChange(tx_index=1, post_balance=4) + BalBalanceChange(block_access_index=1, post_balance=4) ], storage_reads=[0] if gas_shortage else [], storage_changes=[ BalStorageSlot( slot=0x00, slot_changes=[ - BalStorageChange(tx_index=1, post_value=1), + BalStorageChange( + block_access_index=1, post_value=1 + ), ], ), ] @@ -262,7 +264,11 @@ def expected_block_access_list( ), callee_address: BalAccountExpectation( balance_changes=( - [BalBalanceChange(tx_index=1, post_balance=2)] + [ + BalBalanceChange( + block_access_index=1, post_balance=2 + ) + ] if not gas_shortage and callee_opcode == Op.CALL else [] ), diff --git a/tests/prague/eip7702_set_code_tx/test_gas.py b/tests/prague/eip7702_set_code_tx/test_gas.py index b415684bf7..93c2747019 100644 --- a/tests/prague/eip7702_set_code_tx/test_gas.py +++ b/tests/prague/eip7702_set_code_tx/test_gas.py @@ -1279,7 +1279,9 @@ def test_call_to_pre_authorized_oog( # delegation is NOT tracked (OOG before reading it) account_expectations = { tx.sender: BalAccountExpectation( - nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], ), callee_address: BalAccountExpectation.empty(), # read for calculating delegation access cost: From 632044a2aa9a95cd9e60ee368c89251c66e9d8f8 Mon Sep 17 00:00:00 2001 From: fselmo Date: Fri, 12 Dec 2025 14:28:25 -0700 Subject: [PATCH 054/154] fix(test-tests): Avoid hard-coding precompile range for lexicographic test --- .../test_block_access_lists.py | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py 
b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py index f56a142c8b..9f7400b433 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists.py @@ -2557,9 +2557,10 @@ def test_bal_lexicographic_address_ordering( """ Test BAL enforces strict lexicographic byte-wise address ordering. - Addresses: addr_low (0x...01), addr_mid (0x...0100), addr_high (0x01...00). - Endian-trap: addr_endian_low (0x01...02), addr_endian_high (0x02...01). - Contract touches them in reverse order to verify sorting. + Addresses: addr_low (0x...020000), addr_mid (0x...02000000), + addr_high (0x20...00). Endian-trap: addr_endian_low (0x01...02), + addr_endian_high (0x02...01). Contract touches them in reverse + order to verify sorting. Expected BAL order: low < mid < high < endian_low < endian_high. Catches endianness bugs in address comparison. @@ -2568,12 +2569,13 @@ def test_bal_lexicographic_address_ordering( # Create addresses with specific byte patterns for lexicographic testing # In lexicographic (byte-wise) order: low < mid < high - # addr_low: 0x00...01 (rightmost byte = 0x01) - # addr_mid: 0x00...0100 (second-rightmost byte = 0x01) - # addr_high: 0x01...00 (leftmost byte = 0x01) - addr_low = Address("0x0000000000000000000000000000000000000001") - addr_mid = Address("0x0000000000000000000000000000000000000100") - addr_high = Address("0x0100000000000000000000000000000000000000") + # addr_low: 0x00...020000 (0x02 in third-rightmost byte) + # addr_mid: 0x00...02000000 (0x02 in fourth-rightmost byte) + # addr_high: 0x20...00 (leftmost byte = 0x20) + # Note: Using 0x2xxxx addresses to avoid precompiles (0x01-0x11, 0x100) + addr_low = Address("0x0000000000000000000000000000000000020000") + addr_mid = Address("0x0000000000000000000000000000000002000000") + addr_high = Address("0x2000000000000000000000000000000000000000") # Endian-trap addresses: byte-reversals to catch byte-order bugs # addr_endian_low: 0x01...02 (0x01 at byte 0, 0x02 at byte 19) From 35a4bb5819c0a26bf2650f3fb23884985bcdb310 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Toni=20Wahrst=C3=A4tter?= <51536394+nerolation@users.noreply.github.com> Date: Wed, 17 Dec 2025 23:33:47 +0100 Subject: [PATCH 055/154] feat(specs): EIP-7928 move bal from payload (#1917) * feat(specs): EIP-7928 move bal from payload * remove BAL from stf * fix linter --- src/ethereum/forks/amsterdam/blocks.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/src/ethereum/forks/amsterdam/blocks.py b/src/ethereum/forks/amsterdam/blocks.py index 0d14066f47..143b3d18fe 100644 --- a/src/ethereum/forks/amsterdam/blocks.py +++ b/src/ethereum/forks/amsterdam/blocks.py @@ -19,7 +19,6 @@ from ethereum.crypto.hash import Hash32 -from .block_access_lists.rlp_types import BlockAccessList from .fork_types import Address, Bloom, Root from .transactions import ( AccessListTransaction, @@ -306,13 +305,6 @@ class Block: A tuple of withdrawals processed in this block. """ - block_access_list: BlockAccessList - """ - Block Access List containing all accounts and storage locations accessed - during block execution. Introduced in [EIP-7928]. 
- [EIP-7928]: https://eips.ethereum.org/EIPS/eip-7928 - """ - @slotted_freezable @dataclass From 73392177983c60c81754499ce826c59133bce8d7 Mon Sep 17 00:00:00 2001 From: felipe Date: Mon, 29 Dec 2025 17:37:05 -0700 Subject: [PATCH 056/154] fix(test-tests): Use `ZeroPaddedHexNumber` instead of `HexNumber` for BALs (#1922) * fix(test-tests): Use ZeroPaddedHexNumber instead of HexNumber for consistency * chore(test-tests): add bal serialization roundtrip & 0-padded hex test (#33) * refactor(test): Remove unnecessary instantiation of classes to pydantic types --------- Co-authored-by: danceratopz --- .../block_access_list/account_changes.py | 28 ++--- .../test_types/block_access_list/modifiers.py | 37 ++++-- .../test_block_access_list_expectation.py | 8 +- .../test_block_access_list_serialization.py | 86 +++++++++++++ .../tests/test_block_access_list_t8n.py | 116 ++++++++++-------- 5 files changed, 199 insertions(+), 76 deletions(-) create mode 100644 packages/testing/src/execution_testing/test_types/tests/test_block_access_list_serialization.py diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/account_changes.py b/packages/testing/src/execution_testing/test_types/block_access_list/account_changes.py index 4794c77d69..5bf4461457 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/account_changes.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/account_changes.py @@ -13,19 +13,19 @@ Address, Bytes, CamelModel, - HexNumber, RLPSerializable, + ZeroPaddedHexNumber, ) class BalNonceChange(CamelModel, RLPSerializable): """Represents a nonce change in the block access list.""" - block_access_index: HexNumber = Field( - HexNumber(1), + block_access_index: ZeroPaddedHexNumber = Field( + ZeroPaddedHexNumber(1), description="Transaction index where the change occurred", ) - post_nonce: HexNumber = Field( + post_nonce: ZeroPaddedHexNumber = Field( ..., description="Nonce value after the transaction" ) @@ -35,11 +35,11 @@ class BalNonceChange(CamelModel, RLPSerializable): class BalBalanceChange(CamelModel, RLPSerializable): """Represents a balance change in the block access list.""" - block_access_index: HexNumber = Field( - HexNumber(1), + block_access_index: ZeroPaddedHexNumber = Field( + ZeroPaddedHexNumber(1), description="Transaction index where the change occurred", ) - post_balance: HexNumber = Field( + post_balance: ZeroPaddedHexNumber = Field( ..., description="Balance after the transaction" ) @@ -49,8 +49,8 @@ class BalBalanceChange(CamelModel, RLPSerializable): class BalCodeChange(CamelModel, RLPSerializable): """Represents a code change in the block access list.""" - block_access_index: HexNumber = Field( - HexNumber(1), + block_access_index: ZeroPaddedHexNumber = Field( + ZeroPaddedHexNumber(1), description="Transaction index where the change occurred", ) new_code: Bytes = Field(..., description="New code bytes") @@ -61,11 +61,11 @@ class BalCodeChange(CamelModel, RLPSerializable): class BalStorageChange(CamelModel, RLPSerializable): """Represents a change to a specific storage slot.""" - block_access_index: HexNumber = Field( - HexNumber(1), + block_access_index: ZeroPaddedHexNumber = Field( + ZeroPaddedHexNumber(1), description="Transaction index where the change occurred", ) - post_value: HexNumber = Field( + post_value: ZeroPaddedHexNumber = Field( ..., description="Value after the transaction" ) @@ -75,7 +75,7 @@ class BalStorageChange(CamelModel, RLPSerializable): class BalStorageSlot(CamelModel, 
RLPSerializable): """Represents all changes to a specific storage slot.""" - slot: HexNumber = Field(..., description="Storage slot key") + slot: ZeroPaddedHexNumber = Field(..., description="Storage slot key") slot_changes: List[BalStorageChange] = Field( default_factory=list, description="List of changes to this slot" ) @@ -99,7 +99,7 @@ class BalAccountChange(CamelModel, RLPSerializable): storage_changes: List[BalStorageSlot] = Field( default_factory=list, description="List of storage changes" ) - storage_reads: List[HexNumber] = Field( + storage_reads: List[ZeroPaddedHexNumber] = Field( default_factory=list, description="List of storage slots that were read", ) diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py b/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py index d28f7099ba..b71f8e73f8 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py @@ -8,7 +8,10 @@ from typing import Any, Callable, List, Optional -from execution_testing.base_types import Address, HexNumber +from execution_testing.base_types import ( + Address, + ZeroPaddedHexNumber, +) from .. import BalCodeChange from . import ( @@ -257,20 +260,28 @@ def transform(bal: BlockAccessList) -> BlockAccessList: for nonce_change in new_account.nonce_changes: if nonce_change.block_access_index == tx1: nonce_indices[tx1] = True - nonce_change.block_access_index = HexNumber(tx2) + nonce_change.block_access_index = ZeroPaddedHexNumber( + tx2 + ) elif nonce_change.block_access_index == tx2: nonce_indices[tx2] = True - nonce_change.block_access_index = HexNumber(tx1) + nonce_change.block_access_index = ZeroPaddedHexNumber( + tx1 + ) # Swap in balance changes if new_account.balance_changes: for balance_change in new_account.balance_changes: if balance_change.block_access_index == tx1: balance_indices[tx1] = True - balance_change.block_access_index = HexNumber(tx2) + balance_change.block_access_index = ( + ZeroPaddedHexNumber(tx2) + ) elif balance_change.block_access_index == tx2: balance_indices[tx2] = True - balance_change.block_access_index = HexNumber(tx1) + balance_change.block_access_index = ( + ZeroPaddedHexNumber(tx1) + ) # Swap in storage changes (nested structure) if new_account.storage_changes: @@ -278,10 +289,14 @@ def transform(bal: BlockAccessList) -> BlockAccessList: for storage_change in storage_slot.slot_changes: if storage_change.block_access_index == tx1: balance_indices[tx1] = True - storage_change.block_access_index = HexNumber(tx2) + storage_change.block_access_index = ( + ZeroPaddedHexNumber(tx2) + ) elif storage_change.block_access_index == tx2: balance_indices[tx2] = True - storage_change.block_access_index = HexNumber(tx1) + storage_change.block_access_index = ( + ZeroPaddedHexNumber(tx1) + ) # Note: storage_reads is just a list of StorageKey, no block_access_index to # swap @@ -291,10 +306,14 @@ def transform(bal: BlockAccessList) -> BlockAccessList: for code_change in new_account.code_changes: if code_change.block_access_index == tx1: code_indices[tx1] = True - code_change.block_access_index = HexNumber(tx2) + code_change.block_access_index = ZeroPaddedHexNumber( + tx2 + ) elif code_change.block_access_index == tx2: code_indices[tx2] = True - code_change.block_access_index = HexNumber(tx1) + code_change.block_access_index = ZeroPaddedHexNumber( + tx1 + ) new_root.append(new_account) diff --git 
a/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_expectation.py b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_expectation.py index f8b16d4474..ae495e9bd7 100644 --- a/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_expectation.py +++ b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_expectation.py @@ -567,7 +567,7 @@ def test_absent_values_nonce_changes(has_change_should_raise: bool) -> None: if has_change_should_raise: with pytest.raises( - Exception, match="Unexpected nonce change found at tx 0x2" + Exception, match="Unexpected nonce change found at tx 0x02" ): expectation.verify_against(actual_bal) else: @@ -614,7 +614,7 @@ def test_absent_values_balance_changes(has_change_should_raise: bool) -> None: if has_change_should_raise: with pytest.raises( Exception, - match="Unexpected balance change found at tx 0x2", + match="Unexpected balance change found at tx 0x02", ): expectation.verify_against(actual_bal) else: @@ -759,7 +759,7 @@ def test_absent_values_code_changes(has_change_should_raise: bool) -> None: if has_change_should_raise: with pytest.raises( - Exception, match="Unexpected code change found at tx 0x2" + Exception, match="Unexpected code change found at tx 0x02" ): expectation.verify_against(actual_bal) else: @@ -898,7 +898,7 @@ def test_absent_values_with_multiple_tx_indices() -> None: ) with pytest.raises( - Exception, match="Unexpected nonce change found at tx 0x1" + Exception, match="Unexpected nonce change found at tx 0x01" ): expectation_fail.verify_against(actual_bal) diff --git a/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_serialization.py b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_serialization.py new file mode 100644 index 0000000000..a42d86ff65 --- /dev/null +++ b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_serialization.py @@ -0,0 +1,86 @@ +""" +Tests for BlockAccessList serialization format. + +These tests verify that BAL models serialize to JSON with the correct +format, particularly zero-padded hex strings. +""" + +from execution_testing.base_types import Address, Bytes +from execution_testing.test_types.block_access_list import ( + BalAccountChange, + BalBalanceChange, + BalCodeChange, + BalNonceChange, + BalStorageChange, + BalStorageSlot, + BlockAccessList, +) + + +def test_bal_serialization_roundtrip_zero_padded_hex() -> None: + """ + Test that BAL serializes with zero-padded hex format and round-trips correctly. + + This verifies that values like 12 serialize as "0x0c" (not "0xc"), which is + required for consistency with other test vector fields. 
+ """ + addr = Address(0xA) + + original = BlockAccessList( + [ + BalAccountChange( + address=addr, + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=12), + BalNonceChange(block_access_index=2, post_nonce=255), + ], + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=15), + ], + code_changes=[ + BalCodeChange( + block_access_index=3, new_code=Bytes(b"\xde\xad") + ), + ], + storage_changes=[ + BalStorageSlot( + slot=12, + slot_changes=[ + BalStorageChange( + block_access_index=1, post_value=255 + ), + BalStorageChange( + block_access_index=2, post_value=4096 + ), + ], + ), + ], + storage_reads=[1, 15, 256], + ) + ] + ) + + # Serialize to JSON + json_data = original.model_dump(mode="json") + account_data = json_data[0] + + # Verify zero-padded hex format (0x0c not 0xc, 0x01 not 0x1) + assert account_data["nonce_changes"][0]["block_access_index"] == "0x01" + assert account_data["nonce_changes"][0]["post_nonce"] == "0x0c" + assert account_data["nonce_changes"][1]["post_nonce"] == "0xff" + assert account_data["balance_changes"][0]["post_balance"] == "0x0f" + assert account_data["code_changes"][0]["block_access_index"] == "0x03" + assert account_data["storage_changes"][0]["slot"] == "0x0c" + assert ( + account_data["storage_changes"][0]["slot_changes"][0]["post_value"] + == "0xff" + ) + assert ( + account_data["storage_changes"][0]["slot_changes"][1]["post_value"] + == "0x1000" + ) + assert account_data["storage_reads"] == ["0x01", "0x0f", "0x0100"] + + # Round-trip: deserialize and verify equality + restored = BlockAccessList.model_validate(json_data) + assert restored == original diff --git a/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py index c33cf8a2c7..942abf6bb2 100644 --- a/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py +++ b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py @@ -9,7 +9,7 @@ import pytest -from execution_testing.base_types import Address, HexNumber, StorageKey +from execution_testing.base_types import Address from execution_testing.test_types.block_access_list import ( BalAccountChange, BalBalanceChange, @@ -61,9 +61,9 @@ def test_bal_storage_slot_ordering() -> None: BalAccountChange( address=addr, storage_changes=[ - BalStorageSlot(slot=StorageKey(0), slot_changes=[]), - BalStorageSlot(slot=StorageKey(1), slot_changes=[]), - BalStorageSlot(slot=StorageKey(2), slot_changes=[]), + BalStorageSlot(slot=0, slot_changes=[]), + BalStorageSlot(slot=1, slot_changes=[]), + BalStorageSlot(slot=2, slot_changes=[]), ], ) ] @@ -76,9 +76,9 @@ def test_bal_storage_slot_ordering() -> None: BalAccountChange( address=addr, storage_changes=[ - BalStorageSlot(slot=StorageKey(0), slot_changes=[]), - BalStorageSlot(slot=StorageKey(2), slot_changes=[]), - BalStorageSlot(slot=StorageKey(1), slot_changes=[]), + BalStorageSlot(slot=0, slot_changes=[]), + BalStorageSlot(slot=2, slot_changes=[]), + BalStorageSlot(slot=1, slot_changes=[]), ], ) ] @@ -100,7 +100,7 @@ def test_bal_storage_reads_ordering() -> None: [ BalAccountChange( address=addr, - storage_reads=[StorageKey(0), StorageKey(1), StorageKey(2)], + storage_reads=[0, 1, 2], ) ] ) @@ -111,7 +111,7 @@ def test_bal_storage_reads_ordering() -> None: [ BalAccountChange( address=addr, - storage_reads=[StorageKey(0), StorageKey(2), StorageKey(1)], + storage_reads=[0, 2, 1], ) ] ) @@ -142,59 +142,71 @@ def 
test_bal_block_access_indices_ordering(field_name: str) -> None: if field_name == "nonce_changes": changes_valid = [ BalNonceChange( - block_access_index=HexNumber(1), post_nonce=HexNumber(1) + block_access_index=1, + post_nonce=1, ), BalNonceChange( - block_access_index=HexNumber(2), post_nonce=HexNumber(2) + block_access_index=2, + post_nonce=2, ), BalNonceChange( - block_access_index=HexNumber(3), post_nonce=HexNumber(3) + block_access_index=3, + post_nonce=3, ), ] changes_invalid = [ BalNonceChange( - block_access_index=HexNumber(1), post_nonce=HexNumber(1) + block_access_index=1, + post_nonce=1, ), BalNonceChange( - block_access_index=HexNumber(3), post_nonce=HexNumber(3) + block_access_index=3, + post_nonce=3, ), BalNonceChange( - block_access_index=HexNumber(2), post_nonce=HexNumber(2) + block_access_index=2, + post_nonce=2, ), ] elif field_name == "balance_changes": changes_valid = [ BalBalanceChange( - block_access_index=HexNumber(1), post_balance=HexNumber(100) + block_access_index=1, + post_balance=100, ), BalBalanceChange( - block_access_index=HexNumber(2), post_balance=HexNumber(200) + block_access_index=2, + post_balance=200, ), BalBalanceChange( - block_access_index=HexNumber(3), post_balance=HexNumber(300) + block_access_index=3, + post_balance=300, ), ] changes_invalid = [ BalBalanceChange( - block_access_index=HexNumber(1), post_balance=HexNumber(100) + block_access_index=1, + post_balance=100, ), BalBalanceChange( - block_access_index=HexNumber(3), post_balance=HexNumber(300) + block_access_index=3, + post_balance=300, ), BalBalanceChange( - block_access_index=HexNumber(2), post_balance=HexNumber(200) + block_access_index=2, + post_balance=200, ), ] elif field_name == "code_changes": changes_valid = [ - BalCodeChange(block_access_index=HexNumber(1), new_code=b"code1"), - BalCodeChange(block_access_index=HexNumber(2), new_code=b"code2"), - BalCodeChange(block_access_index=HexNumber(3), new_code=b"code3"), + BalCodeChange(block_access_index=1, new_code=b"code1"), + BalCodeChange(block_access_index=2, new_code=b"code2"), + BalCodeChange(block_access_index=3, new_code=b"code3"), ] changes_invalid = [ - BalCodeChange(block_access_index=HexNumber(1), new_code=b"code1"), - BalCodeChange(block_access_index=HexNumber(3), new_code=b"code3"), - BalCodeChange(block_access_index=HexNumber(2), new_code=b"code2"), + BalCodeChange(block_access_index=1, new_code=b"code1"), + BalCodeChange(block_access_index=3, new_code=b"code3"), + BalCodeChange(block_access_index=2, new_code=b"code2"), ] bal_valid = BlockAccessList( @@ -229,34 +241,40 @@ def test_bal_duplicate_block_access_indices(field_name: str) -> None: if field_name == "nonce_changes": changes = [ BalNonceChange( - block_access_index=HexNumber(1), post_nonce=HexNumber(1) + block_access_index=1, + post_nonce=1, ), BalNonceChange( - block_access_index=HexNumber(1), post_nonce=HexNumber(2) + block_access_index=1, + post_nonce=2, ), # duplicate block_access_index BalNonceChange( - block_access_index=HexNumber(2), post_nonce=HexNumber(3) + block_access_index=2, + post_nonce=3, ), ] elif field_name == "balance_changes": changes = [ BalBalanceChange( - block_access_index=HexNumber(1), post_balance=HexNumber(100) + block_access_index=1, + post_balance=100, ), BalBalanceChange( - block_access_index=HexNumber(1), post_balance=HexNumber(200) + block_access_index=1, + post_balance=200, ), # duplicate block_access_index BalBalanceChange( - block_access_index=HexNumber(2), post_balance=HexNumber(300) + block_access_index=2, + post_balance=300, ), 
] elif field_name == "code_changes": changes = [ - BalCodeChange(block_access_index=HexNumber(1), new_code=b"code1"), + BalCodeChange(block_access_index=1, new_code=b"code1"), BalCodeChange( - block_access_index=HexNumber(1), new_code=b"" + block_access_index=1, new_code=b"" ), # duplicate block_access_index - BalCodeChange(block_access_index=HexNumber(2), new_code=b"code2"), + BalCodeChange(block_access_index=2, new_code=b"code2"), ] bal = BlockAccessList( @@ -283,19 +301,19 @@ def test_bal_storage_duplicate_block_access_indices() -> None: address=addr, storage_changes=[ BalStorageSlot( - slot=StorageKey(0), + slot=0, slot_changes=[ BalStorageChange( - block_access_index=HexNumber(1), - post_value=StorageKey(100), + block_access_index=1, + post_value=100, ), BalStorageChange( - block_access_index=HexNumber(1), - post_value=StorageKey(200), + block_access_index=1, + post_value=200, ), # duplicate block_access_index BalStorageChange( - block_access_index=HexNumber(2), - post_value=StorageKey(300), + block_access_index=2, + post_value=300, ), ], ) @@ -325,12 +343,12 @@ def test_bal_multiple_violations() -> None: address=bob, # Should come after alice nonce_changes=[ BalNonceChange( - block_access_index=HexNumber(1), - post_nonce=HexNumber(1), + block_access_index=1, + post_nonce=1, ), BalNonceChange( - block_access_index=HexNumber(1), - post_nonce=HexNumber(2), + block_access_index=1, + post_nonce=2, ), # duplicate ], ), @@ -360,8 +378,8 @@ def test_bal_single_account_valid() -> None: address=Address(0xA), nonce_changes=[ BalNonceChange( - block_access_index=HexNumber(1), - post_nonce=HexNumber(1), + block_access_index=1, + post_nonce=1, ) ], ) From bd0f217ef30b9bad856c398851a3b5f36eb45cd5 Mon Sep 17 00:00:00 2001 From: felipe Date: Tue, 30 Dec 2025 13:57:01 -0700 Subject: [PATCH 057/154] =?UTF-8?q?refactor(spec-specs):=20Refactor=20spec?= =?UTF-8?q?s=20to=20be=20more=20coherent=20wrt=20gas=20acco=E2=80=A6=20(#1?= =?UTF-8?q?897)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor(spec-specs): Refactor specs to be more coherent wrt gas accounting * feat(test): BAL test for call with value in static context --- .../forks/amsterdam/vm/eoa_delegation.py | 53 +--- .../amsterdam/vm/instructions/storage.py | 53 ++-- .../forks/amsterdam/vm/instructions/system.py | 300 ++++++++---------- .../test_block_access_lists_opcodes.py | 74 +++++ .../test_cases.md | 1 + 5 files changed, 236 insertions(+), 245 deletions(-) diff --git a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py index 1f1aac9d97..4509c5917f 100644 --- a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py +++ b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py @@ -5,7 +5,6 @@ from typing import Optional, Tuple from ethereum_rlp import rlp -from ethereum_types.bytes import Bytes from ethereum_types.numeric import U64, U256, Uint from ethereum.crypto.elliptic_curve import SECP256K1N, secp256k1_recover @@ -127,13 +126,9 @@ def recover_authority(authorization: Authorization) -> Address: def calculate_delegation_cost( evm: Evm, address: Address -) -> Tuple[bool, Address, Optional[Address], Uint]: +) -> Tuple[bool, Address, Uint]: """ - Check if address has delegation and calculate delegation target gas cost. - - This function reads the original account's code to check for delegation - and tracks it in state_changes. It calculates the delegation target's - gas cost but does NOT read the delegation target yet. 
+ Get the delegation address and the cost of access from the address. Parameters ---------- @@ -144,9 +139,8 @@ def calculate_delegation_cost( Returns ------- - delegation_info : `Tuple[bool, Address, Optional[Address], Uint]` - (is_delegated, original_address, delegated_address_or_none, - delegation_gas_cost) + delegation : `Tuple[bool, Address, Uint]` + The delegation address and access gas cost. """ state = evm.message.block_env.state @@ -155,51 +149,16 @@ def calculate_delegation_cost( track_address(evm.state_changes, address) if not is_valid_delegation(code): - return False, address, None, Uint(0) + return False, address, Uint(0) delegated_address = Address(code[EOA_DELEGATION_MARKER_LENGTH:]) - # Calculate gas cost for delegation target access if delegated_address in evm.accessed_addresses: delegation_gas_cost = GAS_WARM_ACCESS else: delegation_gas_cost = GAS_COLD_ACCOUNT_ACCESS - return True, address, delegated_address, delegation_gas_cost - - -def read_delegation_target(evm: Evm, delegated_address: Address) -> Bytes: - """ - Read the delegation target's code and track the access. - - Should ONLY be called AFTER verifying we have gas for the access. - - This function: - 1. Reads the delegation target's code from state - 2. Adds it to accessed_addresses (if not already there) - 3. Tracks it in state_changes for BAL - - Parameters - ---------- - evm : `Evm` - The execution frame. - delegated_address : `Address` - The delegation target address. - - Returns - ------- - code : `Bytes` - The delegation target's code. - - """ - state = evm.message.block_env.state - - if delegated_address not in evm.accessed_addresses: - evm.accessed_addresses.add(delegated_address) - - track_address(evm.state_changes, delegated_address) - - return get_account(state, delegated_address).code + return True, delegated_address, delegation_gas_cost def set_delegation(message: Message) -> U256: diff --git a/src/ethereum/forks/amsterdam/vm/instructions/storage.py b/src/ethereum/forks/amsterdam/vm/instructions/storage.py index de7ef935f5..18afa2a2ba 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/storage.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/storage.py @@ -55,20 +55,16 @@ def sload(evm: Evm) -> None: key = pop(evm.stack).to_be_bytes32() # GAS - is_cold_access = ( - evm.message.current_target, - key, - ) not in evm.accessed_storage_keys - gas_cost = GAS_COLD_SLOAD if is_cold_access else GAS_WARM_ACCESS - - charge_gas(evm, gas_cost) + if (evm.message.current_target, key) in evm.accessed_storage_keys: + charge_gas(evm, GAS_WARM_ACCESS) + else: + evm.accessed_storage_keys.add((evm.message.current_target, key)) + charge_gas(evm, GAS_COLD_SLOAD) # OPERATION - state = evm.message.block_env.state - value = get_storage(state, evm.message.current_target, key) - - if is_cold_access: - evm.accessed_storage_keys.add((evm.message.current_target, key)) + value = get_storage( + evm.message.block_env.state, evm.message.current_target, key + ) track_storage_read( evm.state_changes, evm.message.current_target, @@ -91,6 +87,9 @@ def sstore(evm: Evm) -> None: The current EVM frame. 
""" + if evm.message.is_static: + raise WriteInStaticContext + # STACK key = pop(evm.stack).to_be_bytes32() new_value = pop(evm.stack) @@ -98,28 +97,17 @@ def sstore(evm: Evm) -> None: # check we have at least the stipend gas check_gas(evm, GAS_CALL_STIPEND + Uint(1)) - # check static context before accessing storage - if evm.message.is_static: - raise WriteInStaticContext - - # GAS - gas_cost = Uint(0) - is_cold_access = ( - evm.message.current_target, - key, - ) not in evm.accessed_storage_keys - - if is_cold_access: - gas_cost += GAS_COLD_SLOAD - state = evm.message.block_env.state original_value = get_storage_original( state, evm.message.current_target, key ) current_value = get_storage(state, evm.message.current_target, key) - if is_cold_access: + gas_cost = Uint(0) + + if (evm.message.current_target, key) not in evm.accessed_storage_keys: evm.accessed_storage_keys.add((evm.message.current_target, key)) + gas_cost += GAS_COLD_SLOAD capture_pre_storage( evm.message.tx_env.state_changes, @@ -141,9 +129,7 @@ def sstore(evm: Evm) -> None: else: gas_cost += GAS_WARM_ACCESS - charge_gas(evm, gas_cost) - - # REFUND COUNTER + # Refund Counter Calculation if current_value != new_value: if original_value != 0 and current_value != 0 and new_value == 0: # Storage is cleared for the first time in the transaction @@ -164,7 +150,7 @@ def sstore(evm: Evm) -> None: GAS_STORAGE_UPDATE - GAS_COLD_SLOAD - GAS_WARM_ACCESS ) - # OPERATION + charge_gas(evm, gas_cost) set_storage(state, evm.message.current_target, key, new_value) track_storage_write( evm.state_changes, @@ -214,14 +200,15 @@ def tstore(evm: Evm) -> None: The current EVM frame. """ + if evm.message.is_static: + raise WriteInStaticContext + # STACK key = pop(evm.stack).to_be_bytes32() new_value = pop(evm.stack) # GAS charge_gas(evm, GAS_WARM_ACCESS) - if evm.message.is_static: - raise WriteInStaticContext set_transient_storage( evm.message.tx_env.transient_storage, evm.message.current_target, diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index e5e2ec306f..02604f68f2 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -16,7 +16,6 @@ from ethereum.utils.numeric import ceil32 -# track_address_access removed - now using state_changes.track_address() from ...fork_types import Address from ...state import ( account_has_code_or_nonce, @@ -41,7 +40,6 @@ ) from ...vm.eoa_delegation import ( calculate_delegation_cost, - read_delegation_target, ) from .. 
import ( Evm, @@ -169,7 +167,6 @@ def generic_create( is_create=True, state_changes=child_state_changes, ) - child_evm = process_create_message(child_message) if child_evm.error: @@ -407,6 +404,9 @@ def call(evm: Evm) -> None: memory_output_start_position = pop(evm.stack) memory_output_size = pop(evm.stack) + if evm.message.is_static and value != U256(0): + raise WriteInStaticContext + # GAS extend_memory = calculate_gas_extend_memory( evm.memory, @@ -417,69 +417,56 @@ def call(evm: Evm) -> None: ) is_cold_access = to not in evm.accessed_addresses - access_gas_cost = ( - GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS - ) + if is_cold_access: + access_gas_cost = GAS_COLD_ACCOUNT_ACCESS + else: + access_gas_cost = GAS_WARM_ACCESS transfer_gas_cost = Uint(0) if value == 0 else GAS_CALL_VALUE + # check static gas before state access check_gas( evm, access_gas_cost + transfer_gas_cost + extend_memory.cost, ) - # need to access account to check if account is alive, check gas before - create_gas_cost = GAS_NEW_ACCOUNT - if value == 0 or is_account_alive(evm.message.block_env.state, to): - create_gas_cost = Uint(0) - + # STATE ACCESS + state = evm.message.block_env.state if is_cold_access: evm.accessed_addresses.add(to) + create_gas_cost = GAS_NEW_ACCOUNT + if value == 0 or is_account_alive(state, to): + create_gas_cost = Uint(0) + + extra_gas = access_gas_cost + transfer_gas_cost + create_gas_cost ( is_delegated, - original_address, - delegated_address, - delegation_gas_cost, + code_address, + delegation_access_cost, ) = calculate_delegation_cost(evm, to) - if is_delegated and delegation_gas_cost > Uint(0): - assert delegated_address is not None - message_call_gas = calculate_message_call_gas( - value, - gas, - Uint(evm.gas_left), - extend_memory.cost, - access_gas_cost - + transfer_gas_cost - + create_gas_cost - + delegation_gas_cost, - ) - check_gas(evm, message_call_gas.cost + extend_memory.cost) - code = read_delegation_target(evm, delegated_address) - final_address = delegated_address - else: - message_call_gas = calculate_message_call_gas( - value, - gas, - Uint(evm.gas_left), - extend_memory.cost, - access_gas_cost + create_gas_cost + transfer_gas_cost, - ) - check_gas(evm, message_call_gas.cost + extend_memory.cost) - code = get_account(evm.message.block_env.state, to).code - final_address = to - - code_address = final_address - disable_precompiles = is_delegated - + if is_delegated: + # check enough gas for delegation access + extra_gas += delegation_access_cost + check_gas(evm, extra_gas + extend_memory.cost) + track_address(evm.state_changes, code_address) + if code_address not in evm.accessed_addresses: + evm.accessed_addresses.add(code_address) + + code = get_account(state, code_address).code + + message_call_gas = calculate_message_call_gas( + value, + gas, + Uint(evm.gas_left), + extend_memory.cost, + extra_gas, + ) charge_gas(evm, message_call_gas.cost + extend_memory.cost) - if evm.message.is_static and value != U256(0): - raise WriteInStaticContext + evm.memory += b"\x00" * extend_memory.expand_by - sender_balance = get_account( - evm.message.block_env.state, evm.message.current_target - ).balance + sender_balance = get_account(state, evm.message.current_target).balance if sender_balance < value: push(evm.stack, U256(0)) evm.return_data = b"" @@ -499,7 +486,7 @@ def call(evm: Evm) -> None: memory_output_start_position, memory_output_size, code, - disable_precompiles, + is_delegated, ) # PROGRAM COUNTER @@ -537,55 +524,48 @@ def callcode(evm: Evm) -> None: ) 
is_cold_access = code_address not in evm.accessed_addresses - access_gas_cost = ( - GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS - ) if is_cold_access: - evm.accessed_addresses.add(code_address) + access_gas_cost = GAS_COLD_ACCOUNT_ACCESS + else: + access_gas_cost = GAS_WARM_ACCESS transfer_gas_cost = Uint(0) if value == 0 else GAS_CALL_VALUE + # check static gas before state access check_gas( evm, access_gas_cost + extend_memory.cost + transfer_gas_cost, ) - # need to access account to get delegation code, check gas before + # STATE ACCESS + state = evm.message.block_env.state + if is_cold_access: + evm.accessed_addresses.add(code_address) + + extra_gas = access_gas_cost + transfer_gas_cost ( is_delegated, - original_address, - delegated_address, - delegation_gas_cost, + code_address, + delegation_access_cost, ) = calculate_delegation_cost(evm, code_address) - if is_delegated and delegation_gas_cost > Uint(0): - assert delegated_address is not None - # Recalculate with delegation cost and check gas - message_call_gas = calculate_message_call_gas( - value, - gas, - Uint(evm.gas_left), - extend_memory.cost, - access_gas_cost + transfer_gas_cost + delegation_gas_cost, - ) - check_gas(evm, message_call_gas.cost + extend_memory.cost) - code = read_delegation_target(evm, delegated_address) - final_address = delegated_address - else: - message_call_gas = calculate_message_call_gas( - value, - gas, - Uint(evm.gas_left), - extend_memory.cost, - access_gas_cost + transfer_gas_cost, - ) - check_gas(evm, message_call_gas.cost + extend_memory.cost) - code = get_account(evm.message.block_env.state, code_address).code - final_address = code_address - - code_address = final_address - disable_precompiles = is_delegated - + if is_delegated: + # check enough gas for delegation access + extra_gas += delegation_access_cost + check_gas(evm, extra_gas + extend_memory.cost) + track_address(evm.state_changes, code_address) + if code_address not in evm.accessed_addresses: + evm.accessed_addresses.add(code_address) + + code = get_account(state, code_address).code + + message_call_gas = calculate_message_call_gas( + value, + gas, + Uint(evm.gas_left), + extend_memory.cost, + extra_gas, + ) charge_gas(evm, message_call_gas.cost + extend_memory.cost) # OPERATION @@ -623,7 +603,7 @@ def callcode(evm: Evm) -> None: memory_output_start_position, memory_output_size, code, - disable_precompiles, + is_delegated, ) # PROGRAM COUNTER @@ -648,26 +628,26 @@ def selfdestruct(evm: Evm) -> None: # GAS gas_cost = GAS_SELF_DESTRUCT + is_cold_access = beneficiary not in evm.accessed_addresses if is_cold_access: gas_cost += GAS_COLD_ACCOUNT_ACCESS + # check access gas cost before state access check_gas(evm, gas_cost) - # is_account_alive requires account to be accessed, check gas before + # STATE ACCESS + state = evm.message.block_env.state + if is_cold_access: + evm.accessed_addresses.add(beneficiary) + track_address(evm.state_changes, beneficiary) + if ( - not is_account_alive(evm.message.block_env.state, beneficiary) - and get_account( - evm.message.block_env.state, evm.message.current_target - ).balance - != 0 + not is_account_alive(state, beneficiary) + and get_account(state, evm.message.current_target).balance != 0 ): gas_cost += GAS_SELF_DESTRUCT_NEW_ACCOUNT - if is_cold_access: - evm.accessed_addresses.add(beneficiary) - track_address(evm.state_changes, beneficiary) - charge_gas(evm, gas_cost) state = evm.message.block_env.state @@ -703,6 +683,8 @@ def selfdestruct(evm: Evm) -> None: # register account for 
deletion only if it was created # in the same transaction if originator in state.created_accounts: + # If beneficiary is the same as originator, then + # the ether is burnt. set_account_balance(state, originator, U256(0)) track_balance_change(evm.state_changes, originator, U256(0)) evm.accounts_to_delete.add(originator) @@ -742,49 +724,43 @@ def delegatecall(evm: Evm) -> None: ) is_cold_access = code_address not in evm.accessed_addresses - access_gas_cost = ( - GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS - ) if is_cold_access: - evm.accessed_addresses.add(code_address) + access_gas_cost = GAS_COLD_ACCOUNT_ACCESS + else: + access_gas_cost = GAS_WARM_ACCESS + # check static gas before state access check_gas(evm, access_gas_cost + extend_memory.cost) - # need to access account to get delegation code, check gas before + # STATE ACCESS + state = evm.message.block_env.state + if is_cold_access: + evm.accessed_addresses.add(code_address) + + extra_gas = access_gas_cost ( is_delegated, - original_address, - delegated_address, - delegation_gas_cost, + code_address, + delegation_access_cost, ) = calculate_delegation_cost(evm, code_address) - if is_delegated and delegation_gas_cost > Uint(0): - assert delegated_address is not None - message_call_gas = calculate_message_call_gas( - U256(0), - gas, - Uint(evm.gas_left), - extend_memory.cost, - access_gas_cost + delegation_gas_cost, - ) - check_gas(evm, message_call_gas.cost + extend_memory.cost) - code = read_delegation_target(evm, delegated_address) - final_address = delegated_address - else: - message_call_gas = calculate_message_call_gas( - U256(0), - gas, - Uint(evm.gas_left), - extend_memory.cost, - access_gas_cost, - ) - check_gas(evm, message_call_gas.cost + extend_memory.cost) - code = get_account(evm.message.block_env.state, code_address).code - final_address = code_address + if is_delegated: + # check enough gas for delegation access + extra_gas += delegation_access_cost + check_gas(evm, extra_gas + extend_memory.cost) + track_address(evm.state_changes, code_address) + if code_address not in evm.accessed_addresses: + evm.accessed_addresses.add(code_address) - code_address = final_address - disable_precompiles = is_delegated + code = get_account(state, code_address).code + message_call_gas = calculate_message_call_gas( + U256(0), + gas, + Uint(evm.gas_left), + extend_memory.cost, + extra_gas, + ) charge_gas(evm, message_call_gas.cost + extend_memory.cost) # OPERATION @@ -803,7 +779,7 @@ def delegatecall(evm: Evm) -> None: memory_output_start_position, memory_output_size, code, - disable_precompiles, + is_delegated, ) # PROGRAM COUNTER @@ -838,49 +814,43 @@ def staticcall(evm: Evm) -> None: ) is_cold_access = to not in evm.accessed_addresses - access_gas_cost = ( - GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS - ) if is_cold_access: - evm.accessed_addresses.add(to) + access_gas_cost = GAS_COLD_ACCOUNT_ACCESS + else: + access_gas_cost = GAS_WARM_ACCESS + # check static gas before state access check_gas(evm, access_gas_cost + extend_memory.cost) - # need to access account to get delegation code, check gas before + # STATE ACCESS + state = evm.message.block_env.state + if is_cold_access: + evm.accessed_addresses.add(to) + + extra_gas = access_gas_cost ( is_delegated, - original_address, - delegated_address, - delegation_gas_cost, + code_address, + delegation_access_cost, ) = calculate_delegation_cost(evm, to) - if is_delegated and delegation_gas_cost > Uint(0): - assert delegated_address is not None - 
message_call_gas = calculate_message_call_gas( - U256(0), - gas, - Uint(evm.gas_left), - extend_memory.cost, - access_gas_cost + delegation_gas_cost, - ) - check_gas(evm, message_call_gas.cost + extend_memory.cost) - code = read_delegation_target(evm, delegated_address) - final_address = delegated_address - else: - message_call_gas = calculate_message_call_gas( - U256(0), - gas, - Uint(evm.gas_left), - extend_memory.cost, - access_gas_cost, - ) - check_gas(evm, message_call_gas.cost + extend_memory.cost) - code = get_account(evm.message.block_env.state, to).code - final_address = to + if is_delegated: + # check enough gas for delegation access + extra_gas += delegation_access_cost + check_gas(evm, extra_gas + extend_memory.cost) + track_address(evm.state_changes, code_address) + if code_address not in evm.accessed_addresses: + evm.accessed_addresses.add(code_address) - code_address = final_address - disable_precompiles = is_delegated + code = get_account(state, code_address).code + message_call_gas = calculate_message_call_gas( + U256(0), + gas, + Uint(evm.gas_left), + extend_memory.cost, + extra_gas, + ) charge_gas(evm, message_call_gas.cost + extend_memory.cost) # OPERATION @@ -899,7 +869,7 @@ def staticcall(evm: Evm) -> None: memory_output_start_position, memory_output_size, code, - disable_precompiles, + is_delegated, ) # PROGRAM COUNTER diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py index 7a07299407..99f2ab58ba 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py @@ -2288,6 +2288,80 @@ def test_bal_sstore_static_context( ) +def test_bal_call_with_value_in_static_context( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL does NOT include target address when CALL with value fails + in static context. The static context check must happen BEFORE any + account access or BAL tracking. 
+ """ + alice = pre.fund_eoa() + + target_starting_balance = 1022 + target = pre.fund_eoa(amount=target_starting_balance) + + caller_starting_balance = 10**18 + caller = pre.deploy_contract( + code=Op.CALL(gas=100_000, address=target, value=1) + Op.STOP, + balance=caller_starting_balance, + ) + + # makes STATICCALL to caller + static_caller = pre.deploy_contract( + code=Op.STATICCALL(gas=500_000, address=caller) + + Op.SSTORE(0, 1) # prove we continued after STATICCALL returned + ) + + tx = Transaction( + sender=alice, + to=static_caller, + gas_limit=1_000_000, + ) + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange( + block_access_index=1, post_nonce=1 + ) + ], + ), + static_caller: BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0x00, + slot_changes=[ + BalStorageChange( + block_access_index=1, post_value=1 + ), + ], + ), + ], + ), + caller: BalAccountExpectation.empty(), + target: None, # explicit check target is NOT in BAL + } + ), + ) + ], + post={ + # STATICCALL returned, continued + static_caller: Account(storage={0: 1}), + # no transfer occurred, balances unchanged + caller: Account(balance=caller_starting_balance), + target: Account(balance=target_starting_balance), + }, + ) + + def test_bal_create_contract_init_revert( blockchain_test: BlockchainTestFiller, pre: Alloc, diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 0432339ead..29d6ebac68 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -112,3 +112,4 @@ | `test_call_to_pre_authorized_oog` | Ensure BAL handles OOG during EIP-7702 delegation access (pre-Amsterdam test with BAL) | Call to delegated account that OOGs before accessing delegation contract | BAL **MUST** include auth_signer (code read for delegation check) but **MUST NOT** include delegation contract (OOG before access) | ✅ Completed | | `test_selfdestruct_created_in_same_tx_with_revert` | Ensure BAL tracks selfdestruct with revert correctly (pre-Amsterdam test with BAL) | Contract created and selfdestructed in same tx with nested revert | BAL **MUST** track storage reads and balance changes for selfdestruct even with reverts | ✅ Completed | | `test_value_transfer_gas_calculation` | Ensure BAL correctly tracks OOG scenarios for CALL/CALLCODE/DELEGATECALL/STATICCALL (pre-Amsterdam test with BAL) | Nested calls with precise gas limits to test OOG behavior. For CALL with OOG: target account is read. For CALLCODE/DELEGATECALL/STATICCALL with OOG: target account **NOT** read (OOG before state access) | For CALL: target in BAL even with OOG. For CALLCODE/DELEGATECALL/STATICCALL: target **NOT** in BAL when OOG (state access deferred until after gas check) | ✅ Completed | +| `test_bal_call_with_value_in_static_context` | Ensure BAL does NOT include target when CALL with value fails in static context | `static_caller` uses `STATICCALL` to call `caller`. `caller` attempts `CALL(target, value=1)` which must fail due to static context. Target is an empty account. | BAL **MUST NOT** include target because static context check (`is_static && value > 0`) must happen BEFORE any account access or BAL tracking. 
BAL **MUST** include `static_caller` with `storage_changes` (STATICCALL succeeded), `caller` with empty changes. | ✅ Completed | From 368ac871d5a7d5ec5316ce7f28e9a2a9829573bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Toni=20Wahrst=C3=A4tter?= <51536394+nerolation@users.noreply.github.com> Date: Thu, 1 Jan 2026 00:22:38 +0100 Subject: [PATCH 058/154] feat(tests): add more 7928 test descriptions (#1815) * feat(tests): add more 7928 test descriptions * chore(test): remove test duplicated by test_bal_create_selfdestruct_to_self_with_call --------- Co-authored-by: fselmo --- tests/amsterdam/eip7928_block_level_access_lists/test_cases.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 29d6ebac68..1059dac54c 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -113,3 +113,5 @@ | `test_selfdestruct_created_in_same_tx_with_revert` | Ensure BAL tracks selfdestruct with revert correctly (pre-Amsterdam test with BAL) | Contract created and selfdestructed in same tx with nested revert | BAL **MUST** track storage reads and balance changes for selfdestruct even with reverts | ✅ Completed | | `test_value_transfer_gas_calculation` | Ensure BAL correctly tracks OOG scenarios for CALL/CALLCODE/DELEGATECALL/STATICCALL (pre-Amsterdam test with BAL) | Nested calls with precise gas limits to test OOG behavior. For CALL with OOG: target account is read. For CALLCODE/DELEGATECALL/STATICCALL with OOG: target account **NOT** read (OOG before state access) | For CALL: target in BAL even with OOG. For CALLCODE/DELEGATECALL/STATICCALL: target **NOT** in BAL when OOG (state access deferred until after gas check) | ✅ Completed | | `test_bal_call_with_value_in_static_context` | Ensure BAL does NOT include target when CALL with value fails in static context | `static_caller` uses `STATICCALL` to call `caller`. `caller` attempts `CALL(target, value=1)` which must fail due to static context. Target is an empty account. | BAL **MUST NOT** include target because static context check (`is_static && value > 0`) must happen BEFORE any account access or BAL tracking. BAL **MUST** include `static_caller` with `storage_changes` (STATICCALL succeeded), `caller` with empty changes. | ✅ Completed | +| `test_bal_7702_double_auth_reset_minimal` | Ensure BAL tracks multiple 7702 nonce increments but filters net-zero code change | Single transaction contains two EIP-7702 authorizations for `Alice`: (1) first auth sets delegation `0xef0100\|\|Oracle`, (2) second auth clears delegation back to empty. Transaction sends 10 wei to `Bob`. Two variants: (a) Self-funded: `Alice` is tx sender (one tx nonce bump + two auth bumps → nonce 0→3). (b) Sponsored: `Relayer` is tx sender (`Alice` only in auths → nonce 0→2 for `Alice`, plus one nonce bump for `Relayer`). | Variant (a): BAL **MUST** include `Alice` with `nonce_changes` 0→3. Variant (b): BAL **MUST** include `Alice` with `nonce_changes` 0→2 and `Relayer` with its own `nonce_changes`. For both variants, BAL **MUST NOT** include `code_changes` for `Alice` (net code is empty), **MUST** include `Bob` with `balance_changes` (receives 10 wei), and `Oracle` **MUST NOT** appear in BAL. 
| 🟡 Planned | +| `test_bal_selfdestruct_send_to_sender` | Ensure BAL tracks SELFDESTRUCT sending all funds back to the tx sender (no burn) | Pre-state: contract `C` exists from a prior transaction with non-empty code and balance = 100 wei. EOA `Alice` sends a transaction calling `C`. `C`’s code executes `SELFDESTRUCT(Alice)`. Under EIP-6780, because `C` was not created in this transaction, SELFDESTRUCT does not delete code or storage; it only transfers the entire 100 wei balance from `C` to `Alice`. Final post-state: `C` still exists with the same code and balance = 0; `Alice`’s balance increased by 100 wei (ignoring gas for this test). | BAL **MUST** include `Alice` with `nonce_changes` (tx sender) and `balance_changes` reflecting receipt of 100 wei, and **MUST** include `C` with `balance_changes` 100→0 and no `code_changes`. BAL **MUST NOT** include any other accounts. This test ensures SELFDESTRUCT-to-sender is modeled as a pure value transfer (no burn, no code deletion). | 🟡 Planned | From ad7be9ee9862dd34774a0ba236ecf87232bccea3 Mon Sep 17 00:00:00 2001 From: raxhvl <10168946+raxhvl@users.noreply.github.com> Date: Wed, 7 Jan 2026 18:19:18 +0100 Subject: [PATCH 059/154] feat(tests): EIP-7928 tests targeting EIP-4788 (#1887) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ✨ feat(tests): 7928x4788 tests * 🧹 chore: Rename bal index * ✨ feat: zero indexed tx * 🥢 nit: Balance change for query contract * 🥢 nit: Exclude system address from BAL * 🥢 nit: Exclude system address from BAL * 📄 docs: Update test description --------- Co-authored-by: raxhvl Co-authored-by: felipe --- .../test_block_access_lists_eip4788.py | 459 ++++++++++++++++++ .../test_cases.md | 5 +- 2 files changed, 463 insertions(+), 1 deletion(-) create mode 100644 tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4788.py diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4788.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4788.py new file mode 100644 index 0000000000..02c0c6fdd9 --- /dev/null +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip4788.py @@ -0,0 +1,459 @@ +"""Tests for the effects of EIP-4788 beacon roots on EIP-7928.""" + +import pytest +from execution_testing import ( + Account, + Address, + Alloc, + BalAccountExpectation, + BalBalanceChange, + BalNonceChange, + BalStorageChange, + BalStorageSlot, + Block, + BlockAccessListExpectation, + BlockchainTestFiller, + Fork, + Hash, + Op, + Transaction, +) + +from tests.cancun.eip4788_beacon_root.spec import Spec, SpecHelpers + +from .spec import ref_spec_7928 + +REFERENCE_SPEC_GIT_PATH = ref_spec_7928.git_path +REFERENCE_SPEC_VERSION = ref_spec_7928.version + +pytestmark = pytest.mark.valid_from("Amsterdam") + +BEACON_ROOTS_ADDRESS = Address(Spec.BEACON_ROOTS_ADDRESS) +SYSTEM_ADDRESS = Address(Spec.SYSTEM_ADDRESS) + + +def get_beacon_root_slots(timestamp: int) -> tuple: + """ + Return (timestamp_slot, root_slot) for beacon root ring buffer. + + The beacon root contract uses two ring buffers: + - timestamp_slot = timestamp % 8191 + - root_slot = (timestamp % 8191) + 8191 + """ + helpers = SpecHelpers() + return ( + helpers.timestamp_index(timestamp), + helpers.root_index(timestamp), + ) + + +def beacon_root_system_call_expectations( + timestamp: int, + beacon_root: Hash, +) -> dict: + """ + Build BAL expectations for beacon root pre-execution system call. 
+ + Returns account expectations for BEACON_ROOTS_ADDRESS and + SYSTEM_ADDRESS at block_access_index=0. + """ + timestamp_slot, root_slot = get_beacon_root_slots(timestamp) + + return { + BEACON_ROOTS_ADDRESS: BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=timestamp_slot, + slot_changes=[ + BalStorageChange( + block_access_index=0, post_value=timestamp + ) + ], + ), + BalStorageSlot( + slot=root_slot, + slot_changes=[ + BalStorageChange( + block_access_index=0, post_value=beacon_root + ) + ], + ), + ], + ), + # System address MUST NOT be included + SYSTEM_ADDRESS: None, + } + + +def build_beacon_root_setup_block( + timestamp: int, + beacon_root: Hash, +) -> Block: + """ + Build a block that stores beacon root via pre-execution system call. + + This is used as the first block in tests that query beacon roots. + Returns an empty block (no transactions) that only performs the + system call to store the beacon root. + """ + account_expectations = beacon_root_system_call_expectations( + timestamp, beacon_root + ) + + return Block( + txs=[], + parent_beacon_block_root=beacon_root, + timestamp=timestamp, + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + + +def test_bal_4788_simple( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, +) -> None: + """ + Ensure BAL captures beacon root storage writes during pre-execution + system call. + + Block with 2 normal user transactions: Alice sends 10 wei to Charlie, + Bob sends 10 wei to Charlie. At block start (pre-execution), + SYSTEM_ADDRESS calls BEACON_ROOTS_ADDRESS to store parent beacon root. + """ + alice = pre.fund_eoa() + bob = pre.fund_eoa() + charlie = pre.fund_eoa(amount=0) + + block_timestamp = 12 + beacon_root = Hash(0xABCDEF) + + transfer_value = 10 + + tx1 = Transaction( + sender=alice, + to=charlie, + value=transfer_value, + gas_limit=fork.transaction_gas_limit_cap(), + ) + + tx2 = Transaction( + sender=bob, + to=charlie, + value=transfer_value, + gas_limit=fork.transaction_gas_limit_cap(), + ) + + # Build BAL expectations starting with system call + account_expectations = beacon_root_system_call_expectations( + block_timestamp, beacon_root + ) + + # Add transaction-specific expectations + account_expectations[alice] = BalAccountExpectation( + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], + ) + account_expectations[bob] = BalAccountExpectation( + nonce_changes=[BalNonceChange(block_access_index=2, post_nonce=1)], + ) + account_expectations[charlie] = BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + block_access_index=1, post_balance=transfer_value + ), + BalBalanceChange( + block_access_index=2, post_balance=transfer_value * 2 + ), + ], + ) + + block = Block( + txs=[tx1, tx2], + parent_beacon_block_root=beacon_root, + timestamp=block_timestamp, + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + bob: Account(nonce=1), + charlie: Account(balance=transfer_value * 2), + }, + ) + + +def test_bal_4788_empty_block( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures beacon root storage writes in empty block. + + Block with no transactions. At block start (pre-execution), + SYSTEM_ADDRESS calls BEACON_ROOTS_ADDRESS to store parent beacon root. 
+ """ + block_timestamp = 12 + beacon_root = Hash(0xABCDEF) + + # Build BAL expectations (only system call, no transactions) + account_expectations = beacon_root_system_call_expectations( + block_timestamp, beacon_root + ) + + block = Block( + txs=[], + parent_beacon_block_root=beacon_root, + timestamp=block_timestamp, + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={}, + ) + + +@pytest.mark.parametrize( + "timestamp,beacon_root,query_timestamp,expected_result,is_valid", + [ + pytest.param( + 12, Hash(0xABCDEF), 12, Hash(0xABCDEF), True, id="valid_timestamp" + ), + pytest.param(12, Hash(0xABCDEF), 42, 0, False, id="invalid_timestamp"), + pytest.param(12, Hash(0xABCDEF), 0, 0, False, id="zero_timestamp"), + ], +) +@pytest.mark.parametrize( + "value", + [ + pytest.param(0, id="no_value"), + pytest.param(100, id="with_value"), + ], +) +def test_bal_4788_query( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + timestamp: int, + beacon_root: Hash, + query_timestamp: int, + expected_result: int | Hash, + is_valid: bool, + value: int, +) -> None: + """ + Ensure BAL captures storage reads when querying beacon root. + + Test scenarios: + 1. Valid query (timestamp=12, matches stored timestamp): Beacon root + contract reads both timestamp and root slots, query contract writes + returned value + 2. Invalid query with non-zero timestamp (timestamp=42, no match): + Beacon root contract reads only timestamp slot then reverts, query + contract has implicit read recorded + 3. Invalid query with zero timestamp (timestamp=0): Beacon root + contract reverts immediately before any storage access, query + contract has implicit read recorded + 4. 
With value transfer: BAL captures balance changes in addition + to storage operations (only when query is valid) + """ + # Block 1: Store beacon root + block1 = build_beacon_root_setup_block(timestamp, beacon_root) + + # Block 2: Alice queries the beacon root + alice = pre.fund_eoa() + + # Contract that calls beacon root contract with timestamp from calldata + # and stores returned beacon root in slot 0, forwarding any value sent + query_code = ( + Op.CALLDATACOPY(0, 0, 32) + + Op.CALL( + Spec.BEACON_ROOTS_CALL_GAS, + BEACON_ROOTS_ADDRESS, + Op.CALLVALUE, # forward value to beacon root contract + 0, # args offset + 32, # args size (timestamp) + 32, # return offset + 32, # return size (beacon root) + ) + + Op.SSTORE(0, Op.MLOAD(32)) + ) + query_contract = pre.deploy_contract(query_code) + + tx = Transaction( + sender=alice, + to=query_contract, + data=Hash(query_timestamp), + value=value, + gas_limit=fork.transaction_gas_limit_cap(), + ) + + # Build BAL expectations for block 2 + block2_timestamp = timestamp + 1 + block2_beacon_root = Hash(0xDEADBEEF) + + account_expectations = beacon_root_system_call_expectations( + block2_timestamp, block2_beacon_root + ) + + # Add storage reads for the query + timestamp_slot, root_slot = get_beacon_root_slots(query_timestamp) + + # Storage access depends on query validity: + # - Zero timestamp: reverts immediately (no storage access) + # - Valid timestamp: reads both timestamp and root slots + # - Invalid non-zero timestamp: reads only timestamp slot before reverting + account_expectations[BEACON_ROOTS_ADDRESS].storage_reads = ( + [] + if query_timestamp == 0 # Reverts early if timestamp is zero + else [timestamp_slot, root_slot] + if is_valid + else [timestamp_slot] + ) + + # Add balance changes if value is transferred + if value > 0 and is_valid: + account_expectations[BEACON_ROOTS_ADDRESS].balance_changes = [ + BalBalanceChange(block_access_index=1, post_balance=value) + ] + + # Add transaction-specific expectations + account_expectations[alice] = BalAccountExpectation( + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], + ) + + account_expectations[query_contract] = BalAccountExpectation( + # If the call to beacon root contract reverts + # a no-op write happens and an implicit read is + # recorded. + storage_reads=[] if is_valid else [0], + # Write reverts if invalid + storage_changes=[ + BalStorageSlot( + slot=0, + slot_changes=[ + BalStorageChange( + block_access_index=1, post_value=expected_result + ) + ], + ), + ] + if is_valid + else [], + # if value > 0 and invalid, no balance is sent to beacon root so + # is kept in the query contract + balance_changes=[ + BalBalanceChange( + block_access_index=1, + post_balance=value, + ) + ] + if not is_valid and value > 0 + else [], + ) + + block2 = Block( + txs=[tx], + parent_beacon_block_root=block2_beacon_root, + timestamp=block2_timestamp, + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + + post_state = { + alice: Account(nonce=1), + query_contract: Account(storage={0: expected_result}), + } + + if value > 0 and is_valid: + post_state[BEACON_ROOTS_ADDRESS] = Account(balance=value) + + blockchain_test( + pre=pre, + blocks=[block1, block2], + post=post_state, + ) + + +def test_bal_4788_selfdestruct_to_beacon_root( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, +) -> None: + """ + Ensure BAL captures SELFDESTRUCT to beacon root address alongside + system call storage writes. 
+ + Single block with pre-execution system call writing beacon root to + storage, followed by transaction where contract selfdestructs sending + funds to BEACON_ROOTS_ADDRESS. Tests that same address can appear in + BAL with different change types (storage_changes and balance_changes) + at different transaction indices. + """ + alice = pre.fund_eoa() + + block_timestamp = 12 + beacon_root = Hash(0xABCDEF) + contract_balance = 100 + + # Contract that selfdestructs to beacon root address + selfdestruct_code = Op.SELFDESTRUCT(BEACON_ROOTS_ADDRESS) + selfdestruct_contract = pre.deploy_contract( + code=selfdestruct_code, + balance=contract_balance, + ) + + tx = Transaction( + sender=alice, + to=selfdestruct_contract, + gas_limit=fork.transaction_gas_limit_cap(), + ) + + # Build BAL expectations starting with system call + account_expectations = beacon_root_system_call_expectations( + block_timestamp, beacon_root + ) + + # Add balance change from selfdestruct to beacon root address + account_expectations[BEACON_ROOTS_ADDRESS].balance_changes = [ + BalBalanceChange(block_access_index=1, post_balance=contract_balance) + ] + + # Add transaction-specific expectations + account_expectations[alice] = BalAccountExpectation( + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], + ) + account_expectations[selfdestruct_contract] = BalAccountExpectation( + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=0) + ], + ) + + block = Block( + txs=[tx], + parent_beacon_block_root=beacon_root, + timestamp=block_timestamp, + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + BEACON_ROOTS_ADDRESS: Account(balance=contract_balance), + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 1059dac54c..9af98fe766 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -23,7 +23,6 @@ | `test_bal_noop_storage_write` | Ensure BAL includes storage read but not write for no-op writes where pre-state equals post-state | Contract with pre-existing storage value `0x42` in slot `0x01`; transaction executes `SSTORE(0x01, 0x42)` (writing same value) | BAL **MUST** include the contract address with `storage_reads` for slot `0x01` since it was accessed, but **MUST NOT** include it in `storage_changes` (no actual state change). | ✅ Completed | | `test_bal_fully_unmutated_account` | Ensure BAL captures account that has zero net mutations | Alice sends 0 wei to `Oracle` which writes same pre-existing value to storage | BAL MUST include Alice with `nonce_changes` and balance changes (gas), `Oracle` with `storage_reads` for accessed slot but empty `storage_changes`. | ✅ Completed | | `test_bal_net_zero_balance_transfer` | BAL includes accounts with net-zero balance change but excludes them from balance changes | Contract receives and sends same amount to recipient using CALL or SELFDESTRUCT | BAL **MUST** include contract in `account_changes` without `balance_changes` (net zero). BAL **MUST** record non-zero `balance_changes` for recipient. 
| ✅ Completed | -| `test_bal_system_contracts_2935_4788` | BAL includes pre-exec system writes for parent hash & beacon root | Build a block with `N` normal txs; 2935 & 4788 active | BAL MUST include `HISTORY_STORAGE_ADDRESS` (EIP-2935) and `BEACON_ROOTS_ADDRESS` (EIP-4788) with `storage_changes` to ring-buffer slots; each write uses `block_access_index = N` (system op). | 🟡 Planned | | `test_bal_system_dequeue_withdrawals_eip7002` | BAL tracks post-exec system dequeues for withdrawals | Pre-populate EIP-7002 withdrawal requests; produce a block where dequeues occur | BAL MUST include the 7002 system contract with `storage_changes` (queue head/tail slots 0–3) using `block_access_index = len(txs)` and balance changes for withdrawal recipients. | 🟡 Planned | | `test_bal_system_dequeue_consolidations_eip7251` | BAL tracks post-exec system dequeues for consolidations | Pre-populate EIP-7251 consolidation requests; produce a block where dequeues occur | BAL MUST include the 7251 system contract with `storage_changes` (queue slots 0–3) using `block_access_index = len(txs)`. | 🟡 Planned | | `test_bal_aborted_storage_access` | Ensure BAL captures storage access in aborted transactions correctly | Alice calls contract that reads storage slot `0x01`, writes to slot `0x02`, then aborts with `REVERT`/`INVALID` | BAL MUST include storage_reads for slots `0x01` and `0x02` (aborted writes become reads), empty storage_changes. Only nonce changes for Alice. | ✅ Completed | @@ -113,5 +112,9 @@ | `test_selfdestruct_created_in_same_tx_with_revert` | Ensure BAL tracks selfdestruct with revert correctly (pre-Amsterdam test with BAL) | Contract created and selfdestructed in same tx with nested revert | BAL **MUST** track storage reads and balance changes for selfdestruct even with reverts | ✅ Completed | | `test_value_transfer_gas_calculation` | Ensure BAL correctly tracks OOG scenarios for CALL/CALLCODE/DELEGATECALL/STATICCALL (pre-Amsterdam test with BAL) | Nested calls with precise gas limits to test OOG behavior. For CALL with OOG: target account is read. For CALLCODE/DELEGATECALL/STATICCALL with OOG: target account **NOT** read (OOG before state access) | For CALL: target in BAL even with OOG. For CALLCODE/DELEGATECALL/STATICCALL: target **NOT** in BAL when OOG (state access deferred until after gas check) | ✅ Completed | | `test_bal_call_with_value_in_static_context` | Ensure BAL does NOT include target when CALL with value fails in static context | `static_caller` uses `STATICCALL` to call `caller`. `caller` attempts `CALL(target, value=1)` which must fail due to static context. Target is an empty account. | BAL **MUST NOT** include target because static context check (`is_static && value > 0`) must happen BEFORE any account access or BAL tracking. BAL **MUST** include `static_caller` with `storage_changes` (STATICCALL succeeded), `caller` with empty changes. | ✅ Completed | +| `test_bal_4788_simple` | Ensure BAL captures beacon root storage writes during pre-execution system call | Block with 2 normal user transactions: Alice sends 10 wei to Charlie, Bob sends 10 wei to Charlie. At block start (pre-execution), `SYSTEM_ADDRESS` calls `BEACON_ROOTS_ADDRESS` to store parent beacon root | BAL **MUST** include at `block_access_index=0`: `BEACON_ROOTS_ADDRESS` with two `storage_changes` (timestamp slot and beacon root slot); `SYSTEM_ADDRESS` **MUST NOT** be included in BAL. At `block_access_index=1`: Alice with `nonce_changes`, Charlie with `balance_changes` (10 wei). 
At `block_access_index=2`: Bob with `nonce_changes`, Charlie with `balance_changes` (20 wei total). | ✅ Completed | +| `test_bal_4788_empty_block` | Ensure BAL captures beacon root storage writes in empty block | Block with no transactions. At block start (pre-execution), `SYSTEM_ADDRESS` calls `BEACON_ROOTS_ADDRESS` to store parent beacon root | BAL **MUST** include at `block_access_index=0`: `BEACON_ROOTS_ADDRESS` with two `storage_changes` (timestamp slot and beacon root slot); `SYSTEM_ADDRESS` **MUST NOT** be included in BAL. No transaction-related BAL entries. | ✅ Completed | +| `test_bal_4788_query` | Ensure BAL captures storage reads when querying beacon root (valid and invalid queries) with optional value transfer | Parameterized test: Block 1 stores beacon root at timestamp 12. Block 2 queries with three timestamp scenarios (valid=12, invalid non-zero=42, invalid zero=0) and value (0 or 100 wei). Valid query (timestamp=12): reads both timestamp and root slots, writes returned value. If value > 0, beacon root contract receives balance. Invalid query with non-zero timestamp (timestamp=42): reads only timestamp slot before reverting, query contract has implicit SLOAD recorded (SSTORE reverts), no value transferred. Invalid query with zero timestamp (timestamp=0): reverts immediately without any storage access, query contract has implicit SLOAD recorded, no value transferred. | Block 1 BAL: System call writes. Block 2 BAL **MUST** include at `block_access_index=0`: System call writes for block 2. Valid case (timestamp=12) at `block_access_index=1`: `BEACON_ROOTS_ADDRESS` with `storage_reads` [timestamp_slot, root_slot] and `balance_changes` if value > 0, query contract with `storage_changes`. Invalid non-zero case (timestamp=42) at `block_access_index=1`: `BEACON_ROOTS_ADDRESS` with `storage_reads` [timestamp_slot only] and NO `balance_changes` (reverted), query contract with `storage_reads` [0] and NO `storage_changes`. Invalid zero case (timestamp=0) at `block_access_index=1`: `BEACON_ROOTS_ADDRESS` with NO `storage_reads` (reverts before access) and NO `balance_changes`, query contract with `storage_reads` [0] and NO `storage_changes`. | ✅ Completed | +| `test_bal_4788_selfdestruct_to_beacon_root` | Ensure BAL captures `SELFDESTRUCT` to beacon root address alongside system call storage writes | Single block: Pre-execution system call writes beacon root to storage. Transaction: Alice calls contract (pre-funded with 100 wei) that selfdestructs with `BEACON_ROOTS_ADDRESS` as beneficiary. | BAL **MUST** include at `block_access_index=0`: `BEACON_ROOTS_ADDRESS` with `storage_changes` (timestamp and root slots from system call). At `block_access_index=1`: Alice with `nonce_changes`, contract with `balance_changes` (100→0), `BEACON_ROOTS_ADDRESS` with `balance_changes` (receives 100 wei). | ✅ Completed | | `test_bal_7702_double_auth_reset_minimal` | Ensure BAL tracks multiple 7702 nonce increments but filters net-zero code change | Single transaction contains two EIP-7702 authorizations for `Alice`: (1) first auth sets delegation `0xef0100\|\|Oracle`, (2) second auth clears delegation back to empty. Transaction sends 10 wei to `Bob`. Two variants: (a) Self-funded: `Alice` is tx sender (one tx nonce bump + two auth bumps → nonce 0→3). (b) Sponsored: `Relayer` is tx sender (`Alice` only in auths → nonce 0→2 for `Alice`, plus one nonce bump for `Relayer`). | Variant (a): BAL **MUST** include `Alice` with `nonce_changes` 0→3. 
Variant (b): BAL **MUST** include `Alice` with `nonce_changes` 0→2 and `Relayer` with its own `nonce_changes`. For both variants, BAL **MUST NOT** include `code_changes` for `Alice` (net code is empty), **MUST** include `Bob` with `balance_changes` (receives 10 wei), and `Oracle` **MUST NOT** appear in BAL. | 🟡 Planned | | `test_bal_selfdestruct_send_to_sender` | Ensure BAL tracks SELFDESTRUCT sending all funds back to the tx sender (no burn) | Pre-state: contract `C` exists from a prior transaction with non-empty code and balance = 100 wei. EOA `Alice` sends a transaction calling `C`. `C`’s code executes `SELFDESTRUCT(Alice)`. Under EIP-6780, because `C` was not created in this transaction, SELFDESTRUCT does not delete code or storage; it only transfers the entire 100 wei balance from `C` to `Alice`. Final post-state: `C` still exists with the same code and balance = 0; `Alice`’s balance increased by 100 wei (ignoring gas for this test). | BAL **MUST** include `Alice` with `nonce_changes` (tx sender) and `balance_changes` reflecting receipt of 100 wei, and **MUST** include `C` with `balance_changes` 100→0 and no `code_changes`. BAL **MUST NOT** include any other accounts. This test ensures SELFDESTRUCT-to-sender is modeled as a pure value transfer (no burn, no code deletion). | 🟡 Planned | From 19a3960e73820971d69f305cde6ac8a7e508f669 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Toni=20Wahrst=C3=A4tter?= <51536394+nerolation@users.noreply.github.com> Date: Wed, 7 Jan 2026 20:22:02 +0100 Subject: [PATCH 060/154] feat(tests): add invalid BAL tests for spurious block_access_index (#1953) --- tests/amsterdam/eip7928_block_level_access_lists/test_cases.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 9af98fe766..6f70977982 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -118,3 +118,6 @@ | `test_bal_4788_selfdestruct_to_beacon_root` | Ensure BAL captures `SELFDESTRUCT` to beacon root address alongside system call storage writes | Single block: Pre-execution system call writes beacon root to storage. Transaction: Alice calls contract (pre-funded with 100 wei) that selfdestructs with `BEACON_ROOTS_ADDRESS` as beneficiary. | BAL **MUST** include at `block_access_index=0`: `BEACON_ROOTS_ADDRESS` with `storage_changes` (timestamp and root slots from system call). At `block_access_index=1`: Alice with `nonce_changes`, contract with `balance_changes` (100→0), `BEACON_ROOTS_ADDRESS` with `balance_changes` (receives 100 wei). | ✅ Completed | | `test_bal_7702_double_auth_reset_minimal` | Ensure BAL tracks multiple 7702 nonce increments but filters net-zero code change | Single transaction contains two EIP-7702 authorizations for `Alice`: (1) first auth sets delegation `0xef0100\|\|Oracle`, (2) second auth clears delegation back to empty. Transaction sends 10 wei to `Bob`. Two variants: (a) Self-funded: `Alice` is tx sender (one tx nonce bump + two auth bumps → nonce 0→3). (b) Sponsored: `Relayer` is tx sender (`Alice` only in auths → nonce 0→2 for `Alice`, plus one nonce bump for `Relayer`). | Variant (a): BAL **MUST** include `Alice` with `nonce_changes` 0→3. Variant (b): BAL **MUST** include `Alice` with `nonce_changes` 0→2 and `Relayer` with its own `nonce_changes`. 
For both variants, BAL **MUST NOT** include `code_changes` for `Alice` (net code is empty), **MUST** include `Bob` with `balance_changes` (receives 10 wei), and `Oracle` **MUST NOT** appear in BAL. | 🟡 Planned | | `test_bal_selfdestruct_send_to_sender` | Ensure BAL tracks SELFDESTRUCT sending all funds back to the tx sender (no burn) | Pre-state: contract `C` exists from a prior transaction with non-empty code and balance = 100 wei. EOA `Alice` sends a transaction calling `C`. `C`’s code executes `SELFDESTRUCT(Alice)`. Under EIP-6780, because `C` was not created in this transaction, SELFDESTRUCT does not delete code or storage; it only transfers the entire 100 wei balance from `C` to `Alice`. Final post-state: `C` still exists with the same code and balance = 0; `Alice`’s balance increased by 100 wei (ignoring gas for this test). | BAL **MUST** include `Alice` with `nonce_changes` (tx sender) and `balance_changes` reflecting receipt of 100 wei, and **MUST** include `C` with `balance_changes` 100→0 and no `code_changes`. BAL **MUST NOT** include any other accounts. This test ensures SELFDESTRUCT-to-sender is modeled as a pure value transfer (no burn, no code deletion). | 🟡 Planned | +| `test_bal_spurious_entry_index_plus_2_with_cross_tx_read` | Ensure clients reject BALs containing a spurious entry at `bal_index = len(transactions)+2`, even if its slot is legitimately read elsewhere in the block | Block with `N` txs. BAL is modified to include an extra `StorageKey` entry with `block_access_index = N+2` for `(VictimContract, slot=0x01)`. Additionally include another tx in the same block that performs `SLOAD(VictimContract, 0x01)` (legitimate read). | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate that every `block_access_index` in BAL is in-range (≤ `N` plus any allowed system ops), and **MUST NOT** accept out-of-range indices even if the `(address, slot)` appears elsewhere legitimately. | 🟡 Planned | +| `test_bal_spurious_entry_index_plus_2_with_cross_tx_write` | Ensure clients reject BALs containing a spurious entry at `bal_index = len(transactions)+2`, even if its slot is legitimately written elsewhere in the block | Block with `N` txs. BAL is modified to include an extra `StorageKey` entry with `block_access_index = N+2` for `(VictimContract, slot=0x01)`. Additionally include another tx in the same block that performs `SSTORE(VictimContract, 0x01, 0x42)` (legitimate write). | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate `block_access_index` bounds and reject spurious indices regardless of whether the referenced `(address, slot)` is otherwise accessed or mutated in the block. | 🟡 Planned | +| `test_bal_spurious_entry_index_plus_2_no_other_txs` | Ensure clients reject BALs containing a spurious entry at `bal_index = len(transactions)+2` when no other transaction touches the referenced slot | Block with `N` txs that do not access `(VictimContract, slot=0x01)`. BAL is modified to include an extra `StorageKey` entry with `block_access_index = N+2` for `(VictimContract, slot=0x01)`. | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** reject any BAL that contains out-of-range `block_access_index` values, independent of access patterns in the executed block. 
| 🟡 Planned | From 059f38d239831892d3025c1bfdcd38c2d39c1d61 Mon Sep 17 00:00:00 2001 From: raxhvl <10168946+raxhvl@users.noreply.github.com> Date: Wed, 7 Jan 2026 21:32:46 +0100 Subject: [PATCH 061/154] feat(tests): EIP-7928 tests targeting EIP-7002 (#1918) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(test-tests): Add EIP-7002 BAL tests 🥢 nit: ✨ feat(tests): parameterised amount in clean sweep ✨ feat(tests): test_bal_7002_request_invalid ✨ feat(tests): test_bal_withdrawal_request_from_contract ✨ feat(tests): test_bal_7002_no_withdrawal_requests ♻️ refactor: ✨ feat(tests): parameter: validator key ✨ feat(tests): simplify ✨ feat(tests): test_bal_7002_partial_sweep ✨ feat(tests): test_bal_7002_clean_sweep ✨ feat: add more coverage ✨ feat: test_bal_withdrawal_request_from_eoa * fix(test-tests): lint * chore: update test docstring to match case description and test behavior * fix: balance_changes -> balance for post Accounts --------- Co-authored-by: raxhvl Co-authored-by: fselmo --- .../test_block_access_lists_eip7002.py | 810 ++++++++++++++++++ .../test_cases.md | 6 +- 2 files changed, 815 insertions(+), 1 deletion(-) create mode 100644 tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7002.py diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7002.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7002.py new file mode 100644 index 0000000000..be9a0d2616 --- /dev/null +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7002.py @@ -0,0 +1,810 @@ +"""Tests for the effects of EIP-7002 transactions on EIP-7928.""" + +from typing import Callable + +import pytest +from execution_testing import ( + Account, + Address, + Alloc, + BalAccountExpectation, + BalBalanceChange, + BalNonceChange, + BalStorageChange, + BalStorageSlot, + Block, + BlockAccessListExpectation, + BlockchainTestFiller, + Op, + Transaction, +) + +from ...prague.eip7002_el_triggerable_withdrawals.helpers import ( + WithdrawalRequest, + WithdrawalRequestContract, + WithdrawalRequestInteractionBase, + WithdrawalRequestTransaction, +) +from ...prague.eip7002_el_triggerable_withdrawals.spec import Spec as Spec7002 +from .spec import ref_spec_7928 + +REFERENCE_SPEC_GIT_PATH = ref_spec_7928.git_path +REFERENCE_SPEC_VERSION = ref_spec_7928.version + +pytestmark = pytest.mark.valid_from("Amsterdam") + +""" +Note: +1. In each block, the count resets to zero after execution. +2. During a partial sweep, the head is updated after execution; + if not written, the head remains read. +3. Similarly, the excess is modified for overflow; + if not written, it remains read. +4. If the first 32 bytes of the public key are zero, the second slot + in the queue performs a no-op write (i.e., a read). +""" + + +# --- helpers --- # +def _encode_pubkey_amount_slot(withdrawal_request: WithdrawalRequest) -> bytes: + """ + Encode slot +2: 32 bytes containing last 16 bytes of pubkey followed by + 8 bytes of big endian amount, padded with 8 zero bytes on the right. + Storage layout: [16 bytes pubkey][8 bytes amount][8 bytes padding]. 
+ """ + last_16_bytes = withdrawal_request.validator_pubkey[-16:] + amount_bytes = withdrawal_request.amount.to_bytes(8, byteorder="big") + return last_16_bytes + amount_bytes + b"\x00" * 8 + + +def _build_queue_storage_slots( + senders: list, withdrawal_requests: list[WithdrawalRequest] +) -> tuple[list, list]: + """Build queue storage slots for withdrawal requests.""" + num_reqs = len(senders) + queue_writes = [] + queue_reads = [] + for i in range(num_reqs): + base_slot = Spec7002.WITHDRAWAL_REQUEST_QUEUE_STORAGE_OFFSET + (i * 3) + # Slot +0: source address + queue_writes.append( + BalStorageSlot( + slot=base_slot, + slot_changes=[ + BalStorageChange( + block_access_index=i + 1, + post_value=senders[i], + ) + ], + ), + ) + # Slot +1: first 32 bytes of validator pubkey + first_32_bytes = int.from_bytes( + withdrawal_requests[i].validator_pubkey[:32], byteorder="big" + ) + if first_32_bytes != 0: + # Non-zero write: record as storage change + queue_writes.append( + BalStorageSlot( + slot=base_slot + 1, + slot_changes=[ + BalStorageChange( + block_access_index=i + 1, + post_value=first_32_bytes, + ) + ], + ), + ) + else: + # Zero write (no-op): record as storage read + queue_reads.append(base_slot + 1) + # Slot +2: last 16 bytes of pubkey + amount + queue_writes.append( + BalStorageSlot( + slot=base_slot + 2, + slot_changes=[ + BalStorageChange( + block_access_index=i + 1, + post_value=_encode_pubkey_amount_slot( + withdrawal_requests[i] + ), + ) + ], + ), + ) + return queue_writes, queue_reads + + +def _extract_post_storage_from_queue_writes(queue_writes: list) -> dict: + """Extract post-state storage dict from queue writes.""" + post_storage = {} + for bal_slot in queue_writes: + # Get the final value from the last slot_change + if bal_slot.slot_changes: + post_storage[bal_slot.slot] = bal_slot.slot_changes[-1].post_value + return post_storage + + +def _build_incremental_changes( + count: int, + change_class: type, + value_param: str, + value_fn: Callable[[int], int] = lambda i: i, + reset_to: int | None = None, +) -> list: + """ + Build a list of incremental changes with customizable value function. + + Args: + count: Number of changes to create + change_class: Class to instantiate for each change + value_param: Parameter name for the value + (e.g., 'post_balance', 'post_value') + value_fn: Function to compute value from index (default: identity) + reset_to: Optional reset value to append at the end + + """ + changes = [ + change_class(block_access_index=i, **{value_param: value_fn(i)}) + for i in range(1, count + 1) + ] + if reset_to is not None: + changes.append( + change_class( + block_access_index=count + 1, **{value_param: reset_to} + ) + ) + return changes + + +# --- tests --- # + + +@pytest.mark.parametrize( + "pubkey", + # Use different pubkey based on parameter + # 0x01 has first 32 bytes all zero + # Full 48-byte pubkey with non-zero first word + [0x01, b"key" * 16], + ids=["pubkey_first_word_zero", "pubkey_first_word_nonzero"], +) +@pytest.mark.parametrize( + "amount", + [0, 1000], + ids=["amount_zero", "amount_nonzero"], +) +def test_bal_7002_clean_sweep( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + pubkey: bytes, + amount: int, +) -> None: + """ + Ensure BAL correctly tracks "clean sweep" where all withdrawal requests + are dequeued in same block (requests ≤ MAX). 
+ + Tests combinations of: + - pubkey with first 32 bytes zero / non-zero + - amount zero / non-zero + """ + alice = pre.fund_eoa() + + withdrawal_request = WithdrawalRequest( + validator_pubkey=pubkey, + amount=amount, + fee=Spec7002.get_fee(0), + ) + + # Transaction to system contract + tx = Transaction( + sender=alice, + to=Address(Spec7002.WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS), + value=withdrawal_request.fee, + data=withdrawal_request.calldata, + gas_limit=200_000, + ) + + # Build queue writes and reads based on pubkey + queue_writes, queue_reads = _build_queue_storage_slots( + [alice], [withdrawal_request] + ) + + # Base storage reads that always happen + base_storage_reads = [ + # Excess is read-only if while dequeuing queue doesn't overflow + Spec7002.EXCESS_WITHDRAWAL_REQUESTS_STORAGE_SLOT, + # Head slot is read while dequeuing + Spec7002.WITHDRAWAL_REQUEST_QUEUE_HEAD_STORAGE_SLOT, + ] + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + ), + Spec7002.WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS: BalAccountExpectation( # noqa: E501 + balance_changes=[ + # Fee is collected. + BalBalanceChange( + block_access_index=1, + post_balance=withdrawal_request.fee, + ) + ], + storage_reads=base_storage_reads + queue_reads, + storage_changes=[ + BalStorageSlot( + slot=Spec7002.WITHDRAWAL_REQUEST_COUNT_STORAGE_SLOT, + # Count goes by number of request. + # Invariant 1: Post-execution ALWAYS resets count. + slot_changes=_build_incremental_changes( + 1, + BalStorageChange, + "post_value", + lambda i: i, + reset_to=0, + ), + ), + BalStorageSlot( + slot=Spec7002.WITHDRAWAL_REQUEST_QUEUE_TAIL_STORAGE_SLOT, + # Tail index goes up by number of requests. + # Invariant 2: resets if clean sweep. + slot_changes=_build_incremental_changes( + 1, + BalStorageChange, + "post_value", + lambda i: i, + reset_to=0, + ), + ), + ] + + queue_writes, + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + Spec7002.WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS: Account( + balance=withdrawal_request.fee, + storage=_extract_post_storage_from_queue_writes(queue_writes), + ), + }, + ) + + +def test_bal_7002_partial_sweep( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL correctly tracks queue overflow when requests exceed MAX. + Block 1: 20 requests (partial sweep, 16 dequeued). + Block 2: Empty (clean sweep of remaining 4). 
+ """ + num_requests = 20 + fee = Spec7002.get_fee(0) + senders = [pre.fund_eoa() for _ in range(num_requests)] + + # Block 1: 20 withdrawal requests + withdrawal_requests = [ + WithdrawalRequest(validator_pubkey=i + 1, amount=0, fee=fee) + for i in range(num_requests) + ] + + eip7002_address = Address(Spec7002.WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS) + + txs_block_1 = [ + Transaction( + sender=sender, + to=eip7002_address, + value=withdrawal_request.fee, + data=withdrawal_request.calldata, + gas_limit=200_000, + ) + for sender, withdrawal_request in zip( + senders, withdrawal_requests, strict=True + ) + ] + + excess_after_block_1 = Spec7002.get_excess_withdrawal_requests( + 0, num_requests + ) + + block_1_expectations: dict = { + sender: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=i + 1, post_nonce=1) + ] + ) + for i, sender in enumerate(senders) + } + + # Build queue writes and reads + queue_writes, queue_reads = _build_queue_storage_slots( + senders, withdrawal_requests + ) + + block_1_expectations[eip7002_address] = BalAccountExpectation( + balance_changes=_build_incremental_changes( + num_requests, + BalBalanceChange, + "post_balance", + lambda i: fee * i, + ), + storage_reads=queue_reads, + storage_changes=[ + # Excess is only updated once during + # dequeue + BalStorageSlot( + slot=Spec7002.EXCESS_WITHDRAWAL_REQUESTS_STORAGE_SLOT, + slot_changes=[ + BalStorageChange( + block_access_index=num_requests + 1, + post_value=excess_after_block_1, + ) + ], + ), + BalStorageSlot( + slot=Spec7002.WITHDRAWAL_REQUEST_COUNT_STORAGE_SLOT, + slot_changes=_build_incremental_changes( + num_requests, + BalStorageChange, + "post_value", + lambda i: i, + reset_to=0, + ), + ), + BalStorageSlot( + slot=Spec7002.WITHDRAWAL_REQUEST_QUEUE_HEAD_STORAGE_SLOT, + slot_changes=[ + BalStorageChange( + block_access_index=num_requests + 1, + post_value=Spec7002.MAX_WITHDRAWAL_REQUESTS_PER_BLOCK, + ) + ], + ), + BalStorageSlot( + slot=Spec7002.WITHDRAWAL_REQUEST_QUEUE_TAIL_STORAGE_SLOT, + slot_changes=_build_incremental_changes( + num_requests, + BalStorageChange, + "post_value", + lambda i: i, + ), + ), + ] + + queue_writes, + ) + + # Block 2: Empty block, clean sweep of remaining 4 requests + excess_after_block_2 = Spec7002.get_excess_withdrawal_requests( + excess_after_block_1, 0 + ) + + block_2_expectations = { + eip7002_address: BalAccountExpectation( + storage_reads=[Spec7002.WITHDRAWAL_REQUEST_COUNT_STORAGE_SLOT], + storage_changes=[ + BalStorageSlot( + slot=Spec7002.EXCESS_WITHDRAWAL_REQUESTS_STORAGE_SLOT, + slot_changes=[ + BalStorageChange( + block_access_index=1, + post_value=excess_after_block_2, + ) + ], + ), + # Head is cleared + BalStorageSlot( + slot=Spec7002.WITHDRAWAL_REQUEST_QUEUE_HEAD_STORAGE_SLOT, + slot_changes=[ + BalStorageChange(block_access_index=1, post_value=0) + ], + ), + # Tail is cleared + BalStorageSlot( + slot=Spec7002.WITHDRAWAL_REQUEST_QUEUE_TAIL_STORAGE_SLOT, + slot_changes=[ + BalStorageChange(block_access_index=1, post_value=0) + ], + ), + ], + ) + } + + # Build post state storage: queue data persists even after dequeue + post_storage = _extract_post_storage_from_queue_writes(queue_writes) + post_storage[Spec7002.EXCESS_WITHDRAWAL_REQUESTS_STORAGE_SLOT] = ( + excess_after_block_2 + ) + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=txs_block_1, + expected_block_access_list=BlockAccessListExpectation( + account_expectations=block_1_expectations + ), + ), + Block( + txs=[], + expected_block_access_list=BlockAccessListExpectation( + 
account_expectations=block_2_expectations + ), + ), + ], + post={ + **{sender: Account(nonce=1) for sender in senders}, + eip7002_address: Account( + balance=fee * num_requests, + storage=post_storage, + ), + }, + ) + + +def test_bal_7002_no_withdrawal_requests( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures EIP-7002 system contract dequeue operation even + when block has no withdrawal requests. + + This test verifies that the post-execution dequeue system call always + reads queue state (slots 0-3), even when no requests are present. The + system contract should have storage_reads but no storage_changes. + """ + alice = pre.fund_eoa() + bob = pre.fund_eoa(amount=0) + + value = 10 + + tx = Transaction( + sender=alice, + to=bob, + value=value, + gas_limit=200_000, + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + ), + bob: BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + block_access_index=1, post_balance=value + ) + ], + ), + Spec7002.WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS: BalAccountExpectation( # noqa: E501 + storage_reads=[ + Spec7002.EXCESS_WITHDRAWAL_REQUESTS_STORAGE_SLOT, + Spec7002.WITHDRAWAL_REQUEST_COUNT_STORAGE_SLOT, + Spec7002.WITHDRAWAL_REQUEST_QUEUE_HEAD_STORAGE_SLOT, + Spec7002.WITHDRAWAL_REQUEST_QUEUE_TAIL_STORAGE_SLOT, + ], + storage_changes=[], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + bob: Account(balance=value), + }, + ) + + +def test_bal_7002_request_from_contract( + pre: Alloc, + blockchain_test: BlockchainTestFiller, +) -> None: + """ + Ensure BAL captures withdrawal request from contract with correct + source address. + + Alice calls RelayContract which internally calls EIP-7002 system + contract with withdrawal request. Withdrawal request should have + source_address = RelayContract (not Alice). 
+ """ + fee = Spec7002.get_fee(0) + + # Create withdrawal request interaction using Prague helper + interaction = WithdrawalRequestContract( + requests=[ + WithdrawalRequest( + validator_pubkey=0x01, + amount=0, + fee=fee, + ) + ], + contract_balance=fee, + ) + + # Set up pre-state using helper + interaction.update_pre(pre) + + alice = interaction.sender_account + relay_contract = interaction.contract_address + + # Build queue storage slots with contract as source + queue_writes, queue_reads = _build_queue_storage_slots( + [relay_contract], interaction.requests + ) + + block = Block( + txs=interaction.transactions(), + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + ), + relay_contract: BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + block_access_index=1, + post_balance=0, + ) + ], + ), + Spec7002.WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS: BalAccountExpectation( # noqa: E501 + balance_changes=[ + BalBalanceChange( + block_access_index=1, + post_balance=fee, + ) + ], + storage_reads=[ + Spec7002.EXCESS_WITHDRAWAL_REQUESTS_STORAGE_SLOT, + Spec7002.WITHDRAWAL_REQUEST_QUEUE_HEAD_STORAGE_SLOT, + ] + + queue_reads, + storage_changes=[ + BalStorageSlot( + slot=Spec7002.WITHDRAWAL_REQUEST_COUNT_STORAGE_SLOT, + slot_changes=_build_incremental_changes( + 1, + BalStorageChange, + "post_value", + lambda i: i, + reset_to=0, + ), + ), + BalStorageSlot( + slot=Spec7002.WITHDRAWAL_REQUEST_QUEUE_TAIL_STORAGE_SLOT, + slot_changes=_build_incremental_changes( + 1, + BalStorageChange, + "post_value", + lambda i: i, + reset_to=0, + ), + ), + ] + + queue_writes, + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account(nonce=1), + relay_contract: Account(balance=0), + Spec7002.WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS: Account( + balance=fee, + storage=_extract_post_storage_from_queue_writes(queue_writes), + ), + }, + ) + + +@pytest.mark.parametrize( + "interaction", + [ + pytest.param( + WithdrawalRequestTransaction( + requests=[ + WithdrawalRequest( + validator_pubkey=0x01, + amount=0, + fee=0, # Below MIN_WITHDRAWAL_REQUEST_FEE + valid=False, + ) + ] + ), + id="insufficient_fee", + ), + pytest.param( + WithdrawalRequestTransaction( + requests=[ + WithdrawalRequest( + validator_pubkey=0x01, + amount=0, + fee=Spec7002.get_fee(0), + calldata_modifier=lambda x: x[ + :-1 + ], # 55 bytes instead of 56 + valid=False, + ) + ] + ), + id="calldata_too_short", + ), + pytest.param( + WithdrawalRequestTransaction( + requests=[ + WithdrawalRequest( + validator_pubkey=0x01, + amount=0, + fee=Spec7002.get_fee(0), + calldata_modifier=lambda x: x + + b"\x00", # 57 bytes instead of 56 + valid=False, + ) + ] + ), + id="calldata_too_long", + ), + pytest.param( + WithdrawalRequestTransaction( + requests=[ + WithdrawalRequest( + validator_pubkey=0x01, + amount=0, + fee=Spec7002.get_fee(0), + gas_limit=25_000, # Insufficient gas + valid=False, + ) + ] + ), + id="oog", + ), + pytest.param( + WithdrawalRequestContract( + requests=[ + WithdrawalRequest( + validator_pubkey=0x01, + amount=0, + fee=Spec7002.get_fee(0), + valid=False, + ) + ], + call_type=Op.DELEGATECALL, + ), + id="invalid_call_type_delegatecall", + ), + pytest.param( + WithdrawalRequestContract( + requests=[ + WithdrawalRequest( + validator_pubkey=0x01, + amount=0, + fee=Spec7002.get_fee(0), + valid=False, + ) + ], + call_type=Op.STATICCALL, + ), + id="invalid_call_type_staticcall", + ), + 
pytest.param( + WithdrawalRequestContract( + requests=[ + WithdrawalRequest( + validator_pubkey=0x01, + amount=0, + fee=Spec7002.get_fee(0), + valid=False, + ) + ], + call_type=Op.CALLCODE, + ), + id="invalid_call_type_callcode", + ), + pytest.param( + WithdrawalRequestContract( + requests=[ + WithdrawalRequest( + validator_pubkey=0x01, + amount=0, + fee=Spec7002.get_fee(0), + valid=False, + ) + ], + extra_code=Op.REVERT(0, 0), + ), + id="contract_reverts", + ), + ], +) +def test_bal_7002_request_invalid( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + interaction: WithdrawalRequestInteractionBase, +) -> None: + """ + Ensure BAL correctly handles invalid withdrawal request scenarios. + + Tests various failure modes: + - insufficient_fee: Transaction reverts due to fee below minimum + - calldata_too_short: Transaction reverts due to short calldata (55 bytes) + - calldata_too_long: Transaction reverts due to long calldata (57 bytes) + - oog: Transaction runs out of gas before completion + - invalid_call_type_*: Contract call via DELEGATECALL/STATICCALL/CALLCODE + - contract_reverts: Contract calls system contract but reverts after + + In all cases: + - Sender's nonce increments (transaction executed) + - Sender pays gas costs + - System contract is accessed during dequeue but has no state changes + - No withdrawal request is queued + """ + # Use helper to set up pre-state and get transaction + interaction.update_pre(pre) + tx = interaction.transactions()[0] + alice = interaction.sender_account + + # Build account expectations + account_expectations = { + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], + ), + Spec7002.WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS: BalAccountExpectation( + storage_reads=[ + Spec7002.EXCESS_WITHDRAWAL_REQUESTS_STORAGE_SLOT, + Spec7002.WITHDRAWAL_REQUEST_COUNT_STORAGE_SLOT, + Spec7002.WITHDRAWAL_REQUEST_QUEUE_HEAD_STORAGE_SLOT, + Spec7002.WITHDRAWAL_REQUEST_QUEUE_TAIL_STORAGE_SLOT, + ], + storage_changes=[], + ), + } + + # For all invalid scenarios, system contract should have reads but + # no write since the dequeue operation still happens post-execution + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations=account_expectations + ), + ) + + post: dict = { + alice: Account(nonce=1), + Spec7002.WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS: Account(storage={}), + } + + # Add relay contract to post-state for contract scenarios + if isinstance(interaction, WithdrawalRequestContract): + post[interaction.contract_address] = Account() + + blockchain_test( + pre=pre, + blocks=[block], + post=post, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 6f70977982..09cd675424 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -23,7 +23,6 @@ | `test_bal_noop_storage_write` | Ensure BAL includes storage read but not write for no-op writes where pre-state equals post-state | Contract with pre-existing storage value `0x42` in slot `0x01`; transaction executes `SSTORE(0x01, 0x42)` (writing same value) | BAL **MUST** include the contract address with `storage_reads` for slot `0x01` since it was accessed, but **MUST NOT** include it in `storage_changes` (no actual state change). 
| ✅ Completed | | `test_bal_fully_unmutated_account` | Ensure BAL captures account that has zero net mutations | Alice sends 0 wei to `Oracle` which writes same pre-existing value to storage | BAL MUST include Alice with `nonce_changes` and balance changes (gas), `Oracle` with `storage_reads` for accessed slot but empty `storage_changes`. | ✅ Completed | | `test_bal_net_zero_balance_transfer` | BAL includes accounts with net-zero balance change but excludes them from balance changes | Contract receives and sends same amount to recipient using CALL or SELFDESTRUCT | BAL **MUST** include contract in `account_changes` without `balance_changes` (net zero). BAL **MUST** record non-zero `balance_changes` for recipient. | ✅ Completed | -| `test_bal_system_dequeue_withdrawals_eip7002` | BAL tracks post-exec system dequeues for withdrawals | Pre-populate EIP-7002 withdrawal requests; produce a block where dequeues occur | BAL MUST include the 7002 system contract with `storage_changes` (queue head/tail slots 0–3) using `block_access_index = len(txs)` and balance changes for withdrawal recipients. | 🟡 Planned | | `test_bal_system_dequeue_consolidations_eip7251` | BAL tracks post-exec system dequeues for consolidations | Pre-populate EIP-7251 consolidation requests; produce a block where dequeues occur | BAL MUST include the 7251 system contract with `storage_changes` (queue slots 0–3) using `block_access_index = len(txs)`. | 🟡 Planned | | `test_bal_aborted_storage_access` | Ensure BAL captures storage access in aborted transactions correctly | Alice calls contract that reads storage slot `0x01`, writes to slot `0x02`, then aborts with `REVERT`/`INVALID` | BAL MUST include storage_reads for slots `0x01` and `0x02` (aborted writes become reads), empty storage_changes. Only nonce changes for Alice. | ✅ Completed | | `test_bal_aborted_account_access` | Ensure BAL captures account access in aborted transactions for all account accessing opcodes | Alice calls `AbortContract` that performs account access operations (`BALANCE`, `EXTCODESIZE`, `EXTCODECOPY`, `EXTCODEHASH`, `CALL`, `CALLCODE`, `DELEGATECALL`, `STATICCALL`) on `TargetContract` and aborts via `REVERT`/`INVALID` | BAL MUST include Alice, `TargetContract`, and `AbortContract` in account_changes and nonce changes for Alice. | ✅ Completed | @@ -121,3 +120,8 @@ | `test_bal_spurious_entry_index_plus_2_with_cross_tx_read` | Ensure clients reject BALs containing a spurious entry at `bal_index = len(transactions)+2`, even if its slot is legitimately read elsewhere in the block | Block with `N` txs. BAL is modified to include an extra `StorageKey` entry with `block_access_index = N+2` for `(VictimContract, slot=0x01)`. Additionally include another tx in the same block that performs `SLOAD(VictimContract, 0x01)` (legitimate read). | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate that every `block_access_index` in BAL is in-range (≤ `N` plus any allowed system ops), and **MUST NOT** accept out-of-range indices even if the `(address, slot)` appears elsewhere legitimately. | 🟡 Planned | | `test_bal_spurious_entry_index_plus_2_with_cross_tx_write` | Ensure clients reject BALs containing a spurious entry at `bal_index = len(transactions)+2`, even if its slot is legitimately written elsewhere in the block | Block with `N` txs. BAL is modified to include an extra `StorageKey` entry with `block_access_index = N+2` for `(VictimContract, slot=0x01)`. 
Additionally include another tx in the same block that performs `SSTORE(VictimContract, 0x01, 0x42)` (legitimate write). | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate `block_access_index` bounds and reject spurious indices regardless of whether the referenced `(address, slot)` is otherwise accessed or mutated in the block. | 🟡 Planned | | `test_bal_spurious_entry_index_plus_2_no_other_txs` | Ensure clients reject BALs containing a spurious entry at `bal_index = len(transactions)+2` when no other transaction touches the referenced slot | Block with `N` txs that do not access `(VictimContract, slot=0x01)`. BAL is modified to include an extra `StorageKey` entry with `block_access_index = N+2` for `(VictimContract, slot=0x01)`. | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** reject any BAL that contains out-of-range `block_access_index` values, independent of access patterns in the executed block. | 🟡 Planned | +| `test_bal_7002_clean_sweep` | Ensure BAL correctly tracks "clean sweep" where all withdrawal requests are dequeued in same block (requests ≤ MAX). Parameterized: (1) pubkey first 32 bytes zero / non-zero, (2) amount zero / non-zero | Alice sends transaction to `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS` with 1 withdrawal request. Validator pubkey has either first 32 bytes zero or non-zero. Amount is either zero or non-zero. Since 1 ≤ MAX_WITHDRAWAL_REQUESTS_PER_BLOCK, post-execution system call dequeues all requests ("clean sweep"), resetting head and tail to 0. | BAL **MUST** include Alice with `nonce_changes` at `block_access_index=1`. `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS` **MUST** have: `balance_changes` at `block_access_index=1` (receives fee), `storage_reads` for excess, head, and slot 5 (first 32 bytes of pubkey) if zero. At `block_access_index=1` (tx enqueue): `storage_changes` for count (0→1), tail (0→1), slot 4 (source address), slot 5 (first 32 bytes, **ONLY** if non-zero), slot 6. At `block_access_index=2` (post-exec dequeue): `storage_changes` for count (1→0), tail (1→0). Clean sweep invariant: when all requests dequeued, both head and tail reset to 0. | ✅ Completed | +| `test_bal_7002_partial_sweep` | Ensure BAL correctly tracks queue overflow when requests exceed MAX, demonstrating partial sweep in block 1 and cleanup in block 2 | Block 1: 20 different EOAs each send withdrawal request to `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS`. Since 20 > MAX_WITHDRAWAL_REQUESTS_PER_BLOCK, only first MAX requests dequeued ("partial sweep"), leaving 4 in queue. Block 2: Empty block (no transactions), remaining 4 requests dequeued ("clean sweep"), queue becomes empty. | Block 1 BAL **MUST** include all 20 senders with `nonce_changes` at respective `block_access_index` (1-20). `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS` at each tx: `storage_changes` for count (increments to 20), tail (increments to 20). At `block_access_index=21` (post-exec partial dequeue): `storage_changes` for count (20→0), head (0→MAX). Partial sweep: head advances by MAX, tail stays 20, queue has 4 remaining (tail - head = 4). Block 2 BAL **MUST** include `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS` at `block_access_index=1` (post-exec clean sweep): `storage_changes` for head (MAX→0), tail (20→0). Clean sweep: both head and tail reset to 0, queue empty. 
|✅ Completed | +| `test_bal_7002_no_withdrawal_requests` | Ensure BAL captures EIP-7002 system contract dequeue operation even when block has no withdrawal requests | Block with 1 transaction: Alice sends 10 wei to Bob. No withdrawal requests submitted. | BAL **MUST** include Alice with `nonce_changes` at `block_access_index=1`. BAL **MUST** include Bob with `balance_changes` at `block_access_index=1`. BAL **MUST** include EIP-7002 system contract (`WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS`) with `storage_reads` for slots: excess (slot 0), count (slot 1), head (slot 2), tail (slot 3). System contract **MUST NOT** have `storage_changes` (no writes occur when queue is empty). This test demonstrates that the post-execution dequeue operation always runs and reads queue state, even when no requests are present. | ✅ Completed | +| `test_bal_7002_request_from_contract` | Ensure BAL captures withdrawal request from contract with correct source address | Alice calls `RelayContract` which internally calls EIP-7002 system contract with withdrawal request. Withdrawal request should have `source_address = RelayContract` (not Alice). | BAL **MUST** include Alice with `nonce_changes` at `block_access_index=1`. BAL **MUST** include `RelayContract` with `balance_changes` (fee paid to system contract) at `block_access_index=1`. BAL **MUST** include system contract with `balance_changes`, `storage_reads`, and `storage_changes` (queue modified). Source address in withdrawal request **MUST** be `RelayContract`. Clean sweep: count and tail reset to 0 at `block_access_index=2`. | ✅ Completed | +| `test_bal_7002_request_invalid` | Ensure BAL correctly handles invalid withdrawal request scenarios | Parameterized test with 8 invalid scenarios: (1) insufficient_fee (fee=0), (2) calldata_too_short (55 bytes), (3) calldata_too_long (57 bytes), (4) oog (insufficient gas), (5-7) invalid_call_type (DELEGATECALL/STATICCALL/CALLCODE), (8) contract_reverts. Tests both EOA and contract-based withdrawal requests. | BAL **MUST** include sender with `nonce_changes` at `block_access_index=1`. BAL **MUST** include system contract with `storage_reads` for slots: excess (slot 0), count (slot 1), head (slot 2), tail (slot 3). System contract **MUST NOT** have `storage_changes` (transaction failed, no queue modification). 
| ✅ Completed | From ba6e8842b11a876a464a2337598a4616f8d683ed Mon Sep 17 00:00:00 2001 From: raxhvl <10168946+raxhvl@users.noreply.github.com> Date: Thu, 8 Jan 2026 18:24:52 +0100 Subject: [PATCH 062/154] feat(tests): Test extraneous entries for BAL (#1992) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 🧪 test(EIP-7928): test_bal_invalid_extraneous_entries * 🥢 nit: * chore: fix lint issues --------- Co-authored-by: raxhvl Co-authored-by: fselmo --- .../test_types/block_access_list/modifiers.py | 108 ++++++++++ .../test_block_access_lists_invalid.py | 193 ++++++++++++++++++ .../test_cases.md | 4 +- 3 files changed, 302 insertions(+), 3 deletions(-) diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py b/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py index b71f8e73f8..6638be94e1 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py @@ -335,6 +335,112 @@ def transform(bal: BlockAccessList) -> BlockAccessList: return transform +def append_change( + account: Address, + change: BalNonceChange | BalBalanceChange | BalCodeChange, +) -> Callable[[BlockAccessList], BlockAccessList]: + """ + Append a change to an account's field list. + + Generic function to add extraneous entries to nonce_changes, balance_changes, + or code_changes fields. The field is inferred from the change type. + """ + # Infer field name from change type + if isinstance(change, BalNonceChange): + field = "nonce_changes" + elif isinstance(change, BalBalanceChange): + field = "balance_changes" + elif isinstance(change, BalCodeChange): + field = "code_changes" + else: + raise TypeError(f"Unsupported change type: {type(change)}") + + found_address = False + + def transform(bal: BlockAccessList) -> BlockAccessList: + nonlocal found_address + new_root = [] + for account_change in bal.root: + if account_change.address == account: + found_address = True + new_account = account_change.model_copy(deep=True) + # Get the field list and append the change + field_list = getattr(new_account, field) + field_list.append(change) + new_root.append(new_account) + else: + new_root.append(account_change) + + if not found_address: + raise ValueError( + f"Address {account} not found in BAL to append change to {field}" + ) + + return BlockAccessList(root=new_root) + + return transform + + +def append_storage( + address: Address, + slot: int, + change: Optional[BalStorageChange] = None, + read: bool = False, +) -> Callable[[BlockAccessList], BlockAccessList]: + """ + Append storage-related entries to an account. 
+ + Generic function for all storage operations: + - If read=True: appends to storage_reads + - If change provided and slot exists: appends to existing slot's slot_changes + - If change provided and slot new: creates new BalStorageSlot + """ + found_address = False + + def transform(bal: BlockAccessList) -> BlockAccessList: + nonlocal found_address + new_root = [] + for account_change in bal.root: + if account_change.address == address: + found_address = True + new_account = account_change.model_copy(deep=True) + + if read: + # Append to storage_reads + new_account.storage_reads.append(ZeroPaddedHexNumber(slot)) + elif change is not None: + # Find if slot already exists + slot_found = False + for storage_slot in new_account.storage_changes: + if storage_slot.slot == slot: + # Append to existing slot's slot_changes + storage_slot.slot_changes.append(change) + slot_found = True + break + + if not slot_found: + # Create new BalStorageSlot + from . import BalStorageSlot + + new_storage_slot = BalStorageSlot( + slot=slot, slot_changes=[change] + ) + new_account.storage_changes.append(new_storage_slot) + + new_root.append(new_account) + else: + new_root.append(account_change) + + if not found_address: + raise ValueError( + f"Address {address} not found in BAL to append storage entry" + ) + + return BlockAccessList(root=new_root) + + return transform + + def duplicate_account( address: Address, ) -> Callable[[BlockAccessList], BlockAccessList]: @@ -433,6 +539,8 @@ def transform(bal: BlockAccessList) -> BlockAccessList: # Account-level modifiers "remove_accounts", "append_account", + "append_change", + "append_storage", "duplicate_account", "reverse_accounts", "keep_only", diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_invalid.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_invalid.py index 13d18c7fef..788eb0c2a3 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_invalid.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_invalid.py @@ -4,6 +4,8 @@ These tests verify that clients properly reject blocks with corrupted BALs. 
""" +from typing import Callable + import pytest from execution_testing import ( Account, @@ -11,6 +13,7 @@ BalAccountChange, BalAccountExpectation, BalBalanceChange, + BalCodeChange, BalNonceChange, BalStorageChange, BalStorageSlot, @@ -24,6 +27,8 @@ ) from execution_testing.test_types.block_access_list.modifiers import ( append_account, + append_change, + append_storage, duplicate_account, modify_balance, modify_nonce, @@ -643,3 +648,191 @@ def test_bal_invalid_balance_value( ) ], ) + + +@pytest.mark.valid_from("Amsterdam") +@pytest.mark.exception_test +@pytest.mark.parametrize( + "modifier", + [ + pytest.param( + lambda idx, **actors: append_change( + account=actors["oracle"], + change=BalNonceChange(block_access_index=idx, post_nonce=999), + ), + id="extra_nonce", + ), + pytest.param( + lambda idx, **actors: append_account( + BalAccountChange( + address=actors["charlie"], + balance_changes=[ + BalBalanceChange( + block_access_index=idx, post_balance=999 + ) + ], + ) + ), + id="extra_balance", + ), + pytest.param( + lambda idx, **actors: append_change( + account=actors["oracle"], + change=BalCodeChange( + block_access_index=idx, new_code=b"Amsterdam" + ), + ), + id="extra_code", + ), + pytest.param( + lambda idx, **actors: append_storage( + address=actors["oracle"], + slot=0, + change=BalStorageChange( + block_access_index=idx, post_value=0xCAFE + ), + ), + id="extra_storage_write_touched", + ), + pytest.param( + lambda idx, **actors: append_storage( + address=actors["oracle"], + slot=1, + change=BalStorageChange( + block_access_index=idx, post_value=0xCAFE + ), + ), + id="extra_storage_write_untouched", + ), + pytest.param( + lambda idx, **actors: append_account( + BalAccountChange( + address=actors["charlie"], + storage_changes=[ + BalStorageSlot( + slot=0, + slot_changes=[ + BalStorageChange( + block_access_index=idx, + post_value=0xDEAD, + ) + ], + ) + ], + ) + ), + id="extra_storage_write_uninvolved_account", + ), + pytest.param( + lambda idx, **actors: append_account( # noqa: ARG005 + BalAccountChange( + address=actors["charlie"], + ) + ), + id="extra_account_access", + ), + pytest.param( + lambda idx, **actors: append_storage( # noqa: ARG005 + address=actors["oracle"], + slot=999, + read=True, + ), + id="extra_storage_read", + ), + ], +) +@pytest.mark.parametrize( + "bal_index", + [ + pytest.param(1, id="same_tx"), + pytest.param(2, id="system_tx"), + pytest.param(3, id="out_of_bounds"), + ], +) +def test_bal_invalid_extraneous_entries( + blockchain_test: BlockchainTestFiller, + pre: Alloc, + modifier: Callable, + bal_index: int, +) -> None: + """ + Test that clients reject blocks where BAL contains extraneous entries. + + Alice sends 100 wei to Oracle (1 transaction). Oracle reads storage slot 0. + Charlie is uninvolved in this transaction. + A valid BAL is created containing nonce change for Alice, balance change + and storage read for Oracle which is further modified as: + + - extra_nonce: Extra nonce change for Oracle. + - extra_balance: Extra balance change for uninvolved Charlie. + - extra_code: Extra code change for Oracle. + - extra_storage_write_touched: Extra storage write for an already read slot + (slot 0) for Oracle. + - extra_storage_write_untouched: Extra storage write for an unread slot + (slot 1) for Oracle. + - extra_storage_write_uninvolved_account: Extra storage write for + uninvolved account (Charlie) that isn't accessed at all. + - extra_account_access: Uninvolved account (Charlie) added to BAL entirely. 
+ - extra_storage_read: Extra storage read for Oracle (slot 999). + + BAL is corrupted with extraneous entries at various block_access_index + values: + - bal_index=1: current transaction + - bal_index=2: system transaction (tx_count + 1) + - bal_index=3: beyond system transaction (tx_count + 2) + """ + transfer_value = 100 + + alice = pre.fund_eoa() + oracle = pre.deploy_contract(code=Op.SLOAD(0), storage={0: 42}) + charlie = pre.fund_eoa(amount=0) + + tx = Transaction( + sender=alice, + to=oracle, + value=transfer_value, + gas_limit=1_000_000, + ) + + blockchain_test( + pre=pre, + # The block reverts and the post state remains unchanged. + post=pre, + blocks=[ + Block( + txs=[tx], + exception=BlockException.INVALID_BLOCK_ACCESS_LIST, + expected_block_access_list=BlockAccessListExpectation( + # Valid BAL expectation: nonce change for Alice, + # balance change and storage read for Oracle. + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange( + block_access_index=1, post_nonce=1 + ) + ], + ), + oracle: BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + block_access_index=1, + post_balance=transfer_value, + ) + ], + storage_reads=[0], + ), + } + ).modify( + # The parameterized modifier is applied to the BAL + # which adds an extraneous entry. + modifier( + idx=bal_index, + alice=alice, + oracle=oracle, + charlie=charlie, + ) + ), + ) + ], + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 09cd675424..b605f43136 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -117,11 +117,9 @@ | `test_bal_4788_selfdestruct_to_beacon_root` | Ensure BAL captures `SELFDESTRUCT` to beacon root address alongside system call storage writes | Single block: Pre-execution system call writes beacon root to storage. Transaction: Alice calls contract (pre-funded with 100 wei) that selfdestructs with `BEACON_ROOTS_ADDRESS` as beneficiary. | BAL **MUST** include at `block_access_index=0`: `BEACON_ROOTS_ADDRESS` with `storage_changes` (timestamp and root slots from system call). At `block_access_index=1`: Alice with `nonce_changes`, contract with `balance_changes` (100→0), `BEACON_ROOTS_ADDRESS` with `balance_changes` (receives 100 wei). | ✅ Completed | | `test_bal_7702_double_auth_reset_minimal` | Ensure BAL tracks multiple 7702 nonce increments but filters net-zero code change | Single transaction contains two EIP-7702 authorizations for `Alice`: (1) first auth sets delegation `0xef0100\|\|Oracle`, (2) second auth clears delegation back to empty. Transaction sends 10 wei to `Bob`. Two variants: (a) Self-funded: `Alice` is tx sender (one tx nonce bump + two auth bumps → nonce 0→3). (b) Sponsored: `Relayer` is tx sender (`Alice` only in auths → nonce 0→2 for `Alice`, plus one nonce bump for `Relayer`). | Variant (a): BAL **MUST** include `Alice` with `nonce_changes` 0→3. Variant (b): BAL **MUST** include `Alice` with `nonce_changes` 0→2 and `Relayer` with its own `nonce_changes`. For both variants, BAL **MUST NOT** include `code_changes` for `Alice` (net code is empty), **MUST** include `Bob` with `balance_changes` (receives 10 wei), and `Oracle` **MUST NOT** appear in BAL. 
| 🟡 Planned | | `test_bal_selfdestruct_send_to_sender` | Ensure BAL tracks SELFDESTRUCT sending all funds back to the tx sender (no burn) | Pre-state: contract `C` exists from a prior transaction with non-empty code and balance = 100 wei. EOA `Alice` sends a transaction calling `C`. `C`’s code executes `SELFDESTRUCT(Alice)`. Under EIP-6780, because `C` was not created in this transaction, SELFDESTRUCT does not delete code or storage; it only transfers the entire 100 wei balance from `C` to `Alice`. Final post-state: `C` still exists with the same code and balance = 0; `Alice`’s balance increased by 100 wei (ignoring gas for this test). | BAL **MUST** include `Alice` with `nonce_changes` (tx sender) and `balance_changes` reflecting receipt of 100 wei, and **MUST** include `C` with `balance_changes` 100→0 and no `code_changes`. BAL **MUST NOT** include any other accounts. This test ensures SELFDESTRUCT-to-sender is modeled as a pure value transfer (no burn, no code deletion). | 🟡 Planned | -| `test_bal_spurious_entry_index_plus_2_with_cross_tx_read` | Ensure clients reject BALs containing a spurious entry at `bal_index = len(transactions)+2`, even if its slot is legitimately read elsewhere in the block | Block with `N` txs. BAL is modified to include an extra `StorageKey` entry with `block_access_index = N+2` for `(VictimContract, slot=0x01)`. Additionally include another tx in the same block that performs `SLOAD(VictimContract, 0x01)` (legitimate read). | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate that every `block_access_index` in BAL is in-range (≤ `N` plus any allowed system ops), and **MUST NOT** accept out-of-range indices even if the `(address, slot)` appears elsewhere legitimately. | 🟡 Planned | -| `test_bal_spurious_entry_index_plus_2_with_cross_tx_write` | Ensure clients reject BALs containing a spurious entry at `bal_index = len(transactions)+2`, even if its slot is legitimately written elsewhere in the block | Block with `N` txs. BAL is modified to include an extra `StorageKey` entry with `block_access_index = N+2` for `(VictimContract, slot=0x01)`. Additionally include another tx in the same block that performs `SSTORE(VictimContract, 0x01, 0x42)` (legitimate write). | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** validate `block_access_index` bounds and reject spurious indices regardless of whether the referenced `(address, slot)` is otherwise accessed or mutated in the block. | 🟡 Planned | -| `test_bal_spurious_entry_index_plus_2_no_other_txs` | Ensure clients reject BALs containing a spurious entry at `bal_index = len(transactions)+2` when no other transaction touches the referenced slot | Block with `N` txs that do not access `(VictimContract, slot=0x01)`. BAL is modified to include an extra `StorageKey` entry with `block_access_index = N+2` for `(VictimContract, slot=0x01)`. | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** reject any BAL that contains out-of-range `block_access_index` values, independent of access patterns in the executed block. | 🟡 Planned | | `test_bal_7002_clean_sweep` | Ensure BAL correctly tracks "clean sweep" where all withdrawal requests are dequeued in same block (requests ≤ MAX). Parameterized: (1) pubkey first 32 bytes zero / non-zero, (2) amount zero / non-zero | Alice sends transaction to `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS` with 1 withdrawal request. 
Validator pubkey has either first 32 bytes zero or non-zero. Amount is either zero or non-zero. Since 1 ≤ MAX_WITHDRAWAL_REQUESTS_PER_BLOCK, post-execution system call dequeues all requests ("clean sweep"), resetting head and tail to 0. | BAL **MUST** include Alice with `nonce_changes` at `block_access_index=1`. `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS` **MUST** have: `balance_changes` at `block_access_index=1` (receives fee), `storage_reads` for excess, head, and slot 5 (first 32 bytes of pubkey) if zero. At `block_access_index=1` (tx enqueue): `storage_changes` for count (0→1), tail (0→1), slot 4 (source address), slot 5 (first 32 bytes, **ONLY** if non-zero), slot 6. At `block_access_index=2` (post-exec dequeue): `storage_changes` for count (1→0), tail (1→0). Clean sweep invariant: when all requests dequeued, both head and tail reset to 0. | ✅ Completed | | `test_bal_7002_partial_sweep` | Ensure BAL correctly tracks queue overflow when requests exceed MAX, demonstrating partial sweep in block 1 and cleanup in block 2 | Block 1: 20 different EOAs each send withdrawal request to `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS`. Since 20 > MAX_WITHDRAWAL_REQUESTS_PER_BLOCK, only first MAX requests dequeued ("partial sweep"), leaving 4 in queue. Block 2: Empty block (no transactions), remaining 4 requests dequeued ("clean sweep"), queue becomes empty. | Block 1 BAL **MUST** include all 20 senders with `nonce_changes` at respective `block_access_index` (1-20). `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS` at each tx: `storage_changes` for count (increments to 20), tail (increments to 20). At `block_access_index=21` (post-exec partial dequeue): `storage_changes` for count (20→0), head (0→MAX). Partial sweep: head advances by MAX, tail stays 20, queue has 4 remaining (tail - head = 4). Block 2 BAL **MUST** include `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS` at `block_access_index=1` (post-exec clean sweep): `storage_changes` for head (MAX→0), tail (20→0). Clean sweep: both head and tail reset to 0, queue empty. |✅ Completed | | `test_bal_7002_no_withdrawal_requests` | Ensure BAL captures EIP-7002 system contract dequeue operation even when block has no withdrawal requests | Block with 1 transaction: Alice sends 10 wei to Bob. No withdrawal requests submitted. | BAL **MUST** include Alice with `nonce_changes` at `block_access_index=1`. BAL **MUST** include Bob with `balance_changes` at `block_access_index=1`. BAL **MUST** include EIP-7002 system contract (`WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS`) with `storage_reads` for slots: excess (slot 0), count (slot 1), head (slot 2), tail (slot 3). System contract **MUST NOT** have `storage_changes` (no writes occur when queue is empty). This test demonstrates that the post-execution dequeue operation always runs and reads queue state, even when no requests are present. | ✅ Completed | | `test_bal_7002_request_from_contract` | Ensure BAL captures withdrawal request from contract with correct source address | Alice calls `RelayContract` which internally calls EIP-7002 system contract with withdrawal request. Withdrawal request should have `source_address = RelayContract` (not Alice). | BAL **MUST** include Alice with `nonce_changes` at `block_access_index=1`. BAL **MUST** include `RelayContract` with `balance_changes` (fee paid to system contract) at `block_access_index=1`. BAL **MUST** include system contract with `balance_changes`, `storage_reads`, and `storage_changes` (queue modified). Source address in withdrawal request **MUST** be `RelayContract`. 
Clean sweep: count and tail reset to 0 at `block_access_index=2`. | ✅ Completed | | `test_bal_7002_request_invalid` | Ensure BAL correctly handles invalid withdrawal request scenarios | Parameterized test with 8 invalid scenarios: (1) insufficient_fee (fee=0), (2) calldata_too_short (55 bytes), (3) calldata_too_long (57 bytes), (4) oog (insufficient gas), (5-7) invalid_call_type (DELEGATECALL/STATICCALL/CALLCODE), (8) contract_reverts. Tests both EOA and contract-based withdrawal requests. | BAL **MUST** include sender with `nonce_changes` at `block_access_index=1`. BAL **MUST** include system contract with `storage_reads` for slots: excess (slot 0), count (slot 1), head (slot 2), tail (slot 3). System contract **MUST NOT** have `storage_changes` (transaction failed, no queue modification). | ✅ Completed | +| `test_bal_invalid_extraneous_entries` | Verify clients reject blocks with any type of extraneous BAL entries | Alice sends 100 wei to Oracle contract (which reads storage slot 0). Charlie is uninvolved in this transaction. A valid BAL is created containing nonce change for Alice, balance change and storage read for Oracle. The BAL is corrupted by adding various extraneous entries: (1) extra_nonce, (2) extra_balance, (3) extra_code, (4) extra_storage_write_touched (slot 0 - already read), (5) extra_storage_write_untouched (slot 1 - not accessed), (6) extra_storage_write_uninvolved_account (Charlie - uninvolved account), (7) extra_account_access (Charlie), (8) extra_storage_read (slot 999). Each tested at block_access_index 1 (same tx), 2 (system tx), 3 (out of bounds). | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** detect any extraneous entries in BAL. | ✅ Completed | From 1075f15a997daab676eb1436492aaeeff1d4c35d Mon Sep 17 00:00:00 2001 From: Guruprasad Kamath <48196632+gurukamath@users.noreply.github.com> Date: Mon, 12 Jan 2026 17:02:29 +0100 Subject: [PATCH 063/154] refactor(eip7928): refactor net zero filtering in BALs (#1899) * refactor(eip7928): refactor net zero filtering in BALs * refactor(eip7928): handle selfdestruct correctly --- src/ethereum/forks/amsterdam/fork.py | 12 +- src/ethereum/forks/amsterdam/state_tracker.py | 138 +++++++++--------- .../forks/amsterdam/vm/interpreter.py | 3 + 3 files changed, 74 insertions(+), 79 deletions(-) diff --git a/src/ethereum/forks/amsterdam/fork.py b/src/ethereum/forks/amsterdam/fork.py index 7da3ed03ce..3e45c3e953 100644 --- a/src/ethereum/forks/amsterdam/fork.py +++ b/src/ethereum/forks/amsterdam/fork.py @@ -681,7 +681,7 @@ def process_system_transaction( # Commit system transaction changes to block frame # System transactions always succeed (or block is invalid) - commit_transaction_frame(tx_env.state_changes, block_env.state) + commit_transaction_frame(tx_env.state_changes) return system_tx_output @@ -1091,15 +1091,11 @@ def process_transaction( for address in tx_output.accounts_to_delete: destroy_account(block_env.state, address) + track_selfdestruct(tx_env.state_changes, address) # EIP-7928: Commit transaction frame (includes net-zero filtering). # Must happen AFTER destroy_account so filtering sees correct state. 
- commit_transaction_frame(tx_env.state_changes, block_env.state) - - # EIP-7928: Track in-transaction self-destruct normalization AFTER merge - # Convert storage writes to reads and remove nonce/code changes - for address in tx_output.accounts_to_delete: - track_selfdestruct(block_env.state_changes, address) + commit_transaction_frame(tx_env.state_changes) def process_withdrawals( @@ -1140,7 +1136,7 @@ def increase_recipient_balance(recipient: Account) -> None: destroy_account(block_env.state, wd.address) # EIP-7928: Filter net-zero balance changes for withdrawals - filter_net_zero_frame_changes(block_env.state_changes, block_env.state) + filter_net_zero_frame_changes(block_env.state_changes) def check_gas_limit(gas_limit: Uint, parent_gas_limit: Uint) -> bool: diff --git a/src/ethereum/forks/amsterdam/state_tracker.py b/src/ethereum/forks/amsterdam/state_tracker.py index 05461ea89b..3ed1360e62 100644 --- a/src/ethereum/forks/amsterdam/state_tracker.py +++ b/src/ethereum/forks/amsterdam/state_tracker.py @@ -12,7 +12,7 @@ """ from dataclasses import dataclass, field -from typing import TYPE_CHECKING, Dict, Optional, Set, Tuple +from typing import Dict, Optional, Set, Tuple from ethereum_types.bytes import Bytes, Bytes32 from ethereum_types.numeric import U64, U256, Uint @@ -20,9 +20,6 @@ from .block_access_lists.rlp_types import BlockAccessIndex from .fork_types import Address -if TYPE_CHECKING: - from .state import State - @dataclass class StateChanges: @@ -135,30 +132,13 @@ def capture_pre_balance( The current balance value. """ + # Only capture pre-values in a transaction level + # or block level frame + assert tx_frame.parent is None or tx_frame.parent.parent is None if address not in tx_frame.pre_balances: tx_frame.pre_balances[address] = balance -def capture_pre_nonce( - tx_frame: StateChanges, address: Address, nonce: U64 -) -> None: - """ - Capture pre-nonce if not already captured (first-write-wins). - - Parameters - ---------- - tx_frame : - The transaction-level frame. - address : - The address whose nonce to capture. - nonce : - The current nonce value. - - """ - if address not in tx_frame.pre_nonces: - tx_frame.pre_nonces[address] = nonce - - def capture_pre_storage( tx_frame: StateChanges, address: Address, key: Bytes32, value: U256 ) -> None: @@ -177,6 +157,9 @@ def capture_pre_storage( The current storage value. """ + # Only capture pre-values in a transaction level + # or block level frame + assert tx_frame.parent is None or tx_frame.parent.parent is None slot = (address, key) if slot not in tx_frame.pre_storage: tx_frame.pre_storage[slot] = value @@ -198,6 +181,9 @@ def capture_pre_code( The current code value. """ + # Only capture pre-values in a transaction level + # or block level frame + assert tx_frame.parent is None or tx_frame.parent.parent is None if address not in tx_frame.pre_code: tx_frame.pre_code[address] = code @@ -328,7 +314,7 @@ def track_code_change( def track_selfdestruct( - state_changes: StateChanges, + tx_frame: StateChanges, address: Address, ) -> None: """ @@ -339,30 +325,42 @@ def track_selfdestruct( Parameters ---------- - state_changes : - The state changes tracker. + tx_frame : + The state changes tracker. Should be a transaction frame. address : The address that self-destructed. 
""" - idx = state_changes.block_access_index + # Has to be a transaction frame + assert tx_frame.parent is not None and tx_frame.parent.parent is None + + idx = tx_frame.block_access_index # Remove nonce changes from current transaction - state_changes.nonce_changes = { + tx_frame.nonce_changes = { (addr, i, nonce) - for addr, i, nonce in state_changes.nonce_changes + for addr, i, nonce in tx_frame.nonce_changes if not (addr == address and i == idx) } + # Remove balance changes from current transaction + if (address, idx) in tx_frame.balance_changes: + pre_balance = tx_frame.pre_balances[address] + if pre_balance == U256(0): + # Post balance will be U256(0) after deletion. + # So no change and hence bal does not need to + # capture anything. + del tx_frame.balance_changes[(address, idx)] + # Remove code changes from current transaction - if (address, idx) in state_changes.code_changes: - del state_changes.code_changes[(address, idx)] + if (address, idx) in tx_frame.code_changes: + del tx_frame.code_changes[(address, idx)] # Convert storage writes from current transaction to reads - for addr, key, i in list(state_changes.storage_writes.keys()): + for addr, key, i in list(tx_frame.storage_writes.keys()): if addr == address and i == idx: - del state_changes.storage_writes[(addr, key, i)] - state_changes.storage_reads.add((addr, key)) + del tx_frame.storage_writes[(addr, key, i)] + tx_frame.storage_reads.add((addr, key)) def merge_on_success(child_frame: StateChanges) -> None: @@ -436,10 +434,7 @@ def merge_on_failure(child_frame: StateChanges) -> None: # merged on failure - they are discarded -def commit_transaction_frame( - tx_frame: StateChanges, - state: "State", -) -> None: +def commit_transaction_frame(tx_frame: StateChanges) -> None: """ Commit transaction frame to block frame. @@ -450,15 +445,13 @@ def commit_transaction_frame( ---------- tx_frame : The transaction frame to commit. - state : - The current state (used for net-zero filtering). """ assert tx_frame.parent is not None block_frame = tx_frame.parent # Filter net-zero changes before committing - filter_net_zero_frame_changes(tx_frame, state) + filter_net_zero_frame_changes(tx_frame) # Merge address accesses block_frame.touched_addresses.update(tx_frame.touched_addresses) @@ -506,10 +499,7 @@ def create_child_frame(parent: StateChanges) -> StateChanges: ) -def filter_net_zero_frame_changes( - tx_frame: StateChanges, - state: "State", -) -> None: +def filter_net_zero_frame_changes(tx_frame: StateChanges) -> None: """ Filter net-zero changes from transaction frame before commit. @@ -521,44 +511,50 @@ def filter_net_zero_frame_changes( ---------- tx_frame : The transaction-level state changes frame. - state : - The current state to read final values from. 
""" - # Import locally to avoid circular import - from .state import get_account - idx = tx_frame.block_access_index # Filter storage: compare against pre_storage, convert net-zero to reads - for addr, key, i in list(tx_frame.storage_writes.keys()): - if i != idx: - continue - final_value = tx_frame.storage_writes[(addr, key, i)] + addresses_to_check_storage = [ + (addr, key) + for (addr, key, i) in tx_frame.storage_writes.keys() + if i == idx + ] + for addr, key in addresses_to_check_storage: + # For any (address, key) whose balance has changed, its + # pre-value should have been captured + assert (addr, key) in tx_frame.pre_storage + pre_value = tx_frame.pre_storage[(addr, key)] + post_value = tx_frame.storage_writes[(addr, key, idx)] if (addr, key) in tx_frame.pre_storage: - if tx_frame.pre_storage[(addr, key)] == final_value: + if pre_value == post_value: # Net-zero write - convert to read - del tx_frame.storage_writes[(addr, key, i)] + del tx_frame.storage_writes[(addr, key, idx)] tx_frame.storage_reads.add((addr, key)) # Filter balance: compare pre vs post, remove if equal - addresses_to_check = [ + addresses_to_check_balance = [ addr for (addr, i) in tx_frame.balance_changes.keys() if i == idx ] - for addr in addresses_to_check: - if addr in tx_frame.pre_balances: - pre_balance = tx_frame.pre_balances[addr] - post_balance = get_account(state, addr).balance - if pre_balance == post_balance: - del tx_frame.balance_changes[(addr, idx)] + for addr in addresses_to_check_balance: + # For any account whose balance has changed, its + # pre-balance should have been captured + assert addr in tx_frame.pre_balances + pre_balance = tx_frame.pre_balances[addr] + post_balance = tx_frame.balance_changes[(addr, idx)] + if pre_balance == post_balance: + del tx_frame.balance_changes[(addr, idx)] # Filter code: compare pre vs post, remove if equal - for addr, i in list(tx_frame.code_changes.keys()): - if i != idx: - continue - final_code = tx_frame.code_changes[(addr, i)] - pre_code = tx_frame.pre_code.get(addr, b"") - if pre_code == final_code: - del tx_frame.code_changes[(addr, i)] + addresses_to_check_code = [ + addr for (addr, i) in tx_frame.code_changes.keys() if i == idx + ] + for addr in addresses_to_check_code: + assert addr in tx_frame.pre_code + pre_code = tx_frame.pre_code[addr] + post_code = tx_frame.code_changes[(addr, idx)] + if pre_code == post_code: + del tx_frame.code_changes[(addr, idx)] # Nonces: no filtering needed (nonces only increment, never net-zero) diff --git a/src/ethereum/forks/amsterdam/vm/interpreter.py b/src/ethereum/forks/amsterdam/vm/interpreter.py index 154c56de11..d73ba88a72 100644 --- a/src/ethereum/forks/amsterdam/vm/interpreter.py +++ b/src/ethereum/forks/amsterdam/vm/interpreter.py @@ -47,6 +47,7 @@ from ..state_tracker import ( StateChanges, capture_pre_balance, + capture_pre_code, merge_on_failure, merge_on_success, track_address, @@ -213,6 +214,8 @@ def process_create_message(message: Message) -> Evm: U64(nonce_after), ) + capture_pre_code(message.tx_env.state_changes, message.current_target, b"") + evm = process_message(message) if not evm.error: contract_code = evm.output From 9f2237a2ea6e3f09e48b5e83e5c683276ea09f32 Mon Sep 17 00:00:00 2001 From: fselmo Date: Mon, 29 Dec 2025 13:41:20 -0700 Subject: [PATCH 064/154] feat(test): OOG and success selfdestruct tests to all precompiles feat(test): Expand fork range for selfdestruct to precompile tests - For successful tests, start at Homestead where precompiles were introduced (EIPs 196, 197, 198). 
- For OOG tests, start at Tangerine where operation gas costs were introduced (EIP 150). --- .../src/execution_testing/forks/__init__.py | 2 + .../execution_testing/forks/forks/forks.py | 2 +- .../test_block_access_lists_opcodes.py | 89 -------- .../test_cases.md | 2 +- tests/homestead/selfdestruct/__init__.py | 1 + .../selfdestruct/test_selfdestruct.py | 198 ++++++++++++++++++ tests/homestead/yul/__init__.py | 1 - tests/tangerine/__init__.py | 4 + .../eip150_operation_gas_costs/__init__.py | 1 + .../eip150_operation_gas_costs/spec.py | 21 ++ .../test_eip150_selfdestruct.py | 117 +++++++++++ 11 files changed, 346 insertions(+), 92 deletions(-) create mode 100644 tests/homestead/selfdestruct/__init__.py create mode 100644 tests/homestead/selfdestruct/test_selfdestruct.py delete mode 100644 tests/homestead/yul/__init__.py create mode 100644 tests/tangerine/__init__.py create mode 100644 tests/tangerine/eip150_operation_gas_costs/__init__.py create mode 100644 tests/tangerine/eip150_operation_gas_costs/spec.py create mode 100644 tests/tangerine/eip150_operation_gas_costs/test_eip150_selfdestruct.py diff --git a/packages/testing/src/execution_testing/forks/__init__.py b/packages/testing/src/execution_testing/forks/__init__.py index 009d13837f..4000069af1 100644 --- a/packages/testing/src/execution_testing/forks/__init__.py +++ b/packages/testing/src/execution_testing/forks/__init__.py @@ -25,6 +25,7 @@ Paris, Prague, Shanghai, + Tangerine, ) from .forks.transition import ( BerlinToLondonAt5, @@ -97,6 +98,7 @@ "Frontier", "GrayGlacier", "Homestead", + "Tangerine", "InvalidForkError", "Istanbul", "London", diff --git a/packages/testing/src/execution_testing/forks/forks/forks.py b/packages/testing/src/execution_testing/forks/forks/forks.py index e10e87d86e..92e07d4b55 100644 --- a/packages/testing/src/execution_testing/forks/forks/forks.py +++ b/packages/testing/src/execution_testing/forks/forks/forks.py @@ -1472,7 +1472,7 @@ class DAOFork(Homestead, ignore=True): pass -class Tangerine(DAOFork, ignore=True): +class Tangerine(DAOFork, transition_tool_name="TangerineWhistle"): """Tangerine fork (EIP-150).""" pass diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py index 99f2ab58ba..80d0c504a1 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py @@ -2873,95 +2873,6 @@ def test_bal_transient_storage_not_tracked( ) -@pytest.mark.pre_alloc_group( - "selfdestruct_to_precompile", - reason="Modifies precompile balance, must be isolated in EngineX format", -) -def test_bal_selfdestruct_to_precompile( - pre: Alloc, - blockchain_test: BlockchainTestFiller, -) -> None: - """ - Test BAL with SELFDESTRUCT to precompile (ecrecover 0x01). - - Victim (balance=100) selfdestructs to precompile 0x01. 
- - Expected BAL: - - Victim: balance_changes (100→0) - - Precompile 0x01: balance_changes (0→100), no code/nonce changes - """ - alice = pre.fund_eoa() - - contract_balance = 100 - ecrecover_precompile = Address(1) # 0x0000...0001 - - # Contract that selfdestructs to ecrecover precompile - victim_code = Op.SELFDESTRUCT(ecrecover_precompile) - - victim = pre.deploy_contract(code=victim_code, balance=contract_balance) - - # Caller that triggers the selfdestruct - caller_code = Op.CALL(100_000, victim, 0, 0, 0, 0, 0) + Op.STOP - caller = pre.deploy_contract(code=caller_code) - - tx = Transaction( - sender=alice, - to=caller, - gas_limit=1_000_000, - ) - - block = Block( - txs=[tx], - expected_block_access_list=BlockAccessListExpectation( - account_expectations={ - alice: BalAccountExpectation( - nonce_changes=[ - BalNonceChange(block_access_index=1, post_nonce=1) - ], - ), - caller: BalAccountExpectation.empty(), - # Victim (selfdestructing contract): balance changes 100→0 - # Explicitly verify ALL fields to avoid false positives - victim: BalAccountExpectation( - nonce_changes=[], # Contract nonce unchanged - balance_changes=[ - BalBalanceChange(block_access_index=1, post_balance=0) - ], - code_changes=[], # Code unchanged (post-Cancun) - storage_changes=[], # No storage changes - storage_reads=[], # No storage reads - ), - # Precompile receives selfdestruct balance - # Explicitly verify ALL fields to avoid false positives - ecrecover_precompile: BalAccountExpectation( - nonce_changes=[], # MUST NOT have nonce changes - balance_changes=[ - BalBalanceChange( - block_access_index=1, post_balance=contract_balance - ) - ], - code_changes=[], # MUST NOT have code changes - storage_changes=[], # MUST NOT have storage changes - storage_reads=[], # MUST NOT have storage reads - ), - } - ), - ) - - blockchain_test( - pre=pre, - blocks=[block], - post={ - alice: Account(nonce=1), - caller: Account(), - # Victim still exists with 0 balance (post-Cancun SELFDESTRUCT) - victim: Account(balance=0), - # Precompile has received the balance - ecrecover_precompile: Account(balance=contract_balance), - }, - ) - - def test_bal_create_early_failure( pre: Alloc, blockchain_test: BlockchainTestFiller, diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index b605f43136..0d2fc153d1 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -103,7 +103,7 @@ | `test_bal_call_revert_insufficient_funds` | Ensure BAL handles CALL failure due to insufficient balance (not OOG) | Contract (balance=100, storage slot 0x02=0xDEAD) executes: `SLOAD(0x01), CALL(target, value=1000), SSTORE(0x02, result)`. CALL fails because 1000 > 100. Target address 0xDEAD (pre-existing with non-zero balance to avoid pruning). Note: slot 0x02 must start non-zero so SSTORE(0) is a change. | BAL **MUST** include: (1) Contract with `storage_reads` for slot 0x01, `storage_changes` for slot 0x02 (value=0, CALL returned failure). (2) Target (0xDEAD) **MUST** appear in BAL with empty changes - target is accessed before balance check fails. | ✅ Completed | | `test_bal_lexicographic_address_ordering` | Ensure BAL enforces strict lexicographic byte-wise ordering | Pre-fund three addresses with specific byte patterns: `addr_low = 0x0000...0001`, `addr_mid = 0x0000...0100`, `addr_high = 0x0100...0000`. 
Contract touches them in reverse order: `BALANCE(addr_high), BALANCE(addr_low), BALANCE(addr_mid)`. Additionally, include two endian-trap addresses that are byte-reversals of each other: `addr_endian_low = 0x0100000000000000000000000000000000000002`, `addr_endian_high = 0x0200000000000000000000000000000000000001`. Note: `reverse(addr_endian_low) = addr_endian_high`. Correct lexicographic order: `addr_endian_low < addr_endian_high` (0x01 < 0x02 at byte 0). If implementation incorrectly reverses bytes before comparing, it would get `addr_endian_low > addr_endian_high` (wrong). | BAL account list **MUST** be sorted lexicographically by address bytes: `addr_low` < `addr_mid` < `addr_high` < `addr_endian_low` < `addr_endian_high`, regardless of access order. The endian-trap addresses specifically catch byte-reversal bugs where addresses are compared with wrong byte order. Complements `test_bal_invalid_account_order` which tests rejection; this tests correct generation. | ✅ Completed | | `test_bal_transient_storage_not_tracked` | Ensure BAL excludes EIP-1153 transient storage operations | Contract executes: `TSTORE(0x01, 0x42)` (transient write), `TLOAD(0x01)` (transient read), `SSTORE(0x02, result)` (persistent write using transient value). | BAL **MUST** include slot 0x02 in `storage_changes` (persistent storage was modified). BAL **MUST NOT** include slot 0x01 in `storage_reads` or `storage_changes` (transient storage is not persisted, not needed for stateless execution). This verifies TSTORE/TLOAD don't pollute BAL. | ✅ Completed | -| `test_bal_selfdestruct_to_precompile` | Ensure BAL captures SELFDESTRUCT with precompile as beneficiary | Caller triggers victim contract (balance=100) to execute `SELFDESTRUCT(0x0000...0001)` (ecrecover precompile). Precompile starts with balance=0. | BAL **MUST** include: (1) Contract with `balance_changes` (100→0, loses balance to selfdestruct). (2) Precompile address 0x01 with `balance_changes` (0→100, receives selfdestruct balance). Precompile **MUST NOT** have `code_changes` or `nonce_changes`. This complements `test_bal_withdrawal_to_precompiles` (withdrawal) and `test_bal_precompile_funded` (tx value). | ✅ Completed | +| `test_bal_selfdestruct_to_precompile_and_oog` | Ensure BAL captures SELFDESTRUCT to precompile at different gas boundaries | Victim executes `SELFDESTRUCT(precompile)`. Parameterized by all precompiles and three scenarios: (1) Success, (2) OOG before state access, (3) OOG after state access. | Success: victim and precompile have `balance_changes`. OOG before state access: precompile **NOT** in BAL. OOG after state access: precompile in BAL with empty changes. | ✅ Completed | | `test_bal_self_destruct_oog` | Ensure BAL correctly tracks SELFDESTRUCT beneficiary based on gas boundaries | Alice calls `Caller` contract which CALLs `SelfDestructContract` with precisely controlled gas. `SelfDestructContract` attempts SELFDESTRUCT to new account `Beneficiary`. Static gas = G_VERY_LOW + G_SELF_DESTRUCT + G_COLD_ACCOUNT_ACCESS. Parameterized: (1) OOG before state access (gas = static - 1), (2) OOG after state access (gas = static, but insufficient for G_NEW_ACCOUNT). | For OOG before state access: BAL **MUST NOT** include `Beneficiary` (no state access occurred). For OOG after state access: BAL **MUST** include `Beneficiary` with empty changes (state was accessed before G_NEW_ACCOUNT check failed). Both cases: Alice with `nonce_changes`, `Caller` and `SelfDestructContract` with empty changes. Contract balance unchanged. 
| ✅ Completed | | `test_bal_withdrawal_to_7702_delegation` | Ensure BAL correctly handles withdrawal to a 7702 delegated account (no code execution on recipient) | Tx1: Alice authorizes delegation to Oracle (sets code to `0xef0100\|\|Oracle`). Withdrawal: 10 gwei sent to Alice. Single block with tx + withdrawal. | BAL **MUST** include: (1) Alice at block_access_index=1 with `code_changes` (delegation), `nonce_changes`. (2) Alice at block_access_index=2 with `balance_changes` (receives withdrawal). **Oracle MUST NOT appear** - withdrawals credit balance without executing recipient code, so delegation target is never accessed. This complements `test_bal_selfdestruct_to_7702_delegation` (selfdestruct) and `test_bal_withdrawal_no_evm_execution` (withdrawal to contract). | ✅ Completed | | `test_init_collision_create_tx` | Ensure BAL tracks CREATE collisions correctly (pre-Amsterdam test with BAL) | CREATE transaction targeting address with existing storage aborts | BAL **MUST** show empty expectations for collision address (no changes occur due to abort) | ✅ Completed | diff --git a/tests/homestead/selfdestruct/__init__.py b/tests/homestead/selfdestruct/__init__.py new file mode 100644 index 0000000000..8dd1611067 --- /dev/null +++ b/tests/homestead/selfdestruct/__init__.py @@ -0,0 +1 @@ +"""Tests for SELFDESTRUCT opcode behavior in various scenarios.""" diff --git a/tests/homestead/selfdestruct/test_selfdestruct.py b/tests/homestead/selfdestruct/test_selfdestruct.py new file mode 100644 index 0000000000..7df22fc549 --- /dev/null +++ b/tests/homestead/selfdestruct/test_selfdestruct.py @@ -0,0 +1,198 @@ +"""Test the SELFDESTRUCT opcode.""" + +from typing import Dict + +import pytest +from execution_testing import ( + Account, + Address, + Alloc, + BalAccountExpectation, + BalBalanceChange, + BalNonceChange, + Block, + BlockAccessListExpectation, + BlockchainTestFiller, + Initcode, + Op, + Transaction, + compute_create_address, +) +from execution_testing.forks import Byzantium, Cancun +from execution_testing.forks.helpers import Fork + + +@pytest.mark.pre_alloc_group( + "selfdestruct_to_precompile", + reason="Modifies precompile balance, must be isolated in EngineX format", +) +@pytest.mark.parametrize("same_tx_selfdestruct", [False, True]) +@pytest.mark.with_all_precompiles +@pytest.mark.valid_from("Homestead") +def test_selfdestruct_to_precompile( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + precompile: Address, + same_tx_selfdestruct: bool, +) -> None: + """ + Test successful SELFDESTRUCT to precompile with exact gas. + + Pre-Cancun: Contract is always destroyed. + >=Cancun (EIP-6780): Contract only destroyed if created in same + transaction. 
+ """ + alice = pre.fund_eoa() + + victim_balance = 100 + victim_code = Op.SELFDESTRUCT(precompile) + + gas_costs = fork.gas_costs() + push_cost = gas_costs.G_VERY_LOW + selfdestruct_cost = gas_costs.G_SELF_DESTRUCT + new_account_cost = gas_costs.G_NEW_ACCOUNT + exact_gas = push_cost + selfdestruct_cost + new_account_cost + + if same_tx_selfdestruct: + # Deploy and selfdestruct in same transaction + # Factory creates victim via CREATE, then calls it + initcode = Initcode(deploy_code=victim_code) + initcode_bytes = bytes(initcode) + + # pre-calculate the factory and victim addresses + factory_address = next(pre._contract_address_iterator) # type: ignore + victim = compute_create_address(address=factory_address, nonce=1) + + factory_code = ( + Op.MSTORE(0, Op.PUSH32(initcode_bytes)) + + Op.CREATE( + value=victim_balance, + offset=32 - len(initcode_bytes), + size=len(initcode_bytes), + ) + + Op.POP # Discard CREATE result, we know the address + + Op.CALL(gas=exact_gas, address=victim) + ) + # actual deploy using known address + factory = pre.deploy_contract( + address=factory_address, + code=factory_code, + balance=victim_balance, + ) + caller = factory + else: + # pre-existing contract + victim = pre.deploy_contract(code=victim_code, balance=victim_balance) + caller_code = Op.CALL(gas=exact_gas, address=victim) + caller = pre.deploy_contract(code=caller_code) + + tx = Transaction( + sender=alice, + to=caller, + gas_limit=200_000, + protected=fork >= Byzantium, + ) + + # BAL expectations >= Amsterdam + expected_block_access_list = None + if fork.header_bal_hash_required(): + if same_tx_selfdestruct: + # Factory does CREATE (nonce 1->2) and transfers balance to victim + # Victim is created and destroyed in same tx - no net changes + account_expectations: Dict[ + Address, BalAccountExpectation | None + ] = { + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + ), + caller: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=2) + ], + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=0) + ], + code_changes=[], + storage_changes=[], + storage_reads=[], + ), + # Victim created and destroyed in same tx - empty changes + victim: BalAccountExpectation.empty(), + precompile: BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + block_access_index=1, post_balance=victim_balance + ) + ], + nonce_changes=[], + code_changes=[], + storage_changes=[], + storage_reads=[], + ), + } + else: + account_expectations = { + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + ), + caller: BalAccountExpectation.empty(), + victim: BalAccountExpectation( + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=0) + ], + nonce_changes=[], + code_changes=[], + storage_changes=[], + storage_reads=[], + ), + precompile: BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + block_access_index=1, post_balance=victim_balance + ) + ], + nonce_changes=[], + code_changes=[], + storage_changes=[], + storage_reads=[], + ), + } + expected_block_access_list = BlockAccessListExpectation( + account_expectations=account_expectations + ) + + # post state depends on fork and same_tx_selfdestruct + contract_destroyed = fork < Cancun or same_tx_selfdestruct + # Factory nonce is 2 after CREATE, otherwise caller nonce stays at 1 + caller_nonce = 2 if same_tx_selfdestruct else 1 + if contract_destroyed: + post = { + alice: 
Account(nonce=1), + caller: Account(nonce=caller_nonce), + victim: Account.NONEXISTENT, + precompile: Account(balance=victim_balance), + } + else: + # >=Cancun with pre-existing contract, code preserved + post = { + alice: Account(nonce=1), + caller: Account(nonce=caller_nonce), + victim: Account(balance=0, code=victim_code), + precompile: Account(balance=victim_balance), + } + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=expected_block_access_list, + ) + ], + post=post, + ) diff --git a/tests/homestead/yul/__init__.py b/tests/homestead/yul/__init__.py deleted file mode 100644 index 172309b311..0000000000 --- a/tests/homestead/yul/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests using Yul source for contracts.""" diff --git a/tests/tangerine/__init__.py b/tests/tangerine/__init__.py new file mode 100644 index 0000000000..2d6e14d640 --- /dev/null +++ b/tests/tangerine/__init__.py @@ -0,0 +1,4 @@ +""" +Test cases for EVM functionality introduced in Tangerine, [EIP-608: Hardfork +Meta - Tangerine Whistle](https://eips.ethereum.org/EIPS/eip-608). +""" diff --git a/tests/tangerine/eip150_operation_gas_costs/__init__.py b/tests/tangerine/eip150_operation_gas_costs/__init__.py new file mode 100644 index 0000000000..87e9060643 --- /dev/null +++ b/tests/tangerine/eip150_operation_gas_costs/__init__.py @@ -0,0 +1 @@ +"""Tests for EIP-150 operation gas costs in the Tangerine Whistle fork.""" diff --git a/tests/tangerine/eip150_operation_gas_costs/spec.py b/tests/tangerine/eip150_operation_gas_costs/spec.py new file mode 100644 index 0000000000..edd24dd82d --- /dev/null +++ b/tests/tangerine/eip150_operation_gas_costs/spec.py @@ -0,0 +1,21 @@ +""" +[EIP-150: Operation Gas Costs](https://eips.ethereum.org/EIPS/eip-150) +introduced changes to the gas costs of certain EVM operations to mitigate DOS +attacks. This module contains tests that verify the correct implementation +of these gas cost changes in the Ethereum Virtual Machine (EVM). +""" + +from dataclasses import dataclass + + +@dataclass(frozen=True) +class ReferenceSpec: + """Defines the reference spec version and git path.""" + + git_path: str + version: str + + +ref_spec_150 = ReferenceSpec( + "EIPS/eip-150.md", "34acf72522b989d86e76efcaf42eba4cdb0b31ad" +) diff --git a/tests/tangerine/eip150_operation_gas_costs/test_eip150_selfdestruct.py b/tests/tangerine/eip150_operation_gas_costs/test_eip150_selfdestruct.py new file mode 100644 index 0000000000..039ec89ca9 --- /dev/null +++ b/tests/tangerine/eip150_operation_gas_costs/test_eip150_selfdestruct.py @@ -0,0 +1,117 @@ +""" +Tests for EIP-150 SELFDESTRUCT operation gas costs in the Tangerine +Whistle fork. 
+""" + +from typing import Dict + +import pytest +from execution_testing import ( + Account, + Address, + Alloc, + BalAccountExpectation, + BalNonceChange, + Block, + BlockAccessListExpectation, + BlockchainTestFiller, + Op, + Transaction, +) +from execution_testing.forks import Byzantium +from execution_testing.forks.helpers import Fork + +from .spec import ref_spec_150 + +REFERENCE_SPEC_GIT_PATH = ref_spec_150.git_path +REFERENCE_SPEC_VERSION = ref_spec_150.version + + +@pytest.mark.pre_alloc_group( + "selfdestruct_to_precompile_oog", + reason="Modifies precompile balance, must be isolated in EngineX format", +) +@pytest.mark.parametrize("oog_before_state_access", [True, False]) +@pytest.mark.with_all_precompiles +@pytest.mark.valid_from("Tangerine") +def test_selfdestruct_to_precompile_oog( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + precompile: Address, + oog_before_state_access: bool, +) -> None: + """ + Test SELFDESTRUCT to precompile with out-of-gas at different boundaries. + + - before_state_access: Precompile not touched (>= Amsterdam). + - after_state_access: Precompile touched but no balance change + (>= Amsterdam). + """ + alice = pre.fund_eoa() + + victim_balance = 100 + victim_code = Op.SELFDESTRUCT(precompile) + victim = pre.deploy_contract(code=victim_code, balance=victim_balance) + + gas_costs = fork.gas_costs() + push_cost = gas_costs.G_VERY_LOW + selfdestruct_cost = gas_costs.G_SELF_DESTRUCT + # exact gas would be: + # push_cost + selfdestruct_cost + new_account_cost + G_NEW_ACCOUNT + + if oog_before_state_access: + gas = push_cost + selfdestruct_cost - 1 + else: + gas = push_cost + selfdestruct_cost + + caller_code = Op.CALL(gas=gas, address=victim) + caller = pre.deploy_contract(code=caller_code) + + tx = Transaction( + sender=alice, + to=caller, + gas_limit=100_000, + protected=True if fork >= Byzantium else False, + ) + + # BAL expectations >= Amsterdam + expected_block_access_list = None + if fork.header_bal_hash_required(): + account_expectations: Dict[Address, BalAccountExpectation | None] = { + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + ), + caller: BalAccountExpectation.empty(), + victim: BalAccountExpectation.empty(), + } + if oog_before_state_access: + # precompile not touched, not in BAL + account_expectations[precompile] = None + else: + # precompile touched, in BAL with empty expectation + account_expectations[precompile] = BalAccountExpectation.empty() + expected_block_access_list = BlockAccessListExpectation( + account_expectations=account_expectations + ) + + # OOG: victim keeps balance and code, precompile unchanged + post = { + alice: Account(nonce=1), + caller: Account(), + victim: Account(balance=victim_balance, code=victim_code), + precompile: Account.NONEXISTENT, + } + + blockchain_test( + pre=pre, + blocks=[ + Block( + txs=[tx], + expected_block_access_list=expected_block_access_list, + ) + ], + post=post, + ) From dcca0f40aea14e631606f860e209d650de49818d Mon Sep 17 00:00:00 2001 From: fselmo Date: Tue, 30 Dec 2025 16:28:05 -0700 Subject: [PATCH 065/154] refactor(test): `Tangerine` -> `TangerineWhistle`; comments on PR #1954 Bonus: - fix(test): remove unnecessary isolation for enginex --- .../plugins/execute/eth_config/networks.yml | 2 +- .../execute/eth_config/tests/test_execute_eth_config.py | 2 +- .../cli/pytest_commands/plugins/forks/tests/test_forks.py | 5 ++++- .../pytest_commands/plugins/forks/tests/test_markers.py | 2 +- 
packages/testing/src/execution_testing/forks/__init__.py | 4 ++-- .../testing/src/execution_testing/forks/forks/forks.py | 8 ++++---- tests/homestead/selfdestruct/test_selfdestruct.py | 4 ---- tests/{tangerine => tangerine_whistle}/__init__.py | 0 .../eip150_operation_gas_costs/__init__.py | 0 .../eip150_operation_gas_costs/spec.py | 0 .../test_eip150_selfdestruct.py | 2 +- 11 files changed, 14 insertions(+), 15 deletions(-) rename tests/{tangerine => tangerine_whistle}/__init__.py (100%) rename tests/{tangerine => tangerine_whistle}/eip150_operation_gas_costs/__init__.py (100%) rename tests/{tangerine => tangerine_whistle}/eip150_operation_gas_costs/spec.py (100%) rename tests/{tangerine => tangerine_whistle}/eip150_operation_gas_costs/test_eip150_selfdestruct.py (98%) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/networks.yml b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/networks.yml index 4b067f50bd..ce92de2959 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/networks.yml +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/networks.yml @@ -5,7 +5,7 @@ Mainnet: Frontier: 0 Homestead: 1150000 DAOFork: 1920000 - Tangerine: 2463000 + TangerineWhistle: 2463000 SpuriousDragon: 2675000 Byzantium: 4370000 Constantinople: 7280000 diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/tests/test_execute_eth_config.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/tests/test_execute_eth_config.py index 0f2f0826bb..2e47ecbfdf 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/tests/test_execute_eth_config.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/tests/test_execute_eth_config.py @@ -293,7 +293,7 @@ Frontier: 0 Homestead: 1150000 DAOFork: 1920000 - Tangerine: 2463000 + TangerineWhistle: 2463000 SpuriousDragon: 2675000 Byzantium: 4370000 Constantinople: 7280000 diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_forks.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_forks.py index 7aa4d0cd68..4fe7c3d44f 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_forks.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_forks.py @@ -45,11 +45,14 @@ def test_all_forks({StateTest.pytest_parameter_name()}): ] expected_skipped = 2 # eels doesn't support Constantinople expected_passed = ( - len(forks_under_test) * len(StateTest.supported_fixture_formats) + len([f for f in forks_under_test if not f.ignore()]) + * len(StateTest.supported_fixture_formats) - expected_skipped ) stdout = "\n".join(result.stdout.lines) for test_fork in forks_under_test: + if test_fork.ignore(): + continue for fixture_format in StateTest.supported_fixture_formats: if isinstance(fixture_format, LabeledFixtureFormat): fixture_format_label = fixture_format.label diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py index fbbc1b3123..b740872cc3 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py +++ 
b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py @@ -26,7 +26,7 @@ def test_case(state_test): valid_until='"Cancun"', ), [], - {"passed": 10, "failed": 0, "skipped": 1, "errors": 0}, + {"passed": 11, "failed": 0, "skipped": 1, "errors": 0}, id="valid_until", ), pytest.param( diff --git a/packages/testing/src/execution_testing/forks/__init__.py b/packages/testing/src/execution_testing/forks/__init__.py index 4000069af1..4a18c389b3 100644 --- a/packages/testing/src/execution_testing/forks/__init__.py +++ b/packages/testing/src/execution_testing/forks/__init__.py @@ -25,7 +25,7 @@ Paris, Prague, Shanghai, - Tangerine, + TangerineWhistle, ) from .forks.transition import ( BerlinToLondonAt5, @@ -98,7 +98,7 @@ "Frontier", "GrayGlacier", "Homestead", - "Tangerine", + "TangerineWhistle", "InvalidForkError", "Istanbul", "London", diff --git a/packages/testing/src/execution_testing/forks/forks/forks.py b/packages/testing/src/execution_testing/forks/forks/forks.py index 92e07d4b55..0435d43faa 100644 --- a/packages/testing/src/execution_testing/forks/forks/forks.py +++ b/packages/testing/src/execution_testing/forks/forks/forks.py @@ -1472,14 +1472,14 @@ class DAOFork(Homestead, ignore=True): pass -class Tangerine(DAOFork, transition_tool_name="TangerineWhistle"): - """Tangerine fork (EIP-150).""" +class TangerineWhistle(DAOFork): + """TangerineWhistle fork (EIP-150).""" pass -class SpuriousDragon(Tangerine, ignore=True): - """SpuriousDragon fork (EIP-155, EIP-158).""" +class SpuriousDragon(TangerineWhistle, ignore=True): + """SpuriousDragon fork.""" @classmethod def _calculate_call_gas( diff --git a/tests/homestead/selfdestruct/test_selfdestruct.py b/tests/homestead/selfdestruct/test_selfdestruct.py index 7df22fc549..46890da597 100644 --- a/tests/homestead/selfdestruct/test_selfdestruct.py +++ b/tests/homestead/selfdestruct/test_selfdestruct.py @@ -22,10 +22,6 @@ from execution_testing.forks.helpers import Fork -@pytest.mark.pre_alloc_group( - "selfdestruct_to_precompile", - reason="Modifies precompile balance, must be isolated in EngineX format", -) @pytest.mark.parametrize("same_tx_selfdestruct", [False, True]) @pytest.mark.with_all_precompiles @pytest.mark.valid_from("Homestead") diff --git a/tests/tangerine/__init__.py b/tests/tangerine_whistle/__init__.py similarity index 100% rename from tests/tangerine/__init__.py rename to tests/tangerine_whistle/__init__.py diff --git a/tests/tangerine/eip150_operation_gas_costs/__init__.py b/tests/tangerine_whistle/eip150_operation_gas_costs/__init__.py similarity index 100% rename from tests/tangerine/eip150_operation_gas_costs/__init__.py rename to tests/tangerine_whistle/eip150_operation_gas_costs/__init__.py diff --git a/tests/tangerine/eip150_operation_gas_costs/spec.py b/tests/tangerine_whistle/eip150_operation_gas_costs/spec.py similarity index 100% rename from tests/tangerine/eip150_operation_gas_costs/spec.py rename to tests/tangerine_whistle/eip150_operation_gas_costs/spec.py diff --git a/tests/tangerine/eip150_operation_gas_costs/test_eip150_selfdestruct.py b/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py similarity index 98% rename from tests/tangerine/eip150_operation_gas_costs/test_eip150_selfdestruct.py rename to tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py index 039ec89ca9..ec639e4b7e 100644 --- a/tests/tangerine/eip150_operation_gas_costs/test_eip150_selfdestruct.py +++ 
b/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py @@ -33,7 +33,7 @@ ) @pytest.mark.parametrize("oog_before_state_access", [True, False]) @pytest.mark.with_all_precompiles -@pytest.mark.valid_from("Tangerine") +@pytest.mark.valid_from("TangerineWhistle") def test_selfdestruct_to_precompile_oog( pre: Alloc, blockchain_test: BlockchainTestFiller, From 6e74073713685cc962be3dcb80f557f8e39bdbd0 Mon Sep 17 00:00:00 2001 From: fselmo Date: Tue, 30 Dec 2025 17:11:03 -0700 Subject: [PATCH 066/154] fix(tool): Fix EvmOneTransitionTool parsing for TangerineWhistle (add space) --- .../execution_testing/client_clis/clis/evmone.py | 6 ++++++ .../client_clis/transition_tool.py | 13 ++++++++++--- tests/homestead/selfdestruct/test_selfdestruct.py | 12 +++++++++--- 3 files changed, 25 insertions(+), 6 deletions(-) diff --git a/packages/testing/src/execution_testing/client_clis/clis/evmone.py b/packages/testing/src/execution_testing/client_clis/clis/evmone.py index bd762c4af3..6e2d404ccb 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/evmone.py +++ b/packages/testing/src/execution_testing/client_clis/clis/evmone.py @@ -47,6 +47,12 @@ class EvmOneTransitionTool(TransitionTool): supports_opcode_count: ClassVar[bool] = True supports_blob_params: ClassVar[bool] = True + # evmone uses space-separated fork names for some forks + fork_name_map: ClassVar[Dict[str, str]] = { + "TangerineWhistle": "Tangerine Whistle", + "SpuriousDragon": "Spurious Dragon", + } + def __init__( self, *, diff --git a/packages/testing/src/execution_testing/client_clis/transition_tool.py b/packages/testing/src/execution_testing/client_clis/transition_tool.py index 149b0b837d..1ac1b4cffe 100644 --- a/packages/testing/src/execution_testing/client_clis/transition_tool.py +++ b/packages/testing/src/execution_testing/client_clis/transition_tool.py @@ -145,6 +145,7 @@ class TransitionTool(EthereumCLI): supports_xdist: ClassVar[bool] = True supports_blob_params: ClassVar[bool] = False + fork_name_map: ClassVar[Dict[str, str]] = {} @abstractmethod def __init__( @@ -326,13 +327,19 @@ def _evaluate_filesystem( } output_paths["body"] = os.path.join("output", "txs.rlp") + # Get fork name and apply any tool-specific mapping + fork_name = ( + t8n_data.fork_name_if_supports_blob_params + if self.supports_blob_params + else t8n_data.fork_name + ) + fork_name = self.fork_name_map.get(fork_name, fork_name) + # Construct args for evmone-t8n binary args = [ str(self.binary), "--state.fork", - t8n_data.fork_name_if_supports_blob_params - if self.supports_blob_params - else t8n_data.fork_name, + fork_name, "--input.alloc", input_paths["alloc"], "--input.env", diff --git a/tests/homestead/selfdestruct/test_selfdestruct.py b/tests/homestead/selfdestruct/test_selfdestruct.py index 46890da597..991f9b73c5 100644 --- a/tests/homestead/selfdestruct/test_selfdestruct.py +++ b/tests/homestead/selfdestruct/test_selfdestruct.py @@ -22,15 +22,17 @@ from execution_testing.forks.helpers import Fork -@pytest.mark.parametrize("same_tx_selfdestruct", [False, True]) @pytest.mark.with_all_precompiles +@pytest.mark.parametrize("same_tx_selfdestruct", [False, True]) +@pytest.mark.parametrize("warm_beneficiary", [False, True]) @pytest.mark.valid_from("Homestead") -def test_selfdestruct_to_precompile( +def test_selfdestruct_to_precompile_and_oog_at_minus_1( pre: Alloc, blockchain_test: BlockchainTestFiller, fork: Fork, precompile: Address, same_tx_selfdestruct: bool, + warm_beneficiary: bool, ) -> None: """ Test successful 
SELFDESTRUCT to precompile with exact gas. @@ -48,7 +50,11 @@ def test_selfdestruct_to_precompile( push_cost = gas_costs.G_VERY_LOW selfdestruct_cost = gas_costs.G_SELF_DESTRUCT new_account_cost = gas_costs.G_NEW_ACCOUNT - exact_gas = push_cost + selfdestruct_cost + new_account_cost + if warm_beneficiary: + warming_cost = 0 + else: + warming_cost = gas_costs.G_COLD_ACCOUNT_ACCESS + exact_gas = push_cost + selfdestruct_cost + new_account_cost + warming_cost if same_tx_selfdestruct: # Deploy and selfdestruct in same transaction From c288a1671ac8028978153be6330119a91bc4c991 Mon Sep 17 00:00:00 2001 From: fselmo Date: Mon, 5 Jan 2026 22:17:19 -0700 Subject: [PATCH 067/154] feat(test): Extend selfdestruct tests to all Amsterdam gas boundaries --- .../plugins/forks/tests/test_markers.py | 2 +- .../src/execution_testing/forks/__init__.py | 2 + .../execution_testing/forks/forks/forks.py | 2 +- .../forks/amsterdam/vm/eoa_delegation.py | 1 - .../amsterdam/vm/instructions/environment.py | 8 +- .../forks/amsterdam/vm/instructions/system.py | 4 +- .../test_block_access_lists_opcodes.py | 248 ---- .../test_cases.md | 10 +- .../eip2930_access_list/test_tx_type.py | 4 +- tests/byzantium/eip196_ec_add_mul/test_gas.py | 4 +- tests/byzantium/eip197_ec_pairing/test_gas.py | 4 +- .../create/test_create_deposit_oog.py | 7 +- tests/frontier/create/test_create_one_byte.py | 4 +- .../create/test_create_suicide_during_init.py | 4 +- .../create/test_create_suicide_store.py | 4 +- tests/frontier/opcodes/test_all_opcodes.py | 4 +- tests/frontier/opcodes/test_blockhash.py | 6 +- .../test_call_and_callcode_gas_calculation.py | 9 +- tests/frontier/opcodes/test_calldatacopy.py | 4 +- tests/frontier/opcodes/test_calldataload.py | 6 +- tests/frontier/opcodes/test_calldatasize.py | 6 +- tests/frontier/opcodes/test_dup.py | 7 +- tests/frontier/opcodes/test_push.py | 6 +- tests/frontier/opcodes/test_swap.py | 6 +- tests/frontier/precompiles/test_ecrecover.py | 4 +- tests/frontier/precompiles/test_ripemd.py | 4 +- tests/homestead/selfdestruct/__init__.py | 1 - .../selfdestruct/test_selfdestruct.py | 200 ---- .../eip1559_fee_market_change/test_tx_type.py | 4 +- .../test_eip150_selfdestruct.py | 1007 ++++++++++++++++- 30 files changed, 1015 insertions(+), 567 deletions(-) delete mode 100644 tests/homestead/selfdestruct/__init__.py delete mode 100644 tests/homestead/selfdestruct/test_selfdestruct.py diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py index b740872cc3..fbbc1b3123 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py @@ -26,7 +26,7 @@ def test_case(state_test): valid_until='"Cancun"', ), [], - {"passed": 11, "failed": 0, "skipped": 1, "errors": 0}, + {"passed": 10, "failed": 0, "skipped": 1, "errors": 0}, id="valid_until", ), pytest.param( diff --git a/packages/testing/src/execution_testing/forks/__init__.py b/packages/testing/src/execution_testing/forks/__init__.py index 4a18c389b3..fb5e9b4c76 100644 --- a/packages/testing/src/execution_testing/forks/__init__.py +++ b/packages/testing/src/execution_testing/forks/__init__.py @@ -25,6 +25,7 @@ Paris, Prague, Shanghai, + SpuriousDragon, TangerineWhistle, ) from .forks.transition import ( @@ -99,6 +100,7 @@ "GrayGlacier", "Homestead", "TangerineWhistle", + 
"SpuriousDragon", "InvalidForkError", "Istanbul", "London", diff --git a/packages/testing/src/execution_testing/forks/forks/forks.py b/packages/testing/src/execution_testing/forks/forks/forks.py index 0435d43faa..3804250c43 100644 --- a/packages/testing/src/execution_testing/forks/forks/forks.py +++ b/packages/testing/src/execution_testing/forks/forks/forks.py @@ -1472,7 +1472,7 @@ class DAOFork(Homestead, ignore=True): pass -class TangerineWhistle(DAOFork): +class TangerineWhistle(DAOFork, ignore=True): """TangerineWhistle fork (EIP-150).""" pass diff --git a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py index 4509c5917f..e56fb0cccd 100644 --- a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py +++ b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py @@ -196,7 +196,6 @@ def set_delegation(message: Message) -> U256: authority_account = get_account(state, authority) authority_code = authority_account.code - track_address(message.tx_env.state_changes, authority) if authority_code and not is_valid_delegation(authority_code): diff --git a/src/ethereum/forks/amsterdam/vm/instructions/environment.py b/src/ethereum/forks/amsterdam/vm/instructions/environment.py index 3d23b8f136..79fd56cc3c 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/environment.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/environment.py @@ -84,13 +84,13 @@ def balance(evm: Evm) -> None: check_gas(evm, gas_cost) if is_cold_access: evm.accessed_addresses.add(address) - track_address(evm.state_changes, address) charge_gas(evm, gas_cost) # OPERATION # Non-existent accounts default to EMPTY_ACCOUNT, which has balance 0. state = evm.message.block_env.state balance = get_account(state, address).balance + track_address(evm.state_changes, address) push(evm.stack, balance) @@ -354,12 +354,12 @@ def extcodesize(evm: Evm) -> None: check_gas(evm, access_gas_cost) if is_cold_access: evm.accessed_addresses.add(address) - track_address(evm.state_changes, address) charge_gas(evm, access_gas_cost) # OPERATION state = evm.message.block_env.state code = get_account(state, address).code + track_address(evm.state_changes, address) codesize = U256(len(code)) push(evm.stack, codesize) @@ -400,13 +400,13 @@ def extcodecopy(evm: Evm) -> None: check_gas(evm, total_gas_cost) if is_cold_access: evm.accessed_addresses.add(address) - track_address(evm.state_changes, address) charge_gas(evm, total_gas_cost) # OPERATION evm.memory += b"\x00" * extend_memory.expand_by state = evm.message.block_env.state code = get_account(state, address).code + track_address(evm.state_changes, address) value = buffer_read(code, code_start_index, size) memory_write(evm.memory, memory_start_index, value) @@ -494,12 +494,12 @@ def extcodehash(evm: Evm) -> None: check_gas(evm, access_gas_cost) if is_cold_access: evm.accessed_addresses.add(address) - track_address(evm.state_changes, address) charge_gas(evm, access_gas_cost) # OPERATION state = evm.message.block_env.state account = get_account(state, address) + track_address(evm.state_changes, address) if account == EMPTY_ACCOUNT: codehash = U256(0) diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index 02604f68f2..9b54fab312 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -120,7 +120,6 @@ def generic_create( evm.accessed_addresses.add(contract_address) track_address(evm.state_changes, 
contract_address) - if account_has_code_or_nonce( state, contract_address ) or account_has_storage(state, contract_address): @@ -640,7 +639,8 @@ def selfdestruct(evm: Evm) -> None: state = evm.message.block_env.state if is_cold_access: evm.accessed_addresses.add(beneficiary) - track_address(evm.state_changes, beneficiary) + + track_address(evm.state_changes, beneficiary) if ( not is_account_alive(state, beneficiary) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py index 80d0c504a1..c0e8886d35 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_opcodes.py @@ -1729,254 +1729,6 @@ def test_bal_extcodecopy_and_oog( ) -@pytest.mark.parametrize( - "self_destruct_in_same_tx", [True, False], ids=["same_tx", "new_tx"] -) -@pytest.mark.parametrize( - "pre_funded", [True, False], ids=["pre_funded", "not_pre_funded"] -) -def test_bal_self_destruct( - pre: Alloc, - blockchain_test: BlockchainTestFiller, - self_destruct_in_same_tx: bool, - pre_funded: bool, -) -> None: - """Ensure BAL captures balance changes caused by `SELFDESTRUCT`.""" - alice = pre.fund_eoa() - bob = pre.fund_eoa(amount=0) - - selfdestruct_code = ( - Op.SLOAD(0x01) # Read from storage slot 0x01 - + Op.SSTORE(0x02, 0x42) # Write to storage slot 0x02 - + Op.SELFDESTRUCT(bob) - ) - # A pre existing self-destruct contract with initial storage - kaboom = pre.deploy_contract(code=selfdestruct_code, storage={0x01: 0x123}) - - # A template for self-destruct contract - self_destruct_init_code = Initcode(deploy_code=selfdestruct_code) - template = pre.deploy_contract(code=self_destruct_init_code) - - transfer_amount = expected_recipient_balance = 100 - pre_fund_amount = 10 - - if self_destruct_in_same_tx: - # The goal is to create a self-destructing contract in the same - # transaction to trigger deletion of code as per EIP-6780. - # The factory contract below creates a new self-destructing - # contract and calls it in this transaction. 
- - bytecode_size = len(self_destruct_init_code) - factory_bytecode = ( - # Clone template memory - Op.EXTCODECOPY(template, 0, 0, bytecode_size) - # Fund 100 wei and deploy the clone - + Op.CREATE(transfer_amount, 0, bytecode_size) - # Call the clone, which self-destructs - + Op.CALL(1_000_000, Op.DUP6, 0, 0, 0, 0, 0) - + Op.STOP - ) - - factory = pre.deploy_contract(code=factory_bytecode) - kaboom_same_tx = compute_create_address(address=factory, nonce=1) - - # Determine which account will be self-destructed - self_destructed_account = ( - kaboom_same_tx if self_destruct_in_same_tx else kaboom - ) - - if pre_funded: - expected_recipient_balance += pre_fund_amount - pre.fund_address( - address=self_destructed_account, amount=pre_fund_amount - ) - - tx = Transaction( - sender=alice, - to=factory if self_destruct_in_same_tx else kaboom, - value=transfer_amount, - gas_limit=1_000_000, - ) - - block = Block( - txs=[tx], - expected_block_access_list=BlockAccessListExpectation( - account_expectations={ - alice: BalAccountExpectation( - nonce_changes=[ - BalNonceChange(block_access_index=1, post_nonce=1) - ], - ), - bob: BalAccountExpectation( - balance_changes=[ - BalBalanceChange( - block_access_index=1, - post_balance=expected_recipient_balance, - ) - ] - ), - self_destructed_account: BalAccountExpectation( - balance_changes=[ - BalBalanceChange(block_access_index=1, post_balance=0) - ] - if pre_funded - else [], - # Accessed slots for same-tx are recorded as reads (0x02) - storage_reads=[0x01, 0x02] - if self_destruct_in_same_tx - else [0x01], - # Storage changes are recorded for non-same-tx - # self-destructs - storage_changes=[ - BalStorageSlot( - slot=0x02, - slot_changes=[ - BalStorageChange( - block_access_index=1, post_value=0x42 - ) - ], - ) - ] - if not self_destruct_in_same_tx - else [], - code_changes=[], # should not be present - nonce_changes=[], # should not be present - ), - } - ), - ) - - post: Dict[Address, Account] = { - alice: Account(nonce=1), - bob: Account(balance=expected_recipient_balance), - } - - # If the account was self-destructed in the same transaction, - # we expect the account to non-existent and its balance to be 0. - if self_destruct_in_same_tx: - post.update( - { - factory: Account( - nonce=2, # incremented after CREATE - balance=0, # spent on CREATE - code=factory_bytecode, - ), - kaboom_same_tx: Account.NONEXISTENT, # type: ignore - # The pre-existing contract remains unaffected - kaboom: Account( - balance=0, code=selfdestruct_code, storage={0x01: 0x123} - ), - } - ) - else: - post.update( - { - # This contract was self-destructed in a separate tx. - # From EIP 6780: `SELFDESTRUCT` does not delete any data - # (including storage keys, code, or the account itself). - kaboom: Account( - balance=0, - code=selfdestruct_code, - storage={0x01: 0x123, 0x2: 0x42}, - ), - } - ) - - blockchain_test( - pre=pre, - blocks=[block], - post=post, - ) - - -@pytest.mark.parametrize("oog_before_state_access", [True, False]) -def test_bal_self_destruct_oog( - pre: Alloc, - blockchain_test: BlockchainTestFiller, - fork: Fork, - oog_before_state_access: bool, -) -> None: - """ - Test SELFDESTRUCT BAL behavior at gas boundaries. - - SELFDESTRUCT has two gas checkpoints: - 1. static checks: G_SELF_DESTRUCT + G_COLD_ACCOUNT_ACCESS - OOG here = no state access, beneficiary NOT in BAL - 2. 
state access: same as static checks, plus G_NEW_ACCOUNT for new account - OOG here = enough gas to access state but not enough for new account, - beneficiary IS in BAL - """ - alice = pre.fund_eoa() - # always use new account so we incur extra G_NEW_ACCOUNT cost - # there is no other gas boundary to test between cold access - # and new account - beneficiary = pre.empty_account() - - # selfdestruct_contract: PUSH20 SELFDESTRUCT - selfdestruct_code = Op.SELFDESTRUCT(beneficiary) - selfdestruct_contract = pre.deploy_contract( - code=selfdestruct_code, balance=1000 - ) - - # Gas needed inside the CALL for SELFDESTRUCT: - # - PUSH20: G_VERY_LOW = 3 - # - SELFDESTRUCT: G_SELF_DESTRUCT - # - G_COLD_ACCOUNT_ACCESS (beneficiary cold access) - gas_costs = fork.gas_costs() - exact_static_gas = ( - gas_costs.G_VERY_LOW - + gas_costs.G_SELF_DESTRUCT - + gas_costs.G_COLD_ACCOUNT_ACCESS - ) - - # subtract one from the exact gas to trigger OOG before state access - oog_gas = ( - exact_static_gas - 1 if oog_before_state_access else exact_static_gas - ) - - # caller_contract: CALL with oog_gas - caller_code = Op.CALL(gas=oog_gas, address=selfdestruct_contract) - caller_contract = pre.deploy_contract(code=caller_code) - - tx = Transaction( - sender=alice, - to=caller_contract, - gas_limit=100_000, - ) - - account_expectations: Dict[Address, BalAccountExpectation | None] = { - alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], - ), - caller_contract: BalAccountExpectation.empty(), - selfdestruct_contract: BalAccountExpectation.empty(), - # beneficiary only in BAL if we passed check_gas (state accessed) - beneficiary: None - if oog_before_state_access - else BalAccountExpectation.empty(), - } - - block = Block( - txs=[tx], - expected_block_access_list=BlockAccessListExpectation( - account_expectations=account_expectations - ), - ) - - blockchain_test( - pre=pre, - blocks=[block], - post={ - alice: Account(nonce=1), - caller_contract: Account(code=caller_code), - # selfdestruct_contract still exists - SELFDESTRUCT failed - selfdestruct_contract: Account( - balance=1000, code=selfdestruct_code - ), - }, - ) - - def test_bal_storage_write_read_same_frame( pre: Alloc, blockchain_test: BlockchainTestFiller, diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 0d2fc153d1..5bd0cf31ac 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -5,7 +5,11 @@ | `test_bal_nonce_changes` | Ensure BAL captures changes to nonce | Alice sends 100 wei to Bob | BAL MUST include changes to Alice's nonce. | ✅ Completed | | `test_bal_balance_changes` | Ensure BAL captures changes to balance | Alice sends 100 wei to Bob | BAL MUST include balance change for Alice, Bob, and Coinbase | ✅ Completed | | `test_bal_code_changes` | Ensure BAL captures changes to account code | Alice deploys factory contract that creates new contract | BAL MUST include code changes for newly deployed contract | ✅ Completed | -| `test_bal_self_destruct` | Ensure BAL captures storage access and balance changes caused by `SELFDESTRUCT` | Parameterized test: Alice interacts with a contract (either existing or created same-tx) that reads from storage slot 0x01, writes to storage slot 0x02, then executes `SELFDESTRUCT` with Bob as recipient. 
Contract may be pre-funded with 10 wei | BAL MUST include Alice's nonce change (increment) and Bob's balance change (100 or 110 depending on pre-funding). For the self-destructing contract: storage_reads=[0x01], empty storage_changes=[], and if pre-funded, balance_changes with post_balance=0; if not pre-funded, no balance change recorded. MUST NOT have code_changes or nonce_changes entries | ✅ Completed | +| `test_selfdestruct_to_account` (TangerineWhistle) | Ensure BAL captures SELFDESTRUCT success boundary for account beneficiaries | Victim executes `SELFDESTRUCT(beneficiary)` at exact gas boundary. Tests final gas boundary where operation completes. Parametrized: is_success (exact_gas/exact_gas_minus_1), beneficiary (EOA/contract), warm (cold/warm where warm=Berlin+), same_tx (pre_deploy/same_tx), originator_balance (0/1), beneficiary_balance (0/1). File: `tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py`. | exact_gas: Beneficiary in BAL with `balance_changes`, victim destroyed (pre-Cancun/same_tx) or preserved (>=Cancun). exact_gas_minus_1: OOG, beneficiary in BAL only if G_NEW_ACCOUNT was part of gas calculation. | ✅ Completed | +| `test_selfdestruct_state_access_boundary` (TangerineWhistle) | Ensure BAL correctly tracks beneficiary access at state access boundary (consensus check) | Victim executes `SELFDESTRUCT(beneficiary)` at state access boundary (base + cold). Verifies beneficiary is accessed before G_NEW_ACCOUNT check. Parametrized: is_success (exact_gas/exact_gas_minus_1), beneficiary (EOA/contract), warm (cold/warm), same_tx, originator_balance (0/1), beneficiary_balance (0/1). File: `tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py`. | exact_gas: Beneficiary **IN** BAL (state accessed). exact_gas_minus_1: Beneficiary **NOT** in BAL (OOG before state access). Operation may succeed at exact_gas if no G_NEW_ACCOUNT needed. | ✅ Completed | +| `test_selfdestruct_to_self` (TangerineWhistle) | Ensure BAL captures SELFDESTRUCT where beneficiary is self at gas boundary | Victim executes `SELFDESTRUCT(ADDRESS)` - selfdestructs to itself. Always warm, always alive (no G_NEW_ACCOUNT, no cold access). Gas = G_BASE + G_SELF_DESTRUCT. Parametrized: is_success (exact_gas/exact_gas_minus_1), originator_balance (0/1), same_tx (pre_deploy/same_tx). File: `tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py`. | exact_gas_minus_1: Victim in BAL with unchanged state. exact_gas: Pre-Cancun/same_tx: destroyed, balance=0. >=Cancun pre-existing: preserved with original balance. | ✅ Completed | +| `test_selfdestruct_to_precompile` (TangerineWhistle) | Ensure BAL captures SELFDESTRUCT success boundary for precompile beneficiaries | Victim executes `SELFDESTRUCT(precompile)` at exact gas boundary. Precompiles are always warm (no cold access charge). Parametrized: is_success (exact_gas/exact_gas_minus_1), all precompiles via `@pytest.mark.with_all_precompiles`, same_tx (pre_deploy/same_tx), originator_balance (0/1), beneficiary_balance (0/1). File: `tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py`. | exact_gas: Precompile in BAL with `balance_changes`, victim destroyed (pre-Cancun/same_tx) or preserved (>=Cancun). exact_gas_minus_1: OOG, precompile in BAL only if G_NEW_ACCOUNT was part of gas calculation. 
| ✅ Completed | +| `test_selfdestruct_to_precompile_state_access_boundary` (TangerineWhistle) | Ensure BAL correctly tracks precompile access at state access boundary (consensus check) | Victim executes `SELFDESTRUCT(precompile)` at state access boundary (base only, precompiles always warm). Verifies precompile is accessed before G_NEW_ACCOUNT check. Parametrized: is_success (exact_gas/exact_gas_minus_1), all precompiles, same_tx, originator_balance (0/1), beneficiary_balance (0/1). File: `tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py`. | exact_gas: Precompile **IN** BAL (state accessed). exact_gas_minus_1: Precompile **NOT** in BAL (OOG before state access). Operation may succeed at exact_gas if no G_NEW_ACCOUNT needed. | ✅ Completed | | `test_bal_account_access_target` | Ensure BAL captures target addresses of account access opcodes | Alice calls `Oracle` contract which uses account access opcodes (`BALANCE`, `EXTCODESIZE`, `EXTCODECOPY`, `EXTCODEHASH`, `CALL`, `CALLCODE`, `DELEGATECALL`, `STATICCALL`) on `TargetContract`. | BAL MUST include Alice, `Oracle`, and `TargetContract` with empty changes for `TargetContract` and nonce changes for Alice. | ✅ Completed | | `test_bal_call_no_delegation_and_oog_before_target_access` | Ensure BAL handles OOG before target access and success for non-delegated CALL | Parametrized: target warm/cold, target empty/existing, value 0/1, memory expansion, OOG boundary (before_target_access/success). | OOG: target in BAL ONLY if pre-warmed. Success: target always in BAL with balance changes when value > 0. | ✅ Completed | | `test_bal_call_no_delegation_oog_after_target_access` | Ensure BAL includes target but excludes value transfer when OOG after target access | Hardcoded: empty target, value=1 (required for create_cost gap). Parametrized: warm/cold, memory expansion. | Target always in BAL. No balance changes (value transfer fails after G_NEW_ACCOUNT check). | ✅ Completed | @@ -27,7 +31,7 @@ | `test_bal_aborted_storage_access` | Ensure BAL captures storage access in aborted transactions correctly | Alice calls contract that reads storage slot `0x01`, writes to slot `0x02`, then aborts with `REVERT`/`INVALID` | BAL MUST include storage_reads for slots `0x01` and `0x02` (aborted writes become reads), empty storage_changes. Only nonce changes for Alice. | ✅ Completed | | `test_bal_aborted_account_access` | Ensure BAL captures account access in aborted transactions for all account accessing opcodes | Alice calls `AbortContract` that performs account access operations (`BALANCE`, `EXTCODESIZE`, `EXTCODECOPY`, `EXTCODEHASH`, `CALL`, `CALLCODE`, `DELEGATECALL`, `STATICCALL`) on `TargetContract` and aborts via `REVERT`/`INVALID` | BAL MUST include Alice, `TargetContract`, and `AbortContract` in account_changes and nonce changes for Alice. | ✅ Completed | | `test_bal_pure_contract_call` | Ensure BAL captures contract access for pure computation calls | Alice calls `PureContract` that performs pure arithmetic (ADD operation) without storage or balance changes | BAL MUST include Alice and `PureContract` in `account_changes`, and `nonce_changes` for Alice. | ✅ Completed | -| `test_bal_create2_to_A_read_then_selfdestruct` | BAL records balance change for A and storage access (no persistent change) | Tx0: Alice sends ETH to address **A**. Tx1: Deployer `CREATE2` a contract **at A**; contract does `SLOAD(B)` and immediately `SELFDESTRUCT(beneficiary=X)` in the same tx. 
| BAL **MUST** include **A** with `balance_changes` (funding in Tx0 and transfer on selfdestruct in Tx1). BAL **MUST** include storage key **B** as an accessed `StorageKey`, and **MUST NOT** include **B** under `storage_changes` (no persistence due to same-tx create+destruct). | 🟡 Planned | +| `test_bal_create2_to_A_read_then_selfdestruct` | BAL records balance change for A and storage access (no persistent change) | Tx0: Alice sends ETH to address **A**. Tx1: Deployer `CREATE2` a contract **at A**; contract does `SLOAD(B)` and immediately `SELFDESTRUCT(beneficiary=X)` in the same tx. | BAL **MUST** include **A** with `balance_changes` (funding in Tx0 and transfer on selfdestruct in Tx1). BAL **MUST** include storage key **B** as an accessed `StorageKey`, and **MUST NOT** include **B** under `storage_changes` (no persistence due to same-tx create+destruct). | 🟡 Planned | | `test_bal_create2_to_A_write_then_selfdestruct` | BAL records balance change for A and storage access even if a write occurred (no persistent change) | Tx0: Alice sends ETH to **A**. Tx1: Deployer `CREATE2` contract **at A**; contract does `SSTORE(B, v)` (optionally `SLOAD(B)`), then `SELFDESTRUCT(beneficiary=Y)` in the same tx. | BAL **MUST** include **A** with `balance_changes` (Tx0 fund; Tx1 outflow to `Y`). BAL **MUST** include **B** as `StorageKey` accessed, and **MUST NOT** include **B** under `storage_changes` (ephemeral write discarded because the contract was created and destroyed in the same tx). | 🟡 Planned | | `test_bal_precompile_funded` | BAL records precompile value transfer with or without balance change | Alice sends value to precompile (all precompiles) via direct transaction. Parameterized: (1) with value (1 ETH), (2) without value (0 ETH). | For with_value: BAL **MUST** include precompile with `balance_changes`. For no_value: BAL **MUST** include precompile with empty `balance_changes`. No `storage_changes` or `code_changes` in either case. | ✅ Completed | | `test_bal_precompile_call` | BAL records precompile when called via contract | Alice calls Oracle contract which calls precompile (all precompiles) via CALL opcode with 0 ETH | BAL **MUST** include Alice with `nonce_changes`, Oracle with empty changes, and precompile with empty changes. No `balance_changes`, `storage_changes`, or `code_changes` for precompile. | ✅ Completed | @@ -103,8 +107,6 @@ | `test_bal_call_revert_insufficient_funds` | Ensure BAL handles CALL failure due to insufficient balance (not OOG) | Contract (balance=100, storage slot 0x02=0xDEAD) executes: `SLOAD(0x01), CALL(target, value=1000), SSTORE(0x02, result)`. CALL fails because 1000 > 100. Target address 0xDEAD (pre-existing with non-zero balance to avoid pruning). Note: slot 0x02 must start non-zero so SSTORE(0) is a change. | BAL **MUST** include: (1) Contract with `storage_reads` for slot 0x01, `storage_changes` for slot 0x02 (value=0, CALL returned failure). (2) Target (0xDEAD) **MUST** appear in BAL with empty changes - target is accessed before balance check fails. | ✅ Completed | | `test_bal_lexicographic_address_ordering` | Ensure BAL enforces strict lexicographic byte-wise ordering | Pre-fund three addresses with specific byte patterns: `addr_low = 0x0000...0001`, `addr_mid = 0x0000...0100`, `addr_high = 0x0100...0000`. Contract touches them in reverse order: `BALANCE(addr_high), BALANCE(addr_low), BALANCE(addr_mid)`. 
Additionally, include two endian-trap addresses that are byte-reversals of each other: `addr_endian_low = 0x0100000000000000000000000000000000000002`, `addr_endian_high = 0x0200000000000000000000000000000000000001`. Note: `reverse(addr_endian_low) = addr_endian_high`. Correct lexicographic order: `addr_endian_low < addr_endian_high` (0x01 < 0x02 at byte 0). If implementation incorrectly reverses bytes before comparing, it would get `addr_endian_low > addr_endian_high` (wrong). | BAL account list **MUST** be sorted lexicographically by address bytes: `addr_low` < `addr_mid` < `addr_high` < `addr_endian_low` < `addr_endian_high`, regardless of access order. The endian-trap addresses specifically catch byte-reversal bugs where addresses are compared with wrong byte order. Complements `test_bal_invalid_account_order` which tests rejection; this tests correct generation. | ✅ Completed | | `test_bal_transient_storage_not_tracked` | Ensure BAL excludes EIP-1153 transient storage operations | Contract executes: `TSTORE(0x01, 0x42)` (transient write), `TLOAD(0x01)` (transient read), `SSTORE(0x02, result)` (persistent write using transient value). | BAL **MUST** include slot 0x02 in `storage_changes` (persistent storage was modified). BAL **MUST NOT** include slot 0x01 in `storage_reads` or `storage_changes` (transient storage is not persisted, not needed for stateless execution). This verifies TSTORE/TLOAD don't pollute BAL. | ✅ Completed | -| `test_bal_selfdestruct_to_precompile_and_oog` | Ensure BAL captures SELFDESTRUCT to precompile at different gas boundaries | Victim executes `SELFDESTRUCT(precompile)`. Parameterized by all precompiles and three scenarios: (1) Success, (2) OOG before state access, (3) OOG after state access. | Success: victim and precompile have `balance_changes`. OOG before state access: precompile **NOT** in BAL. OOG after state access: precompile in BAL with empty changes. | ✅ Completed | -| `test_bal_self_destruct_oog` | Ensure BAL correctly tracks SELFDESTRUCT beneficiary based on gas boundaries | Alice calls `Caller` contract which CALLs `SelfDestructContract` with precisely controlled gas. `SelfDestructContract` attempts SELFDESTRUCT to new account `Beneficiary`. Static gas = G_VERY_LOW + G_SELF_DESTRUCT + G_COLD_ACCOUNT_ACCESS. Parameterized: (1) OOG before state access (gas = static - 1), (2) OOG after state access (gas = static, but insufficient for G_NEW_ACCOUNT). | For OOG before state access: BAL **MUST NOT** include `Beneficiary` (no state access occurred). For OOG after state access: BAL **MUST** include `Beneficiary` with empty changes (state was accessed before G_NEW_ACCOUNT check failed). Both cases: Alice with `nonce_changes`, `Caller` and `SelfDestructContract` with empty changes. Contract balance unchanged. | ✅ Completed | | `test_bal_withdrawal_to_7702_delegation` | Ensure BAL correctly handles withdrawal to a 7702 delegated account (no code execution on recipient) | Tx1: Alice authorizes delegation to Oracle (sets code to `0xef0100\|\|Oracle`). Withdrawal: 10 gwei sent to Alice. Single block with tx + withdrawal. | BAL **MUST** include: (1) Alice at block_access_index=1 with `code_changes` (delegation), `nonce_changes`. (2) Alice at block_access_index=2 with `balance_changes` (receives withdrawal). **Oracle MUST NOT appear** - withdrawals credit balance without executing recipient code, so delegation target is never accessed. 
This complements `test_bal_selfdestruct_to_7702_delegation` (selfdestruct) and `test_bal_withdrawal_no_evm_execution` (withdrawal to contract). | ✅ Completed | | `test_init_collision_create_tx` | Ensure BAL tracks CREATE collisions correctly (pre-Amsterdam test with BAL) | CREATE transaction targeting address with existing storage aborts | BAL **MUST** show empty expectations for collision address (no changes occur due to abort) | ✅ Completed | | `test_call_to_pre_authorized_oog` | Ensure BAL handles OOG during EIP-7702 delegation access (pre-Amsterdam test with BAL) | Call to delegated account that OOGs before accessing delegation contract | BAL **MUST** include auth_signer (code read for delegation check) but **MUST NOT** include delegation contract (OOG before access) | ✅ Completed | diff --git a/tests/berlin/eip2930_access_list/test_tx_type.py b/tests/berlin/eip2930_access_list/test_tx_type.py index c9e22e55d9..e96d8fefcd 100644 --- a/tests/berlin/eip2930_access_list/test_tx_type.py +++ b/tests/berlin/eip2930_access_list/test_tx_type.py @@ -13,7 +13,7 @@ TransactionException, ) from execution_testing import Opcodes as Op -from execution_testing.forks import Byzantium +from execution_testing.forks import SpuriousDragon from .spec import ref_spec_2930 @@ -62,7 +62,7 @@ def test_eip2930_tx_validity( sender=sender, gas_limit=100_000, access_list=[], - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, error=TransactionException.TYPE_1_TX_PRE_FORK if not valid else None, ) diff --git a/tests/byzantium/eip196_ec_add_mul/test_gas.py b/tests/byzantium/eip196_ec_add_mul/test_gas.py index 59f84e5aa8..eb5d134360 100644 --- a/tests/byzantium/eip196_ec_add_mul/test_gas.py +++ b/tests/byzantium/eip196_ec_add_mul/test_gas.py @@ -8,7 +8,7 @@ StateTestFiller, Transaction, ) -from execution_testing.forks import Byzantium +from execution_testing.forks import SpuriousDragon from execution_testing.forks.helpers import Fork from execution_testing.vm import Opcodes as Op @@ -55,7 +55,7 @@ def test_gas_costs( to=account, sender=pre.fund_eoa(), gas_limit=100_0000, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, ) post = {account: Account(storage={0: 1 if enough_gas else 0})} diff --git a/tests/byzantium/eip197_ec_pairing/test_gas.py b/tests/byzantium/eip197_ec_pairing/test_gas.py index 1efddcb85c..5a0425eaf3 100644 --- a/tests/byzantium/eip197_ec_pairing/test_gas.py +++ b/tests/byzantium/eip197_ec_pairing/test_gas.py @@ -8,7 +8,7 @@ Transaction, ) from execution_testing.base_types.base_types import Address -from execution_testing.forks import Byzantium +from execution_testing.forks import SpuriousDragon from execution_testing.forks.helpers import Fork from execution_testing.vm import Opcodes as Op @@ -49,7 +49,7 @@ def test_gas_costs( to=account, sender=pre.fund_eoa(), gas_limit=100_0000, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, ) post = {account: Account(storage={0: 1 if enough_gas else 0})} diff --git a/tests/frontier/create/test_create_deposit_oog.py b/tests/frontier/create/test_create_deposit_oog.py index 18932d3ca3..590fef42b2 100644 --- a/tests/frontier/create/test_create_deposit_oog.py +++ b/tests/frontier/create/test_create_deposit_oog.py @@ -3,6 +3,7 @@ """ import pytest + from execution_testing import ( Account, Alloc, @@ -12,7 +13,7 @@ Transaction, compute_create_address, ) -from execution_testing.forks import Byzantium, Frontier +from execution_testing.forks import Frontier, SpuriousDragon, TangerineWhistle SLOT_CREATE_RESULT = 1 
SLOT_CREATE_RESULT_PRE = 0xDEADBEEF @@ -63,7 +64,7 @@ def test_create_deposit_oog( create_gas = return_code.gas_cost(fork) + expand_memory_code.gas_cost(fork) if not enough_gas: create_gas -= 1 - if fork >= Byzantium: + if fork >= TangerineWhistle: # Increment the gas for the 63/64 rule create_gas = (create_gas * 64) // 63 call_gas = create_gas + factory_code.gas_cost(fork) @@ -86,7 +87,7 @@ def test_create_deposit_oog( gas_limit=10_000_000, to=caller_address, sender=sender, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, ) created_account: Account | None = Account(code=b"\x00" * deposited_len) diff --git a/tests/frontier/create/test_create_one_byte.py b/tests/frontier/create/test_create_one_byte.py index a0baf294cc..6e0d596589 100644 --- a/tests/frontier/create/test_create_one_byte.py +++ b/tests/frontier/create/test_create_one_byte.py @@ -17,7 +17,7 @@ Transaction, compute_create_address, ) -from execution_testing.forks import Byzantium, London +from execution_testing.forks import London, SpuriousDragon @pytest.mark.ported_from( @@ -100,7 +100,7 @@ def test_create_one_byte( data=b"", nonce=0, sender=sender, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, ) post = { diff --git a/tests/frontier/create/test_create_suicide_during_init.py b/tests/frontier/create/test_create_suicide_during_init.py index e05f521468..30caa37cee 100644 --- a/tests/frontier/create/test_create_suicide_during_init.py +++ b/tests/frontier/create/test_create_suicide_during_init.py @@ -14,7 +14,7 @@ Transaction, compute_create_address, ) -from execution_testing.forks import Byzantium +from execution_testing.forks import SpuriousDragon class Operation(Enum): @@ -93,7 +93,7 @@ def test_create_suicide_during_transaction_create( data=contract_initcode, value=tx_value, sender=sender, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, ) post = { diff --git a/tests/frontier/create/test_create_suicide_store.py b/tests/frontier/create/test_create_suicide_store.py index 7d3e661035..e28c213be7 100644 --- a/tests/frontier/create/test_create_suicide_store.py +++ b/tests/frontier/create/test_create_suicide_store.py @@ -19,7 +19,7 @@ Transaction, compute_create_address, ) -from execution_testing.forks import Byzantium +from execution_testing.forks import SpuriousDragon class Operation(IntEnum): @@ -147,7 +147,7 @@ def test_create_suicide_store( to=create_contract, data=suicide_initcode, sender=sender, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, ) post = { diff --git a/tests/frontier/opcodes/test_all_opcodes.py b/tests/frontier/opcodes/test_all_opcodes.py index 022a1f4968..436052ab96 100644 --- a/tests/frontier/opcodes/test_all_opcodes.py +++ b/tests/frontier/opcodes/test_all_opcodes.py @@ -21,7 +21,7 @@ UndefinedOpcodes, gas_test, ) -from execution_testing.forks import Byzantium +from execution_testing.forks import SpuriousDragon REFERENCE_SPEC_GIT_PATH = "N/A" REFERENCE_SPEC_VERSION = "N/A" @@ -183,7 +183,7 @@ def test_stack_overflow( gas_limit=100_000, to=contract, sender=pre.fund_eoa(), - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, ) expected_storage = { slot_code_worked: value_code_failed if fails else value_code_worked diff --git a/tests/frontier/opcodes/test_blockhash.py b/tests/frontier/opcodes/test_blockhash.py index 9c42639ed7..34e1e91ee1 100644 --- a/tests/frontier/opcodes/test_blockhash.py +++ b/tests/frontier/opcodes/test_blockhash.py @@ -9,7 +9,7 @@ Op, Transaction, ) -from execution_testing.forks import Byzantium +from 
execution_testing.forks import SpuriousDragon from execution_testing.forks.helpers import Fork @@ -60,7 +60,7 @@ def test_genesis_hash_available( sender=sender, to=contract, gas_limit=100_000, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, ) ] if not setup_blocks_empty @@ -76,7 +76,7 @@ def test_genesis_hash_available( sender=sender, to=contract, gas_limit=100_000, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, ) ] ) diff --git a/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py b/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py index ae3dd53df2..ee8dfc9350 100644 --- a/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py +++ b/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py @@ -51,7 +51,12 @@ StateTestFiller, Transaction, ) -from execution_testing.forks.forks.forks import Berlin, Byzantium, Homestead +from execution_testing.forks.forks.forks import ( + Berlin, + Byzantium, + Homestead, + SpuriousDragon, +) from execution_testing.forks.helpers import Fork @@ -196,7 +201,7 @@ def caller_tx(sender: EOA, caller_address: Address, fork: Fork) -> Transaction: value=1, gas_limit=500_000, sender=sender, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, ) diff --git a/tests/frontier/opcodes/test_calldatacopy.py b/tests/frontier/opcodes/test_calldatacopy.py index fae9897356..a03e613ef4 100644 --- a/tests/frontier/opcodes/test_calldatacopy.py +++ b/tests/frontier/opcodes/test_calldatacopy.py @@ -10,7 +10,7 @@ StateTestFiller, Transaction, ) -from execution_testing.forks import Byzantium +from execution_testing.forks import SpuriousDragon @pytest.mark.ported_from( @@ -193,7 +193,7 @@ def test_calldatacopy( data=tx_data, gas_limit=100_000, gas_price=0x0A, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, sender=pre.fund_eoa(), to=to, value=0x01, diff --git a/tests/frontier/opcodes/test_calldataload.py b/tests/frontier/opcodes/test_calldataload.py index b22638ebdf..0faaac71f1 100644 --- a/tests/frontier/opcodes/test_calldataload.py +++ b/tests/frontier/opcodes/test_calldataload.py @@ -10,7 +10,7 @@ Transaction, ) from execution_testing import Macros as Om -from execution_testing.forks import Byzantium +from execution_testing.forks import SpuriousDragon @pytest.mark.ported_from( @@ -92,7 +92,7 @@ def test_calldataload( tx = Transaction( data=calldata, gas_limit=100_000, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, sender=pre.fund_eoa(), to=to, ) @@ -101,7 +101,7 @@ def test_calldataload( tx = Transaction( data=calldata, gas_limit=100_000, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, sender=pre.fund_eoa(), to=contract_address, ) diff --git a/tests/frontier/opcodes/test_calldatasize.py b/tests/frontier/opcodes/test_calldatasize.py index 4adf1a8e4d..1e060fff41 100644 --- a/tests/frontier/opcodes/test_calldatasize.py +++ b/tests/frontier/opcodes/test_calldatasize.py @@ -10,7 +10,7 @@ Transaction, ) from execution_testing import Macros as Om -from execution_testing.forks import Byzantium +from execution_testing.forks import SpuriousDragon @pytest.mark.ported_from( @@ -69,7 +69,7 @@ def test_calldatasize( tx = Transaction( gas_limit=100_000, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, sender=pre.fund_eoa(), to=to, ) @@ -78,7 +78,7 @@ def test_calldatasize( tx = Transaction( data=calldata, gas_limit=100_000, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, sender=pre.fund_eoa(), 
to=contract_address, ) diff --git a/tests/frontier/opcodes/test_dup.py b/tests/frontier/opcodes/test_dup.py index 35fd3c428f..170e95d92f 100644 --- a/tests/frontier/opcodes/test_dup.py +++ b/tests/frontier/opcodes/test_dup.py @@ -5,12 +5,13 @@ Account, Alloc, Environment, + Fork, Op, StateTestFiller, Storage, Transaction, ) -from execution_testing.forks import Frontier, Homestead +from execution_testing.forks import SpuriousDragon @pytest.mark.parametrize( @@ -38,7 +39,7 @@ @pytest.mark.with_all_evm_code_types def test_dup( state_test: StateTestFiller, - fork: str, + fork: Fork, dup_opcode: Op, pre: Alloc, ) -> None: @@ -71,7 +72,7 @@ def test_dup( ty=0x0, to=account, gas_limit=500000, - protected=False if fork in [Frontier, Homestead] else True, + protected=fork >= SpuriousDragon, sender=sender, ) diff --git a/tests/frontier/opcodes/test_push.py b/tests/frontier/opcodes/test_push.py index 707264aa07..180b1d7fb5 100644 --- a/tests/frontier/opcodes/test_push.py +++ b/tests/frontier/opcodes/test_push.py @@ -18,7 +18,7 @@ StateTestFiller, Transaction, ) -from execution_testing.forks import Frontier, Homestead +from execution_testing.forks import SpuriousDragon def get_input_for_push_opcode(opcode: Op) -> bytes: @@ -77,7 +77,7 @@ def test_push( sender=pre.fund_eoa(), to=contract, gas_limit=500_000, - protected=False if fork in [Frontier, Homestead] else True, + protected=fork >= SpuriousDragon, ) post = {} @@ -149,7 +149,7 @@ def test_stack_overflow( sender=pre.fund_eoa(), to=contract, gas_limit=500_000, - protected=False if fork in [Frontier, Homestead] else True, + protected=fork >= SpuriousDragon, ) post = {} diff --git a/tests/frontier/opcodes/test_swap.py b/tests/frontier/opcodes/test_swap.py index e271bec920..59063d8bbd 100644 --- a/tests/frontier/opcodes/test_swap.py +++ b/tests/frontier/opcodes/test_swap.py @@ -13,7 +13,7 @@ Bytecode, Environment, ) -from execution_testing.forks import Frontier, Homestead +from execution_testing.forks import SpuriousDragon from execution_testing import Op from execution_testing import ( StateTestFiller, @@ -76,7 +76,7 @@ def test_swap( sender=pre.fund_eoa(), to=contract_address, gas_limit=500_000, - protected=False if fork in [Frontier, Homestead] else True, + protected=fork >= SpuriousDragon, ) # Calculate expected storage values after SWAP and storage operations @@ -146,7 +146,7 @@ def test_stack_underflow( sender=pre.fund_eoa(), to=contract, gas_limit=500_000, - protected=False if fork in [Frontier, Homestead] else True, + protected=fork >= SpuriousDragon, ) # Define the expected post-state. 
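A minimal sketch of the predicate these hunks converge on (illustrative only;
`should_protect` is a hypothetical helper, not part of the framework):

    from execution_testing.forks import SpuriousDragon

    def should_protect(fork) -> bool:
        # EIP-155 (replay-protected transactions) activated at Spurious Dragon.
        # The older guards seen in these files -- `fork >= Byzantium`, or
        # `False if fork in [Frontier, Homestead] else True` -- only
        # approximated that boundary; this is the precise check.
        return fork >= SpuriousDragon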
diff --git a/tests/frontier/precompiles/test_ecrecover.py b/tests/frontier/precompiles/test_ecrecover.py index d248b67f4c..4d2b9a0e4b 100644 --- a/tests/frontier/precompiles/test_ecrecover.py +++ b/tests/frontier/precompiles/test_ecrecover.py @@ -8,7 +8,7 @@ StateTestFiller, Transaction, ) -from execution_testing.forks.forks.forks import Byzantium +from execution_testing.forks.forks.forks import SpuriousDragon from execution_testing.forks.helpers import Fork from execution_testing.vm import Opcodes as Op @@ -388,7 +388,7 @@ def test_precompiles( to=account, sender=pre.fund_eoa(), gas_limit=1_000_000, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, ) post = {account: Account(storage={0: output})} diff --git a/tests/frontier/precompiles/test_ripemd.py b/tests/frontier/precompiles/test_ripemd.py index dc34ce1ad8..5f382e03b6 100644 --- a/tests/frontier/precompiles/test_ripemd.py +++ b/tests/frontier/precompiles/test_ripemd.py @@ -8,7 +8,7 @@ StateTestFiller, Transaction, ) -from execution_testing.forks.forks.forks import Byzantium +from execution_testing.forks.forks.forks import SpuriousDragon from execution_testing.forks.helpers import Fork from execution_testing.vm import Opcodes as Op @@ -176,7 +176,7 @@ def test_precompiles( sender=pre.fund_eoa(), gas_limit=1_000_0000, data=msg, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, ) post = {account: Account(storage={0: output if not oog else 0})} diff --git a/tests/homestead/selfdestruct/__init__.py b/tests/homestead/selfdestruct/__init__.py deleted file mode 100644 index 8dd1611067..0000000000 --- a/tests/homestead/selfdestruct/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for SELFDESTRUCT opcode behavior in various scenarios.""" diff --git a/tests/homestead/selfdestruct/test_selfdestruct.py b/tests/homestead/selfdestruct/test_selfdestruct.py deleted file mode 100644 index 991f9b73c5..0000000000 --- a/tests/homestead/selfdestruct/test_selfdestruct.py +++ /dev/null @@ -1,200 +0,0 @@ -"""Test the SELFDESTRUCT opcode.""" - -from typing import Dict - -import pytest -from execution_testing import ( - Account, - Address, - Alloc, - BalAccountExpectation, - BalBalanceChange, - BalNonceChange, - Block, - BlockAccessListExpectation, - BlockchainTestFiller, - Initcode, - Op, - Transaction, - compute_create_address, -) -from execution_testing.forks import Byzantium, Cancun -from execution_testing.forks.helpers import Fork - - -@pytest.mark.with_all_precompiles -@pytest.mark.parametrize("same_tx_selfdestruct", [False, True]) -@pytest.mark.parametrize("warm_beneficiary", [False, True]) -@pytest.mark.valid_from("Homestead") -def test_selfdestruct_to_precompile_and_oog_at_minus_1( - pre: Alloc, - blockchain_test: BlockchainTestFiller, - fork: Fork, - precompile: Address, - same_tx_selfdestruct: bool, - warm_beneficiary: bool, -) -> None: - """ - Test successful SELFDESTRUCT to precompile with exact gas. - - Pre-Cancun: Contract is always destroyed. - >=Cancun (EIP-6780): Contract only destroyed if created in same - transaction. 
- """ - alice = pre.fund_eoa() - - victim_balance = 100 - victim_code = Op.SELFDESTRUCT(precompile) - - gas_costs = fork.gas_costs() - push_cost = gas_costs.G_VERY_LOW - selfdestruct_cost = gas_costs.G_SELF_DESTRUCT - new_account_cost = gas_costs.G_NEW_ACCOUNT - if warm_beneficiary: - warming_cost = 0 - else: - warming_cost = gas_costs.G_COLD_ACCOUNT_ACCESS - exact_gas = push_cost + selfdestruct_cost + new_account_cost + warming_cost - - if same_tx_selfdestruct: - # Deploy and selfdestruct in same transaction - # Factory creates victim via CREATE, then calls it - initcode = Initcode(deploy_code=victim_code) - initcode_bytes = bytes(initcode) - - # pre-calculate the factory and victim addresses - factory_address = next(pre._contract_address_iterator) # type: ignore - victim = compute_create_address(address=factory_address, nonce=1) - - factory_code = ( - Op.MSTORE(0, Op.PUSH32(initcode_bytes)) - + Op.CREATE( - value=victim_balance, - offset=32 - len(initcode_bytes), - size=len(initcode_bytes), - ) - + Op.POP # Discard CREATE result, we know the address - + Op.CALL(gas=exact_gas, address=victim) - ) - # actual deploy using known address - factory = pre.deploy_contract( - address=factory_address, - code=factory_code, - balance=victim_balance, - ) - caller = factory - else: - # pre-existing contract - victim = pre.deploy_contract(code=victim_code, balance=victim_balance) - caller_code = Op.CALL(gas=exact_gas, address=victim) - caller = pre.deploy_contract(code=caller_code) - - tx = Transaction( - sender=alice, - to=caller, - gas_limit=200_000, - protected=fork >= Byzantium, - ) - - # BAL expectations >= Amsterdam - expected_block_access_list = None - if fork.header_bal_hash_required(): - if same_tx_selfdestruct: - # Factory does CREATE (nonce 1->2) and transfers balance to victim - # Victim is created and destroyed in same tx - no net changes - account_expectations: Dict[ - Address, BalAccountExpectation | None - ] = { - alice: BalAccountExpectation( - nonce_changes=[ - BalNonceChange(block_access_index=1, post_nonce=1) - ], - ), - caller: BalAccountExpectation( - nonce_changes=[ - BalNonceChange(block_access_index=1, post_nonce=2) - ], - balance_changes=[ - BalBalanceChange(block_access_index=1, post_balance=0) - ], - code_changes=[], - storage_changes=[], - storage_reads=[], - ), - # Victim created and destroyed in same tx - empty changes - victim: BalAccountExpectation.empty(), - precompile: BalAccountExpectation( - balance_changes=[ - BalBalanceChange( - block_access_index=1, post_balance=victim_balance - ) - ], - nonce_changes=[], - code_changes=[], - storage_changes=[], - storage_reads=[], - ), - } - else: - account_expectations = { - alice: BalAccountExpectation( - nonce_changes=[ - BalNonceChange(block_access_index=1, post_nonce=1) - ], - ), - caller: BalAccountExpectation.empty(), - victim: BalAccountExpectation( - balance_changes=[ - BalBalanceChange(block_access_index=1, post_balance=0) - ], - nonce_changes=[], - code_changes=[], - storage_changes=[], - storage_reads=[], - ), - precompile: BalAccountExpectation( - balance_changes=[ - BalBalanceChange( - block_access_index=1, post_balance=victim_balance - ) - ], - nonce_changes=[], - code_changes=[], - storage_changes=[], - storage_reads=[], - ), - } - expected_block_access_list = BlockAccessListExpectation( - account_expectations=account_expectations - ) - - # post state depends on fork and same_tx_selfdestruct - contract_destroyed = fork < Cancun or same_tx_selfdestruct - # Factory nonce is 2 after CREATE, otherwise caller nonce 
stays at 1 - caller_nonce = 2 if same_tx_selfdestruct else 1 - if contract_destroyed: - post = { - alice: Account(nonce=1), - caller: Account(nonce=caller_nonce), - victim: Account.NONEXISTENT, - precompile: Account(balance=victim_balance), - } - else: - # >=Cancun with pre-existing contract, code preserved - post = { - alice: Account(nonce=1), - caller: Account(nonce=caller_nonce), - victim: Account(balance=0, code=victim_code), - precompile: Account(balance=victim_balance), - } - - blockchain_test( - pre=pre, - blocks=[ - Block( - txs=[tx], - expected_block_access_list=expected_block_access_list, - ) - ], - post=post, - ) diff --git a/tests/london/eip1559_fee_market_change/test_tx_type.py b/tests/london/eip1559_fee_market_change/test_tx_type.py index d4e2caaa84..d760d554f4 100644 --- a/tests/london/eip1559_fee_market_change/test_tx_type.py +++ b/tests/london/eip1559_fee_market_change/test_tx_type.py @@ -13,7 +13,7 @@ TransactionException, ) from execution_testing import Opcodes as Op -from execution_testing.forks import Byzantium +from execution_testing.forks import SpuriousDragon from .spec import ref_spec_1559 @@ -62,7 +62,7 @@ def test_eip1559_tx_validity( sender=sender, gas_limit=100_000, max_priority_fee_per_gas=1, - protected=fork >= Byzantium, + protected=fork >= SpuriousDragon, error=TransactionException.TYPE_2_TX_PRE_FORK if not valid else None, ) diff --git a/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py b/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py index ec639e4b7e..dd494164b9 100644 --- a/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py +++ b/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py @@ -1,24 +1,32 @@ """ -Tests for EIP-150 SELFDESTRUCT operation gas costs in the Tangerine -Whistle fork. +Tests for EIP-150 SELFDESTRUCT operation gas costs. + +EIP-150 introduced the 5000 gas cost for SELFDESTRUCT and precise gas +boundaries for state access during the operation. 
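+
+As a worked example, with mainnet gas constants (as returned by
+fork.gas_costs()): a cold, dead beneficiary and a non-zero originator
+balance on a post-Berlin fork require exactly
+G_VERY_LOW + G_SELF_DESTRUCT + G_COLD_ACCOUNT_ACCESS + G_NEW_ACCOUNT
+= 3 + 5000 + 2600 + 25000 = 32603 gas to succeed; one unit less fails
+with out-of-gas at the boundary these tests probe.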
""" from typing import Dict import pytest from execution_testing import ( + EOA, + AccessList, Account, Address, Alloc, BalAccountExpectation, + BalBalanceChange, + BalCodeChange, BalNonceChange, Block, BlockAccessListExpectation, BlockchainTestFiller, + Initcode, Op, Transaction, + compute_create_address, ) -from execution_testing.forks import Byzantium +from execution_testing.forks import Berlin, Cancun, SpuriousDragon from execution_testing.forks.helpers import Fork from .spec import ref_spec_150 @@ -27,91 +35,970 @@ REFERENCE_SPEC_VERSION = ref_spec_150.version -@pytest.mark.pre_alloc_group( - "selfdestruct_to_precompile_oog", - reason="Modifies precompile balance, must be isolated in EngineX format", +# --- helper functions --- # + + +def calculate_selfdestruct_gas( + fork: Fork, + beneficiary_warm: bool, + beneficiary_dead: bool, + originator_balance: int, +) -> int: + """Calculate exact gas needed for SELFDESTRUCT.""" + gas_costs = fork.gas_costs() + gas = ( + gas_costs.G_VERY_LOW + gas_costs.G_SELF_DESTRUCT + ) # PUSH + SELFDESTRUCT + + # Cold access cost (>=Berlin only) + if fork >= Berlin and not beneficiary_warm: + gas += gas_costs.G_COLD_ACCOUNT_ACCESS + + # G_NEW_ACCOUNT: + # - Pre-EIP-161 (TangerineWhistle): charged when beneficiary is dead + # - Post-EIP-161 (>=SpuriousDragon): charged when beneficiary is dead + # AND originator has balance > 0 + if beneficiary_dead: + if fork >= SpuriousDragon: + if originator_balance > 0: + gas += gas_costs.G_NEW_ACCOUNT + else: + # Pre-EIP-161: always charged when beneficiary is dead + gas += gas_costs.G_NEW_ACCOUNT + + return gas + + +def setup_selfdestruct_test( + pre: Alloc, + fork: Fork, + beneficiary: Address, + originator_balance: int, + same_tx: bool, + beneficiary_warm: bool, + inner_call_gas: int, +) -> tuple[Address, Address, Address, Transaction]: + """ + Set up SELFDESTRUCT test with caller contract pattern. 
+ + Returns: (alice, caller, victim, tx) + """ + alice = pre.fund_eoa() + victim_code = Op.SELFDESTRUCT(beneficiary) + + if same_tx: + # Deploy and selfdestruct in same transaction via factory + initcode = Initcode(deploy_code=victim_code) + initcode_len = len(initcode) + + # Pre-deploy initcode at separate address, then use EXTCODECOPY + initcode_address = pre.deploy_contract(initcode) + + factory_address = next(pre._contract_address_iterator) # type: ignore + victim = compute_create_address(address=factory_address, nonce=1) + + factory_code = ( + Op.EXTCODECOPY(initcode_address, 0, 0, initcode_len) + + Op.CREATE(value=originator_balance, offset=0, size=initcode_len) + + Op.POP + + Op.CALL(gas=inner_call_gas, address=victim) + ) + caller = pre.deploy_contract( + address=factory_address, + code=factory_code, + balance=originator_balance, + ) + else: + # Pre-existing contract + victim = pre.deploy_contract( + code=victim_code, balance=originator_balance + ) + caller_code = Op.CALL(gas=inner_call_gas, address=victim) + caller = pre.deploy_contract(code=caller_code) + + # Warm beneficiary via access list (>=Berlin only, + # doesn't add to BAL >= Amsterdam) + access_list = ( + [AccessList(address=beneficiary, storage_keys=[])] + if beneficiary_warm and fork >= Berlin + else None + ) + + tx = Transaction( + sender=alice, + to=caller, + gas_limit=500_000, + protected=fork >= SpuriousDragon, + access_list=access_list, + ) + + return alice, caller, victim, tx + + +def build_bal_expectations( + fork: Fork, + alice: Address, + caller: Address, + victim: Address, + beneficiary: Address, + originator_balance: int, + beneficiary_initial_balance: int, + same_tx: bool, + success: bool, + beneficiary_in_bal: bool, +) -> BlockAccessListExpectation | None: + """Build BAL expectations for >=Amsterdam.""" + if not fork.header_bal_hash_required(): + return None + + victim_code = Op.SELFDESTRUCT(beneficiary) + + # Beneficiary expectation + if not beneficiary_in_bal: + beneficiary_expectation: BalAccountExpectation | None = None + elif not success: + beneficiary_expectation = BalAccountExpectation.empty() + else: + # Success: balance transferred + final_balance = beneficiary_initial_balance + originator_balance + if final_balance > beneficiary_initial_balance: + beneficiary_expectation = BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + block_access_index=1, post_balance=final_balance + ) + ], + ) + else: + beneficiary_expectation = BalAccountExpectation.empty() + + # Victim expectation + if same_tx: + if success: + # Created and destroyed in same tx - no net changes + victim_expectation = BalAccountExpectation.empty() + else: + # OOG: CREATE succeeded but SELFDESTRUCT failed + # Only include balance_changes if originator_balance > 0 + if originator_balance > 0: + victim_expectation = BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + balance_changes=[ + BalBalanceChange( + block_access_index=1, + post_balance=originator_balance, + ) + ], + code_changes=[ + BalCodeChange( + block_access_index=1, new_code=bytes(victim_code) + ) + ], + ) + else: + victim_expectation = BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + code_changes=[ + BalCodeChange( + block_access_index=1, new_code=bytes(victim_code) + ) + ], + ) + else: + if success and originator_balance > 0: + victim_expectation = BalAccountExpectation( + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=0) + ], + ) + else: + 
victim_expectation = BalAccountExpectation.empty() + + # Caller expectation + if same_tx: + # Only include balance_changes if originator_balance > 0 + if originator_balance > 0: + caller_expectation = BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=2) + ], + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=0) + ], + ) + else: + caller_expectation = BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=2) + ], + ) + else: + caller_expectation = BalAccountExpectation.empty() + + account_expectations: Dict[Address, BalAccountExpectation | None] = { + alice: BalAccountExpectation( + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], + ), + caller: caller_expectation, + victim: victim_expectation, + beneficiary: beneficiary_expectation, + } + + return BlockAccessListExpectation( + account_expectations=account_expectations + ) + + +def build_post_state( + fork: Fork, + alice: Address, + caller: Address, + victim: Address, + beneficiary: Address, + originator_balance: int, + beneficiary_initial_balance: int, + same_tx: bool, + success: bool, + beneficiary_has_code: bool = False, +) -> dict: + """Build expected post state.""" + victim_code = Op.SELFDESTRUCT(beneficiary) + caller_nonce = 2 if same_tx else 1 + + if success: + contract_destroyed = fork < Cancun or same_tx + final_beneficiary_balance = ( + beneficiary_initial_balance + originator_balance + ) + + if contract_destroyed: + post: dict = { + alice: Account(nonce=1), + caller: Account(nonce=caller_nonce), + victim: Account.NONEXISTENT, + } + else: + # >=Cancun pre-existing: code preserved, balance transferred + post = { + alice: Account(nonce=1), + caller: Account(nonce=caller_nonce), + victim: Account(balance=0, code=victim_code), + } + + # Beneficiary: verify balance if non-empty, NONEXISTENT if empty + # Pre-EIP-161: empty accounts touched during execution persist + if final_beneficiary_balance > 0 or beneficiary_has_code: + post[beneficiary] = Account(balance=final_beneficiary_balance) + elif fork >= SpuriousDragon: + # EIP-161 (>=SpuriousDragon): empty accounts are deleted + post[beneficiary] = Account.NONEXISTENT + else: + # Pre-EIP-161: empty accounts persist after being touched + post[beneficiary] = Account(balance=0) + else: + # OOG: SELFDESTRUCT failed + if same_tx: + post = { + alice: Account(nonce=1), + caller: Account(nonce=caller_nonce, balance=0), + victim: Account(balance=originator_balance, code=victim_code), + } + else: + post = { + alice: Account(nonce=1), + caller: Account(nonce=caller_nonce), + victim: Account(balance=originator_balance, code=victim_code), + } + + return post + + +@pytest.mark.parametrize( + "is_success", [True, False], ids=["exact_gas", "exact_gas_minus_1"] +) +@pytest.mark.parametrize( + "beneficiary", ["eoa", "contract"], ids=["eoa", "contract"] +) +@pytest.mark.parametrize( + "warm", + [ + pytest.param(False, id="cold"), + pytest.param(True, id="warm", marks=pytest.mark.valid_from("Berlin")), + ], +) +@pytest.mark.parametrize( + "same_tx", [False, True], ids=["pre_deploy", "same_tx"] +) +@pytest.mark.parametrize( + "originator_balance", + [0, 1], + ids=["no_balance", "has_balance"], +) +@pytest.mark.parametrize( + "beneficiary_initial_balance", + [0, 1], + ids=["dead_beneficiary", "alive_beneficiary"], +) +@pytest.mark.valid_from("TangerineWhistle") +def test_selfdestruct_to_account( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + is_success: bool, + 
beneficiary: str, + warm: bool, + same_tx: bool, + originator_balance: int, + beneficiary_initial_balance: int, +) -> None: + """ + Test SELFDESTRUCT success boundary for account beneficiaries. + + - exact_gas: succeeds, balance transferred, contract destroyed + - exact_gas_minus_1: OOG, operation fails + """ + # Create beneficiary + if beneficiary == "eoa": + beneficiary_addr: EOA | Address = pre.fund_eoa( + amount=beneficiary_initial_balance + ) + else: + beneficiary_addr = pre.deploy_contract( + code=Op.STOP, balance=beneficiary_initial_balance + ) + + # Determine if beneficiary is dead (for G_NEW_ACCOUNT calculation) + # Contract with code is NOT dead even with balance=0 + beneficiary_dead = ( + beneficiary_initial_balance == 0 and beneficiary == "eoa" + ) + + # Calculate exact gas for success (includes G_NEW_ACCOUNT if applicable) + inner_call_gas = calculate_selfdestruct_gas( + fork, + beneficiary_warm=warm, + beneficiary_dead=beneficiary_dead, + originator_balance=originator_balance, + ) + if not is_success: + inner_call_gas -= 1 + + # In BAL if: success OR G_NEW_ACCOUNT charged (OOG after access) + needs_new_account = False + if beneficiary_dead: + if fork >= SpuriousDragon: + needs_new_account = originator_balance > 0 + else: + needs_new_account = True + + beneficiary_in_bal = is_success or needs_new_account + + alice, caller, victim, tx = setup_selfdestruct_test( + pre, + fork, + beneficiary_addr, + originator_balance, + same_tx, + beneficiary_warm=warm, + inner_call_gas=inner_call_gas, + ) + + expected_bal = build_bal_expectations( + fork, + alice, + caller, + victim, + beneficiary_addr, + originator_balance, + beneficiary_initial_balance, + same_tx, + success=is_success, + beneficiary_in_bal=beneficiary_in_bal, + ) + + post = build_post_state( + fork, + alice, + caller, + victim, + beneficiary_addr, + originator_balance, + beneficiary_initial_balance, + same_tx, + success=is_success, + beneficiary_has_code=(beneficiary == "contract"), + ) + + blockchain_test( + pre=pre, + blocks=[Block(txs=[tx], expected_block_access_list=expected_bal)], + post=post, + ) + + +@pytest.mark.parametrize( + "is_success", [True, False], ids=["exact_gas", "exact_gas_minus_1"] +) +@pytest.mark.parametrize( + "beneficiary", ["eoa", "contract"], ids=["eoa", "contract"] +) +@pytest.mark.parametrize( + "warm", + [ + pytest.param(False, id="cold"), + pytest.param(True, id="warm", marks=pytest.mark.valid_from("Berlin")), + ], +) +@pytest.mark.parametrize( + "same_tx", [False, True], ids=["pre_deploy", "same_tx"] +) +@pytest.mark.parametrize( + "originator_balance", + [0, 1], + ids=["no_balance", "has_balance"], +) +@pytest.mark.parametrize( + "beneficiary_initial_balance", + [0, 1], + ids=["dead_beneficiary", "alive_beneficiary"], +) +@pytest.mark.valid_from("TangerineWhistle") +def test_selfdestruct_state_access_boundary( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + is_success: bool, + beneficiary: str, + warm: bool, + same_tx: bool, + originator_balance: int, + beneficiary_initial_balance: int, +) -> None: + """ + Test state access boundary for account beneficiaries. + + Consensus check: beneficiary must be accessed at base cost boundary, + before G_NEW_ACCOUNT is evaluated. 
+ + - exact_gas: beneficiary IS accessed (in BAL) + - exact_gas_minus_1: beneficiary NOT accessed (not in BAL) + """ + # Create beneficiary + if beneficiary == "eoa": + beneficiary_addr: EOA | Address = pre.fund_eoa( + amount=beneficiary_initial_balance + ) + else: + beneficiary_addr = pre.deploy_contract( + code=Op.STOP, balance=beneficiary_initial_balance + ) + + # Determine if beneficiary is dead (for G_NEW_ACCOUNT calculation) + # Contract with code is NOT dead even with balance=0 + beneficiary_dead = ( + beneficiary_initial_balance == 0 and beneficiary == "eoa" + ) + + # Calculate gas for state access boundary only (base + cold access) + # Does NOT include G_NEW_ACCOUNT + gas_costs = fork.gas_costs() + inner_call_gas = gas_costs.G_VERY_LOW + gas_costs.G_SELF_DESTRUCT + if fork >= Berlin and not warm: + inner_call_gas += gas_costs.G_COLD_ACCOUNT_ACCESS + + if not is_success: + inner_call_gas -= 1 + + # Determine if operation succeeds at this gas level + # At state access boundary, we have enough gas for base + cold access + # Operation succeeds if NO G_NEW_ACCOUNT is needed: + # - Beneficiary is alive (has balance or has code) + # - OR beneficiary is dead but originator_balance=0 (>=SpuriousDragon) + needs_new_account = False + if beneficiary_dead: + if fork >= SpuriousDragon: + needs_new_account = originator_balance > 0 + else: + needs_new_account = True + + # At exact_gas: success if no G_NEW_ACCOUNT needed + # At exact_gas_minus_1: always OOG (before state access) + operation_success = is_success and not needs_new_account + + alice, caller, victim, tx = setup_selfdestruct_test( + pre, + fork, + beneficiary_addr, + originator_balance, + same_tx, + beneficiary_warm=warm, + inner_call_gas=inner_call_gas, + ) + + # Key difference: beneficiary_in_bal depends on is_success + # exact_gas: state accessed, beneficiary in BAL + # exact_gas_minus_1: OOG before state access, beneficiary NOT in BAL + expected_bal = build_bal_expectations( + fork, + alice, + caller, + victim, + beneficiary_addr, + originator_balance, + beneficiary_initial_balance, + same_tx, + success=operation_success, + beneficiary_in_bal=is_success, + ) + + post = build_post_state( + fork, + alice, + caller, + victim, + beneficiary_addr, + originator_balance, + beneficiary_initial_balance, + same_tx, + success=operation_success, + beneficiary_has_code=(beneficiary == "contract"), + ) + + blockchain_test( + pre=pre, + blocks=[Block(txs=[tx], expected_block_access_list=expected_bal)], + post=post, + ) + + +@pytest.mark.parametrize( + "is_success", [True, False], ids=["exact_gas", "exact_gas_minus_1"] ) -@pytest.mark.parametrize("oog_before_state_access", [True, False]) @pytest.mark.with_all_precompiles +@pytest.mark.pre_alloc_group("precompile_funding") +@pytest.mark.parametrize( + "same_tx", [False, True], ids=["pre_deploy", "same_tx"] +) +@pytest.mark.parametrize( + "originator_balance", + [0, 1], + ids=["no_balance", "has_balance"], +) +@pytest.mark.parametrize( + "beneficiary_initial_balance", + [0, 1], + ids=["dead_beneficiary", "alive_beneficiary"], +) @pytest.mark.valid_from("TangerineWhistle") -def test_selfdestruct_to_precompile_oog( +def test_selfdestruct_to_precompile( pre: Alloc, blockchain_test: BlockchainTestFiller, fork: Fork, + is_success: bool, precompile: Address, - oog_before_state_access: bool, + same_tx: bool, + originator_balance: int, + beneficiary_initial_balance: int, ) -> None: """ - Test SELFDESTRUCT to precompile with out-of-gas at different boundaries. 
+ Test SELFDESTRUCT success boundary for precompile beneficiaries. - - before_state_access: Precompile not touched (>= Amsterdam). - - after_state_access: Precompile touched but no balance change - (>= Amsterdam). + Precompiles are always warm (no cold access charge). + + - exact_gas: succeeds, balance transferred, contract destroyed + - exact_gas_minus_1: OOG, operation fails """ - alice = pre.fund_eoa() + # Fund precompile if needed + if beneficiary_initial_balance > 0: + pre.fund_address(precompile, beneficiary_initial_balance) + + # Precompiles are dead when they have no balance + beneficiary_dead = beneficiary_initial_balance == 0 + + # Calculate exact gas for success (includes G_NEW_ACCOUNT if applicable) + # Precompiles are always warm + inner_call_gas = calculate_selfdestruct_gas( + fork, + beneficiary_warm=True, # Precompiles are always warm + beneficiary_dead=beneficiary_dead, + originator_balance=originator_balance, + ) + if not is_success: + inner_call_gas -= 1 + + # In BAL if: success OR G_NEW_ACCOUNT charged (OOG after access) + needs_new_account = False + if beneficiary_dead: + if fork >= SpuriousDragon: + needs_new_account = originator_balance > 0 + else: + needs_new_account = True + + beneficiary_in_bal = is_success or needs_new_account + + alice, caller, victim, tx = setup_selfdestruct_test( + pre, + fork, + precompile, + originator_balance, + same_tx, + beneficiary_warm=True, # Precompiles are always warm + inner_call_gas=inner_call_gas, + ) + + expected_bal = build_bal_expectations( + fork, + alice, + caller, + victim, + precompile, + originator_balance, + beneficiary_initial_balance, + same_tx, + success=is_success, + beneficiary_in_bal=beneficiary_in_bal, + ) + + post = build_post_state( + fork, + alice, + caller, + victim, + precompile, + originator_balance, + beneficiary_initial_balance, + same_tx, + success=is_success, + beneficiary_has_code=False, # Precompiles don't have stored code + ) + + blockchain_test( + pre=pre, + blocks=[Block(txs=[tx], expected_block_access_list=expected_bal)], + post=post, + ) + + +@pytest.mark.parametrize( + "is_success", [True, False], ids=["exact_gas", "exact_gas_minus_1"] +) +@pytest.mark.with_all_precompiles +@pytest.mark.pre_alloc_group("precompile_funding") +@pytest.mark.parametrize( + "same_tx", [False, True], ids=["pre_deploy", "same_tx"] +) +@pytest.mark.parametrize( + "originator_balance", + [0, 1], + ids=["no_balance", "has_balance"], +) +@pytest.mark.parametrize( + "beneficiary_initial_balance", + [0, 1], + ids=["dead_beneficiary", "alive_beneficiary"], +) +@pytest.mark.valid_from("TangerineWhistle") +def test_selfdestruct_to_precompile_state_access_boundary( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + is_success: bool, + precompile: Address, + same_tx: bool, + originator_balance: int, + beneficiary_initial_balance: int, +) -> None: + """ + Test state access boundary for precompile beneficiaries. + + Consensus check: precompile must be accessed at base cost boundary, + before G_NEW_ACCOUNT is evaluated. Precompiles are always warm. 
+ + - exact_gas: precompile IS accessed (in BAL) + - exact_gas_minus_1: precompile NOT accessed (not in BAL) + """ + # Fund precompile if needed + if beneficiary_initial_balance > 0: + pre.fund_address(precompile, beneficiary_initial_balance) - victim_balance = 100 - victim_code = Op.SELFDESTRUCT(precompile) - victim = pre.deploy_contract(code=victim_code, balance=victim_balance) + beneficiary_dead = beneficiary_initial_balance == 0 + # State access boundary: base cost only (no G_NEW_ACCOUNT) gas_costs = fork.gas_costs() - push_cost = gas_costs.G_VERY_LOW - selfdestruct_cost = gas_costs.G_SELF_DESTRUCT - # exact gas would be: - # push_cost + selfdestruct_cost + new_account_cost + G_NEW_ACCOUNT + inner_call_gas = gas_costs.G_VERY_LOW + gas_costs.G_SELF_DESTRUCT - if oog_before_state_access: - gas = push_cost + selfdestruct_cost - 1 - else: - gas = push_cost + selfdestruct_cost + if not is_success: + inner_call_gas -= 1 + + # Success at base cost if no G_NEW_ACCOUNT needed + needs_new_account = False + if beneficiary_dead: + if fork >= SpuriousDragon: + needs_new_account = originator_balance > 0 + else: + needs_new_account = True + + operation_success = is_success and not needs_new_account - caller_code = Op.CALL(gas=gas, address=victim) - caller = pre.deploy_contract(code=caller_code) + alice, caller, victim, tx = setup_selfdestruct_test( + pre, + fork, + precompile, + originator_balance, + same_tx, + beneficiary_warm=True, # Precompiles are always warm + inner_call_gas=inner_call_gas, + ) + + # Key difference: beneficiary_in_bal depends on is_success + # exact_gas: state accessed, precompile in BAL + # exact_gas_minus_1: OOG before state access, precompile NOT in BAL + expected_bal = build_bal_expectations( + fork, + alice, + caller, + victim, + precompile, + originator_balance, + beneficiary_initial_balance, + same_tx, + success=operation_success, + beneficiary_in_bal=is_success, + ) + + post = build_post_state( + fork, + alice, + caller, + victim, + precompile, + originator_balance, + beneficiary_initial_balance, + same_tx, + success=operation_success, + beneficiary_has_code=False, # Precompiles don't have stored code + ) + + blockchain_test( + pre=pre, + blocks=[Block(txs=[tx], expected_block_access_list=expected_bal)], + post=post, + ) + + +# --- SELFDESTRUCT to self tests --- # + + +@pytest.mark.parametrize( + "is_success", [True, False], ids=["exact_gas", "exact_gas_minus_1"] +) +@pytest.mark.parametrize( + "originator_balance", + [0, 1], + ids=["no_balance", "has_balance"], +) +@pytest.mark.parametrize( + "same_tx", [False, True], ids=["pre_deploy", "same_tx"] +) +@pytest.mark.valid_from("TangerineWhistle") +def test_selfdestruct_to_self( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + is_success: bool, + originator_balance: int, + same_tx: bool, +) -> None: + """ + Test SELFDESTRUCT where beneficiary is the executing contract itself. + + Uses Op.SELFDESTRUCT(Op.ADDRESS) - the victim selfdestructs to itself. 
+ + Key characteristics: + - Beneficiary is always warm (it's the executing contract) + - Beneficiary is always alive (has code) + - No G_NEW_ACCOUNT charge + - No cold access charge (>=Berlin) + - Balance is "transferred" to self (no net change until destruction) + + Gas boundary: + - exact_gas: SELFDESTRUCT completes successfully + - exact_gas_minus_1: OOG, SELFDESTRUCT fails + + Post-destruction behavior (is_success=True only): + - Pre-Cancun or same_tx: contract destroyed, balance = 0 + - >=Cancun pre-existing: contract NOT destroyed, balance preserved + """ + alice = pre.fund_eoa() + victim_code = Op.SELFDESTRUCT(Op.ADDRESS) + + # Gas: ADDRESS + SELFDESTRUCT (no cold access, no G_NEW_ACCOUNT) + # Note: ADDRESS opcode costs G_BASE (2), not G_VERY_LOW (3) like PUSH + gas_costs = fork.gas_costs() + base_gas = gas_costs.G_BASE + gas_costs.G_SELF_DESTRUCT + inner_call_gas = base_gas if is_success else base_gas - 1 + + if same_tx: + # Deploy and selfdestruct in same transaction via factory + initcode = Initcode(deploy_code=victim_code) + initcode_len = len(initcode) + + initcode_address = pre.deploy_contract(initcode) + + factory_address = next(pre._contract_address_iterator) # type: ignore + victim = compute_create_address(address=factory_address, nonce=1) + + factory_code = ( + Op.EXTCODECOPY(initcode_address, 0, 0, initcode_len) + + Op.CREATE(value=originator_balance, offset=0, size=initcode_len) + + Op.POP + + Op.CALL(gas=inner_call_gas, address=victim) + ) + caller = pre.deploy_contract( + address=factory_address, + code=factory_code, + balance=originator_balance, + ) + else: + # Pre-existing contract + victim = pre.deploy_contract( + code=victim_code, balance=originator_balance + ) + caller_code = Op.CALL(gas=inner_call_gas, address=victim) + caller = pre.deploy_contract(code=caller_code) tx = Transaction( sender=alice, to=caller, - gas_limit=100_000, - protected=True if fork >= Byzantium else False, + gas_limit=500_000, + protected=fork >= SpuriousDragon, ) - # BAL expectations >= Amsterdam - expected_block_access_list = None + # Build BAL expectations + expected_bal: BlockAccessListExpectation | None = None if fork.header_bal_hash_required(): - account_expectations: Dict[Address, BalAccountExpectation | None] = { - alice: BalAccountExpectation( - nonce_changes=[ - BalNonceChange(block_access_index=1, post_nonce=1) - ], - ), - caller: BalAccountExpectation.empty(), - victim: BalAccountExpectation.empty(), - } - if oog_before_state_access: - # precompile not touched, not in BAL - account_expectations[precompile] = None + if same_tx: + if is_success: + # Created and destroyed in same tx - no net changes for victim + victim_expectation = BalAccountExpectation.empty() + else: + # OOG: CREATE succeeded but SELFDESTRUCT failed + if originator_balance > 0: + victim_expectation = BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + balance_changes=[ + BalBalanceChange( + block_access_index=1, + post_balance=originator_balance, + ) + ], + code_changes=[ + BalCodeChange( + block_access_index=1, + new_code=bytes(victim_code), + ) + ], + ) + else: + victim_expectation = BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + code_changes=[ + BalCodeChange( + block_access_index=1, + new_code=bytes(victim_code), + ) + ], + ) + + if originator_balance > 0: + caller_expectation = BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=2) + ], + balance_changes=[ + 
BalBalanceChange(block_access_index=1, post_balance=0) + ], + ) + else: + caller_expectation = BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=2) + ], + ) else: - # precompile touched, in BAL with empty expectation - account_expectations[precompile] = BalAccountExpectation.empty() - expected_block_access_list = BlockAccessListExpectation( - account_expectations=account_expectations + # Pre-existing: victim in BAL + if not is_success: + # OOG: victim accessed but no state changes + victim_expectation = BalAccountExpectation.empty() + elif fork >= Cancun: + # >=Cancun success: contract survives with original balance + victim_expectation = BalAccountExpectation.empty() + elif originator_balance > 0: + # Pre-Cancun success: contract destroyed + victim_expectation = BalAccountExpectation( + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=0) + ], + ) + else: + victim_expectation = BalAccountExpectation.empty() + caller_expectation = BalAccountExpectation.empty() + + expected_bal = BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + ), + caller: caller_expectation, + victim: victim_expectation, + } ) - # OOG: victim keeps balance and code, precompile unchanged - post = { - alice: Account(nonce=1), - caller: Account(), - victim: Account(balance=victim_balance, code=victim_code), - precompile: Account.NONEXISTENT, - } + # Build post state + caller_nonce = 2 if same_tx else 1 + + if not is_success: + # OOG: SELFDESTRUCT failed, contract survives + if same_tx: + post: dict = { + alice: Account(nonce=1), + caller: Account(nonce=caller_nonce, balance=0), + victim: Account(balance=originator_balance, code=victim_code), + } + else: + post = { + alice: Account(nonce=1), + caller: Account(nonce=caller_nonce), + victim: Account(balance=originator_balance, code=victim_code), + } + else: + contract_destroyed = fork < Cancun or same_tx + if contract_destroyed: + post = { + alice: Account(nonce=1), + caller: Account(nonce=caller_nonce), + victim: Account.NONEXISTENT, + } + else: + # >=Cancun pre-existing: code preserved, balance preserved + post = { + alice: Account(nonce=1), + caller: Account(nonce=caller_nonce), + victim: Account(balance=originator_balance, code=victim_code), + } blockchain_test( pre=pre, - blocks=[ - Block( - txs=[tx], - expected_block_access_list=expected_block_access_list, - ) - ], + blocks=[Block(txs=[tx], expected_block_access_list=expected_bal)], post=post, ) From c2c020b1817581b3f6f4f99fc2cec773fabd9ff4 Mon Sep 17 00:00:00 2001 From: fselmo Date: Thu, 8 Jan 2026 14:17:05 -0700 Subject: [PATCH 068/154] refactor(test): fork.supports_protected_txs() instead of direct comparison - This will also help with support for any forks of this repo for L2s or other projects if some fork definitions don't have any meaningful relationship to SpuriousDragon. 
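A minimal sketch of the call-site change, using the names from the hunks
below (`fork` here stands for whatever Fork subclass the test is
parametrized with):

    from execution_testing.forks import SpuriousDragon

    # Before: call sites hard-code a comparison against a mainnet fork class.
    protected = fork >= SpuriousDragon

    # After: the fork class answers the question itself, so fork definitions
    # with no relationship to SpuriousDragon only override one classmethod.
    protected = fork.supports_protected_txs()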
--- .../src/execution_testing/forks/base_fork.py | 6 ++++++ .../src/execution_testing/forks/forks/forks.py | 14 ++++++++++++++ tests/berlin/eip2930_access_list/test_tx_type.py | 3 +-- tests/byzantium/eip196_ec_add_mul/test_gas.py | 3 +-- tests/byzantium/eip197_ec_pairing/test_gas.py | 3 +-- tests/frontier/create/test_create_deposit_oog.py | 4 ++-- tests/frontier/create/test_create_one_byte.py | 4 ++-- .../create/test_create_suicide_during_init.py | 3 +-- tests/frontier/create/test_create_suicide_store.py | 3 +-- tests/frontier/opcodes/test_all_opcodes.py | 3 +-- tests/frontier/opcodes/test_blockhash.py | 5 ++--- .../test_call_and_callcode_gas_calculation.py | 3 +-- tests/frontier/opcodes/test_calldatacopy.py | 3 +-- tests/frontier/opcodes/test_calldataload.py | 5 ++--- tests/frontier/opcodes/test_calldatasize.py | 5 ++--- tests/frontier/opcodes/test_dup.py | 5 +++-- tests/frontier/opcodes/test_push.py | 5 ++--- tests/frontier/opcodes/test_swap.py | 5 ++--- tests/frontier/precompiles/test_ecrecover.py | 3 +-- tests/frontier/precompiles/test_ripemd.py | 3 +-- .../eip1559_fee_market_change/test_tx_type.py | 3 +-- .../test_eip150_selfdestruct.py | 4 ++-- 22 files changed, 50 insertions(+), 45 deletions(-) diff --git a/packages/testing/src/execution_testing/forks/base_fork.py b/packages/testing/src/execution_testing/forks/base_fork.py index 2a465af677..8beba96ac3 100644 --- a/packages/testing/src/execution_testing/forks/base_fork.py +++ b/packages/testing/src/execution_testing/forks/base_fork.py @@ -597,6 +597,12 @@ def get_reward(cls, *, block_number: int = 0, timestamp: int = 0) -> int: # Transaction related abstract methods + @classmethod + @abstractmethod + def supports_protected_txs(cls) -> bool: + """Return whether the fork implements EIP-155 transaction protection""" + pass + @classmethod @abstractmethod def tx_types( diff --git a/packages/testing/src/execution_testing/forks/forks/forks.py b/packages/testing/src/execution_testing/forks/forks/forks.py index 3804250c43..f8eeb4168a 100644 --- a/packages/testing/src/execution_testing/forks/forks/forks.py +++ b/packages/testing/src/execution_testing/forks/forks/forks.py @@ -1072,6 +1072,13 @@ def get_reward(cls, *, block_number: int = 0, timestamp: int = 0) -> int: del block_number, timestamp return 5_000_000_000_000_000_000 + @classmethod + def supports_protected_txs(cls) -> bool: + """ + At Genesis, fork does not have support for EIP-155 protected transactions. + """ + return False + @classmethod def tx_types( cls, *, block_number: int = 0, timestamp: int = 0 @@ -1505,6 +1512,13 @@ def _calculate_call_gas( return base_cost + @classmethod + def supports_protected_txs(cls) -> bool: + """ + At Genesis, supports EIP-155 protected transactions. 
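+        (EIP-155 replay protection activated with Spurious Dragon.)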
+ """ + return True + class Byzantium(SpuriousDragon): """Byzantium fork.""" diff --git a/tests/berlin/eip2930_access_list/test_tx_type.py b/tests/berlin/eip2930_access_list/test_tx_type.py index e96d8fefcd..4eb7434abe 100644 --- a/tests/berlin/eip2930_access_list/test_tx_type.py +++ b/tests/berlin/eip2930_access_list/test_tx_type.py @@ -13,7 +13,6 @@ TransactionException, ) from execution_testing import Opcodes as Op -from execution_testing.forks import SpuriousDragon from .spec import ref_spec_2930 @@ -62,7 +61,7 @@ def test_eip2930_tx_validity( sender=sender, gas_limit=100_000, access_list=[], - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), error=TransactionException.TYPE_1_TX_PRE_FORK if not valid else None, ) diff --git a/tests/byzantium/eip196_ec_add_mul/test_gas.py b/tests/byzantium/eip196_ec_add_mul/test_gas.py index eb5d134360..a82e622da9 100644 --- a/tests/byzantium/eip196_ec_add_mul/test_gas.py +++ b/tests/byzantium/eip196_ec_add_mul/test_gas.py @@ -8,7 +8,6 @@ StateTestFiller, Transaction, ) -from execution_testing.forks import SpuriousDragon from execution_testing.forks.helpers import Fork from execution_testing.vm import Opcodes as Op @@ -55,7 +54,7 @@ def test_gas_costs( to=account, sender=pre.fund_eoa(), gas_limit=100_0000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) post = {account: Account(storage={0: 1 if enough_gas else 0})} diff --git a/tests/byzantium/eip197_ec_pairing/test_gas.py b/tests/byzantium/eip197_ec_pairing/test_gas.py index 5a0425eaf3..3a79c0db85 100644 --- a/tests/byzantium/eip197_ec_pairing/test_gas.py +++ b/tests/byzantium/eip197_ec_pairing/test_gas.py @@ -8,7 +8,6 @@ Transaction, ) from execution_testing.base_types.base_types import Address -from execution_testing.forks import SpuriousDragon from execution_testing.forks.helpers import Fork from execution_testing.vm import Opcodes as Op @@ -49,7 +48,7 @@ def test_gas_costs( to=account, sender=pre.fund_eoa(), gas_limit=100_0000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) post = {account: Account(storage={0: 1 if enough_gas else 0})} diff --git a/tests/frontier/create/test_create_deposit_oog.py b/tests/frontier/create/test_create_deposit_oog.py index 590fef42b2..ba226bba82 100644 --- a/tests/frontier/create/test_create_deposit_oog.py +++ b/tests/frontier/create/test_create_deposit_oog.py @@ -13,7 +13,7 @@ Transaction, compute_create_address, ) -from execution_testing.forks import Frontier, SpuriousDragon, TangerineWhistle +from execution_testing.forks import Frontier, TangerineWhistle SLOT_CREATE_RESULT = 1 SLOT_CREATE_RESULT_PRE = 0xDEADBEEF @@ -87,7 +87,7 @@ def test_create_deposit_oog( gas_limit=10_000_000, to=caller_address, sender=sender, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) created_account: Account | None = Account(code=b"\x00" * deposited_len) diff --git a/tests/frontier/create/test_create_one_byte.py b/tests/frontier/create/test_create_one_byte.py index 6e0d596589..cede8b537a 100644 --- a/tests/frontier/create/test_create_one_byte.py +++ b/tests/frontier/create/test_create_one_byte.py @@ -17,7 +17,7 @@ Transaction, compute_create_address, ) -from execution_testing.forks import London, SpuriousDragon +from execution_testing.forks import London @pytest.mark.ported_from( @@ -100,7 +100,7 @@ def test_create_one_byte( data=b"", nonce=0, sender=sender, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) post = { diff --git 
a/tests/frontier/create/test_create_suicide_during_init.py b/tests/frontier/create/test_create_suicide_during_init.py index 30caa37cee..dbd7febbce 100644 --- a/tests/frontier/create/test_create_suicide_during_init.py +++ b/tests/frontier/create/test_create_suicide_during_init.py @@ -14,7 +14,6 @@ Transaction, compute_create_address, ) -from execution_testing.forks import SpuriousDragon class Operation(Enum): @@ -93,7 +92,7 @@ def test_create_suicide_during_transaction_create( data=contract_initcode, value=tx_value, sender=sender, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) post = { diff --git a/tests/frontier/create/test_create_suicide_store.py b/tests/frontier/create/test_create_suicide_store.py index e28c213be7..34cc31d53f 100644 --- a/tests/frontier/create/test_create_suicide_store.py +++ b/tests/frontier/create/test_create_suicide_store.py @@ -19,7 +19,6 @@ Transaction, compute_create_address, ) -from execution_testing.forks import SpuriousDragon class Operation(IntEnum): @@ -147,7 +146,7 @@ def test_create_suicide_store( to=create_contract, data=suicide_initcode, sender=sender, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) post = { diff --git a/tests/frontier/opcodes/test_all_opcodes.py b/tests/frontier/opcodes/test_all_opcodes.py index 436052ab96..bf87d21836 100644 --- a/tests/frontier/opcodes/test_all_opcodes.py +++ b/tests/frontier/opcodes/test_all_opcodes.py @@ -21,7 +21,6 @@ UndefinedOpcodes, gas_test, ) -from execution_testing.forks import SpuriousDragon REFERENCE_SPEC_GIT_PATH = "N/A" REFERENCE_SPEC_VERSION = "N/A" @@ -183,7 +182,7 @@ def test_stack_overflow( gas_limit=100_000, to=contract, sender=pre.fund_eoa(), - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) expected_storage = { slot_code_worked: value_code_failed if fails else value_code_worked diff --git a/tests/frontier/opcodes/test_blockhash.py b/tests/frontier/opcodes/test_blockhash.py index 34e1e91ee1..de0b7a3034 100644 --- a/tests/frontier/opcodes/test_blockhash.py +++ b/tests/frontier/opcodes/test_blockhash.py @@ -9,7 +9,6 @@ Op, Transaction, ) -from execution_testing.forks import SpuriousDragon from execution_testing.forks.helpers import Fork @@ -60,7 +59,7 @@ def test_genesis_hash_available( sender=sender, to=contract, gas_limit=100_000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) ] if not setup_blocks_empty @@ -76,7 +75,7 @@ def test_genesis_hash_available( sender=sender, to=contract, gas_limit=100_000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) ] ) diff --git a/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py b/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py index ee8dfc9350..3c178fb506 100644 --- a/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py +++ b/tests/frontier/opcodes/test_call_and_callcode_gas_calculation.py @@ -55,7 +55,6 @@ Berlin, Byzantium, Homestead, - SpuriousDragon, ) from execution_testing.forks.helpers import Fork @@ -201,7 +200,7 @@ def caller_tx(sender: EOA, caller_address: Address, fork: Fork) -> Transaction: value=1, gas_limit=500_000, sender=sender, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) diff --git a/tests/frontier/opcodes/test_calldatacopy.py b/tests/frontier/opcodes/test_calldatacopy.py index a03e613ef4..93012bb4dd 100644 --- a/tests/frontier/opcodes/test_calldatacopy.py +++ b/tests/frontier/opcodes/test_calldatacopy.py @@ 
-10,7 +10,6 @@ StateTestFiller, Transaction, ) -from execution_testing.forks import SpuriousDragon @pytest.mark.ported_from( @@ -193,7 +192,7 @@ def test_calldatacopy( data=tx_data, gas_limit=100_000, gas_price=0x0A, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), sender=pre.fund_eoa(), to=to, value=0x01, diff --git a/tests/frontier/opcodes/test_calldataload.py b/tests/frontier/opcodes/test_calldataload.py index 0faaac71f1..d9ee82225e 100644 --- a/tests/frontier/opcodes/test_calldataload.py +++ b/tests/frontier/opcodes/test_calldataload.py @@ -10,7 +10,6 @@ Transaction, ) from execution_testing import Macros as Om -from execution_testing.forks import SpuriousDragon @pytest.mark.ported_from( @@ -92,7 +91,7 @@ def test_calldataload( tx = Transaction( data=calldata, gas_limit=100_000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), sender=pre.fund_eoa(), to=to, ) @@ -101,7 +100,7 @@ def test_calldataload( tx = Transaction( data=calldata, gas_limit=100_000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), sender=pre.fund_eoa(), to=contract_address, ) diff --git a/tests/frontier/opcodes/test_calldatasize.py b/tests/frontier/opcodes/test_calldatasize.py index 1e060fff41..7b190f8b5c 100644 --- a/tests/frontier/opcodes/test_calldatasize.py +++ b/tests/frontier/opcodes/test_calldatasize.py @@ -10,7 +10,6 @@ Transaction, ) from execution_testing import Macros as Om -from execution_testing.forks import SpuriousDragon @pytest.mark.ported_from( @@ -69,7 +68,7 @@ def test_calldatasize( tx = Transaction( gas_limit=100_000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), sender=pre.fund_eoa(), to=to, ) @@ -78,7 +77,7 @@ def test_calldatasize( tx = Transaction( data=calldata, gas_limit=100_000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), sender=pre.fund_eoa(), to=contract_address, ) diff --git a/tests/frontier/opcodes/test_dup.py b/tests/frontier/opcodes/test_dup.py index 170e95d92f..210c744072 100644 --- a/tests/frontier/opcodes/test_dup.py +++ b/tests/frontier/opcodes/test_dup.py @@ -11,7 +11,6 @@ Storage, Transaction, ) -from execution_testing.forks import SpuriousDragon @pytest.mark.parametrize( @@ -72,7 +71,9 @@ def test_dup( ty=0x0, to=account, gas_limit=500000, - protected=fork >= SpuriousDragon, + gas_price=10, + protected=fork.supports_protected_txs(), + data="", sender=sender, ) diff --git a/tests/frontier/opcodes/test_push.py b/tests/frontier/opcodes/test_push.py index 180b1d7fb5..c1e59bf9f0 100644 --- a/tests/frontier/opcodes/test_push.py +++ b/tests/frontier/opcodes/test_push.py @@ -18,7 +18,6 @@ StateTestFiller, Transaction, ) -from execution_testing.forks import SpuriousDragon def get_input_for_push_opcode(opcode: Op) -> bytes: @@ -77,7 +76,7 @@ def test_push( sender=pre.fund_eoa(), to=contract, gas_limit=500_000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) post = {} @@ -149,7 +148,7 @@ def test_stack_overflow( sender=pre.fund_eoa(), to=contract, gas_limit=500_000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) post = {} diff --git a/tests/frontier/opcodes/test_swap.py b/tests/frontier/opcodes/test_swap.py index 59063d8bbd..2de28b8c37 100644 --- a/tests/frontier/opcodes/test_swap.py +++ b/tests/frontier/opcodes/test_swap.py @@ -13,7 +13,6 @@ Bytecode, Environment, ) -from execution_testing.forks import SpuriousDragon from execution_testing import Op from 
execution_testing import ( StateTestFiller, @@ -76,7 +75,7 @@ def test_swap( sender=pre.fund_eoa(), to=contract_address, gas_limit=500_000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) # Calculate expected storage values after SWAP and storage operations @@ -146,7 +145,7 @@ def test_stack_underflow( sender=pre.fund_eoa(), to=contract, gas_limit=500_000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) # Define the expected post-state. diff --git a/tests/frontier/precompiles/test_ecrecover.py b/tests/frontier/precompiles/test_ecrecover.py index 4d2b9a0e4b..a6fa669bef 100644 --- a/tests/frontier/precompiles/test_ecrecover.py +++ b/tests/frontier/precompiles/test_ecrecover.py @@ -8,7 +8,6 @@ StateTestFiller, Transaction, ) -from execution_testing.forks.forks.forks import SpuriousDragon from execution_testing.forks.helpers import Fork from execution_testing.vm import Opcodes as Op @@ -388,7 +387,7 @@ def test_precompiles( to=account, sender=pre.fund_eoa(), gas_limit=1_000_000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) post = {account: Account(storage={0: output})} diff --git a/tests/frontier/precompiles/test_ripemd.py b/tests/frontier/precompiles/test_ripemd.py index 5f382e03b6..129c555845 100644 --- a/tests/frontier/precompiles/test_ripemd.py +++ b/tests/frontier/precompiles/test_ripemd.py @@ -8,7 +8,6 @@ StateTestFiller, Transaction, ) -from execution_testing.forks.forks.forks import SpuriousDragon from execution_testing.forks.helpers import Fork from execution_testing.vm import Opcodes as Op @@ -176,7 +175,7 @@ def test_precompiles( sender=pre.fund_eoa(), gas_limit=1_000_0000, data=msg, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) post = {account: Account(storage={0: output if not oog else 0})} diff --git a/tests/london/eip1559_fee_market_change/test_tx_type.py b/tests/london/eip1559_fee_market_change/test_tx_type.py index d760d554f4..c8ade2fb6b 100644 --- a/tests/london/eip1559_fee_market_change/test_tx_type.py +++ b/tests/london/eip1559_fee_market_change/test_tx_type.py @@ -13,7 +13,6 @@ TransactionException, ) from execution_testing import Opcodes as Op -from execution_testing.forks import SpuriousDragon from .spec import ref_spec_1559 @@ -62,7 +61,7 @@ def test_eip1559_tx_validity( sender=sender, gas_limit=100_000, max_priority_fee_per_gas=1, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), error=TransactionException.TYPE_2_TX_PRE_FORK if not valid else None, ) diff --git a/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py b/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py index dd494164b9..850e6a1e4a 100644 --- a/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py +++ b/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py @@ -128,7 +128,7 @@ def setup_selfdestruct_test( sender=alice, to=caller, gas_limit=500_000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), access_list=access_list, ) @@ -875,7 +875,7 @@ def test_selfdestruct_to_self( sender=alice, to=caller, gas_limit=500_000, - protected=fork >= SpuriousDragon, + protected=fork.supports_protected_txs(), ) # Build BAL expectations From a21221e636a0316263715b1db60db7b78c6fe72f Mon Sep 17 00:00:00 2001 From: fselmo Date: Thu, 8 Jan 2026 15:55:52 -0700 Subject: [PATCH 069/154] refactor: address comments from PR #1954 --- 
.../test_eip150_selfdestruct.py | 102 +++++++----------- 1 file changed, 38 insertions(+), 64 deletions(-) diff --git a/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py b/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py index 850e6a1e4a..4ad8e50fe0 100644 --- a/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py +++ b/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py @@ -26,6 +26,9 @@ Transaction, compute_create_address, ) +from execution_testing import ( + Macros as Om, +) from execution_testing.forks import Berlin, Cancun, SpuriousDragon from execution_testing.forks.helpers import Fork @@ -91,23 +94,17 @@ def setup_selfdestruct_test( initcode = Initcode(deploy_code=victim_code) initcode_len = len(initcode) - # Pre-deploy initcode at separate address, then use EXTCODECOPY - initcode_address = pre.deploy_contract(initcode) - - factory_address = next(pre._contract_address_iterator) # type: ignore - victim = compute_create_address(address=factory_address, nonce=1) - - factory_code = ( - Op.EXTCODECOPY(initcode_address, 0, 0, initcode_len) - + Op.CREATE(value=originator_balance, offset=0, size=initcode_len) - + Op.POP - + Op.CALL(gas=inner_call_gas, address=victim) + factory_code = Om.MSTORE(initcode, 0) + Op.CALL( + gas=inner_call_gas, + address=Op.CREATE( + value=originator_balance, offset=0, size=initcode_len + ), ) caller = pre.deploy_contract( - address=factory_address, code=factory_code, balance=originator_balance, ) + victim = compute_create_address(address=caller, nonce=1) else: # Pre-existing contract victim = pre.deploy_contract( @@ -847,22 +844,17 @@ def test_selfdestruct_to_self( initcode = Initcode(deploy_code=victim_code) initcode_len = len(initcode) - initcode_address = pre.deploy_contract(initcode) - - factory_address = next(pre._contract_address_iterator) # type: ignore - victim = compute_create_address(address=factory_address, nonce=1) - - factory_code = ( - Op.EXTCODECOPY(initcode_address, 0, 0, initcode_len) - + Op.CREATE(value=originator_balance, offset=0, size=initcode_len) - + Op.POP - + Op.CALL(gas=inner_call_gas, address=victim) + factory_code = Om.MSTORE(initcode, 0) + Op.CALL( + gas=inner_call_gas, + address=Op.CREATE( + value=originator_balance, offset=0, size=initcode_len + ), ) caller = pre.deploy_contract( - address=factory_address, code=factory_code, balance=originator_balance, ) + victim = compute_create_address(address=caller, nonce=1) else: # Pre-existing contract victim = pre.deploy_contract( @@ -887,51 +879,33 @@ def test_selfdestruct_to_self( victim_expectation = BalAccountExpectation.empty() else: # OOG: CREATE succeeded but SELFDESTRUCT failed - if originator_balance > 0: - victim_expectation = BalAccountExpectation( - nonce_changes=[ - BalNonceChange(block_access_index=1, post_nonce=1) - ], - balance_changes=[ - BalBalanceChange( - block_access_index=1, - post_balance=originator_balance, - ) - ], - code_changes=[ - BalCodeChange( - block_access_index=1, - new_code=bytes(victim_code), - ) - ], - ) - else: - victim_expectation = BalAccountExpectation( - nonce_changes=[ - BalNonceChange(block_access_index=1, post_nonce=1) - ], - code_changes=[ - BalCodeChange( - block_access_index=1, - new_code=bytes(victim_code), - ) - ], - ) - - if originator_balance > 0: - caller_expectation = BalAccountExpectation( + victim_expectation = BalAccountExpectation( nonce_changes=[ - BalNonceChange(block_access_index=1, post_nonce=2) + 
BalNonceChange(block_access_index=1, post_nonce=1) ], - balance_changes=[ - BalBalanceChange(block_access_index=1, post_balance=0) + code_changes=[ + BalCodeChange( + block_access_index=1, + new_code=bytes(victim_code), + ) ], ) - else: - caller_expectation = BalAccountExpectation( - nonce_changes=[ - BalNonceChange(block_access_index=1, post_nonce=2) - ], + if originator_balance > 0: + victim_expectation.balance_changes.append( + BalBalanceChange( + block_access_index=1, + post_balance=originator_balance, + ) + ) + + caller_expectation = BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=2) + ], + ) + if originator_balance > 0: + caller_expectation.balance_changes.append( + BalBalanceChange(block_access_index=1, post_balance=0) ) else: # Pre-existing: victim in BAL From d7324debca32157240209821f9215790d407fefe Mon Sep 17 00:00:00 2001 From: fselmo Date: Thu, 8 Jan 2026 17:22:58 -0700 Subject: [PATCH 070/154] feat(test): Add selfdestruct to system contracts + to self from initcode --- .../test_cases.md | 2 + .../test_eip150_selfdestruct.py | 331 ++++++++++++++---- 2 files changed, 274 insertions(+), 59 deletions(-) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 5bd0cf31ac..29d77ea46a 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -10,6 +10,8 @@ | `test_selfdestruct_to_self` (TangerineWhistle) | Ensure BAL captures SELFDESTRUCT where beneficiary is self at gas boundary | Victim executes `SELFDESTRUCT(ADDRESS)` - selfdestructs to itself. Always warm, always alive (no G_NEW_ACCOUNT, no cold access). Gas = G_BASE + G_SELF_DESTRUCT. Parametrized: is_success (exact_gas/exact_gas_minus_1), originator_balance (0/1), same_tx (pre_deploy/same_tx). File: `tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py`. | exact_gas_minus_1: Victim in BAL with unchanged state. exact_gas: Pre-Cancun/same_tx: destroyed, balance=0. >=Cancun pre-existing: preserved with original balance. | ✅ Completed | | `test_selfdestruct_to_precompile` (TangerineWhistle) | Ensure BAL captures SELFDESTRUCT success boundary for precompile beneficiaries | Victim executes `SELFDESTRUCT(precompile)` at exact gas boundary. Precompiles are always warm (no cold access charge). Parametrized: is_success (exact_gas/exact_gas_minus_1), all precompiles via `@pytest.mark.with_all_precompiles`, same_tx (pre_deploy/same_tx), originator_balance (0/1), beneficiary_balance (0/1). File: `tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py`. | exact_gas: Precompile in BAL with `balance_changes`, victim destroyed (pre-Cancun/same_tx) or preserved (>=Cancun). exact_gas_minus_1: OOG, precompile in BAL only if G_NEW_ACCOUNT was part of gas calculation. | ✅ Completed | | `test_selfdestruct_to_precompile_state_access_boundary` (TangerineWhistle) | Ensure BAL correctly tracks precompile access at state access boundary (consensus check) | Victim executes `SELFDESTRUCT(precompile)` at state access boundary (base only, precompiles always warm). Verifies precompile is accessed before G_NEW_ACCOUNT check. Parametrized: is_success (exact_gas/exact_gas_minus_1), all precompiles, same_tx, originator_balance (0/1), beneficiary_balance (0/1). File: `tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py`. 
| exact_gas: Precompile **IN** BAL (state accessed). exact_gas_minus_1: Precompile **NOT** in BAL (OOG before state access). Operation may succeed at exact_gas if no G_NEW_ACCOUNT needed. | ✅ Completed | +| `test_selfdestruct_to_system_contract` (Cancun) | Ensure BAL captures SELFDESTRUCT success boundary for system contract beneficiaries | Victim executes `SELFDESTRUCT(system_contract)` at exact gas boundary. System contracts are always warm (no cold access charge) and always have code (no G_NEW_ACCOUNT charge). Gas = G_VERY_LOW + G_SELF_DESTRUCT. Parametrized: is_success (exact_gas/exact_gas_minus_1), all system contracts via `@pytest.mark.with_all_system_contracts`, same_tx (pre_deploy/same_tx), originator_balance (0/1). File: `tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py`. | exact_gas: System contract in BAL with `balance_changes` if originator had balance, victim destroyed (same_tx) or balance=0 (pre-existing). exact_gas_minus_1: OOG, system contract not in BAL (no state access). | ✅ Completed | +| `test_initcode_selfdestruct_to_self` (TangerineWhistle) | Ensure BAL captures SELFDESTRUCT during initcode where beneficiary is self | Initcode executes `SELFDESTRUCT(ADDRESS)` during CREATE, before any code is deployed. Contract has nonce=1 (post-EIP-161), making it non-empty. Always warm (executing contract), no G_NEW_ACCOUNT (nonce > 0). Gas boundary testing not possible (CREATE uses all available gas). Parametrized: originator_balance (0/1). File: `tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py`. | Contract created and destroyed in same tx - victim has empty BAL changes. Caller has `nonce_changes` (incremented by CREATE) and `balance_changes` if originator had balance. Victim is NONEXISTENT in post state. | ✅ Completed | | `test_bal_account_access_target` | Ensure BAL captures target addresses of account access opcodes | Alice calls `Oracle` contract which uses account access opcodes (`BALANCE`, `EXTCODESIZE`, `EXTCODECOPY`, `EXTCODEHASH`, `CALL`, `CALLCODE`, `DELEGATECALL`, `STATICCALL`) on `TargetContract`. | BAL MUST include Alice, `Oracle`, and `TargetContract` with empty changes for `TargetContract` and nonce changes for Alice. | ✅ Completed | | `test_bal_call_no_delegation_and_oog_before_target_access` | Ensure BAL handles OOG before target access and success for non-delegated CALL | Parametrized: target warm/cold, target empty/existing, value 0/1, memory expansion, OOG boundary (before_target_access/success). | OOG: target in BAL ONLY if pre-warmed. Success: target always in BAL with balance changes when value > 0. | ✅ Completed | | `test_bal_call_no_delegation_oog_after_target_access` | Ensure BAL includes target but excludes value transfer when OOG after target access | Hardcoded: empty target, value=1 (required for create_cost gap). Parametrized: warm/cold, memory expansion. | Target always in BAL. No balance changes (value transfer fails after G_NEW_ACCOUNT check). | ✅ Completed | diff --git a/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py b/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py index 4ad8e50fe0..5996b50b3e 100644 --- a/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py +++ b/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py @@ -1,7 +1,7 @@ """ Tests for EIP-150 SELFDESTRUCT operation gas costs. 
-EIP-150 introduced the 5000 gas cost for SELFDESTRUCT and precise gas +EIP-150 introduced G_SELF_DESTRUCT for SELFDESTRUCT and precise gas boundaries for state access during the operation. """ @@ -50,8 +50,9 @@ def calculate_selfdestruct_gas( """Calculate exact gas needed for SELFDESTRUCT.""" gas_costs = fork.gas_costs() gas = ( + # PUSH + SELFDESTRUCT gas_costs.G_VERY_LOW + gas_costs.G_SELF_DESTRUCT - ) # PUSH + SELFDESTRUCT + ) # Cold access cost (>=Berlin only) if fork >= Berlin and not beneficiary_warm: @@ -101,8 +102,7 @@ def setup_selfdestruct_test( ), ) caller = pre.deploy_contract( - code=factory_code, - balance=originator_balance, + code=factory_code, balance=originator_balance ) victim = compute_create_address(address=caller, nonce=1) else: @@ -110,8 +110,9 @@ def setup_selfdestruct_test( victim = pre.deploy_contract( code=victim_code, balance=originator_balance ) - caller_code = Op.CALL(gas=inner_call_gas, address=victim) - caller = pre.deploy_contract(code=caller_code) + caller = pre.deploy_contract( + code=Op.CALL(gas=inner_call_gas, address=victim) + ) # Warm beneficiary via access list (>=Berlin only, # doesn't add to BAL >= Amsterdam) @@ -177,33 +178,21 @@ def build_bal_expectations( else: # OOG: CREATE succeeded but SELFDESTRUCT failed # Only include balance_changes if originator_balance > 0 + victim_expectation = BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + code_changes=[ + BalCodeChange( + block_access_index=1, new_code=bytes(victim_code) + ) + ], + ) if originator_balance > 0: - victim_expectation = BalAccountExpectation( - nonce_changes=[ - BalNonceChange(block_access_index=1, post_nonce=1) - ], - balance_changes=[ - BalBalanceChange( - block_access_index=1, - post_balance=originator_balance, - ) - ], - code_changes=[ - BalCodeChange( - block_access_index=1, new_code=bytes(victim_code) - ) - ], - ) - else: - victim_expectation = BalAccountExpectation( - nonce_changes=[ - BalNonceChange(block_access_index=1, post_nonce=1) - ], - code_changes=[ - BalCodeChange( - block_access_index=1, new_code=bytes(victim_code) - ) - ], + victim_expectation.balance_changes.append( + BalBalanceChange( + block_access_index=1, post_balance=originator_balance + ) ) else: if success and originator_balance > 0: @@ -217,36 +206,27 @@ def build_bal_expectations( # Caller expectation if same_tx: - # Only include balance_changes if originator_balance > 0 + caller_expectation = BalAccountExpectation( + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=2)], + ) if originator_balance > 0: - caller_expectation = BalAccountExpectation( - nonce_changes=[ - BalNonceChange(block_access_index=1, post_nonce=2) - ], - balance_changes=[ - BalBalanceChange(block_access_index=1, post_balance=0) - ], - ) - else: - caller_expectation = BalAccountExpectation( - nonce_changes=[ - BalNonceChange(block_access_index=1, post_nonce=2) - ], + caller_expectation.balance_changes.append( + BalBalanceChange(block_access_index=1, post_balance=0) ) else: caller_expectation = BalAccountExpectation.empty() - account_expectations: Dict[Address, BalAccountExpectation | None] = { - alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)], - ), - caller: caller_expectation, - victim: victim_expectation, - beneficiary: beneficiary_expectation, - } - return BlockAccessListExpectation( - account_expectations=account_expectations + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[ + 
BalNonceChange(block_access_index=1, post_nonce=1) + ], + ), + caller: caller_expectation, + victim: victim_expectation, + beneficiary: beneficiary_expectation, + } ) @@ -314,6 +294,9 @@ def build_post_state( return post +# --- tests --- # + + @pytest.mark.parametrize( "is_success", [True, False], ids=["exact_gas", "exact_gas_minus_1"] ) @@ -787,7 +770,152 @@ def test_selfdestruct_to_precompile_state_access_boundary( ) -# --- SELFDESTRUCT to self tests --- # +@pytest.mark.parametrize( + "is_success", [True, False], ids=["exact_gas", "exact_gas_minus_1"] +) +@pytest.mark.with_all_system_contracts +@pytest.mark.parametrize( + "same_tx", [False, True], ids=["pre_deploy", "same_tx"] +) +@pytest.mark.parametrize( + "originator_balance", + [0, 1], + ids=["no_balance", "has_balance"], +) +@pytest.mark.valid_from("Cancun") +def test_selfdestruct_to_system_contract( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + is_success: bool, + system_contract: Address, + same_tx: bool, + originator_balance: int, +) -> None: + """ + Test SELFDESTRUCT success boundary for system contract beneficiaries. + + System contracts are always warm (no cold access charge) and always have + code (so beneficiary is never dead, no G_NEW_ACCOUNT charge). + + - exact_gas: succeeds, balance transferred + - exact_gas_minus_1: OOG, operation fails + """ + # Calculate exact gas for success + # System contracts are always warm and never dead (have code) + inner_call_gas = calculate_selfdestruct_gas( + fork, + beneficiary_warm=True, + beneficiary_dead=False, + originator_balance=originator_balance, + ) + if not is_success: + inner_call_gas -= 1 + + alice, caller, victim, tx = setup_selfdestruct_test( + pre, + fork, + system_contract, + originator_balance, + same_tx, + beneficiary_warm=True, + inner_call_gas=inner_call_gas, + ) + + # Build minimal BAL expectations for test-specific accounts only + expected_bal: BlockAccessListExpectation | None = None + if fork.header_bal_hash_required(): + account_expectations: Dict[Address, BalAccountExpectation | None] = { + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + ), + } + + # Victim expectation + if same_tx: + if is_success: + # Created and destroyed in same tx - no net changes + victim_expectation = BalAccountExpectation.empty() + else: + # OOG: contract created but selfdestruct failed + victim_expectation = BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + code_changes=[ + BalCodeChange( + block_access_index=1, + new_code=bytes(Op.SELFDESTRUCT(system_contract)), + ) + ], + ) + if originator_balance > 0: + victim_expectation.balance_changes.append( + BalBalanceChange( + block_access_index=1, + post_balance=originator_balance, + ) + ) + # Caller nonce incremented for CREATE + caller_expectation = BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=2) + ], + ) + if originator_balance > 0 and is_success: + caller_expectation.balance_changes.append( + BalBalanceChange(block_access_index=1, post_balance=0) + ) + account_expectations[caller] = caller_expectation + else: + # Pre-existing victim + if is_success and originator_balance > 0: + victim_expectation = BalAccountExpectation( + balance_changes=[ + BalBalanceChange(block_access_index=1, post_balance=0) + ], + ) + else: + victim_expectation = BalAccountExpectation.empty() + account_expectations[caller] = BalAccountExpectation.empty() + + account_expectations[victim] = 
victim_expectation + + # System contract receives balance if success and originator + # had balance + if is_success and originator_balance > 0: + account_expectations[system_contract] = BalAccountExpectation( + balance_changes=[ + BalBalanceChange( + block_access_index=1, post_balance=originator_balance + ) + ], + ) + + expected_bal = BlockAccessListExpectation( + account_expectations=account_expectations + ) + + post = build_post_state( + fork, + alice, + caller, + victim, + system_contract, + originator_balance, + beneficiary_initial_balance=0, + same_tx=same_tx, + success=is_success, + beneficiary_has_code=True, + ) + + blockchain_test( + pre=pre, + blocks=[Block(txs=[tx], expected_block_access_list=expected_bal)], + post=post, + ) @pytest.mark.parametrize( @@ -817,7 +945,7 @@ def test_selfdestruct_to_self( Key characteristics: - Beneficiary is always warm (it's the executing contract) - - Beneficiary is always alive (has code) + - Beneficiary is always alive (EIP-161 nonce=1) - No G_NEW_ACCOUNT charge - No cold access charge (>=Berlin) - Balance is "transferred" to self (no net change until destruction) @@ -834,7 +962,7 @@ def test_selfdestruct_to_self( victim_code = Op.SELFDESTRUCT(Op.ADDRESS) # Gas: ADDRESS + SELFDESTRUCT (no cold access, no G_NEW_ACCOUNT) - # Note: ADDRESS opcode costs G_BASE (2), not G_VERY_LOW (3) like PUSH + # Note: ADDRESS opcode costs G_BASE, not G_VERY_LOW like PUSH gas_costs = fork.gas_costs() base_gas = gas_costs.G_BASE + gas_costs.G_SELF_DESTRUCT inner_call_gas = base_gas if is_success else base_gas - 1 @@ -976,3 +1104,88 @@ def test_selfdestruct_to_self( blocks=[Block(txs=[tx], expected_block_access_list=expected_bal)], post=post, ) + + +@pytest.mark.parametrize( + "originator_balance", + [0, 1], + ids=["no_balance", "has_balance"], +) +@pytest.mark.valid_from("TangerineWhistle") +def test_initcode_selfdestruct_to_self( + pre: Alloc, + blockchain_test: BlockchainTestFiller, + fork: Fork, + originator_balance: int, +) -> None: + """ + Test SELFDESTRUCT during initcode execution where beneficiary is self. + + Unlike test_selfdestruct_to_self, this tests the case where the initcode + itself executes SELFDESTRUCT(ADDRESS) during contract creation, before + any code is deployed. + + Key characteristics: + - During initcode, the contract has no code yet + - Contract has nonce=1 (post-EIP-161) making it non-empty + - Beneficiary is always warm (it's the executing contract) + - No G_NEW_ACCOUNT charge (contract has nonce > 0) + - No cold access charge (>=Berlin) + + Note: Gas boundary testing not possible for initcode since CREATE + doesn't accept a gas parameter - it uses all available gas. 
+ """ + alice = pre.fund_eoa() + initcode = Op.SELFDESTRUCT(Op.ADDRESS) + initcode_len = len(initcode) + + factory_code = Om.MSTORE(initcode, 0) + Op.CREATE( + value=originator_balance, offset=0, size=initcode_len + ) + caller = pre.deploy_contract(code=factory_code, balance=originator_balance) + victim = compute_create_address(address=caller, nonce=1) + + tx = Transaction( + sender=alice, + to=caller, + gas_limit=500_000, + protected=fork.supports_protected_txs(), + ) + + # Build BAL expectations + expected_bal: BlockAccessListExpectation | None = None + if fork.header_bal_hash_required(): + # Contract created and immediately destroyed - no net changes + # for victim + caller_expectation = BalAccountExpectation( + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=2)], + ) + if originator_balance > 0: + caller_expectation.balance_changes.append( + BalBalanceChange(block_access_index=1, post_balance=0) + ) + + expected_bal = BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + ), + caller: caller_expectation, + victim: BalAccountExpectation.empty(), + } + ) + + # Contract was created and destroyed in same tx + post: dict = { + alice: Account(nonce=1), + caller: Account(nonce=2), + victim: Account.NONEXISTENT, + } + + blockchain_test( + pre=pre, + blocks=[Block(txs=[tx], expected_block_access_list=expected_bal)], + post=post, + ) From a0ae1dbdb8839eba6308d5b6d50f1368ffcd5447 Mon Sep 17 00:00:00 2001 From: fselmo Date: Thu, 8 Jan 2026 18:44:47 -0700 Subject: [PATCH 071/154] feat(test-commands): Allow pytest valid fork markers as params; add unit tests --- .../pytest_commands/plugins/forks/forks.py | 51 +++++ .../forks/tests/test_bad_validity_markers.py | 70 +++++++ .../plugins/forks/tests/test_markers.py | 179 ++++++++++++++++++ .../test_eip150_selfdestruct.py | 16 +- 4 files changed, 311 insertions(+), 5 deletions(-) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py index 1c929709ec..6544826705 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py @@ -1208,3 +1208,54 @@ def parametrize_fork( metafunc.parametrize( param_names, param_values, scope="function", indirect=indirect ) + + +def pytest_collection_modifyitems( + config: pytest.Config, items: List[pytest.Item] +) -> None: + """ + Filter tests based on param-level validity markers. + + The pytest_generate_tests hook only considers function-level validity markers. + This hook runs after parametrization and can access all markers including + param-level ones, allowing us to properly filter tests based on param-level + valid_from/valid_until markers. 
+ """ + items_to_remove = [] + + for i, item in enumerate(items): + # Get fork from params if available + params = None + if hasattr(item, "callspec"): + params = item.callspec.params + elif hasattr(item, "params"): + params = item.params + + if not params or "fork" not in params or params["fork"] is None: + continue + + fork: Fork = params["fork"] + + # Get all markers including param-level ones + markers = item.iter_markers() + + # Calculate valid fork set from all markers + # If this raises (e.g., duplicate markers from combining function-level + # and param-level), exit immediately with error + try: + valid_fork_set = ValidityMarker.get_test_fork_set_from_markers( + markers + ) + except Exception as e: + pytest.exit( + f"Error in test '{item.name}': {e}", + returncode=pytest.ExitCode.USAGE_ERROR, + ) + + # If the fork is not in the valid set, mark for removal + if fork not in valid_fork_set: + items_to_remove.append(i) + + # Remove items in reverse order to maintain indices + for i in reversed(items_to_remove): + del items[i] diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_bad_validity_markers.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_bad_validity_markers.py index 6749591d54..4e8aa2d4be 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_bad_validity_markers.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_bad_validity_markers.py @@ -236,3 +236,73 @@ def test_invalid_validity_markers( errors=1, ) assert error_string in "\n".join(result.stdout.lines) + + +# --- Tests for param-level marker errors --- # + + +param_level_marker_error_test_cases = ( + ( + "param_level_valid_from_with_function_level_valid_from", + ( + """ + import pytest + @pytest.mark.parametrize( + "value", + [ + pytest.param(True, marks=pytest.mark.valid_from("Paris")), + ], + ) + @pytest.mark.valid_from("Berlin") + def test_case(state_test, value): + assert 1 + """, + "Too many 'valid_from' markers applied to test", + ), + ), + ( + "param_level_valid_until_with_function_level_valid_until", + ( + """ + import pytest + @pytest.mark.parametrize( + "value", + [ + pytest.param(True, marks=pytest.mark.valid_until("Cancun")), + ], + ) + @pytest.mark.valid_until("Prague") + def test_case(state_test, value): + assert 1 + """, + "Too many 'valid_until' markers applied to test", + ), + ), +) + + +@pytest.mark.parametrize( + "test_function, error_string", + [test_case for _, test_case in param_level_marker_error_test_cases], + ids=[test_id for test_id, _ in param_level_marker_error_test_cases], +) +def test_param_level_marker_errors( + pytester: pytest.Pytester, error_string: str, test_function: str +) -> None: + """ + Test that combining function-level and param-level validity markers + of the same type produces an error. + + Unlike function-level errors (caught during test generation), param-level + errors are caught during collection and cause pytest to exit immediately. 
+ """ + pytester.makepyfile(test_function) + pytester.copy_example( + name="src/execution_testing/cli/pytest_commands/pytest_ini_files/pytest-fill.ini" + ) + result = pytester.runpytest("-c", "pytest-fill.ini") + + # pytest.exit() causes the run to terminate with no test outcomes + assert result.ret != 0, "Expected non-zero exit code" + stdout = "\n".join(result.stdout.lines) + assert error_string in stdout, f"Expected '{error_string}' in output" diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py index fbbc1b3123..9379d90b40 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py @@ -222,3 +222,182 @@ def test_fork_markers( *pytest_args, ) result.assert_outcomes(**outcomes) + + +# --- Tests for param-level validity markers --- # + + +def generate_param_level_marker_test() -> str: + """Generate a test function with param-level fork validity markers.""" + return """ +import pytest + +@pytest.mark.parametrize( + "value", + [ + pytest.param( + True, + id="from_tangerine", + marks=pytest.mark.valid_from("TangerineWhistle"), + ), + pytest.param( + False, + id="from_paris", + marks=pytest.mark.valid_from("Paris"), + ), + ], +) +@pytest.mark.state_test_only +def test_param_level_valid_from(state_test, value): + pass +""" + + +def generate_param_level_valid_until_test() -> str: + """Generate a test function with param-level valid_until markers.""" + return """ +import pytest + +@pytest.mark.parametrize( + "value", + [ + pytest.param( + True, + id="until_cancun", + marks=pytest.mark.valid_until("Cancun"), + ), + pytest.param( + False, + id="until_paris", + marks=pytest.mark.valid_until("Paris"), + ), + ], +) +@pytest.mark.state_test_only +def test_param_level_valid_until(state_test, value): + pass +""" + + +def generate_param_level_mixed_test() -> str: + """Generate a test with both function-level and param-level markers.""" + return """ +import pytest + +@pytest.mark.parametrize( + "value", + [ + pytest.param( + True, + id="all_forks", + marks=pytest.mark.valid_from("TangerineWhistle"), + ), + pytest.param( + False, + id="paris_only", + marks=pytest.mark.valid_from("Paris"), + ), + ], +) +@pytest.mark.valid_until("Cancun") +@pytest.mark.state_test_only +def test_mixed_function_and_param_markers(state_test, value): + pass +""" + + +@pytest.mark.parametrize( + "test_function,pytest_args,outcomes", + [ + pytest.param( + generate_param_level_marker_test(), + ["--from=Paris", "--until=Cancun"], + # from_tangerine: Paris, Shanghai, Cancun = 3 forks + # from_paris: Paris, Shanghai, Cancun = 3 forks + # Total: 6 tests + {"passed": 6, "failed": 0, "skipped": 0, "errors": 0}, + id="param_level_valid_from_paris_to_cancun", + ), + pytest.param( + generate_param_level_marker_test(), + ["--from=Berlin", "--until=Shanghai"], + # from_tangerine: Berlin, London, Paris, Shanghai = 4 forks + # from_paris: Paris, Shanghai = 2 forks + # Total: 6 tests + {"passed": 6, "failed": 0, "skipped": 0, "errors": 0}, + id="param_level_valid_from_berlin_to_shanghai", + ), + pytest.param( + generate_param_level_marker_test(), + ["--from=Berlin", "--until=London"], + # from_tangerine: Berlin, London = 2 forks + # from_paris: none (Paris > London) + # Total: 2 tests + {"passed": 2, "failed": 0, "skipped": 0, "errors": 0}, + 
id="param_level_valid_from_berlin_to_london", + ), + pytest.param( + generate_param_level_valid_until_test(), + ["--from=Paris", "--until=Prague"], + # until_cancun: Paris, Shanghai, Cancun = 3 forks + # until_paris: Paris = 1 fork + # Total: 4 tests + {"passed": 4, "failed": 0, "skipped": 0, "errors": 0}, + id="param_level_valid_until_paris_to_prague", + ), + pytest.param( + generate_param_level_valid_until_test(), + ["--from=Shanghai", "--until=Prague"], + # until_cancun: Shanghai, Cancun = 2 forks + # until_paris: none (Shanghai > Paris) + # Total: 2 tests + {"passed": 2, "failed": 0, "skipped": 0, "errors": 0}, + id="param_level_valid_until_shanghai_to_prague", + ), + pytest.param( + generate_param_level_mixed_test(), + ["--from=Berlin", "--until=Prague"], + # Function marker: valid_until("Cancun") limits to <= Cancun + # all_forks (TangerineWhistle): Berlin, London, Paris, Shanghai, Cancun = 5 + # paris_only: Paris, Shanghai, Cancun = 3 + # Total: 8 tests + {"passed": 8, "failed": 0, "skipped": 0, "errors": 0}, + id="mixed_markers_berlin_to_prague", + ), + pytest.param( + generate_param_level_mixed_test(), + ["--from=Paris", "--until=Shanghai"], + # Function marker: valid_until("Cancun") limits to <= Cancun + # Command line: --until=Shanghai further limits to <= Shanghai + # all_forks: Paris, Shanghai = 2 forks + # paris_only: Paris, Shanghai = 2 forks + # Total: 4 tests + {"passed": 4, "failed": 0, "skipped": 0, "errors": 0}, + id="mixed_markers_paris_to_shanghai", + ), + ], +) +def test_param_level_validity_markers( + pytester: pytest.Pytester, + test_function: str, + outcomes: dict, + pytest_args: List[str], +) -> None: + """ + Test param-level validity markers (valid_from, valid_until on pytest.param). + + The pytest_collection_modifyitems hook filters tests based on param-level + markers after parametrization, allowing different parameter values to have + different fork validity ranges. 
+ """ + pytester.makepyfile(test_function) + pytester.copy_example( + name="src/execution_testing/cli/pytest_commands/pytest_ini_files/pytest-fill.ini" + ) + result = pytester.runpytest( + "-c", + "pytest-fill.ini", + "-v", + *pytest_args, + ) + result.assert_outcomes(**outcomes) diff --git a/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py b/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py index 5996b50b3e..d2ba634cf4 100644 --- a/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py +++ b/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py @@ -29,7 +29,11 @@ from execution_testing import ( Macros as Om, ) -from execution_testing.forks import Berlin, Cancun, SpuriousDragon +from execution_testing.forks import ( + Berlin, + Cancun, + SpuriousDragon, +) from execution_testing.forks.helpers import Fork from .spec import ref_spec_150 @@ -306,7 +310,9 @@ def build_post_state( @pytest.mark.parametrize( "warm", [ - pytest.param(False, id="cold"), + pytest.param( + False, id="cold", marks=pytest.mark.valid_from("TangerineWhistle") + ), pytest.param(True, id="warm", marks=pytest.mark.valid_from("Berlin")), ], ) @@ -323,7 +329,6 @@ def build_post_state( [0, 1], ids=["dead_beneficiary", "alive_beneficiary"], ) -@pytest.mark.valid_from("TangerineWhistle") def test_selfdestruct_to_account( pre: Alloc, blockchain_test: BlockchainTestFiller, @@ -429,7 +434,9 @@ def test_selfdestruct_to_account( @pytest.mark.parametrize( "warm", [ - pytest.param(False, id="cold"), + pytest.param( + False, id="cold", marks=pytest.mark.valid_from("TangerineWhistle") + ), pytest.param(True, id="warm", marks=pytest.mark.valid_from("Berlin")), ], ) @@ -446,7 +453,6 @@ def test_selfdestruct_to_account( [0, 1], ids=["dead_beneficiary", "alive_beneficiary"], ) -@pytest.mark.valid_from("TangerineWhistle") def test_selfdestruct_state_access_boundary( pre: Alloc, blockchain_test: BlockchainTestFiller, From fe257174413bcef261e37d207cf4bb5060a3a494 Mon Sep 17 00:00:00 2001 From: fselmo Date: Mon, 12 Jan 2026 08:48:06 -0700 Subject: [PATCH 072/154] refactor(spec): Remove `check_gas` where unnecessary for instructions --- .../forks/amsterdam/vm/instructions/environment.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/src/ethereum/forks/amsterdam/vm/instructions/environment.py b/src/ethereum/forks/amsterdam/vm/instructions/environment.py index 79fd56cc3c..5ee84a3d49 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/environment.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/environment.py @@ -37,7 +37,6 @@ calculate_blob_gas_price, calculate_gas_extend_memory, charge_gas, - check_gas, ) from ..stack import pop, push @@ -81,9 +80,9 @@ def balance(evm: Evm) -> None: # GAS is_cold_access = address not in evm.accessed_addresses gas_cost = GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS - check_gas(evm, gas_cost) if is_cold_access: evm.accessed_addresses.add(address) + charge_gas(evm, gas_cost) # OPERATION @@ -351,9 +350,9 @@ def extcodesize(evm: Evm) -> None: access_gas_cost = ( GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS ) - check_gas(evm, access_gas_cost) if is_cold_access: evm.accessed_addresses.add(address) + charge_gas(evm, access_gas_cost) # OPERATION @@ -397,9 +396,9 @@ def extcodecopy(evm: Evm) -> None: ) total_gas_cost = access_gas_cost + copy_gas_cost + extend_memory.cost - check_gas(evm, total_gas_cost) if is_cold_access: 
evm.accessed_addresses.add(address) + charge_gas(evm, total_gas_cost) # OPERATION @@ -491,9 +490,9 @@ def extcodehash(evm: Evm) -> None: access_gas_cost = ( GAS_COLD_ACCOUNT_ACCESS if is_cold_access else GAS_WARM_ACCESS ) - check_gas(evm, access_gas_cost) if is_cold_access: evm.accessed_addresses.add(address) + charge_gas(evm, access_gas_cost) # OPERATION From aa9f2538a7d08c50d88d1433ed7d1074e8665d23 Mon Sep 17 00:00:00 2001 From: fselmo Date: Mon, 12 Jan 2026 16:23:57 -0700 Subject: [PATCH 073/154] fix(test-fill): organize alloc groups by pytest param (enginex) --- .../test_eip150_selfdestruct.py | 38 ++++++++++++++++--- 1 file changed, 32 insertions(+), 6 deletions(-) diff --git a/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py b/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py index d2ba634cf4..df3674129b 100644 --- a/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py +++ b/tests/tangerine_whistle/eip150_operation_gas_costs/test_eip150_selfdestruct.py @@ -565,7 +565,6 @@ def test_selfdestruct_state_access_boundary( "is_success", [True, False], ids=["exact_gas", "exact_gas_minus_1"] ) @pytest.mark.with_all_precompiles -@pytest.mark.pre_alloc_group("precompile_funding") @pytest.mark.parametrize( "same_tx", [False, True], ids=["pre_deploy", "same_tx"] ) @@ -576,8 +575,22 @@ def test_selfdestruct_state_access_boundary( ) @pytest.mark.parametrize( "beneficiary_initial_balance", - [0, 1], - ids=["dead_beneficiary", "alive_beneficiary"], + [ + pytest.param( + 0, + id="dead_beneficiary", + marks=pytest.mark.pre_alloc_group( + "eip150_selfdestruct_precompile_dead" + ), + ), + pytest.param( + 1, + id="alive_beneficiary", + marks=pytest.mark.pre_alloc_group( + "eip150_selfdestruct_precompile_alive" + ), + ), + ], ) @pytest.mark.valid_from("TangerineWhistle") def test_selfdestruct_to_precompile( @@ -673,7 +686,6 @@ def test_selfdestruct_to_precompile( "is_success", [True, False], ids=["exact_gas", "exact_gas_minus_1"] ) @pytest.mark.with_all_precompiles -@pytest.mark.pre_alloc_group("precompile_funding") @pytest.mark.parametrize( "same_tx", [False, True], ids=["pre_deploy", "same_tx"] ) @@ -684,8 +696,22 @@ def test_selfdestruct_to_precompile( ) @pytest.mark.parametrize( "beneficiary_initial_balance", - [0, 1], - ids=["dead_beneficiary", "alive_beneficiary"], + [ + pytest.param( + 0, + id="dead_beneficiary", + marks=pytest.mark.pre_alloc_group( + "eip150_selfdestruct_precompile_boundary_dead" + ), + ), + pytest.param( + 1, + id="alive_beneficiary", + marks=pytest.mark.pre_alloc_group( + "eip150_selfdestruct_precompile_boundary_alive" + ), + ), + ], ) @pytest.mark.valid_from("TangerineWhistle") def test_selfdestruct_to_precompile_state_access_boundary( From 4f6006c551c4e75606961391534afb060362b49a Mon Sep 17 00:00:00 2001 From: Guruprasad Kamath <48196632+gurukamath@users.noreply.github.com> Date: Tue, 13 Jan 2026 18:57:59 +0100 Subject: [PATCH 074/154] fix(tests): remove bal from block body (#2017) * tests(eip-7928): remove bal from block body * fix(tests): clean up json_infra and genesis bal in block * fix(tests): fix add_genesis_block empty bal hash --------- Co-authored-by: fselmo --- .../testing/src/execution_testing/fixtures/blockchain.py | 6 ------ .../testing/src/execution_testing/specs/blockchain.py | 8 -------- src/ethereum/genesis.py | 5 +---- tests/json_infra/helpers/load_blockchain_tests.py | 3 --- 4 files changed, 1 insertion(+), 21 deletions(-) diff --git 
a/packages/testing/src/execution_testing/fixtures/blockchain.py b/packages/testing/src/execution_testing/fixtures/blockchain.py index 14116d2803..a55c281039 100644 --- a/packages/testing/src/execution_testing/fixtures/blockchain.py +++ b/packages/testing/src/execution_testing/fixtures/blockchain.py @@ -554,9 +554,6 @@ def strip_block_number_computed_field(cls, data: Any) -> Any: ) withdrawals: List[FixtureWithdrawal] | None = None execution_witness: WitnessChunk | None = None - block_access_list: BlockAccessList | None = Field( - None, description="EIP-7928 Block Access List" - ) fork: Fork | None = Field(None, exclude=True) @computed_field(alias="blocknumber") # type: ignore[prop-decorator] @@ -578,9 +575,6 @@ def with_rlp(self, txs: List[Transaction]) -> "FixtureBlock": if self.withdrawals is not None: block.append([w.to_serializable_list() for w in self.withdrawals]) - if self.block_access_list is not None: - block.append(self.block_access_list.to_list()) - return FixtureBlock( **self.model_dump(), rlp=eth_rlp.encode(block), diff --git a/packages/testing/src/execution_testing/specs/blockchain.py b/packages/testing/src/execution_testing/specs/blockchain.py index aef020049b..06d6bebfc9 100644 --- a/packages/testing/src/execution_testing/specs/blockchain.py +++ b/packages/testing/src/execution_testing/specs/blockchain.py @@ -380,9 +380,6 @@ def get_fixture_block(self) -> FixtureBlock | InvalidFixtureBlock: if self.withdrawals is not None else None ), - block_access_list=self.block_access_list - if self.block_access_list - else None, fork=self.fork, ).with_rlp(txs=self.txs) @@ -563,16 +560,11 @@ def make_genesis( state_root = pre_alloc.state_root() genesis = FixtureHeader.genesis(self.fork, env, state_root) - genesis_bal = None - if self.fork.header_bal_hash_required(block_number=0, timestamp=0): - genesis_bal = BlockAccessList() - return ( pre_alloc, FixtureBlockBase( header=genesis, withdrawals=None if env.withdrawals is None else [], - block_access_list=genesis_bal, ).with_rlp(txs=[]), ) diff --git a/src/ethereum/genesis.py b/src/ethereum/genesis.py index 7ba79d6c26..444302ffeb 100644 --- a/src/ethereum/genesis.py +++ b/src/ethereum/genesis.py @@ -260,7 +260,7 @@ def add_genesis_block( fields["requests_hash"] = Hash32(b"\0" * 32) if has_field(hardfork.Header, "block_access_list_hash"): - fields["block_access_list_hash"] = Hash32(b"\0" * 32) + fields["block_access_list_hash"] = keccak256(rlp.encode([])) genesis_header = hardfork.Header(**fields) @@ -276,9 +276,6 @@ def add_genesis_block( if has_field(hardfork.Block, "requests"): block_fields["requests"] = () - if has_field(hardfork.Block, "block_access_list"): - block_fields["block_access_list"] = rlp.encode([]) - genesis_block = hardfork.Block(**block_fields) chain.blocks.append(genesis_block) diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py index 73cf2c662d..ec82b42f47 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -138,9 +138,6 @@ def runtest(self) -> None: if hasattr(genesis_header, "requests_root"): parameters.append(()) - if hasattr(genesis_header, "block_access_list_hash"): - parameters.append([]) - genesis_block = load.fork.Block(*parameters) genesis_header_hash = hex_to_bytes( From a378636064f636ed59cbb7f10631adc4109326d4 Mon Sep 17 00:00:00 2001 From: felipe Date: Wed, 14 Jan 2026 22:03:42 -0700 Subject: [PATCH 075/154] feat(spec-tests): Update json_infra tests version (#1939) --- 
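Aside on the `add_genesis_block` change in patch 074 above: `block_access_list_hash` now commits to an empty RLP list rather than 32 zero bytes. A quick illustrative check, not part of the patch, assuming the same `ethereum_rlp` and `keccak256` helpers that `genesis.py` already uses:

from ethereum_rlp import rlp
from ethereum.crypto.hash import keccak256

# An empty BAL serializes to the RLP empty list (a single 0xc0 byte), so the
# genesis header commits to the well-known empty-list digest, the same value
# used for an empty ommers list.
assert rlp.encode([]) == b"\xc0"
empty_bal_hash = keccak256(rlp.encode([]))
assert empty_bal_hash.hex() == (
    "1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"
)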
tests/json_infra/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/json_infra/__init__.py b/tests/json_infra/__init__.py index 6fa7c5dbbe..605a843a16 100644 --- a/tests/json_infra/__init__.py +++ b/tests/json_infra/__init__.py @@ -31,7 +31,7 @@ class _FixtureSource(TypedDict): "fixture_path": "tests/json_infra/fixtures/latest_fork_tests", }, "amsterdam_tests": { - "url": "https://github.com/ethereum/execution-spec-tests/releases/download/bal%40v1.8.0/fixtures_bal.tar.gz", + "url": "https://github.com/ethereum/execution-spec-tests/releases/download/bal%40v3.0.1/fixtures_bal.tar.gz", "fixture_path": "tests/json_infra/fixtures/amsterdam_tests", }, } From 031306387db11c2dcdb304c62b74d5f38bfaf820 Mon Sep 17 00:00:00 2001 From: fselmo Date: Thu, 15 Jan 2026 11:40:32 -0700 Subject: [PATCH 076/154] chore: clean up based on comments on PR #1719 - Add CHANGELOG entry --- docs/CHANGELOG.md | 2 + .../consume/simulators/helpers/ruleset.py | 3 + .../test_types/block_access_list/modifiers.py | 85 ++++++++----- .../amsterdam/block_access_lists/__init__.py | 2 - .../amsterdam/block_access_lists/rlp_utils.py | 115 +----------------- src/ethereum/forks/amsterdam/blocks.py | 8 +- src/ethereum/forks/amsterdam/state_tracker.py | 10 +- .../test_block_access_lists_invalid.py | 6 +- .../create/test_create_deposit_oog.py | 1 - 9 files changed, 72 insertions(+), 160 deletions(-) diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 4dae0d4ff7..9e7d567cd0 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -29,6 +29,8 @@ Test fixtures for use by clients are available for each release on the [Github r ### 📋 Misc +- ✨ Implement EIP-7928 Block-Level Access Lists ([#1719](https://github.com/ethereum/execution-specs/pull/1719)). + ### 🧪 Test Cases - ✨ Add missing fuzzy-compute benchmark configurations for `KECCAK256`, `CODECOPY`, `CALLDATACOPY`, `RETURNDATACOPY`, `MLOAD`, `MSTORE`, `MSTORE8`, `MCOPY`, `LOG*`, `CALLDATASIZE`, `CALLDATALOAD`, and `RETURNDATASIZE` opcodes ([#1956](https://github.com/ethereum/execution-specs/pull/1956)). 
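The EIP-7928 changelog entry above lands alongside the earlier removal of the BAL from the block body (patch 074, where `with_rlp` stops appending `block_access_list.to_list()`): the access list is now committed to only through the header hash. A toy illustration of why the two body encodings cannot be mixed, using hypothetical stand-in values and assuming only the `ethereum_rlp` dependency:

from ethereum_rlp import rlp

header_fields = [b"\x01", b"\x02"]  # stand-in for the encoded header list
txs: list = []                      # no transactions in this toy block
withdrawals: list = []

body_without_bal = rlp.encode([header_fields, txs, withdrawals])
body_with_bal = rlp.encode([header_fields, txs, withdrawals, []])

# Appending even an empty BAL changes the block RLP, and therefore the block
# hash, which is why fixtures and the genesis block must agree on omitting it.
assert body_without_bal != body_with_bal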
diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/ruleset.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/ruleset.py index bcd8cd891d..87d3783ce9 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/ruleset.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/ruleset.py @@ -296,6 +296,7 @@ def get_blob_schedule_entries(fork: Fork) -> Dict[str, int]: "HIVE_CANCUN_TIMESTAMP": 0, "HIVE_PRAGUE_TIMESTAMP": 0, "HIVE_OSAKA_TIMESTAMP": 0, + **get_blob_schedule_entries(Osaka), }, PragueToOsakaAtTime15k: { "HIVE_FORK_HOMESTEAD": 0, @@ -313,6 +314,7 @@ def get_blob_schedule_entries(fork: Fork) -> Dict[str, int]: "HIVE_CANCUN_TIMESTAMP": 0, "HIVE_PRAGUE_TIMESTAMP": 0, "HIVE_OSAKA_TIMESTAMP": 15000, + **get_blob_schedule_entries(Osaka), }, BPO1: { "HIVE_FORK_HOMESTEAD": 0, @@ -502,5 +504,6 @@ def get_blob_schedule_entries(fork: Fork) -> Dict[str, int]: # "HIVE_BPO3_TIMESTAMP": 0, # "HIVE_BPO4_TIMESTAMP": 0, "HIVE_AMSTERDAM_TIMESTAMP": 0, + **get_blob_schedule_entries(Amsterdam), }, } diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py b/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py index 6638be94e1..ddad9605d4 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py @@ -236,15 +236,15 @@ def modify_code( "code_changes", BalCodeChange, code, - "post_code", + "new_code", ) -def swap_tx_indices( - tx1: int, tx2: int +def swap_bal_indices( + idx1: int, idx2: int ) -> Callable[[BlockAccessList], BlockAccessList]: - """Swap transaction indices throughout the BAL, modifying tx ordering.""" - nonce_indices = {tx1: False, tx2: False} + """Swap block access indices throughout the BAL, modifying ordering.""" + nonce_indices = {idx1: False, idx2: False} balance_indices = nonce_indices.copy() storage_indices = nonce_indices.copy() code_indices = nonce_indices.copy() @@ -258,44 +258,44 @@ def transform(bal: BlockAccessList) -> BlockAccessList: # Swap in nonce changes if new_account.nonce_changes: for nonce_change in new_account.nonce_changes: - if nonce_change.block_access_index == tx1: - nonce_indices[tx1] = True + if nonce_change.block_access_index == idx1: + nonce_indices[idx1] = True nonce_change.block_access_index = ZeroPaddedHexNumber( - tx2 + idx2 ) - elif nonce_change.block_access_index == tx2: - nonce_indices[tx2] = True + elif nonce_change.block_access_index == idx2: + nonce_indices[idx2] = True nonce_change.block_access_index = ZeroPaddedHexNumber( - tx1 + idx1 ) # Swap in balance changes if new_account.balance_changes: for balance_change in new_account.balance_changes: - if balance_change.block_access_index == tx1: - balance_indices[tx1] = True + if balance_change.block_access_index == idx1: + balance_indices[idx1] = True balance_change.block_access_index = ( - ZeroPaddedHexNumber(tx2) + ZeroPaddedHexNumber(idx2) ) - elif balance_change.block_access_index == tx2: - balance_indices[tx2] = True + elif balance_change.block_access_index == idx2: + balance_indices[idx2] = True balance_change.block_access_index = ( - ZeroPaddedHexNumber(tx1) + ZeroPaddedHexNumber(idx1) ) # Swap in storage changes (nested structure) if new_account.storage_changes: for storage_slot in new_account.storage_changes: for 
storage_change in storage_slot.slot_changes: - if storage_change.block_access_index == tx1: - balance_indices[tx1] = True + if storage_change.block_access_index == idx1: + storage_indices[idx1] = True storage_change.block_access_index = ( - ZeroPaddedHexNumber(tx2) + ZeroPaddedHexNumber(idx2) ) - elif storage_change.block_access_index == tx2: - balance_indices[tx2] = True + elif storage_change.block_access_index == idx2: + storage_indices[idx2] = True storage_change.block_access_index = ( - ZeroPaddedHexNumber(tx1) + ZeroPaddedHexNumber(idx1) ) # Note: storage_reads is just a list of StorageKey, no block_access_index to @@ -304,19 +304,42 @@ def transform(bal: BlockAccessList) -> BlockAccessList: # Swap in code changes if new_account.code_changes: for code_change in new_account.code_changes: - if code_change.block_access_index == tx1: - code_indices[tx1] = True + if code_change.block_access_index == idx1: + code_indices[idx1] = True code_change.block_access_index = ZeroPaddedHexNumber( - tx2 + idx2 ) - elif code_change.block_access_index == tx2: - code_indices[tx2] = True + elif code_change.block_access_index == idx2: + code_indices[idx2] = True code_change.block_access_index = ZeroPaddedHexNumber( - tx1 + idx1 ) new_root.append(new_account) + # Validate that at least one swap occurred for each index across all change types + idx1_found = ( + nonce_indices[idx1] + or balance_indices[idx1] + or storage_indices[idx1] + or code_indices[idx1] + ) + idx2_found = ( + nonce_indices[idx2] + or balance_indices[idx2] + or storage_indices[idx2] + or code_indices[idx2] + ) + + if not idx1_found: + raise ValueError( + f"Block access index {idx1} not found in any BAL changes to swap" + ) + if not idx2_found: + raise ValueError( + f"Block access index {idx2} not found in any BAL changes to swap" + ) + return BlockAccessList(root=new_root) return transform @@ -555,6 +578,6 @@ def transform(bal: BlockAccessList) -> BlockAccessList: "modify_balance", "modify_storage", "modify_code", - # Transaction index modifiers - "swap_tx_indices", + # Block access index modifiers + "swap_bal_indices", ] diff --git a/src/ethereum/forks/amsterdam/block_access_lists/__init__.py b/src/ethereum/forks/amsterdam/block_access_lists/__init__.py index a83523861a..8c3fef14a0 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/__init__.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/__init__.py @@ -15,7 +15,6 @@ from .rlp_utils import ( compute_block_access_list_hash, rlp_encode_block_access_list, - validate_block_access_list_against_execution, ) __all__ = [ @@ -29,5 +28,4 @@ "build_block_access_list", "compute_block_access_list_hash", "rlp_encode_block_access_list", - "validate_block_access_list_against_execution", ] diff --git a/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py b/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py index 2cd5b827f3..cdbd1f4626 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py @@ -21,8 +21,7 @@ from ethereum.crypto.hash import Hash32, keccak256 -from .builder import BlockAccessListBuilder -from .rlp_types import MAX_CODE_SIZE, MAX_TXS, BlockAccessList +from .rlp_types import BlockAccessList def compute_block_access_list_hash( @@ -116,115 +115,3 @@ def rlp_encode_block_access_list(block_access_list: BlockAccessList) -> Bytes: encoded = rlp.encode(cast(Extended, account_changes_list)) return Bytes(encoded) - - -def validate_block_access_list_against_execution( - 
block_access_list: BlockAccessList, - block_access_list_builder: BlockAccessListBuilder | None = None, -) -> bool: - """ - Validate that a Block Access List is structurally correct and - optionally matches a builder's state. - - Parameters - ---------- - block_access_list : - The Block Access List to validate. - block_access_list_builder : - Optional Block Access List builder to validate against. - If provided, checks that the - Block Access List hash matches what would be built from - the builder's current state. - - Returns - ------- - valid : - True if the Block Access List is structurally valid and - matches the builder (if provided). - - """ - # 1. Validate structural constraints - - # Check that storage changes and reads don't overlap for the same slot - for account in block_access_list: - changed_slots = {sc.slot for sc in account.storage_changes} - read_slots = set(account.storage_reads) - - # A slot should not be in both changes and reads (per EIP-7928) - if changed_slots & read_slots: - return False - - # 2. Validate ordering (addresses should be sorted lexicographically) - addresses = [account.address for account in block_access_list] - if addresses != sorted(addresses): - return False - - # 3. Validate all data is within bounds - max_block_access_index = ( - MAX_TXS + 1 - ) # 0 for pre-exec, 1..MAX_TXS for txs, MAX_TXS+1 for post-exec - for account in block_access_list: - # Validate storage slots are sorted within each account - storage_slots = [sc.slot for sc in account.storage_changes] - if storage_slots != sorted(storage_slots): - return False - - # Check storage changes - for slot_changes in account.storage_changes: - # Check changes are sorted by block_access_index - indices = [c.block_access_index for c in slot_changes.changes] - if indices != sorted(indices): - return False - - for change in slot_changes.changes: - if int(change.block_access_index) > max_block_access_index: - return False - - # Check balance changes are sorted by block_access_index - balance_indices = [ - bc.block_access_index for bc in account.balance_changes - ] - if balance_indices != sorted(balance_indices): - return False - - for balance_change in account.balance_changes: - if int(balance_change.block_access_index) > max_block_access_index: - return False - - # Check nonce changes are sorted by block_access_index - nonce_indices = [nc.block_access_index for nc in account.nonce_changes] - if nonce_indices != sorted(nonce_indices): - return False - - for nonce_change in account.nonce_changes: - if int(nonce_change.block_access_index) > max_block_access_index: - return False - - # Check code changes are sorted by block_access_index - code_indices = [cc.block_access_index for cc in account.code_changes] - if code_indices != sorted(code_indices): - return False - - for code_change in account.code_changes: - if int(code_change.block_access_index) > max_block_access_index: - return False - if len(code_change.new_code) > MAX_CODE_SIZE: - return False - - # 4. 
If Block Access List builder provided, validate against it - # by comparing hashes - if block_access_list_builder is not None: - from .builder import _build_from_builder - - # Build a Block Access List from the builder - expected_block_access_list = _build_from_builder( - block_access_list_builder - ) - - # Compare hashes - if compute_block_access_list_hash( - block_access_list - ) != compute_block_access_list_hash(expected_block_access_list): - return False - - return True diff --git a/src/ethereum/forks/amsterdam/blocks.py b/src/ethereum/forks/amsterdam/blocks.py index 143b3d18fe..70038d54cd 100644 --- a/src/ethereum/forks/amsterdam/blocks.py +++ b/src/ethereum/forks/amsterdam/blocks.py @@ -244,13 +244,15 @@ class Header: block_access_list_hash: Hash32 """ - [SHA2-256] hash of the Block Access List containing all accounts and + [`keccak256`] hash of the Block Access List containing all accounts and storage locations accessed during block execution. Introduced in [EIP-7928]. See [`compute_block_access_list_hash`][cbalh] for more details. + + [`keccak256`]: ref:ethereum.crypto.hash.keccak256 [EIP-7928]: https://eips.ethereum.org/EIPS/eip-7928 - [cbalh]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_utils.compute_block_access_list_hash # noqa: E501 - """ + [cbalh]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_utils.compute_block_access_list_hash + """ # noqa: E501 @slotted_freezable diff --git a/src/ethereum/forks/amsterdam/state_tracker.py b/src/ethereum/forks/amsterdam/state_tracker.py index 3ed1360e62..189e088895 100644 --- a/src/ethereum/forks/amsterdam/state_tracker.py +++ b/src/ethereum/forks/amsterdam/state_tracker.py @@ -52,7 +52,6 @@ class StateChanges: # Pre-state captures (transaction-scoped, only populated at tx frame) pre_balances: Dict[Address, U256] = field(default_factory=dict) - pre_nonces: Dict[Address, U64] = field(default_factory=dict) pre_storage: Dict[Tuple[Address, Bytes32], U256] = field( default_factory=dict ) @@ -527,11 +526,10 @@ def filter_net_zero_frame_changes(tx_frame: StateChanges) -> None: assert (addr, key) in tx_frame.pre_storage pre_value = tx_frame.pre_storage[(addr, key)] post_value = tx_frame.storage_writes[(addr, key, idx)] - if (addr, key) in tx_frame.pre_storage: - if pre_value == post_value: - # Net-zero write - convert to read - del tx_frame.storage_writes[(addr, key, idx)] - tx_frame.storage_reads.add((addr, key)) + if pre_value == post_value: + # Net-zero write - convert to read + del tx_frame.storage_writes[(addr, key, idx)] + tx_frame.storage_reads.add((addr, key)) # Filter balance: compare pre vs post, remove if equal addresses_to_check_balance = [ diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_invalid.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_invalid.py index 788eb0c2a3..f819c64143 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_invalid.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_invalid.py @@ -37,7 +37,7 @@ remove_balances, remove_nonces, reverse_accounts, - swap_tx_indices, + swap_bal_indices, ) from .spec import ref_spec_7928 @@ -284,7 +284,7 @@ def test_bal_invalid_tx_order( ], ), } - ).modify(swap_tx_indices(1, 2)), + ).modify(swap_bal_indices(1, 2)), ) ], ) @@ -542,7 +542,7 @@ def test_bal_invalid_complex_corruption( contract, block_access_index=1, slot=0x01, value=0xFF ), remove_balances(receiver), - swap_tx_indices(1, 2), + swap_bal_indices(1, 2), ), ) ], diff --git 
a/tests/frontier/create/test_create_deposit_oog.py b/tests/frontier/create/test_create_deposit_oog.py index ba226bba82..fac46ba629 100644 --- a/tests/frontier/create/test_create_deposit_oog.py +++ b/tests/frontier/create/test_create_deposit_oog.py @@ -3,7 +3,6 @@ """ import pytest - from execution_testing import ( Account, Alloc, From 1d11d8d2e0ca104a4731e37aaa671fc19a6d66f1 Mon Sep 17 00:00:00 2001 From: Guruprasad Kamath <48196632+gurukamath@users.noreply.github.com> Date: Fri, 16 Jan 2026 17:30:38 +0100 Subject: [PATCH 077/154] tests(eip-7928): generalize eip-7934 tests (#2022) * tests(eip-7928): generalize eip-7934 tests * ci(eip-7928): run all amsterdam tests on py3 and pypy3 --------- Co-authored-by: fselmo --- docs/CHANGELOG.md | 1 + .../execution_testing/base_types/__init__.py | 2 + .../base_types/typing_utils.py | 33 +++++++++ .../execution_testing/fixtures/blockchain.py | 72 +++++++++++++++++++ .../src/execution_testing/forks/base_fork.py | 14 ++++ .../execution_testing/forks/forks/forks.py | 56 +++++++++++++++ .../test_max_block_rlp_size.py | 60 +++++----------- tox.ini | 2 +- 8 files changed, 198 insertions(+), 42 deletions(-) create mode 100644 packages/testing/src/execution_testing/base_types/typing_utils.py diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 9e7d567cd0..a703a9b485 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -79,6 +79,7 @@ Test fixtures for use by clients are available for each release on the [Github r - ✨ Add tests that EIP-1559 and EIP-2930 typed txs are invalid and void before their fork ([#1754](https://github.com/ethereum/execution-specs/pull/1754)). - ✨ Add tests for an old validation rule for gas limit above 5000 ([#1731](https://github.com/ethereum/execution-specs/pull/1731)). - ✨ Add tests for OOG in EXP, LOG and others ([#1686](https://github.com/ethereum/execution-specs/pull/1686)). +- ✨ Make EIP-7934 tests more dynamic and able to handle new header fields added in future forks ([#2022](https://github.com/ethereum/execution-specs/pull/2022)). ## [v5.3.0](https://github.com/ethereum/execution-spec-tests/releases/tag/v5.3.0) - 2025-10-09 diff --git a/packages/testing/src/execution_testing/base_types/__init__.py b/packages/testing/src/execution_testing/base_types/__init__.py index 9655d43ea5..7221fc1799 100644 --- a/packages/testing/src/execution_testing/base_types/__init__.py +++ b/packages/testing/src/execution_testing/base_types/__init__.py @@ -44,6 +44,7 @@ from .pydantic import CamelModel, EthereumTestBaseModel, EthereumTestRootModel from .reference_spec import ReferenceSpec from .serialization import RLPSerializable, SignableRLPSerializable +from .typing_utils import unwrap_annotation __all__ = ( "AccessList", @@ -88,4 +89,5 @@ "to_bytes", "to_hex", "to_json", + "unwrap_annotation", ) diff --git a/packages/testing/src/execution_testing/base_types/typing_utils.py b/packages/testing/src/execution_testing/base_types/typing_utils.py new file mode 100644 index 0000000000..18412663a3 --- /dev/null +++ b/packages/testing/src/execution_testing/base_types/typing_utils.py @@ -0,0 +1,33 @@ +"""Utilities for working with Python type annotations.""" + +from typing import Any, get_args + + +def unwrap_annotation(hint: Any) -> Any: + """ + Recursively unwrap Annotated and Union types to find the actual type. + + This function is useful for introspecting complex type annotations like: + - `Annotated[int, ...]` -> `int` + - `int | None` -> `int` + - `Annotated[int, ...] 
| None` -> `int` + + Args: + hint: Type annotation to unwrap + + Returns: + The unwrapped base type + """ + type_args = get_args(hint) + if not type_args: + # Base case: simple type with no parameters + return hint + + # For Union types (including Optional), find the first non-None type + for arg in type_args: + if arg is not type(None): + # Recursively unwrap (handles nested Annotated/Union) + return unwrap_annotation(arg) + + # All args were None (shouldn't happen in practice) + return hint diff --git a/packages/testing/src/execution_testing/fixtures/blockchain.py b/packages/testing/src/execution_testing/fixtures/blockchain.py index a55c281039..786d6dd34e 100644 --- a/packages/testing/src/execution_testing/fixtures/blockchain.py +++ b/packages/testing/src/execution_testing/fixtures/blockchain.py @@ -28,6 +28,7 @@ computed_field, model_validator, ) +from pydantic_core import PydanticUndefined from execution_testing.base_types import ( Address, @@ -42,6 +43,7 @@ HexNumber, Number, ZeroPaddedHexNumber, + unwrap_annotation, ) from execution_testing.exceptions import ( EngineAPIError, @@ -274,6 +276,76 @@ def block_hash(self) -> Hash: """Compute the RLP of the header.""" return self.rlp.keccak256() + @classmethod + def get_default_from_annotation( + cls, + fork: Fork, + field_name: str, + field_hint: Any, + block_number: int = 0, + timestamp: int = 0, + ) -> Any: + """ + Get appropriate default value for a header field based on its type hint. + + This method handles: + 1. Fork requirement checking - only returns a default if the fork requires the field + 2. Model-defined defaults - uses the field's default value if available + 3. Type-based defaults - constructs defaults based on the field type + + Args: + fork: Fork to check requirements against + field_name: Name of the field + field_hint: Type annotation of the field + block_number: Block number for fork requirement checking (default: 0) + timestamp: Timestamp for fork requirement checking (default: 0) + + Returns: + Default value appropriate for the field type, or None if + the field is not required by the fork + + Raises: + TypeError: If the field type is not supported and no default value + is defined in the model. This indicates that support for the type + needs to be added or an explicit default must be provided. 
+ """ + # Check if this field has a HeaderForkRequirement annotation + header_fork_requirement = HeaderForkRequirement.get_from_annotation( + field_hint + ) + if header_fork_requirement is not None: + # Only provide a default if the fork requires this field + if not header_fork_requirement.required(fork, block_number, timestamp): + return None + + # Check if the field has a default value defined in the model + if field_name in cls.model_fields: + field_info = cls.model_fields[field_name] + if field_info.default is not None and field_info.default is not PydanticUndefined: + return field_info.default + if field_info.default_factory is not None: + return field_info.default_factory() # type: ignore[call-arg] + + # Unwrap type annotations to get the actual type + actual_type = unwrap_annotation(field_hint) + + # Construct default based on type + if actual_type == ZeroPaddedHexNumber: + return ZeroPaddedHexNumber(0) + elif actual_type == Hash: + return Hash(0) + elif actual_type == Address: + return Address(0) + elif actual_type == Bytes: + return Bytes(b"") + else: + # Unsupported type - raise an error to catch this during development + raise TypeError( + f"Cannot generate default value for field '{field_name}' " + f"with unsupported type '{actual_type}'. " + f"Add support for this type or provide a default value explicitly." + ) + @classmethod def genesis(cls, fork: Fork, env: Environment, state_root: Hash) -> Self: """Get the genesis header for the given fork.""" diff --git a/packages/testing/src/execution_testing/forks/base_fork.py b/packages/testing/src/execution_testing/forks/base_fork.py index 8beba96ac3..584f92ba5d 100644 --- a/packages/testing/src/execution_testing/forks/base_fork.py +++ b/packages/testing/src/execution_testing/forks/base_fork.py @@ -2,6 +2,7 @@ from abc import ABCMeta, abstractmethod from typing import ( + TYPE_CHECKING, Any, Callable, ClassVar, @@ -18,6 +19,9 @@ Union, ) +if TYPE_CHECKING: + from execution_testing.fixtures.blockchain import FixtureHeader + from execution_testing.base_types import ( AccessList, Address, @@ -978,3 +982,13 @@ def non_bpo_ancestor(cls) -> Type["BaseFork"]: def children(cls) -> Set[Type["BaseFork"]]: """Return the children forks.""" return set(cls._children) + + @classmethod + @abstractmethod + def build_default_block_header( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> "FixtureHeader": + """ + Build a default block header for this fork with the given attributes. 
+ """ + pass diff --git a/packages/testing/src/execution_testing/forks/forks/forks.py b/packages/testing/src/execution_testing/forks/forks/forks.py index f8eeb4168a..3c8bb9976f 100644 --- a/packages/testing/src/execution_testing/forks/forks/forks.py +++ b/packages/testing/src/execution_testing/forks/forks/forks.py @@ -1,10 +1,13 @@ """All Ethereum fork class definitions.""" +from __future__ import annotations + from dataclasses import replace from hashlib import sha256 from os.path import realpath from pathlib import Path from typing import ( + TYPE_CHECKING, Callable, Dict, List, @@ -15,12 +18,17 @@ Tuple, ) +if TYPE_CHECKING: + from execution_testing.fixtures.blockchain import FixtureHeader + + from execution_testing.base_types import ( AccessList, Address, BlobSchedule, Bytes, ForkBlobSchedule, + ZeroPaddedHexNumber, ) from execution_testing.base_types.conversions import BytesConvertible from execution_testing.vm import ( @@ -1375,6 +1383,54 @@ def pre_allocation_blockchain( del block_number, timestamp return {} + @classmethod + def build_default_block_header( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> FixtureHeader: + """ + Build a default block header for this fork with the given attributes. + + This method automatically detects which header fields are required by the fork + and assigns appropriate default values. It introspects the FixtureHeader model + to find fields with HeaderForkRequirement annotations and automatically includes + them if the fork requires them. + + Args: + block_number: The block number + timestamp: The block timestamp + + Returns: + FixtureHeader instance with default values applied based on fork requirements + + Raises: + TypeError: If the overrides don't have the correct type. + """ + from execution_testing.fixtures.blockchain import FixtureHeader + + defaults = { + "number": ZeroPaddedHexNumber(block_number), + "timestamp": ZeroPaddedHexNumber(timestamp), + "fork": cls, + } + + # Iterate through FixtureHeader fields to populate defaults + for field_name, field_info in FixtureHeader.model_fields.items(): + if field_name in defaults: + continue + + # Get default value, checking fork requirements and model defaults + default_value = FixtureHeader.get_default_from_annotation( + fork=cls, + field_name=field_name, + field_hint=field_info.annotation, + block_number=int(block_number), + timestamp=int(timestamp), + ) + if default_value is not None: + defaults[field_name] = default_value + + return FixtureHeader(**defaults) + class Homestead(Frontier): """Homestead fork.""" diff --git a/tests/osaka/eip7934_block_rlp_limit/test_max_block_rlp_size.py b/tests/osaka/eip7934_block_rlp_limit/test_max_block_rlp_size.py index af7e3191ba..fb062f413f 100644 --- a/tests/osaka/eip7934_block_rlp_limit/test_max_block_rlp_size.py +++ b/tests/osaka/eip7934_block_rlp_limit/test_max_block_rlp_size.py @@ -27,7 +27,6 @@ ) from execution_testing.fixtures.blockchain import ( FixtureBlockBase, - FixtureHeader, FixtureWithdrawal, ) @@ -71,43 +70,22 @@ def block_errors() -> List[BlockException]: return [BlockException.RLP_BLOCK_LIMIT_EXCEEDED] -def create_test_header(gas_used: int) -> FixtureHeader: - """Create a standard test header for RLP size calculations.""" - return FixtureHeader( - difficulty="0x0", - number="0x1", - gas_limit=hex(BLOCK_GAS_LIMIT), - timestamp=hex(HEADER_TIMESTAMP), - fee_recipient="0x" + "00" * 20, - parent_hash="0x" + "00" * 32, - ommers_hash="0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", - 
state_root="0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", - transactions_trie="0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", - receipts_root="0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", - logs_bloom="0x" + "00" * 256, - gas_used=hex(gas_used), - extra_data=EXTRA_DATA_AT_LIMIT.hex(), - prev_randao="0x" + "00" * 32, - nonce="0x0000000000000042", - base_fee_per_gas="0x0", - withdrawals_root="0x" + "00" * 32, - blob_gas_used="0x0", - excess_blob_gas="0x0", - parent_beacon_block_root="0x" + "00" * 32, - requests_hash="0xe3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - ) - - def get_block_rlp_size( + fork: Fork, transactions: List[Transaction], - gas_used: int, withdrawals: List[Withdrawal] | None = None, ) -> int: """ Calculate the RLP size of a block with given transactions and withdrawals. """ - header = create_test_header(gas_used) + header = fork.build_default_block_header( + block_number=1, + timestamp=HEADER_TIMESTAMP, + ) + header.gas_limit = ZeroPaddedHexNumber(BLOCK_GAS_LIMIT) + header.extra_data = Bytes(EXTRA_DATA_AT_LIMIT) + total_gas = sum((tx.gas_limit or 21000) for tx in transactions) header.gas_used = ZeroPaddedHexNumber(total_gas) @@ -323,7 +301,7 @@ def _exact_size_transactions_impl( total_gas_used += last_tx.gas_limit current_size = get_block_rlp_size( - transactions, gas_used=total_gas_used, withdrawals=withdrawals + fork, transactions, withdrawals=withdrawals ) remaining_bytes = block_size_limit - current_size remaining_gas = block_gas_limit - total_gas_used @@ -340,8 +318,8 @@ def _exact_size_transactions_impl( ) empty_block_size = get_block_rlp_size( + fork, transactions + [empty_tx], - gas_used=total_gas_used + empty_tx.gas_limit, withdrawals=withdrawals, ) empty_contribution = empty_block_size - current_size @@ -363,8 +341,8 @@ def _exact_size_transactions_impl( ) test_size = get_block_rlp_size( + fork, transactions + [test_tx], - gas_used=total_gas_used + target_gas, withdrawals=withdrawals, ) @@ -397,8 +375,8 @@ def _exact_size_transactions_impl( ) adjusted_test_size = get_block_rlp_size( + fork, transactions + [adjusted_tx], - gas_used=total_gas_used + adjusted_gas, withdrawals=withdrawals, ) @@ -421,8 +399,8 @@ def _exact_size_transactions_impl( transactions.append(empty_tx) final_size = get_block_rlp_size( + fork, transactions, - gas_used=sum(tx.gas_limit for tx in transactions), withdrawals=withdrawals, ) final_gas = sum(tx.gas_limit for tx in transactions) @@ -475,7 +453,7 @@ def test_block_at_rlp_size_limit_boundary( pre, env.gas_limit, ) - block_rlp_size = get_block_rlp_size(transactions, gas_used=gas_used) + block_rlp_size = get_block_rlp_size(fork, transactions) assert block_rlp_size == block_size_limit, ( f"Block RLP size {block_rlp_size} does not exactly match " f"limit {block_size_limit}, difference: " @@ -528,7 +506,7 @@ def test_block_rlp_size_at_limit_with_all_typed_transactions( env.gas_limit, specific_transaction_to_include=typed_transaction, ) - block_rlp_size = get_block_rlp_size(transactions, gas_used=gas_used) + block_rlp_size = get_block_rlp_size(fork, transactions) assert block_rlp_size == block_size_limit, ( f"Block RLP size {block_rlp_size} does not exactly match limit " f"{block_size_limit}, difference: {block_rlp_size - block_size_limit} " @@ -572,7 +550,7 @@ def test_block_at_rlp_limit_with_logs( emit_logs=True, ) - block_rlp_size = get_block_rlp_size(transactions, gas_used=gas_used) + block_rlp_size = get_block_rlp_size(fork, transactions) assert 
block_rlp_size == block_size_limit, ( f"Block RLP size {block_rlp_size} does not exactly match limit " f"{block_size_limit}, difference: {block_rlp_size - block_size_limit} " @@ -632,7 +610,7 @@ def test_block_at_rlp_limit_with_withdrawals( ) block_rlp_size = get_block_rlp_size( - transactions, gas_used=gas_used, withdrawals=withdrawals + fork, transactions, withdrawals=withdrawals ) assert block_rlp_size == block_size_limit, ( f"Block RLP size {block_rlp_size} does not exactly match limit " @@ -703,8 +681,8 @@ def test_fork_transition_block_rlp_limit( ) for fork_block_rlp_size in [ - get_block_rlp_size(transactions_before, gas_used=gas_used_before), - get_block_rlp_size(transactions_at_fork, gas_used=gas_used_at_fork), + get_block_rlp_size(fork, transactions_before), + get_block_rlp_size(fork, transactions_at_fork), ]: assert fork_block_rlp_size == block_size_limit, ( f"Block RLP size {fork_block_rlp_size} does not exactly match " diff --git a/tox.ini b/tox.ini index cd7ce8bfb7..22b12f6121 100644 --- a/tox.ini +++ b/tox.ini @@ -102,7 +102,7 @@ commands = --basetemp="{temp_dir}/pytest" \ --log-to "{toxworkdir}/logs" \ --clean \ - --until BPO4 \ + --until Amsterdam \ {posargs} \ tests From 05117e5b171978ea4ffb5975704dcfb7b8bee658 Mon Sep 17 00:00:00 2001 From: danceratopz Date: Mon, 19 Jan 2026 09:18:39 +0100 Subject: [PATCH 078/154] chore(spec-specs): fix typos in `src/ethereum/` (#1965) Co-authored-by: Carson --- src/ethereum/ethash.py | 4 ++-- src/ethereum/exceptions.py | 2 +- src/ethereum/forks/amsterdam/blocks.py | 4 ++-- src/ethereum/forks/amsterdam/bloom.py | 2 +- src/ethereum/forks/amsterdam/requests.py | 2 +- src/ethereum/forks/amsterdam/state.py | 8 ++++---- src/ethereum/forks/amsterdam/trie.py | 2 +- src/ethereum/forks/amsterdam/vm/__init__.py | 2 +- src/ethereum/forks/amsterdam/vm/eoa_delegation.py | 2 -- src/ethereum/forks/amsterdam/vm/gas.py | 4 ++-- .../forks/amsterdam/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/amsterdam/vm/instructions/block.py | 2 +- .../forks/amsterdam/vm/instructions/environment.py | 2 +- src/ethereum/forks/amsterdam/vm/instructions/memory.py | 6 +++--- src/ethereum/forks/amsterdam/vm/instructions/stack.py | 8 ++++---- src/ethereum/forks/amsterdam/vm/instructions/system.py | 4 ++-- .../bls12_381/bls12_381_pairing.py | 6 +++--- .../amsterdam/vm/precompiled_contracts/ecrecover.py | 2 +- .../amsterdam/vm/precompiled_contracts/mapping.py | 2 +- .../forks/amsterdam/vm/precompiled_contracts/modexp.py | 2 +- .../amsterdam/vm/precompiled_contracts/p256verify.py | 5 +++-- .../vm/precompiled_contracts/point_evaluation.py | 2 +- src/ethereum/forks/amsterdam/vm/runtime.py | 2 +- src/ethereum/forks/arrow_glacier/blocks.py | 4 ++-- src/ethereum/forks/arrow_glacier/bloom.py | 2 +- src/ethereum/forks/arrow_glacier/state.py | 10 +++++----- src/ethereum/forks/arrow_glacier/trie.py | 2 +- src/ethereum/forks/arrow_glacier/vm/__init__.py | 2 +- src/ethereum/forks/arrow_glacier/vm/gas.py | 2 +- .../forks/arrow_glacier/vm/instructions/arithmetic.py | 4 ++-- .../forks/arrow_glacier/vm/instructions/block.py | 2 +- .../forks/arrow_glacier/vm/instructions/environment.py | 2 +- .../forks/arrow_glacier/vm/instructions/memory.py | 4 ++-- .../forks/arrow_glacier/vm/instructions/stack.py | 8 ++++---- .../forks/arrow_glacier/vm/instructions/system.py | 4 ++-- .../vm/precompiled_contracts/ecrecover.py | 2 +- .../arrow_glacier/vm/precompiled_contracts/mapping.py | 2 +- .../arrow_glacier/vm/precompiled_contracts/modexp.py | 2 +- src/ethereum/forks/arrow_glacier/vm/runtime.py 
| 2 +- src/ethereum/forks/berlin/blocks.py | 4 ++-- src/ethereum/forks/berlin/bloom.py | 2 +- src/ethereum/forks/berlin/state.py | 10 +++++----- src/ethereum/forks/berlin/trie.py | 2 +- src/ethereum/forks/berlin/vm/__init__.py | 2 +- src/ethereum/forks/berlin/vm/gas.py | 2 +- .../forks/berlin/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/berlin/vm/instructions/block.py | 2 +- .../forks/berlin/vm/instructions/environment.py | 2 +- src/ethereum/forks/berlin/vm/instructions/memory.py | 4 ++-- src/ethereum/forks/berlin/vm/instructions/stack.py | 8 ++++---- src/ethereum/forks/berlin/vm/instructions/system.py | 4 ++-- .../forks/berlin/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/berlin/vm/precompiled_contracts/mapping.py | 2 +- .../forks/berlin/vm/precompiled_contracts/modexp.py | 2 +- src/ethereum/forks/berlin/vm/runtime.py | 2 +- src/ethereum/forks/bpo1/blocks.py | 4 ++-- src/ethereum/forks/bpo1/bloom.py | 2 +- src/ethereum/forks/bpo1/requests.py | 2 +- src/ethereum/forks/bpo1/state.py | 8 ++++---- src/ethereum/forks/bpo1/trie.py | 2 +- src/ethereum/forks/bpo1/vm/__init__.py | 2 +- src/ethereum/forks/bpo1/vm/eoa_delegation.py | 2 -- src/ethereum/forks/bpo1/vm/gas.py | 4 ++-- src/ethereum/forks/bpo1/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/bpo1/vm/instructions/block.py | 2 +- src/ethereum/forks/bpo1/vm/instructions/environment.py | 2 +- src/ethereum/forks/bpo1/vm/instructions/memory.py | 6 +++--- src/ethereum/forks/bpo1/vm/instructions/stack.py | 8 ++++---- src/ethereum/forks/bpo1/vm/instructions/system.py | 4 ++-- .../bls12_381/bls12_381_pairing.py | 6 +++--- .../forks/bpo1/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/bpo1/vm/precompiled_contracts/mapping.py | 2 +- .../forks/bpo1/vm/precompiled_contracts/modexp.py | 2 +- .../forks/bpo1/vm/precompiled_contracts/p256verify.py | 5 +++-- .../bpo1/vm/precompiled_contracts/point_evaluation.py | 2 +- src/ethereum/forks/bpo1/vm/runtime.py | 2 +- src/ethereum/forks/bpo2/blocks.py | 4 ++-- src/ethereum/forks/bpo2/bloom.py | 2 +- src/ethereum/forks/bpo2/requests.py | 2 +- src/ethereum/forks/bpo2/state.py | 8 ++++---- src/ethereum/forks/bpo2/trie.py | 2 +- src/ethereum/forks/bpo2/vm/__init__.py | 2 +- src/ethereum/forks/bpo2/vm/eoa_delegation.py | 2 -- src/ethereum/forks/bpo2/vm/gas.py | 4 ++-- src/ethereum/forks/bpo2/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/bpo2/vm/instructions/block.py | 2 +- src/ethereum/forks/bpo2/vm/instructions/environment.py | 2 +- src/ethereum/forks/bpo2/vm/instructions/memory.py | 6 +++--- src/ethereum/forks/bpo2/vm/instructions/stack.py | 8 ++++---- src/ethereum/forks/bpo2/vm/instructions/system.py | 4 ++-- .../bls12_381/bls12_381_pairing.py | 6 +++--- .../forks/bpo2/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/bpo2/vm/precompiled_contracts/mapping.py | 2 +- .../forks/bpo2/vm/precompiled_contracts/modexp.py | 2 +- .../forks/bpo2/vm/precompiled_contracts/p256verify.py | 5 +++-- .../bpo2/vm/precompiled_contracts/point_evaluation.py | 2 +- src/ethereum/forks/bpo2/vm/runtime.py | 2 +- src/ethereum/forks/bpo3/blocks.py | 4 ++-- src/ethereum/forks/bpo3/bloom.py | 2 +- src/ethereum/forks/bpo3/requests.py | 2 +- src/ethereum/forks/bpo3/state.py | 8 ++++---- src/ethereum/forks/bpo3/trie.py | 2 +- src/ethereum/forks/bpo3/vm/__init__.py | 2 +- src/ethereum/forks/bpo3/vm/eoa_delegation.py | 2 -- src/ethereum/forks/bpo3/vm/gas.py | 4 ++-- src/ethereum/forks/bpo3/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/bpo3/vm/instructions/block.py | 2 +- 
src/ethereum/forks/bpo3/vm/instructions/environment.py | 2 +- src/ethereum/forks/bpo3/vm/instructions/memory.py | 6 +++--- src/ethereum/forks/bpo3/vm/instructions/stack.py | 8 ++++---- src/ethereum/forks/bpo3/vm/instructions/system.py | 4 ++-- .../bls12_381/bls12_381_pairing.py | 6 +++--- .../forks/bpo3/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/bpo3/vm/precompiled_contracts/mapping.py | 2 +- .../forks/bpo3/vm/precompiled_contracts/modexp.py | 2 +- .../forks/bpo3/vm/precompiled_contracts/p256verify.py | 5 +++-- .../bpo3/vm/precompiled_contracts/point_evaluation.py | 2 +- src/ethereum/forks/bpo3/vm/runtime.py | 2 +- src/ethereum/forks/bpo4/blocks.py | 4 ++-- src/ethereum/forks/bpo4/bloom.py | 2 +- src/ethereum/forks/bpo4/requests.py | 2 +- src/ethereum/forks/bpo4/state.py | 8 ++++---- src/ethereum/forks/bpo4/trie.py | 2 +- src/ethereum/forks/bpo4/vm/__init__.py | 2 +- src/ethereum/forks/bpo4/vm/eoa_delegation.py | 2 -- src/ethereum/forks/bpo4/vm/gas.py | 4 ++-- src/ethereum/forks/bpo4/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/bpo4/vm/instructions/block.py | 2 +- src/ethereum/forks/bpo4/vm/instructions/environment.py | 2 +- src/ethereum/forks/bpo4/vm/instructions/memory.py | 6 +++--- src/ethereum/forks/bpo4/vm/instructions/stack.py | 8 ++++---- src/ethereum/forks/bpo4/vm/instructions/system.py | 4 ++-- .../bls12_381/bls12_381_pairing.py | 6 +++--- .../forks/bpo4/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/bpo4/vm/precompiled_contracts/mapping.py | 2 +- .../forks/bpo4/vm/precompiled_contracts/modexp.py | 2 +- .../forks/bpo4/vm/precompiled_contracts/p256verify.py | 5 +++-- .../bpo4/vm/precompiled_contracts/point_evaluation.py | 2 +- src/ethereum/forks/bpo4/vm/runtime.py | 2 +- src/ethereum/forks/bpo5/__init__.py | 2 +- src/ethereum/forks/bpo5/blocks.py | 4 ++-- src/ethereum/forks/bpo5/bloom.py | 2 +- src/ethereum/forks/bpo5/requests.py | 2 +- src/ethereum/forks/bpo5/state.py | 8 ++++---- src/ethereum/forks/bpo5/trie.py | 2 +- src/ethereum/forks/bpo5/vm/__init__.py | 2 +- src/ethereum/forks/bpo5/vm/eoa_delegation.py | 2 -- src/ethereum/forks/bpo5/vm/gas.py | 4 ++-- src/ethereum/forks/bpo5/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/bpo5/vm/instructions/block.py | 2 +- src/ethereum/forks/bpo5/vm/instructions/environment.py | 2 +- src/ethereum/forks/bpo5/vm/instructions/memory.py | 6 +++--- src/ethereum/forks/bpo5/vm/instructions/stack.py | 8 ++++---- src/ethereum/forks/bpo5/vm/instructions/system.py | 4 ++-- .../bls12_381/bls12_381_pairing.py | 6 +++--- .../forks/bpo5/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/bpo5/vm/precompiled_contracts/mapping.py | 2 +- .../forks/bpo5/vm/precompiled_contracts/modexp.py | 2 +- .../forks/bpo5/vm/precompiled_contracts/p256verify.py | 5 +++-- .../bpo5/vm/precompiled_contracts/point_evaluation.py | 2 +- src/ethereum/forks/bpo5/vm/runtime.py | 2 +- src/ethereum/forks/byzantium/blocks.py | 4 ++-- src/ethereum/forks/byzantium/bloom.py | 2 +- src/ethereum/forks/byzantium/state.py | 10 +++++----- src/ethereum/forks/byzantium/trie.py | 2 +- src/ethereum/forks/byzantium/vm/__init__.py | 2 +- src/ethereum/forks/byzantium/vm/gas.py | 2 +- .../forks/byzantium/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/byzantium/vm/instructions/block.py | 2 +- .../forks/byzantium/vm/instructions/environment.py | 2 +- src/ethereum/forks/byzantium/vm/instructions/memory.py | 4 ++-- src/ethereum/forks/byzantium/vm/instructions/stack.py | 8 ++++---- .../byzantium/vm/precompiled_contracts/ecrecover.py | 2 +- 
.../byzantium/vm/precompiled_contracts/mapping.py | 2 +- .../forks/byzantium/vm/precompiled_contracts/modexp.py | 2 +- src/ethereum/forks/byzantium/vm/runtime.py | 2 +- src/ethereum/forks/cancun/blocks.py | 4 ++-- src/ethereum/forks/cancun/bloom.py | 2 +- src/ethereum/forks/cancun/state.py | 8 ++++---- src/ethereum/forks/cancun/trie.py | 2 +- src/ethereum/forks/cancun/vm/__init__.py | 2 +- src/ethereum/forks/cancun/vm/gas.py | 4 ++-- .../forks/cancun/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/cancun/vm/instructions/block.py | 2 +- .../forks/cancun/vm/instructions/environment.py | 2 +- src/ethereum/forks/cancun/vm/instructions/memory.py | 6 +++--- src/ethereum/forks/cancun/vm/instructions/stack.py | 8 ++++---- src/ethereum/forks/cancun/vm/instructions/system.py | 4 ++-- .../forks/cancun/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/cancun/vm/precompiled_contracts/mapping.py | 2 +- .../forks/cancun/vm/precompiled_contracts/modexp.py | 2 +- .../vm/precompiled_contracts/point_evaluation.py | 2 +- src/ethereum/forks/cancun/vm/runtime.py | 2 +- src/ethereum/forks/constantinople/blocks.py | 4 ++-- src/ethereum/forks/constantinople/bloom.py | 2 +- src/ethereum/forks/constantinople/state.py | 10 +++++----- src/ethereum/forks/constantinople/trie.py | 2 +- src/ethereum/forks/constantinople/vm/__init__.py | 2 +- src/ethereum/forks/constantinople/vm/gas.py | 2 +- .../forks/constantinople/vm/instructions/arithmetic.py | 4 ++-- .../forks/constantinople/vm/instructions/block.py | 2 +- .../constantinople/vm/instructions/environment.py | 2 +- .../forks/constantinople/vm/instructions/memory.py | 4 ++-- .../forks/constantinople/vm/instructions/stack.py | 8 ++++---- .../forks/constantinople/vm/instructions/system.py | 4 ++-- .../vm/precompiled_contracts/ecrecover.py | 2 +- .../constantinople/vm/precompiled_contracts/mapping.py | 2 +- .../constantinople/vm/precompiled_contracts/modexp.py | 2 +- src/ethereum/forks/constantinople/vm/runtime.py | 2 +- src/ethereum/forks/dao_fork/__init__.py | 2 +- src/ethereum/forks/dao_fork/blocks.py | 4 ++-- src/ethereum/forks/dao_fork/bloom.py | 2 +- src/ethereum/forks/dao_fork/state.py | 10 +++++----- src/ethereum/forks/dao_fork/trie.py | 2 +- src/ethereum/forks/dao_fork/vm/__init__.py | 2 +- .../forks/dao_fork/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/dao_fork/vm/instructions/block.py | 2 +- src/ethereum/forks/dao_fork/vm/instructions/memory.py | 4 ++-- src/ethereum/forks/dao_fork/vm/instructions/stack.py | 8 ++++---- .../dao_fork/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/dao_fork/vm/precompiled_contracts/mapping.py | 2 +- src/ethereum/forks/dao_fork/vm/runtime.py | 2 +- src/ethereum/forks/frontier/blocks.py | 4 ++-- src/ethereum/forks/frontier/bloom.py | 2 +- src/ethereum/forks/frontier/state.py | 10 +++++----- src/ethereum/forks/frontier/trie.py | 2 +- src/ethereum/forks/frontier/vm/__init__.py | 2 +- .../forks/frontier/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/frontier/vm/instructions/block.py | 2 +- src/ethereum/forks/frontier/vm/instructions/memory.py | 4 ++-- src/ethereum/forks/frontier/vm/instructions/stack.py | 8 ++++---- .../frontier/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/frontier/vm/precompiled_contracts/mapping.py | 2 +- src/ethereum/forks/frontier/vm/runtime.py | 2 +- src/ethereum/forks/gray_glacier/blocks.py | 4 ++-- src/ethereum/forks/gray_glacier/bloom.py | 2 +- src/ethereum/forks/gray_glacier/state.py | 10 +++++----- src/ethereum/forks/gray_glacier/trie.py | 2 +- 
src/ethereum/forks/gray_glacier/vm/__init__.py | 2 +- src/ethereum/forks/gray_glacier/vm/gas.py | 2 +- .../forks/gray_glacier/vm/instructions/arithmetic.py | 4 ++-- .../forks/gray_glacier/vm/instructions/block.py | 2 +- .../forks/gray_glacier/vm/instructions/environment.py | 2 +- .../forks/gray_glacier/vm/instructions/memory.py | 4 ++-- .../forks/gray_glacier/vm/instructions/stack.py | 8 ++++---- .../forks/gray_glacier/vm/instructions/system.py | 4 ++-- .../gray_glacier/vm/precompiled_contracts/ecrecover.py | 2 +- .../gray_glacier/vm/precompiled_contracts/mapping.py | 2 +- .../gray_glacier/vm/precompiled_contracts/modexp.py | 2 +- src/ethereum/forks/gray_glacier/vm/runtime.py | 2 +- src/ethereum/forks/homestead/blocks.py | 4 ++-- src/ethereum/forks/homestead/bloom.py | 2 +- src/ethereum/forks/homestead/state.py | 10 +++++----- src/ethereum/forks/homestead/trie.py | 2 +- src/ethereum/forks/homestead/vm/__init__.py | 2 +- .../forks/homestead/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/homestead/vm/instructions/block.py | 2 +- src/ethereum/forks/homestead/vm/instructions/memory.py | 4 ++-- src/ethereum/forks/homestead/vm/instructions/stack.py | 8 ++++---- .../homestead/vm/precompiled_contracts/ecrecover.py | 2 +- .../homestead/vm/precompiled_contracts/mapping.py | 2 +- src/ethereum/forks/homestead/vm/runtime.py | 2 +- src/ethereum/forks/istanbul/blocks.py | 4 ++-- src/ethereum/forks/istanbul/bloom.py | 2 +- src/ethereum/forks/istanbul/state.py | 10 +++++----- src/ethereum/forks/istanbul/trie.py | 2 +- src/ethereum/forks/istanbul/vm/__init__.py | 2 +- src/ethereum/forks/istanbul/vm/gas.py | 2 +- .../forks/istanbul/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/istanbul/vm/instructions/block.py | 2 +- .../forks/istanbul/vm/instructions/environment.py | 2 +- src/ethereum/forks/istanbul/vm/instructions/memory.py | 4 ++-- src/ethereum/forks/istanbul/vm/instructions/stack.py | 8 ++++---- src/ethereum/forks/istanbul/vm/instructions/system.py | 4 ++-- .../istanbul/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/istanbul/vm/precompiled_contracts/mapping.py | 2 +- .../forks/istanbul/vm/precompiled_contracts/modexp.py | 2 +- src/ethereum/forks/istanbul/vm/runtime.py | 2 +- src/ethereum/forks/london/blocks.py | 4 ++-- src/ethereum/forks/london/bloom.py | 2 +- src/ethereum/forks/london/state.py | 10 +++++----- src/ethereum/forks/london/trie.py | 2 +- src/ethereum/forks/london/vm/__init__.py | 2 +- src/ethereum/forks/london/vm/gas.py | 2 +- .../forks/london/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/london/vm/instructions/block.py | 2 +- .../forks/london/vm/instructions/environment.py | 2 +- src/ethereum/forks/london/vm/instructions/memory.py | 4 ++-- src/ethereum/forks/london/vm/instructions/stack.py | 8 ++++---- src/ethereum/forks/london/vm/instructions/system.py | 4 ++-- .../forks/london/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/london/vm/precompiled_contracts/mapping.py | 2 +- .../forks/london/vm/precompiled_contracts/modexp.py | 2 +- src/ethereum/forks/london/vm/runtime.py | 2 +- src/ethereum/forks/muir_glacier/blocks.py | 4 ++-- src/ethereum/forks/muir_glacier/bloom.py | 2 +- src/ethereum/forks/muir_glacier/state.py | 10 +++++----- src/ethereum/forks/muir_glacier/trie.py | 2 +- src/ethereum/forks/muir_glacier/vm/__init__.py | 2 +- src/ethereum/forks/muir_glacier/vm/gas.py | 2 +- .../forks/muir_glacier/vm/instructions/arithmetic.py | 4 ++-- .../forks/muir_glacier/vm/instructions/block.py | 2 +- 
.../forks/muir_glacier/vm/instructions/environment.py | 2 +- .../forks/muir_glacier/vm/instructions/memory.py | 4 ++-- .../forks/muir_glacier/vm/instructions/stack.py | 8 ++++---- .../forks/muir_glacier/vm/instructions/system.py | 4 ++-- .../muir_glacier/vm/precompiled_contracts/ecrecover.py | 2 +- .../muir_glacier/vm/precompiled_contracts/mapping.py | 2 +- .../muir_glacier/vm/precompiled_contracts/modexp.py | 2 +- src/ethereum/forks/muir_glacier/vm/runtime.py | 2 +- src/ethereum/forks/osaka/blocks.py | 4 ++-- src/ethereum/forks/osaka/bloom.py | 2 +- src/ethereum/forks/osaka/requests.py | 2 +- src/ethereum/forks/osaka/state.py | 8 ++++---- src/ethereum/forks/osaka/trie.py | 2 +- src/ethereum/forks/osaka/vm/__init__.py | 2 +- src/ethereum/forks/osaka/vm/eoa_delegation.py | 2 -- src/ethereum/forks/osaka/vm/gas.py | 4 ++-- src/ethereum/forks/osaka/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/osaka/vm/instructions/block.py | 2 +- .../forks/osaka/vm/instructions/environment.py | 2 +- src/ethereum/forks/osaka/vm/instructions/memory.py | 6 +++--- src/ethereum/forks/osaka/vm/instructions/stack.py | 8 ++++---- src/ethereum/forks/osaka/vm/instructions/system.py | 4 ++-- .../bls12_381/bls12_381_pairing.py | 6 +++--- .../forks/osaka/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/osaka/vm/precompiled_contracts/mapping.py | 2 +- .../forks/osaka/vm/precompiled_contracts/modexp.py | 2 +- .../forks/osaka/vm/precompiled_contracts/p256verify.py | 5 +++-- .../osaka/vm/precompiled_contracts/point_evaluation.py | 2 +- src/ethereum/forks/osaka/vm/runtime.py | 2 +- src/ethereum/forks/paris/blocks.py | 4 ++-- src/ethereum/forks/paris/bloom.py | 2 +- src/ethereum/forks/paris/state.py | 8 ++++---- src/ethereum/forks/paris/trie.py | 2 +- src/ethereum/forks/paris/vm/__init__.py | 2 +- src/ethereum/forks/paris/vm/gas.py | 2 +- src/ethereum/forks/paris/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/paris/vm/instructions/block.py | 2 +- .../forks/paris/vm/instructions/environment.py | 2 +- src/ethereum/forks/paris/vm/instructions/memory.py | 4 ++-- src/ethereum/forks/paris/vm/instructions/stack.py | 8 ++++---- src/ethereum/forks/paris/vm/instructions/system.py | 4 ++-- .../forks/paris/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/paris/vm/precompiled_contracts/mapping.py | 2 +- .../forks/paris/vm/precompiled_contracts/modexp.py | 2 +- src/ethereum/forks/paris/vm/runtime.py | 2 +- src/ethereum/forks/prague/blocks.py | 4 ++-- src/ethereum/forks/prague/bloom.py | 2 +- src/ethereum/forks/prague/state.py | 8 ++++---- src/ethereum/forks/prague/trie.py | 2 +- src/ethereum/forks/prague/vm/__init__.py | 2 +- src/ethereum/forks/prague/vm/eoa_delegation.py | 2 -- src/ethereum/forks/prague/vm/gas.py | 4 ++-- .../forks/prague/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/prague/vm/instructions/block.py | 2 +- .../forks/prague/vm/instructions/environment.py | 2 +- src/ethereum/forks/prague/vm/instructions/memory.py | 6 +++--- src/ethereum/forks/prague/vm/instructions/stack.py | 8 ++++---- src/ethereum/forks/prague/vm/instructions/system.py | 4 ++-- .../bls12_381/bls12_381_pairing.py | 6 +++--- .../forks/prague/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/prague/vm/precompiled_contracts/mapping.py | 2 +- .../forks/prague/vm/precompiled_contracts/modexp.py | 2 +- .../vm/precompiled_contracts/point_evaluation.py | 2 +- src/ethereum/forks/prague/vm/runtime.py | 2 +- src/ethereum/forks/shanghai/blocks.py | 4 ++-- src/ethereum/forks/shanghai/bloom.py | 2 +- 
src/ethereum/forks/shanghai/state.py | 8 ++++---- src/ethereum/forks/shanghai/trie.py | 2 +- src/ethereum/forks/shanghai/vm/__init__.py | 2 +- src/ethereum/forks/shanghai/vm/gas.py | 2 +- .../forks/shanghai/vm/instructions/arithmetic.py | 4 ++-- src/ethereum/forks/shanghai/vm/instructions/block.py | 2 +- .../forks/shanghai/vm/instructions/environment.py | 2 +- src/ethereum/forks/shanghai/vm/instructions/memory.py | 4 ++-- src/ethereum/forks/shanghai/vm/instructions/stack.py | 8 ++++---- src/ethereum/forks/shanghai/vm/instructions/system.py | 4 ++-- .../shanghai/vm/precompiled_contracts/ecrecover.py | 2 +- .../forks/shanghai/vm/precompiled_contracts/mapping.py | 2 +- .../forks/shanghai/vm/precompiled_contracts/modexp.py | 2 +- src/ethereum/forks/shanghai/vm/runtime.py | 2 +- src/ethereum/forks/spurious_dragon/blocks.py | 4 ++-- src/ethereum/forks/spurious_dragon/bloom.py | 2 +- src/ethereum/forks/spurious_dragon/state.py | 10 +++++----- src/ethereum/forks/spurious_dragon/trie.py | 2 +- src/ethereum/forks/spurious_dragon/vm/__init__.py | 2 +- src/ethereum/forks/spurious_dragon/vm/gas.py | 2 +- .../spurious_dragon/vm/instructions/arithmetic.py | 4 ++-- .../forks/spurious_dragon/vm/instructions/block.py | 2 +- .../forks/spurious_dragon/vm/instructions/memory.py | 4 ++-- .../forks/spurious_dragon/vm/instructions/stack.py | 8 ++++---- .../vm/precompiled_contracts/ecrecover.py | 2 +- .../vm/precompiled_contracts/mapping.py | 2 +- src/ethereum/forks/spurious_dragon/vm/runtime.py | 2 +- src/ethereum/forks/tangerine_whistle/blocks.py | 4 ++-- src/ethereum/forks/tangerine_whistle/bloom.py | 2 +- src/ethereum/forks/tangerine_whistle/state.py | 10 +++++----- src/ethereum/forks/tangerine_whistle/trie.py | 2 +- src/ethereum/forks/tangerine_whistle/vm/__init__.py | 2 +- src/ethereum/forks/tangerine_whistle/vm/gas.py | 2 +- .../tangerine_whistle/vm/instructions/arithmetic.py | 4 ++-- .../forks/tangerine_whistle/vm/instructions/block.py | 2 +- .../forks/tangerine_whistle/vm/instructions/memory.py | 4 ++-- .../forks/tangerine_whistle/vm/instructions/stack.py | 8 ++++---- .../vm/precompiled_contracts/ecrecover.py | 2 +- .../vm/precompiled_contracts/mapping.py | 2 +- src/ethereum/forks/tangerine_whistle/vm/runtime.py | 2 +- src/ethereum_spec_tools/docc.py | 2 +- src/ethereum_spec_tools/evm_tools/daemon.py | 2 +- src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py | 2 +- src/ethereum_spec_tools/lint/__init__.py | 2 +- src/ethereum_spec_tools/new_fork/builder.py | 2 +- 413 files changed, 701 insertions(+), 710 deletions(-) diff --git a/src/ethereum/ethash.py b/src/ethereum/ethash.py index 830eba02ea..413a056c3b 100644 --- a/src/ethereum/ethash.py +++ b/src/ethereum/ethash.py @@ -398,8 +398,8 @@ def hashimoto_light( dataset_size: Uint, ) -> Tuple[Bytes, Hash32]: """ - Run the [`hashimoto`] algorithm by generating dataset item using the cache - instead of loading the full dataset into main memory. + Run the [`hashimoto`] algorithm by generating a dataset item using the + cache instead of loading the full dataset into main memory. #### Parameters diff --git a/src/ethereum/exceptions.py b/src/ethereum/exceptions.py index 578b55c502..dafb73b64f 100644 --- a/src/ethereum/exceptions.py +++ b/src/ethereum/exceptions.py @@ -18,7 +18,7 @@ class InvalidBlock(EthereumException): class StateWithEmptyAccount(EthereumException): """ - Thrown when the state has empty account. + Thrown when the state has an empty account. 
""" diff --git a/src/ethereum/forks/amsterdam/blocks.py b/src/ethereum/forks/amsterdam/blocks.py index 70038d54cd..ef44549d28 100644 --- a/src/ethereum/forks/amsterdam/blocks.py +++ b/src/ethereum/forks/amsterdam/blocks.py @@ -152,7 +152,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -284,7 +284,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.amsterdam.blocks.Header """ diff --git a/src/ethereum/forks/amsterdam/bloom.py b/src/ethereum/forks/amsterdam/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/amsterdam/bloom.py +++ b/src/ethereum/forks/amsterdam/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/amsterdam/requests.py b/src/ethereum/forks/amsterdam/requests.py index 929e973e58..542c9264d3 100644 --- a/src/ethereum/forks/amsterdam/requests.py +++ b/src/ethereum/forks/amsterdam/requests.py @@ -1,5 +1,5 @@ """ -Requests were introduced in EIP-7685 as a a general purpose framework for +Requests were introduced in EIP-7685 as a general purpose framework for storing contract-triggered requests. It extends the execution header and body with a single field each to store the request information. This inherently exposes the requests to the consensus layer, which can diff --git a/src/ethereum/forks/amsterdam/state.py b/src/ethereum/forks/amsterdam/state.py index fcf12e971b..b4f17e953f 100644 --- a/src/ethereum/forks/amsterdam/state.py +++ b/src/ethereum/forks/amsterdam/state.py @@ -403,7 +403,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -415,7 +415,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -551,7 +551,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose balance needs to be set. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -591,7 +591,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/amsterdam/trie.py b/src/ethereum/forks/amsterdam/trie.py index 09f67447db..77c41f7ff8 100644 --- a/src/ethereum/forks/amsterdam/trie.py +++ b/src/ethereum/forks/amsterdam/trie.py @@ -138,7 +138,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. 
The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` object and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/amsterdam/vm/__init__.py b/src/ethereum/forks/amsterdam/vm/__init__.py index 6c47b50acf..3d69fbd706 100644 --- a/src/ethereum/forks/amsterdam/vm/__init__.py +++ b/src/ethereum/forks/amsterdam/vm/__init__.py @@ -66,7 +66,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py index e56fb0cccd..fe21a1c9f7 100644 --- a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py +++ b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py @@ -169,8 +169,6 @@ def set_delegation(message: Message) -> U256: ---------- message : Transaction specific items. - env : - External items required for EVM execution. Returns ------- diff --git a/src/ethereum/forks/amsterdam/vm/gas.py b/src/ethereum/forks/amsterdam/vm/gas.py index 8fe1820feb..c4655cf0ba 100644 --- a/src/ethereum/forks/amsterdam/vm/gas.py +++ b/src/ethereum/forks/amsterdam/vm/gas.py @@ -249,7 +249,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- @@ -305,7 +305,7 @@ def init_code_cost(init_code_length: Uint) -> Uint: def calculate_excess_blob_gas(parent_header: Header) -> U64: """ - Calculated the excess blob gas for the current block based + Calculates the excess blob gas for the current block based on the gas used in the parent block. Parameters diff --git a/src/ethereum/forks/amsterdam/vm/instructions/arithmetic.py b/src/ethereum/forks/amsterdam/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/amsterdam/vm/instructions/block.py b/src/ethereum/forks/amsterdam/vm/instructions/block.py index 78783751dd..e563a2e96e 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/block.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/block.py @@ -101,7 +101,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. 
Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/amsterdam/vm/instructions/environment.py b/src/ethereum/forks/amsterdam/vm/instructions/environment.py index 5ee84a3d49..45c3bfe835 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/environment.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/environment.py @@ -439,7 +439,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/amsterdam/vm/instructions/memory.py b/src/ethereum/forks/amsterdam/vm/instructions/memory.py index 631d33a7fd..6e111051ee 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/memory.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/memory.py @@ -93,7 +93,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -123,7 +123,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- @@ -146,7 +146,7 @@ def msize(evm: Evm) -> None: def mcopy(evm: Evm) -> None: """ - Copy the bytes in memory from one location to another. + Copies the bytes in memory from one location to another. Parameters ---------- diff --git a/src/ethereum/forks/amsterdam/vm/instructions/stack.py b/src/ethereum/forks/amsterdam/vm/instructions/stack.py index e381b52c37..0007a28acd 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/stack.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. Push zero if num_bytes is zero. + Pushes an N-byte immediate onto the stack. Push zero if num_bytes is zero. Parameters ---------- @@ -79,7 +79,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -107,7 +107,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/amsterdam/vm/instructions/system.py b/src/ethereum/forks/amsterdam/vm/instructions/system.py index 9b54fab312..72f44cdf70 100644 --- a/src/ethereum/forks/amsterdam/vm/instructions/system.py +++ b/src/ethereum/forks/amsterdam/vm/instructions/system.py @@ -226,8 +226,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. 
Parameters ---------- diff --git a/src/ethereum/forks/amsterdam/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py b/src/ethereum/forks/amsterdam/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py index 6cb29a32fd..c7a62cb49c 100644 --- a/src/ethereum/forks/amsterdam/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py +++ b/src/ethereum/forks/amsterdam/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py @@ -33,7 +33,7 @@ def bls12_pairing(evm: Evm) -> None: Raises ------ InvalidParameter - If the input length is invalid or if sub-group check fails. + If the input length is invalid or if the subgroup check fails. """ data = evm.message.data @@ -54,12 +54,12 @@ def bls12_pairing(evm: Evm) -> None: g1_slice = data[g1_start : g1_start + Uint(128)] g1_point = bytes_to_g1(bytes(g1_slice)) if not is_inf(bls12_multiply(g1_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G1 point.") + raise InvalidParameter("Subgroup check failed for G1 point.") g2_slice = data[g2_start : g2_start + Uint(256)] g2_point = bytes_to_g2(bytes(g2_slice)) if not is_inf(bls12_multiply(g2_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G2 point.") + raise InvalidParameter("Subgroup check failed for G2 point.") result *= pairing(g2_point, g1_point) diff --git a/src/ethereum/forks/amsterdam/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/amsterdam/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/amsterdam/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/amsterdam/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/amsterdam/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/amsterdam/vm/precompiled_contracts/mapping.py index a80e2b0235..7486203c3e 100644 --- a/src/ethereum/forks/amsterdam/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/amsterdam/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/amsterdam/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/amsterdam/vm/precompiled_contracts/modexp.py index 5e7e895b91..bf828ee8f6 100644 --- a/src/ethereum/forks/amsterdam/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/amsterdam/vm/precompiled_contracts/modexp.py @@ -22,7 +22,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/amsterdam/vm/precompiled_contracts/p256verify.py b/src/ethereum/forks/amsterdam/vm/precompiled_contracts/p256verify.py index 6f6e7ff4e9..106dd548a6 100644 --- a/src/ethereum/forks/amsterdam/vm/precompiled_contracts/p256verify.py +++ b/src/ethereum/forks/amsterdam/vm/precompiled_contracts/p256verify.py @@ -5,9 +5,10 @@ :backlinks: none :local: -Introduction. +Introduction ------------ -Implementation of the P256VERIFY precompiled contract. + +Implementation of the `P256VERIFY` precompiled contract. 
""" from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/amsterdam/vm/precompiled_contracts/point_evaluation.py b/src/ethereum/forks/amsterdam/vm/precompiled_contracts/point_evaluation.py index 760af47736..ce4eed7129 100644 --- a/src/ethereum/forks/amsterdam/vm/precompiled_contracts/point_evaluation.py +++ b/src/ethereum/forks/amsterdam/vm/precompiled_contracts/point_evaluation.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the POINT EVALUATION precompiled contract. +Implementation of the `POINT EVALUATION` precompiled contract. """ from ethereum_types.bytes import Bytes, Bytes32, Bytes48 diff --git a/src/ethereum/forks/amsterdam/vm/runtime.py b/src/ethereum/forks/amsterdam/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/amsterdam/vm/runtime.py +++ b/src/ethereum/forks/amsterdam/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/arrow_glacier/blocks.py b/src/ethereum/forks/arrow_glacier/blocks.py index 23bc1c4bd6..497a331c0d 100644 --- a/src/ethereum/forks/arrow_glacier/blocks.py +++ b/src/ethereum/forks/arrow_glacier/blocks.py @@ -121,7 +121,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -207,7 +207,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.arrow_glacier.blocks.Header """ diff --git a/src/ethereum/forks/arrow_glacier/bloom.py b/src/ethereum/forks/arrow_glacier/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/arrow_glacier/bloom.py +++ b/src/ethereum/forks/arrow_glacier/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/arrow_glacier/state.py b/src/ethereum/forks/arrow_glacier/state.py index 5fbcf920cd..91a0b28a1e 100644 --- a/src/ethereum/forks/arrow_glacier/state.py +++ b/src/ethereum/forks/arrow_glacier/state.py @@ -367,7 +367,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -379,7 +379,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -500,7 +500,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. 
""" @@ -520,7 +520,7 @@ def touch_account(state: State, address: Address) -> None: The current state. address: - The address of the account that need to initialised. + The address of the account that needs to be initialized. """ if not account_exists(state, address): @@ -557,7 +557,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/arrow_glacier/trie.py b/src/ethereum/forks/arrow_glacier/trie.py index d81b24e668..fce5af9482 100644 --- a/src/ethereum/forks/arrow_glacier/trie.py +++ b/src/ethereum/forks/arrow_glacier/trie.py @@ -128,7 +128,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/arrow_glacier/vm/__init__.py b/src/ethereum/forks/arrow_glacier/vm/__init__.py index 40ee422fb6..ebbb444829 100644 --- a/src/ethereum/forks/arrow_glacier/vm/__init__.py +++ b/src/ethereum/forks/arrow_glacier/vm/__init__.py @@ -62,7 +62,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/arrow_glacier/vm/gas.py b/src/ethereum/forks/arrow_glacier/vm/gas.py index b5c96dea85..4f2dde3b9d 100644 --- a/src/ethereum/forks/arrow_glacier/vm/gas.py +++ b/src/ethereum/forks/arrow_glacier/vm/gas.py @@ -211,7 +211,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- diff --git a/src/ethereum/forks/arrow_glacier/vm/instructions/arithmetic.py b/src/ethereum/forks/arrow_glacier/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/arrow_glacier/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/arrow_glacier/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. 
diff --git a/src/ethereum/forks/arrow_glacier/vm/instructions/block.py b/src/ethereum/forks/arrow_glacier/vm/instructions/block.py index ecbeac10f4..bbf01739ff 100644 --- a/src/ethereum/forks/arrow_glacier/vm/instructions/block.py +++ b/src/ethereum/forks/arrow_glacier/vm/instructions/block.py @@ -87,7 +87,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/arrow_glacier/vm/instructions/environment.py b/src/ethereum/forks/arrow_glacier/vm/instructions/environment.py index 1abfaeafa9..2a7b434661 100644 --- a/src/ethereum/forks/arrow_glacier/vm/instructions/environment.py +++ b/src/ethereum/forks/arrow_glacier/vm/instructions/environment.py @@ -423,7 +423,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/arrow_glacier/vm/instructions/memory.py b/src/ethereum/forks/arrow_glacier/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/arrow_glacier/vm/instructions/memory.py +++ b/src/ethereum/forks/arrow_glacier/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- diff --git a/src/ethereum/forks/arrow_glacier/vm/instructions/stack.py b/src/ethereum/forks/arrow_glacier/vm/instructions/stack.py index bdf232db6a..0fc0d3fe4b 100644 --- a/src/ethereum/forks/arrow_glacier/vm/instructions/stack.py +++ b/src/ethereum/forks/arrow_glacier/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. + Pushes an N-byte immediate onto the stack. Parameters ---------- @@ -76,7 +76,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -104,7 +104,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. 
If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/arrow_glacier/vm/instructions/system.py b/src/ethereum/forks/arrow_glacier/vm/instructions/system.py index 2ef31f9595..44ba77a073 100644 --- a/src/ethereum/forks/arrow_glacier/vm/instructions/system.py +++ b/src/ethereum/forks/arrow_glacier/vm/instructions/system.py @@ -179,8 +179,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/arrow_glacier/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/arrow_glacier/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/arrow_glacier/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/arrow_glacier/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/arrow_glacier/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/arrow_glacier/vm/precompiled_contracts/mapping.py index 827c44cfac..44013f70e8 100644 --- a/src/ethereum/forks/arrow_glacier/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/arrow_glacier/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/arrow_glacier/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/arrow_glacier/vm/precompiled_contracts/modexp.py index 462341f69f..755a396dda 100644 --- a/src/ethereum/forks/arrow_glacier/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/arrow_glacier/vm/precompiled_contracts/modexp.py @@ -23,7 +23,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/arrow_glacier/vm/runtime.py b/src/ethereum/forks/arrow_glacier/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/arrow_glacier/vm/runtime.py +++ b/src/ethereum/forks/arrow_glacier/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/berlin/blocks.py b/src/ethereum/forks/berlin/blocks.py index 8f460316da..a52af69bae 100644 --- a/src/ethereum/forks/berlin/blocks.py +++ b/src/ethereum/forks/berlin/blocks.py @@ -113,7 +113,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -184,7 +184,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. 
Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.berlin.blocks.Header """ diff --git a/src/ethereum/forks/berlin/bloom.py b/src/ethereum/forks/berlin/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/berlin/bloom.py +++ b/src/ethereum/forks/berlin/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/berlin/state.py b/src/ethereum/forks/berlin/state.py index 5fbcf920cd..91a0b28a1e 100644 --- a/src/ethereum/forks/berlin/state.py +++ b/src/ethereum/forks/berlin/state.py @@ -367,7 +367,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -379,7 +379,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -500,7 +500,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -520,7 +520,7 @@ def touch_account(state: State, address: Address) -> None: The current state. address: - The address of the account that need to initialised. + The address of the account that needs to be initialized. """ if not account_exists(state, address): @@ -557,7 +557,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/berlin/trie.py b/src/ethereum/forks/berlin/trie.py index a1fc5a03d6..01e4f06209 100644 --- a/src/ethereum/forks/berlin/trie.py +++ b/src/ethereum/forks/berlin/trie.py @@ -128,7 +128,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/berlin/vm/__init__.py b/src/ethereum/forks/berlin/vm/__init__.py index b1973823af..1c6469d033 100644 --- a/src/ethereum/forks/berlin/vm/__init__.py +++ b/src/ethereum/forks/berlin/vm/__init__.py @@ -61,7 +61,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. 
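The bloom.py docstrings above mention searching logs by address and/or topic; as a reminder of how Ethereum's 2048-bit log bloom derives its bit positions, a small sketch (it assumes a keccak-256 helper such as the one shipped with `eth_hash`; the function name is made up and not part of this patch):

from eth_hash.auto import keccak  # any keccak-256 implementation works here

def bloom_bit_positions_sketch(entry: bytes) -> list[int]:
    # Each bloom entry (an address or a topic) sets three bits, taken from
    # the first three 16-bit chunks of keccak256(entry), each reduced
    # modulo 2048.
    digest = keccak(entry)
    return [int.from_bytes(digest[i:i + 2], "big") % 2048 for i in (0, 2, 4)]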
diff --git a/src/ethereum/forks/berlin/vm/gas.py b/src/ethereum/forks/berlin/vm/gas.py index dcb7dfb30b..74a186d6c4 100644 --- a/src/ethereum/forks/berlin/vm/gas.py +++ b/src/ethereum/forks/berlin/vm/gas.py @@ -212,7 +212,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- diff --git a/src/ethereum/forks/berlin/vm/instructions/arithmetic.py b/src/ethereum/forks/berlin/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/berlin/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/berlin/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/berlin/vm/instructions/block.py b/src/ethereum/forks/berlin/vm/instructions/block.py index ecbeac10f4..bbf01739ff 100644 --- a/src/ethereum/forks/berlin/vm/instructions/block.py +++ b/src/ethereum/forks/berlin/vm/instructions/block.py @@ -87,7 +87,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/berlin/vm/instructions/environment.py b/src/ethereum/forks/berlin/vm/instructions/environment.py index 8ff0e0b2f0..7158c504e8 100644 --- a/src/ethereum/forks/berlin/vm/instructions/environment.py +++ b/src/ethereum/forks/berlin/vm/instructions/environment.py @@ -423,7 +423,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/berlin/vm/instructions/memory.py b/src/ethereum/forks/berlin/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/berlin/vm/instructions/memory.py +++ b/src/ethereum/forks/berlin/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- diff --git a/src/ethereum/forks/berlin/vm/instructions/stack.py b/src/ethereum/forks/berlin/vm/instructions/stack.py index bdf232db6a..0fc0d3fe4b 100644 --- a/src/ethereum/forks/berlin/vm/instructions/stack.py +++ b/src/ethereum/forks/berlin/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. 
Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. + Pushes an N-byte immediate onto the stack. Parameters ---------- @@ -76,7 +76,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -104,7 +104,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/berlin/vm/instructions/system.py b/src/ethereum/forks/berlin/vm/instructions/system.py index 3b915e29ce..afd813e3fa 100644 --- a/src/ethereum/forks/berlin/vm/instructions/system.py +++ b/src/ethereum/forks/berlin/vm/instructions/system.py @@ -180,8 +180,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/berlin/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/berlin/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/berlin/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/berlin/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/berlin/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/berlin/vm/precompiled_contracts/mapping.py index 827c44cfac..44013f70e8 100644 --- a/src/ethereum/forks/berlin/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/berlin/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/berlin/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/berlin/vm/precompiled_contracts/modexp.py index 462341f69f..755a396dda 100644 --- a/src/ethereum/forks/berlin/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/berlin/vm/precompiled_contracts/modexp.py @@ -23,7 +23,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/berlin/vm/runtime.py b/src/ethereum/forks/berlin/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/berlin/vm/runtime.py +++ b/src/ethereum/forks/berlin/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. 
+ Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/bpo1/blocks.py b/src/ethereum/forks/bpo1/blocks.py index ffa9c59a9f..dcfe788d8a 100644 --- a/src/ethereum/forks/bpo1/blocks.py +++ b/src/ethereum/forks/bpo1/blocks.py @@ -152,7 +152,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -272,7 +272,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.bpo1.blocks.Header """ diff --git a/src/ethereum/forks/bpo1/bloom.py b/src/ethereum/forks/bpo1/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/bpo1/bloom.py +++ b/src/ethereum/forks/bpo1/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/bpo1/requests.py b/src/ethereum/forks/bpo1/requests.py index 929e973e58..542c9264d3 100644 --- a/src/ethereum/forks/bpo1/requests.py +++ b/src/ethereum/forks/bpo1/requests.py @@ -1,5 +1,5 @@ """ -Requests were introduced in EIP-7685 as a a general purpose framework for +Requests were introduced in EIP-7685 as a general purpose framework for storing contract-triggered requests. It extends the execution header and body with a single field each to store the request information. This inherently exposes the requests to the consensus layer, which can diff --git a/src/ethereum/forks/bpo1/state.py b/src/ethereum/forks/bpo1/state.py index e997411f6d..6571aa05c6 100644 --- a/src/ethereum/forks/bpo1/state.py +++ b/src/ethereum/forks/bpo1/state.py @@ -400,7 +400,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -412,7 +412,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -517,7 +517,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -557,7 +557,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. 
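The runtime.py hunks above touch the docstring of the jump-destination analysis; a rough stand-alone sketch of that analysis is shown here (JUMPDESTs only count when they are not inside PUSH immediate data; the constants and name are illustrative, not the module's own):

JUMPDEST, PUSH1, PUSH32 = 0x5B, 0x60, 0x7F

def valid_jump_destinations_sketch(code: bytes) -> set[int]:
    # Single pass over the bytecode: record JUMPDEST positions and skip
    # over the immediate bytes that follow any PUSH1..PUSH32 opcode.
    valid, pc = set(), 0
    while pc < len(code):
        op = code[pc]
        if op == JUMPDEST:
            valid.add(pc)
        elif PUSH1 <= op <= PUSH32:
            pc += op - PUSH1 + 1  # length of the push's immediate data
        pc += 1
    return valid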
diff --git a/src/ethereum/forks/bpo1/trie.py b/src/ethereum/forks/bpo1/trie.py index 11a2e035ab..3e6b5cab25 100644 --- a/src/ethereum/forks/bpo1/trie.py +++ b/src/ethereum/forks/bpo1/trie.py @@ -138,7 +138,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/bpo1/vm/__init__.py b/src/ethereum/forks/bpo1/vm/__init__.py index b2a8c5e2b9..641695ea0e 100644 --- a/src/ethereum/forks/bpo1/vm/__init__.py +++ b/src/ethereum/forks/bpo1/vm/__init__.py @@ -63,7 +63,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/bpo1/vm/eoa_delegation.py b/src/ethereum/forks/bpo1/vm/eoa_delegation.py index 29909b5fa5..bc96f4c327 100644 --- a/src/ethereum/forks/bpo1/vm/eoa_delegation.py +++ b/src/ethereum/forks/bpo1/vm/eoa_delegation.py @@ -157,8 +157,6 @@ def set_delegation(message: Message) -> U256: ---------- message : Transaction specific items. - env : - External items required for EVM execution. Returns ------- diff --git a/src/ethereum/forks/bpo1/vm/gas.py b/src/ethereum/forks/bpo1/vm/gas.py index fc4c8618f5..2bf030e190 100644 --- a/src/ethereum/forks/bpo1/vm/gas.py +++ b/src/ethereum/forks/bpo1/vm/gas.py @@ -232,7 +232,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- @@ -288,7 +288,7 @@ def init_code_cost(init_code_length: Uint) -> Uint: def calculate_excess_blob_gas(parent_header: Header) -> U64: """ - Calculated the excess blob gas for the current block based + Calculates the excess blob gas for the current block based on the gas used in the parent block. Parameters diff --git a/src/ethereum/forks/bpo1/vm/instructions/arithmetic.py b/src/ethereum/forks/bpo1/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/bpo1/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/bpo1/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. 
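The gas.py hunk above fixes the wording of `calculate_excess_blob_gas`; since the blob-gas target is precisely the kind of parameter that BPO forks retune, here is a hedged sketch of the update rule with the target passed in explicitly (illustrative only, not the module's code):

def calc_excess_blob_gas_sketch(
    parent_excess_blob_gas: int,
    parent_blob_gas_used: int,
    target_blob_gas_per_block: int,  # fork-dependent; BPO forks adjust this
) -> int:
    # Excess blob gas grows when the parent block used more blob gas than
    # the target, and drains toward zero (never below) when it used less.
    consumed = parent_excess_blob_gas + parent_blob_gas_used
    if consumed < target_blob_gas_per_block:
        return 0
    return consumed - target_blob_gas_per_block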
diff --git a/src/ethereum/forks/bpo1/vm/instructions/block.py b/src/ethereum/forks/bpo1/vm/instructions/block.py index c0520797e5..75810aacd3 100644 --- a/src/ethereum/forks/bpo1/vm/instructions/block.py +++ b/src/ethereum/forks/bpo1/vm/instructions/block.py @@ -101,7 +101,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/bpo1/vm/instructions/environment.py b/src/ethereum/forks/bpo1/vm/instructions/environment.py index 8369043465..28c595ee51 100644 --- a/src/ethereum/forks/bpo1/vm/instructions/environment.py +++ b/src/ethereum/forks/bpo1/vm/instructions/environment.py @@ -426,7 +426,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/bpo1/vm/instructions/memory.py b/src/ethereum/forks/bpo1/vm/instructions/memory.py index 631d33a7fd..6e111051ee 100644 --- a/src/ethereum/forks/bpo1/vm/instructions/memory.py +++ b/src/ethereum/forks/bpo1/vm/instructions/memory.py @@ -93,7 +93,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -123,7 +123,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- @@ -146,7 +146,7 @@ def msize(evm: Evm) -> None: def mcopy(evm: Evm) -> None: """ - Copy the bytes in memory from one location to another. + Copies the bytes in memory from one location to another. Parameters ---------- diff --git a/src/ethereum/forks/bpo1/vm/instructions/stack.py b/src/ethereum/forks/bpo1/vm/instructions/stack.py index e381b52c37..0007a28acd 100644 --- a/src/ethereum/forks/bpo1/vm/instructions/stack.py +++ b/src/ethereum/forks/bpo1/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. Push zero if num_bytes is zero. + Pushes an N-byte immediate onto the stack. Push zero if num_bytes is zero. Parameters ---------- @@ -79,7 +79,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -107,7 +107,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. 
If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/bpo1/vm/instructions/system.py b/src/ethereum/forks/bpo1/vm/instructions/system.py index fea7a0c1b9..44c36ae000 100644 --- a/src/ethereum/forks/bpo1/vm/instructions/system.py +++ b/src/ethereum/forks/bpo1/vm/instructions/system.py @@ -193,8 +193,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/bpo1/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py b/src/ethereum/forks/bpo1/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py index 6cb29a32fd..c7a62cb49c 100644 --- a/src/ethereum/forks/bpo1/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py +++ b/src/ethereum/forks/bpo1/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py @@ -33,7 +33,7 @@ def bls12_pairing(evm: Evm) -> None: Raises ------ InvalidParameter - If the input length is invalid or if sub-group check fails. + If the input length is invalid or if the subgroup check fails. """ data = evm.message.data @@ -54,12 +54,12 @@ def bls12_pairing(evm: Evm) -> None: g1_slice = data[g1_start : g1_start + Uint(128)] g1_point = bytes_to_g1(bytes(g1_slice)) if not is_inf(bls12_multiply(g1_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G1 point.") + raise InvalidParameter("Subgroup check failed for G1 point.") g2_slice = data[g2_start : g2_start + Uint(256)] g2_point = bytes_to_g2(bytes(g2_slice)) if not is_inf(bls12_multiply(g2_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G2 point.") + raise InvalidParameter("Subgroup check failed for G2 point.") result *= pairing(g2_point, g1_point) diff --git a/src/ethereum/forks/bpo1/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/bpo1/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/bpo1/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/bpo1/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/bpo1/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/bpo1/vm/precompiled_contracts/mapping.py index a80e2b0235..7486203c3e 100644 --- a/src/ethereum/forks/bpo1/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/bpo1/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/bpo1/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/bpo1/vm/precompiled_contracts/modexp.py index 5e7e895b91..bf828ee8f6 100644 --- a/src/ethereum/forks/bpo1/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/bpo1/vm/precompiled_contracts/modexp.py @@ -22,7 +22,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. 
The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/bpo1/vm/precompiled_contracts/p256verify.py b/src/ethereum/forks/bpo1/vm/precompiled_contracts/p256verify.py index 6f6e7ff4e9..106dd548a6 100644 --- a/src/ethereum/forks/bpo1/vm/precompiled_contracts/p256verify.py +++ b/src/ethereum/forks/bpo1/vm/precompiled_contracts/p256verify.py @@ -5,9 +5,10 @@ :backlinks: none :local: -Introduction. +Introduction ------------ -Implementation of the P256VERIFY precompiled contract. + +Implementation of the `P256VERIFY` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/bpo1/vm/precompiled_contracts/point_evaluation.py b/src/ethereum/forks/bpo1/vm/precompiled_contracts/point_evaluation.py index 760af47736..ce4eed7129 100644 --- a/src/ethereum/forks/bpo1/vm/precompiled_contracts/point_evaluation.py +++ b/src/ethereum/forks/bpo1/vm/precompiled_contracts/point_evaluation.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the POINT EVALUATION precompiled contract. +Implementation of the `POINT EVALUATION` precompiled contract. """ from ethereum_types.bytes import Bytes, Bytes32, Bytes48 diff --git a/src/ethereum/forks/bpo1/vm/runtime.py b/src/ethereum/forks/bpo1/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/bpo1/vm/runtime.py +++ b/src/ethereum/forks/bpo1/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/bpo2/blocks.py b/src/ethereum/forks/bpo2/blocks.py index 2cf9298739..eb40de4eaf 100644 --- a/src/ethereum/forks/bpo2/blocks.py +++ b/src/ethereum/forks/bpo2/blocks.py @@ -152,7 +152,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -272,7 +272,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.bpo2.blocks.Header """ diff --git a/src/ethereum/forks/bpo2/bloom.py b/src/ethereum/forks/bpo2/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/bpo2/bloom.py +++ b/src/ethereum/forks/bpo2/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/bpo2/requests.py b/src/ethereum/forks/bpo2/requests.py index 929e973e58..542c9264d3 100644 --- a/src/ethereum/forks/bpo2/requests.py +++ b/src/ethereum/forks/bpo2/requests.py @@ -1,5 +1,5 @@ """ -Requests were introduced in EIP-7685 as a a general purpose framework for +Requests were introduced in EIP-7685 as a general purpose framework for storing contract-triggered requests. It extends the execution header and body with a single field each to store the request information. 
This inherently exposes the requests to the consensus layer, which can diff --git a/src/ethereum/forks/bpo2/state.py b/src/ethereum/forks/bpo2/state.py index e997411f6d..6571aa05c6 100644 --- a/src/ethereum/forks/bpo2/state.py +++ b/src/ethereum/forks/bpo2/state.py @@ -400,7 +400,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -412,7 +412,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -517,7 +517,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -557,7 +557,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/bpo2/trie.py b/src/ethereum/forks/bpo2/trie.py index ec585b0d05..314651ab9e 100644 --- a/src/ethereum/forks/bpo2/trie.py +++ b/src/ethereum/forks/bpo2/trie.py @@ -138,7 +138,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/bpo2/vm/__init__.py b/src/ethereum/forks/bpo2/vm/__init__.py index b2a8c5e2b9..641695ea0e 100644 --- a/src/ethereum/forks/bpo2/vm/__init__.py +++ b/src/ethereum/forks/bpo2/vm/__init__.py @@ -63,7 +63,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/bpo2/vm/eoa_delegation.py b/src/ethereum/forks/bpo2/vm/eoa_delegation.py index 29909b5fa5..bc96f4c327 100644 --- a/src/ethereum/forks/bpo2/vm/eoa_delegation.py +++ b/src/ethereum/forks/bpo2/vm/eoa_delegation.py @@ -157,8 +157,6 @@ def set_delegation(message: Message) -> U256: ---------- message : Transaction specific items. - env : - External items required for EVM execution. Returns ------- diff --git a/src/ethereum/forks/bpo2/vm/gas.py b/src/ethereum/forks/bpo2/vm/gas.py index 360a4430e3..aa07e7cae7 100644 --- a/src/ethereum/forks/bpo2/vm/gas.py +++ b/src/ethereum/forks/bpo2/vm/gas.py @@ -232,7 +232,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). 
Returns ------- @@ -288,7 +288,7 @@ def init_code_cost(init_code_length: Uint) -> Uint: def calculate_excess_blob_gas(parent_header: Header) -> U64: """ - Calculated the excess blob gas for the current block based + Calculates the excess blob gas for the current block based on the gas used in the parent block. Parameters diff --git a/src/ethereum/forks/bpo2/vm/instructions/arithmetic.py b/src/ethereum/forks/bpo2/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/bpo2/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/bpo2/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/bpo2/vm/instructions/block.py b/src/ethereum/forks/bpo2/vm/instructions/block.py index e3d73657bf..9923433c7b 100644 --- a/src/ethereum/forks/bpo2/vm/instructions/block.py +++ b/src/ethereum/forks/bpo2/vm/instructions/block.py @@ -101,7 +101,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/bpo2/vm/instructions/environment.py b/src/ethereum/forks/bpo2/vm/instructions/environment.py index 8369043465..28c595ee51 100644 --- a/src/ethereum/forks/bpo2/vm/instructions/environment.py +++ b/src/ethereum/forks/bpo2/vm/instructions/environment.py @@ -426,7 +426,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/bpo2/vm/instructions/memory.py b/src/ethereum/forks/bpo2/vm/instructions/memory.py index 631d33a7fd..6e111051ee 100644 --- a/src/ethereum/forks/bpo2/vm/instructions/memory.py +++ b/src/ethereum/forks/bpo2/vm/instructions/memory.py @@ -93,7 +93,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -123,7 +123,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- @@ -146,7 +146,7 @@ def msize(evm: Evm) -> None: def mcopy(evm: Evm) -> None: """ - Copy the bytes in memory from one location to another. + Copies the bytes in memory from one location to another. 
Parameters ---------- diff --git a/src/ethereum/forks/bpo2/vm/instructions/stack.py b/src/ethereum/forks/bpo2/vm/instructions/stack.py index e381b52c37..0007a28acd 100644 --- a/src/ethereum/forks/bpo2/vm/instructions/stack.py +++ b/src/ethereum/forks/bpo2/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. Push zero if num_bytes is zero. + Pushes an N-byte immediate onto the stack. Push zero if num_bytes is zero. Parameters ---------- @@ -79,7 +79,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -107,7 +107,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/bpo2/vm/instructions/system.py b/src/ethereum/forks/bpo2/vm/instructions/system.py index fea7a0c1b9..44c36ae000 100644 --- a/src/ethereum/forks/bpo2/vm/instructions/system.py +++ b/src/ethereum/forks/bpo2/vm/instructions/system.py @@ -193,8 +193,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/bpo2/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py b/src/ethereum/forks/bpo2/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py index 6cb29a32fd..c7a62cb49c 100644 --- a/src/ethereum/forks/bpo2/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py +++ b/src/ethereum/forks/bpo2/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py @@ -33,7 +33,7 @@ def bls12_pairing(evm: Evm) -> None: Raises ------ InvalidParameter - If the input length is invalid or if sub-group check fails. + If the input length is invalid or if the subgroup check fails. 
""" data = evm.message.data @@ -54,12 +54,12 @@ def bls12_pairing(evm: Evm) -> None: g1_slice = data[g1_start : g1_start + Uint(128)] g1_point = bytes_to_g1(bytes(g1_slice)) if not is_inf(bls12_multiply(g1_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G1 point.") + raise InvalidParameter("Subgroup check failed for G1 point.") g2_slice = data[g2_start : g2_start + Uint(256)] g2_point = bytes_to_g2(bytes(g2_slice)) if not is_inf(bls12_multiply(g2_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G2 point.") + raise InvalidParameter("Subgroup check failed for G2 point.") result *= pairing(g2_point, g1_point) diff --git a/src/ethereum/forks/bpo2/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/bpo2/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/bpo2/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/bpo2/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/bpo2/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/bpo2/vm/precompiled_contracts/mapping.py index a80e2b0235..7486203c3e 100644 --- a/src/ethereum/forks/bpo2/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/bpo2/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/bpo2/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/bpo2/vm/precompiled_contracts/modexp.py index 5e7e895b91..bf828ee8f6 100644 --- a/src/ethereum/forks/bpo2/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/bpo2/vm/precompiled_contracts/modexp.py @@ -22,7 +22,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/bpo2/vm/precompiled_contracts/p256verify.py b/src/ethereum/forks/bpo2/vm/precompiled_contracts/p256verify.py index 6f6e7ff4e9..106dd548a6 100644 --- a/src/ethereum/forks/bpo2/vm/precompiled_contracts/p256verify.py +++ b/src/ethereum/forks/bpo2/vm/precompiled_contracts/p256verify.py @@ -5,9 +5,10 @@ :backlinks: none :local: -Introduction. +Introduction ------------ -Implementation of the P256VERIFY precompiled contract. + +Implementation of the `P256VERIFY` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/bpo2/vm/precompiled_contracts/point_evaluation.py b/src/ethereum/forks/bpo2/vm/precompiled_contracts/point_evaluation.py index 760af47736..ce4eed7129 100644 --- a/src/ethereum/forks/bpo2/vm/precompiled_contracts/point_evaluation.py +++ b/src/ethereum/forks/bpo2/vm/precompiled_contracts/point_evaluation.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the POINT EVALUATION precompiled contract. +Implementation of the `POINT EVALUATION` precompiled contract. 
""" from ethereum_types.bytes import Bytes, Bytes32, Bytes48 diff --git a/src/ethereum/forks/bpo2/vm/runtime.py b/src/ethereum/forks/bpo2/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/bpo2/vm/runtime.py +++ b/src/ethereum/forks/bpo2/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/bpo3/blocks.py b/src/ethereum/forks/bpo3/blocks.py index 5104960d8c..cf2915f7ac 100644 --- a/src/ethereum/forks/bpo3/blocks.py +++ b/src/ethereum/forks/bpo3/blocks.py @@ -152,7 +152,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -272,7 +272,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.bpo3.blocks.Header """ diff --git a/src/ethereum/forks/bpo3/bloom.py b/src/ethereum/forks/bpo3/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/bpo3/bloom.py +++ b/src/ethereum/forks/bpo3/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/bpo3/requests.py b/src/ethereum/forks/bpo3/requests.py index 929e973e58..542c9264d3 100644 --- a/src/ethereum/forks/bpo3/requests.py +++ b/src/ethereum/forks/bpo3/requests.py @@ -1,5 +1,5 @@ """ -Requests were introduced in EIP-7685 as a a general purpose framework for +Requests were introduced in EIP-7685 as a general purpose framework for storing contract-triggered requests. It extends the execution header and body with a single field each to store the request information. This inherently exposes the requests to the consensus layer, which can diff --git a/src/ethereum/forks/bpo3/state.py b/src/ethereum/forks/bpo3/state.py index e997411f6d..6571aa05c6 100644 --- a/src/ethereum/forks/bpo3/state.py +++ b/src/ethereum/forks/bpo3/state.py @@ -400,7 +400,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -412,7 +412,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -517,7 +517,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -557,7 +557,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. 
address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/bpo3/trie.py b/src/ethereum/forks/bpo3/trie.py index 30cf636ca7..60d669de9d 100644 --- a/src/ethereum/forks/bpo3/trie.py +++ b/src/ethereum/forks/bpo3/trie.py @@ -138,7 +138,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/bpo3/vm/__init__.py b/src/ethereum/forks/bpo3/vm/__init__.py index b2a8c5e2b9..641695ea0e 100644 --- a/src/ethereum/forks/bpo3/vm/__init__.py +++ b/src/ethereum/forks/bpo3/vm/__init__.py @@ -63,7 +63,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/bpo3/vm/eoa_delegation.py b/src/ethereum/forks/bpo3/vm/eoa_delegation.py index 29909b5fa5..bc96f4c327 100644 --- a/src/ethereum/forks/bpo3/vm/eoa_delegation.py +++ b/src/ethereum/forks/bpo3/vm/eoa_delegation.py @@ -157,8 +157,6 @@ def set_delegation(message: Message) -> U256: ---------- message : Transaction specific items. - env : - External items required for EVM execution. Returns ------- diff --git a/src/ethereum/forks/bpo3/vm/gas.py b/src/ethereum/forks/bpo3/vm/gas.py index 360a4430e3..aa07e7cae7 100644 --- a/src/ethereum/forks/bpo3/vm/gas.py +++ b/src/ethereum/forks/bpo3/vm/gas.py @@ -232,7 +232,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- @@ -288,7 +288,7 @@ def init_code_cost(init_code_length: Uint) -> Uint: def calculate_excess_blob_gas(parent_header: Header) -> U64: """ - Calculated the excess blob gas for the current block based + Calculates the excess blob gas for the current block based on the gas used in the parent block. Parameters diff --git a/src/ethereum/forks/bpo3/vm/instructions/arithmetic.py b/src/ethereum/forks/bpo3/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/bpo3/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/bpo3/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. 
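The eoa_delegation.py hunk above removes a stale parameter from the `set_delegation` docstring; for context, a minimal sketch of how an EIP-7702 delegation designator is typically recognized (the 0xef0100 prefix and 23-byte length come from the EIP; the helper itself is illustrative and not part of this patch):

DELEGATION_PREFIX = bytes.fromhex("ef0100")  # EIP-7702 designator prefix

def delegation_target_sketch(code: bytes) -> bytes | None:
    # A delegated EOA stores exactly 0xef0100 followed by the 20-byte
    # address whose code should run in its place.
    if len(code) == 23 and code.startswith(DELEGATION_PREFIX):
        return code[3:]
    return None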
diff --git a/src/ethereum/forks/bpo3/vm/instructions/block.py b/src/ethereum/forks/bpo3/vm/instructions/block.py index 8a90f826cf..10fcbdbdb5 100644 --- a/src/ethereum/forks/bpo3/vm/instructions/block.py +++ b/src/ethereum/forks/bpo3/vm/instructions/block.py @@ -101,7 +101,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/bpo3/vm/instructions/environment.py b/src/ethereum/forks/bpo3/vm/instructions/environment.py index 8369043465..28c595ee51 100644 --- a/src/ethereum/forks/bpo3/vm/instructions/environment.py +++ b/src/ethereum/forks/bpo3/vm/instructions/environment.py @@ -426,7 +426,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/bpo3/vm/instructions/memory.py b/src/ethereum/forks/bpo3/vm/instructions/memory.py index 631d33a7fd..6e111051ee 100644 --- a/src/ethereum/forks/bpo3/vm/instructions/memory.py +++ b/src/ethereum/forks/bpo3/vm/instructions/memory.py @@ -93,7 +93,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -123,7 +123,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- @@ -146,7 +146,7 @@ def msize(evm: Evm) -> None: def mcopy(evm: Evm) -> None: """ - Copy the bytes in memory from one location to another. + Copies the bytes in memory from one location to another. Parameters ---------- diff --git a/src/ethereum/forks/bpo3/vm/instructions/stack.py b/src/ethereum/forks/bpo3/vm/instructions/stack.py index e381b52c37..0007a28acd 100644 --- a/src/ethereum/forks/bpo3/vm/instructions/stack.py +++ b/src/ethereum/forks/bpo3/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. Push zero if num_bytes is zero. + Pushes an N-byte immediate onto the stack. Push zero if num_bytes is zero. Parameters ---------- @@ -79,7 +79,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -107,7 +107,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. 
If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/bpo3/vm/instructions/system.py b/src/ethereum/forks/bpo3/vm/instructions/system.py index fea7a0c1b9..44c36ae000 100644 --- a/src/ethereum/forks/bpo3/vm/instructions/system.py +++ b/src/ethereum/forks/bpo3/vm/instructions/system.py @@ -193,8 +193,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/bpo3/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py b/src/ethereum/forks/bpo3/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py index 6cb29a32fd..c7a62cb49c 100644 --- a/src/ethereum/forks/bpo3/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py +++ b/src/ethereum/forks/bpo3/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py @@ -33,7 +33,7 @@ def bls12_pairing(evm: Evm) -> None: Raises ------ InvalidParameter - If the input length is invalid or if sub-group check fails. + If the input length is invalid or if the subgroup check fails. """ data = evm.message.data @@ -54,12 +54,12 @@ def bls12_pairing(evm: Evm) -> None: g1_slice = data[g1_start : g1_start + Uint(128)] g1_point = bytes_to_g1(bytes(g1_slice)) if not is_inf(bls12_multiply(g1_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G1 point.") + raise InvalidParameter("Subgroup check failed for G1 point.") g2_slice = data[g2_start : g2_start + Uint(256)] g2_point = bytes_to_g2(bytes(g2_slice)) if not is_inf(bls12_multiply(g2_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G2 point.") + raise InvalidParameter("Subgroup check failed for G2 point.") result *= pairing(g2_point, g1_point) diff --git a/src/ethereum/forks/bpo3/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/bpo3/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/bpo3/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/bpo3/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/bpo3/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/bpo3/vm/precompiled_contracts/mapping.py index a80e2b0235..7486203c3e 100644 --- a/src/ethereum/forks/bpo3/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/bpo3/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/bpo3/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/bpo3/vm/precompiled_contracts/modexp.py index 5e7e895b91..bf828ee8f6 100644 --- a/src/ethereum/forks/bpo3/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/bpo3/vm/precompiled_contracts/modexp.py @@ -22,7 +22,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. 
The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/bpo3/vm/precompiled_contracts/p256verify.py b/src/ethereum/forks/bpo3/vm/precompiled_contracts/p256verify.py index 6f6e7ff4e9..106dd548a6 100644 --- a/src/ethereum/forks/bpo3/vm/precompiled_contracts/p256verify.py +++ b/src/ethereum/forks/bpo3/vm/precompiled_contracts/p256verify.py @@ -5,9 +5,10 @@ :backlinks: none :local: -Introduction. +Introduction ------------ -Implementation of the P256VERIFY precompiled contract. + +Implementation of the `P256VERIFY` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/bpo3/vm/precompiled_contracts/point_evaluation.py b/src/ethereum/forks/bpo3/vm/precompiled_contracts/point_evaluation.py index 760af47736..ce4eed7129 100644 --- a/src/ethereum/forks/bpo3/vm/precompiled_contracts/point_evaluation.py +++ b/src/ethereum/forks/bpo3/vm/precompiled_contracts/point_evaluation.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the POINT EVALUATION precompiled contract. +Implementation of the `POINT EVALUATION` precompiled contract. """ from ethereum_types.bytes import Bytes, Bytes32, Bytes48 diff --git a/src/ethereum/forks/bpo3/vm/runtime.py b/src/ethereum/forks/bpo3/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/bpo3/vm/runtime.py +++ b/src/ethereum/forks/bpo3/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/bpo4/blocks.py b/src/ethereum/forks/bpo4/blocks.py index e63e283b00..735bda3f39 100644 --- a/src/ethereum/forks/bpo4/blocks.py +++ b/src/ethereum/forks/bpo4/blocks.py @@ -152,7 +152,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -272,7 +272,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.bpo4.blocks.Header """ diff --git a/src/ethereum/forks/bpo4/bloom.py b/src/ethereum/forks/bpo4/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/bpo4/bloom.py +++ b/src/ethereum/forks/bpo4/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/bpo4/requests.py b/src/ethereum/forks/bpo4/requests.py index 929e973e58..542c9264d3 100644 --- a/src/ethereum/forks/bpo4/requests.py +++ b/src/ethereum/forks/bpo4/requests.py @@ -1,5 +1,5 @@ """ -Requests were introduced in EIP-7685 as a a general purpose framework for +Requests were introduced in EIP-7685 as a general purpose framework for storing contract-triggered requests. It extends the execution header and body with a single field each to store the request information. 
This inherently exposes the requests to the consensus layer, which can diff --git a/src/ethereum/forks/bpo4/state.py b/src/ethereum/forks/bpo4/state.py index e997411f6d..6571aa05c6 100644 --- a/src/ethereum/forks/bpo4/state.py +++ b/src/ethereum/forks/bpo4/state.py @@ -400,7 +400,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -412,7 +412,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -517,7 +517,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -557,7 +557,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/bpo4/trie.py b/src/ethereum/forks/bpo4/trie.py index 59fc45a739..e25acebe5c 100644 --- a/src/ethereum/forks/bpo4/trie.py +++ b/src/ethereum/forks/bpo4/trie.py @@ -138,7 +138,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/bpo4/vm/__init__.py b/src/ethereum/forks/bpo4/vm/__init__.py index b2a8c5e2b9..641695ea0e 100644 --- a/src/ethereum/forks/bpo4/vm/__init__.py +++ b/src/ethereum/forks/bpo4/vm/__init__.py @@ -63,7 +63,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/bpo4/vm/eoa_delegation.py b/src/ethereum/forks/bpo4/vm/eoa_delegation.py index 29909b5fa5..bc96f4c327 100644 --- a/src/ethereum/forks/bpo4/vm/eoa_delegation.py +++ b/src/ethereum/forks/bpo4/vm/eoa_delegation.py @@ -157,8 +157,6 @@ def set_delegation(message: Message) -> U256: ---------- message : Transaction specific items. - env : - External items required for EVM execution. Returns ------- diff --git a/src/ethereum/forks/bpo4/vm/gas.py b/src/ethereum/forks/bpo4/vm/gas.py index 360a4430e3..aa07e7cae7 100644 --- a/src/ethereum/forks/bpo4/vm/gas.py +++ b/src/ethereum/forks/bpo4/vm/gas.py @@ -232,7 +232,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). 
Returns ------- @@ -288,7 +288,7 @@ def init_code_cost(init_code_length: Uint) -> Uint: def calculate_excess_blob_gas(parent_header: Header) -> U64: """ - Calculated the excess blob gas for the current block based + Calculates the excess blob gas for the current block based on the gas used in the parent block. Parameters diff --git a/src/ethereum/forks/bpo4/vm/instructions/arithmetic.py b/src/ethereum/forks/bpo4/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/bpo4/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/bpo4/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/bpo4/vm/instructions/block.py b/src/ethereum/forks/bpo4/vm/instructions/block.py index 221082f23e..52145aa9f8 100644 --- a/src/ethereum/forks/bpo4/vm/instructions/block.py +++ b/src/ethereum/forks/bpo4/vm/instructions/block.py @@ -101,7 +101,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/bpo4/vm/instructions/environment.py b/src/ethereum/forks/bpo4/vm/instructions/environment.py index 8369043465..28c595ee51 100644 --- a/src/ethereum/forks/bpo4/vm/instructions/environment.py +++ b/src/ethereum/forks/bpo4/vm/instructions/environment.py @@ -426,7 +426,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/bpo4/vm/instructions/memory.py b/src/ethereum/forks/bpo4/vm/instructions/memory.py index 631d33a7fd..6e111051ee 100644 --- a/src/ethereum/forks/bpo4/vm/instructions/memory.py +++ b/src/ethereum/forks/bpo4/vm/instructions/memory.py @@ -93,7 +93,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -123,7 +123,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- @@ -146,7 +146,7 @@ def msize(evm: Evm) -> None: def mcopy(evm: Evm) -> None: """ - Copy the bytes in memory from one location to another. + Copies the bytes in memory from one location to another. 
Parameters ---------- diff --git a/src/ethereum/forks/bpo4/vm/instructions/stack.py b/src/ethereum/forks/bpo4/vm/instructions/stack.py index e381b52c37..0007a28acd 100644 --- a/src/ethereum/forks/bpo4/vm/instructions/stack.py +++ b/src/ethereum/forks/bpo4/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. Push zero if num_bytes is zero. + Pushes an N-byte immediate onto the stack. Push zero if num_bytes is zero. Parameters ---------- @@ -79,7 +79,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -107,7 +107,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/bpo4/vm/instructions/system.py b/src/ethereum/forks/bpo4/vm/instructions/system.py index fea7a0c1b9..44c36ae000 100644 --- a/src/ethereum/forks/bpo4/vm/instructions/system.py +++ b/src/ethereum/forks/bpo4/vm/instructions/system.py @@ -193,8 +193,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/bpo4/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py b/src/ethereum/forks/bpo4/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py index 6cb29a32fd..c7a62cb49c 100644 --- a/src/ethereum/forks/bpo4/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py +++ b/src/ethereum/forks/bpo4/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py @@ -33,7 +33,7 @@ def bls12_pairing(evm: Evm) -> None: Raises ------ InvalidParameter - If the input length is invalid or if sub-group check fails. + If the input length is invalid or if the subgroup check fails. 
""" data = evm.message.data @@ -54,12 +54,12 @@ def bls12_pairing(evm: Evm) -> None: g1_slice = data[g1_start : g1_start + Uint(128)] g1_point = bytes_to_g1(bytes(g1_slice)) if not is_inf(bls12_multiply(g1_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G1 point.") + raise InvalidParameter("Subgroup check failed for G1 point.") g2_slice = data[g2_start : g2_start + Uint(256)] g2_point = bytes_to_g2(bytes(g2_slice)) if not is_inf(bls12_multiply(g2_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G2 point.") + raise InvalidParameter("Subgroup check failed for G2 point.") result *= pairing(g2_point, g1_point) diff --git a/src/ethereum/forks/bpo4/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/bpo4/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/bpo4/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/bpo4/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/bpo4/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/bpo4/vm/precompiled_contracts/mapping.py index a80e2b0235..7486203c3e 100644 --- a/src/ethereum/forks/bpo4/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/bpo4/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/bpo4/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/bpo4/vm/precompiled_contracts/modexp.py index 5e7e895b91..bf828ee8f6 100644 --- a/src/ethereum/forks/bpo4/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/bpo4/vm/precompiled_contracts/modexp.py @@ -22,7 +22,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/bpo4/vm/precompiled_contracts/p256verify.py b/src/ethereum/forks/bpo4/vm/precompiled_contracts/p256verify.py index 6f6e7ff4e9..106dd548a6 100644 --- a/src/ethereum/forks/bpo4/vm/precompiled_contracts/p256verify.py +++ b/src/ethereum/forks/bpo4/vm/precompiled_contracts/p256verify.py @@ -5,9 +5,10 @@ :backlinks: none :local: -Introduction. +Introduction ------------ -Implementation of the P256VERIFY precompiled contract. + +Implementation of the `P256VERIFY` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/bpo4/vm/precompiled_contracts/point_evaluation.py b/src/ethereum/forks/bpo4/vm/precompiled_contracts/point_evaluation.py index 760af47736..ce4eed7129 100644 --- a/src/ethereum/forks/bpo4/vm/precompiled_contracts/point_evaluation.py +++ b/src/ethereum/forks/bpo4/vm/precompiled_contracts/point_evaluation.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the POINT EVALUATION precompiled contract. +Implementation of the `POINT EVALUATION` precompiled contract. 
""" from ethereum_types.bytes import Bytes, Bytes32, Bytes48 diff --git a/src/ethereum/forks/bpo4/vm/runtime.py b/src/ethereum/forks/bpo4/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/bpo4/vm/runtime.py +++ b/src/ethereum/forks/bpo4/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/bpo5/__init__.py b/src/ethereum/forks/bpo5/__init__.py index 19bb26e43c..1ef794837d 100644 --- a/src/ethereum/forks/bpo5/__init__.py +++ b/src/ethereum/forks/bpo5/__init__.py @@ -1,5 +1,5 @@ """ -The fifth blob parameter only (BPO) fork, BPO3, includes only changes to the +The fifth blob parameter only (BPO) fork, BPO5, includes only changes to the blob fee schedule. ### Changes diff --git a/src/ethereum/forks/bpo5/blocks.py b/src/ethereum/forks/bpo5/blocks.py index 68c7135519..3bd791f1b8 100644 --- a/src/ethereum/forks/bpo5/blocks.py +++ b/src/ethereum/forks/bpo5/blocks.py @@ -152,7 +152,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -272,7 +272,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.bpo5.blocks.Header """ diff --git a/src/ethereum/forks/bpo5/bloom.py b/src/ethereum/forks/bpo5/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/bpo5/bloom.py +++ b/src/ethereum/forks/bpo5/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/bpo5/requests.py b/src/ethereum/forks/bpo5/requests.py index 929e973e58..542c9264d3 100644 --- a/src/ethereum/forks/bpo5/requests.py +++ b/src/ethereum/forks/bpo5/requests.py @@ -1,5 +1,5 @@ """ -Requests were introduced in EIP-7685 as a a general purpose framework for +Requests were introduced in EIP-7685 as a general purpose framework for storing contract-triggered requests. It extends the execution header and body with a single field each to store the request information. This inherently exposes the requests to the consensus layer, which can diff --git a/src/ethereum/forks/bpo5/state.py b/src/ethereum/forks/bpo5/state.py index e997411f6d..6571aa05c6 100644 --- a/src/ethereum/forks/bpo5/state.py +++ b/src/ethereum/forks/bpo5/state.py @@ -400,7 +400,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. 
Parameters ---------- @@ -412,7 +412,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -517,7 +517,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -557,7 +557,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/bpo5/trie.py b/src/ethereum/forks/bpo5/trie.py index 1f32f8db3e..a024b3433b 100644 --- a/src/ethereum/forks/bpo5/trie.py +++ b/src/ethereum/forks/bpo5/trie.py @@ -138,7 +138,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/bpo5/vm/__init__.py b/src/ethereum/forks/bpo5/vm/__init__.py index b2a8c5e2b9..641695ea0e 100644 --- a/src/ethereum/forks/bpo5/vm/__init__.py +++ b/src/ethereum/forks/bpo5/vm/__init__.py @@ -63,7 +63,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/bpo5/vm/eoa_delegation.py b/src/ethereum/forks/bpo5/vm/eoa_delegation.py index 29909b5fa5..bc96f4c327 100644 --- a/src/ethereum/forks/bpo5/vm/eoa_delegation.py +++ b/src/ethereum/forks/bpo5/vm/eoa_delegation.py @@ -157,8 +157,6 @@ def set_delegation(message: Message) -> U256: ---------- message : Transaction specific items. - env : - External items required for EVM execution. Returns ------- diff --git a/src/ethereum/forks/bpo5/vm/gas.py b/src/ethereum/forks/bpo5/vm/gas.py index 360a4430e3..aa07e7cae7 100644 --- a/src/ethereum/forks/bpo5/vm/gas.py +++ b/src/ethereum/forks/bpo5/vm/gas.py @@ -232,7 +232,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- @@ -288,7 +288,7 @@ def init_code_cost(init_code_length: Uint) -> Uint: def calculate_excess_blob_gas(parent_header: Header) -> U64: """ - Calculated the excess blob gas for the current block based + Calculates the excess blob gas for the current block based on the gas used in the parent block. 
Parameters diff --git a/src/ethereum/forks/bpo5/vm/instructions/arithmetic.py b/src/ethereum/forks/bpo5/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/bpo5/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/bpo5/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/bpo5/vm/instructions/block.py b/src/ethereum/forks/bpo5/vm/instructions/block.py index 2712e06ea3..84930244d4 100644 --- a/src/ethereum/forks/bpo5/vm/instructions/block.py +++ b/src/ethereum/forks/bpo5/vm/instructions/block.py @@ -101,7 +101,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/bpo5/vm/instructions/environment.py b/src/ethereum/forks/bpo5/vm/instructions/environment.py index 8369043465..28c595ee51 100644 --- a/src/ethereum/forks/bpo5/vm/instructions/environment.py +++ b/src/ethereum/forks/bpo5/vm/instructions/environment.py @@ -426,7 +426,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/bpo5/vm/instructions/memory.py b/src/ethereum/forks/bpo5/vm/instructions/memory.py index 631d33a7fd..6e111051ee 100644 --- a/src/ethereum/forks/bpo5/vm/instructions/memory.py +++ b/src/ethereum/forks/bpo5/vm/instructions/memory.py @@ -93,7 +93,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -123,7 +123,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- @@ -146,7 +146,7 @@ def msize(evm: Evm) -> None: def mcopy(evm: Evm) -> None: """ - Copy the bytes in memory from one location to another. + Copies the bytes in memory from one location to another. Parameters ---------- diff --git a/src/ethereum/forks/bpo5/vm/instructions/stack.py b/src/ethereum/forks/bpo5/vm/instructions/stack.py index e381b52c37..0007a28acd 100644 --- a/src/ethereum/forks/bpo5/vm/instructions/stack.py +++ b/src/ethereum/forks/bpo5/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. Push zero if num_bytes is zero. + Pushes an N-byte immediate onto the stack. 
Push zero if num_bytes is zero. Parameters ---------- @@ -79,7 +79,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -107,7 +107,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/bpo5/vm/instructions/system.py b/src/ethereum/forks/bpo5/vm/instructions/system.py index fea7a0c1b9..44c36ae000 100644 --- a/src/ethereum/forks/bpo5/vm/instructions/system.py +++ b/src/ethereum/forks/bpo5/vm/instructions/system.py @@ -193,8 +193,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/bpo5/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py b/src/ethereum/forks/bpo5/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py index 6cb29a32fd..c7a62cb49c 100644 --- a/src/ethereum/forks/bpo5/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py +++ b/src/ethereum/forks/bpo5/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py @@ -33,7 +33,7 @@ def bls12_pairing(evm: Evm) -> None: Raises ------ InvalidParameter - If the input length is invalid or if sub-group check fails. + If the input length is invalid or if the subgroup check fails. """ data = evm.message.data @@ -54,12 +54,12 @@ def bls12_pairing(evm: Evm) -> None: g1_slice = data[g1_start : g1_start + Uint(128)] g1_point = bytes_to_g1(bytes(g1_slice)) if not is_inf(bls12_multiply(g1_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G1 point.") + raise InvalidParameter("Subgroup check failed for G1 point.") g2_slice = data[g2_start : g2_start + Uint(256)] g2_point = bytes_to_g2(bytes(g2_slice)) if not is_inf(bls12_multiply(g2_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G2 point.") + raise InvalidParameter("Subgroup check failed for G2 point.") result *= pairing(g2_point, g1_point) diff --git a/src/ethereum/forks/bpo5/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/bpo5/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/bpo5/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/bpo5/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/bpo5/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/bpo5/vm/precompiled_contracts/mapping.py index a80e2b0235..7486203c3e 100644 --- a/src/ethereum/forks/bpo5/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/bpo5/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. 
+Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/bpo5/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/bpo5/vm/precompiled_contracts/modexp.py index 5e7e895b91..bf828ee8f6 100644 --- a/src/ethereum/forks/bpo5/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/bpo5/vm/precompiled_contracts/modexp.py @@ -22,7 +22,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/bpo5/vm/precompiled_contracts/p256verify.py b/src/ethereum/forks/bpo5/vm/precompiled_contracts/p256verify.py index 6f6e7ff4e9..106dd548a6 100644 --- a/src/ethereum/forks/bpo5/vm/precompiled_contracts/p256verify.py +++ b/src/ethereum/forks/bpo5/vm/precompiled_contracts/p256verify.py @@ -5,9 +5,10 @@ :backlinks: none :local: -Introduction. +Introduction ------------ -Implementation of the P256VERIFY precompiled contract. + +Implementation of the `P256VERIFY` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/bpo5/vm/precompiled_contracts/point_evaluation.py b/src/ethereum/forks/bpo5/vm/precompiled_contracts/point_evaluation.py index 760af47736..ce4eed7129 100644 --- a/src/ethereum/forks/bpo5/vm/precompiled_contracts/point_evaluation.py +++ b/src/ethereum/forks/bpo5/vm/precompiled_contracts/point_evaluation.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the POINT EVALUATION precompiled contract. +Implementation of the `POINT EVALUATION` precompiled contract. """ from ethereum_types.bytes import Bytes, Bytes32, Bytes48 diff --git a/src/ethereum/forks/bpo5/vm/runtime.py b/src/ethereum/forks/bpo5/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/bpo5/vm/runtime.py +++ b/src/ethereum/forks/bpo5/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/byzantium/blocks.py b/src/ethereum/forks/byzantium/blocks.py index a70fe41923..2f027b04dc 100644 --- a/src/ethereum/forks/byzantium/blocks.py +++ b/src/ethereum/forks/byzantium/blocks.py @@ -112,7 +112,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -183,7 +183,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.byzantium.blocks.Header """ diff --git a/src/ethereum/forks/byzantium/bloom.py b/src/ethereum/forks/byzantium/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/byzantium/bloom.py +++ b/src/ethereum/forks/byzantium/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. 
Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/byzantium/state.py b/src/ethereum/forks/byzantium/state.py index 2e21b4f814..a0dd7d77ab 100644 --- a/src/ethereum/forks/byzantium/state.py +++ b/src/ethereum/forks/byzantium/state.py @@ -340,7 +340,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -352,7 +352,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -473,7 +473,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -493,7 +493,7 @@ def touch_account(state: State, address: Address) -> None: The current state. address: - The address of the account that need to initialised. + The address of the account that needs to be initialized. """ if not account_exists(state, address): @@ -530,7 +530,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/byzantium/trie.py b/src/ethereum/forks/byzantium/trie.py index 555ec32d3c..2858183a40 100644 --- a/src/ethereum/forks/byzantium/trie.py +++ b/src/ethereum/forks/byzantium/trie.py @@ -128,7 +128,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/byzantium/vm/__init__.py b/src/ethereum/forks/byzantium/vm/__init__.py index 6cdd9c9bc7..068648ac36 100644 --- a/src/ethereum/forks/byzantium/vm/__init__.py +++ b/src/ethereum/forks/byzantium/vm/__init__.py @@ -61,7 +61,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/byzantium/vm/gas.py b/src/ethereum/forks/byzantium/vm/gas.py index 6944128369..80d9b2cd76 100644 --- a/src/ethereum/forks/byzantium/vm/gas.py +++ b/src/ethereum/forks/byzantium/vm/gas.py @@ -211,7 +211,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). 
Returns ------- diff --git a/src/ethereum/forks/byzantium/vm/instructions/arithmetic.py b/src/ethereum/forks/byzantium/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/byzantium/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/byzantium/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/byzantium/vm/instructions/block.py b/src/ethereum/forks/byzantium/vm/instructions/block.py index 8b50f1bc07..0edcbf000d 100644 --- a/src/ethereum/forks/byzantium/vm/instructions/block.py +++ b/src/ethereum/forks/byzantium/vm/instructions/block.py @@ -87,7 +87,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/byzantium/vm/instructions/environment.py b/src/ethereum/forks/byzantium/vm/instructions/environment.py index d98f13b0d8..f657e25299 100644 --- a/src/ethereum/forks/byzantium/vm/instructions/environment.py +++ b/src/ethereum/forks/byzantium/vm/instructions/environment.py @@ -403,7 +403,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/byzantium/vm/instructions/memory.py b/src/ethereum/forks/byzantium/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/byzantium/vm/instructions/memory.py +++ b/src/ethereum/forks/byzantium/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- diff --git a/src/ethereum/forks/byzantium/vm/instructions/stack.py b/src/ethereum/forks/byzantium/vm/instructions/stack.py index bdf232db6a..0fc0d3fe4b 100644 --- a/src/ethereum/forks/byzantium/vm/instructions/stack.py +++ b/src/ethereum/forks/byzantium/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. + Pushes an N-byte immediate onto the stack. 
Parameters ---------- @@ -76,7 +76,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -104,7 +104,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/byzantium/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/byzantium/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/byzantium/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/byzantium/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/byzantium/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/byzantium/vm/precompiled_contracts/mapping.py index 13746350b5..d866782adc 100644 --- a/src/ethereum/forks/byzantium/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/byzantium/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/byzantium/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/byzantium/vm/precompiled_contracts/modexp.py index fd0501616c..8ed5c851a7 100644 --- a/src/ethereum/forks/byzantium/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/byzantium/vm/precompiled_contracts/modexp.py @@ -23,7 +23,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/byzantium/vm/runtime.py b/src/ethereum/forks/byzantium/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/byzantium/vm/runtime.py +++ b/src/ethereum/forks/byzantium/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/cancun/blocks.py b/src/ethereum/forks/cancun/blocks.py index e2668062f6..eb413f5200 100644 --- a/src/ethereum/forks/cancun/blocks.py +++ b/src/ethereum/forks/cancun/blocks.py @@ -151,7 +151,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -261,7 +261,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. 
[headers]: ref:ethereum.forks.cancun.blocks.Header """ diff --git a/src/ethereum/forks/cancun/bloom.py b/src/ethereum/forks/cancun/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/cancun/bloom.py +++ b/src/ethereum/forks/cancun/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/cancun/state.py b/src/ethereum/forks/cancun/state.py index e997411f6d..6571aa05c6 100644 --- a/src/ethereum/forks/cancun/state.py +++ b/src/ethereum/forks/cancun/state.py @@ -400,7 +400,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -412,7 +412,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -517,7 +517,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -557,7 +557,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/cancun/trie.py b/src/ethereum/forks/cancun/trie.py index 2efc9c07cd..c6bb4fe928 100644 --- a/src/ethereum/forks/cancun/trie.py +++ b/src/ethereum/forks/cancun/trie.py @@ -138,7 +138,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/cancun/vm/__init__.py b/src/ethereum/forks/cancun/vm/__init__.py index 0f03a02cb7..96803699ea 100644 --- a/src/ethereum/forks/cancun/vm/__init__.py +++ b/src/ethereum/forks/cancun/vm/__init__.py @@ -63,7 +63,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/cancun/vm/gas.py b/src/ethereum/forks/cancun/vm/gas.py index 241315a994..e1ab03155e 100644 --- a/src/ethereum/forks/cancun/vm/gas.py +++ b/src/ethereum/forks/cancun/vm/gas.py @@ -221,7 +221,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). 
Returns ------- @@ -277,7 +277,7 @@ def init_code_cost(init_code_length: Uint) -> Uint: def calculate_excess_blob_gas(parent_header: Header) -> U64: """ - Calculated the excess blob gas for the current block based + Calculates the excess blob gas for the current block based on the gas used in the parent block. Parameters diff --git a/src/ethereum/forks/cancun/vm/instructions/arithmetic.py b/src/ethereum/forks/cancun/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/cancun/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/cancun/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/cancun/vm/instructions/block.py b/src/ethereum/forks/cancun/vm/instructions/block.py index fac1e1c95f..4f971a9dbf 100644 --- a/src/ethereum/forks/cancun/vm/instructions/block.py +++ b/src/ethereum/forks/cancun/vm/instructions/block.py @@ -101,7 +101,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/cancun/vm/instructions/environment.py b/src/ethereum/forks/cancun/vm/instructions/environment.py index 8369043465..28c595ee51 100644 --- a/src/ethereum/forks/cancun/vm/instructions/environment.py +++ b/src/ethereum/forks/cancun/vm/instructions/environment.py @@ -426,7 +426,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/cancun/vm/instructions/memory.py b/src/ethereum/forks/cancun/vm/instructions/memory.py index 631d33a7fd..6e111051ee 100644 --- a/src/ethereum/forks/cancun/vm/instructions/memory.py +++ b/src/ethereum/forks/cancun/vm/instructions/memory.py @@ -93,7 +93,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -123,7 +123,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- @@ -146,7 +146,7 @@ def msize(evm: Evm) -> None: def mcopy(evm: Evm) -> None: """ - Copy the bytes in memory from one location to another. + Copies the bytes in memory from one location to another. 
Parameters ---------- diff --git a/src/ethereum/forks/cancun/vm/instructions/stack.py b/src/ethereum/forks/cancun/vm/instructions/stack.py index e381b52c37..0007a28acd 100644 --- a/src/ethereum/forks/cancun/vm/instructions/stack.py +++ b/src/ethereum/forks/cancun/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. Push zero if num_bytes is zero. + Pushes an N-byte immediate onto the stack. Push zero if num_bytes is zero. Parameters ---------- @@ -79,7 +79,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -107,7 +107,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/cancun/vm/instructions/system.py b/src/ethereum/forks/cancun/vm/instructions/system.py index 89b3b6cfda..ca462164d2 100644 --- a/src/ethereum/forks/cancun/vm/instructions/system.py +++ b/src/ethereum/forks/cancun/vm/instructions/system.py @@ -191,8 +191,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/cancun/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/cancun/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/cancun/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/cancun/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/cancun/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/cancun/vm/precompiled_contracts/mapping.py index dc24767ab6..4acff465fb 100644 --- a/src/ethereum/forks/cancun/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/cancun/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/cancun/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/cancun/vm/precompiled_contracts/modexp.py index 462341f69f..755a396dda 100644 --- a/src/ethereum/forks/cancun/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/cancun/vm/precompiled_contracts/modexp.py @@ -23,7 +23,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. 
The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/cancun/vm/precompiled_contracts/point_evaluation.py b/src/ethereum/forks/cancun/vm/precompiled_contracts/point_evaluation.py index 760af47736..ce4eed7129 100644 --- a/src/ethereum/forks/cancun/vm/precompiled_contracts/point_evaluation.py +++ b/src/ethereum/forks/cancun/vm/precompiled_contracts/point_evaluation.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the POINT EVALUATION precompiled contract. +Implementation of the `POINT EVALUATION` precompiled contract. """ from ethereum_types.bytes import Bytes, Bytes32, Bytes48 diff --git a/src/ethereum/forks/cancun/vm/runtime.py b/src/ethereum/forks/cancun/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/cancun/vm/runtime.py +++ b/src/ethereum/forks/cancun/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/constantinople/blocks.py b/src/ethereum/forks/constantinople/blocks.py index 8d476d8451..e53cf2c1c6 100644 --- a/src/ethereum/forks/constantinople/blocks.py +++ b/src/ethereum/forks/constantinople/blocks.py @@ -112,7 +112,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -183,7 +183,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.constantinople.blocks.Header """ diff --git a/src/ethereum/forks/constantinople/bloom.py b/src/ethereum/forks/constantinople/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/constantinople/bloom.py +++ b/src/ethereum/forks/constantinople/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/constantinople/state.py b/src/ethereum/forks/constantinople/state.py index 2e21b4f814..a0dd7d77ab 100644 --- a/src/ethereum/forks/constantinople/state.py +++ b/src/ethereum/forks/constantinople/state.py @@ -340,7 +340,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -352,7 +352,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -473,7 +473,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. 
+ The amount that needs to be set in the balance. """ @@ -493,7 +493,7 @@ def touch_account(state: State, address: Address) -> None: The current state. address: - The address of the account that need to initialised. + The address of the account that needs to be initialized. """ if not account_exists(state, address): @@ -530,7 +530,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/constantinople/trie.py b/src/ethereum/forks/constantinople/trie.py index c730e05515..0ff06ea4ec 100644 --- a/src/ethereum/forks/constantinople/trie.py +++ b/src/ethereum/forks/constantinople/trie.py @@ -128,7 +128,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/constantinople/vm/__init__.py b/src/ethereum/forks/constantinople/vm/__init__.py index 6cdd9c9bc7..068648ac36 100644 --- a/src/ethereum/forks/constantinople/vm/__init__.py +++ b/src/ethereum/forks/constantinople/vm/__init__.py @@ -61,7 +61,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/constantinople/vm/gas.py b/src/ethereum/forks/constantinople/vm/gas.py index 7ec51fd8b5..940e266b51 100644 --- a/src/ethereum/forks/constantinople/vm/gas.py +++ b/src/ethereum/forks/constantinople/vm/gas.py @@ -212,7 +212,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- diff --git a/src/ethereum/forks/constantinople/vm/instructions/arithmetic.py b/src/ethereum/forks/constantinople/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/constantinople/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/constantinople/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. 
diff --git a/src/ethereum/forks/constantinople/vm/instructions/block.py b/src/ethereum/forks/constantinople/vm/instructions/block.py index 8b50f1bc07..0edcbf000d 100644 --- a/src/ethereum/forks/constantinople/vm/instructions/block.py +++ b/src/ethereum/forks/constantinople/vm/instructions/block.py @@ -87,7 +87,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/constantinople/vm/instructions/environment.py b/src/ethereum/forks/constantinople/vm/instructions/environment.py index f5a0be72f4..8db76a38cc 100644 --- a/src/ethereum/forks/constantinople/vm/instructions/environment.py +++ b/src/ethereum/forks/constantinople/vm/instructions/environment.py @@ -406,7 +406,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/constantinople/vm/instructions/memory.py b/src/ethereum/forks/constantinople/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/constantinople/vm/instructions/memory.py +++ b/src/ethereum/forks/constantinople/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- diff --git a/src/ethereum/forks/constantinople/vm/instructions/stack.py b/src/ethereum/forks/constantinople/vm/instructions/stack.py index bdf232db6a..0fc0d3fe4b 100644 --- a/src/ethereum/forks/constantinople/vm/instructions/stack.py +++ b/src/ethereum/forks/constantinople/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. + Pushes an N-byte immediate onto the stack. Parameters ---------- @@ -76,7 +76,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -104,7 +104,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. 
If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/constantinople/vm/instructions/system.py b/src/ethereum/forks/constantinople/vm/instructions/system.py index fadd963417..af2527d31c 100644 --- a/src/ethereum/forks/constantinople/vm/instructions/system.py +++ b/src/ethereum/forks/constantinople/vm/instructions/system.py @@ -179,8 +179,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/constantinople/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/constantinople/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/constantinople/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/constantinople/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/constantinople/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/constantinople/vm/precompiled_contracts/mapping.py index 13746350b5..d866782adc 100644 --- a/src/ethereum/forks/constantinople/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/constantinople/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/constantinople/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/constantinople/vm/precompiled_contracts/modexp.py index fd0501616c..8ed5c851a7 100644 --- a/src/ethereum/forks/constantinople/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/constantinople/vm/precompiled_contracts/modexp.py @@ -23,7 +23,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/constantinople/vm/runtime.py b/src/ethereum/forks/constantinople/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/constantinople/vm/runtime.py +++ b/src/ethereum/forks/constantinople/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. 
diff --git a/src/ethereum/forks/dao_fork/__init__.py b/src/ethereum/forks/dao_fork/__init__.py index 7516eb2de4..a194cdf3e6 100644 --- a/src/ethereum/forks/dao_fork/__init__.py +++ b/src/ethereum/forks/dao_fork/__init__.py @@ -12,7 +12,7 @@ | Network | Block | Expected Date | Fork Hash | | ------- | ----------- | ------------- | ------------ | -| Mainnet | 1,920,000 | July 20, 1026 | `0x91d1f948` | +| Mainnet | 1,920,000 | July 20, 2016 | `0x91d1f948` | ### Releases diff --git a/src/ethereum/forks/dao_fork/blocks.py b/src/ethereum/forks/dao_fork/blocks.py index 8a6db3ea07..f0de09559a 100644 --- a/src/ethereum/forks/dao_fork/blocks.py +++ b/src/ethereum/forks/dao_fork/blocks.py @@ -112,7 +112,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -183,7 +183,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.dao_fork.blocks.Header """ diff --git a/src/ethereum/forks/dao_fork/bloom.py b/src/ethereum/forks/dao_fork/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/dao_fork/bloom.py +++ b/src/ethereum/forks/dao_fork/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/dao_fork/state.py b/src/ethereum/forks/dao_fork/state.py index 8fb03a07fe..3b6255055d 100644 --- a/src/ethereum/forks/dao_fork/state.py +++ b/src/ethereum/forks/dao_fork/state.py @@ -340,7 +340,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -352,7 +352,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -424,7 +424,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -444,7 +444,7 @@ def touch_account(state: State, address: Address) -> None: The current state. address: - The address of the account that need to initialised. + The address of the account that needs to be initialized. """ if not account_exists(state, address): @@ -481,7 +481,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. 
diff --git a/src/ethereum/forks/dao_fork/trie.py b/src/ethereum/forks/dao_fork/trie.py index 3ce2378853..679922ff44 100644 --- a/src/ethereum/forks/dao_fork/trie.py +++ b/src/ethereum/forks/dao_fork/trie.py @@ -128,7 +128,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/dao_fork/vm/__init__.py b/src/ethereum/forks/dao_fork/vm/__init__.py index 21585c758c..e4739cebde 100644 --- a/src/ethereum/forks/dao_fork/vm/__init__.py +++ b/src/ethereum/forks/dao_fork/vm/__init__.py @@ -60,7 +60,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/dao_fork/vm/instructions/arithmetic.py b/src/ethereum/forks/dao_fork/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/dao_fork/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/dao_fork/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/dao_fork/vm/instructions/block.py b/src/ethereum/forks/dao_fork/vm/instructions/block.py index 8b50f1bc07..0edcbf000d 100644 --- a/src/ethereum/forks/dao_fork/vm/instructions/block.py +++ b/src/ethereum/forks/dao_fork/vm/instructions/block.py @@ -87,7 +87,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/dao_fork/vm/instructions/memory.py b/src/ethereum/forks/dao_fork/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/dao_fork/vm/instructions/memory.py +++ b/src/ethereum/forks/dao_fork/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. 
Parameters ---------- diff --git a/src/ethereum/forks/dao_fork/vm/instructions/stack.py b/src/ethereum/forks/dao_fork/vm/instructions/stack.py index bdf232db6a..0fc0d3fe4b 100644 --- a/src/ethereum/forks/dao_fork/vm/instructions/stack.py +++ b/src/ethereum/forks/dao_fork/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. + Pushes an N-byte immediate onto the stack. Parameters ---------- @@ -76,7 +76,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -104,7 +104,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/dao_fork/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/dao_fork/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/dao_fork/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/dao_fork/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/dao_fork/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/dao_fork/vm/precompiled_contracts/mapping.py index 33412f4c28..1d44a6f2e3 100644 --- a/src/ethereum/forks/dao_fork/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/dao_fork/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/dao_fork/vm/runtime.py b/src/ethereum/forks/dao_fork/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/dao_fork/vm/runtime.py +++ b/src/ethereum/forks/dao_fork/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/frontier/blocks.py b/src/ethereum/forks/frontier/blocks.py index 57adc6dae6..b9fe4f377f 100644 --- a/src/ethereum/forks/frontier/blocks.py +++ b/src/ethereum/forks/frontier/blocks.py @@ -112,7 +112,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -183,7 +183,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. 
[headers]: ref:ethereum.forks.frontier.blocks.Header """ diff --git a/src/ethereum/forks/frontier/bloom.py b/src/ethereum/forks/frontier/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/frontier/bloom.py +++ b/src/ethereum/forks/frontier/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/frontier/state.py b/src/ethereum/forks/frontier/state.py index 8fb03a07fe..3b6255055d 100644 --- a/src/ethereum/forks/frontier/state.py +++ b/src/ethereum/forks/frontier/state.py @@ -340,7 +340,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -352,7 +352,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -424,7 +424,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -444,7 +444,7 @@ def touch_account(state: State, address: Address) -> None: The current state. address: - The address of the account that need to initialised. + The address of the account that needs to be initialized. """ if not account_exists(state, address): @@ -481,7 +481,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/frontier/trie.py b/src/ethereum/forks/frontier/trie.py index b89039fa90..55cb19a570 100644 --- a/src/ethereum/forks/frontier/trie.py +++ b/src/ethereum/forks/frontier/trie.py @@ -127,7 +127,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/frontier/vm/__init__.py b/src/ethereum/forks/frontier/vm/__init__.py index 69edaa837e..f7db64a826 100644 --- a/src/ethereum/forks/frontier/vm/__init__.py +++ b/src/ethereum/forks/frontier/vm/__init__.py @@ -60,7 +60,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. 
diff --git a/src/ethereum/forks/frontier/vm/instructions/arithmetic.py b/src/ethereum/forks/frontier/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/frontier/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/frontier/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/frontier/vm/instructions/block.py b/src/ethereum/forks/frontier/vm/instructions/block.py index 8b50f1bc07..0edcbf000d 100644 --- a/src/ethereum/forks/frontier/vm/instructions/block.py +++ b/src/ethereum/forks/frontier/vm/instructions/block.py @@ -87,7 +87,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/frontier/vm/instructions/memory.py b/src/ethereum/forks/frontier/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/frontier/vm/instructions/memory.py +++ b/src/ethereum/forks/frontier/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- diff --git a/src/ethereum/forks/frontier/vm/instructions/stack.py b/src/ethereum/forks/frontier/vm/instructions/stack.py index bdf232db6a..0fc0d3fe4b 100644 --- a/src/ethereum/forks/frontier/vm/instructions/stack.py +++ b/src/ethereum/forks/frontier/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. + Pushes an N-byte immediate onto the stack. Parameters ---------- @@ -76,7 +76,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -104,7 +104,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. 
If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/frontier/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/frontier/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/frontier/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/frontier/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/frontier/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/frontier/vm/precompiled_contracts/mapping.py index 33412f4c28..1d44a6f2e3 100644 --- a/src/ethereum/forks/frontier/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/frontier/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/frontier/vm/runtime.py b/src/ethereum/forks/frontier/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/frontier/vm/runtime.py +++ b/src/ethereum/forks/frontier/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/gray_glacier/blocks.py b/src/ethereum/forks/gray_glacier/blocks.py index 2c00953f9f..82303c764c 100644 --- a/src/ethereum/forks/gray_glacier/blocks.py +++ b/src/ethereum/forks/gray_glacier/blocks.py @@ -121,7 +121,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -207,7 +207,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.gray_glacier.blocks.Header """ diff --git a/src/ethereum/forks/gray_glacier/bloom.py b/src/ethereum/forks/gray_glacier/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/gray_glacier/bloom.py +++ b/src/ethereum/forks/gray_glacier/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/gray_glacier/state.py b/src/ethereum/forks/gray_glacier/state.py index 5fbcf920cd..91a0b28a1e 100644 --- a/src/ethereum/forks/gray_glacier/state.py +++ b/src/ethereum/forks/gray_glacier/state.py @@ -367,7 +367,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. 
Parameters ---------- @@ -379,7 +379,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -500,7 +500,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -520,7 +520,7 @@ def touch_account(state: State, address: Address) -> None: The current state. address: - The address of the account that need to initialised. + The address of the account that needs to be initialized. """ if not account_exists(state, address): @@ -557,7 +557,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/gray_glacier/trie.py b/src/ethereum/forks/gray_glacier/trie.py index 4b6297e086..21044c1a07 100644 --- a/src/ethereum/forks/gray_glacier/trie.py +++ b/src/ethereum/forks/gray_glacier/trie.py @@ -128,7 +128,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/gray_glacier/vm/__init__.py b/src/ethereum/forks/gray_glacier/vm/__init__.py index 40ee422fb6..ebbb444829 100644 --- a/src/ethereum/forks/gray_glacier/vm/__init__.py +++ b/src/ethereum/forks/gray_glacier/vm/__init__.py @@ -62,7 +62,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/gray_glacier/vm/gas.py b/src/ethereum/forks/gray_glacier/vm/gas.py index b5c96dea85..4f2dde3b9d 100644 --- a/src/ethereum/forks/gray_glacier/vm/gas.py +++ b/src/ethereum/forks/gray_glacier/vm/gas.py @@ -211,7 +211,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- diff --git a/src/ethereum/forks/gray_glacier/vm/instructions/arithmetic.py b/src/ethereum/forks/gray_glacier/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/gray_glacier/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/gray_glacier/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. 
value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/gray_glacier/vm/instructions/block.py b/src/ethereum/forks/gray_glacier/vm/instructions/block.py index ecbeac10f4..bbf01739ff 100644 --- a/src/ethereum/forks/gray_glacier/vm/instructions/block.py +++ b/src/ethereum/forks/gray_glacier/vm/instructions/block.py @@ -87,7 +87,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/gray_glacier/vm/instructions/environment.py b/src/ethereum/forks/gray_glacier/vm/instructions/environment.py index 1abfaeafa9..2a7b434661 100644 --- a/src/ethereum/forks/gray_glacier/vm/instructions/environment.py +++ b/src/ethereum/forks/gray_glacier/vm/instructions/environment.py @@ -423,7 +423,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/gray_glacier/vm/instructions/memory.py b/src/ethereum/forks/gray_glacier/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/gray_glacier/vm/instructions/memory.py +++ b/src/ethereum/forks/gray_glacier/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- diff --git a/src/ethereum/forks/gray_glacier/vm/instructions/stack.py b/src/ethereum/forks/gray_glacier/vm/instructions/stack.py index bdf232db6a..0fc0d3fe4b 100644 --- a/src/ethereum/forks/gray_glacier/vm/instructions/stack.py +++ b/src/ethereum/forks/gray_glacier/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. + Pushes an N-byte immediate onto the stack. Parameters ---------- @@ -76,7 +76,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -104,7 +104,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. 
If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/gray_glacier/vm/instructions/system.py b/src/ethereum/forks/gray_glacier/vm/instructions/system.py index 2ef31f9595..44ba77a073 100644 --- a/src/ethereum/forks/gray_glacier/vm/instructions/system.py +++ b/src/ethereum/forks/gray_glacier/vm/instructions/system.py @@ -179,8 +179,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/gray_glacier/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/gray_glacier/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/gray_glacier/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/gray_glacier/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/gray_glacier/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/gray_glacier/vm/precompiled_contracts/mapping.py index 827c44cfac..44013f70e8 100644 --- a/src/ethereum/forks/gray_glacier/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/gray_glacier/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/gray_glacier/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/gray_glacier/vm/precompiled_contracts/modexp.py index 462341f69f..755a396dda 100644 --- a/src/ethereum/forks/gray_glacier/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/gray_glacier/vm/precompiled_contracts/modexp.py @@ -23,7 +23,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/gray_glacier/vm/runtime.py b/src/ethereum/forks/gray_glacier/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/gray_glacier/vm/runtime.py +++ b/src/ethereum/forks/gray_glacier/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/homestead/blocks.py b/src/ethereum/forks/homestead/blocks.py index fd671bf00f..2b71f1d74c 100644 --- a/src/ethereum/forks/homestead/blocks.py +++ b/src/ethereum/forks/homestead/blocks.py @@ -112,7 +112,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -183,7 +183,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. 
Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.homestead.blocks.Header """ diff --git a/src/ethereum/forks/homestead/bloom.py b/src/ethereum/forks/homestead/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/homestead/bloom.py +++ b/src/ethereum/forks/homestead/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/homestead/state.py b/src/ethereum/forks/homestead/state.py index 8fb03a07fe..3b6255055d 100644 --- a/src/ethereum/forks/homestead/state.py +++ b/src/ethereum/forks/homestead/state.py @@ -340,7 +340,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -352,7 +352,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -424,7 +424,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -444,7 +444,7 @@ def touch_account(state: State, address: Address) -> None: The current state. address: - The address of the account that need to initialised. + The address of the account that needs to be initialized. """ if not account_exists(state, address): @@ -481,7 +481,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/homestead/trie.py b/src/ethereum/forks/homestead/trie.py index 1a59771898..331802a660 100644 --- a/src/ethereum/forks/homestead/trie.py +++ b/src/ethereum/forks/homestead/trie.py @@ -128,7 +128,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/homestead/vm/__init__.py b/src/ethereum/forks/homestead/vm/__init__.py index 21585c758c..e4739cebde 100644 --- a/src/ethereum/forks/homestead/vm/__init__.py +++ b/src/ethereum/forks/homestead/vm/__init__.py @@ -60,7 +60,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. 
diff --git a/src/ethereum/forks/homestead/vm/instructions/arithmetic.py b/src/ethereum/forks/homestead/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/homestead/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/homestead/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/homestead/vm/instructions/block.py b/src/ethereum/forks/homestead/vm/instructions/block.py index 8b50f1bc07..0edcbf000d 100644 --- a/src/ethereum/forks/homestead/vm/instructions/block.py +++ b/src/ethereum/forks/homestead/vm/instructions/block.py @@ -87,7 +87,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/homestead/vm/instructions/memory.py b/src/ethereum/forks/homestead/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/homestead/vm/instructions/memory.py +++ b/src/ethereum/forks/homestead/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- diff --git a/src/ethereum/forks/homestead/vm/instructions/stack.py b/src/ethereum/forks/homestead/vm/instructions/stack.py index bdf232db6a..0fc0d3fe4b 100644 --- a/src/ethereum/forks/homestead/vm/instructions/stack.py +++ b/src/ethereum/forks/homestead/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. + Pushes an N-byte immediate onto the stack. Parameters ---------- @@ -76,7 +76,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -104,7 +104,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. 
If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/homestead/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/homestead/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/homestead/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/homestead/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/homestead/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/homestead/vm/precompiled_contracts/mapping.py index 33412f4c28..1d44a6f2e3 100644 --- a/src/ethereum/forks/homestead/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/homestead/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/homestead/vm/runtime.py b/src/ethereum/forks/homestead/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/homestead/vm/runtime.py +++ b/src/ethereum/forks/homestead/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/istanbul/blocks.py b/src/ethereum/forks/istanbul/blocks.py index 06dc59ab24..e3f124bcd5 100644 --- a/src/ethereum/forks/istanbul/blocks.py +++ b/src/ethereum/forks/istanbul/blocks.py @@ -112,7 +112,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -183,7 +183,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.istanbul.blocks.Header """ diff --git a/src/ethereum/forks/istanbul/bloom.py b/src/ethereum/forks/istanbul/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/istanbul/bloom.py +++ b/src/ethereum/forks/istanbul/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/istanbul/state.py b/src/ethereum/forks/istanbul/state.py index 5fbcf920cd..91a0b28a1e 100644 --- a/src/ethereum/forks/istanbul/state.py +++ b/src/ethereum/forks/istanbul/state.py @@ -367,7 +367,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. 
Parameters ---------- @@ -379,7 +379,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -500,7 +500,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -520,7 +520,7 @@ def touch_account(state: State, address: Address) -> None: The current state. address: - The address of the account that need to initialised. + The address of the account that needs to be initialized. """ if not account_exists(state, address): @@ -557,7 +557,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/istanbul/trie.py b/src/ethereum/forks/istanbul/trie.py index 74c118fda3..a040432bd7 100644 --- a/src/ethereum/forks/istanbul/trie.py +++ b/src/ethereum/forks/istanbul/trie.py @@ -128,7 +128,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/istanbul/vm/__init__.py b/src/ethereum/forks/istanbul/vm/__init__.py index 6cdd9c9bc7..068648ac36 100644 --- a/src/ethereum/forks/istanbul/vm/__init__.py +++ b/src/ethereum/forks/istanbul/vm/__init__.py @@ -61,7 +61,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/istanbul/vm/gas.py b/src/ethereum/forks/istanbul/vm/gas.py index 93cacf71ab..af180fa654 100644 --- a/src/ethereum/forks/istanbul/vm/gas.py +++ b/src/ethereum/forks/istanbul/vm/gas.py @@ -214,7 +214,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- diff --git a/src/ethereum/forks/istanbul/vm/instructions/arithmetic.py b/src/ethereum/forks/istanbul/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/istanbul/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/istanbul/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. 
value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/istanbul/vm/instructions/block.py b/src/ethereum/forks/istanbul/vm/instructions/block.py index ecbeac10f4..bbf01739ff 100644 --- a/src/ethereum/forks/istanbul/vm/instructions/block.py +++ b/src/ethereum/forks/istanbul/vm/instructions/block.py @@ -87,7 +87,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/istanbul/vm/instructions/environment.py b/src/ethereum/forks/istanbul/vm/instructions/environment.py index a854355eb4..f776671541 100644 --- a/src/ethereum/forks/istanbul/vm/instructions/environment.py +++ b/src/ethereum/forks/istanbul/vm/instructions/environment.py @@ -407,7 +407,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/istanbul/vm/instructions/memory.py b/src/ethereum/forks/istanbul/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/istanbul/vm/instructions/memory.py +++ b/src/ethereum/forks/istanbul/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- diff --git a/src/ethereum/forks/istanbul/vm/instructions/stack.py b/src/ethereum/forks/istanbul/vm/instructions/stack.py index bdf232db6a..0fc0d3fe4b 100644 --- a/src/ethereum/forks/istanbul/vm/instructions/stack.py +++ b/src/ethereum/forks/istanbul/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. + Pushes an N-byte immediate onto the stack. Parameters ---------- @@ -76,7 +76,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -104,7 +104,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. 
If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/istanbul/vm/instructions/system.py b/src/ethereum/forks/istanbul/vm/instructions/system.py index fadd963417..af2527d31c 100644 --- a/src/ethereum/forks/istanbul/vm/instructions/system.py +++ b/src/ethereum/forks/istanbul/vm/instructions/system.py @@ -179,8 +179,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/istanbul/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/istanbul/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/istanbul/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/istanbul/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/istanbul/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/istanbul/vm/precompiled_contracts/mapping.py index 827c44cfac..44013f70e8 100644 --- a/src/ethereum/forks/istanbul/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/istanbul/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/istanbul/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/istanbul/vm/precompiled_contracts/modexp.py index fd0501616c..8ed5c851a7 100644 --- a/src/ethereum/forks/istanbul/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/istanbul/vm/precompiled_contracts/modexp.py @@ -23,7 +23,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/istanbul/vm/runtime.py b/src/ethereum/forks/istanbul/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/istanbul/vm/runtime.py +++ b/src/ethereum/forks/istanbul/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/london/blocks.py b/src/ethereum/forks/london/blocks.py index 3cae46582d..20971a661c 100644 --- a/src/ethereum/forks/london/blocks.py +++ b/src/ethereum/forks/london/blocks.py @@ -121,7 +121,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -207,7 +207,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. 
+ to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.london.blocks.Header """ diff --git a/src/ethereum/forks/london/bloom.py b/src/ethereum/forks/london/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/london/bloom.py +++ b/src/ethereum/forks/london/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/london/state.py b/src/ethereum/forks/london/state.py index 5fbcf920cd..91a0b28a1e 100644 --- a/src/ethereum/forks/london/state.py +++ b/src/ethereum/forks/london/state.py @@ -367,7 +367,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -379,7 +379,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -500,7 +500,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -520,7 +520,7 @@ def touch_account(state: State, address: Address) -> None: The current state. address: - The address of the account that need to initialised. + The address of the account that needs to be initialized. """ if not account_exists(state, address): @@ -557,7 +557,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/london/trie.py b/src/ethereum/forks/london/trie.py index 4e0452c740..0deeb2d3c9 100644 --- a/src/ethereum/forks/london/trie.py +++ b/src/ethereum/forks/london/trie.py @@ -128,7 +128,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/london/vm/__init__.py b/src/ethereum/forks/london/vm/__init__.py index 40ee422fb6..ebbb444829 100644 --- a/src/ethereum/forks/london/vm/__init__.py +++ b/src/ethereum/forks/london/vm/__init__.py @@ -62,7 +62,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. 
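The bloom.py docstring touched above describes log bloom filters only in general terms; the following is a minimal, illustrative sketch of the bit-setting rule those modules implement (three 11-bit indices taken from the keccak-256 hash of each entry), not the fork's own code. It assumes a keccak-256 helper; the third-party `eth_hash` package is used here as a stand-in for the specs' `ethereum.crypto.hash.keccak256`.

from eth_hash.auto import keccak  # assumed stand-in for ethereum.crypto.hash.keccak256


def add_to_bloom(bloom: bytearray, entry: bytes) -> None:
    # A block's logs bloom is a 2048-bit (256-byte) filter; each entry
    # (a log's address or one of its topics) sets three bits in it.
    digest = keccak(entry)
    for idx in (0, 2, 4):
        # The low 11 bits of each of the first three 2-byte pairs of the
        # hash select one bit position in the filter.
        bit_to_set = int.from_bytes(digest[idx : idx + 2], "big") & 0x07FF
        bit_index = 0x07FF - bit_to_set
        bloom[bit_index // 8] |= 1 << (7 - bit_index % 8)


# Usage: one filter per block, fed with every log's address and topics.
bloom = bytearray(256)
add_to_bloom(bloom, b"\x00" * 20)  # e.g. a log's 20-byte address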
diff --git a/src/ethereum/forks/london/vm/gas.py b/src/ethereum/forks/london/vm/gas.py index b5c96dea85..4f2dde3b9d 100644 --- a/src/ethereum/forks/london/vm/gas.py +++ b/src/ethereum/forks/london/vm/gas.py @@ -211,7 +211,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- diff --git a/src/ethereum/forks/london/vm/instructions/arithmetic.py b/src/ethereum/forks/london/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/london/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/london/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/london/vm/instructions/block.py b/src/ethereum/forks/london/vm/instructions/block.py index ecbeac10f4..bbf01739ff 100644 --- a/src/ethereum/forks/london/vm/instructions/block.py +++ b/src/ethereum/forks/london/vm/instructions/block.py @@ -87,7 +87,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/london/vm/instructions/environment.py b/src/ethereum/forks/london/vm/instructions/environment.py index 1abfaeafa9..2a7b434661 100644 --- a/src/ethereum/forks/london/vm/instructions/environment.py +++ b/src/ethereum/forks/london/vm/instructions/environment.py @@ -423,7 +423,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/london/vm/instructions/memory.py b/src/ethereum/forks/london/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/london/vm/instructions/memory.py +++ b/src/ethereum/forks/london/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- diff --git a/src/ethereum/forks/london/vm/instructions/stack.py b/src/ethereum/forks/london/vm/instructions/stack.py index bdf232db6a..0fc0d3fe4b 100644 --- a/src/ethereum/forks/london/vm/instructions/stack.py +++ b/src/ethereum/forks/london/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. 
Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. + Pushes an N-byte immediate onto the stack. Parameters ---------- @@ -76,7 +76,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -104,7 +104,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/london/vm/instructions/system.py b/src/ethereum/forks/london/vm/instructions/system.py index 2ef31f9595..44ba77a073 100644 --- a/src/ethereum/forks/london/vm/instructions/system.py +++ b/src/ethereum/forks/london/vm/instructions/system.py @@ -179,8 +179,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/london/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/london/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/london/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/london/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/london/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/london/vm/precompiled_contracts/mapping.py index 827c44cfac..44013f70e8 100644 --- a/src/ethereum/forks/london/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/london/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/london/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/london/vm/precompiled_contracts/modexp.py index 462341f69f..755a396dda 100644 --- a/src/ethereum/forks/london/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/london/vm/precompiled_contracts/modexp.py @@ -23,7 +23,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/london/vm/runtime.py b/src/ethereum/forks/london/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/london/vm/runtime.py +++ b/src/ethereum/forks/london/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. 
+ Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/muir_glacier/blocks.py b/src/ethereum/forks/muir_glacier/blocks.py index 3f17dc8fd7..05dd03d1c0 100644 --- a/src/ethereum/forks/muir_glacier/blocks.py +++ b/src/ethereum/forks/muir_glacier/blocks.py @@ -112,7 +112,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -183,7 +183,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.muir_glacier.blocks.Header """ diff --git a/src/ethereum/forks/muir_glacier/bloom.py b/src/ethereum/forks/muir_glacier/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/muir_glacier/bloom.py +++ b/src/ethereum/forks/muir_glacier/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/muir_glacier/state.py b/src/ethereum/forks/muir_glacier/state.py index 5fbcf920cd..91a0b28a1e 100644 --- a/src/ethereum/forks/muir_glacier/state.py +++ b/src/ethereum/forks/muir_glacier/state.py @@ -367,7 +367,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -379,7 +379,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -500,7 +500,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -520,7 +520,7 @@ def touch_account(state: State, address: Address) -> None: The current state. address: - The address of the account that need to initialised. + The address of the account that needs to be initialized. """ if not account_exists(state, address): @@ -557,7 +557,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/muir_glacier/trie.py b/src/ethereum/forks/muir_glacier/trie.py index 4b02439003..70e0ba33c2 100644 --- a/src/ethereum/forks/muir_glacier/trie.py +++ b/src/ethereum/forks/muir_glacier/trie.py @@ -128,7 +128,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. 
The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/muir_glacier/vm/__init__.py b/src/ethereum/forks/muir_glacier/vm/__init__.py index 6cdd9c9bc7..068648ac36 100644 --- a/src/ethereum/forks/muir_glacier/vm/__init__.py +++ b/src/ethereum/forks/muir_glacier/vm/__init__.py @@ -61,7 +61,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/muir_glacier/vm/gas.py b/src/ethereum/forks/muir_glacier/vm/gas.py index 93cacf71ab..af180fa654 100644 --- a/src/ethereum/forks/muir_glacier/vm/gas.py +++ b/src/ethereum/forks/muir_glacier/vm/gas.py @@ -214,7 +214,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- diff --git a/src/ethereum/forks/muir_glacier/vm/instructions/arithmetic.py b/src/ethereum/forks/muir_glacier/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/muir_glacier/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/muir_glacier/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/muir_glacier/vm/instructions/block.py b/src/ethereum/forks/muir_glacier/vm/instructions/block.py index ecbeac10f4..bbf01739ff 100644 --- a/src/ethereum/forks/muir_glacier/vm/instructions/block.py +++ b/src/ethereum/forks/muir_glacier/vm/instructions/block.py @@ -87,7 +87,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/muir_glacier/vm/instructions/environment.py b/src/ethereum/forks/muir_glacier/vm/instructions/environment.py index a854355eb4..f776671541 100644 --- a/src/ethereum/forks/muir_glacier/vm/instructions/environment.py +++ b/src/ethereum/forks/muir_glacier/vm/instructions/environment.py @@ -407,7 +407,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. 
Parameters ---------- diff --git a/src/ethereum/forks/muir_glacier/vm/instructions/memory.py b/src/ethereum/forks/muir_glacier/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/muir_glacier/vm/instructions/memory.py +++ b/src/ethereum/forks/muir_glacier/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- diff --git a/src/ethereum/forks/muir_glacier/vm/instructions/stack.py b/src/ethereum/forks/muir_glacier/vm/instructions/stack.py index bdf232db6a..0fc0d3fe4b 100644 --- a/src/ethereum/forks/muir_glacier/vm/instructions/stack.py +++ b/src/ethereum/forks/muir_glacier/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. + Pushes an N-byte immediate onto the stack. Parameters ---------- @@ -76,7 +76,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -104,7 +104,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/muir_glacier/vm/instructions/system.py b/src/ethereum/forks/muir_glacier/vm/instructions/system.py index fadd963417..af2527d31c 100644 --- a/src/ethereum/forks/muir_glacier/vm/instructions/system.py +++ b/src/ethereum/forks/muir_glacier/vm/instructions/system.py @@ -179,8 +179,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/muir_glacier/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/muir_glacier/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/muir_glacier/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/muir_glacier/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. 
""" from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/muir_glacier/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/muir_glacier/vm/precompiled_contracts/mapping.py index 827c44cfac..44013f70e8 100644 --- a/src/ethereum/forks/muir_glacier/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/muir_glacier/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/muir_glacier/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/muir_glacier/vm/precompiled_contracts/modexp.py index fd0501616c..8ed5c851a7 100644 --- a/src/ethereum/forks/muir_glacier/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/muir_glacier/vm/precompiled_contracts/modexp.py @@ -23,7 +23,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/muir_glacier/vm/runtime.py b/src/ethereum/forks/muir_glacier/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/muir_glacier/vm/runtime.py +++ b/src/ethereum/forks/muir_glacier/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/osaka/blocks.py b/src/ethereum/forks/osaka/blocks.py index fbbeeff15e..857570d58c 100644 --- a/src/ethereum/forks/osaka/blocks.py +++ b/src/ethereum/forks/osaka/blocks.py @@ -152,7 +152,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -272,7 +272,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.osaka.blocks.Header """ diff --git a/src/ethereum/forks/osaka/bloom.py b/src/ethereum/forks/osaka/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/osaka/bloom.py +++ b/src/ethereum/forks/osaka/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/osaka/requests.py b/src/ethereum/forks/osaka/requests.py index 929e973e58..542c9264d3 100644 --- a/src/ethereum/forks/osaka/requests.py +++ b/src/ethereum/forks/osaka/requests.py @@ -1,5 +1,5 @@ """ -Requests were introduced in EIP-7685 as a a general purpose framework for +Requests were introduced in EIP-7685 as a general purpose framework for storing contract-triggered requests. It extends the execution header and body with a single field each to store the request information. 
This inherently exposes the requests to the consensus layer, which can diff --git a/src/ethereum/forks/osaka/state.py b/src/ethereum/forks/osaka/state.py index e997411f6d..6571aa05c6 100644 --- a/src/ethereum/forks/osaka/state.py +++ b/src/ethereum/forks/osaka/state.py @@ -400,7 +400,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -412,7 +412,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -517,7 +517,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -557,7 +557,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/osaka/trie.py b/src/ethereum/forks/osaka/trie.py index bde4318a64..fea8e0ece4 100644 --- a/src/ethereum/forks/osaka/trie.py +++ b/src/ethereum/forks/osaka/trie.py @@ -138,7 +138,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/osaka/vm/__init__.py b/src/ethereum/forks/osaka/vm/__init__.py index b2a8c5e2b9..641695ea0e 100644 --- a/src/ethereum/forks/osaka/vm/__init__.py +++ b/src/ethereum/forks/osaka/vm/__init__.py @@ -63,7 +63,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/osaka/vm/eoa_delegation.py b/src/ethereum/forks/osaka/vm/eoa_delegation.py index 0913fa63ff..e6dd0f1201 100644 --- a/src/ethereum/forks/osaka/vm/eoa_delegation.py +++ b/src/ethereum/forks/osaka/vm/eoa_delegation.py @@ -158,8 +158,6 @@ def set_delegation(message: Message) -> U256: ---------- message : Transaction specific items. - env : - External items required for EVM execution. Returns ------- diff --git a/src/ethereum/forks/osaka/vm/gas.py b/src/ethereum/forks/osaka/vm/gas.py index 62118f4c6a..042abe0a53 100644 --- a/src/ethereum/forks/osaka/vm/gas.py +++ b/src/ethereum/forks/osaka/vm/gas.py @@ -232,7 +232,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). 
Returns ------- @@ -288,7 +288,7 @@ def init_code_cost(init_code_length: Uint) -> Uint: def calculate_excess_blob_gas(parent_header: Header) -> U64: """ - Calculated the excess blob gas for the current block based + Calculates the excess blob gas for the current block based on the gas used in the parent block. Parameters diff --git a/src/ethereum/forks/osaka/vm/instructions/arithmetic.py b/src/ethereum/forks/osaka/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/osaka/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/osaka/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/osaka/vm/instructions/block.py b/src/ethereum/forks/osaka/vm/instructions/block.py index e33f26aea4..43be9e58e2 100644 --- a/src/ethereum/forks/osaka/vm/instructions/block.py +++ b/src/ethereum/forks/osaka/vm/instructions/block.py @@ -101,7 +101,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/osaka/vm/instructions/environment.py b/src/ethereum/forks/osaka/vm/instructions/environment.py index 8369043465..28c595ee51 100644 --- a/src/ethereum/forks/osaka/vm/instructions/environment.py +++ b/src/ethereum/forks/osaka/vm/instructions/environment.py @@ -426,7 +426,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/osaka/vm/instructions/memory.py b/src/ethereum/forks/osaka/vm/instructions/memory.py index 631d33a7fd..6e111051ee 100644 --- a/src/ethereum/forks/osaka/vm/instructions/memory.py +++ b/src/ethereum/forks/osaka/vm/instructions/memory.py @@ -93,7 +93,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -123,7 +123,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- @@ -146,7 +146,7 @@ def msize(evm: Evm) -> None: def mcopy(evm: Evm) -> None: """ - Copy the bytes in memory from one location to another. + Copies the bytes in memory from one location to another. 
Parameters ---------- diff --git a/src/ethereum/forks/osaka/vm/instructions/stack.py b/src/ethereum/forks/osaka/vm/instructions/stack.py index e381b52c37..0007a28acd 100644 --- a/src/ethereum/forks/osaka/vm/instructions/stack.py +++ b/src/ethereum/forks/osaka/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. Push zero if num_bytes is zero. + Pushes an N-byte immediate onto the stack. Push zero if num_bytes is zero. Parameters ---------- @@ -79,7 +79,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -107,7 +107,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/osaka/vm/instructions/system.py b/src/ethereum/forks/osaka/vm/instructions/system.py index fea7a0c1b9..44c36ae000 100644 --- a/src/ethereum/forks/osaka/vm/instructions/system.py +++ b/src/ethereum/forks/osaka/vm/instructions/system.py @@ -193,8 +193,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/osaka/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py b/src/ethereum/forks/osaka/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py index 6cb29a32fd..c7a62cb49c 100644 --- a/src/ethereum/forks/osaka/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py +++ b/src/ethereum/forks/osaka/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py @@ -33,7 +33,7 @@ def bls12_pairing(evm: Evm) -> None: Raises ------ InvalidParameter - If the input length is invalid or if sub-group check fails. + If the input length is invalid or if the subgroup check fails. 
""" data = evm.message.data @@ -54,12 +54,12 @@ def bls12_pairing(evm: Evm) -> None: g1_slice = data[g1_start : g1_start + Uint(128)] g1_point = bytes_to_g1(bytes(g1_slice)) if not is_inf(bls12_multiply(g1_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G1 point.") + raise InvalidParameter("Subgroup check failed for G1 point.") g2_slice = data[g2_start : g2_start + Uint(256)] g2_point = bytes_to_g2(bytes(g2_slice)) if not is_inf(bls12_multiply(g2_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G2 point.") + raise InvalidParameter("Subgroup check failed for G2 point.") result *= pairing(g2_point, g1_point) diff --git a/src/ethereum/forks/osaka/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/osaka/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/osaka/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/osaka/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/osaka/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/osaka/vm/precompiled_contracts/mapping.py index a80e2b0235..7486203c3e 100644 --- a/src/ethereum/forks/osaka/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/osaka/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/osaka/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/osaka/vm/precompiled_contracts/modexp.py index 5e7e895b91..bf828ee8f6 100644 --- a/src/ethereum/forks/osaka/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/osaka/vm/precompiled_contracts/modexp.py @@ -22,7 +22,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/osaka/vm/precompiled_contracts/p256verify.py b/src/ethereum/forks/osaka/vm/precompiled_contracts/p256verify.py index 6f6e7ff4e9..106dd548a6 100644 --- a/src/ethereum/forks/osaka/vm/precompiled_contracts/p256verify.py +++ b/src/ethereum/forks/osaka/vm/precompiled_contracts/p256verify.py @@ -5,9 +5,10 @@ :backlinks: none :local: -Introduction. +Introduction ------------ -Implementation of the P256VERIFY precompiled contract. + +Implementation of the `P256VERIFY` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/osaka/vm/precompiled_contracts/point_evaluation.py b/src/ethereum/forks/osaka/vm/precompiled_contracts/point_evaluation.py index 760af47736..ce4eed7129 100644 --- a/src/ethereum/forks/osaka/vm/precompiled_contracts/point_evaluation.py +++ b/src/ethereum/forks/osaka/vm/precompiled_contracts/point_evaluation.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the POINT EVALUATION precompiled contract. +Implementation of the `POINT EVALUATION` precompiled contract. 
""" from ethereum_types.bytes import Bytes, Bytes32, Bytes48 diff --git a/src/ethereum/forks/osaka/vm/runtime.py b/src/ethereum/forks/osaka/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/osaka/vm/runtime.py +++ b/src/ethereum/forks/osaka/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/paris/blocks.py b/src/ethereum/forks/paris/blocks.py index 4ad446c946..af1f4e4777 100644 --- a/src/ethereum/forks/paris/blocks.py +++ b/src/ethereum/forks/paris/blocks.py @@ -117,7 +117,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -194,7 +194,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.paris.blocks.Header """ diff --git a/src/ethereum/forks/paris/bloom.py b/src/ethereum/forks/paris/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/paris/bloom.py +++ b/src/ethereum/forks/paris/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/paris/state.py b/src/ethereum/forks/paris/state.py index ec753edd33..8db1566f56 100644 --- a/src/ethereum/forks/paris/state.py +++ b/src/ethereum/forks/paris/state.py @@ -367,7 +367,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -379,7 +379,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -484,7 +484,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -524,7 +524,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/paris/trie.py b/src/ethereum/forks/paris/trie.py index 0217f83204..38db711ff4 100644 --- a/src/ethereum/forks/paris/trie.py +++ b/src/ethereum/forks/paris/trie.py @@ -128,7 +128,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. 
The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/paris/vm/__init__.py b/src/ethereum/forks/paris/vm/__init__.py index 1bac4ee213..22f11d4822 100644 --- a/src/ethereum/forks/paris/vm/__init__.py +++ b/src/ethereum/forks/paris/vm/__init__.py @@ -61,7 +61,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/paris/vm/gas.py b/src/ethereum/forks/paris/vm/gas.py index b5c96dea85..4f2dde3b9d 100644 --- a/src/ethereum/forks/paris/vm/gas.py +++ b/src/ethereum/forks/paris/vm/gas.py @@ -211,7 +211,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- diff --git a/src/ethereum/forks/paris/vm/instructions/arithmetic.py b/src/ethereum/forks/paris/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/paris/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/paris/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/paris/vm/instructions/block.py b/src/ethereum/forks/paris/vm/instructions/block.py index 098a23386a..3aa1fb2f6b 100644 --- a/src/ethereum/forks/paris/vm/instructions/block.py +++ b/src/ethereum/forks/paris/vm/instructions/block.py @@ -101,7 +101,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/paris/vm/instructions/environment.py b/src/ethereum/forks/paris/vm/instructions/environment.py index 1abfaeafa9..2a7b434661 100644 --- a/src/ethereum/forks/paris/vm/instructions/environment.py +++ b/src/ethereum/forks/paris/vm/instructions/environment.py @@ -423,7 +423,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. 
Parameters ---------- diff --git a/src/ethereum/forks/paris/vm/instructions/memory.py b/src/ethereum/forks/paris/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/paris/vm/instructions/memory.py +++ b/src/ethereum/forks/paris/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- diff --git a/src/ethereum/forks/paris/vm/instructions/stack.py b/src/ethereum/forks/paris/vm/instructions/stack.py index bdf232db6a..0fc0d3fe4b 100644 --- a/src/ethereum/forks/paris/vm/instructions/stack.py +++ b/src/ethereum/forks/paris/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. + Pushes an N-byte immediate onto the stack. Parameters ---------- @@ -76,7 +76,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -104,7 +104,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/paris/vm/instructions/system.py b/src/ethereum/forks/paris/vm/instructions/system.py index a1d4ba48e1..f2af30a7ee 100644 --- a/src/ethereum/forks/paris/vm/instructions/system.py +++ b/src/ethereum/forks/paris/vm/instructions/system.py @@ -178,8 +178,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/paris/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/paris/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/paris/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/paris/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/paris/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/paris/vm/precompiled_contracts/mapping.py index 827c44cfac..44013f70e8 100644 --- a/src/ethereum/forks/paris/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/paris/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. 
+Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/paris/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/paris/vm/precompiled_contracts/modexp.py index 462341f69f..755a396dda 100644 --- a/src/ethereum/forks/paris/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/paris/vm/precompiled_contracts/modexp.py @@ -23,7 +23,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/paris/vm/runtime.py b/src/ethereum/forks/paris/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/paris/vm/runtime.py +++ b/src/ethereum/forks/paris/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/prague/blocks.py b/src/ethereum/forks/prague/blocks.py index 54cec5306b..33d7bedfef 100644 --- a/src/ethereum/forks/prague/blocks.py +++ b/src/ethereum/forks/prague/blocks.py @@ -152,7 +152,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -272,7 +272,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.prague.blocks.Header """ diff --git a/src/ethereum/forks/prague/bloom.py b/src/ethereum/forks/prague/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/prague/bloom.py +++ b/src/ethereum/forks/prague/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/prague/state.py b/src/ethereum/forks/prague/state.py index e997411f6d..6571aa05c6 100644 --- a/src/ethereum/forks/prague/state.py +++ b/src/ethereum/forks/prague/state.py @@ -400,7 +400,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -412,7 +412,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -517,7 +517,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. 
""" @@ -557,7 +557,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/prague/trie.py b/src/ethereum/forks/prague/trie.py index e3265a7273..ac266a60f9 100644 --- a/src/ethereum/forks/prague/trie.py +++ b/src/ethereum/forks/prague/trie.py @@ -138,7 +138,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/prague/vm/__init__.py b/src/ethereum/forks/prague/vm/__init__.py index b2a8c5e2b9..641695ea0e 100644 --- a/src/ethereum/forks/prague/vm/__init__.py +++ b/src/ethereum/forks/prague/vm/__init__.py @@ -63,7 +63,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/prague/vm/eoa_delegation.py b/src/ethereum/forks/prague/vm/eoa_delegation.py index 29909b5fa5..bc96f4c327 100644 --- a/src/ethereum/forks/prague/vm/eoa_delegation.py +++ b/src/ethereum/forks/prague/vm/eoa_delegation.py @@ -157,8 +157,6 @@ def set_delegation(message: Message) -> U256: ---------- message : Transaction specific items. - env : - External items required for EVM execution. Returns ------- diff --git a/src/ethereum/forks/prague/vm/gas.py b/src/ethereum/forks/prague/vm/gas.py index 8bb3c042cb..e713334056 100644 --- a/src/ethereum/forks/prague/vm/gas.py +++ b/src/ethereum/forks/prague/vm/gas.py @@ -228,7 +228,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- @@ -284,7 +284,7 @@ def init_code_cost(init_code_length: Uint) -> Uint: def calculate_excess_blob_gas(parent_header: Header) -> U64: """ - Calculated the excess blob gas for the current block based + Calculates the excess blob gas for the current block based on the gas used in the parent block. Parameters diff --git a/src/ethereum/forks/prague/vm/instructions/arithmetic.py b/src/ethereum/forks/prague/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/prague/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/prague/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. 
diff --git a/src/ethereum/forks/prague/vm/instructions/block.py b/src/ethereum/forks/prague/vm/instructions/block.py index ddaa221bfc..b3bfb500a0 100644 --- a/src/ethereum/forks/prague/vm/instructions/block.py +++ b/src/ethereum/forks/prague/vm/instructions/block.py @@ -101,7 +101,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/prague/vm/instructions/environment.py b/src/ethereum/forks/prague/vm/instructions/environment.py index 8369043465..28c595ee51 100644 --- a/src/ethereum/forks/prague/vm/instructions/environment.py +++ b/src/ethereum/forks/prague/vm/instructions/environment.py @@ -426,7 +426,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/prague/vm/instructions/memory.py b/src/ethereum/forks/prague/vm/instructions/memory.py index 631d33a7fd..6e111051ee 100644 --- a/src/ethereum/forks/prague/vm/instructions/memory.py +++ b/src/ethereum/forks/prague/vm/instructions/memory.py @@ -93,7 +93,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -123,7 +123,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- @@ -146,7 +146,7 @@ def msize(evm: Evm) -> None: def mcopy(evm: Evm) -> None: """ - Copy the bytes in memory from one location to another. + Copies the bytes in memory from one location to another. Parameters ---------- diff --git a/src/ethereum/forks/prague/vm/instructions/stack.py b/src/ethereum/forks/prague/vm/instructions/stack.py index e381b52c37..0007a28acd 100644 --- a/src/ethereum/forks/prague/vm/instructions/stack.py +++ b/src/ethereum/forks/prague/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. Push zero if num_bytes is zero. + Pushes an N-byte immediate onto the stack. Push zero if num_bytes is zero. Parameters ---------- @@ -79,7 +79,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -107,7 +107,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. 
If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/prague/vm/instructions/system.py b/src/ethereum/forks/prague/vm/instructions/system.py index fea7a0c1b9..44c36ae000 100644 --- a/src/ethereum/forks/prague/vm/instructions/system.py +++ b/src/ethereum/forks/prague/vm/instructions/system.py @@ -193,8 +193,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. Parameters ---------- diff --git a/src/ethereum/forks/prague/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py b/src/ethereum/forks/prague/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py index 6cb29a32fd..c7a62cb49c 100644 --- a/src/ethereum/forks/prague/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py +++ b/src/ethereum/forks/prague/vm/precompiled_contracts/bls12_381/bls12_381_pairing.py @@ -33,7 +33,7 @@ def bls12_pairing(evm: Evm) -> None: Raises ------ InvalidParameter - If the input length is invalid or if sub-group check fails. + If the input length is invalid or if the subgroup check fails. """ data = evm.message.data @@ -54,12 +54,12 @@ def bls12_pairing(evm: Evm) -> None: g1_slice = data[g1_start : g1_start + Uint(128)] g1_point = bytes_to_g1(bytes(g1_slice)) if not is_inf(bls12_multiply(g1_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G1 point.") + raise InvalidParameter("Subgroup check failed for G1 point.") g2_slice = data[g2_start : g2_start + Uint(256)] g2_point = bytes_to_g2(bytes(g2_slice)) if not is_inf(bls12_multiply(g2_point, curve_order)): - raise InvalidParameter("Sub-group check failed for G2 point.") + raise InvalidParameter("Subgroup check failed for G2 point.") result *= pairing(g2_point, g1_point) diff --git a/src/ethereum/forks/prague/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/prague/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/prague/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/prague/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/prague/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/prague/vm/precompiled_contracts/mapping.py index 91ae92faa5..818665d0a4 100644 --- a/src/ethereum/forks/prague/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/prague/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/prague/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/prague/vm/precompiled_contracts/modexp.py index 462341f69f..755a396dda 100644 --- a/src/ethereum/forks/prague/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/prague/vm/precompiled_contracts/modexp.py @@ -23,7 +23,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. 
The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/prague/vm/precompiled_contracts/point_evaluation.py b/src/ethereum/forks/prague/vm/precompiled_contracts/point_evaluation.py index 760af47736..ce4eed7129 100644 --- a/src/ethereum/forks/prague/vm/precompiled_contracts/point_evaluation.py +++ b/src/ethereum/forks/prague/vm/precompiled_contracts/point_evaluation.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the POINT EVALUATION precompiled contract. +Implementation of the `POINT EVALUATION` precompiled contract. """ from ethereum_types.bytes import Bytes, Bytes32, Bytes48 diff --git a/src/ethereum/forks/prague/vm/runtime.py b/src/ethereum/forks/prague/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/prague/vm/runtime.py +++ b/src/ethereum/forks/prague/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/shanghai/blocks.py b/src/ethereum/forks/shanghai/blocks.py index 74ecc48336..aed53e2a4b 100644 --- a/src/ethereum/forks/shanghai/blocks.py +++ b/src/ethereum/forks/shanghai/blocks.py @@ -150,7 +150,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -237,7 +237,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.shanghai.blocks.Header """ diff --git a/src/ethereum/forks/shanghai/bloom.py b/src/ethereum/forks/shanghai/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/shanghai/bloom.py +++ b/src/ethereum/forks/shanghai/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/shanghai/state.py b/src/ethereum/forks/shanghai/state.py index ec753edd33..8db1566f56 100644 --- a/src/ethereum/forks/shanghai/state.py +++ b/src/ethereum/forks/shanghai/state.py @@ -367,7 +367,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -379,7 +379,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -484,7 +484,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. 
""" @@ -524,7 +524,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/shanghai/trie.py b/src/ethereum/forks/shanghai/trie.py index ef363d889b..9d4c25986c 100644 --- a/src/ethereum/forks/shanghai/trie.py +++ b/src/ethereum/forks/shanghai/trie.py @@ -138,7 +138,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/shanghai/vm/__init__.py b/src/ethereum/forks/shanghai/vm/__init__.py index ddff78c537..e754cd3aa1 100644 --- a/src/ethereum/forks/shanghai/vm/__init__.py +++ b/src/ethereum/forks/shanghai/vm/__init__.py @@ -61,7 +61,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/shanghai/vm/gas.py b/src/ethereum/forks/shanghai/vm/gas.py index c3681112de..e345cad4fd 100644 --- a/src/ethereum/forks/shanghai/vm/gas.py +++ b/src/ethereum/forks/shanghai/vm/gas.py @@ -212,7 +212,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- diff --git a/src/ethereum/forks/shanghai/vm/instructions/arithmetic.py b/src/ethereum/forks/shanghai/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/shanghai/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/shanghai/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/shanghai/vm/instructions/block.py b/src/ethereum/forks/shanghai/vm/instructions/block.py index d9d8357b33..de2569869b 100644 --- a/src/ethereum/forks/shanghai/vm/instructions/block.py +++ b/src/ethereum/forks/shanghai/vm/instructions/block.py @@ -101,7 +101,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. 
diff --git a/src/ethereum/forks/shanghai/vm/instructions/environment.py b/src/ethereum/forks/shanghai/vm/instructions/environment.py index 1abfaeafa9..2a7b434661 100644 --- a/src/ethereum/forks/shanghai/vm/instructions/environment.py +++ b/src/ethereum/forks/shanghai/vm/instructions/environment.py @@ -423,7 +423,7 @@ def returndatasize(evm: Evm) -> None: def returndatacopy(evm: Evm) -> None: """ - Copies data from the return data buffer code to memory. + Copies data from the return data buffer to memory. Parameters ---------- diff --git a/src/ethereum/forks/shanghai/vm/instructions/memory.py b/src/ethereum/forks/shanghai/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/shanghai/vm/instructions/memory.py +++ b/src/ethereum/forks/shanghai/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- diff --git a/src/ethereum/forks/shanghai/vm/instructions/stack.py b/src/ethereum/forks/shanghai/vm/instructions/stack.py index e381b52c37..0007a28acd 100644 --- a/src/ethereum/forks/shanghai/vm/instructions/stack.py +++ b/src/ethereum/forks/shanghai/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. Push zero if num_bytes is zero. + Pushes an N-byte immediate onto the stack. Push zero if num_bytes is zero. Parameters ---------- @@ -79,7 +79,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -107,7 +107,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/shanghai/vm/instructions/system.py b/src/ethereum/forks/shanghai/vm/instructions/system.py index 2b6382ad2b..0a1e3244b2 100644 --- a/src/ethereum/forks/shanghai/vm/instructions/system.py +++ b/src/ethereum/forks/shanghai/vm/instructions/system.py @@ -190,8 +190,8 @@ def create2(evm: Evm) -> None: """ Creates a new account with associated code. - It's similar to CREATE opcode except that the address of new account - depends on the init_code instead of the nonce of sender. + It's similar to the CREATE opcode except that the address of the new + account depends on the init_code instead of the nonce of sender. 
Parameters ---------- diff --git a/src/ethereum/forks/shanghai/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/shanghai/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/shanghai/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/shanghai/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/shanghai/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/shanghai/vm/precompiled_contracts/mapping.py index 827c44cfac..44013f70e8 100644 --- a/src/ethereum/forks/shanghai/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/shanghai/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/shanghai/vm/precompiled_contracts/modexp.py b/src/ethereum/forks/shanghai/vm/precompiled_contracts/modexp.py index 462341f69f..755a396dda 100644 --- a/src/ethereum/forks/shanghai/vm/precompiled_contracts/modexp.py +++ b/src/ethereum/forks/shanghai/vm/precompiled_contracts/modexp.py @@ -23,7 +23,7 @@ def modexp(evm: Evm) -> None: """ - Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and. + Calculates `(base**exp) % modulus` for arbitrary sized `base`, `exp` and `modulus`. The return value is the same length as the modulus. """ data = evm.message.data diff --git a/src/ethereum/forks/shanghai/vm/runtime.py b/src/ethereum/forks/shanghai/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/shanghai/vm/runtime.py +++ b/src/ethereum/forks/shanghai/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/spurious_dragon/blocks.py b/src/ethereum/forks/spurious_dragon/blocks.py index b0b15081d8..507546a8f9 100644 --- a/src/ethereum/forks/spurious_dragon/blocks.py +++ b/src/ethereum/forks/spurious_dragon/blocks.py @@ -112,7 +112,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -183,7 +183,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.spurious_dragon.blocks.Header """ diff --git a/src/ethereum/forks/spurious_dragon/bloom.py b/src/ethereum/forks/spurious_dragon/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/spurious_dragon/bloom.py +++ b/src/ethereum/forks/spurious_dragon/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. 
Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/spurious_dragon/state.py b/src/ethereum/forks/spurious_dragon/state.py index 2e21b4f814..a0dd7d77ab 100644 --- a/src/ethereum/forks/spurious_dragon/state.py +++ b/src/ethereum/forks/spurious_dragon/state.py @@ -340,7 +340,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. + Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -352,7 +352,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -473,7 +473,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -493,7 +493,7 @@ def touch_account(state: State, address: Address) -> None: The current state. address: - The address of the account that need to initialised. + The address of the account that needs to be initialized. """ if not account_exists(state, address): @@ -530,7 +530,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/spurious_dragon/trie.py b/src/ethereum/forks/spurious_dragon/trie.py index d603f5ee41..fce2ff05bf 100644 --- a/src/ethereum/forks/spurious_dragon/trie.py +++ b/src/ethereum/forks/spurious_dragon/trie.py @@ -128,7 +128,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/spurious_dragon/vm/__init__.py b/src/ethereum/forks/spurious_dragon/vm/__init__.py index 1e3c0b3767..2ab52236b6 100644 --- a/src/ethereum/forks/spurious_dragon/vm/__init__.py +++ b/src/ethereum/forks/spurious_dragon/vm/__init__.py @@ -61,7 +61,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/spurious_dragon/vm/gas.py b/src/ethereum/forks/spurious_dragon/vm/gas.py index af0da82f6e..9bc7122f91 100644 --- a/src/ethereum/forks/spurious_dragon/vm/gas.py +++ b/src/ethereum/forks/spurious_dragon/vm/gas.py @@ -210,7 +210,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). 
Returns ------- diff --git a/src/ethereum/forks/spurious_dragon/vm/instructions/arithmetic.py b/src/ethereum/forks/spurious_dragon/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/spurious_dragon/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/spurious_dragon/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/spurious_dragon/vm/instructions/block.py b/src/ethereum/forks/spurious_dragon/vm/instructions/block.py index 8b50f1bc07..0edcbf000d 100644 --- a/src/ethereum/forks/spurious_dragon/vm/instructions/block.py +++ b/src/ethereum/forks/spurious_dragon/vm/instructions/block.py @@ -87,7 +87,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/spurious_dragon/vm/instructions/memory.py b/src/ethereum/forks/spurious_dragon/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/spurious_dragon/vm/instructions/memory.py +++ b/src/ethereum/forks/spurious_dragon/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- diff --git a/src/ethereum/forks/spurious_dragon/vm/instructions/stack.py b/src/ethereum/forks/spurious_dragon/vm/instructions/stack.py index bdf232db6a..0fc0d3fe4b 100644 --- a/src/ethereum/forks/spurious_dragon/vm/instructions/stack.py +++ b/src/ethereum/forks/spurious_dragon/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. + Pushes an N-byte immediate onto the stack. Parameters ---------- @@ -76,7 +76,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -104,7 +104,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. 
If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/spurious_dragon/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/spurious_dragon/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/spurious_dragon/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/spurious_dragon/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. +Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/spurious_dragon/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/spurious_dragon/vm/precompiled_contracts/mapping.py index 33412f4c28..1d44a6f2e3 100644 --- a/src/ethereum/forks/spurious_dragon/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/spurious_dragon/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/spurious_dragon/vm/runtime.py b/src/ethereum/forks/spurious_dragon/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/spurious_dragon/vm/runtime.py +++ b/src/ethereum/forks/spurious_dragon/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum/forks/tangerine_whistle/blocks.py b/src/ethereum/forks/tangerine_whistle/blocks.py index 310d2bfe3d..664418b9f7 100644 --- a/src/ethereum/forks/tangerine_whistle/blocks.py +++ b/src/ethereum/forks/tangerine_whistle/blocks.py @@ -112,7 +112,7 @@ class Header: number: Uint """ - Block number, (height) in the chain. + Block number (height) in the chain. """ gas_limit: Uint @@ -183,7 +183,7 @@ class Block: header: Header """ The block header containing metadata and cryptographic commitments. Refer - [headers] for more details on the fields included in the header. + to [headers] for more details on the fields included in the header. [headers]: ref:ethereum.forks.tangerine_whistle.blocks.Header """ diff --git a/src/ethereum/forks/tangerine_whistle/bloom.py b/src/ethereum/forks/tangerine_whistle/bloom.py index 8a12ec081d..0e079df6b3 100644 --- a/src/ethereum/forks/tangerine_whistle/bloom.py +++ b/src/ethereum/forks/tangerine_whistle/bloom.py @@ -8,7 +8,7 @@ Introduction ------------ -This modules defines functions for calculating bloom filters of logs. For the +This module defines functions for calculating bloom filters of logs. For the general theory of bloom filters see e.g. `Wikipedia `_. Bloom filters are used to allow for efficient searching of logs by address and/or topic, by rapidly diff --git a/src/ethereum/forks/tangerine_whistle/state.py b/src/ethereum/forks/tangerine_whistle/state.py index 8fb03a07fe..3b6255055d 100644 --- a/src/ethereum/forks/tangerine_whistle/state.py +++ b/src/ethereum/forks/tangerine_whistle/state.py @@ -340,7 +340,7 @@ def account_exists(state: State, address: Address) -> bool: def account_has_code_or_nonce(state: State, address: Address) -> bool: """ - Checks if an account has non zero nonce or non empty code. 
+ Checks if an account has non-zero nonce or non-empty code. Parameters ---------- @@ -352,7 +352,7 @@ def account_has_code_or_nonce(state: State, address: Address) -> bool: Returns ------- has_code_or_nonce : `bool` - True if the account has non zero nonce or non empty code, + True if the account has non-zero nonce or non-empty code, False otherwise. """ @@ -424,7 +424,7 @@ def set_account_balance(state: State, address: Address, amount: U256) -> None: Address of the account whose nonce needs to be incremented. amount: - The amount that needs to set in balance. + The amount that needs to be set in the balance. """ @@ -444,7 +444,7 @@ def touch_account(state: State, address: Address) -> None: The current state. address: - The address of the account that need to initialised. + The address of the account that needs to be initialized. """ if not account_exists(state, address): @@ -481,7 +481,7 @@ def set_code(state: State, address: Address, code: Bytes) -> None: The current state. address: - Address of the account whose code needs to be update. + Address of the account whose code needs to be updated. code: The bytecode that needs to be set. diff --git a/src/ethereum/forks/tangerine_whistle/trie.py b/src/ethereum/forks/tangerine_whistle/trie.py index 77d2c16983..a758051941 100644 --- a/src/ethereum/forks/tangerine_whistle/trie.py +++ b/src/ethereum/forks/tangerine_whistle/trie.py @@ -128,7 +128,7 @@ class BranchNode: def encode_internal_node(node: Optional[InternalNode]) -> Extended: """ Encodes a Merkle Trie node into its RLP form. The RLP will then be - serialized into a `Bytes` and hashed unless it is less that 32 bytes + serialized into a `Bytes` and hashed unless it is less than 32 bytes when serialized. This function also accepts `None`, representing the absence of a node, diff --git a/src/ethereum/forks/tangerine_whistle/vm/__init__.py b/src/ethereum/forks/tangerine_whistle/vm/__init__.py index 21585c758c..e4739cebde 100644 --- a/src/ethereum/forks/tangerine_whistle/vm/__init__.py +++ b/src/ethereum/forks/tangerine_whistle/vm/__init__.py @@ -60,7 +60,7 @@ class BlockOutput: receipts_trie : `ethereum.fork_types.Root` Trie root of all the receipts in the block. receipt_keys : - Key of all the receipts in the block. + Keys of all the receipts in the block. block_logs : `Bloom` Logs bloom of all the logs included in all the transactions of the block. diff --git a/src/ethereum/forks/tangerine_whistle/vm/gas.py b/src/ethereum/forks/tangerine_whistle/vm/gas.py index b930942995..0187f55d95 100644 --- a/src/ethereum/forks/tangerine_whistle/vm/gas.py +++ b/src/ethereum/forks/tangerine_whistle/vm/gas.py @@ -210,7 +210,7 @@ def calculate_message_call_gas( account inside a message call. call_stipend : The amount of stipend provided to a message call to execute code while - transferring value(ETH). + transferring value (ETH). Returns ------- diff --git a/src/ethereum/forks/tangerine_whistle/vm/instructions/arithmetic.py b/src/ethereum/forks/tangerine_whistle/vm/instructions/arithmetic.py index de5d6ab43d..b7b1a370ad 100644 --- a/src/ethereum/forks/tangerine_whistle/vm/instructions/arithmetic.py +++ b/src/ethereum/forks/tangerine_whistle/vm/instructions/arithmetic.py @@ -84,7 +84,7 @@ def sub(evm: Evm) -> None: def mul(evm: Evm) -> None: """ - Multiply the top two elements of the stack, and pushes the result back + Multiplies the top two elements of the stack, and pushes the result back on the stack. 
Parameters @@ -353,7 +353,7 @@ def signextend(evm: Evm) -> None: # Can't extend any further result = value else: - # U256(0).to_be_bytes() gives b'' instead b'\x00'. + # U256(0).to_be_bytes() gives b'' instead of b'\x00'. value_bytes = Bytes(value.to_be_bytes32()) # Now among the obtained value bytes, consider only # N `least significant bytes`, where N is `byte_num + 1`. diff --git a/src/ethereum/forks/tangerine_whistle/vm/instructions/block.py b/src/ethereum/forks/tangerine_whistle/vm/instructions/block.py index 8b50f1bc07..0edcbf000d 100644 --- a/src/ethereum/forks/tangerine_whistle/vm/instructions/block.py +++ b/src/ethereum/forks/tangerine_whistle/vm/instructions/block.py @@ -87,7 +87,7 @@ def coinbase(evm: Evm) -> None: def timestamp(evm: Evm) -> None: """ Push the current block's timestamp onto the stack. Here the timestamp - being referred is actually the unix timestamp in seconds. + being referred to is actually the unix timestamp in seconds. Here the current block refers to the block in which the currently executing transaction/call resides. diff --git a/src/ethereum/forks/tangerine_whistle/vm/instructions/memory.py b/src/ethereum/forks/tangerine_whistle/vm/instructions/memory.py index fdd9518843..3a242ba2f7 100644 --- a/src/ethereum/forks/tangerine_whistle/vm/instructions/memory.py +++ b/src/ethereum/forks/tangerine_whistle/vm/instructions/memory.py @@ -90,7 +90,7 @@ def mstore8(evm: Evm) -> None: def mload(evm: Evm) -> None: """ - Load word from memory. + Loads a word from memory. Parameters ---------- @@ -120,7 +120,7 @@ def mload(evm: Evm) -> None: def msize(evm: Evm) -> None: """ - Push the size of active memory in bytes onto the stack. + Pushes the size of active memory in bytes onto the stack. Parameters ---------- diff --git a/src/ethereum/forks/tangerine_whistle/vm/instructions/stack.py b/src/ethereum/forks/tangerine_whistle/vm/instructions/stack.py index bdf232db6a..0fc0d3fe4b 100644 --- a/src/ethereum/forks/tangerine_whistle/vm/instructions/stack.py +++ b/src/ethereum/forks/tangerine_whistle/vm/instructions/stack.py @@ -23,7 +23,7 @@ def pop(evm: Evm) -> None: """ - Remove item from stack. + Removes an item from the stack. Parameters ---------- @@ -46,7 +46,7 @@ def pop(evm: Evm) -> None: def push_n(evm: Evm, num_bytes: int) -> None: """ - Pushes a N-byte immediate onto the stack. + Pushes an N-byte immediate onto the stack. Parameters ---------- @@ -76,7 +76,7 @@ def push_n(evm: Evm, num_bytes: int) -> None: def dup_n(evm: Evm, item_number: int) -> None: """ - Duplicate the Nth stack item (from top of the stack) to the top of stack. + Duplicates the Nth stack item (from top of the stack) to the top of stack. Parameters ---------- @@ -104,7 +104,7 @@ def dup_n(evm: Evm, item_number: int) -> None: def swap_n(evm: Evm, item_number: int) -> None: """ - Swap the top and the `item_number` element of the stack, where + Swaps the top and the `item_number` element of the stack, where the top of the stack is position zero. If `item_number` is zero, this function does nothing (which should not be diff --git a/src/ethereum/forks/tangerine_whistle/vm/precompiled_contracts/ecrecover.py b/src/ethereum/forks/tangerine_whistle/vm/precompiled_contracts/ecrecover.py index d2eeaf75df..d8f6602e55 100644 --- a/src/ethereum/forks/tangerine_whistle/vm/precompiled_contracts/ecrecover.py +++ b/src/ethereum/forks/tangerine_whistle/vm/precompiled_contracts/ecrecover.py @@ -8,7 +8,7 @@ Introduction ------------ -Implementation of the ECRECOVER precompiled contract. 
+Implementation of the `ECRECOVER` precompiled contract. """ from ethereum_types.numeric import U256 diff --git a/src/ethereum/forks/tangerine_whistle/vm/precompiled_contracts/mapping.py b/src/ethereum/forks/tangerine_whistle/vm/precompiled_contracts/mapping.py index 33412f4c28..1d44a6f2e3 100644 --- a/src/ethereum/forks/tangerine_whistle/vm/precompiled_contracts/mapping.py +++ b/src/ethereum/forks/tangerine_whistle/vm/precompiled_contracts/mapping.py @@ -8,7 +8,7 @@ Introduction ------------ -Mapping of precompiled contracts their implementations. +Mapping of precompiled contracts to their implementations. """ from typing import Callable, Dict diff --git a/src/ethereum/forks/tangerine_whistle/vm/runtime.py b/src/ethereum/forks/tangerine_whistle/vm/runtime.py index 505b3488de..0aa5ddd5e2 100644 --- a/src/ethereum/forks/tangerine_whistle/vm/runtime.py +++ b/src/ethereum/forks/tangerine_whistle/vm/runtime.py @@ -21,7 +21,7 @@ def get_valid_jump_destinations(code: Bytes) -> Set[Uint]: """ - Analyze the evm code to obtain the set of valid jump destinations. + Analyze the EVM code to obtain the set of valid jump destinations. Valid jump destinations are defined as follows: * The jump destination is less than the length of the code. diff --git a/src/ethereum_spec_tools/docc.py b/src/ethereum_spec_tools/docc.py index f99ef60777..cc1623174a 100644 --- a/src/ethereum_spec_tools/docc.py +++ b/src/ethereum_spec_tools/docc.py @@ -371,7 +371,7 @@ class FixIndexTransform(Transform): identifiers specific to the diff. Without fixing these identifiers, every Python class would be defined - multiples times (the actual definition and then again in each diff), + multiple times (the actual definition and then again in each diff), cluttering up tables of contents. """ diff --git a/src/ethereum_spec_tools/evm_tools/daemon.py b/src/ethereum_spec_tools/evm_tools/daemon.py index 67ba3ede96..39268f04df 100644 --- a/src/ethereum_spec_tools/evm_tools/daemon.py +++ b/src/ethereum_spec_tools/evm_tools/daemon.py @@ -25,7 +25,7 @@ def daemon_arguments(subparsers: argparse._SubParsersAction) -> None: parser.add_argument("--uds", help="Unix domain socket path") parser.add_argument( "--timeout", - help="Timeout to shutdown daemon if there are not requests" + help="Timeout to shutdown daemon if there are no requests" " (0 for no timeout)", type=int, ) diff --git a/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py b/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py index 0e84189598..544838a5db 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py @@ -37,7 +37,7 @@ def __init__(self, t8n: "T8N", stdin: Optional[Dict] = None): with open(t8n.options.input_alloc, "r") as f: data = json.load(f) - # The json_to_state functions expects the values to hex + # The json_to_state function expects the values to be hex # strings, so we convert them here. for address, account in data.items(): for key, value in account.items(): diff --git a/src/ethereum_spec_tools/lint/__init__.py b/src/ethereum_spec_tools/lint/__init__.py index edd8565f74..f3cc4ae45d 100644 --- a/src/ethereum_spec_tools/lint/__init__.py +++ b/src/ethereum_spec_tools/lint/__init__.py @@ -18,7 +18,7 @@ def compare_ast(old: ast.AST, new: ast.AST) -> bool: """ - Check if two nodes are the equal. + Checks if two nodes are equal. 
""" if type(old) is not type(new): return False diff --git a/src/ethereum_spec_tools/new_fork/builder.py b/src/ethereum_spec_tools/new_fork/builder.py index cb625d214e..05f875a923 100644 --- a/src/ethereum_spec_tools/new_fork/builder.py +++ b/src/ethereum_spec_tools/new_fork/builder.py @@ -413,7 +413,7 @@ def _create_working_directory(self) -> TemporaryDirectory: """ Create a temporary working directory so we don't end up in the state where this process ~~barfs~~ abnormally terminates and we leave a - half-modified fork directory laying around. + half-modified fork directory lying around. """ output = self.new_fork_path.parent output.mkdir(parents=True, exist_ok=True) From acd8d6c7f924e4e99c2bf191a470c9817351854c Mon Sep 17 00:00:00 2001 From: danceratopz Date: Mon, 19 Jan 2026 11:36:51 +0100 Subject: [PATCH 079/154] chore(ci): add all EL clients to hive master config (#2034) --- .github/configs/hive/master.yaml | 56 ++++++++++++++++++++++++++++++-- 1 file changed, 54 insertions(+), 2 deletions(-) diff --git a/.github/configs/hive/master.yaml b/.github/configs/hive/master.yaml index 47a5395c07..297cc431b8 100644 --- a/.github/configs/hive/master.yaml +++ b/.github/configs/hive/master.yaml @@ -1,9 +1,61 @@ -# Hive client configuration file with master/nightly images for CI -# Uses ethpandaops mirror for faster pulls in CI environments +# This is a Hive client configuration file that uses master/main client builds +# from the ethquokkaops / ethpandaops container registry. +# +# The latest available configurations are here: +# https://github.com/ethpandaops/eth-client-docker-image-builder/blob/master/branches.yaml + +# Besu +# https://github.com/hyperledger/besu +- client: besu + nametag: default + build_args: + baseimage: docker.ethquokkaops.io/dh/ethpandaops/besu + tag: main + +# Erigon +# https://github.com/erigontech/erigon +- client: erigon + nametag: default + build_args: + baseimage: docker.ethquokkaops.io/dh/ethpandaops/erigon + tag: main + +# Ethrex +# https://github.com/lambdaclass/ethrex +- client: ethrex + nametag: default + build_args: + baseimage: docker.ethquokkaops.io/dh/ethpandaops/ethrex + tag: main # Geth (Go-Ethereum) +# https://github.com/ethereum/go-ethereum - client: go-ethereum nametag: default build_args: baseimage: docker.ethquokkaops.io/dh/ethpandaops/geth tag: master + +# Nethermind +# https://github.com/NethermindEth/nethermind +- client: nethermind + nametag: default + build_args: + baseimage: docker.ethquokkaops.io/dh/ethpandaops/nethermind + tag: master + +# Nimbus EL +# https://github.com/status-im/nimbus-eth1 +- client: nimbus-el + nametag: default + build_args: + baseimage: docker.ethquokkaops.io/dh/ethpandaops/nimbus-eth1 + tag: master + +# Reth +# https://github.com/paradigmxyz/reth +- client: reth + nametag: default + build_args: + baseimage: docker.ethquokkaops.io/dh/ethpandaops/reth + tag: main From dbbb75f5872172e36e27ecb262dd228eb48b1aed Mon Sep 17 00:00:00 2001 From: danceratopz Date: Mon, 19 Jan 2026 11:42:52 +0100 Subject: [PATCH 080/154] chore: fix PR template links to execution-specs (#2035) --- .github/PULL_REQUEST_TEMPLATE.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index f78eac317c..05cff6d8d6 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -10,16 +10,17 @@ N/A. 
- [ ] All: Ran fast `tox` checks to avoid unnecessary CI fails, see also [Code Standards](https://eest.ethereum.org/main/getting_started/code_standards/) and [Enabling Pre-commit Checks](https://eest.ethereum.org/main/dev/precommit/): + ```console uvx tox -e static ``` + - [ ] All: PR title adheres to the [repo standard](https://eest.ethereum.org/main/getting_started/contributing/?h=contri#commit-messages-issue-and-pr-titles) - it will be used as the squash commit message and should start `type(scope):`. -- [ ] All: Considered adding an entry to [CHANGELOG.md](/ethereum/execution-spec-tests/blob/main/docs/CHANGELOG.md). -- [ ] All: Considered updating the online docs in the [./docs/](/ethereum/execution-spec-tests/blob/main/docs/) directory. +- [ ] All: Considered updating the online docs in the [./docs/](/ethereum/execution-specs/blob/HEAD/docs/) directory. - [ ] All: Set appropriate labels for the changes (only maintainers can apply labels). - [ ] Tests: Ran `mkdocs serve` locally and verified the auto-generated docs for new tests in the [Test Case Reference](https://eest.ethereum.org/main/tests/) are correctly formatted. -- [ ] Tests: For PRs implementing a missed test case, update the [post-mortem document](/ethereum/execution-spec-tests/blob/main/docs/writing_tests/post_mortems.md) to add an entry the list. -- [ ] Ported Tests: All converted JSON/YML tests from [ethereum/tests](/ethereum/tests) or [tests/static](/ethereum/execution-spec-tests/blob/main/tests/static) have been assigned `@ported_from` marker. +- [ ] Tests: For PRs implementing a missed test case, update the [post-mortem document](/ethereum/execution-specs/blob/HEAD/docs/writing_tests/post_mortems.md) to add an entry the list. +- [ ] Ported Tests: All converted JSON/YML tests from [ethereum/tests](/ethereum/tests) or [tests/static](/ethereum/execution-specs/blob/HEAD/tests/static) have been assigned `@ported_from` marker. #### Cute Animal Picture From eeb65f77c8d2ef4a436e37a4e34796ef6f4bbbc2 Mon Sep 17 00:00:00 2001 From: danceratopz Date: Mon, 19 Jan 2026 11:45:42 +0100 Subject: [PATCH 081/154] fix(docs): replace `master` with `mainnet` branch (#2037) --- EIP_AUTHORS_MANUAL.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/EIP_AUTHORS_MANUAL.md b/EIP_AUTHORS_MANUAL.md index 7deac5a26e..2d549e23cf 100644 --- a/EIP_AUTHORS_MANUAL.md +++ b/EIP_AUTHORS_MANUAL.md @@ -39,13 +39,13 @@ For example, at the time of writing, the Prague Fork is still under development #### Forks live on mainnet -The final stable specification for all forks that are currently live on mainnet are in the `master` branch. +The final stable specification for all forks that are currently live on mainnet are in the `mainnet` branch. #### Fork under development At any given time, there can only be one fork under active development. The branch structure for the fork under development is as follows: -- `forks/`: The main branch for the fork under development. For example, `forks/prague` is the branch for the Prague fork. This branch will be merged into `master` after the fork has gone live on mainnet. +- `forks/`: The main branch for the fork under development. For example, `forks/prague` is the branch for the Prague fork. This branch will be merged into `mainnet` after the fork has gone live on mainnet. - `eips//`: Branches for each EIP within the fork under development. For example, `eips/prague/eip-7702` is the branch for EIP-7702 for the Prague fork. 
This branch will be merged into `forks/prague` after the EIP has been confirmed for release in the fork. ## Writing New EIPS From b84b68a2d8aeb707c9cbe8525948768b2affee3c Mon Sep 17 00:00:00 2001 From: Ignacio Hagopian Date: Mon, 19 Jan 2026 10:26:26 -0300 Subject: [PATCH 082/154] feat(benchmarks): fix bytecode attack for EXT-like opcodes to work in Osaka (#1971) --- .../compute/instruction/test_account_query.py | 217 +-------------- .../compute/instruction/test_system.py | 231 ---------------- .../scenario/test_unchunkified_bytecode.py | 255 ++++++++++++++++++ 3 files changed, 256 insertions(+), 447 deletions(-) create mode 100644 tests/benchmark/compute/scenario/test_unchunkified_bytecode.py diff --git a/tests/benchmark/compute/instruction/test_account_query.py b/tests/benchmark/compute/instruction/test_account_query.py index 0c67c38e84..4cc9f60315 100644 --- a/tests/benchmark/compute/instruction/test_account_query.py +++ b/tests/benchmark/compute/instruction/test_account_query.py @@ -22,9 +22,7 @@ Alloc, BenchmarkTestFiller, Block, - BlockchainTestFiller, Bytecode, - Environment, ExtCallGenerator, Fork, Hash, @@ -33,11 +31,6 @@ TestPhaseManager, Transaction, While, - compute_create2_address, -) - -from tests.benchmark.compute.helpers import ( - XOR_TABLE, ) @@ -136,215 +129,7 @@ def test_codecopy_benchmark( ) -@pytest.mark.parametrize( - "opcode", - [ - Op.EXTCODESIZE, - Op.EXTCODEHASH, - Op.EXTCODECOPY, - ], -) -def test_extcode_ops( - blockchain_test: BlockchainTestFiller, - pre: Alloc, - fork: Fork, - opcode: Op, - env: Environment, - gas_benchmark_value: int, -) -> None: - """ - Benchmark a block execution where a single opcode is executed. - """ - # The attack gas limit is the gas limit which the target tx will use The - # test will scale the block gas limit to setup the contracts accordingly to - # be able to pay for the contract deposit. 
This has to take into account - # the 200 gas per byte, but also the quadratic memory expansion costs which - # have to be paid each time the memory is being setup - attack_gas_limit = gas_benchmark_value - max_contract_size = fork.max_code_size() - - gas_costs = fork.gas_costs() - - # Calculate the absolute minimum gas costs to deploy the contract This does - # not take into account setting up the actual memory (using KECCAK256 and - # XOR) so the actual costs of deploying the contract is higher - memory_expansion_gas_calculator = fork.memory_expansion_gas_calculator() - memory_gas_minimum = memory_expansion_gas_calculator( - new_bytes=len(bytes(max_contract_size)) - ) - code_deposit_gas_minimum = ( - fork.gas_costs().G_CODE_DEPOSIT_BYTE * max_contract_size - + memory_gas_minimum - ) - - intrinsic_gas_cost_calc = fork.transaction_intrinsic_cost_calculator() - # Calculate the loop cost of the attacker to query one address - loop_cost = ( - gas_costs.G_KECCAK_256 # KECCAK static cost - + math.ceil(85 / 32) * gas_costs.G_KECCAK_256_WORD # KECCAK dynamic - # cost for CREATE2 - + gas_costs.G_VERY_LOW * 3 # ~MSTOREs+ADDs - + gas_costs.G_COLD_ACCOUNT_ACCESS # Opcode cost - + 30 # ~Gluing opcodes - ) - # Calculate the number of contracts to be targeted - num_contracts = ( - # Base available gas = GAS_LIMIT - intrinsic - (out of loop MSTOREs) - attack_gas_limit - intrinsic_gas_cost_calc() - gas_costs.G_VERY_LOW * 4 - ) // loop_cost - - # Set the block gas limit to a relative high value to ensure the code - # deposit tx fits in the block (there is enough gas available in the block - # to execute this) - minimum_gas_limit = code_deposit_gas_minimum * 2 * num_contracts - if env.gas_limit < minimum_gas_limit: - raise Exception( - f"`BENCHMARKING_MAX_GAS` ({env.gas_limit}) is no longer enough to" - f" support this test, which requires {minimum_gas_limit} gas for " - "its setup. Update the value or consider optimizing gas usage " - "during the setup phase of this test." - ) - - # The initcode will take its address as a starting point to the input to - # the keccak hash function. It will reuse the output of the hash function - # in a loop to create a large amount of seemingly random code, until it - # reaches the maximum contract size. - initcode = ( - Op.MSTORE(0, Op.ADDRESS) - + While( - body=( - Op.SHA3(Op.SUB(Op.MSIZE, 32), 32) - # Use a xor table to avoid having to call the "expensive" sha3 - # opcode as much - + sum( - ( - Op.PUSH32[xor_value] - + Op.XOR - + Op.DUP1 - + Op.MSIZE - + Op.MSTORE - ) - for xor_value in XOR_TABLE - ) - + Op.POP - ), - condition=Op.LT(Op.MSIZE, max_contract_size), - ) - # Despite the whole contract has random bytecode, we make the first - # opcode be a STOP so CALL-like attacks return as soon as possible, - # while EXTCODE(HASH|SIZE) work as intended. - + Op.MSTORE8(0, 0x00) - + Op.RETURN(0, max_contract_size) - ) - initcode_address = pre.deploy_contract(code=initcode) - - # The factory contract will simply use the initcode that is already - # deployed, and create a new contract and return its address if successful. 
- factory_code = ( - Op.EXTCODECOPY( - address=initcode_address, - dest_offset=0, - offset=0, - size=Op.EXTCODESIZE(initcode_address), - ) - + Op.MSTORE( - 0, - Op.CREATE2( - value=0, - offset=0, - size=Op.EXTCODESIZE(initcode_address), - salt=Op.SLOAD(0), - ), - ) - + Op.SSTORE(0, Op.ADD(Op.SLOAD(0), 1)) - + Op.RETURN(0, 32) - ) - factory_address = pre.deploy_contract(code=factory_code) - - # The factory caller will call the factory contract N times, creating N new - # contracts. Calldata should contain the N value. - factory_caller_code = Op.CALLDATALOAD(0) + While( - body=Op.POP(Op.CALL(address=factory_address)), - condition=Op.PUSH1(1) - + Op.SWAP1 - + Op.SUB - + Op.DUP1 - + Op.ISZERO - + Op.ISZERO, - ) - factory_caller_address = pre.deploy_contract(code=factory_caller_code) - - with TestPhaseManager.setup(): - contracts_deployment_tx = Transaction( - to=factory_caller_address, - gas_limit=env.gas_limit, - gas_price=10**6, - data=Hash(num_contracts), - sender=pre.fund_eoa(), - ) - - post = {} - deployed_contract_addresses = [] - for i in range(num_contracts): - deployed_contract_address = compute_create2_address( - address=factory_address, - salt=i, - initcode=initcode, - ) - post[deployed_contract_address] = Account(nonce=1) - deployed_contract_addresses.append(deployed_contract_address) - - attack_call = Bytecode() - if opcode == Op.EXTCODECOPY: - attack_call = Op.EXTCODECOPY( - address=Op.SHA3(32 - 20 - 1, 85), dest_offset=96, size=1000 - ) - else: - # For the rest of the opcodes, we can use the same generic attack call - # since all only minimally need the `address` of the target. - attack_call = Op.POP(opcode(address=Op.SHA3(32 - 20 - 1, 85))) - attack_code = ( - # Setup memory for later CREATE2 address generation loop. - # 0xFF+[Address(20bytes)]+[seed(32bytes)]+[initcode keccak(32bytes)] - Op.MSTORE(0, factory_address) - + Op.MSTORE8(32 - 20 - 1, 0xFF) - + Op.MSTORE(32, 0) - + Op.MSTORE(64, initcode.keccak256()) - # Main loop - + While( - body=attack_call + Op.MSTORE(32, Op.ADD(Op.MLOAD(32), 1)), - ) - ) - - if len(attack_code) > max_contract_size: - # TODO: A workaround could be to split the opcode code into multiple - # contracts and call them in sequence. 
- raise ValueError( - f"Code size {len(attack_code)} exceeds maximum " - f"code size {max_contract_size}" - ) - opcode_address = pre.deploy_contract(code=attack_code) - - with TestPhaseManager.execution(): - opcode_tx = Transaction( - to=opcode_address, - gas_limit=attack_gas_limit, - gas_price=10**9, - sender=pre.fund_eoa(), - ) - - blockchain_test( - pre=pre, - post=post, - blocks=[ - Block(txs=[contracts_deployment_tx]), - Block(txs=[opcode_tx]), - ], - exclude_full_post_state_in_output=True, - ) - - -@pytest.mark.repricing +@pytest.mark.repricing(copied_size=512) @pytest.mark.parametrize( "copy_size", [0, 32, 256, 512, 1024], diff --git a/tests/benchmark/compute/instruction/test_system.py b/tests/benchmark/compute/instruction/test_system.py index b7fbdc10a9..6433f0eaa1 100644 --- a/tests/benchmark/compute/instruction/test_system.py +++ b/tests/benchmark/compute/instruction/test_system.py @@ -18,11 +18,9 @@ import pytest from execution_testing import ( Account, - Address, Alloc, BenchmarkTestFiller, Block, - BlockchainTestFiller, Bytecode, Environment, ExtCallGenerator, @@ -37,235 +35,6 @@ compute_create_address, ) -from tests.benchmark.compute.helpers import XOR_TABLE - - -@pytest.mark.parametrize( - "opcode", - [ - Op.CALL, - Op.CALLCODE, - Op.DELEGATECALL, - Op.STATICCALL, - ], -) -def test_xcall( - blockchain_test: BlockchainTestFiller, - pre: Alloc, - fork: Fork, - opcode: Op, - gas_benchmark_value: int, - tx_gas_limit: int, -) -> None: - """Benchmark a system execution where a single opcode execution.""" - # The attack gas limit represents the transaction gas limit cap or - # the block gas limit. If eip-7825 is applied, the test will create - # multiple transactions for contract deployment. It should account - # for the 200 gas per byte cost and the quadratic memory-expansion - # costs, which must be paid each time memory is initialized. - attack_gas_limit = gas_benchmark_value - max_contract_size = fork.max_code_size() - - gas_costs = fork.gas_costs() - - intrinsic_gas_cost_calc = fork.transaction_intrinsic_cost_calculator() - # Calculate the loop cost of the attacker to query one address - loop_cost = ( - gas_costs.G_KECCAK_256 # KECCAK static cost - + math.ceil(85 / 32) * gas_costs.G_KECCAK_256_WORD # KECCAK dynamic - # cost for CREATE2 - + gas_costs.G_VERY_LOW * 3 # ~MSTOREs+ADDs - + gas_costs.G_COLD_ACCOUNT_ACCESS # Opcode cost - + 30 # ~Gluing opcodes - ) - # Calculate an upper bound of the number of contracts to be targeted - num_contracts = ( - # Base available gas = GAS_LIMIT - intrinsic - (out of loop MSTOREs) - attack_gas_limit - intrinsic_gas_cost_calc() - gas_costs.G_VERY_LOW * 4 - ) // loop_cost - - initcode, factory_address, factory_caller_address = ( - _deploy_max_contract_factory(pre, fork) - ) - - # Deploy num_contracts via multiple txs (each capped by tx gas limit). - with TestPhaseManager.setup(): - # Rough estimate (rounded down) of contracts per tx based on dominant - # cost factor only, and up to 90% of the block gas limit. - # The goal is to involve the minimum amount of gas pricing to avoid - # complexity and potential brittleness. 
- num_contracts_per_tx = int(tx_gas_limit * 0.9) // ( - gas_costs.G_CODE_DEPOSIT_BYTE * max_contract_size - ) - if num_contracts_per_tx == 0: - pytest.skip("tx_gas_limit too low to deploy max-size contract") - setup_txs = math.ceil(num_contracts / num_contracts_per_tx) - - contracts_deployment_txs = [] - for _ in range(setup_txs): - contracts_deployment_txs.append( - Transaction( - to=factory_caller_address, - gas_limit=tx_gas_limit, - data=Hash(num_contracts_per_tx), - sender=pre.fund_eoa(), - ) - ) - - post = {} - for i in range(num_contracts): - deployed_contract_address = compute_create2_address( - address=factory_address, - salt=i, - initcode=initcode, - ) - post[deployed_contract_address] = Account(nonce=1) - - attack_call = Bytecode() - if opcode == Op.EXTCODECOPY: - attack_call = Op.EXTCODECOPY( - address=Op.SHA3(32 - 20 - 1, 85), dest_offset=96, size=1000 - ) - else: - # For the rest of the opcodes, we can use the same generic attack call - # since all only minimally need the `address` of the target. - attack_call = Op.POP(opcode(address=Op.SHA3(32 - 20 - 1, 85))) - attack_code = ( - # Setup memory for later CREATE2 address generation loop. - # 0xFF+[Address(20bytes)]+[seed(32bytes)]+[initcode keccak(32bytes)] - Op.MSTORE(0, factory_address) - + Op.MSTORE8(32 - 20 - 1, 0xFF) - + Op.MSTORE( - 32, Op.CALLDATALOAD(0) - ) # Calldata is the starting value of the CREATE2 salt - + Op.MSTORE(64, initcode.keccak256()) - # Main loop - + While( - body=attack_call + Op.MSTORE(32, Op.ADD(Op.MLOAD(32), 1)), - ) - ) - - attack_address = pre.deploy_contract(code=attack_code) - - with TestPhaseManager.execution(): - full_txs = attack_gas_limit // tx_gas_limit - remainder = attack_gas_limit % tx_gas_limit - - num_targeted_contracts_per_full_tx = ( - # Base available gas: - # TX_GAS_LIMIT - intrinsic - (out of loop MSTOREs) - tx_gas_limit - intrinsic_gas_cost_calc() - gas_costs.G_VERY_LOW * 4 - ) // loop_cost - contract_start_index = 0 - opcode_txs = [] - for _ in range(full_txs): - opcode_txs.append( - Transaction( - to=attack_address, - gas_limit=tx_gas_limit, - data=Hash(contract_start_index), - sender=pre.fund_eoa(), - ) - ) - contract_start_index += num_targeted_contracts_per_full_tx - if remainder > intrinsic_gas_cost_calc(calldata=bytes(32)): - opcode_txs.append( - Transaction( - to=attack_address, - gas_limit=remainder, - data=Hash(contract_start_index), - sender=pre.fund_eoa(), - ) - ) - - blockchain_test( - pre=pre, - post=post, - blocks=[ - Block(txs=contracts_deployment_txs), - Block(txs=opcode_txs), - ], - exclude_full_post_state_in_output=True, - ) - - -def _deploy_max_contract_factory( - pre: Alloc, - fork: Fork, -) -> tuple[Bytecode, Address, Address]: - max_contract_size = fork.max_code_size() - - # The initcode will take its address as a starting point to the input to - # the keccak hash function. It will reuse the output of the hash function - # in a loop to create a large amount of seemingly random code, until it - # reaches the maximum contract size. 
- initcode = ( - Op.MSTORE(0, Op.ADDRESS) - + While( - body=( - Op.SHA3(Op.SUB(Op.MSIZE, 32), 32) - # Use a xor table to avoid having to call the "expensive" sha3 - # opcode as much - + sum( - ( - Op.PUSH32[xor_value] - + Op.XOR - + Op.DUP1 - + Op.MSIZE - + Op.MSTORE - ) - for xor_value in XOR_TABLE - ) - + Op.POP - ), - condition=Op.LT(Op.MSIZE, max_contract_size), - ) - # Despite the whole contract has random bytecode, we make the first - # opcode be a STOP so CALL-like attacks return as soon as possible, - # while EXTCODE(HASH|SIZE) work as intended. - + Op.MSTORE8(0, 0x00) - + Op.RETURN(0, max_contract_size) - ) - initcode_address = pre.deploy_contract(code=initcode) - - # The factory contract will simply use the initcode that is already - # deployed, and create a new contract and return its address if successful. - factory_code = ( - Op.EXTCODECOPY( - address=initcode_address, - dest_offset=0, - offset=0, - size=Op.EXTCODESIZE(initcode_address), - ) - + Op.MSTORE( - 0, - Op.CREATE2( - value=0, - offset=0, - size=Op.EXTCODESIZE(initcode_address), - salt=Op.SLOAD(0), - ), - ) - + Op.SSTORE(0, Op.ADD(Op.SLOAD(0), 1)) - + Op.RETURN(0, 32) - ) - factory_address = pre.deploy_contract(code=factory_code) - - # The factory caller will call the factory contract N times, creating N new - # contracts. Calldata should contain the N value. - factory_caller_code = Op.CALLDATALOAD(0) + While( - body=Op.POP(Op.CALL(address=factory_address)), - condition=Op.PUSH1(1) - + Op.SWAP1 - + Op.SUB - + Op.DUP1 - + Op.ISZERO - + Op.ISZERO, - ) - factory_caller_address = pre.deploy_contract(code=factory_caller_code) - - return initcode, factory_address, factory_caller_address - @pytest.mark.repricing(max_code_size_ratio=0) @pytest.mark.parametrize( diff --git a/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py b/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py new file mode 100644 index 0000000000..5e60d85a2a --- /dev/null +++ b/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py @@ -0,0 +1,255 @@ +""" +Benchmark operations that force the inclusion of max size bytecodes. +This scenario is relevant in forks that have unchunkified bytecode. +""" + +import math + +import pytest +from execution_testing import ( + Account, + Address, + Alloc, + Block, + BlockchainTestFiller, + Bytecode, + Fork, + Hash, + Op, + TestPhaseManager, + Transaction, + While, + compute_create2_address, +) + +from tests.benchmark.compute.helpers import XOR_TABLE + + +@pytest.mark.parametrize( + "opcode", + [ + Op.CALL, + Op.CALLCODE, + Op.DELEGATECALL, + Op.STATICCALL, + Op.EXTCODESIZE, + Op.EXTCODEHASH, + Op.EXTCODECOPY, + ], +) +def test_unchunkified_bytecode( + blockchain_test: BlockchainTestFiller, + pre: Alloc, + fork: Fork, + opcode: Op, + gas_benchmark_value: int, + tx_gas_limit: int, +) -> None: + """Benchmark scenario of accessing max-code size bytecode.""" + # The attack gas limit represents the transaction gas limit cap or + # the block gas limit. If eip-7825 is applied, the test will create + # multiple transactions for contract deployment. It should account + # for the 200 gas per byte cost and the quadratic memory-expansion + # costs, which must be paid each time memory is initialized. 
+ attack_gas_limit = gas_benchmark_value + max_contract_size = fork.max_code_size() + + gas_costs = fork.gas_costs() + + intrinsic_gas_cost_calc = fork.transaction_intrinsic_cost_calculator() + # Calculate the loop cost of the attacker to query one address + loop_cost = ( + gas_costs.G_KECCAK_256 # KECCAK static cost + + math.ceil(85 / 32) * gas_costs.G_KECCAK_256_WORD # KECCAK dynamic + # cost for CREATE2 + + gas_costs.G_VERY_LOW * 3 # ~MSTOREs+ADDs + + gas_costs.G_COLD_ACCOUNT_ACCESS # Opcode cost + + 30 # ~Gluing opcodes + ) + # Calculate an upper bound of the number of contracts to be targeted + num_contracts = ( + # Base available gas = GAS_LIMIT - intrinsic - (out of loop MSTOREs) + attack_gas_limit - intrinsic_gas_cost_calc() - gas_costs.G_VERY_LOW * 4 + ) // loop_cost + + initcode, factory_address, factory_caller_address = ( + _deploy_max_contract_factory(pre, fork) + ) + + # Deploy num_contracts via multiple txs (each capped by tx gas limit). + with TestPhaseManager.setup(): + # Rough estimate (rounded down) of contracts per tx based on dominant + # cost factor only, and up to 90% of the block gas limit. + # The goal is to involve the minimum amount of gas pricing to avoid + # complexity and potential brittleness. + num_contracts_per_tx = int(tx_gas_limit * 0.9) // ( + gas_costs.G_CODE_DEPOSIT_BYTE * max_contract_size + ) + if num_contracts_per_tx == 0: + pytest.skip("tx_gas_limit too low to deploy max-size contract") + setup_txs = math.ceil(num_contracts / num_contracts_per_tx) + + contracts_deployment_txs = [] + for _ in range(setup_txs): + contracts_deployment_txs.append( + Transaction( + to=factory_caller_address, + gas_limit=tx_gas_limit, + data=Hash(num_contracts_per_tx), + sender=pre.fund_eoa(), + ) + ) + + post = {} + for i in range(num_contracts): + deployed_contract_address = compute_create2_address( + address=factory_address, + salt=i, + initcode=initcode, + ) + post[deployed_contract_address] = Account(nonce=1) + + attack_call = Bytecode() + if opcode == Op.EXTCODECOPY: + attack_call = Op.EXTCODECOPY( + address=Op.SHA3(32 - 20 - 1, 85), dest_offset=96, size=1000 + ) + else: + # For the rest of the opcodes, we can use the same generic attack call + # since all only minimally need the `address` of the target. + attack_call = Op.POP(opcode(address=Op.SHA3(32 - 20 - 1, 85))) + attack_code = ( + # Setup memory for later CREATE2 address generation loop. 
+ # 0xFF+[Address(20bytes)]+[seed(32bytes)]+[initcode keccak(32bytes)] + Op.MSTORE(0, factory_address) + + Op.MSTORE8(32 - 20 - 1, 0xFF) + + Op.MSTORE( + 32, Op.CALLDATALOAD(0) + ) # Calldata is the starting value of the CREATE2 salt + + Op.MSTORE(64, initcode.keccak256()) + # Main loop + + While( + body=attack_call + Op.MSTORE(32, Op.ADD(Op.MLOAD(32), 1)), + ) + ) + + attack_address = pre.deploy_contract(code=attack_code) + + with TestPhaseManager.execution(): + full_txs = attack_gas_limit // tx_gas_limit + remainder = attack_gas_limit % tx_gas_limit + + num_targeted_contracts_per_full_tx = ( + # Base available gas: + # TX_GAS_LIMIT - intrinsic - (out of loop MSTOREs) + tx_gas_limit - intrinsic_gas_cost_calc() - gas_costs.G_VERY_LOW * 4 + ) // loop_cost + contract_start_index = 0 + opcode_txs = [] + for _ in range(full_txs): + opcode_txs.append( + Transaction( + to=attack_address, + gas_limit=tx_gas_limit, + data=Hash(contract_start_index), + sender=pre.fund_eoa(), + ) + ) + contract_start_index += num_targeted_contracts_per_full_tx + if remainder > intrinsic_gas_cost_calc(calldata=bytes(32)): + opcode_txs.append( + Transaction( + to=attack_address, + gas_limit=remainder, + data=Hash(contract_start_index), + sender=pre.fund_eoa(), + ) + ) + + blockchain_test( + pre=pre, + post=post, + blocks=[ + Block(txs=contracts_deployment_txs), + Block(txs=opcode_txs), + ], + exclude_full_post_state_in_output=True, + ) + + +def _deploy_max_contract_factory( + pre: Alloc, + fork: Fork, +) -> tuple[Bytecode, Address, Address]: + max_contract_size = fork.max_code_size() + + # The initcode will take its address as a starting point to the input to + # the keccak hash function. It will reuse the output of the hash function + # in a loop to create a large amount of seemingly random code, until it + # reaches the maximum contract size. + initcode = ( + Op.MSTORE(0, Op.ADDRESS) + + While( + body=( + Op.SHA3(Op.SUB(Op.MSIZE, 32), 32) + # Use a xor table to avoid having to call the "expensive" sha3 + # opcode as much + + sum( + ( + Op.PUSH32[xor_value] + + Op.XOR + + Op.DUP1 + + Op.MSIZE + + Op.MSTORE + ) + for xor_value in XOR_TABLE + ) + + Op.POP + ), + condition=Op.LT(Op.MSIZE, max_contract_size), + ) + # Despite the whole contract has random bytecode, we make the first + # opcode be a STOP so CALL-like attacks return as soon as possible, + # while EXTCODE(HASH|SIZE) work as intended. + + Op.MSTORE8(0, 0x00) + + Op.RETURN(0, max_contract_size) + ) + initcode_address = pre.deploy_contract(code=initcode) + + # The factory contract will simply use the initcode that is already + # deployed, and create a new contract and return its address if successful. + factory_code = ( + Op.EXTCODECOPY( + address=initcode_address, + dest_offset=0, + offset=0, + size=Op.EXTCODESIZE(initcode_address), + ) + + Op.MSTORE( + 0, + Op.CREATE2( + value=0, + offset=0, + size=Op.EXTCODESIZE(initcode_address), + salt=Op.SLOAD(0), + ), + ) + + Op.SSTORE(0, Op.ADD(Op.SLOAD(0), 1)) + + Op.RETURN(0, 32) + ) + factory_address = pre.deploy_contract(code=factory_code) + + # The factory caller will call the factory contract N times, creating N new + # contracts. Calldata should contain the N value. 
+ factory_caller_code = Op.CALLDATALOAD(0) + While( + body=Op.POP(Op.CALL(address=factory_address)), + condition=Op.PUSH1(1) + + Op.SWAP1 + + Op.SUB + + Op.DUP1 + + Op.ISZERO + + Op.ISZERO, + ) + factory_caller_address = pre.deploy_contract(code=factory_caller_code) + + return initcode, factory_address, factory_caller_address From 40a25f0ab28f71c98b4d17a7d58778d17cff3d2c Mon Sep 17 00:00:00 2001 From: spencer Date: Mon, 19 Jan 2026 14:34:56 +0000 Subject: [PATCH 083/154] chore(tests,test-*,doc): remove eof from eest (#1873) Co-authored-by: danceratopz --- docs/dev/docs.md | 2 +- docs/writing_tests/fork_methods.md | 5 +- docs/writing_tests/test_markers.md | 61 +- packages/testing/pyproject.toml | 1 - .../testing/src/execution_testing/__init__.py | 14 - .../src/execution_testing/cli/eofwrap.py | 440 ---- .../src/execution_testing/cli/evm_bytes.py | 21 - .../plugins/consume/direct/conftest.py | 8 +- .../plugins/execute/pre_alloc.py | 77 +- .../pytest_commands/plugins/filler/filler.py | 1 - .../filler/gen_test_doc/gen_test_doc.py | 6 +- .../plugins/filler/gen_test_doc/page_props.py | 1 - .../plugins/filler/pre_alloc.py | 42 +- .../plugins/filler/tests/test_pre_alloc.py | 21 +- .../pytest_commands/plugins/forks/forks.py | 15 +- .../forks/tests/test_covariant_markers.py | 31 - .../cli/tests/test_eofwrap.py | 67 - .../cli/tests/test_evm_bytes.py | 24 - .../cli/tests/test_pytest_fill_command.py | 1 - .../client_clis/clis/evmone.py | 48 +- .../client_clis/clis/nethermind.py | 88 +- .../execution_testing/exceptions/__init__.py | 4 - .../exceptions/exceptions.py | 128 +- .../exceptions/exceptions/__init__.py | 4 - .../exceptions/exceptions/eof.py | 125 - .../exceptions/exceptions/exceptions_types.py | 9 +- .../execution_testing/fixtures/__init__.py | 2 - .../src/execution_testing/fixtures/eof.py | 51 - .../fixtures/tests/test_eof.py | 108 - .../src/execution_testing/forks/__init__.py | 2 - .../src/execution_testing/forks/base_fork.py | 24 +- .../execution_testing/forks/forks/forks.py | 83 +- .../src/execution_testing/specs/__init__.py | 14 - .../src/execution_testing/specs/eof.py | 729 ------ .../execution_testing/test_types/__init__.py | 2 - .../test_types/account_types.py | 2 - .../test_types/eof/__init__.py | 5 - .../test_types/eof/constants.py | 20 - .../test_types/eof/v1/__init__.py | 622 ----- .../test_types/eof/v1/constants.py | 45 - .../execution_testing/test_types/helpers.py | 13 - .../test_types/tests/test_eof_v1.py | 903 -------- .../tools/tools_code/generators.py | 115 +- .../tools/utility/generators.py | 2 +- .../src/execution_testing/vm/__init__.py | 2 - .../src/execution_testing/vm/evm_types.py | 16 - .../src/execution_testing/vm/helpers.py | 10 +- .../src/execution_testing/vm/opcodes.py | 798 +------ .../src/execution_testing/vm/tests/test_vm.py | 131 -- .../test_tstorage_clear_after_tx.py | 16 +- .../test_point_evaluation_precompile.py | 14 +- tests/cancun/eip5656_mcopy/test_mcopy.py | 2 - .../eip5656_mcopy/test_mcopy_contexts.py | 8 +- .../test_mcopy_memory_expansion.py | 2 - tests/frontier/opcodes/test_dup.py | 1 - .../scenarios/scenarios/call_combinations.py | 8 +- .../scenarios/create_combinations.py | 13 +- .../security/test_selfdestruct_balance_bug.py | 2 - .../eip7702_set_code_tx/test_set_code_txs.py | 115 +- .../test_set_code_txs_2.py | 48 - .../eip3860_initcode/test_with_eof.py | 104 - tests/unscheduled/eip7692_eof_v1/__init__.py | 22 - .../eip7692_eof_v1/eip3540_eof_v1/__init__.py | 10 - .../eip7692_eof_v1/eip3540_eof_v1/opcodes.py | 250 -- 
.../eip7692_eof_v1/eip3540_eof_v1/spec.py | 1 - .../test_all_opcodes_in_container.py | 484 ---- .../eip3540_eof_v1/test_container_size.py | 126 - .../test_container_validation.py | 1461 ------------ .../eip3540_eof_v1/test_eof_example.py | 173 -- .../eip3540_eof_v1/test_execution.py | 83 - .../eip3540_eof_v1/test_execution_function.py | 461 ---- .../eip3540_eof_v1/test_extcode.py | 93 - .../test_migrated_valid_invalid.py | 477 ---- .../eip3540_eof_v1/test_opcodes_in_legacy.py | 263 --- .../test_section_header_body_mismatch.py | 160 -- .../eip3540_eof_v1/test_section_order.py | 386 ---- .../eip3540_eof_v1/test_section_size.py | 348 --- .../eip4200_relative_jumps/__init__.py | 7 - .../eip4200_relative_jumps/helpers.py | 23 - .../eip4200_relative_jumps/test_rjump.py | 1212 ---------- .../eip4200_relative_jumps/test_rjumpi.py | 2024 ----------------- .../eip4200_relative_jumps/test_rjumpv.py | 1986 ---------------- .../eip4750_functions/__init__.py | 7 - .../eip4750_functions/helpers.py | 14 - .../eip4750_functions/test_callf_execution.py | 707 ------ .../eip4750_functions/test_code_validation.py | 1407 ------------ .../eip7692_eof_v1/eip5450_stack/__init__.py | 8 - .../eip5450_stack/test_code_validation.py | 732 ------ .../eip5450_stack/test_execution.py | 82 - .../eip7692_eof_v1/eip6206_jumpf/__init__.py | 7 - .../eip7692_eof_v1/eip6206_jumpf/helpers.py | 14 - .../eip7692_eof_v1/eip6206_jumpf/spec.py | 1 - .../eip6206_jumpf/test_jumpf_execution.py | 772 ------- .../eip6206_jumpf/test_jumpf_stack.py | 424 ---- .../eip6206_jumpf/test_jumpf_target.py | 135 -- .../eip6206_jumpf/test_jumpf_validation.py | 554 ----- .../test_nonreturning_validation.py | 313 --- .../eip663_dupn_swapn_exchange/__init__.py | 10 - .../eip663_dupn_swapn_exchange/test_dupn.py | 174 -- .../test_exchange.py | 148 -- .../eip663_dupn_swapn_exchange/test_swapn.py | 154 -- .../eip7069_extcall/__init__.py | 11 - .../eip7692_eof_v1/eip7069_extcall/helpers.py | 34 - .../eip7692_eof_v1/eip7069_extcall/spec.py | 7 - .../test_address_space_extension.py | 258 --- .../eip7069_extcall/test_calldata.py | 601 ----- .../eip7069_extcall/test_calls.py | 1371 ----------- .../eip7069_extcall/test_gas.py | 247 -- .../test_returndatacopy_memory_expansion.py | 309 --- .../eip7069_extcall/test_returndataload.py | 410 ---- .../eip7480_data_section/__init__.py | 9 - .../eip7480_data_section/spec.py | 1 - .../test_code_validation.py | 301 --- .../eip7480_data_section/test_data_opcodes.py | 177 -- .../test_datacopy_memory_expansion.py | 338 --- .../eip7620_eof_create/__init__.py | 26 - .../eip7620_eof_create/helpers.py | 90 - .../eip7692_eof_v1/eip7620_eof_create/spec.py | 3 - .../eip7620_eof_create/test_eofcreate.py | 892 -------- .../test_eofcreate_failures.py | 927 -------- .../eip7620_eof_create/test_gas.py | 170 -- .../test_legacy_eof_creates.py | 265 --- .../eip7620_eof_create/test_memory.py | 145 -- .../eip7620_eof_create/test_returncode.py | 319 --- .../test_subcontainer_validation.py | 1009 -------- .../eip7873_tx_create/__init__.py | 3 - .../eip7692_eof_v1/eip7873_tx_create/spec.py | 3 - .../eip7873_tx_create/test_creation_tx.py | 135 -- .../eip7873_tx_create/test_txcreate.py | 822 ------- .../test_txcreate_failures.py | 1089 --------- .../test_txcreate_validates.py | 273 --- .../unscheduled/eip7692_eof_v1/eof_tracker.md | 502 ---- tests/unscheduled/eip7692_eof_v1/gas_test.py | 215 -- whitelist.txt | 9 - 134 files changed, 149 insertions(+), 31811 deletions(-) delete mode 100644 packages/testing/src/execution_testing/cli/eofwrap.py delete 
mode 100644 packages/testing/src/execution_testing/cli/tests/test_eofwrap.py delete mode 100644 packages/testing/src/execution_testing/exceptions/exceptions/eof.py delete mode 100644 packages/testing/src/execution_testing/fixtures/eof.py delete mode 100644 packages/testing/src/execution_testing/fixtures/tests/test_eof.py delete mode 100644 packages/testing/src/execution_testing/specs/eof.py delete mode 100644 packages/testing/src/execution_testing/test_types/eof/__init__.py delete mode 100644 packages/testing/src/execution_testing/test_types/eof/constants.py delete mode 100644 packages/testing/src/execution_testing/test_types/eof/v1/__init__.py delete mode 100644 packages/testing/src/execution_testing/test_types/eof/v1/constants.py delete mode 100644 packages/testing/src/execution_testing/test_types/tests/test_eof_v1.py delete mode 100644 packages/testing/src/execution_testing/vm/evm_types.py delete mode 100644 tests/shanghai/eip3860_initcode/test_with_eof.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/__init__.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/__init__.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/opcodes.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/spec.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_all_opcodes_in_container.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_size.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_eof_example.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_execution.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_execution_function.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_extcode.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_migrated_valid_invalid.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_opcodes_in_legacy.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_header_body_mismatch.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_order.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_size.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/__init__.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/helpers.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip4750_functions/__init__.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip4750_functions/helpers.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_callf_execution.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip5450_stack/__init__.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip5450_stack/test_code_validation.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip5450_stack/test_execution.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/__init__.py delete mode 100644 
tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/helpers.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/spec.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_execution.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_stack.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_target.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_nonreturning_validation.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/__init__.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_dupn.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_exchange.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_swapn.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7069_extcall/__init__.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7069_extcall/helpers.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7069_extcall/spec.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_address_space_extension.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calldata.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_gas.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_returndatacopy_memory_expansion.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_returndataload.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7480_data_section/__init__.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7480_data_section/spec.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_code_validation.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_data_opcodes.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_datacopy_memory_expansion.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/__init__.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/helpers.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/spec.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_eofcreate.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_eofcreate_failures.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_gas.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_legacy_eof_creates.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_memory.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_returncode.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_subcontainer_validation.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/__init__.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/spec.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_creation_tx.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_txcreate.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_txcreate_failures.py delete mode 100644 
tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_txcreate_validates.py delete mode 100644 tests/unscheduled/eip7692_eof_v1/eof_tracker.md delete mode 100644 tests/unscheduled/eip7692_eof_v1/gas_test.py diff --git a/docs/dev/docs.md b/docs/dev/docs.md index 40ecf380dd..8673217ae5 100644 --- a/docs/dev/docs.md +++ b/docs/dev/docs.md @@ -113,7 +113,7 @@ uv run mike list Delete a version: ```console -uv run mike delete v1.2.3a1-eof +uv run mike delete v1.2.3a1 ``` ### Set Default Version diff --git a/docs/writing_tests/fork_methods.md b/docs/writing_tests/fork_methods.md index b9bad8be2a..c70642246f 100644 --- a/docs/writing_tests/fork_methods.md +++ b/docs/writing_tests/fork_methods.md @@ -124,10 +124,9 @@ fork.system_contracts(block_number=0, timestamp=0) # Returns list of system con Methods for determining EVM features and valid opcodes: ```python -fork.evm_code_types(block_number=0, timestamp=0) # Returns list of supported code types (e.g., Legacy, EOF) fork.valid_opcodes() # Returns list of valid opcodes for this fork -fork.call_opcodes(block_number=0, timestamp=0) # Returns list of call opcodes with their code types -fork.create_opcodes(block_number=0, timestamp=0) # Returns list of create opcodes with their code types +fork.call_opcodes(block_number=0, timestamp=0) # Returns list of call opcodes +fork.create_opcodes(block_number=0, timestamp=0) # Returns list of create opcodes ``` ### Blob-related Methods (Cancun+) diff --git a/docs/writing_tests/test_markers.md b/docs/writing_tests/test_markers.md index 32aba68e10..73cf74cf45 100644 --- a/docs/writing_tests/test_markers.md +++ b/docs/writing_tests/test_markers.md @@ -129,59 +129,6 @@ def test_something_with_all_precompiles( In this example, the test will be parameterized for parameter `precompile` with values `[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]` for fork Shanghai, but with values `[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]` for fork Cancun which introduced the [point evaluation precompile](https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile) defined in EIP-4844. -### `@pytest.mark.with_all_evm_code_types` - -This marker is used to automatically parameterize a test with all EVM code types that are valid for the fork being tested. - -```python -import pytest - -from execution_testing.tools import Alloc, StateTestFiller - -@pytest.mark.with_all_evm_code_types -@pytest.mark.valid_from("Frontier") -def test_something_with_all_evm_code_types( - state_test: StateTestFiller, - pre: Alloc, -): - pass -``` - -In this example, the test will be parameterized for parameter `evm_code_type` only with value `[EVMCodeType.LEGACY]` starting on fork Frontier, and eventually it will be parametrized with with values `[EVMCodeType.LEGACY, EVMCodeType.EOF_V1]` on the EOF activation fork. - -In all calls to `pre.deploy_contract`, if the code parameter is `Bytecode` type, and `evm_code_type==EVMCodeType.EOF_V1`, the bytecode will be automatically wrapped in an EOF V1 container. - -Code wrapping might fail in the following circumstances: - -- The code contains invalid EOF V1 opcodes. -- The code does not end with a valid EOF V1 terminating opcode (such as `Op.STOP` or `Op.REVERT` or `Op.RETURN`). - -In the case where the code wrapping fails, `evm_code_type` can be added as a parameter to the test and the bytecode can be dynamically modified to be compatible with the EOF V1 container. 
- -One thing to note is that `evm_code_type` is not necessary to be added as a parameter to the test because the `pre: Alloc` fixture automatically consumes this fixture, and therefore it only needs to be added to the test signature if the test's logic needs it. - -```python -import pytest - -from execution_testing.tools import Alloc, StateTestFiller -from execution_testing.vm import EVMCodeType -from execution_testing.vm import Opcodes as Op - -@pytest.mark.with_all_evm_code_types -@pytest.mark.valid_from("Frontier") -def test_something_with_all_evm_code_types( - state_test: StateTestFiller, - pre: Alloc, - evm_code_type: EVMCodeType -): - code = Op.SSTORE(1, 1) - if evm_code_type == EVMCodeType.EOF_V1: - # Modify the bytecode to be compatible with EOF V1 container - code += Op.STOP - pre.deploy_contract(code) - ... -``` - ### `@pytest.mark.with_all_call_opcodes` This marker is used to automatically parameterize a test with all EVM call opcodes that are valid for the fork being tested. @@ -202,9 +149,7 @@ def test_something_with_all_call_opcodes( pass ``` -In this example, the test will be parametrized for parameter `call_opcode` with values `[Op.CALL, Op.CALLCODE]` starting on fork Frontier, `[Op.CALL, Op.CALLCODE, Op.DELEGATECALL]` on fork Homestead, `[Op.CALL, Op.CALLCODE, Op.DELEGATECALL, Op.STATICCALL]` on fork Byzantium, and eventually it will be parametrized with with values `[Op.CALL, Op.CALLCODE, Op.DELEGATECALL, Op.STATICCALL, Op.EXTCALL, Op.EXTSTATICCALL, Op.EXTDELEGATECALL]` on the EOF activation fork. - -Parameter `evm_code_type` will also be parametrized with the correct EVM code type for the opcode under test. +In this example, the test will be parametrized for parameter `call_opcode` with values `[Op.CALL, Op.CALLCODE]` starting on fork Frontier, `[Op.CALL, Op.CALLCODE, Op.DELEGATECALL]` on fork Homestead, and `[Op.CALL, Op.CALLCODE, Op.DELEGATECALL, Op.STATICCALL]` on fork Byzantium and later. ### `@pytest.mark.with_all_create_opcodes` @@ -226,9 +171,7 @@ def test_something_with_all_create_opcodes( pass ``` -In this example, the test will be parametrized for parameter `create_opcode` with values `[Op.CREATE]` starting on fork Frontier, `[Op.CREATE, Op.CREATE2]` starting on fork Constantinople, and eventually it will be parametrized with with values `[Op.CREATE, Op.CREATE2, Op.EOFCREATE]` on the EOF activation fork. - -Parameter `evm_code_type` will also be parametrized with the correct EVM code type for the opcode under test. +In this example, the test will be parametrized for parameter `create_opcode` with values `[Op.CREATE]` starting on fork Frontier, and `[Op.CREATE, Op.CREATE2]` starting on fork Constantinople and later. 
### `@pytest.mark.with_all_system_contracts` diff --git a/packages/testing/pyproject.toml b/packages/testing/pyproject.toml index 669163764e..f567d51ea0 100644 --- a/packages/testing/pyproject.toml +++ b/packages/testing/pyproject.toml @@ -88,7 +88,6 @@ checklist = "execution_testing.cli.pytest_commands.checklist:checklist" generate_checklist_stubs = "execution_testing.cli.generate_checklist_stubs:generate_checklist_stubs" genindex = "execution_testing.cli.gen_index:generate_fixtures_index_cli" gentest = "execution_testing.cli.gentest:generate" -eofwrap = "execution_testing.cli.eofwrap:eof_wrap" pyspelling_soft_fail = "execution_testing.cli.tox_helpers:pyspelling" markdownlintcli2_soft_fail = "execution_testing.cli.tox_helpers:markdownlint" order_fixtures = "execution_testing.cli.order_fixtures:order_fixtures" diff --git a/packages/testing/src/execution_testing/__init__.py b/packages/testing/src/execution_testing/__init__.py index 8f092e6b48..0af1799866 100644 --- a/packages/testing/src/execution_testing/__init__.py +++ b/packages/testing/src/execution_testing/__init__.py @@ -27,7 +27,6 @@ from .exceptions import ( BlockException, EngineAPIError, - EOFException, TransactionException, ) from .fixtures import BaseFixture, FixtureCollector @@ -41,10 +40,6 @@ Block, BlockchainTest, BlockchainTestFiller, - EOFStateTest, - EOFStateTestFiller, - EOFTest, - EOFTestFiller, Header, StateTest, StateTestFiller, @@ -85,7 +80,6 @@ compute_create2_address, compute_create_address, compute_deterministic_create2_address, - compute_eofcreate_address, keccak256, ) from .tools import ( @@ -105,7 +99,6 @@ ) from .vm import ( Bytecode, - EVMCodeType, Macro, Macros, MemoryVariable, @@ -161,12 +154,6 @@ "EngineAPIError", "Environment", "EOA", - "EOFException", - "EOFStateTest", - "EOFStateTestFiller", - "EOFTest", - "EOFTestFiller", - "EVMCodeType", "FixtureCollector", "Fork", "GasCosts", @@ -214,7 +201,6 @@ "compute_create_address", "compute_create2_address", "compute_deterministic_create2_address", - "compute_eofcreate_address", "extend_with_defaults", "gas_test", "generate_system_contract_deploy_test", diff --git a/packages/testing/src/execution_testing/cli/eofwrap.py b/packages/testing/src/execution_testing/cli/eofwrap.py deleted file mode 100644 index 2f37444134..0000000000 --- a/packages/testing/src/execution_testing/cli/eofwrap.py +++ /dev/null @@ -1,440 +0,0 @@ -""" -Generate a JSON blockchain test from an existing JSON blockchain test by -wrapping its pre-state code in EOF wherever possible. - -Example Usage: - -1. 
Wrap tests - - ```console - eofwrap - ``` -""" - -import json -import os -import sys -from pathlib import Path -from typing import Any, Dict, cast, no_type_check - -import click - -from execution_testing.base_types import Bytes, EthereumTestRootModel -from execution_testing.base_types.conversions import to_hex -from execution_testing.client_clis import CLINotFoundInPathError -from execution_testing.client_clis.clis.evmone import ( - EvmOneTransitionTool, -) -from execution_testing.fixtures.blockchain import ( - FixtureBlock, - InvalidFixtureBlock, -) -from execution_testing.fixtures.file import Fixtures -from execution_testing.forks.forks.forks import EOFv1 -from execution_testing.specs.blockchain import ( - Block, - BlockchainFixture, - BlockchainTest, -) -from execution_testing.specs.debugging import print_traces -from execution_testing.specs.eof import EOFParse -from execution_testing.test_types import Transaction -from execution_testing.test_types.block_types import Environment -from execution_testing.test_types.eof.v1 import Container -from execution_testing.vm import Bytecode, Op - -from .evm_bytes import OpcodeWithOperands, process_evm_bytes - - -@click.command() -@click.argument( - "input_path", type=click.Path(exists=True, dir_okay=True, file_okay=True) -) -@click.argument("output_dir", type=click.Path(dir_okay=True, file_okay=False)) -@click.option("--traces", is_flag=True, type=bool) -def eof_wrap(input_path: str, output_dir: str, traces: bool) -> None: - """ - Wrap JSON blockchain test file(s) found at `input_path`, output to - `output_dir`. - """ - eof_wrapper = EofWrapper() - - try: - EvmOneTransitionTool() - except CLINotFoundInPathError: - print( - f"Error: {EvmOneTransitionTool.default_binary} must be in the PATH." - ) - sys.exit(1) - except Exception as e: - raise Exception(f"Unexpected exception: {e}") from e - - if os.path.isfile(input_path): - file = os.path.basename(input_path) - out_file = "eof_wrapped_" + file - out_path = os.path.join(output_dir, out_file) - - eof_wrapper.wrap_file(input_path, out_path, traces) - else: - for subdir, _, files in os.walk(input_path): - for file in files: - rel_dir = Path(subdir).relative_to(input_path) - out_file = "eof_wrapped_" + file - out_path = os.path.join(output_dir, rel_dir, out_file) - in_path = os.path.join(subdir, file) - - eof_wrapper.wrap_file(in_path, out_path, traces) - - os.makedirs(output_dir, exist_ok=True) - with open(os.path.join(output_dir, "metrics.json"), "w") as f: - json.dump(eof_wrapper.metrics, f, indent=4) - - -class BlockchainFixtures(EthereumTestRootModel): - """ - Class needed due to some of the `ethereum/tests` fixtures not having the - `_info.fixture_format` field in the JSON files. 
- """ - - root: Dict[str, BlockchainFixture] - - -class EofWrapper: - """EOF wrapping of blockchain tests with some simple metrics tracking.""" - - # JSON files had at least one fixture generated successfully with EOF - FILES_GENERATED = "files_generated" - # JSON files skipped explicitly or didn't have a fixture with EOF - FILES_SKIPPED = "files_skipped" - # Test fixtures with at least one EOF code and generated successfully - FIXTURES_GENERATED = "fixtures_generated" - # Test fixtures with no code able to be EOF-wrapped - FIXTURES_CANT_WRAP = "fixtures_cant_wrap" - # Test fixtures with EOF code but test doesn't pass and generation fails - FIXTURES_CANT_GENERATE = "fixtures_cant_generate" - # Invalid blocks in fixtures skipped - INVALID_BLOCKS_SKIPPED = "invalid_blocks_skipped" - # State accounts with code wrapped into valid EOF - ACCOUNTS_WRAPPED = "accounts_wrapped" - # State accounts with code wrapped into valid unique EOF - UNIQUE_ACCOUNTS_WRAPPED = "unique_accounts_wrapped" - # State accounts wrapped but the code is not valid EOF - ACCOUNTS_INVALID_EOF = "accounts_invalid_eof" - # State accounts wrapped into valid EOF but in a fixture of a failing test - ACCOUNTS_CANT_GENERATE = "accounts_cant_generate" - # Breakdown of EOF validation errors summing up to `accounts_invalid_eof` - VALIDATION_ERRORS = "validation_errors" - # Breakdown of runtime test failures summing up to `fixtures_cant_generate` - GENERATION_ERRORS = "generation_errors" - - def __init__(self) -> None: - """ - Initialize EofWrapper with metrics tracking and unique EOF set. - """ - self.metrics = { - self.FILES_GENERATED: 0, - self.FILES_SKIPPED: 0, - self.FIXTURES_GENERATED: 0, - self.FIXTURES_CANT_WRAP: 0, - self.FIXTURES_CANT_GENERATE: 0, - self.INVALID_BLOCKS_SKIPPED: 0, - self.ACCOUNTS_WRAPPED: 0, - self.UNIQUE_ACCOUNTS_WRAPPED: 0, - self.ACCOUNTS_INVALID_EOF: 0, - self.ACCOUNTS_CANT_GENERATE: 0, - self.VALIDATION_ERRORS: {}, - self.GENERATION_ERRORS: {}, - } - self.unique_eof: set[str] = set() - - file_skip_list = [ - "Pyspecs", - # EXTCODE* opcodes return different results for EOF targets and that is - # tested elsewhere - "stExtCodeHash", - # bigint syntax - "ValueOverflowParis", - "bc4895-withdrawals", - # EOF opcodes at diff places - tests obsolete - "opcD0DiffPlaces", - "opcD1DiffPlaces", - "opcD2DiffPlaces", - "opcD3DiffPlaces", - "opcE0DiffPlaces", - "opcE1DiffPlaces", - "opcE2DiffPlaces", - "opcE3DiffPlaces", - "opcE4DiffPlaces", - "opcE5DiffPlaces", - "opcE6DiffPlaces", - "opcE7DiffPlaces", - "opcE8DiffPlaces", - "opcECDiffPlaces", - "opcEEDiffPlaces", - "opcF7DiffPlaces", - "opcF8DiffPlaces", - "opcF9DiffPlaces", - "opcFBDiffPlaces", - # stack overflow always (limit of `max_stack_height` is 1023!) - "push0_fill_stack", - "push0_stack_overflow", - "blobbasefee_stack_overflow", - ] - - def wrap_file(self, in_path: str, out_path: str, traces: bool) -> None: - """ - Wrap code from blockchain test JSON file from `in_path` into - EOF containers. Tracks in metrics. 
- """ - for skip in self.file_skip_list: - if skip in in_path: - self.metrics[self.FILES_SKIPPED] = ( - cast(int, self.metrics[self.FILES_SKIPPED]) + 1 - ) - return - - fixtures: BlockchainFixtures = BlockchainFixtures.model_validate_json( - Path(in_path).read_text() - ) - - out_fixtures = Fixtures({}) - fixture: BlockchainFixture - for fixture_id, fixture in fixtures.root.items(): - fixture_eof_codes = [] - wrapped_at_least_one_account = False - - if fixture.pre: - for address, account in fixture.pre.root.items(): - if ( - account is None - or account.code is None - or len(account.code) == 0 - ): - continue - - try: - wrapped = wrap_code(account.code) - except ValueError as e: - self.metrics[self.ACCOUNTS_INVALID_EOF] = ( - cast(int, self.metrics[self.ACCOUNTS_INVALID_EOF]) - + 1 - ) - _inc_counter( - cast( - dict[Any, Any], - self.metrics[self.VALIDATION_ERRORS], - ), - self._short_exception_msg(e), - ) - continue - - if self._validate_eof(wrapped): - account.code = Bytes(wrapped) - wrapped_at_least_one_account = True - self.metrics[self.ACCOUNTS_WRAPPED] = ( - cast(int, self.metrics[self.ACCOUNTS_WRAPPED]) + 1 - ) - fixture_eof_codes.append(to_hex(account.code)) - - # wrap the same account in post state the same way - if ( - fixture.post_state - and fixture.post_state.root[address] - ): - fixture.post_state.root[address].code = Bytes( # type: ignore - wrapped - ) - else: - self.metrics[self.ACCOUNTS_INVALID_EOF] = ( - cast(int, self.metrics[self.ACCOUNTS_INVALID_EOF]) - + 1 - ) - if not wrapped_at_least_one_account: - self.metrics[self.FIXTURES_CANT_WRAP] = ( - cast(int, self.metrics[self.FIXTURES_CANT_WRAP]) + 1 - ) - continue - - try: - out_fixture = self._wrap_fixture(fixture, traces) - out_fixtures[fixture_id] = out_fixture - self.metrics[self.FIXTURES_GENERATED] = ( - cast(int, self.metrics[self.FIXTURES_GENERATED]) + 1 - ) - self.unique_eof.update(fixture_eof_codes) - self.metrics[self.UNIQUE_ACCOUNTS_WRAPPED] = len( - self.unique_eof - ) - except Exception as e: - _inc_counter( - cast( - dict[Any, Any], - self.metrics[self.GENERATION_ERRORS], - ), - self._short_exception_msg(e), - ) - - self.metrics[self.FIXTURES_CANT_GENERATE] = ( - cast(int, self.metrics[self.FIXTURES_CANT_GENERATE]) + 1 - ) - self.metrics[self.ACCOUNTS_CANT_GENERATE] = cast( - int, self.metrics[self.ACCOUNTS_CANT_GENERATE] - ) + len(fixture_eof_codes) - - print( - f"Exception {e} occurred during generation of {in_path}: {fixture_id}" - ) - - if len(out_fixtures) == 0: - self.metrics[self.FILES_SKIPPED] = ( - cast(int, self.metrics[self.FILES_SKIPPED]) + 1 - ) - return - - os.makedirs(os.path.dirname(out_path), exist_ok=True) - out_fixtures.collect_into_file(Path(out_path)) - self.metrics[self.FILES_GENERATED] = ( - cast(int, self.metrics[self.FILES_GENERATED]) + 1 - ) - - def _short_exception_msg(self, e: Exception) -> str: - """Shorten exception message for display.""" - threshold = 30 - short = str(e) - if len(short) > threshold: - short = short[:threshold] + "..." 
- return short - - def _wrap_fixture( - self, fixture: BlockchainFixture, traces: bool - ) -> BlockchainFixture: - env = Environment( - difficulty=fixture.genesis.difficulty, - gas_limit=fixture.genesis.gas_limit, - base_fee_per_gas=fixture.genesis.base_fee_per_gas, - blob_gas_used=fixture.genesis.blob_gas_used, - excess_blob_gas=fixture.genesis.excess_blob_gas, - parent_beacon_block_root=fixture.genesis.parent_beacon_block_root, - ) - - pre = fixture.pre - - t8n = EvmOneTransitionTool(trace=traces) - - test = BlockchainTest( - genesis_environment=env, - fork=EOFv1, - pre=pre.root, - post=fixture.post_state.root if fixture.post_state else {}, - blocks=[], - tag="wrapped test", - ) - - for fixture_block in fixture.blocks: - if isinstance(fixture_block, FixtureBlock): - header = fixture_block.header - block = Block( - ommers_hash=header.ommers_hash, - fee_recipient=header.fee_recipient, - difficulty=header.difficulty, - number=header.number, - gas_limit=header.gas_limit, - timestamp=header.timestamp, - extra_data=header.extra_data, - prev_randao=header.prev_randao, - nonce=header.nonce, - base_fee_per_gas=header.base_fee_per_gas, - withdrawals_root=header.withdrawals_root, - parent_beacon_block_root=header.parent_beacon_block_root, - ) - assert not fixture_block.ommers - assert not fixture_block.withdrawals - - for fixture_tx in fixture_block.txs: - fixture_tx_dump = fixture_tx.model_dump() - fixture_tx_dump.pop("ty") - fixture_tx_dump.pop("data") - tx = Transaction( - ty=fixture_tx.ty, - data=fixture_tx.data, - **fixture_tx_dump, - ) - block.txs.append(tx) - - test.blocks.append(block) - elif isinstance(fixture_block, InvalidFixtureBlock): - # Skip - invalid blocks are not supported. Reason: - # FixtureTransaction doesn't support expected exception. But we - # can continue and test the remaining blocks. - self.metrics[self.INVALID_BLOCKS_SKIPPED] = ( - cast(int, self.metrics[self.INVALID_BLOCKS_SKIPPED]) + 1 - ) - else: - raise TypeError("not a FixtureBlock") - - result = test.generate( - t8n=t8n, - fixture_format=BlockchainFixture, - ) - assert isinstance(result, BlockchainFixture) - result.info["fixture-format"] = "blockchain_test" - if traces: - print_traces(t8n.get_traces()) - return result - - def _validate_eof( - self, container: Container, metrics: bool = True - ) -> bool: - eof_parse = EOFParse() - - result = eof_parse.run(input_value=to_hex(container)) - actual_message = result.stdout.strip() - if "OK" not in actual_message: - if metrics: - _inc_counter( - cast( - dict[Any, Any], - self.metrics[self.VALIDATION_ERRORS], - ), - actual_message, - ) - return False - - return True - - -# `no_type_check` required because OpcodeWithOperand.opcode can be `None` when -# formatting as a string, but here it can never be `None`. -@no_type_check -def wrap_code(account_code: Bytes) -> Container: - """ - Wrap `account_code` into a simplest EOF container, applying some simple - heuristics in order to obtain a valid code section termination. 
- """ - assert len(account_code) > 0 - - opcodes = process_evm_bytes(account_code) - - if not opcodes[-1].terminating: - opcodes.append(OpcodeWithOperands(opcode=Op.STOP)) - - while ( - len(opcodes) > 1 - and opcodes[-2].terminating - and opcodes[-1].terminating - ): - opcodes.pop() - - bytecode = Bytecode() - - for opcode in opcodes: - bytecode += opcode.bytecode - - return Container.Code(bytecode) - - -def _inc_counter(d: dict, key: Any) -> None: - if key in d: - d[key] += 1 - else: - d[key] = 1 diff --git a/packages/testing/src/execution_testing/cli/evm_bytes.py b/packages/testing/src/execution_testing/cli/evm_bytes.py index 1c91891cf1..ce5a1c6390 100644 --- a/packages/testing/src/execution_testing/cli/evm_bytes.py +++ b/packages/testing/src/execution_testing/cli/evm_bytes.py @@ -46,11 +46,6 @@ def format_assembly(self) -> str: opcode_name = self.opcode._name_.lower() if self.opcode.data_portion_length == 0: return f"{opcode_name}" - elif self.opcode == Op.RJUMPV: - operands = ", ".join( - str(ZeroPaddedHexNumber(operand)) for operand in self.operands - ) - return f"{opcode_name} {operands}" else: operands = ", ".join( str(ZeroPaddedHexNumber(operand)) for operand in self.operands @@ -94,7 +89,6 @@ def process_evm_bytes(evm_bytes: bytes) -> List[OpcodeWithOperands]: # noqa: D1 raise ValueError(f"Unknown opcode: {opcode_byte}") if opcode.data_portion_length > 0: - signed = opcode in [Op.RJUMP, Op.RJUMPI] opcodes.append( OpcodeWithOperands( opcode=opcode, @@ -102,26 +96,11 @@ def process_evm_bytes(evm_bytes: bytes) -> List[OpcodeWithOperands]: # noqa: D1 int.from_bytes( evm_bytes_array[: opcode.data_portion_length], "big", - signed=signed, ) ], ) ) evm_bytes_array = evm_bytes_array[opcode.data_portion_length :] - elif opcode == Op.RJUMPV: - if len(evm_bytes_array) == 0: - opcodes.append(OpcodeWithOperands(opcode=opcode)) - else: - max_index = evm_bytes_array.pop(0) - operands: List[int] = [] - for _ in range(max_index + 1): - operands.append( - int.from_bytes(evm_bytes_array[:2], "big", signed=True) - ) - evm_bytes_array = evm_bytes_array[2:] - opcodes.append( - OpcodeWithOperands(opcode=opcode, operands=operands) - ) else: opcodes.append(OpcodeWithOperands(opcode=opcode)) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/direct/conftest.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/direct/conftest.py index 40a146602e..14d4744884 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/direct/conftest.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/direct/conftest.py @@ -14,17 +14,16 @@ import pytest from execution_testing.base_types import to_json +from execution_testing.cli.pytest_commands.plugins.consume.consume import ( + FixturesSource, +) from execution_testing.client_clis.ethereum_cli import EthereumCLI from execution_testing.client_clis.fixture_consumer_tool import ( FixtureConsumerTool, ) -from execution_testing.cli.pytest_commands.plugins.consume.consume import ( - FixturesSource, -) from execution_testing.fixtures import ( BaseFixture, BlockchainFixture, - EOFFixture, StateFixture, ) from execution_testing.fixtures.consume import ( @@ -91,7 +90,6 @@ def pytest_configure(config: pytest.Config) -> None: # noqa: D103 config.supported_fixture_formats = [ # type: ignore[attr-defined] StateFixture, BlockchainFixture, - EOFFixture, ] fixture_consumers = [] for fixture_consumer_bin_path in config.getoption("fixture_consumer_bin"): diff --git 
a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/pre_alloc.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/pre_alloc.py index af931e66dd..caea02a545 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/pre_alloc.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/pre_alloc.py @@ -41,9 +41,8 @@ compute_deterministic_create2_address, ) from execution_testing.test_types import Alloc as BaseAlloc -from execution_testing.test_types.eof.v1 import Container from execution_testing.tools import Initcode -from execution_testing.vm import Bytecode, EVMCodeType, Op +from execution_testing.vm import Bytecode, Op from .contracts import ( check_deterministic_factory_deployment, @@ -113,15 +112,6 @@ def pytest_addoption(parser: pytest.Parser) -> None: type=int, help="The start private key from which tests will deploy EOAs.", ) - pre_alloc_group.addoption( - "--evm-code-type", - action="store", - dest="evm_code_type", - default=None, - type=EVMCodeType, - choices=list(EVMCodeType), - help="Type of EVM code to deploy in each test by default.", - ) pre_alloc_group.addoption( "--skip-cleanup", action="store_true", @@ -240,11 +230,10 @@ class Alloc(BaseAlloc): _sender: EOA = PrivateAttr() _eth_rpc: EthRPC = PrivateAttr() _pending_txs: List[PendingTransaction] = PrivateAttr(default_factory=list) - _deployed_contracts: List[Tuple[Address, Bytes | Bytecode | Container]] = ( - PrivateAttr(default_factory=list) + _deployed_contracts: List[Tuple[Address, Bytes | Bytecode]] = PrivateAttr( + default_factory=list ) _funded_eoa: List[EOA] = PrivateAttr(default_factory=list) - _evm_code_type: EVMCodeType | None = PrivateAttr(None) _chain_id: int = PrivateAttr() _node_id: str = PrivateAttr("") _address_stubs: AddressStubs = PrivateAttr() @@ -257,7 +246,6 @@ def __init__( eth_rpc: EthRPC, eoa_iterator: Iterator[EOA], chain_id: int, - evm_code_type: EVMCodeType | None = None, node_id: str = "", address_stubs: AddressStubs | None = None, **kwargs: Any, @@ -268,7 +256,6 @@ def __init__( self._sender = sender self._eth_rpc = eth_rpc self._eoa_iterator = eoa_iterator - self._evm_code_type = evm_code_type self._chain_id = chain_id self._node_id = node_id self._address_stubs = address_stubs or AddressStubs(root={}) @@ -283,20 +270,8 @@ def __setitem__( "Tests are not allowed to set pre-alloc items in execute mode" ) - def code_pre_processor( - self, - code: Bytecode | Container, - *, - evm_code_type: EVMCodeType | None, - ) -> Bytecode | Container: + def code_pre_processor(self, code: Bytecode) -> Bytecode: """Pre-processes the code before setting it.""" - if evm_code_type is None: - evm_code_type = self._evm_code_type - if evm_code_type == EVMCodeType.EOF_V1: - if not isinstance(code, Container): - if isinstance(code, Bytecode) and not code.terminating: - return Container.Code(code + Op.STOP) - return Container.Code(code) return code def _add_pending_tx( @@ -447,7 +422,6 @@ def deploy_contract( balance: NumberConvertible = 0, nonce: NumberConvertible = 1, address: Address | None = None, - evm_code_type: EVMCodeType | None = None, label: str | None = None, stub: str | None = None, ) -> Address: @@ -511,10 +485,10 @@ def deploy_contract( ) deploy_gas_limit += len(storage.root) * 22_600 - assert isinstance(code, Bytecode) or isinstance(code, Container), ( + assert isinstance(code, Bytecode), ( f"incompatible code type: {type(code)}" ) - code = self.code_pre_processor(code, evm_code_type=evm_code_type) + 
code = self.code_pre_processor(code) max_code_size = self._fork.max_code_size() if len(code) > max_code_size: @@ -522,20 +496,12 @@ def deploy_contract( deploy_gas_limit += len(code) * gas_costs.G_CODE_DEPOSIT_BYTE - prepared_initcode: Bytecode | Container - - if evm_code_type == EVMCodeType.EOF_V1: - assert isinstance(code, Container) - prepared_initcode = Container.Init( - deploy_container=code, initcode_prefix=initcode_prefix - ) - else: - prepared_initcode = Initcode( - deploy_code=code, initcode_prefix=initcode_prefix - ) - deploy_gas_limit += memory_expansion_gas_calculator( - new_bytes=len(bytes(prepared_initcode)) - ) + prepared_initcode = Initcode( + deploy_code=code, initcode_prefix=initcode_prefix + ) + deploy_gas_limit += memory_expansion_gas_calculator( + new_bytes=len(bytes(prepared_initcode)) + ) max_initcode_size = self._fork.max_initcode_size() if len(prepared_initcode) > max_initcode_size: @@ -923,27 +889,12 @@ def send_pending_transactions(self) -> List[TransactionByHashResponse]: return responses -@pytest.fixture(autouse=True) -def evm_code_type(request: pytest.FixtureRequest) -> EVMCodeType: - """Return default EVM code type for all tests (LEGACY).""" - parameter_evm_code_type = request.config.getoption("evm_code_type") - if parameter_evm_code_type is not None: - assert type(parameter_evm_code_type) is EVMCodeType, ( - "Invalid EVM code type" - ) - logger.info(f"Using EVM code type: {parameter_evm_code_type}") - return parameter_evm_code_type - logger.debug(f"Using default EVM code type: {EVMCodeType.LEGACY}") - return EVMCodeType.LEGACY - - @pytest.fixture(autouse=True, scope="function") def pre( fork: Fork, worker_key: EOA, eoa_iterator: Iterator[EOA], eth_rpc: EthRPC, - evm_code_type: EVMCodeType, chain_config: ChainConfig, address_stubs: AddressStubs | None, skip_cleanup: bool, @@ -962,15 +913,13 @@ def pre( # Prepare the pre-alloc logger.debug( f"Initializing pre-alloc for test {request.node.nodeid} " - f"(fork={actual_fork}, chain_id={chain_config.chain_id}, " - f"evm_code_type={evm_code_type})" + f"(fork={actual_fork}, chain_id={chain_config.chain_id})" ) pre = Alloc( fork=actual_fork, sender=worker_key, eth_rpc=eth_rpc, eoa_iterator=eoa_iterator, - evm_code_type=evm_code_type, chain_id=chain_config.chain_id, node_id=request.node.nodeid, address_stubs=address_stubs, diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py index 514fc91d6a..e75f01f227 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py @@ -955,7 +955,6 @@ def pytest_runtest_makereport( ("evm_dump_dir", item.config.evm_dump_dir) ) else: - # not yet for EOF report.user_properties.append(("evm_dump_dir", "N/A")) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/gen_test_doc.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/gen_test_doc.py index 7a7f8cdcac..235c7d49c2 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/gen_test_doc.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/gen_test_doc.py @@ -452,7 +452,6 @@ def create_function_page_props( if not valid_from_marker: valid_from_fork = "Frontier" else: - # NOTE: The EOF tests cases contain two fork names in 
their # valid_from marker, separated by a comma. Take the last. valid_from_fork = valid_from_marker.args[0].split(",")[-1] @@ -583,8 +582,7 @@ def add_directory_page_props(self) -> None: str(directory), branch_or_commit_or_tag=self.ref ), # TODO: This won't work in all cases; should be from the - # development fork Currently breaks for - # `tests/unscheduled/eip7692_eof_v1/index.md` + # development fork target_or_valid_fork=fork.capitalize() if fork else "Unknown", # init.py will be used for docstrings package_name=get_import_path(directory), @@ -670,8 +668,6 @@ def sort_by_fork_deployment_and_path(x: PageProps) -> Tuple[Any, ...]: - ("Test Case Reference",) -> tests/index.md - ("Test Case Reference", "Berlin") -> tests/berlin/index.md - - ("Test Case Reference", "EIP-7692 EOF V1", tracker.md") - tests/unscheduled/eip7692_eof_v1/tracker.md - ("Test Case Reference", "Shanghai", "EIP-3855 PUSH0", "Spec") -> tests/shanghai/eip3855_push0/spec.py diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/page_props.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/page_props.py index 6e166b8d63..9e44233cfc 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/page_props.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/page_props.py @@ -44,7 +44,6 @@ def apply_name_filters(input_string: str) -> str: "bls12 map fp to g1": "BLS12_MAP_FP_TO_G1", "bls12 pairing": "BLS12_PAIRING_CHECK", "eips": "EIPs", - "eof": "EOF", "vm": "VM", } # adding these is the expensive part diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/pre_alloc.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/pre_alloc.py index c2008de700..ce75f76131 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/pre_alloc.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/pre_alloc.py @@ -37,9 +37,7 @@ compute_deterministic_create2_address, ) from execution_testing.test_types import Alloc as BaseAlloc -from execution_testing.test_types.eof.v1 import Container from execution_testing.tools import Initcode -from execution_testing.vm import Bytecode, EVMCodeType, Opcodes CONTRACT_START_ADDRESS_DEFAULT = 0x1000000000000000000000000000000000001000 CONTRACT_ADDRESS_INCREMENTS_DEFAULT = 0x100 @@ -79,15 +77,6 @@ def pytest_addoption(parser: pytest.Parser) -> None: type=str, help="The address increment value for each deployed contract by a test.", ) - pre_alloc_group.addoption( - "--evm-code-type", - action="store", - dest="evm_code_type", - default=None, - type=EVMCodeType, - choices=list(EVMCodeType), - help="Type of EVM code to deploy in each test by default.", - ) class AllocMode(IntEnum): @@ -107,7 +96,6 @@ class Alloc(BaseAlloc): _alloc_mode: AllocMode = PrivateAttr() _contract_address_iterator: Iterator[Address] = PrivateAttr() _eoa_iterator: Iterator[EOA] = PrivateAttr() - _evm_code_type: EVMCodeType | None = PrivateAttr(None) _fork: Fork = PrivateAttr() def __init__( @@ -117,7 +105,6 @@ def __init__( contract_address_iterator: Iterator[Address], eoa_iterator: Iterator[EOA], fork: Fork, - evm_code_type: EVMCodeType | None = None, **kwargs: Any, ) -> None: """Initialize allocation with the given properties.""" @@ -125,7 +112,6 @@ def __init__( self._alloc_mode = alloc_mode self._contract_address_iterator = contract_address_iterator 
self._eoa_iterator = eoa_iterator - self._evm_code_type = evm_code_type self._fork = fork def __setitem__( @@ -138,17 +124,8 @@ def __setitem__( raise ValueError("Cannot set items in strict mode") super().__setitem__(address, account) - def code_pre_processor( - self, code: BytesConvertible, *, evm_code_type: EVMCodeType | None - ) -> BytesConvertible: + def code_pre_processor(self, code: BytesConvertible) -> BytesConvertible: """Pre-processes the code before setting it.""" - if evm_code_type is None: - evm_code_type = self._evm_code_type - if evm_code_type == EVMCodeType.EOF_V1: - if not isinstance(code, Container): - if isinstance(code, Bytecode) and not code.terminating: - return Container.Code(code + Opcodes.STOP) - return Container.Code(code) return code def deterministic_deploy_contract( @@ -235,7 +212,6 @@ def deploy_contract( balance: NumberConvertible = 0, nonce: NumberConvertible = 1, address: Address | None = None, - evm_code_type: EVMCodeType | None = None, label: str | None = None, stub: str | None = None, ) -> Address: @@ -266,7 +242,7 @@ def deploy_contract( "impossible to deploy contract with nonce lower than one" ) - code = self.code_pre_processor(code, evm_code_type=evm_code_type) + code = self.code_pre_processor(code) code_bytes = ( bytes(code) if not isinstance(code, (bytes, str)) else code ) @@ -546,24 +522,11 @@ def eoa_iterator( return iter(eoa_by_index(i).copy() for i in count()) -@pytest.fixture(autouse=True) -def evm_code_type(request: pytest.FixtureRequest) -> EVMCodeType: - """Return default EVM code type for all tests (LEGACY).""" - parameter_evm_code_type = request.config.getoption("evm_code_type") - if parameter_evm_code_type is not None: - assert type(parameter_evm_code_type) is EVMCodeType, ( - "Invalid EVM code type" - ) - return parameter_evm_code_type - return EVMCodeType.LEGACY - - @pytest.fixture(scope="function") def pre( alloc_mode: AllocMode, contract_address_iterator: Iterator[Address], eoa_iterator: Iterator[EOA], - evm_code_type: EVMCodeType, fork: Fork | None, request: pytest.FixtureRequest, ) -> Alloc: @@ -579,5 +542,4 @@ def pre( contract_address_iterator=contract_address_iterator, eoa_iterator=eoa_iterator, fork=actual_fork, - evm_code_type=evm_code_type, ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_pre_alloc.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_pre_alloc.py index 5484435ea8..779dedb353 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_pre_alloc.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_pre_alloc.py @@ -11,7 +11,7 @@ ) from execution_testing.forks import Fork, Prague from execution_testing.test_types import EOA -from execution_testing.vm import EVMCodeType, Op +from execution_testing.vm import Op from ..pre_alloc import ( CONTRACT_ADDRESS_INCREMENTS_DEFAULT, @@ -24,7 +24,6 @@ def create_test_alloc( alloc_mode: AllocMode = AllocMode.PERMISSIVE, fork: Fork = Prague, - evm_code_type: EVMCodeType = EVMCodeType.LEGACY, ) -> Alloc: """Create a test Alloc instance with default iterators.""" contract_iter = iter( @@ -46,7 +45,6 @@ def create_test_alloc( contract_address_iterator=contract_iter, eoa_iterator=eoa_iter, fork=fork, - evm_code_type=evm_code_type, ) @@ -153,12 +151,9 @@ def test_alloc_empty_account() -> None: # Note: empty_account() only returns address, doesn't add to pre -@pytest.mark.parametrize( - "evm_code_type", 
[EVMCodeType.LEGACY, EVMCodeType.EOF_V1] -) -def test_alloc_deploy_contract_code_types(evm_code_type: EVMCodeType) -> None: - """Test `Alloc.deploy_contract` with different EVM code types.""" - pre = create_test_alloc(evm_code_type=evm_code_type) +def test_alloc_deploy_contract_code_types() -> None: + """Test `Alloc.deploy_contract` bytecode output.""" + pre = create_test_alloc() contract = pre.deploy_contract(Op.SSTORE(0, 1) + Op.STOP) @@ -167,12 +162,8 @@ def test_alloc_deploy_contract_code_types(evm_code_type: EVMCodeType) -> None: assert account is not None assert account.code is not None - if evm_code_type == EVMCodeType.LEGACY: - # Legacy bytecode should be raw opcodes - assert account.code == bytes.fromhex("600160005500") - elif evm_code_type == EVMCodeType.EOF_V1: - # EOF v1 should have the EOF container header - assert account.code.startswith(b"\xef\x00\x01") + # Bytecode should be raw opcodes + assert account.code == bytes.fromhex("600160005500") @pytest.mark.parametrize( diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py index 6544826705..115a94aa78 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py @@ -413,26 +413,19 @@ def covariant_decorator( fork_attribute_name="precompiles", argnames=["precompile"], ), - covariant_decorator( - marker_name="with_all_evm_code_types", - description="marks a test to be parametrized for all EVM code types at parameter named" - " `evm_code_type` of type `EVMCodeType`, such as `LEGACY` and `EOF_V1`", - fork_attribute_name="evm_code_types", - argnames=["evm_code_type"], - ), covariant_decorator( marker_name="with_all_call_opcodes", description="marks a test to be parametrized for all *CALL opcodes at parameter named" - " call_opcode, and also the appropriate EVM code type at parameter named evm_code_type", + " call_opcode", fork_attribute_name="call_opcodes", - argnames=["call_opcode", "evm_code_type"], + argnames=["call_opcode"], ), covariant_decorator( marker_name="with_all_create_opcodes", description="marks a test to be parametrized for all *CREATE* opcodes at parameter named" - " create_opcode, and also the appropriate EVM code type at parameter named evm_code_type", + " create_opcode", fork_attribute_name="create_opcodes", - argnames=["create_opcode", "evm_code_type"], + argnames=["create_opcode"], ), covariant_decorator( marker_name="with_all_system_contracts", diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_covariant_markers.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_covariant_markers.py index 62ddbe9496..a256bb539d 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_covariant_markers.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_covariant_markers.py @@ -168,20 +168,6 @@ def test_case(state_test, precompile): None, id="with_all_precompiles", ), - pytest.param( - """ - import pytest - @pytest.mark.with_all_evm_code_types() - @pytest.mark.valid_from("Cancun") - @pytest.mark.valid_until("Cancun") - @pytest.mark.state_test_only - def test_case(state_test, evm_code_type): - pass - """, - {"passed": 1, "failed": 0, "skipped": 0, "errors": 0}, - None, - id="with_all_evm_code_types", - ), pytest.param( """ 
import pytest @@ -196,23 +182,6 @@ def test_case(state_test, call_opcode): None, id="with_all_call_opcodes", ), - pytest.param( - """ - import pytest - from execution_testing import EVMCodeType - @pytest.mark.with_all_call_opcodes( - selector=(lambda _, evm_code_type: evm_code_type == EVMCodeType.LEGACY) - ) - @pytest.mark.valid_from("Cancun") - @pytest.mark.valid_until("Cancun") - @pytest.mark.state_test_only - def test_case(state_test, call_opcode): - pass - """, - {"passed": 4, "failed": 0, "skipped": 0, "errors": 0}, - None, - id="with_all_call_opcodes_with_selector_for_evm_code_type", - ), pytest.param( """ import pytest diff --git a/packages/testing/src/execution_testing/cli/tests/test_eofwrap.py b/packages/testing/src/execution_testing/cli/tests/test_eofwrap.py deleted file mode 100644 index 5418dba3d2..0000000000 --- a/packages/testing/src/execution_testing/cli/tests/test_eofwrap.py +++ /dev/null @@ -1,67 +0,0 @@ -"""Tests for the eofwrap module and click CLI.""" - -from typing import Any - -import pytest - -from execution_testing.base_types.conversions import to_hex -from execution_testing.test_types.eof.v1 import Container -from execution_testing.vm import Op - -from ..eofwrap import wrap_code - - -@pytest.mark.parametrize( - "code,result", - [ - [Op.STOP, Container.Code(Op.STOP)], - [Op.RETURN(0, 0), Container.Code(Op.RETURN(0, 0))], - [Op.REVERT(0, 0), Container.Code(Op.REVERT(0, 0))], - [Op.INVALID, Container.Code(Op.INVALID)], - [Op.PUSH1, Container.Code(Op.PUSH1[0] + Op.STOP)], - [Op.PUSH1[0], Container.Code(Op.PUSH1[0] + Op.STOP)], - [Op.PUSH1[0] + Op.STOP, Container.Code(Op.PUSH1[0] + Op.STOP)], - [Op.STOP + Op.STOP, Container.Code(Op.STOP)], - [Op.RETURN(0, 0) + Op.STOP, Container.Code(Op.RETURN(0, 0))], - [Op.REVERT(0, 0) + Op.STOP, Container.Code(Op.REVERT(0, 0))], - [Op.INVALID + Op.STOP, Container.Code(Op.INVALID)], - [Op.ADDRESS, Container.Code(Op.ADDRESS + Op.STOP)], - [Op.ADDRESS + Op.STOP, Container.Code(Op.ADDRESS + Op.STOP)], - [ - Op.ADDRESS + Op.RETURN(0, 0), - Container.Code(Op.ADDRESS + Op.RETURN(0, 0)), - ], - [ - Op.ADDRESS + Op.REVERT(0, 0), - Container.Code(Op.ADDRESS + Op.REVERT(0, 0)), - ], - [Op.ADDRESS + Op.INVALID, Container.Code(Op.ADDRESS + Op.INVALID)], - [Op.ADDRESS + Op.STOP + Op.STOP, Container.Code(Op.ADDRESS + Op.STOP)], - [ - Op.ADDRESS + Op.RETURN(0, 0) + Op.STOP, - Container.Code(Op.ADDRESS + Op.RETURN(0, 0)), - ], - [ - Op.ADDRESS + Op.REVERT(0, 0) + Op.STOP, - Container.Code(Op.ADDRESS + Op.REVERT(0, 0)), - ], - [ - Op.ADDRESS + Op.INVALID + Op.STOP, - Container.Code(Op.ADDRESS + Op.INVALID), - ], - [Op.GAS + Op.STOP, Container.Code(Op.GAS + Op.STOP)], - [Op.GAS + Op.RETURN(0, 0), Container.Code(Op.GAS + Op.RETURN(0, 0))], - [Op.GAS + Op.REVERT(0, 0), Container.Code(Op.GAS + Op.REVERT(0, 0))], - [Op.GAS + Op.INVALID, Container.Code(Op.GAS + Op.INVALID)], - [Op.RJUMPV[1, 2, 3], Container.Code(Op.RJUMPV[1, 2, 3] + Op.STOP)], - [Op.RJUMPV, Container.Code(Op.RJUMPV + Op.STOP)], - [ - Op.RJUMPV[-1, 0x7FFF, -0x7FFF], - Container.Code(Op.RJUMPV[-1, 0x7FFF, -0x7FFF] + Op.STOP), - ], - ], - ids=lambda param: to_hex(param), -) -def test_wrap_code(code: Any, result: Any) -> None: - """Tests for the EOF wrapping logic and heuristics.""" - assert wrap_code(bytes(code)) == result diff --git a/packages/testing/src/execution_testing/cli/tests/test_evm_bytes.py b/packages/testing/src/execution_testing/cli/tests/test_evm_bytes.py index 293bacfdf6..c984be287c 100644 --- a/packages/testing/src/execution_testing/cli/tests/test_evm_bytes.py +++ 
b/packages/testing/src/execution_testing/cli/tests/test_evm_bytes.py @@ -16,21 +16,6 @@ "0x7fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebf5f527fc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedf6020527fe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff60405260786040356020355f35608a565b5f515f55602051600155604051600255005b5e56", # noqa: E501 "Op.PUSH32[0xa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebf] + Op.PUSH0 + Op.MSTORE + Op.PUSH32[0xc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedf] + Op.PUSH1[0x20] + Op.MSTORE + Op.PUSH32[0xe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff] + Op.PUSH1[0x40] + Op.MSTORE + Op.PUSH1[0x78] + Op.PUSH1[0x40] + Op.CALLDATALOAD + Op.PUSH1[0x20] + Op.CALLDATALOAD + Op.PUSH0 + Op.CALLDATALOAD + Op.PUSH1[0x8a] + Op.JUMP + Op.JUMPDEST + Op.PUSH0 + Op.MLOAD + Op.PUSH0 + Op.SSTORE + Op.PUSH1[0x20] + Op.MLOAD + Op.PUSH1[0x1] + Op.SSTORE + Op.PUSH1[0x40] + Op.MLOAD + Op.PUSH1[0x2] + Op.SSTORE + Op.STOP + Op.JUMPDEST + Op.MCOPY + Op.JUMP", # noqa: E501 ] -rjump_vector = [ - "0xe0fffe", - "Op.RJUMP[-0x2]", -] -rjumpi_vector = [ - "0xe1fffe", - "Op.RJUMPI[-0x2]", -] -rjumpv_vector = [ - "0xe213b1465aef60276095472e3250cf64736f6c63430008150033a26469706673582212206eab0a7969fe", - "Op.RJUMPV[-0x4eba, 0x5aef, 0x6027, 0x6095, 0x472e, 0x3250, -0x309c, " - "0x736f, 0x6c63, 0x4300," - + " 0x815, 0x33, -0x5d9c, 0x6970, 0x6673, 0x5822, 0x1220, 0x6eab, " - "0xa79, 0x69fe]", -] @pytest.mark.parametrize( @@ -40,12 +25,6 @@ (basic_vector[0][2:], basic_vector[1]), # no "0x" prefix (complex_vector[0], complex_vector[1]), (complex_vector[0][2:], complex_vector[1]), # no "0x" prefix - (rjump_vector[0], rjump_vector[1]), - (rjump_vector[0][2:], rjump_vector[1]), # no "0x" prefix - (rjumpi_vector[0], rjumpi_vector[1]), - (rjumpi_vector[0][2:], rjumpi_vector[1]), # no "0x" prefix - (rjumpv_vector[0], rjumpv_vector[1]), - (rjumpv_vector[0][2:], rjumpv_vector[1]), # no "0x" prefix ], ) def test_evm_bytes(evm_bytes: str, python_opcodes: str) -> None: @@ -67,9 +46,6 @@ def test_individual_opcodes(opcode: Op) -> None: if opcode.data_portion_length > 0: expected_output = f"Op.{opcode._name_}[0x0]" data_portion = b"\x00" * opcode.data_portion_length - elif opcode == Op.RJUMPV: - expected_output = f"Op.{opcode._name_}[0x0]" - data_portion = b"\0\0\0" else: expected_output = f"Op.{opcode._name_}" diff --git a/packages/testing/src/execution_testing/cli/tests/test_pytest_fill_command.py b/packages/testing/src/execution_testing/cli/tests/test_pytest_fill_command.py index f75d51ef8a..dcb3d5a9e0 100644 --- a/packages/testing/src/execution_testing/cli/tests/test_pytest_fill_command.py +++ b/packages/testing/src/execution_testing/cli/tests/test_pytest_fill_command.py @@ -57,7 +57,6 @@ def test_fill_help(self, run_fill: Callable[..., Result]) -> None: result = run_fill("--help") assert "[--evm-bin EVM_BIN]" in result.output assert "[--traces]" in result.output - assert "[--evm-code-type EVM_CODE_TYPE]" in result.output assert "--help" in result.output assert "Arguments defining evm executable behavior:" in result.output diff --git a/packages/testing/src/execution_testing/client_clis/clis/evmone.py b/packages/testing/src/execution_testing/client_clis/clis/evmone.py index 6e2d404ccb..541f88539a 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/evmone.py +++ b/packages/testing/src/execution_testing/client_clis/clis/evmone.py @@ -11,7 +11,6 @@ from pathlib import Path from typing import Any, ClassVar, 
Dict, List, Optional -from execution_testing.exceptions.exceptions.block import BlockException import pytest from execution_testing.client_clis.file_utils import ( @@ -21,11 +20,11 @@ FixtureConsumerTool, ) from execution_testing.exceptions import ( - EOFException, ExceptionBase, ExceptionMapper, TransactionException, ) +from execution_testing.exceptions.exceptions.block import BlockException from execution_testing.fixtures.base import FixtureFormat from execution_testing.fixtures.blockchain import BlockchainFixture from execution_testing.fixtures.state import StateFixture @@ -372,50 +371,5 @@ class EvmoneExceptionMapper(ExceptionMapper): # missing or failing BlockException.SYSTEM_CONTRACT_EMPTY: "system contract empty or failed", BlockException.SYSTEM_CONTRACT_CALL_FAILED: "system contract empty or failed", - # TODO EVMONE needs to differentiate when the section is missing in the - # header or body - EOFException.MISSING_STOP_OPCODE: "err: no_terminating_instruction", - EOFException.MISSING_CODE_HEADER: "err: code_section_missing", - EOFException.MISSING_TYPE_HEADER: "err: type_section_missing", - # TODO EVMONE these exceptions are too similar, this leeds to ambiguity - EOFException.MISSING_TERMINATOR: "err: header_terminator_missing", - EOFException.MISSING_HEADERS_TERMINATOR: "err: section_headers_not_terminated", - EOFException.INVALID_VERSION: "err: eof_version_unknown", - EOFException.INVALID_NON_RETURNING_FLAG: "err: invalid_non_returning_flag", - EOFException.INVALID_MAGIC: "err: invalid_prefix", - EOFException.INVALID_FIRST_SECTION_TYPE: "err: invalid_first_section_type", - EOFException.INVALID_SECTION_BODIES_SIZE: "err: invalid_section_bodies_size", - EOFException.INVALID_TYPE_SECTION_SIZE: "err: invalid_type_section_size", - EOFException.INCOMPLETE_SECTION_SIZE: "err: incomplete_section_size", - EOFException.INCOMPLETE_SECTION_NUMBER: "err: incomplete_section_number", - EOFException.TOO_MANY_CODE_SECTIONS: "err: too_many_code_sections", - EOFException.ZERO_SECTION_SIZE: "err: zero_section_size", - EOFException.MISSING_DATA_SECTION: "err: data_section_missing", - EOFException.UNDEFINED_INSTRUCTION: "err: undefined_instruction", - EOFException.INPUTS_OUTPUTS_NUM_ABOVE_LIMIT: "err: inputs_outputs_num_above_limit", - EOFException.UNREACHABLE_INSTRUCTIONS: "err: unreachable_instructions", - EOFException.INVALID_RJUMP_DESTINATION: "err: invalid_rjump_destination", - EOFException.UNREACHABLE_CODE_SECTIONS: "err: unreachable_code_sections", - EOFException.STACK_UNDERFLOW: "err: stack_underflow", - EOFException.STACK_OVERFLOW: "err: stack_overflow", - EOFException.MAX_STACK_INCREASE_ABOVE_LIMIT: "err: max_stack_increase_above_limit", - EOFException.STACK_HIGHER_THAN_OUTPUTS: "err: stack_higher_than_outputs_required", - EOFException.JUMPF_DESTINATION_INCOMPATIBLE_OUTPUTS: ( - "err: jumpf_destination_incompatible_outputs" - ), - EOFException.INVALID_MAX_STACK_INCREASE: "err: invalid_max_stack_increase", - EOFException.INVALID_DATALOADN_INDEX: "err: invalid_dataloadn_index", - EOFException.TRUNCATED_INSTRUCTION: "err: truncated_instruction", - EOFException.TOPLEVEL_CONTAINER_TRUNCATED: "err: toplevel_container_truncated", - EOFException.ORPHAN_SUBCONTAINER: "err: unreferenced_subcontainer", - EOFException.CONTAINER_SIZE_ABOVE_LIMIT: "err: container_size_above_limit", - EOFException.INVALID_CONTAINER_SECTION_INDEX: "err: invalid_container_section_index", - EOFException.INCOMPATIBLE_CONTAINER_KIND: "err: incompatible_container_kind", - EOFException.AMBIGUOUS_CONTAINER_KIND: "err: 
ambiguous_container_kind", - EOFException.STACK_HEIGHT_MISMATCH: "err: stack_height_mismatch", - EOFException.TOO_MANY_CONTAINERS: "err: too_many_container_sections", - EOFException.INVALID_CODE_SECTION_INDEX: "err: invalid_code_section_index", - EOFException.CALLF_TO_NON_RETURNING: "err: callf_to_non_returning_function", - EOFException.EOFCREATE_WITH_TRUNCATED_CONTAINER: "err: eofcreate_with_truncated_container", } mapping_regex: ClassVar[Dict[ExceptionBase, str]] = {} diff --git a/packages/testing/src/execution_testing/client_clis/clis/nethermind.py b/packages/testing/src/execution_testing/client_clis/clis/nethermind.py index a068b87c4a..5db8488f92 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/nethermind.py +++ b/packages/testing/src/execution_testing/client_clis/clis/nethermind.py @@ -9,8 +9,6 @@ from pathlib import Path from typing import Any, Dict, List, Optional, Tuple -import pytest - from execution_testing.exceptions import ( BlockException, ExceptionMapper, @@ -18,7 +16,6 @@ ) from execution_testing.fixtures import ( BlockchainFixture, - EOFFixture, FixtureFormat, StateFixture, ) @@ -105,22 +102,11 @@ def help(self, subcommand: str | None = None) -> str: help_command.append("--help") return self._run_command(help_command).stdout - @cache # noqa - def has_eof_support(self) -> bool: - """ - Return True if the `nethtest` binary supports the `--eofTest` flag. - - Currently, nethtest EOF support is only available in nethermind's - feature/evm/eof branch - https://github.com/NethermindEth/nethermind/tree/feature/evm/eof - """ - return "--eofTest" in self.help() - class NethtestFixtureConsumer( Nethtest, FixtureConsumerTool, - fixture_formats=[StateFixture, BlockchainFixture, EOFFixture], + fixture_formats=[StateFixture, BlockchainFixture], ): """Nethermind implementation of the fixture consumer.""" @@ -143,8 +129,6 @@ def _build_command_with_options( # TODO: consider using `--filter` here to readily access traces # from the output pass # no additional options needed - elif fixture_format is EOFFixture: - command += ["--eofTest"] else: raise Exception( f"Fixture format {fixture_format.format_name} not supported by {self.binary}" @@ -273,69 +257,6 @@ def consume_blockchain_test( f"{' '.join(command)}" ) - @cache # noqa - def consume_eof_test_file( - self, - fixture_path: Path, - command: Tuple[str, ...], - debug_output_path: Optional[Path] = None, - ) -> Tuple[Dict[Any, Any], str, str]: - """Consume an entire EOF fixture file.""" - del fixture_path - result = subprocess.run( - command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True - ) - - pattern = re.compile(r"^(test_.+?)\s+(PASS|FAIL)$", re.MULTILINE) - test_results = { - match.group(1): match.group(2) == "PASS" # Convert "PASS" to True - # and "FAIL" to False - for match in pattern.finditer(result.stdout) - } - - if debug_output_path: - self._consume_debug_dump(command, result, debug_output_path) - - if result.returncode != 0: - raise Exception( - f"Unexpected exit code:\n{' '.join(command)}\n\n Error:\n{result.stderr}" - ) - - return test_results, result.stdout, result.stderr - - def consume_eof_test( - self, - command: Tuple[str, ...], - fixture_path: Path, - fixture_name: Optional[str], - debug_output_path: Optional[Path], - ) -> None: - """Execute the the EOF fixture at `fixture_path` via `nethtest`.""" - if not self.has_eof_support(): - pytest.skip( - "This version of nethtest does not support the `--eofTest` flag." 
- ) - file_results, stdout, stderr = self.consume_eof_test_file( - fixture_path=fixture_path, - command=command, - debug_output_path=debug_output_path, - ) - assert fixture_name, "fixture_name is required for EOF tests" - modified_fixture_name = fixture_name.split("::")[-1].replace( - "\\x", "/x" - ) - assert modified_fixture_name in file_results, ( - f"Test result for {fixture_name} missing, available stdout:\n{stdout}.\n" - f"Parsed test results: {file_results}" - ) - if stderr: - available_stderr = f"Available stderr:\n{stderr}" - else: - available_stderr = "(No output available.)" - assert file_results[modified_fixture_name], ( - f"EOF test '{fixture_name}' failed. {available_stderr}" - ) - def consume_fixture( self, fixture_format: FixtureFormat, @@ -364,13 +285,6 @@ def consume_fixture( fixture_name=fixture_name, debug_output_path=debug_output_path, ) - elif fixture_format == EOFFixture: - self.consume_eof_test( - command=command, - fixture_path=fixture_path, - fixture_name=fixture_name, - debug_output_path=debug_output_path, - ) else: raise Exception( f"Fixture format {fixture_format.format_name} not supported by {self.binary}" diff --git a/packages/testing/src/execution_testing/exceptions/__init__.py b/packages/testing/src/execution_testing/exceptions/__init__.py index 3e2f1d2961..b26512d66f 100644 --- a/packages/testing/src/execution_testing/exceptions/__init__.py +++ b/packages/testing/src/execution_testing/exceptions/__init__.py @@ -9,8 +9,6 @@ from .exceptions import ( BlockException, BlockExceptionInstanceOrList, - EOFException, - EOFExceptionInstanceOrList, ExceptionBase, ExceptionInstanceOrList, TransactionException, @@ -21,8 +19,6 @@ __all__ = [ "BlockException", "BlockExceptionInstanceOrList", - "EOFException", - "EOFExceptionInstanceOrList", "ExceptionBase", "EngineAPIError", "ExceptionMapper", diff --git a/packages/testing/src/execution_testing/exceptions/exceptions.py b/packages/testing/src/execution_testing/exceptions/exceptions.py index c97553045f..ce5d7de6b7 100644 --- a/packages/testing/src/execution_testing/exceptions/exceptions.py +++ b/packages/testing/src/execution_testing/exceptions/exceptions.py @@ -481,126 +481,6 @@ class BlockException(ExceptionBase): """ -@unique -class EOFException(ExceptionBase): - """Exception raised when an EOF container is invalid.""" - - DEFAULT_EXCEPTION = auto() - """Expect some exception, not yet known.""" - - UNDEFINED_EXCEPTION = auto() - """Indicates that exception string is not mapped to an exception enum.""" - - UNDEFINED_INSTRUCTION = auto() - """EOF container has undefined instruction in its body code.""" - - UNKNOWN_VERSION = auto() - """EOF container has an unknown version.""" - INCOMPLETE_MAGIC = auto() - """EOF container has not enough bytes to read magic.""" - INVALID_MAGIC = auto() - """EOF container has not allowed magic version byte.""" - INVALID_VERSION = auto() - """EOF container version bytes mismatch.""" - INVALID_NON_RETURNING_FLAG = auto() - """EOF container's section has non-returning flag set incorrectly.""" - INVALID_RJUMP_DESTINATION = auto() - """Code has RJUMP instruction with invalid parameters.""" - MISSING_TYPE_HEADER = auto() - """EOF container missing types section.""" - INVALID_TYPE_SECTION_SIZE = auto() - """EOF container types section has wrong size.""" - INVALID_TYPE_BODY = auto() - """EOF container types body section bytes are wrong.""" - MISSING_CODE_HEADER = auto() - """EOF container missing code section.""" - INVALID_CODE_SECTION = auto() - """EOF container code section bytes are 
incorrect.""" - INCOMPLETE_CODE_HEADER = auto() - """EOF container code header missing bytes.""" - INCOMPLETE_DATA_HEADER = auto() - """EOF container data header missing bytes.""" - ZERO_SECTION_SIZE = auto() - """EOF container data header construction is wrong.""" - MISSING_DATA_SECTION = auto() - """EOF container missing data section""" - INCOMPLETE_CONTAINER = auto() - """EOF container bytes are incomplete.""" - INVALID_SECTION_BODIES_SIZE = auto() - """Sections bodies does not match sections headers.""" - TRAILING_BYTES = auto() - """EOF container has bytes beyond data section.""" - MISSING_TERMINATOR = auto() - """EOF container missing terminator bytes between header and body.""" - MISSING_HEADERS_TERMINATOR = auto() - """Some type of another exception about missing headers terminator.""" - INVALID_FIRST_SECTION_TYPE = auto() - """EOF container header does not have types section first.""" - INCOMPLETE_SECTION_NUMBER = auto() - """EOF container header has section that is missing declaration bytes.""" - INCOMPLETE_SECTION_SIZE = auto() - """EOF container header has section that is defined incorrectly.""" - TOO_MANY_CODE_SECTIONS = auto() - """EOF container header has too many code sections.""" - MISSING_STOP_OPCODE = auto() - """EOF container's code missing STOP bytecode at its end.""" - INPUTS_OUTPUTS_NUM_ABOVE_LIMIT = auto() - """EOF container code section inputs/outputs number is above the limit""" - UNREACHABLE_INSTRUCTIONS = auto() - """EOF container's code have instructions that are unreachable.""" - UNREACHABLE_CODE_SECTIONS = auto() - """EOF container's body have code sections that are unreachable.""" - STACK_UNDERFLOW = auto() - """EOF container's code produces a stack underflow.""" - STACK_OVERFLOW = auto() - """EOF container's code produces a stack overflow.""" - STACK_HEIGHT_MISMATCH = auto() - """EOF container section stack height mismatch.""" - MAX_STACK_INCREASE_ABOVE_LIMIT = auto() - """EOF container's specified max stack increase is above the limit.""" - STACK_HIGHER_THAN_OUTPUTS = auto() - """ - EOF container section stack height is higher than the outputs. when - returning - """ - JUMPF_DESTINATION_INCOMPATIBLE_OUTPUTS = auto() - """ - EOF container section JUMPF's to a destination section with incompatible - outputs. - """ - INVALID_MAX_STACK_INCREASE = auto() - """ - EOF container section's specified max stack increase does not match the - actual stack height. - """ - INVALID_DATALOADN_INDEX = auto() - """A DATALOADN instruction has out-of-bounds index for the data section.""" - TRUNCATED_INSTRUCTION = auto() - """EOF container's code section has truncated instruction.""" - TOPLEVEL_CONTAINER_TRUNCATED = auto() - """Top-level EOF container has data section truncated""" - ORPHAN_SUBCONTAINER = auto() - """EOF container has an unreferenced subcontainer. 
'""" - CONTAINER_SIZE_ABOVE_LIMIT = auto() - """EOF container is above size limit""" - INVALID_CONTAINER_SECTION_INDEX = auto() - """Instruction references container section that does not exist.""" - INCOMPATIBLE_CONTAINER_KIND = auto() - """Incompatible instruction found in a container of a specific kind.""" - AMBIGUOUS_CONTAINER_KIND = auto() - """The kind of a sub-container cannot be uniquely deduced.""" - TOO_MANY_CONTAINERS = auto() - """EOF container header has too many sub-containers.""" - INVALID_CODE_SECTION_INDEX = auto() - """CALLF Operation refers to a non-existent code section""" - UNEXPECTED_HEADER_KIND = auto() - """Header parsing encountered a section kind it wasn't expecting""" - CALLF_TO_NON_RETURNING = auto() - """CALLF instruction targeting a non-returning code section""" - EOFCREATE_WITH_TRUNCATED_CONTAINER = auto() - """EOFCREATE with truncated container""" - - """Pydantic Annotated Types""" ExceptionInstanceOrList = Annotated[ @@ -623,12 +503,6 @@ class EOFException(ExceptionBase): PlainSerializer(to_pipe_str), ] -EOFExceptionInstanceOrList = Annotated[ - List[EOFException] | EOFException, - BeforeValidator(from_pipe_str), - PlainSerializer(to_pipe_str), -] - ExceptionBoundTypeVar = TypeVar( - "ExceptionBoundTypeVar", TransactionException, BlockException, EOFException + "ExceptionBoundTypeVar", TransactionException, BlockException ) diff --git a/packages/testing/src/execution_testing/exceptions/exceptions/__init__.py b/packages/testing/src/execution_testing/exceptions/exceptions/__init__.py index 8434e5f331..a789986778 100644 --- a/packages/testing/src/execution_testing/exceptions/exceptions/__init__.py +++ b/packages/testing/src/execution_testing/exceptions/exceptions/__init__.py @@ -2,10 +2,8 @@ from .base import ExceptionBase, UndefinedException, from_pipe_str, to_pipe_str from .block import BlockException -from .eof import EOFException from .exceptions_types import ( BlockExceptionInstanceOrList, - EOFExceptionInstanceOrList, ExceptionBoundTypeVar, ExceptionInstanceOrList, TransactionExceptionInstanceOrList, @@ -19,10 +17,8 @@ "to_pipe_str", "TransactionException", "BlockException", - "EOFException", "ExceptionInstanceOrList", "TransactionExceptionInstanceOrList", "BlockExceptionInstanceOrList", - "EOFExceptionInstanceOrList", "ExceptionBoundTypeVar", ] diff --git a/packages/testing/src/execution_testing/exceptions/exceptions/eof.py b/packages/testing/src/execution_testing/exceptions/exceptions/eof.py deleted file mode 100644 index 2e1f7f1b8d..0000000000 --- a/packages/testing/src/execution_testing/exceptions/exceptions/eof.py +++ /dev/null @@ -1,125 +0,0 @@ -"""EOF Exceptions.""" - -from enum import auto, unique - -from .base import ExceptionBase - - -@unique -class EOFException(ExceptionBase): - """Exception raised when an EOF container is invalid.""" - - DEFAULT_EXCEPTION = auto() - """Expect some exception, not yet known.""" - - UNDEFINED_EXCEPTION = auto() - """Indicates that exception string is not mapped to an exception enum.""" - - UNDEFINED_INSTRUCTION = auto() - """EOF container has undefined instruction in its body code.""" - - UNKNOWN_VERSION = auto() - """EOF container has an unknown version.""" - INCOMPLETE_MAGIC = auto() - """EOF container has not enough bytes to read magic.""" - INVALID_MAGIC = auto() - """EOF container has not allowed magic version byte.""" - INVALID_VERSION = auto() - """EOF container version bytes mismatch.""" - INVALID_NON_RETURNING_FLAG = auto() - """EOF container's section has non-returning flag set incorrectly.""" - 
INVALID_RJUMP_DESTINATION = auto() - """Code has RJUMP instruction with invalid parameters.""" - MISSING_TYPE_HEADER = auto() - """EOF container missing types section.""" - INVALID_TYPE_SECTION_SIZE = auto() - """EOF container types section has wrong size.""" - INVALID_TYPE_BODY = auto() - """EOF container types body section bytes are wrong.""" - MISSING_CODE_HEADER = auto() - """EOF container missing code section.""" - INVALID_CODE_SECTION = auto() - """EOF container code section bytes are incorrect.""" - INCOMPLETE_CODE_HEADER = auto() - """EOF container code header missing bytes.""" - INCOMPLETE_DATA_HEADER = auto() - """EOF container data header missing bytes.""" - ZERO_SECTION_SIZE = auto() - """EOF container data header construction is wrong.""" - MISSING_DATA_SECTION = auto() - """EOF container missing data section""" - INCOMPLETE_CONTAINER = auto() - """EOF container bytes are incomplete.""" - INVALID_SECTION_BODIES_SIZE = auto() - """Sections bodies does not match sections headers.""" - TRAILING_BYTES = auto() - """EOF container has bytes beyond data section.""" - MISSING_TERMINATOR = auto() - """EOF container missing terminator bytes between header and body.""" - MISSING_HEADERS_TERMINATOR = auto() - """Some type of another exception about missing headers terminator.""" - INVALID_FIRST_SECTION_TYPE = auto() - """EOF container header does not have types section first.""" - INCOMPLETE_SECTION_NUMBER = auto() - """EOF container header has section that is missing declaration bytes.""" - INCOMPLETE_SECTION_SIZE = auto() - """EOF container header has section that is defined incorrectly.""" - TOO_MANY_CODE_SECTIONS = auto() - """EOF container header has too many code sections.""" - MISSING_STOP_OPCODE = auto() - """EOF container's code missing STOP bytecode at its end.""" - INPUTS_OUTPUTS_NUM_ABOVE_LIMIT = auto() - """EOF container code section inputs/outputs number is above the limit""" - UNREACHABLE_INSTRUCTIONS = auto() - """EOF container's code have instructions that are unreachable.""" - UNREACHABLE_CODE_SECTIONS = auto() - """EOF container's body have code sections that are unreachable.""" - STACK_UNDERFLOW = auto() - """EOF container's code produces a stack underflow.""" - STACK_OVERFLOW = auto() - """EOF container's code produces a stack overflow.""" - STACK_HEIGHT_MISMATCH = auto() - """EOF container section stack height mismatch.""" - MAX_STACK_INCREASE_ABOVE_LIMIT = auto() - """EOF container's specified max stack increase is above the limit.""" - STACK_HIGHER_THAN_OUTPUTS = auto() - """ - EOF container section stack height is higher than the outputs. when - returning - """ - JUMPF_DESTINATION_INCOMPATIBLE_OUTPUTS = auto() - """ - EOF container section JUMPF's to a destination section with incompatible - outputs. - """ - INVALID_MAX_STACK_INCREASE = auto() - """ - EOF container section's specified max stack increase does not match the - actual stack height. - """ - INVALID_DATALOADN_INDEX = auto() - """A DATALOADN instruction has out-of-bounds index for the data section.""" - TRUNCATED_INSTRUCTION = auto() - """EOF container's code section has truncated instruction.""" - TOPLEVEL_CONTAINER_TRUNCATED = auto() - """Top-level EOF container has data section truncated""" - ORPHAN_SUBCONTAINER = auto() - """EOF container has an unreferenced subcontainer. 
'""" - CONTAINER_SIZE_ABOVE_LIMIT = auto() - """EOF container is above size limit""" - INVALID_CONTAINER_SECTION_INDEX = auto() - """Instruction references container section that does not exist.""" - INCOMPATIBLE_CONTAINER_KIND = auto() - """Incompatible instruction found in a container of a specific kind.""" - AMBIGUOUS_CONTAINER_KIND = auto() - """The kind of a sub-container cannot be uniquely deduced.""" - TOO_MANY_CONTAINERS = auto() - """EOF container header has too many sub-containers.""" - INVALID_CODE_SECTION_INDEX = auto() - """CALLF Operation refers to a non-existent code section""" - UNEXPECTED_HEADER_KIND = auto() - """Header parsing encountered a section kind it wasn't expecting""" - CALLF_TO_NON_RETURNING = auto() - """CALLF instruction targeting a non-returning code section""" - EOFCREATE_WITH_TRUNCATED_CONTAINER = auto() - """EOFCREATE with truncated container""" diff --git a/packages/testing/src/execution_testing/exceptions/exceptions/exceptions_types.py b/packages/testing/src/execution_testing/exceptions/exceptions/exceptions_types.py index 9c82caa608..e0652a0493 100644 --- a/packages/testing/src/execution_testing/exceptions/exceptions/exceptions_types.py +++ b/packages/testing/src/execution_testing/exceptions/exceptions/exceptions_types.py @@ -6,7 +6,6 @@ from .base import from_pipe_str, to_pipe_str from .block import BlockException -from .eof import EOFException from .transaction import TransactionException """ @@ -33,12 +32,6 @@ PlainSerializer(to_pipe_str), ] -EOFExceptionInstanceOrList = Annotated[ - List[EOFException] | EOFException, - BeforeValidator(from_pipe_str), - PlainSerializer(to_pipe_str), -] - ExceptionBoundTypeVar = TypeVar( - "ExceptionBoundTypeVar", TransactionException, BlockException, EOFException + "ExceptionBoundTypeVar", TransactionException, BlockException ) diff --git a/packages/testing/src/execution_testing/fixtures/__init__.py b/packages/testing/src/execution_testing/fixtures/__init__.py index c8e2a3b1bb..9d882707e7 100644 --- a/packages/testing/src/execution_testing/fixtures/__init__.py +++ b/packages/testing/src/execution_testing/fixtures/__init__.py @@ -16,7 +16,6 @@ ) from .collector import FixtureCollector, TestInfo from .consume import FixtureConsumer -from .eof import EOFFixture from .pre_alloc_groups import ( PreAllocGroup, PreAllocGroupBuilder, @@ -34,7 +33,6 @@ "BlockchainEngineXFixture", "BlockchainFixture", "BlockchainFixtureCommon", - "EOFFixture", "FixtureCollector", "FixtureConsumer", "FixtureFillingPhase", diff --git a/packages/testing/src/execution_testing/fixtures/eof.py b/packages/testing/src/execution_testing/fixtures/eof.py deleted file mode 100644 index f5eaca3a0f..0000000000 --- a/packages/testing/src/execution_testing/fixtures/eof.py +++ /dev/null @@ -1,51 +0,0 @@ -"""EOFTest Type Definitions.""" - -from typing import Any, ClassVar, Mapping - -from pydantic import Field - -from execution_testing.base_types import Bytes, CamelModel, Number -from execution_testing.exceptions.exceptions import EOFExceptionInstanceOrList -from execution_testing.forks import Fork -from execution_testing.test_types.eof.v1 import ContainerKind - -from .base import BaseFixture - - -class Result(CamelModel): - """Result for a single fork in a fixture.""" - - exception: EOFExceptionInstanceOrList | None = None - valid: bool = Field(..., alias="result") - - def model_post_init(self, __context: Any) -> None: - """ - Cross-field validation that a test cannot have an empty exception if - the valid is False. 
- """ - if not self.valid and self.exception is None: - raise ValueError("Invalid test: invalid but exception is not set") - elif self.valid and self.exception is not None: - raise ValueError("Invalid test: valid but exception is set") - super().model_post_init(__context) - - -class Vector(CamelModel): - """Single test vector in a fixture.""" - - code: Bytes - container_kind: ContainerKind = ContainerKind.RUNTIME - results: Mapping[Fork, Result] - - -class EOFFixture(BaseFixture): - """Fixture for a single EOFTest.""" - - format_name: ClassVar[str] = "eof_test" - description: ClassVar[str] = "Tests that generate an EOF test fixture." - - vectors: Mapping[Number, Vector] - - def get_fork(self) -> Fork | None: - """Return fork of the fixture as a string.""" - return None diff --git a/packages/testing/src/execution_testing/fixtures/tests/test_eof.py b/packages/testing/src/execution_testing/fixtures/tests/test_eof.py deleted file mode 100644 index fe19c21048..0000000000 --- a/packages/testing/src/execution_testing/fixtures/tests/test_eof.py +++ /dev/null @@ -1,108 +0,0 @@ -"""Test the EOF fixture types.""" - -from typing import Any, Dict - -import pytest - -from execution_testing.base_types import Bytes, to_json -from execution_testing.exceptions import EOFException - -from ..eof import ContainerKind, EOFFixture, Result, Vector - - -@pytest.mark.parametrize( - ["can_be_deserialized", "model_instance", "json_repr"], - [ - pytest.param( - True, - EOFFixture( - vectors={ - 1: Vector( - code=Bytes(b"\x00"), - container_kind=ContainerKind.INITCODE, - results={ - "Paris": Result( - exception=None, - valid=True, - ), - }, - ), - } - ), - { - "vectors": { - "1": { - "code": "0x00", - "containerKind": "INITCODE", - "results": { - "Paris": { - "result": True, - }, - }, - }, - }, - }, - id="eof_fixture", - ), - pytest.param( - True, - EOFFixture( - vectors={ - 1: Vector( - code=Bytes(b"\x00"), - container_kind=ContainerKind.RUNTIME, - results={ - "Paris": Result( - exception=EOFException.INVALID_MAGIC, - valid=False, - ), - }, - ), - } - ), - { - "vectors": { - "1": { - "code": "0x00", - "containerKind": "RUNTIME", - "results": { - "Paris": { - "exception": "EOFException.INVALID_MAGIC", - "result": False, - }, - }, - }, - }, - }, - id="eof_fixture_with_exception", - ), - ], -) -class TestPydanticModelConversion: - """Test that Pydantic models are converted to and from JSON correctly.""" - - def test_json_serialization( - self, - can_be_deserialized: bool, - model_instance: Any, - json_repr: str | Dict[str, Any], - ) -> None: - """Test that to_json returns the expected JSON for the given object.""" - del can_be_deserialized - serialized = to_json(model_instance) - serialized.pop("_info") - assert serialized == json_repr - - def test_json_deserialization( - self, - can_be_deserialized: bool, - model_instance: Any, - json_repr: str | Dict[str, Any], - ) -> None: - """Test that to_json returns the expected JSON for the given object.""" - if not can_be_deserialized: - pytest.skip( - reason="The model instance in this case can not be deserialized" - ) - model_type = type(model_instance) - assert model_type(**json_repr) == model_instance diff --git a/packages/testing/src/execution_testing/forks/__init__.py b/packages/testing/src/execution_testing/forks/__init__.py index fb5e9b4c76..760dbd0677 100644 --- a/packages/testing/src/execution_testing/forks/__init__.py +++ b/packages/testing/src/execution_testing/forks/__init__.py @@ -14,7 +14,6 @@ Cancun, Constantinople, ConstantinopleFix, - EOFv1, Frontier, 
GrayGlacier, Homestead, @@ -94,7 +93,6 @@ "Byzantium", "Constantinople", "ConstantinopleFix", - "EOFv1", "ForkRangeDescriptor", "Frontier", "GrayGlacier", diff --git a/packages/testing/src/execution_testing/forks/base_fork.py b/packages/testing/src/execution_testing/forks/base_fork.py index 584f92ba5d..6c549f2b76 100644 --- a/packages/testing/src/execution_testing/forks/base_fork.py +++ b/packages/testing/src/execution_testing/forks/base_fork.py @@ -14,7 +14,6 @@ Protocol, Set, Sized, - Tuple, Type, Union, ) @@ -29,7 +28,6 @@ ) from execution_testing.base_types.conversions import BytesConvertible from execution_testing.vm import ( - EVMCodeType, ForkOpcodeInterface, OpcodeBase, Opcodes, @@ -827,14 +825,6 @@ def engine_get_blobs_version( pass # EVM information abstract methods - @classmethod - @abstractmethod - def evm_code_types( - cls, *, block_number: int = 0, timestamp: int = 0 - ) -> List[EVMCodeType]: - """Return list of EVM code types supported by the fork.""" - pass - @classmethod @abstractmethod def max_code_size( @@ -869,11 +859,8 @@ def max_initcode_size( @abstractmethod def call_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 - ) -> List[Tuple[Opcodes, EVMCodeType]]: - """ - Return list of tuples with the call opcodes and its corresponding EVM - code type. - """ + ) -> List[Opcodes]: + """Return list of call opcodes supported by the fork.""" pass @classmethod @@ -888,11 +875,8 @@ def valid_opcodes( @abstractmethod def create_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 - ) -> List[Tuple[Opcodes, EVMCodeType]]: - """ - Return list of tuples with the create opcodes and its corresponding EVM - code type. - """ + ) -> List[Opcodes]: + """Return list of create opcodes supported by the fork.""" pass @classmethod diff --git a/packages/testing/src/execution_testing/forks/forks/forks.py b/packages/testing/src/execution_testing/forks/forks/forks.py index 3c8bb9976f..5bd10b95f4 100644 --- a/packages/testing/src/execution_testing/forks/forks/forks.py +++ b/packages/testing/src/execution_testing/forks/forks/forks.py @@ -15,13 +15,11 @@ Mapping, Optional, Sized, - Tuple, ) if TYPE_CHECKING: from execution_testing.fixtures.blockchain import FixtureHeader - from execution_testing.base_types import ( AccessList, Address, @@ -32,7 +30,6 @@ ) from execution_testing.base_types.conversions import BytesConvertible from execution_testing.vm import ( - EVMCodeType, OpcodeBase, OpcodeGasCalculator, Opcodes, @@ -1143,14 +1140,6 @@ def deterministic_factory_predeploy_address( del block_number, timestamp return None - @classmethod - def evm_code_types( - cls, *, block_number: int = 0, timestamp: int = 0 - ) -> List[EVMCodeType]: - """At Genesis, only legacy EVM code is supported.""" - del block_number, timestamp - return [EVMCodeType.LEGACY] - @classmethod def max_code_size( cls, *, block_number: int = 0, timestamp: int = 0 @@ -1187,13 +1176,10 @@ def max_initcode_size( @classmethod def call_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 - ) -> List[Tuple[Opcodes, EVMCodeType]]: + ) -> List[Opcodes]: """Return list of call opcodes supported by the fork.""" del block_number, timestamp - return [ - (Opcodes.CALL, EVMCodeType.LEGACY), - (Opcodes.CALLCODE, EVMCodeType.LEGACY), - ] + return [Opcodes.CALL, Opcodes.CALLCODE] @classmethod def valid_opcodes( @@ -1336,12 +1322,10 @@ def valid_opcodes( @classmethod def create_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 - ) -> List[Tuple[Opcodes, EVMCodeType]]: + ) -> List[Opcodes]: """At Genesis, only `CREATE` 
opcode is supported.""" del block_number, timestamp - return [ - (Opcodes.CREATE, EVMCodeType.LEGACY), - ] + return [Opcodes.CREATE] @classmethod def max_refund_quotient( @@ -1455,11 +1439,11 @@ def precompiles( @classmethod def call_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 - ) -> List[Tuple[Opcodes, EVMCodeType]]: + ) -> List[Opcodes]: """At Homestead, DELEGATECALL opcode was introduced.""" - return [(Opcodes.DELEGATECALL, EVMCodeType.LEGACY)] + super( - Homestead, cls - ).call_opcodes(block_number=block_number, timestamp=timestamp) + return [Opcodes.DELEGATECALL] + super(Homestead, cls).call_opcodes( + block_number=block_number, timestamp=timestamp + ) @classmethod def opcode_gas_map( @@ -1622,11 +1606,11 @@ def max_code_size( @classmethod def call_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 - ) -> List[Tuple[Opcodes, EVMCodeType]]: + ) -> List[Opcodes]: """At Byzantium, STATICCALL opcode was introduced.""" - return [(Opcodes.STATICCALL, EVMCodeType.LEGACY)] + super( - Byzantium, cls - ).call_opcodes(block_number=block_number, timestamp=timestamp) + return [Opcodes.STATICCALL] + super(Byzantium, cls).call_opcodes( + block_number=block_number, timestamp=timestamp + ) @classmethod def opcode_gas_map( @@ -1718,11 +1702,11 @@ def _calculate_create2_gas( @classmethod def create_opcodes( cls, *, block_number: int = 0, timestamp: int = 0 - ) -> List[Tuple[Opcodes, EVMCodeType]]: + ) -> List[Opcodes]: """At Constantinople, `CREATE2` opcode is added.""" - return [(Opcodes.CREATE2, EVMCodeType.LEGACY)] + super( - Constantinople, cls - ).create_opcodes(block_number=block_number, timestamp=timestamp) + return [Opcodes.CREATE2] + super(Constantinople, cls).create_opcodes( + block_number=block_number, timestamp=timestamp + ) @classmethod def opcode_gas_map( @@ -3383,38 +3367,3 @@ def engine_execution_payload_block_access_list( """ del block_number, timestamp return True - - -class EOFv1(Prague, solc_name="cancun"): - """EOF fork.""" - - @classmethod - def evm_code_types( - cls, *, block_number: int = 0, timestamp: int = 0 - ) -> List[EVMCodeType]: - """EOF V1 is supported starting from Osaka.""" - return super(EOFv1, cls).evm_code_types( - block_number=block_number, - timestamp=timestamp, - ) + [EVMCodeType.EOF_V1] - - @classmethod - def call_opcodes( - cls, *, block_number: int = 0, timestamp: int = 0 - ) -> List[Tuple[Opcodes, EVMCodeType]]: - """EOF V1 introduces EXTCALL, EXTSTATICCALL, EXTDELEGATECALL.""" - return [ - (Opcodes.EXTCALL, EVMCodeType.EOF_V1), - (Opcodes.EXTSTATICCALL, EVMCodeType.EOF_V1), - (Opcodes.EXTDELEGATECALL, EVMCodeType.EOF_V1), - ] + super(EOFv1, cls).call_opcodes( - block_number=block_number, timestamp=timestamp - ) - - @classmethod - def is_deployed(cls) -> bool: - """ - Flag that the fork has not been deployed to mainnet; it is under active - development. 
- """ - return False diff --git a/packages/testing/src/execution_testing/specs/__init__.py b/packages/testing/src/execution_testing/specs/__init__.py index 5f4955a441..1194e16513 100644 --- a/packages/testing/src/execution_testing/specs/__init__.py +++ b/packages/testing/src/execution_testing/specs/__init__.py @@ -11,14 +11,6 @@ BlockchainTestSpec, Header, ) -from .eof import ( - EOFStateTest, - EOFStateTestFiller, - EOFStateTestSpec, - EOFTest, - EOFTestFiller, - EOFTestSpec, -) from .state import StateTest, StateTestFiller, StateTestSpec from .static_state.state_static import StateStaticTest from .transaction import ( @@ -43,12 +35,6 @@ "BlockchainTestSpec", "Block", "Header", - "EOFStateTest", - "EOFStateTestFiller", - "EOFStateTestSpec", - "EOFTest", - "EOFTestFiller", - "EOFTestSpec", "StateStaticTest", "StateTest", "StateTestFiller", diff --git a/packages/testing/src/execution_testing/specs/eof.py b/packages/testing/src/execution_testing/specs/eof.py deleted file mode 100644 index f314d08a88..0000000000 --- a/packages/testing/src/execution_testing/specs/eof.py +++ /dev/null @@ -1,729 +0,0 @@ -"""Ethereum EOF test spec definition and filler.""" - -import subprocess -import warnings -from pathlib import Path -from shutil import which -from subprocess import CompletedProcess -from typing import ( - Annotated, - Any, - Callable, - ClassVar, - Dict, - Generator, - List, - Optional, - Sequence, - Type, -) - -import pytest -from pydantic import Field, TypeAdapter - -from execution_testing.base_types import Account, Bytes, HexNumber -from execution_testing.client_clis import ( - EvmoneExceptionMapper, - TransitionTool, -) -from execution_testing.exceptions import ( - EOFException, - ExceptionMapperValidator, - ExceptionWithMessage, - UndefinedException, -) -from execution_testing.exceptions.exceptions import ( - EOFExceptionInstanceOrList, - to_pipe_str, -) -from execution_testing.execution import ( - BaseExecute, - ExecuteFormat, - LabeledExecuteFormat, - TransactionPost, -) -from execution_testing.fixtures import ( - BaseFixture, - EOFFixture, - FixtureFormat, - LabeledFixtureFormat, -) -from execution_testing.fixtures.eof import Result, Vector -from execution_testing.forks import Fork -from execution_testing.test_types import ( - EOA, - Alloc, - Environment, - Transaction, -) -from execution_testing.test_types.eof.v1 import ( - Container, - ContainerKind, - Section, - SectionKind, -) -from execution_testing.test_types.helpers import ( - compute_eofcreate_address, -) -from execution_testing.vm import Op - -from .base import BaseTest -from .state import StateTest - -existing_tests: Dict[Bytes, str] = {} - - -class EOFBaseExceptionError(Exception): - """Base exception class for exceptions raised when verifying EOF code.""" - - def __init__(self, message: str) -> None: - """Initialize the exception with the message.""" - super().__init__(message) - - @staticmethod - def format_code(code: Bytes, max_length: int = 60) -> str: - """ - Avoid printing long bytecode strings in the terminal upon test failure. - """ - if len(code) > max_length: - half_length = max_length // 2 - 5 # Floor; adjust for ellipsis - return f"{code[:half_length].hex()}...{code[-half_length:].hex()}" - return code.hex() - - -class UnexpectedEOFExceptionError(EOFBaseExceptionError): - """ - Exception used when valid EOF code unexpectedly raises an exception in - eofparse. 
- """ - - def __init__(self, *, code: Bytes, got: str): - """Initialize the exception with the code and the exception message.""" - message = ( - "Expected EOF code to be valid, but an exception occurred:\n" - f" Code: {self.format_code(code)}\n" - f"Expected: No Exception\n" - f" Got: {got}" - ) - super().__init__(message) - - -class ExpectedEOFExceptionError(EOFBaseExceptionError): - """ - Exception used when EOF code is expected to raise an exception, but - eofparse did not raise an exception. - """ - - def __init__(self, *, code: Bytes, expected: str): - """ - Initialize the exception with the code and the expected exception - message. - """ - message = ( - "Expected EOF code to be invalid, but no exception was raised:\n" - f" Code: {self.format_code(code)}\n" - f"Expected: {expected}\n" - f" Got: No Exception" - ) - super().__init__(message) - - -class EOFExceptionMismatchError(EOFBaseExceptionError): - """ - Exception used when the actual EOF exception differs from the expected one. - """ - - def __init__(self, code: Bytes, expected: str, got: str): - """ - Initialize the exception with the code, the expected/actual exception - message. - """ - message = ( - "EOF code raised a different exception than expected:\n" - f" Code: {self.format_code(code)}\n" - f"Expected: {expected}\n" - f" Got: {got}" - ) - super().__init__(message) - - -class EOFExceptionWithMessage(ExceptionWithMessage[EOFException]): - """Exception returned from the eof validator with a message.""" - - pass - - -eof_exception_type_adapter: TypeAdapter[ - EOFExceptionWithMessage | UndefinedException -] = TypeAdapter( - Annotated[ - EOFExceptionWithMessage | UndefinedException, ExceptionMapperValidator - ] -) - - -class EOFParse: - """evmone-eofparse binary.""" - - binary: Path - - def __new__(cls) -> "EOFParse": - """Make EOF binary a singleton.""" - if not hasattr(cls, "instance"): - cls.instance = super(EOFParse, cls).__new__(cls) - return cls.instance - - def __init__( - self, - binary: Optional[Path | str] = None, - ): - """Initialize the EOF binary.""" - if binary is None: - which_path = which("evmone-eofparse") - if which_path is not None: - binary = Path(which_path) - if binary is None or not Path(binary).exists(): - raise FileNotFoundError( - "`evmone-eofparse` binary executable not found/not executable." - ) - self.binary = Path(binary) - - def run( - self, *args: str, input_value: str | None = None - ) -> CompletedProcess: - """Run evmone with the given arguments.""" - result = subprocess.run( - [self.binary, *args], - capture_output=True, - text=True, - input=input_value, - ) - if result.returncode not in [0, 1]: - raise Exception( - f"`{self.binary.name}` call failed with return code {result.returncode}." - ) - return result - - -class EOFTest(BaseTest): - """ - Filler type that generates a test for EOF container validation. - - A state test is also automatically generated where the container is wrapped - in a contract-creating transaction to test deployment/validation on the - instantiated blockchain. - """ - - container: Container - """ - EOF container that will be tested for validity. - - The only supported type at the moment is - `execution_testing.test_types.eof.v1.Container`. - - If an invalid container needs to be tested, and it cannot be generated - using the Container class features, the `raw_bytes` field can be used to - provide the raw container bytes. 
- """ - expect_exception: EOFExceptionInstanceOrList | None = None - """ - Expected exception that the container should raise when parsed by an EOF - parser. - - Can be a single exception or a list of exceptions that the container is - expected to raise, in which case the test will pass if any of the - exceptions are raised. - - The list of supported exceptions can be found in the - `execution_testing.exceptions.EOFException` class. - """ - container_kind: ContainerKind = ContainerKind.RUNTIME - """ - Container kind type that the container should be treated as. - - The container kind can be one of the following: - `ContainerKind.INITCODE`: - The container is an initcode container. - `ContainerKind.RUNTIME`: The - container is a runtime container. - - The default value is `ContainerKind.RUNTIME`. - """ - deployed_container: Container | None = None - """ - To be used when the container is an initcode container and the expected - deployed container is known. - - The value is only used when a State Test is generated from this EOF test to - set the expected deployed container that should be found in the post state. - - If this field is not set, and the container is valid: - If the container - kind is `ContainerKind.RUNTIME`, the deployed container is assumed to be - the container itself, and an initcode container that wraps the container is - generated automatically. - If the container kind is - `ContainerKind.INITCODE`, `model_post_init` will attempt to infer the - deployed container from the sections of the init-container, and the first - container-type section will be used. An error will be raised if the - deployed container cannot be inferred. - - If the value is set to `None`, it is assumed that the container is invalid - and the test will expect that no contract is created. - - It is considered an error if: - The `deployed_container` field is set and - the `container_kind` field is not set to `ContainerKind.INITCODE`. - The - `deployed_container` field is set and the `expect_exception` is not `None`. - - The deployed container is **not** executed at any point during the EOF - validation test nor the generated State Test. For container runtime testing - use the `EOFStateTest` class. - """ - pre: Alloc | None = None - """ - Pre alloc object that is used during State Test generation. - - This field is automatically set by the test filler when generating a State - Test from this EOF test and should otherwise be left unset. - """ - post: Alloc | None = None - """ - Post alloc object that is used during State Test generation. - - This field is automatically set by the test filler when generating a State - Test from this EOF test and is normally not set by the user. - """ - sender: EOA | None = None - """ - Sender EOA object that is used during State Test generation. - - This field is automatically set by the `model_post_init` method and should - otherwise be left unset. 
- """ - - supported_fixture_formats: ClassVar[ - Sequence[FixtureFormat | LabeledFixtureFormat] - ] = [EOFFixture] + [ - LabeledFixtureFormat( - fixture_format, - f"{fixture_format.format_name}_from_eof_test", - f"A {fixture_format.format_name} generated from an eof_test.", - ) - for fixture_format in StateTest.supported_fixture_formats - ] - - supported_execute_formats: ClassVar[Sequence[LabeledExecuteFormat]] = [ - LabeledExecuteFormat( - execute_format, - f"{execute_format.label}_from_eof_test", - f"A {execute_format.label} generated from an eof_test.", - ) - for execute_format in StateTest.supported_execute_formats - ] - - supported_markers: ClassVar[Dict[str, str]] = { - "eof_test_only": "Only generate an EOF test fixture", - } - - @classmethod - def discard_fixture_format_by_marks( - cls, - fixture_format: FixtureFormat, - fork: Fork, - markers: List[pytest.Mark], - ) -> bool: - """ - Discard a fixture format from filling if the appropriate marker is - used. - """ - del fork - - if "eof_test_only" in [m.name for m in markers]: - return fixture_format != EOFFixture - return False - - @classmethod - def pytest_parameter_name(cls) -> str: - """Workaround for pytest parameter name.""" - return "eof_test" - - def model_post_init(self, __context: Any) -> None: - """Prepare the test exception based on the container.""" - if self.container.validity_error is not None: - if self.expect_exception is not None: - assert ( - self.expect_exception == self.container.validity_error - ), ( - f"Container validity error {self.container.validity_error} " - f"does not match expected exception {self.expect_exception}." - ) - self.expect_exception = self.container.validity_error # type: ignore[assignment] - assert self.deployed_container is None, ( - "deployed_container must be None for invalid containers." - ) - if ( - "kind" in self.container.model_fields_set - or "container_kind" in self.model_fields_set - ): - if ( - "kind" in self.container.model_fields_set - and "container_kind" in self.model_fields_set - ): - assert self.container.kind == self.container_kind, ( - f"Container kind type {str(self.container.kind)} " - f"does not match test {self.container_kind}." - ) - elif "kind" in self.container.model_fields_set: - self.container_kind = self.container.kind - elif "container_kind" in self.model_fields_set: - self.container.kind = self.container_kind - - assert self.pre is not None, "pre must be set to generate a StateTest." - self.sender = self.pre.fund_eoa() - if self.post is None: - self.post = Alloc() - - def make_eof_test_fixture(self) -> EOFFixture: - """Generate the EOF test fixture.""" - container_bytes = Bytes(self.container) - if container_bytes in existing_tests: - pytest.fail( - f"Duplicate EOF test: {container_bytes}, " - f"existing test: {existing_tests[container_bytes]}" - ) - existing_tests[container_bytes] = self.node_id() - vectors = [ - Vector( - code=container_bytes, - container_kind=self.container_kind, - results={ - self.fork: Result( - exception=self.expect_exception, - valid=self.expect_exception is None, - ), - }, - ) - ] - fixture = EOFFixture(vectors=dict(enumerate(vectors))) - try: - eof_parse = EOFParse() - except FileNotFoundError as e: - warnings.warn( - f"{e} Skipping EOF fixture verification. 
Fixtures may be invalid!", - stacklevel=2, - ) - return fixture - - for _, vector in fixture.vectors.items(): - expected_result = vector.results.get(self.fork) - if expected_result is None: - raise Exception( - f"EOF Fixture missing vector result for fork: {self.fork}" - ) - args = [] - if vector.container_kind == ContainerKind.INITCODE: - args.append("--initcode") - result = eof_parse.run(*args, input_value=str(vector.code)) - self.verify_result(result, expected_result, vector.code) - - return fixture - - def verify_result( - self, result: CompletedProcess, expected_result: Result, code: Bytes - ) -> None: - """ - Check that the reported exception string matches the expected error. - """ - evmone_exception_mapper = EvmoneExceptionMapper() - actual_exception_str = result.stdout.strip() - actual_exception: ( - EOFExceptionWithMessage | UndefinedException | None - ) = None - if not actual_exception_str.startswith("OK"): - actual_exception = eof_exception_type_adapter.validate_python( - actual_exception_str, - context={"exception_mapper": evmone_exception_mapper}, - ) - - if expected_result.exception is None: - if actual_exception is not None: - raise UnexpectedEOFExceptionError( - code=code, got=f"{actual_exception}" - ) - else: - expected_string = to_pipe_str(expected_result.exception) - if actual_exception is None: - raise ExpectedEOFExceptionError( - code=code, - expected=f"{expected_string}", - ) - if ( - not isinstance(actual_exception, EOFExceptionWithMessage) - or expected_result.exception not in actual_exception - ): - raise EOFExceptionMismatchError( - code=code, - expected=f"{expected_string}", - got=f"{actual_exception}", - ) - - def generate_eof_contract_create_transaction(self) -> Transaction: - """Generate a transaction that creates a contract.""" - assert self.sender is not None, ( - "sender must be set to generate a StateTest." - ) - assert self.post is not None, ( - "post must be set to generate a StateTest." - ) - assert self.pre is not None, "pre must be set to generate a StateTest." - - initcode: Container - deployed_container: Container | Bytes | None = None - if self.container_kind == ContainerKind.INITCODE: - initcode = self.container - if "deployed_container" in self.model_fields_set: - # In the case of an initcontainer where we know the deployed - # container, we can use the initcontainer as-is. - deployed_container = self.deployed_container - elif self.expect_exception is None: - # We have a valid init-container, but we don't know the - # deployed container. Try to infer the deployed container from - # the sections of the init-container. - assert self.container.raw_bytes is None, ( - "deployed_container must be set for initcode containers with raw_bytes." - ) - for section in self.container.sections: - if section.kind == SectionKind.CONTAINER: - deployed_container = section.data - break - - assert deployed_container is not None, ( - "Unable to infer deployed container for init-container. " - "Use field `deployed_container` to set the expected deployed container." - ) - else: - assert self.deployed_container is None, ( - "deployed_container must be None for runtime containers." 
- ) - initcode = Container( - sections=[ - Section.Code(Op.RETURNCODE[0](0, 0)), - Section.Container(self.container), - ] - ) - deployed_container = self.container - - factory_address = self.pre.deploy_contract( - Op.TXCREATE(tx_initcode_hash=initcode.hash) + Op.STOP - ) - - tx = Transaction( - sender=self.sender, - to=factory_address, - gas_limit=10_000_000, - max_priority_fee_per_gas=10, - max_fee_per_gas=10, - initcodes=[initcode], - ) - - if self.expect_exception is not None or deployed_container is None: - self.post[compute_eofcreate_address(factory_address, 0)] = None - else: - self.post[compute_eofcreate_address(factory_address, 0)] = Account( - code=deployed_container, - ) - return tx - - def generate_state_test(self) -> StateTest: - """Generate the StateTest filler.""" - return StateTest.from_test( - base_test=self, - pre=self.pre, - tx=self.generate_eof_contract_create_transaction(), - env=Environment(), - post=self.post, - ) - - def generate( - self, - *, - t8n: TransitionTool, - fixture_format: FixtureFormat, - **_: Any, - ) -> BaseFixture: - """Generate the BlockchainTest fixture.""" - if fixture_format == EOFFixture: - return self.make_eof_test_fixture() - elif fixture_format in StateTest.supported_fixture_formats: - return self.generate_state_test().generate( - t8n=t8n, fixture_format=fixture_format - ) - raise Exception(f"Unknown fixture format: {fixture_format}") - - def execute( - self, - *, - execute_format: ExecuteFormat, - ) -> BaseExecute: - """Generate the list of test fixtures.""" - if execute_format == TransactionPost: - return self.generate_state_test().execute( - execute_format=execute_format - ) - raise Exception(f"Unsupported execute format: {execute_format}") - - -EOFTestSpec = Callable[[str], Generator[EOFTest, None, None]] -EOFTestFiller = Type[EOFTest] - - -class EOFStateTest(EOFTest, Transaction): - """ - Filler type that generates an EOF test for container validation, and also - tests the container during runtime using a state test (and blockchain - test). - - In the state or blockchain test, the container is first deployed to the - pre-allocation and then a transaction is sent to the deployed container. - - Container deployment/validation is **not** tested like in the `EOFTest` - unless the container under test is an initcode container. - - All fields from `execution_testing.test_types.Transaction` are available for use in - the test. 
- """ - - gas_limit: HexNumber = Field( - HexNumber(10_000_000), serialization_alias="gas" - ) - """Gas limit for the transaction that deploys the container.""" - tx_sender_funding_amount: int = 1_000_000_000_000_000_000_000 - """Amount of funds to send to the sender EOA before the transaction.""" - env: Environment = Field(default_factory=Environment) - """Environment object that is used during State Test generation.""" - container_post: Account = Field(default_factory=Account) - """Account object used to verify the container post state.""" - - supported_fixture_formats: ClassVar[ - Sequence[FixtureFormat | LabeledFixtureFormat] - ] = [EOFFixture] + [ - LabeledFixtureFormat( - fixture_format, - f"eof_{fixture_format.format_name}", - f"Tests that generate an EOF {fixture_format.format_name}.", - ) - for fixture_format in StateTest.supported_fixture_formats - ] - - supported_execute_formats: ClassVar[Sequence[LabeledExecuteFormat]] = [ - LabeledExecuteFormat( - execute_format, - f"eof_{execute_format.label}", - f"Tests that generate an EOF {execute_format.label}.", - ) - for execute_format in StateTest.supported_execute_formats - ] - - @classmethod - def pytest_parameter_name(cls) -> str: - """Workaround for pytest parameter name.""" - return "eof_state_test" - - def model_post_init(self, __context: Any) -> None: - """Prepare the transaction parameters required to fill the test.""" - assert self.pre is not None, "pre must be set to generate a StateTest." - - EOFTest.model_post_init(self, __context) - - self.sender = self.pre.fund_eoa(amount=self.tx_sender_funding_amount) - if self.post is None: - self.post = Alloc() - - if ( - self.expect_exception is not None - and self.container_kind == ContainerKind.RUNTIME - ): - # Invalid EOF runtime code - initcode = Container.Init(deploy_container=self.container) - self.to = self.pre.deploy_contract( - Op.TXCREATE(tx_initcode_hash=initcode.hash) + Op.STOP - ) - self.initcodes = [initcode] # type: ignore[list-item] - - # Run transaction model validation - Transaction.model_post_init(self, __context) - - self.post[compute_eofcreate_address(self.to, 0)] = None # Expect - # failure. - elif ( - self.expect_exception is not None - and self.container_kind == ContainerKind.INITCODE - ): - # Invalid EOF initcode - self.to = self.pre.deploy_contract( - Op.TXCREATE(tx_initcode_hash=self.container.hash) + Op.STOP - ) - self.initcodes = [self.container] # type: ignore[list-item] - - # Run transaction model validation - Transaction.model_post_init(self, __context) - - self.post[compute_eofcreate_address(self.to, 0)] = None # Expect - # failure. - elif self.container_kind == ContainerKind.INITCODE: - self.to = self.pre.deploy_contract( - Op.TXCREATE(tx_initcode_hash=self.container.hash) + Op.STOP - ) - self.initcodes = [self.container] # type: ignore[list-item] - - # Run transaction model validation - Transaction.model_post_init(self, __context) - - self.post[compute_eofcreate_address(self.to, 0)] = ( - self.container_post - ) - else: - self.to = self.pre.deploy_contract(code=self.container) - - # Run transaction model validation - Transaction.model_post_init(self, __context) - - self.post[self.to] = self.container_post - - def generate_state_test(self) -> StateTest: - """Generate the StateTest filler.""" - assert self.pre is not None, "pre must be set to generate a StateTest." - assert self.post is not None, ( - "post must be set to generate a StateTest." 
- ) - - return StateTest.from_test( - base_test=self, - pre=self.pre, - tx=self, - env=self.env, - post=self.post, - ) - - def generate( - self, - *, - t8n: TransitionTool, - fixture_format: FixtureFormat, - **_: Any, - ) -> BaseFixture: - """Generate the BlockchainTest fixture.""" - if fixture_format == EOFFixture: - if Bytes(self.container) in existing_tests: - # Gracefully skip duplicate tests because one EOFStateTest can - # generate multiple state fixtures with the same data. - pytest.skip( - f"Duplicate EOF container on EOFStateTest: {self.node_id()}" - ) - return self.make_eof_test_fixture() - elif fixture_format in StateTest.supported_fixture_formats: - return self.generate_state_test().generate( - t8n=t8n, fixture_format=fixture_format - ) - - raise Exception(f"Unknown fixture format: {fixture_format}") - - -EOFStateTestSpec = Callable[[str], Generator[EOFStateTest, None, None]] -EOFStateTestFiller = Type[EOFStateTest] diff --git a/packages/testing/src/execution_testing/test_types/__init__.py b/packages/testing/src/execution_testing/test_types/__init__.py index fd2cfcf100..ef30805950 100644 --- a/packages/testing/src/execution_testing/test_types/__init__.py +++ b/packages/testing/src/execution_testing/test_types/__init__.py @@ -28,7 +28,6 @@ compute_create2_address, compute_create_address, compute_deterministic_create2_address, - compute_eofcreate_address, ) from .phase_manager import TestPhase, TestPhaseManager from .receipt_types import TransactionReceipt @@ -88,6 +87,5 @@ "compute_create_address", "compute_create2_address", "compute_deterministic_create2_address", - "compute_eofcreate_address", "keccak256", ) diff --git a/packages/testing/src/execution_testing/test_types/account_types.py b/packages/testing/src/execution_testing/test_types/account_types.py index 148c729782..42922ba375 100644 --- a/packages/testing/src/execution_testing/test_types/account_types.py +++ b/packages/testing/src/execution_testing/test_types/account_types.py @@ -33,7 +33,6 @@ FixedSizeBytesConvertible, NumberConvertible, ) -from execution_testing.vm import EVMCodeType from .trie import ( EMPTY_TRIE_ROOT, @@ -424,7 +423,6 @@ def deploy_contract( balance: NumberConvertible = 0, nonce: NumberConvertible = 1, address: Address | None = None, - evm_code_type: EVMCodeType | None = None, label: str | None = None, stub: str | None = None, ) -> Address: diff --git a/packages/testing/src/execution_testing/test_types/eof/__init__.py b/packages/testing/src/execution_testing/test_types/eof/__init__.py deleted file mode 100644 index e530ebe2cb..0000000000 --- a/packages/testing/src/execution_testing/test_types/eof/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -"""EVM Object Format Library to generate bytecode for testing purposes.""" - -from .constants import LATEST_EOF_VERSION - -__all__ = ("LATEST_EOF_VERSION",) diff --git a/packages/testing/src/execution_testing/test_types/eof/constants.py b/packages/testing/src/execution_testing/test_types/eof/constants.py deleted file mode 100644 index c047002fa2..0000000000 --- a/packages/testing/src/execution_testing/test_types/eof/constants.py +++ /dev/null @@ -1,20 +0,0 @@ -"""EVM Object Format generic constants. Applicable to all EOF versions.""" - -EOF_MAGIC = b"\xef\x00" -""" -The second byte found on every EOF formatted contract, which was chosen to -avoid clashes with three contracts which were deployed on Mainnet. 
-""" -EOF_HEADER_TERMINATOR = b"\x00" -"""Byte that terminates the header of the EOF format.""" -LATEST_EOF_VERSION = 1 -"""Latest existing EOF version.""" -VERSION_BYTE_LENGTH = 1 -"""Length of the version byte.""" - -MAX_RUNTIME_STACK_HEIGHT = 1024 -""" -Maximum height of the EVM runtime operand stack. Exceeding this value during -execution will result in the stack overflow exception. This value applies to -both legacy EVM and EOF. -""" diff --git a/packages/testing/src/execution_testing/test_types/eof/v1/__init__.py b/packages/testing/src/execution_testing/test_types/eof/v1/__init__.py deleted file mode 100644 index a7fe650b5a..0000000000 --- a/packages/testing/src/execution_testing/test_types/eof/v1/__init__.py +++ /dev/null @@ -1,622 +0,0 @@ -""" -EVM Object Format Version 1 Library to generate bytecode for testing purposes. -""" - -from dataclasses import dataclass -from enum import Enum, IntEnum, auto -from functools import cached_property -from typing import Any, Dict, List, Optional, Tuple - -from pydantic import Field, GetCoreSchemaHandler -from pydantic_core.core_schema import ( - PlainValidatorFunctionSchema, - no_info_plain_validator_function, - to_string_ser_schema, -) - -from execution_testing.base_types import Bytes, CoerceBytes, Hash -from execution_testing.base_types.conversions import BytesConvertible -from execution_testing.base_types.pydantic import CopyValidateModel -from execution_testing.exceptions.exceptions import ( - EOFExceptionInstanceOrList, -) -from execution_testing.vm import Bytecode, Op - -from ..constants import EOF_HEADER_TERMINATOR, EOF_MAGIC -from .constants import ( - HEADER_SECTION_CONTAINER_SIZE_BYTE_LENGTH, - HEADER_SECTION_COUNT_BYTE_LENGTH, - HEADER_SECTION_KIND_BYTE_LENGTH, - HEADER_SECTION_SIZE_BYTE_LENGTH, - NON_RETURNING_SECTION, - TYPES_INPUTS_BYTE_LENGTH, - TYPES_OUTPUTS_BYTE_LENGTH, - TYPES_STACK_BYTE_LENGTH, - VERSION_NUMBER_BYTES, -) - - -class SectionKind(IntEnum): - """Enum class of V1 valid section kind values.""" - - TYPE = 1 - CODE = 2 - CONTAINER = 3 - DATA = 0xFF - - def __str__(self) -> str: - """Return string representation of the section kind.""" - return self.name - - -class ContainerKind(Enum): - """Enum class of V1 valid container kind values.""" - - RUNTIME = auto() - INITCODE = auto() - - @staticmethod - def __get_pydantic_core_schema__( - source_type: Any, handler: GetCoreSchemaHandler - ) -> PlainValidatorFunctionSchema: - """ - Call class constructor without info and appends the serialization - schema. 
- """ - return no_info_plain_validator_function( - source_type.from_str, - serialization=to_string_ser_schema(), - ) - - @staticmethod - def from_str( - value: "str | ContainerKind | None", - ) -> "ContainerKind | None": - """Return ContainerKind enum value from a string.""" - if value is None: - return None - if isinstance(value, ContainerKind): - return value - return ContainerKind[value.upper()] - - def __str__(self) -> str: - """Return string representation of the container kind.""" - return self.name - - -class AutoSection(Enum): - """Enum class for auto section generation approach.""" - - AUTO = 1 - ONLY_HEADER = 2 - ONLY_BODY = 3 - NONE = 4 - - def any(self) -> bool: - """Return True if the enum is not NONE.""" - return self != AutoSection.NONE - - def header(self) -> bool: - """Return True if the enum is not ONLY_BODY.""" - return self != AutoSection.ONLY_BODY and self != AutoSection.NONE - - def body(self) -> bool: - """Return True if the enum is not ONLY_HEADER.""" - return self != AutoSection.ONLY_HEADER and self != AutoSection.NONE - - -SUPPORT_MULTI_SECTION_HEADER = [SectionKind.CODE, SectionKind.CONTAINER] - - -class Section(CopyValidateModel): - """Class that represents a section in an EOF V1 container.""" - - data: Bytes = Bytes(b"") - """ - Data to be contained by this section. Can be SupportsBytes, another EOF - container or any other abstract data. - """ - custom_size: int = 0 - """ - Custom size value to be used in the header. If unset, the header is built - with length of the data. - """ - kind: SectionKind | int - """ - Kind of section that is represented by this object. Can be any `int` - outside of the values defined by `SectionKind` for testing purposes. - """ - force_type_listing: bool = False - """ - Forces this section to appear in the TYPE section at the beginning of the - container. - """ - code_inputs: int = 0 - """Data stack items consumed by this code section (function)""" - code_outputs: int = NON_RETURNING_SECTION - """ - Data stack items produced by or expected at the end of this code section - (function) - """ - max_stack_increase: int | None = None - """Maximum operand stack height increase above the code section inputs.""" - max_stack_height: int | None = None - """Maximum height data stack reaches during execution of code section.""" - auto_max_stack_height: bool = False - """ - Whether to automatically compute the best suggestion for the - max_stack_height value for this code section. - """ - auto_code_inputs_outputs: bool = False - """ - Whether to automatically compute the best suggestion for the code_inputs, - code_outputs values for this code section. 
- """ - skip_header_listing: bool = False - """Skip section from listing in the header""" - skip_body_listing: bool = False - """Skip section from listing in the body""" - skip_types_body_listing: bool = False - """ - Skip section from listing in the types body (input, output, stack) bytes - """ - skip_types_header_listing: bool = False - """ - Skip section from listing in the types header (not calculating input, - output, stack size) - """ - - @cached_property - def header(self) -> bytes: - """Get formatted header for this section according to its contents.""" - size = ( - self.custom_size - if "custom_size" in self.model_fields_set - else len(self.data) - ) - if self.kind == SectionKind.CODE: - raise Exception( - "Need container-wide view of code sections to generate header" - ) - return self.kind.to_bytes( - HEADER_SECTION_KIND_BYTE_LENGTH, byteorder="big" - ) + size.to_bytes(HEADER_SECTION_SIZE_BYTE_LENGTH, byteorder="big") - - @cached_property - def type_definition(self) -> bytes: - """Returns a serialized type section entry for this section.""" - if self.kind != SectionKind.CODE and not self.force_type_listing: - return bytes() - - code_inputs, code_outputs, max_stack_increase, max_stack_height = ( - self.code_inputs, - self.code_outputs, - self.max_stack_increase, - self.max_stack_height, - ) - if self.auto_max_stack_height or self.auto_code_inputs_outputs: - ( - auto_code_inputs, - auto_code_outputs, - auto_max_height, - ) = compute_code_stack_values(self.data) - if self.auto_code_inputs_outputs: - code_inputs, code_outputs = ( - auto_code_inputs, - auto_code_outputs, - ) - if self.auto_max_stack_height: - max_stack_increase = auto_max_height - code_inputs - - if max_stack_increase is not None: - assert max_stack_height is None - elif max_stack_height is not None: - max_stack_increase = max_stack_height - code_inputs - else: - max_stack_increase = 0 - assert max_stack_increase >= 0, "incorrect max stack height value" - return ( - code_inputs.to_bytes( - length=TYPES_INPUTS_BYTE_LENGTH, byteorder="big" - ) - + code_outputs.to_bytes( - length=TYPES_OUTPUTS_BYTE_LENGTH, byteorder="big" - ) - + max_stack_increase.to_bytes( - length=TYPES_STACK_BYTE_LENGTH, byteorder="big" - ) - ) - - def with_max_stack_height(self, max_stack_height: int) -> "Section": - """ - Create copy of the section with `max_stack_height` set to the specified - value. - """ - return self.copy(max_stack_height=max_stack_height) - - def with_auto_max_stack_height(self) -> "Section": - """ - Create copy of the section with `auto_max_stack_height` set to True. - """ - return self.copy(auto_max_stack_height=True) - - def with_auto_code_inputs_outputs(self) -> "Section": - """ - Create copy of the section with `auto_code_inputs_outputs` set to True. - """ - return self.copy(auto_code_inputs_outputs=True) - - @staticmethod - def list_header(sections: List["Section"]) -> bytes: - """ - Create single code header for all code sections contained in the list. 
- """ - # Allow 'types section' to use skip_header_listing flag - if sections[0].skip_header_listing: - return b"" - - if sections[0].kind not in SUPPORT_MULTI_SECTION_HEADER: - return b"".join(s.header for s in sections) - - h = sections[0].kind.to_bytes(HEADER_SECTION_KIND_BYTE_LENGTH, "big") - - # Count only those sections that are not marked to be skipped for - # header calculation - header_registered_sections = 0 - for cs in sections: - if not cs.skip_header_listing: - header_registered_sections += 1 - - h += header_registered_sections.to_bytes( - HEADER_SECTION_COUNT_BYTE_LENGTH, "big" - ) - for cs in sections: - # If section is marked to skip the header calculation, don't make - # header for it - if cs.skip_header_listing: - continue - size = ( - cs.custom_size - if "custom_size" in cs.model_fields_set - else len(cs.data) - ) - body_size_length = ( - HEADER_SECTION_SIZE_BYTE_LENGTH - if cs.kind != SectionKind.CONTAINER - else HEADER_SECTION_CONTAINER_SIZE_BYTE_LENGTH - ) - h += size.to_bytes(body_size_length, "big") - - return h - - @classmethod - def Code( # noqa: N802 - cls, code: Optional[BytesConvertible | Bytecode] = None, **kwargs: Any - ) -> "Section": - """Create new code section with the specified code.""" - if code is None: - code = Bytecode() - kwargs.pop("kind", None) - if ( - "max_stack_height" not in kwargs - and "max_stack_increase" not in kwargs - and isinstance(code, Bytecode) - ): - # If not specified, take the max_stack_increase from the Bytecode. - kwargs["max_stack_increase"] = code.max_stack_height - kwargs.get( - "code_inputs", 0 - ) - return cls(kind=SectionKind.CODE, data=code, **kwargs) - - @classmethod - def Container( # noqa: N802 - cls, container: "Container" | BytesConvertible, **kwargs: Any - ) -> "Section": - """Create new container section with the specified container.""" - kwargs.pop("kind", None) - return cls(kind=SectionKind.CONTAINER, data=container, **kwargs) - - @classmethod - def Data(cls, data: BytesConvertible = b"", **kwargs: Any) -> "Section": # noqa: N802 - """Create new data section with the specified data.""" - kwargs.pop("kind", None) - return cls(kind=SectionKind.DATA, data=data, **kwargs) - - -class Container(CopyValidateModel): - """Class that represents an EOF V1 container.""" - - name: Optional[str] = None - """Name of the container""" - sections: List[Section] = Field(default_factory=list) - """List of sections in the container""" - magic: Bytes = Bytes(EOF_MAGIC) - """ - Custom magic value used to override the mandatory EOF value for testing - purposes. - """ - version: Bytes = Bytes(VERSION_NUMBER_BYTES) - """ - Custom version value used to override the mandatory EOF V1 value for - testing purposes. - """ - header_terminator: Bytes = Bytes(EOF_HEADER_TERMINATOR) - """Bytes used to terminate the header.""" - extra: Bytes = Bytes(b"") - """ - Extra data to be appended at the end of the container, which will not be - considered part of any of the sections, for testing purposes. - """ - auto_type_section: AutoSection = AutoSection.AUTO - """ - Automatically generate a `TYPE` section based on the included `CODE` kind - sections. 
- """ - auto_data_section: bool = True - """Automatically generate a `DATA` section.""" - auto_sort_sections: AutoSection = AutoSection.AUTO - """ - Automatically sort sections for the header and body: Headers: type section - first, all code sections, container sections, last data section(s) Body: - type section first, all code sections, data section(s), last container - sections - """ - skip_join_concurrent_sections_in_header: bool = False - """Skip joining concurrent sections in the header (code and container)""" - validity_error: EOFExceptionInstanceOrList | str | None = None - """Optional error expected for the container. TODO: Remove str""" - kind: ContainerKind = ContainerKind.RUNTIME - """Kind type of the container.""" - raw_bytes: Optional[CoerceBytes] = None - """ - Optional raw bytes that represent the container. Used to have a cohesive - type among all test cases, even those that do not resemble a valid EOF V1 - container. - """ - expected_bytecode: Optional[CoerceBytes] = None - """ - Optional raw bytes of the expected constructed bytecode. This allows - confirming that raw EOF and Container() representations are identical. - """ - - @cached_property - def bytecode(self) -> bytes: - """Converts the EOF V1 Container into bytecode.""" - if self.raw_bytes is not None: - assert len(self.sections) == 0 - return self.raw_bytes - - c = self.magic + self.version - - # Prepare auto-generated sections - sections = self.sections - - # Add type section if needed - if ( - self.auto_type_section.any() - and count_sections(sections, SectionKind.TYPE) == 0 - ): - # Calculate skipping flags - types_header_size = 0 - type_section_data = b"" - for s in sections: - types_header_size += ( - len(s.type_definition) - if not s.skip_types_header_listing - else 0 - ) - type_section_data += ( - s.type_definition if not s.skip_types_body_listing else b"" - ) - - sections = [ - Section( - kind=SectionKind.TYPE, - data=type_section_data, - custom_size=types_header_size, - ) - ] + sections - - # Add data section if needed - if ( - self.auto_data_section - and count_sections(sections, SectionKind.DATA) == 0 - ): - sections = sections + [Section(kind=SectionKind.DATA, data="0x")] - - header_sections = [ - s - for s in sections - if s.kind != SectionKind.TYPE - or self.auto_type_section != AutoSection.ONLY_BODY - ] - if self.auto_sort_sections.header(): - header_sections.sort(key=lambda x: x.kind) - - # Add headers - if header_sections: - # Join headers of the same kind in a list of lists, only if they - # are next to each other - concurrent_sections: List[List[Section]] = [[header_sections[0]]] - for s in header_sections[1:]: - if ( - s.kind == concurrent_sections[-1][-1].kind - and not self.skip_join_concurrent_sections_in_header - ): - concurrent_sections[-1].append(s) - else: - concurrent_sections.append([s]) - c += b"".join( - Section.list_header(cs) for cs in concurrent_sections - ) - - # Add header terminator - c += self.header_terminator - - body_sections = sections[:] - if self.auto_sort_sections.body(): - # Sort sections for the body - body_sections.sort(key=lambda x: x.kind) - - # Add section bodies - for s in body_sections: - if ( - s.kind == SectionKind.TYPE - and self.auto_type_section == AutoSection.ONLY_HEADER - ): - continue - if s.data and not s.skip_body_listing: - c += s.data - - # Add extra (garbage) - c += self.extra - - # Check if the constructed bytecode matches the expected one - if self.expected_bytecode is not None: - assert c == self.expected_bytecode - - return c - - @classmethod - 
def Code( - cls, code: Optional[BytesConvertible] = None, **kwargs: Any - ) -> "Container": # noqa: N802 - """Create simple container with a single code section.""" - if code is None: - code = Bytecode() - kwargs.pop("kind", None) - return cls(sections=[Section.Code(code=code, **kwargs)]) - - @classmethod - def Init( # noqa: N802 - cls, - deploy_container: "Container", - initcode_prefix: Optional[Bytecode] = None, - ) -> "Container": - """ - Create simple init container that deploys the specified container. - """ - if initcode_prefix is None: - initcode_prefix = Bytecode() - return cls( - sections=[ - Section.Code( - code=initcode_prefix + Op.RETURNCODE[0](0, 0), - ), - Section.Container( - container=deploy_container, - ), - ], - ) - - @cached_property - def hash(self) -> Hash: - """Returns hash of the container bytecode.""" - return Bytes(self.bytecode).keccak256() - - def __bytes__(self) -> bytes: - """Return bytecode of the container.""" - return self.bytecode - - def __len__(self) -> int: - """Return length of the container bytecode.""" - return len(self.bytecode) - - def __str__(self) -> str: - """ - Return name of the container if available, otherwise the bytecode of - the container as a string. - """ - if self.name: - return self.name - return str(self.bytecode) - - -@dataclass(kw_only=True) -class Initcode(Bytecode): - """ - Helper class used to generate initcode for the specified deployment code, - using EOF V1 container as init code. - """ - - name: str = "EOF V1 Initcode" - """Name used to identify the initcode.""" - deploy_container: Container - """Container to be deployed.""" - - @cached_property - def init_container(self) -> Container: - """Generate a container that will be used as the initcode.""" - return Container( - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, 0), - max_stack_height=2, - ), - Section.Container( - container=self.deploy_container, - ), - ], - ) - - @cached_property - def bytecode(self) -> bytes: - """ - Generate an EOF container performs `EOFCREATE` with the specified code. - """ - initcode = Container( - sections=[ - Section.Code( - # TODO: Pass calldata - code=Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP(), - max_stack_height=4, - ), - Section.Container( - container=self.init_container, - ), - ] - ) - - return bytes(initcode) - - -def count_sections(sections: List[Section], kind: SectionKind | int) -> int: - """Count sections from a list that match a specific kind.""" - return len([s for s in sections if s.kind == kind]) - - -OPCODE_MAP: Dict[int, Op] = {x.int(): x for x in Op} - - -def compute_code_stack_values(code: bytes) -> Tuple[int, int, int]: - """ - Compute stack values for the given bytecode. - - TODO: THIS DOES NOT WORK WHEN THE RJUMP* JUMPS BACKWARDS (and many other - things). 
- """ - i = 0 - stack_height = 0 - min_stack_height = 0 - max_stack_height = 0 - - # compute type annotation - while i < len(code): - op = OPCODE_MAP.get(code[i]) - if op is None: - return (0, 0, 0) - elif op == Op.RJUMPV: - i += 1 - if i < len(code): - count = code[i] - i += count * 2 - else: - i += 1 + op.data_portion_length - - stack_height -= op.popped_stack_items - min_stack_height = min(stack_height, min_stack_height) - stack_height += op.pushed_stack_items - max_stack_height = max(stack_height, max_stack_height) - if stack_height < 0: - stack_height = 0 - return (abs(min_stack_height), stack_height, max_stack_height) diff --git a/packages/testing/src/execution_testing/test_types/eof/v1/constants.py b/packages/testing/src/execution_testing/test_types/eof/v1/constants.py deleted file mode 100644 index 5d9cacbf83..0000000000 --- a/packages/testing/src/execution_testing/test_types/eof/v1/constants.py +++ /dev/null @@ -1,45 +0,0 @@ -""" -EVM Object Format generic constants. -Applicable to EOF version 1. -""" - -from ..constants import VERSION_BYTE_LENGTH - -VERSION_NUMBER = 0x01 - -VERSION_NUMBER_BYTES = VERSION_NUMBER.to_bytes(VERSION_BYTE_LENGTH, "big") - -MAX_CODE_SECTIONS = 1024 - -MAX_RETURN_STACK_HEIGHT = 1024 - -MAX_STACK_INCREASE_LIMIT = 0x3FF -""" -Maximum value for the max stack increase accepted by the EOF format. -""" - -MAX_CODE_INPUTS = 127 - -MAX_CODE_OUTPUTS = 127 - -NON_RETURNING_SECTION = 0x80 - -MAX_RJUMPV_COUNT = 0xFF - -MAX_BYTECODE_SIZE = 24576 - -MAX_INITCODE_SIZE = MAX_BYTECODE_SIZE * 2 - -HEADER_SECTION_KIND_BYTE_LENGTH = 1 - -HEADER_SECTION_COUNT_BYTE_LENGTH = 2 - -HEADER_SECTION_SIZE_BYTE_LENGTH = 2 - -HEADER_SECTION_CONTAINER_SIZE_BYTE_LENGTH = 4 - -TYPES_INPUTS_BYTE_LENGTH = 1 - -TYPES_OUTPUTS_BYTE_LENGTH = 1 - -TYPES_STACK_BYTE_LENGTH = 2 diff --git a/packages/testing/src/execution_testing/test_types/helpers.py b/packages/testing/src/execution_testing/test_types/helpers.py index 4b9403775a..7151141168 100644 --- a/packages/testing/src/execution_testing/test_types/helpers.py +++ b/packages/testing/src/execution_testing/test_types/helpers.py @@ -121,19 +121,6 @@ def compute_deterministic_create2_address( ) -def compute_eofcreate_address( - address: FixedSizeBytesConvertible, salt: FixedSizeBytesConvertible -) -> Address: - """ - Compute address of the resulting contract created using the `EOFCREATE` - opcode. 
- """ - hash_bytes = Bytes( - b"\xff" + b"\x00" * 12 + Address(address) + Hash(salt) - ).keccak256() - return Address(hash_bytes[-20:]) - - def add_kzg_version( b_hashes: List[bytes | SupportsBytes | int | str], kzg_version: int ) -> List[Hash]: diff --git a/packages/testing/src/execution_testing/test_types/tests/test_eof_v1.py b/packages/testing/src/execution_testing/test_types/tests/test_eof_v1.py deleted file mode 100644 index 866e697e77..0000000000 --- a/packages/testing/src/execution_testing/test_types/tests/test_eof_v1.py +++ /dev/null @@ -1,903 +0,0 @@ -"""Test suite for `code.eof.v1` module.""" - -from typing import List, Tuple - -import pytest - -from execution_testing.base_types import to_json -from execution_testing.base_types.pydantic import CopyValidateModel -from execution_testing.vm import Op - -from ..eof.v1 import AutoSection, Container, Section, SectionKind - -test_cases: List[Tuple[str, Container, str]] = [ - ( - "No sections", - Container( - auto_data_section=False, - auto_type_section=AutoSection.NONE, - sections=[], - ), - "ef0001 00", - ), - ( - "Single code section", - Container( - sections=[ - Section.Code("0x00"), - ], - ), - "ef0001 01 0004 02 0001 0001 ff 0000 00 00800000 00", - ), - ( - "Single code section, single container section", - Container( - sections=[ - Section.Code("0x0A"), - Section.Container("0x0B"), - ], - ), - "ef0001 01 0004 02 0001 0001 03 0001 00000001 ff 0000 00 00800000 0A 0B", - ), - ( - "Single code section, single container section, single data", - Container( - sections=[ - Section.Code("0x0A"), - Section.Container("0x0B"), - Section.Data("0x0C"), - ], - ), - "ef0001 01 0004 02 0001 0001 03 0001 00000001 ff 0001 00 00800000 0A 0B 0C", - ), - ( - "Single code section, single container section, single data 2", - Container( - sections=[ - Section.Code("0x0A"), - Section.Data("0x0C"), - Section.Container("0x0B"), - ], - ), - "ef0001 01 0004 02 0001 0001 03 0001 00000001 ff 0001 00 00800000 0A 0B 0C", - ), - ( - "Single code section, multiple container section, single data", - Container( - sections=[ - Section.Code("0x0A"), - Section.Container("0x0B"), - Section.Data("0x0C"), - Section.Container("0x0D"), - ], - ), - "ef0001 01 0004 02 0001 0001 03 0002 00000001 00000001 ff 0001 00 00800000 0A 0B 0D 0C", - ), - ( - "Single code section, multiple container sections", - Container( - sections=[ - Section.Code("0x00"), - Section.Container("0x0001"), - Section.Container("0x00"), - ], - ), - "ef0001 01 0004 02 0001 0001 03 0002 00000002 00000001 ff 0000 00 00800000 00 0001 00", - ), - ( - "No code section", - Container( - sections=[Section.Data("0x00")], - ), - "ef0001 01 0000 ff 0001 00 00", - ), - ( - "Single data section", - Container( - auto_type_section=AutoSection.NONE, - sections=[ - Section.Data("0x00"), - ], - ), - "ef0001 ff 0001 00 00", - ), - ( - "Custom invalid section", - Container( - auto_data_section=False, - auto_type_section=AutoSection.NONE, - sections=[ - Section( - kind=0xFE, - data="0x00", - ), - ], - ), - "ef0001 fe 0001 00 00", - ), - ( - "Multiple sections", - Container( - sections=[ - Section.Code("0x0e"), - Section.Data("0x0f"), - ], - ), - "ef0001 01 0004 02 0001 0001 ff 0001 00 00800000 0e 0f", - ), - ( - "Multiple type sections", - Container( - sections=[ - Section( - kind=SectionKind.TYPE, - data="0x00000000", - ), - Section( - kind=SectionKind.TYPE, - data="0x00000000", - ), - Section.Code("0x00"), - ], - auto_type_section=AutoSection.NONE, - ), - "ef0001 01 0004 01 0004 02 0001 0001 ff 0000 00 00000000 00000000 00", - 
), - ( - "Invalid Magic", - Container( - magic=b"\xef\xfe", - sections=[ - Section.Code("0x00"), - ], - ), - "effe01 01 0004 02 0001 0001 ff 0000 00 00800000 00", - ), - ( - "Invalid Version", - Container( - version=b"\x02", - sections=[ - Section.Code("0x00"), - ], - ), - "ef0002 01 0004 02 0001 0001 ff 0000 00 00800000 00", - ), - ( - "Section Invalid size Version", - Container( - sections=[ - Section.Code( - "0x00", - custom_size=0xFFFF, - ), - ], - ), - "ef0001 01 0004 02 0001 ffff ff 0000 00 00800000 00", - ), - ( - "Nested EOF", - Container( - sections=[ - Section.Code("0x00"), - Section( - kind=SectionKind.CONTAINER, - data=Container( - sections=[Section.Code("0x01")], - ), - ), - ], - ), - "ef0001 01 0004 02 0001 0001 03 0001 00000014 ff 0000 00 00800000 00" - "ef0001 01 0004 02 0001 0001 ff 0000 00 00800000 01", - ), - ( - "Nested EOF in Data", - Container( - sections=[ - Section.Code("0x00"), - Section.Data( - data=Container( - sections=[Section.Code("0x01")], - ), - ), - ], - ), - "ef0001 01 0004 02 0001 0001 ff 0014 00 00800000 00" - "ef0001 01 0004 02 0001 0001 ff 0000 00 00800000 01", - ), - ( - "Incomplete code section", - Container( - sections=[ - Section.Code( - code=b"", - custom_size=0x02, - ), - ], - ), - "ef0001 01 0004 02 0001 0002 ff 0000 00 00800000", - ), - ( - "Trailing bytes after code section", - Container( - sections=[ - Section.Code("0x600000"), - ], - extra=bytes.fromhex("deadbeef"), - ), - "ef0001 01 0004 02 0001 0003 ff 0000 00 00800000 600000 deadbeef", - ), - ( - "Multiple code sections", - Container( - sections=[ - Section.Code("0x600000"), - Section.Code("0x600000"), - ], - ), - """ - ef0001 01 0008 02 0002 0003 0003 ff 0000 00 - 00800000 00800000 - 600000 - 600000 - """, - ), - ( - "No section terminator", - Container( - sections=[ - Section.Code("0x600000"), - ], - header_terminator=bytes(), - ), - "ef0001 01 0004 02 0001 0003 ff 0000 00800000 600000", - ), - ( - "No auto type section", - Container( - auto_type_section=AutoSection.NONE, - sections=[ - Section.Code("0x00"), - ], - ), - "ef0001 02 0001 0001 ff 0000 00 00", - ), - ( - "Data section in types", - Container( - sections=[ - Section.Code("0x00"), - Section.Data( - data="0x00", - force_type_listing=True, - ), - ], - ), - """ - ef0001 01 0008 02 0001 0001 ff 0001 00 - 00800000 00800000 - 00 00 - """, - ), - ( - "Code section inputs", - Container( - sections=[ - Section.Code( - "0x00", - code_inputs=1, - max_stack_height=1, - ), - ], - ), - """ - ef0001 01 0004 02 0001 0001 ff 0000 00 - 01800000 - 00 - """, - ), - ( - "Code section inputs 2", - Container( - sections=[ - Section.Code( - "0x00", - code_inputs=0xFF, - max_stack_height=0xFF, - ), - ], - ), - """ - ef0001 01 0004 02 0001 0001 ff 0000 00 - ff800000 - 00 - """, - ), - ( - "Code section outputs", - Container( - sections=[ - Section.Code( - "0x00", - code_outputs=1, - ), - ], - ), - """ - ef0001 01 0004 02 0001 0001 ff 0000 00 - 00010000 - 00 - """, - ), - ( - "Code section outputs 2", - Container( - sections=[ - Section.Code( - "0x00", - code_outputs=0xFF, - ), - ], - ), - """ - ef0001 01 0004 02 0001 0001 ff 0000 00 - 00ff0000 - 00 - """, - ), - ( - "Code section max stack height", - Container( - sections=[ - Section.Code( - "0x00", - max_stack_height=0x0201, - ), - ], - ), - """ - ef0001 01 0004 02 0001 0001 ff 0000 00 - 00800201 - 00 - """, - ), - ( - "Code section max stack height 2", - Container( - sections=[ - Section.Code( - "0x00", - max_stack_height=0xFFFF, - ), - ], - ), - """ - ef0001 01 0004 02 0001 0001 ff 0000 00 - 
0080FFFF - 00 - """, - ), - ( - "Code section max stack height 3", - Container( - sections=[ - Section.Code( - "0x00", - max_stack_height=0xFFFF, - ), - Section.Code("0x00"), - ], - ), - """ - ef0001 01 0008 02 0002 0001 0001 ff 0000 00 - 0080FFFF 00800000 - 00 - 00 - """, - ), - ( - "Custom type section", - Container( - sections=[ - Section( - kind=SectionKind.TYPE, - data="0x00", - ), - Section.Code("0x00"), - ], - ), - "ef0001 01 0001 02 0001 0001 ff 0000 00 00 00", - ), - ( - "EIP-4750 Single code section oversized type", - Container( - sections=[ - Section( - kind=SectionKind.TYPE, - data="0x0000000000", - ), - Section.Code("0x00"), - ], - ), - "ef0001 01 0005 02 0001 0001 ff 0000 00 0000000000 00", - ), - ( - "Empty type section", - Container( - sections=[ - Section(kind=SectionKind.TYPE, data="0x"), - Section.Code("0x00"), - ], - auto_type_section=AutoSection.NONE, - ), - "ef0001 01 0000 02 0001 0001 ff 0000 00 00", - ), - ( - "Check that simple valid EOF1 deploys", - Container( - sections=[ - Section.Code( - "0x305000", - code_inputs=0, - code_outputs=128, # Non returning - max_stack_height=1, - ), - Section.Data("0xef"), - ], - auto_type_section=AutoSection.AUTO, - ), - """ - # EOF deployed code - ef0001 # Magic followed by version - 010004 # One code segment - 020001 # One code segment - 0003 # code seg 0: 3 bytes - ff0001 # One byte data segment - 00 # End of header - # Code segment 0 header - 00 # Zero inputs - 80 # Non-Returning Function - 0001 # Max stack height 1 - # Code segment 0 code - 30 # 1 ADDRESS - 50 # 2 POP - 00 # 3 STOP - # Data segment - ef - """, - ), - ( - "Data Section custom_size parameter overwrites bytes size", - Container( - sections=[ - Section.Code( - "0x305000", - code_inputs=0, - code_outputs=128, # Non returning - max_stack_height=1, - ), - Section.Data("0x0bad", custom_size=4), - ], - auto_type_section=AutoSection.AUTO, - ), - """ - # EOF deployed code - ef0001 # Magic followed by version - 010004 # One code segment - 020001 # One code segment - 0003 # code seg 0: 3 bytes - ff0004 # Four byte data segment - 00 # End of header - # Code segment 0 header - 00 # Zero inputs - 80 # Non-Returning Function - 0001 # Max stack height 1 - # Code segment 0 code - 30 # 1 ADDRESS - 50 # 2 POP - 00 # 3 STOP - # Data segment - 0bad # 2 bytes instead of four - """, - ), - ( - "Multiple code segments", - Container( - sections=[ - Section.Code( - "0x5f35e2030000000300060009e50001e50002e50003e3000400", - code_inputs=0, - code_outputs=128, # Non returning - max_stack_height=1, - ), - Section.Code( - "0x5f5ff3", - code_inputs=0, - code_outputs=128, # Non returning - max_stack_height=2, - ), - Section.Code( - "0x5f5ffd", - code_inputs=0, - code_outputs=128, # Non returning - max_stack_height=2, - ), - Section.Code( - "0xfe", - code_inputs=0, - code_outputs=128, # Non returning - max_stack_height=0, - ), - Section.Code( - "0xe4", - code_inputs=0, - code_outputs=0, - max_stack_height=0, - ), - Section.Data("0x0bad60a7", custom_size=4), - ], - auto_type_section=AutoSection.AUTO, - ), - """ - # EOF deployed code - EF0001 # Magic and Version ( 1 ) - 010014 # Types length ( 20 ) - 020005 # Total code sections ( 5 ) - 0019 # Code section 0 , 25 bytes - 0003 # Code section 1 , 3 bytes - 0003 # Code section 2 , 3 bytes - 0001 # Code section 3 , 1 bytes - 0001 # Code section 4 , 1 bytes - ff0004 # Data section length ( 4 ) - 00 # Terminator (end of header) - # Code 0 types - 00 # 0 inputs - 80 # 0 outputs (Non-returning function) - 0001 # max stack: 1 - # Code 1 types - 00 # 0 
inputs - 80 # 0 outputs (Non-returning function) - 0002 # max stack: 2 - # Code 2 types - 00 # 0 inputs - 80 # 0 outputs (Non-returning function) - 0002 # max stack: 2 - # Code 3 types - 00 # 0 inputs - 80 # 0 outputs (Non-returning function) - 0000 # max stack: 0 - # Code 4 types - 00 # 0 inputs - 00 # 0 outputs - 0000 # max stack: 0 - # Code section 0 - 5f # [0] PUSH0 - 35 # [1] CALLDATALOAD - e2030000000300060009 # [2] RJUMPV(0,3,6,9) - e50001 # [12] JUMPF(1) - e50002 # [15] JUMPF(2) - e50003 # [18] JUMPF(3) - e30004 # [21] CALLF(4) - 00 # [24] STOP - # Code section 1 - 5f # [0] PUSH0 - 5f # [1] PUSH0 - f3 # [2] RETURN - # Code section 2 - 5f # [0] PUSH0 - 5f # [1] PUSH0 - fd # [2] REVERT - # Code section 3 - fe # [0] INVALID - # Code section 4 - e4 # [0] RETF - # Data section - 0bad60a7 - """, - ), - ( - "Custom Types Section overrides code", - Container( - sections=[ - Section( - kind=SectionKind.TYPE, data="0x00700002", custom_size=8 - ), - Section( - kind=SectionKind.CODE, - code_inputs=0, - code_outputs=128, # Non returning - max_stack_height=1, - data="0x305000", - ), - Section(kind=SectionKind.DATA, data="0x0bad60A7"), - ], - ), - """ - # EOF deployed code - ef0001 # Magic followed by version - 010008 # Two code segments - 020001 # One code segment - 0003 # code seg 0: 3 bytes - ff0004 # Four byte data segment - 00 # End of header - # Code segment 0 header - 00 # Zero inputs - 70 # Non-Returning Function - 0002 # Max stack height 1 - # Code segment 0 code - 30 # 1 ADDRESS - 50 # 2 POP - 00 # 3 STOP - # Data segment - 0bad60A7 # 4 bytes (valid) - """, - ), - ( - "Type section wrong order, but only in HEADER", - Container( - sections=[ - Section( - kind=SectionKind.CODE, - code_inputs=0, - code_outputs=128, # Non returning - max_stack_height=1, - data="0x305000", - ), - Section( - kind=SectionKind.TYPE, - data="0x00800001", - ), - Section(kind=SectionKind.DATA, data="0xef"), - ], - auto_sort_sections=AutoSection.ONLY_BODY, - ), - """ - # EOF deployed code - ef0001 # Magic followed by version - 020001 # One code segment - 0003 # code seg 0: 3 bytes - 010004 # One code segment - ff0001 # One byte data segment - 00 # End of header - # Code segment 0 header - 00 # Zero inputs - 80 # Non-Returning Function - 0001 # Max stack height 1 - # Code segment 0 code - 30 # 1 ADDRESS - 50 # 2 POP - 00 # 3 STOP - # Data segment - ef - """, - ), - ( - "Type section wrong order, but only in BODY", - Container( - sections=[ - Section( - kind=SectionKind.CODE, - code_inputs=0, - code_outputs=128, # Non returning - max_stack_height=1, - data="0x305000", - ), - Section( - kind=SectionKind.TYPE, - data="0x00800001", - ), - Section(kind=SectionKind.DATA, data="0xef"), - ], - auto_sort_sections=AutoSection.ONLY_HEADER, - ), - """ - # EOF deployed code - ef0001 # Magic followed by version - 010004 # One code segment - 020001 # One code segment - 0003 # code seg 0: 3 bytes - ff0001 # One byte data segment - 00 # End of header - # Code segment 0 code - 30 # 1 ADDRESS - 50 # 2 POP - 00 # 3 STOP - # Code segment 0 header - 00 # Zero inputs - 80 # Non-Returning Function - 0001 # Max stack height 1 - # Data segment - ef - """, - ), - ( - "Type section missing, but only in HEADER", - Container( - sections=[ - Section( - kind=SectionKind.CODE, - code_inputs=0, - code_outputs=128, # Non returning - max_stack_height=1, - data="0x305000", - ), - Section(kind=SectionKind.DATA, data="0xef"), - ], - auto_type_section=AutoSection.ONLY_BODY, - ), - """ - # EOF deployed code - ef0001 # Magic followed by version - 020001 # 
One code segment - 0003 # code seg 0: 3 bytes - ff0001 # One byte data segment - 00 # End of header - # Code segment 0 header - 00 # Zero inputs - 80 # Non-Returning Function - 0001 # Max stack height 1 - # Code segment 0 code - 30 # 1 ADDRESS - 50 # 2 POP - 00 # 3 STOP - # Data segment - ef - """, - ), - ( - "Type section missing, but only in BODY", - Container( - sections=[ - Section( - kind=SectionKind.CODE, - code_inputs=0, - code_outputs=128, # Non returning - max_stack_height=1, - data="0x305000", - ), - Section(kind=SectionKind.DATA, data="0xef"), - ], - auto_type_section=AutoSection.ONLY_HEADER, - ), - """ - # EOF deployed code - ef0001 # Magic followed by version - 010004 # Types section - 020001 # One code segment - 0003 # code seg 0: 3 bytes - ff0001 # One byte data segment - 00 # End of header - # Code segment 0 header - # Code segment 0 code - 30 # 1 ADDRESS - 50 # 2 POP - 00 # 3 STOP - # Data segment - ef - """, - ), - ( - "Container.Init simple test", - Container.Init(deploy_container=Container.Code(b"\0")), - """ - # EOF deployed code - ef0001 # Magic followed by version - 010004 # Types section - 020001 # One code segment - 0006 # code seg 0: 6 bytes - 030001 # One container segment - 00000014 # container seg 0: 20 bytes - ff0000 # Zero byte data segment - 00 # End of header - 0080 0002 # Types section - # Code segment 0 code - 6000 # 1 PUSH1 0 - 6000 # 2 PUSH1 0 - ee00 # 3 RETURNCODE[0] - # Subcontainer 0 - ef0001 # Magic followed by version - 010004 # Types section - 020001 # One code segment - 0001 # code seg 0: 1 byte - ff0000 # Zero byte data segment - 00 # End of header - 0080 0000 # Types section - # Code segment 0 code - 00 # 1 STOP - """, - ), - ( - "Container.Init initcode prefix", - Container.Init( - deploy_container=Container.Code(b"\0"), - initcode_prefix=Op.SSTORE(0, 0), - ), - """ - # EOF deployed code - ef0001 # Magic followed by version - 010004 # Types section - 020001 # One code segment - 000b # code seg 0: 11 bytes - 030001 # One container segment - 00000014 # container seg 0: 20 bytes - ff0000 # Zero byte data segment - 00 # End of header - 0080 0002 # Types section - # Code segment 0 code - 6000 # 1 PUSH1 0 - 6000 # 2 PUSH1 0 - 55 # 3 SSTORE - 6000 # 4 PUSH1 0 - 6000 # 5 PUSH1 0 - ee00 # 6 RETURNCODE[0] - # Subcontainer 0 - ef0001 # Magic followed by version - 010004 # Types section - 020001 # One code segment - 0001 # code seg 0: 1 byte - ff0000 # Zero byte data segment - 00 # End of header - 0080 0000 # Types section - # Code segment 0 code - 00 # 1 STOP - """, - ), -] - - -@pytest.mark.parametrize( - ["container", "hex_value"], - [(x[1], x[2]) for x in test_cases], - ids=[x[0] for x in test_cases], -) -def test_eof_v1_assemble(container: Container, hex_value: str) -> None: - """Test `ethereum_test.types.code`.""" - expected_string = remove_comments_from_string(hex_value) - expected_bytes = bytes.fromhex( - expected_string.replace(" ", "").replace("\n", "") - ) - assert bytes(container) == expected_bytes, f""" - Container: {bytes(container).hex()} - Expected : {expected_bytes.hex()} - """ - - -def remove_comments_from_string(input_string: str) -> str: - """Remove comments from a string and leave only valid hex characters.""" - # Split the string into individual lines - lines = input_string.split("\n") - - # Process each line to remove text following a '#' - cleaned_lines = [] - for line in lines: - # Find the index of the first '#' character - comment_start = line.find("#") - - # If a '#' is found, slice up to that point; otherwise, take the whole - 
# line - if comment_start != -1: - cleaned_line = line[:comment_start].rstrip() - else: - cleaned_line = line - - # Only add non-empty lines if needed - if cleaned_line.strip(): - cleaned_lines.append(cleaned_line) - - # Join the cleaned lines back into a single string - cleaned_string = "\n".join(cleaned_lines) - return cleaned_string - - -@pytest.mark.parametrize( - "model", - [ - Container(), - ], - ids=lambda model: model.__class__.__name__, -) -def test_model_copy(model: CopyValidateModel) -> None: - """Test that the copy method returns a correct copy of the model.""" - assert to_json(model.copy()) == to_json(model) - assert model.copy().model_fields_set == model.model_fields_set diff --git a/packages/testing/src/execution_testing/tools/tools_code/generators.py b/packages/testing/src/execution_testing/tools/tools_code/generators.py index 56ca9fb336..6d7985bfd9 100644 --- a/packages/testing/src/execution_testing/tools/tools_code/generators.py +++ b/packages/testing/src/execution_testing/tools/tools_code/generators.py @@ -7,7 +7,7 @@ from execution_testing.base_types import Bytes from execution_testing.test_types import ceiling_division -from execution_testing.vm import Bytecode, EVMCodeType, Op +from execution_testing.vm import Bytecode, Op GAS_PER_DEPLOYED_CODE_BYTE = 0xC8 @@ -200,7 +200,6 @@ def __new__( condition: Bytecode | Op, if_true: Bytecode | Op | None = None, if_false: Bytecode | Op | None = None, - evm_code_type: EVMCodeType = EVMCodeType.LEGACY, ) -> Self: """ Assemble the conditional bytecode by generating the necessary jump and @@ -214,29 +213,20 @@ def __new__( if if_false is None: if_false = Bytecode() - if evm_code_type == EVMCodeType.LEGACY: - # First we append a jumpdest to the start of the true branch - if_true = Op.JUMPDEST + if_true + # First we append a jumpdest to the start of the true branch + if_true = Op.JUMPDEST + if_true - # Then we append the unconditional jump to the end of the false - # branch, used to skip the true branch - if_false += Op.JUMP(Op.ADD(Op.PC, len(if_true) + 3)) + # Then we append the unconditional jump to the end of the false + # branch, used to skip the true branch + if_false += Op.JUMP(Op.ADD(Op.PC, len(if_true) + 3)) - # Then we need to do the conditional jump by skipping the false - # branch - condition = Op.JUMPI(Op.ADD(Op.PC, len(if_false) + 3), condition) + # Then we need to do the conditional jump by skipping the false + # branch + condition = Op.JUMPI(Op.ADD(Op.PC, len(if_false) + 3), condition) - # Finally we append the condition, false and true branches, plus - # the jumpdest at the very end - bytecode = condition + if_false + if_true + Op.JUMPDEST - - elif evm_code_type == EVMCodeType.EOF_V1: - if not if_false.terminating: - if_false += Op.RJUMP[len(if_true)] - condition = Op.RJUMPI[len(if_false)](condition) - - # Finally we append the condition, false and true branches - bytecode = condition + if_false + if_true + # Finally we append the condition, false and true branches, plus + # the jumpdest at the very end + bytecode = condition + if_false + if_true + Op.JUMPDEST return super().__new__(cls, bytecode) @@ -249,7 +239,6 @@ def __new__( *, body: Bytecode | Op, condition: Bytecode | Op | None = None, - evm_code_type: EVMCodeType = EVMCodeType.LEGACY, ) -> Self: """ Assemble the loop bytecode. @@ -257,18 +246,15 @@ def __new__( The condition nor the body can leave a stack item on the stack. 
""" bytecode = Bytecode() - if evm_code_type == EVMCodeType.LEGACY: - bytecode += Op.JUMPDEST - bytecode += body - if condition is not None: - bytecode += Op.JUMPI( - Op.SUB(Op.PC, Op.PUSH4[len(body) + len(condition) + 6]), - condition, - ) - else: - bytecode += Op.JUMP(Op.SUB(Op.PC, Op.PUSH4[len(body) + 6])) - elif evm_code_type == EVMCodeType.EOF_V1: - raise NotImplementedError("EOF while loops are not implemented") + bytecode += Op.JUMPDEST + bytecode += body + if condition is not None: + bytecode += Op.JUMPI( + Op.SUB(Op.PC, Op.PUSH4[len(body) + len(condition) + 6]), + condition, + ) + else: + bytecode += Op.JUMP(Op.SUB(Op.PC, Op.PUSH4[len(body) + 6])) return super().__new__(cls, bytecode) @@ -340,17 +326,11 @@ class Switch(Bytecode): evaluates to a non-zero value is the one that is executed. """ - evm_code_type: EVMCodeType - """ - The EVM code type to use for the switch-case bytecode. - """ - def __new__( cls, *, default_action: Bytecode | Op | None = None, cases: List[Case], - evm_code_type: EVMCodeType = EVMCodeType.LEGACY, ) -> Self: """ Assemble the bytecode by looping over the list of cases and adding the @@ -369,28 +349,11 @@ def __new__( # All conditions get prepended to this bytecode; if none are met, we # reach the default - if evm_code_type == EVMCodeType.LEGACY: - action_jump_length = ( - sum(len(case.action) + 6 for case in cases) + 3 - ) - bytecode = default_action + Op.JUMP( - Op.ADD(Op.PC, action_jump_length) - ) - # The length required to jump over the default action and its JUMP - # bytecode - condition_jump_length = len(bytecode) + 3 - elif evm_code_type == EVMCodeType.EOF_V1: - action_jump_length = sum( - len(case.action) - + (len(Op.RJUMP[0]) if not case.is_terminating else 0) - for case in cases - # On not terminating cases, we need to add 3 bytes for the - # RJUMP - ) - bytecode = default_action + Op.RJUMP[action_jump_length] - # The length required to jump over the default action and its JUMP - # bytecode - condition_jump_length = len(bytecode) + action_jump_length = sum(len(case.action) + 6 for case in cases) + 3 + bytecode = default_action + Op.JUMP(Op.ADD(Op.PC, action_jump_length)) + # The length required to jump over the default action and its JUMP + # bytecode + condition_jump_length = len(bytecode) + 3 # Reversed: first case in the list has priority; it will become the # outer-most onion layer. 
We build up layers around the default_action, @@ -411,23 +374,15 @@ def __new__( # + JUMPDEST + case[0].action + JUMP() for case in reversed(cases): action = case.action - if evm_code_type == EVMCodeType.LEGACY: - action_jump_length -= len(action) + 6 - action = ( - Op.JUMPDEST - + action - + Op.JUMP(Op.ADD(Op.PC, action_jump_length)) - ) - condition = Op.JUMPI( - Op.ADD(Op.PC, condition_jump_length), case.condition - ) - elif evm_code_type == EVMCodeType.EOF_V1: - action_jump_length -= len(action) + ( - len(Op.RJUMP[0]) if not case.is_terminating else 0 - ) - if not case.is_terminating: - action += Op.RJUMP[action_jump_length] - condition = Op.RJUMPI[condition_jump_length](case.condition) + action_jump_length -= len(action) + 6 + action = ( + Op.JUMPDEST + + action + + Op.JUMP(Op.ADD(Op.PC, action_jump_length)) + ) + condition = Op.JUMPI( + Op.ADD(Op.PC, condition_jump_length), case.condition + ) # wrap the current case around the onion as its next layer bytecode = condition + bytecode + action condition_jump_length += len(condition) + len(action) diff --git a/packages/testing/src/execution_testing/tools/utility/generators.py b/packages/testing/src/execution_testing/tools/utility/generators.py index ef50b67ec7..003225f7dc 100644 --- a/packages/testing/src/execution_testing/tools/utility/generators.py +++ b/packages/testing/src/execution_testing/tools/utility/generators.py @@ -458,7 +458,7 @@ def gas_test( tx_gas: int | None = None, ) -> None: """ - Create State Test to check the gas cost of a sequence of EOF code. + Create State Test to check the gas cost of a sequence of code. `setup_code` and `tear_down_code` are called multiple times during the test, and MUST NOT have any side-effects which persist across message diff --git a/packages/testing/src/execution_testing/vm/__init__.py b/packages/testing/src/execution_testing/vm/__init__.py index 3f9332ac62..40f4410fe0 100644 --- a/packages/testing/src/execution_testing/vm/__init__.py +++ b/packages/testing/src/execution_testing/vm/__init__.py @@ -6,7 +6,6 @@ OpcodeGasCalculator, ) from .bytecode import Bytecode -from .evm_types import EVMCodeType from .helpers import MemoryVariable, call_return_code from .opcodes import ( Macro, @@ -22,7 +21,6 @@ __all__ = ( "Bytecode", - "EVMCodeType", "ForkOpcodeInterface", "Macro", "Macros", diff --git a/packages/testing/src/execution_testing/vm/evm_types.py b/packages/testing/src/execution_testing/vm/evm_types.py deleted file mode 100644 index 992f580e89..0000000000 --- a/packages/testing/src/execution_testing/vm/evm_types.py +++ /dev/null @@ -1,16 +0,0 @@ -"""EVM types definitions.""" - -from enum import Enum - - -class EVMCodeType(str, Enum): - """ - Enum representing the type of EVM code that is supported in a given fork. 
- """ - - LEGACY = "legacy" - EOF_V1 = "eof_v1" - - def __str__(self) -> str: - """Return the name of the EVM code type.""" - return self.name diff --git a/packages/testing/src/execution_testing/vm/helpers.py b/packages/testing/src/execution_testing/vm/helpers.py index 9547325813..9cf6c67e3e 100644 --- a/packages/testing/src/execution_testing/vm/helpers.py +++ b/packages/testing/src/execution_testing/vm/helpers.py @@ -93,16 +93,8 @@ def return_value(self) -> Bytecode: return Op.RETURN(offset=self.offset, size=32) -def call_return_code( - opcode: Op, success: bool, *, revert: bool = False -) -> int: +def call_return_code(opcode: Op, success: bool) -> int: """Return return code for a CALL operation.""" if opcode in [Op.CALL, Op.CALLCODE, Op.DELEGATECALL, Op.STATICCALL]: return int(success) - elif opcode in [Op.EXTCALL, Op.EXTDELEGATECALL, Op.EXTSTATICCALL]: - if success: - return 0 - if revert: - return 1 - return 2 raise ValueError(f"Not a call opcode: {opcode}") diff --git a/packages/testing/src/execution_testing/vm/opcodes.py b/packages/testing/src/execution_testing/vm/opcodes.py index 7d79d28c96..3fa693e2cc 100644 --- a/packages/testing/src/execution_testing/vm/opcodes.py +++ b/packages/testing/src/execution_testing/vm/opcodes.py @@ -511,94 +511,6 @@ def __call__(self, *args_t: OpcodeCallArg, **kwargs: Any) -> Bytecode: return pre_opcode_bytecode + self -# Constants - -RJUMPV_MAX_INDEX_BYTE_LENGTH = 1 -RJUMPV_BRANCH_OFFSET_BYTE_LENGTH = 2 - - -# TODO: Allowing Iterable here is a hacky way to support `range`, -# because Python 3.11+ will allow `Op.RJUMPV[*range(5)]`. -# This is a temporary solution until Python 3.11+ is the minimum required -# version. - - -def _rjumpv_encoder(*args: int | bytes | Iterable[int]) -> bytes: - if len(args) == 1: - if isinstance(args[0], bytes) or isinstance(args[0], SupportsBytes): - return bytes(args[0]) - elif isinstance(args[0], Iterable): - int_args = list(args[0]) - return b"".join( - [ - (len(int_args) - 1).to_bytes( - RJUMPV_MAX_INDEX_BYTE_LENGTH, "big" - ) - ] - + [ - i.to_bytes( - RJUMPV_BRANCH_OFFSET_BYTE_LENGTH, "big", signed=True - ) - for i in int_args - ] - ) - return b"".join( - [(len(args) - 1).to_bytes(RJUMPV_MAX_INDEX_BYTE_LENGTH, "big")] - + [ - i.to_bytes(RJUMPV_BRANCH_OFFSET_BYTE_LENGTH, "big", signed=True) - for i in args - if isinstance(i, int) - ] - ) - - -def _exchange_encoder(*args: int) -> bytes: - assert 1 <= len(args) <= 2, ( - f"Exchange opcode requires one or two arguments, got {len(args)}" - ) - if len(args) == 1: - return int.to_bytes(args[0], 1, "big") - # n = imm >> 4 + 1 - # m = imm & 0xF + 1 - # x = n + 1 - # y = n + m + 1 - # ... 
- # n = x - 1 - # m = y - x - # m = y - n - 1 - x, y = args - assert 2 <= x <= 0x11 - assert x + 1 <= y <= x + 0x10 - n = x - 1 - m = y - x - imm = (n - 1) << 4 | m - 1 - return int.to_bytes(imm, 1, "big") - - -def _swapn_stack_properties_modifier(data: bytes) -> tuple[int, int, int, int]: - imm = int.from_bytes(data, "big") - n = imm + 1 - min_stack_height = n + 1 - return 0, 0, min_stack_height, min_stack_height - - -def _dupn_stack_properties_modifier(data: bytes) -> tuple[int, int, int, int]: - imm = int.from_bytes(data, "big") - n = imm + 1 - min_stack_height = n - return 0, 1, min_stack_height, min_stack_height + 1 - - -def _exchange_stack_properties_modifier( - data: bytes, -) -> tuple[int, int, int, int]: - imm = int.from_bytes(data, "big") - n = (imm >> 4) + 1 - m = (imm & 0x0F) + 1 - min_stack_height = n + m + 1 - return 0, 0, min_stack_height, min_stack_height - - class Opcodes(Opcode, Enum): """ Enum containing all known opcodes. @@ -5168,538 +5080,6 @@ class Opcodes(Opcode, Enum): Source: [evm.codes/#A4](https://www.evm.codes/#A4) """ - RJUMP = Opcode(0xE0, data_portion_length=2) - """ - !!! Note: This opcode is under development - - RJUMP() - ---- - - Description - ---- - - Inputs - ---- - - Outputs - ---- - - Fork - ---- - EOF Fork - - Gas - ---- - - Source: [EIP-4200](https://eips.ethereum.org/EIPS/eip-4200) - """ - - DATALOAD = Opcode( - 0xD0, popped_stack_items=1, pushed_stack_items=1, kwargs=["offset"] - ) - """ - !!! Note: This opcode is under development - - DATALOAD(offset) - ---- - - Description - ---- - Reads 32 bytes of data at offset onto the stack - - Inputs - ---- - - offset: offset within the data section to start copying - - Outputs - ---- - none - - Fork - ---- - EOF Fork - - Gas - ---- - 4 - - Source: [EIP-7480](https://eips.ethereum.org/EIPS/eip-7480) - """ - - DATALOADN = Opcode(0xD1, pushed_stack_items=1, data_portion_length=2) - """ - !!! Note: This opcode is under development - - DATALOADN() - ---- - - Description - ---- - Reads 32 bytes of data at offset onto the stack - - Immediates - ---- - 2 bytes forming a UInt16, which is the offset into the data section. - - Inputs - ---- - none - - Outputs - ---- - none - - Fork - ---- - EOF Fork - - Gas - ---- - 3 - - Source: [EIP-7480](https://eips.ethereum.org/EIPS/eip-7480) - """ - - DATASIZE = Opcode(0xD2, pushed_stack_items=1) - """ - !!! Note: This opcode is under development - - DATASIZE() - ---- - - Description - ---- - Returns the size of the data section - - Inputs - ---- - - Outputs - ---- - The size of the data section. If there is no data section, returns 0. - - Fork - ---- - EOF Fork - - Gas - ---- - 2 - - Source: [EIP-7480](https://eips.ethereum.org/EIPS/eip-7480) - """ - - DATACOPY = Opcode( - 0xD3, popped_stack_items=3, kwargs=["dest_offset", "offset", "size"] - ) - """ - !!! Note: This opcode is under development - - DATACOPY(dest_offset, offset, size) - ---- - - Description - ---- - Copies data from the data section into call frame memory - - Inputs - ---- - - dest_offset: The offset within the memory section to start copying to - - offset: The offset within the data section to start copying from - - size: The number of bytes to copy - - Outputs - ---- - none - - Fork - ---- - EOF Fork - - Gas - ---- - - minimum_word_size = (size + 31) / 32 - - static_gas = 3 - - dynamic_gas = 3 * minimum_word_size + memory_expansion_cost - - Source: [EIP-7480](https://eips.ethereum.org/EIPS/eip-7480) - """ - - RJUMPI = Opcode(0xE1, popped_stack_items=1, data_portion_length=2) - """ - !!! 
Note: This opcode is under development - - RJUMPI() - ---- - - Description - ---- - - Inputs - ---- - - Outputs - ---- - - Fork - ---- - EOF Fork - - Gas - ---- - - Source: [EIP-4200](https://eips.ethereum.org/EIPS/eip-4200) - """ - - RJUMPV = Opcode( - 0xE2, - popped_stack_items=1, - data_portion_formatter=_rjumpv_encoder, - ) - """ - !!! Note: This opcode is under development - - RJUMPV() - ---- - - Description - ---- - Relative jump with variable offset. - - When calling this opcode to generate bytecode, the first argument is - used to format the data portion of the opcode, and it can be either - of two types: - - A bytes type, and in this instance the bytes are used verbatim - as the data portion. - - An integer iterable, list or tuple or any other iterable, where - each element is a jump offset. - - Inputs - ---- - - Outputs - ---- - - Fork - ---- - EOF Fork - - Gas - ---- - - Source: [EIP-4200](https://eips.ethereum.org/EIPS/eip-4200) - """ - - CALLF = Opcode(0xE3, data_portion_length=2, unchecked_stack=True) - """ - !!! Note: This opcode is under development - - CALLF() - ---- - - Description - ---- - - - deduct 5 gas - - read uint16 operand idx - - if 1024 < len(stack) + types[idx].max_stack_height - types[idx].inputs, - execution results in an exceptional halt - - if 1024 <= len(return_stack), execution results in an exceptional halt - - push new element to return_stack (current_code_idx, pc+3) - - update current_code_idx to idx and set pc to 0 - - Inputs - ---- - Any: The inputs are not checked because we cannot know how many inputs - the callee function/section requires - - Outputs - ---- - Any: The outputs are variable because we cannot know how many outputs the - callee function/section produces - - Fork - ---- - EOF Fork - - Gas - ---- - 5 - - Source: - [`eof.md`](https://github.com/ipsilon/eof/blob/main/spec/eof.md) - """ - - RETF = Opcode(0xE4, terminating=True) - """ - !!! Note: This opcode is under development - - RETF() - ---- - - Description - ---- - - Inputs - ---- - - Outputs - ---- - - Fork - ---- - EOF Fork - - Gas - ---- - 3 - """ - - JUMPF = Opcode( - 0xE5, data_portion_length=2, terminating=True, unchecked_stack=True - ) - """ - !!! Note: This opcode is under development - - JUMPF() - ---- - - Description - ---- - - - deduct 5 gas - - read uint16 operand idx - - if 1024 < len(stack) + types[idx].max_stack_height - types[idx].inputs, - execution results in an exceptional halt - - set current_code_idx to idx - - set pc = 0 - - - Inputs - ---- - - Outputs - ---- - - Fork - ---- - EOF Fork - - Gas - ---- - 5 - - """ - - DUPN = Opcode( - 0xE6, - pushed_stack_items=1, - data_portion_length=1, - stack_properties_modifier=_dupn_stack_properties_modifier, - ) - """ - !!! Note: This opcode is under development - - DUPN() - ---- - - Description - ---- - - - deduct 3 gas - - read uint8 operand imm - - n = imm + 1 - - n‘th (1-based) stack item is duplicated at the top of the stack - - Stack validation: stack_height >= n - - - Inputs - ---- - - Outputs - ---- - - Fork - ---- - EOF Fork - - Gas - ---- - - """ - - SWAPN = Opcode( - 0xE7, - data_portion_length=1, - stack_properties_modifier=_swapn_stack_properties_modifier, - ) - """ - !!! Note: This opcode is under development - - SWAPN() - ---- - - Description - ---- - - - deduct 3 gas - - read uint8 operand imm - - n = imm + 1 - - n + 1th stack item is swapped with the top stack item (1-based). 
- - Stack validation: stack_height >= n + 1 - - - Inputs - ---- - - Outputs - ---- - - Fork - ---- - EOF Fork - - Gas - ---- - - """ - - EXCHANGE = Opcode( - 0xE8, - data_portion_length=1, - data_portion_formatter=_exchange_encoder, - stack_properties_modifier=_exchange_stack_properties_modifier, - ) - """ - !!! Note: This opcode is under development - - EXCHANGE[x, y] - ---- - - Description - ---- - Exchanges two stack positions. Two nybbles, n is high 4 bits + 1, - then m is 4 low bits + 1. - Exchanges the n+1'th item with the n + m + 1 item. - - Inputs x and y when the opcode is used as `EXCHANGE[x, y]`, are equal to: - - x = n + 1 - - y = n + m + 1 - Which each equals to 1-based stack positions swapped. - - Inputs - ---- - n + m + 1, or ((imm >> 4) + (imm &0x0F) + 3) from the raw immediate, - - Outputs - ---- - n + m + 1, or ((imm >> 4) + (imm &0x0F) + 3) from the raw immediate, - - Fork - ---- - EOF_FORK - - Gas - ---- - 3 - - """ - - EOFCREATE = Opcode( - 0xEC, - popped_stack_items=4, - pushed_stack_items=1, - data_portion_length=1, - kwargs=["salt", "input_offset", "input_size", "value"], - ) - """ - !!! Note: This opcode is under development - - EOFCREATE[initcontainer_index] (salt, input_offset, input_size, value) - ---- - - Description - ---- - - Inputs - ---- - - Outputs - ---- - - Fork - ---- - - Gas - ---- - - """ - - TXCREATE = Opcode( - 0xED, - popped_stack_items=5, - pushed_stack_items=1, - kwargs=[ - "tx_initcode_hash", - "salt", - "input_offset", - "input_size", - "value", - ], - ) - """ - !!! Note: This opcode is under development - - TXCREATE (tx_initcode_hash, salt, input_offset, input_size, value) - ---- - - Description - ---- - - Inputs - ---- - - Outputs - ---- - - Fork - ---- - - Gas - ---- - - """ - - RETURNCODE = Opcode( - 0xEE, - popped_stack_items=2, - data_portion_length=1, - terminating=True, - kwargs=["auxdata_offset", "auxdata_size"], - ) - """ - !!! Note: This opcode is under development - - RETURNCODE() - ---- - - Description - ---- - - Inputs - ---- - - Outputs - ---- - - Fork - ---- - - Gas - ---- - - """ - CREATE = Opcode( 0xF0, popped_stack_items=3, @@ -6092,95 +5472,6 @@ class Opcodes(Opcode, Enum): Source: [evm.codes/#F5](https://www.evm.codes/#F5) """ - EXTCALL = Opcode( - 0xF8, - popped_stack_items=4, - pushed_stack_items=1, - kwargs=["address", "args_offset", "args_size", "value"], - ) - """ - EXTCALL(address, args_offset, args_size, value) = address - ---- - - Description - ---- - Message-call into an account - - Inputs - ---- - - address: the account which context to execute - - args_offset: byte offset in the memory in bytes, the calldata of - the sub context - - args_size: byte size to copy (size of the calldata) - - value: value in wei to send to the account - - Outputs - ---- - - success: - - `0` if the call was successful. - - `1` if the call has reverted (also can be pushed earlier in a - light failure scenario). - - `2` if the call has failed. 
- - Fork - ---- - Prague - - Gas - ---- - ``` - static_gas = 0 - dynamic_gas = memory_expansion_cost + code_execution_cost + - address_access_cost + positive_value_cost + - value_to_empty_account_cost - ``` - - Source: [EIP-7069](https://eips.ethereum.org/EIPS/eip-7069) - """ - - EXTDELEGATECALL = Opcode( - 0xF9, - popped_stack_items=3, - pushed_stack_items=1, - kwargs=["address", "args_offset", "args_size"], - ) - """ - EXTDELEGATECALL(address, args_offset, args_size) = address - ---- - - Description - ---- - Message-call into this account with an alternative account's code, - but persisting the current values for sender and value - - Inputs - ---- - - address: the account which context to execute - - args_offset: byte offset in the memory in bytes, the calldata of - the sub context - - args_size: byte size to copy (size of the calldata) - - Outputs - ---- - - success: - - `0` if the call was successful. - - `1` if the call has reverted (also can be pushed earlier in a - light failure scenario). - - `2` if the call has failed. - - Fork - ---- - Prague - - Gas - ---- - - static_gas = 0 - - dynamic_gas = memory_expansion_cost + code_execution_cost + - address_access_cost - - Source: [EIP-7069](https://eips.ethereum.org/EIPS/eip-7069) - """ - STATICCALL = Opcode( 0xFA, popped_stack_items=6, @@ -6252,73 +5543,6 @@ class Opcodes(Opcode, Enum): Source: [evm.codes/#FA](https://www.evm.codes/#FA) """ - EXTSTATICCALL = Opcode( - 0xFB, - popped_stack_items=3, - pushed_stack_items=1, - kwargs=["address", "args_offset", "args_size"], - ) - """ - EXTSTATICCALL(address, args_offset, args_size) = address - ---- - - Description - ---- - Static message-call into an account - - Inputs - ---- - - address: the account which context to execute - - args_offset: byte offset in the memory in bytes, the calldata - of the sub context - - args_size: byte size to copy (size of the calldata) - - Outputs - ---- - - success: - - `0` if the call was successful. - - `1` if the call has reverted (also can be pushed earlier in a - light failure scenario). - - `2` if the call has failed. 
- - Fork - ---- - Prague - - Gas - ---- - - static_gas = 0 - - dynamic_gas = memory_expansion_cost + code_execution_cost + - address_access_cost - - Source: [EIP-7069](https://eips.ethereum.org/EIPS/eip-7069) - """ - - RETURNDATALOAD = Opcode( - 0xF7, popped_stack_items=1, pushed_stack_items=1, kwargs=["offset"] - ) - """ - RETURNDATALOAD(offset) - ---- - - Description - ---- - Copy 32 bytes from returndata at offset onto the stack - - Inputs - ---- - - offset: byte offset in the return data from the last executed - sub context to copy - - Fork - ---- - EOF - - Gas - ---- - 3 - """ - REVERT = Opcode( 0xFD, popped_stack_items=2, @@ -6547,7 +5771,7 @@ class Macros(Macro, Enum): class UndefinedOpcodes(Opcode, Enum): - """Enum containing all unknown opcodes (86 at the moment).""" + """Enum containing all unknown opcodes (107 at the moment).""" OPCODE_0C = Opcode(0x0C) OPCODE_0D = Opcode(0x0D) @@ -6617,6 +5841,10 @@ class UndefinedOpcodes(Opcode, Enum): OPCODE_CD = Opcode(0xCD) OPCODE_CE = Opcode(0xCE) OPCODE_CF = Opcode(0xCF) + OPCODE_D0 = Opcode(0xD0) + OPCODE_D1 = Opcode(0xD1) + OPCODE_D2 = Opcode(0xD2) + OPCODE_D3 = Opcode(0xD3) OPCODE_D4 = Opcode(0xD4) OPCODE_D5 = Opcode(0xD5) OPCODE_D6 = Opcode(0xD6) @@ -6629,9 +5857,25 @@ class UndefinedOpcodes(Opcode, Enum): OPCODE_DD = Opcode(0xDD) OPCODE_DE = Opcode(0xDE) OPCODE_DF = Opcode(0xDF) + OPCODE_E0 = Opcode(0xE0) + OPCODE_E1 = Opcode(0xE1) + OPCODE_E2 = Opcode(0xE2) + OPCODE_E3 = Opcode(0xE3) + OPCODE_E4 = Opcode(0xE4) + OPCODE_E5 = Opcode(0xE5) + OPCODE_E6 = Opcode(0xE6) + OPCODE_E7 = Opcode(0xE7) + OPCODE_E8 = Opcode(0xE8) OPCODE_E9 = Opcode(0xE9) OPCODE_EA = Opcode(0xEA) OPCODE_EB = Opcode(0xEB) + OPCODE_EC = Opcode(0xEC) + OPCODE_ED = Opcode(0xED) + OPCODE_EE = Opcode(0xEE) OPCODE_EF = Opcode(0xEF) OPCODE_F6 = Opcode(0xF6) + OPCODE_F7 = Opcode(0xF7) + OPCODE_F8 = Opcode(0xF8) + OPCODE_F9 = Opcode(0xF9) + OPCODE_FB = Opcode(0xFB) OPCODE_FC = Opcode(0xFC) diff --git a/packages/testing/src/execution_testing/vm/tests/test_vm.py b/packages/testing/src/execution_testing/vm/tests/test_vm.py index b9e9c6bf89..5e30845f86 100644 --- a/packages/testing/src/execution_testing/vm/tests/test_vm.py +++ b/packages/testing/src/execution_testing/vm/tests/test_vm.py @@ -131,87 +131,6 @@ bytes([0x64, 0x17, 0x48, 0x76, 0xE8, 0x00, 0x60, 0x00, 0x20]), id="OOG()", ), - pytest.param( - Op.RJUMPV[1, 2, 3](Op.ORIGIN), - bytes( - [ - Op.ORIGIN.int(), - Op.RJUMPV.int(), - 0x02, # Data portion, defined by the [1, 2, 3] argument - 0x00, - 0x01, - 0x00, - 0x02, - 0x00, - 0x03, - ] - ), - id="RJUMPV[1, 2, 3](ORIGIN)", - ), - pytest.param( - Op.RJUMPV[b"\x00"], - bytes( - [ - Op.RJUMPV.int(), - 0x00, - ] - ), - id="RJUMPV[b'\\x00']", - ), - pytest.param( - Op.RJUMPV[-1, -2, -3], - bytes( - [ - Op.RJUMPV.int(), - 0x02, - 0xFF, - 0xFF, - 0xFF, - 0xFE, - 0xFF, - 0xFD, - ] - ), - id="RJUMPV[-1, -2, -3]", - ), - pytest.param( - Op.RJUMPV[range(5)], # TODO: on Python 3.11+: Op.RJUMPV[*range(5)] - bytes( - [ - Op.RJUMPV.int(), - 0x04, - 0x00, - 0x00, - 0x00, - 0x01, - 0x00, - 0x02, - 0x00, - 0x03, - 0x00, - 0x04, - ] - ), - id="RJUMPV[range(5)]", - ), - pytest.param( - Op.RJUMPV[1, 2, 3](Op.ORIGIN) + Op.STOP, - bytes( - [ - Op.ORIGIN.int(), - Op.RJUMPV.int(), - 0x02, # Data portion, defined by the [1, 2, 3] argument - 0x00, - 0x01, - 0x00, - 0x02, - 0x00, - 0x03, - Op.STOP.int(), - ] - ), - id="RJUMPV[1, 2, 3](ORIGIN) + STOP", - ), pytest.param( Op.STOP * 2, bytes( @@ -222,39 +141,6 @@ ), id="STOP * 2", ), - pytest.param( - Op.RJUMPV[0, 3, 6, 9], - 
bytes.fromhex("e2030000000300060009"), - id="RJUMPV[0, 3, 6, 9]", - ), - pytest.param( - Op.RJUMPV[2, 0], bytes.fromhex("e20100020000"), id="RJUMPV[2, 0]" - ), - pytest.param( - Op.RJUMPV[b"\x02\x00\x02\xff\xff"], - bytes.fromhex("e2020002ffff"), - id="RJUMPV[b'\\x02\\x00\\x02\\xFF\\xFF']", - ), - pytest.param( - Op.EXCHANGE[0x2 + 0x0, 0x3 + 0x0], - bytes.fromhex("e800"), - id="EXCHANGE[0x2 + 0x0, 0x3 + 0x0]", - ), - pytest.param( - Op.EXCHANGE[0x2 + 0x0, 0x3 + 0xF], - bytes.fromhex("e80f"), - id="EXCHANGE[0x2 + 0x0, 0x3 + 0xF]", - ), - pytest.param( - Op.EXCHANGE[0x2 + 0xF, 0x3 + 0xF + 0x0], - bytes.fromhex("e8f0"), - id="EXCHANGE[0x2 + 0xF, 0x3 + 0xF + 0x0]", - ), - pytest.param( - Op.EXCHANGE[0x2 + 0xF, 0x3 + 0xF + 0xF], - bytes.fromhex("e8ff"), - id="EXCHANGE[0x2 + 0xF, 0x3 + 0xF + 0xF]", - ), pytest.param(Op.PUSH0 * 0, bytes(), id="PUSH0 * 0"), pytest.param( Op.CREATE(value=1, offset=2, size=3), @@ -286,21 +172,6 @@ b"\x60\x00\x60\x00\x60\x00\x60\x00\x60\x01\x5a\xf4", id="Op.DELEGATECALL(address=1)", ), - pytest.param( - Op.EXTCALL(address=1), - b"\x60\x00\x60\x00\x60\x00\x60\x01\xf8", - id="Op.EXTCALL(address=1)", - ), - pytest.param( - Op.EXTSTATICCALL(address=1), - b"\x60\x00\x60\x00\x60\x01\xfb", - id="Op.EXTSTATICCALL(address=1)", - ), - pytest.param( - Op.EXTDELEGATECALL(address=1), - b"\x60\x00\x60\x00\x60\x01\xf9", - id="Op.EXTDELEGATECALL(address=1)", - ), pytest.param( Om.MSTORE(b""), b"", @@ -360,8 +231,6 @@ def test_opcodes_repr() -> None: assert f"{Op.DELEGATECALL}" == "DELEGATECALL" assert f"{Om.OOG}" == "OOG" assert str(Op.ADD) == "ADD" - assert f"{Op.DUPN[1]}" == "DUPN_0x01" - assert f"{Op.DATALOADN[1]}" == "DATALOADN_0x0001" def test_macros() -> None: diff --git a/tests/cancun/eip1153_tstore/test_tstorage_clear_after_tx.py b/tests/cancun/eip1153_tstore/test_tstorage_clear_after_tx.py index a07fb973bb..f8850f50c4 100644 --- a/tests/cancun/eip1153_tstore/test_tstorage_clear_after_tx.py +++ b/tests/cancun/eip1153_tstore/test_tstorage_clear_after_tx.py @@ -1,7 +1,5 @@ """EIP-1153 Transient Storage tests.""" -from typing import Optional - import pytest from execution_testing import ( Account, @@ -9,12 +7,10 @@ Block, BlockchainTestFiller, Environment, - EVMCodeType, Initcode, Op, Transaction, ) -from execution_testing.test_types.eof.v1 import Container from .spec import ref_spec_1153 @@ -23,11 +19,9 @@ @pytest.mark.valid_from("Cancun") -@pytest.mark.with_all_evm_code_types def test_tstore_clear_after_deployment_tx( blockchain_test: BlockchainTestFiller, pre: Alloc, - evm_code_type: EVMCodeType, ) -> None: """ First creates a contract, which TSTOREs a value 1 in slot 1. 
After creating @@ -40,14 +34,7 @@ def test_tstore_clear_after_deployment_tx( init_code = Op.TSTORE(1, 1) deploy_code = Op.SSTORE(1, Op.TLOAD(1)) - code: Optional[Container | Initcode] = None - if evm_code_type == EVMCodeType.EOF_V1: - code = Container.Init( - deploy_container=Container.Code(deploy_code + Op.STOP), - initcode_prefix=init_code, - ) - else: - code = Initcode(deploy_code=deploy_code, initcode_prefix=init_code) + code = Initcode(deploy_code=deploy_code, initcode_prefix=init_code) sender = pre.fund_eoa() @@ -76,7 +63,6 @@ def test_tstore_clear_after_deployment_tx( @pytest.mark.valid_from("Cancun") -@pytest.mark.with_all_evm_code_types def test_tstore_clear_after_tx( blockchain_test: BlockchainTestFiller, pre: Alloc, diff --git a/tests/cancun/eip4844_blobs/test_point_evaluation_precompile.py b/tests/cancun/eip4844_blobs/test_point_evaluation_precompile.py index fb7107c1d2..8a0e26aebf 100644 --- a/tests/cancun/eip4844_blobs/test_point_evaluation_precompile.py +++ b/tests/cancun/eip4844_blobs/test_point_evaluation_precompile.py @@ -210,14 +210,6 @@ def success( call_opcode: Op, ) -> bool: """Prepare expected success or failure for each test.""" - if call_opcode == Op.EXTDELEGATECALL: - return False - if result == Result.OUT_OF_GAS and call_opcode in [ - Op.EXTCALL, - Op.EXTSTATICCALL, - ]: - return True - return result == Result.SUCCESS @@ -235,7 +227,7 @@ def post( expected_storage: Storage.StorageDictType = {} # CALL operation return code expected_storage[key_call_return_code] = call_return_code( - call_opcode, success, revert=call_opcode == Op.EXTDELEGATECALL + call_opcode, success ) if success: # Success return values @@ -256,10 +248,6 @@ def post( expected_storage[key_return_2] = precompile_input[32:64] expected_storage[key_return_copy_1] = expected_storage[1] expected_storage[key_return_copy_2] = expected_storage[2] - if call_opcode in [Op.EXTCALL, Op.EXTSTATICCALL, Op.EXTDELEGATECALL]: - # Input parameters were not overwritten - expected_storage[key_return_1] = precompile_input[0:32] - expected_storage[key_return_2] = precompile_input[32:64] return { precompile_caller_address: Account( storage=expected_storage, diff --git a/tests/cancun/eip5656_mcopy/test_mcopy.py b/tests/cancun/eip5656_mcopy/test_mcopy.py index 8921ddb2fb..1008774619 100644 --- a/tests/cancun/eip5656_mcopy/test_mcopy.py +++ b/tests/cancun/eip5656_mcopy/test_mcopy.py @@ -181,7 +181,6 @@ def post(code_address: Address, code_storage: Storage) -> Mapping: # noqa: D103 "out_of_bounds_memory_extension", ], ) -@pytest.mark.with_all_evm_code_types @pytest.mark.valid_from("Cancun") def test_valid_mcopy_operations( state_test: StateTestFiller, @@ -210,7 +209,6 @@ def test_valid_mcopy_operations( @pytest.mark.parametrize("src", [0x00, 0x20]) @pytest.mark.parametrize("length", [0x00, 0x01]) @pytest.mark.parametrize("initial_memory", [bytes()], ids=["empty_memory"]) -@pytest.mark.with_all_evm_code_types @pytest.mark.valid_from("Cancun") def test_mcopy_on_empty_memory( state_test: StateTestFiller, diff --git a/tests/cancun/eip5656_mcopy/test_mcopy_contexts.py b/tests/cancun/eip5656_mcopy/test_mcopy_contexts.py index e1ac0f86d6..1bacbb0a9c 100644 --- a/tests/cancun/eip5656_mcopy/test_mcopy_contexts.py +++ b/tests/cancun/eip5656_mcopy/test_mcopy_contexts.py @@ -53,7 +53,7 @@ def callee_bytecode( bytecode += Op.MCOPY(0x00, initial_memory_length * 2, 1) bytecode += Op.MCOPY(initial_memory_length * 2, 0x00, 1) - if call_opcode != Op.STATICCALL and call_opcode != Op.EXTSTATICCALL: + if call_opcode != Op.STATICCALL: # Simple 
sstore to make sure we actually ran the code bytecode += Op.SSTORE(200_000, 1) @@ -154,9 +154,9 @@ def post( # noqa: D103 call_opcode: Op, ) -> Mapping: callee_storage: Storage.StorageDictType = {} - if call_opcode in [Op.DELEGATECALL, Op.CALLCODE, Op.EXTDELEGATECALL]: + if call_opcode in [Op.DELEGATECALL, Op.CALLCODE]: caller_storage[200_000] = 1 - elif call_opcode in [Op.CALL, Op.EXTCALL]: + elif call_opcode == Op.CALL: callee_storage[200_000] = 1 return { caller_address: Account(storage=caller_storage), @@ -203,8 +203,6 @@ def test_no_memory_corruption_on_upper_create_stack_levels( during its execution, and verify that the caller's memory is unaffected: - `CREATE` - `CREATE2`. - - TODO: [EOF] Add EOFCREATE opcode """ state_test( env=Environment(), diff --git a/tests/cancun/eip5656_mcopy/test_mcopy_memory_expansion.py b/tests/cancun/eip5656_mcopy/test_mcopy_memory_expansion.py index 0d49e55766..7dcc5fecc5 100644 --- a/tests/cancun/eip5656_mcopy/test_mcopy_memory_expansion.py +++ b/tests/cancun/eip5656_mcopy/test_mcopy_memory_expansion.py @@ -190,7 +190,6 @@ def post( # noqa: D103 "from_empty_memory", ], ) -@pytest.mark.with_all_evm_code_types @pytest.mark.valid_from("Cancun") def test_mcopy_memory_expansion( state_test: StateTestFiller, @@ -248,7 +247,6 @@ def test_mcopy_memory_expansion( "from_empty_memory", ], ) -@pytest.mark.with_all_evm_code_types @pytest.mark.valid_from("Cancun") def test_mcopy_huge_memory_expansion( state_test: StateTestFiller, diff --git a/tests/frontier/opcodes/test_dup.py b/tests/frontier/opcodes/test_dup.py index 210c744072..c75a295395 100644 --- a/tests/frontier/opcodes/test_dup.py +++ b/tests/frontier/opcodes/test_dup.py @@ -35,7 +35,6 @@ ], ids=lambda op: str(op), ) -@pytest.mark.with_all_evm_code_types def test_dup( state_test: StateTestFiller, fork: Fork, diff --git a/tests/frontier/scenarios/scenarios/call_combinations.py b/tests/frontier/scenarios/scenarios/call_combinations.py index f58f8a12d3..d1660285f8 100644 --- a/tests/frontier/scenarios/scenarios/call_combinations.py +++ b/tests/frontier/scenarios/scenarios/call_combinations.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from typing import List -from execution_testing import Address, Alloc, EVMCodeType, Op, Opcode +from execution_testing import Address, Alloc, Op, Opcode from ..common import Scenario, ScenarioEnvironment, ScenarioGeneratorInput @@ -41,11 +41,7 @@ class AddressBalance: def __init__(self, scenario_input: ScenarioGeneratorInput): """Define possible call combinations given the fork.""" - self.first_call_opcodes = [ - callcode - for callcode, evm_type in scenario_input.fork.call_opcodes() - if evm_type == EVMCodeType.LEGACY - ] + self.first_call_opcodes = list(scenario_input.fork.call_opcodes()) self.second_call_opcodes = self.first_call_opcodes[:] self.second_call_opcodes.append(Op.NOOP) self.scenario_input = scenario_input diff --git a/tests/frontier/scenarios/scenarios/create_combinations.py b/tests/frontier/scenarios/scenarios/create_combinations.py index 4b3ace9aaa..bfa247ca1d 100644 --- a/tests/frontier/scenarios/scenarios/create_combinations.py +++ b/tests/frontier/scenarios/scenarios/create_combinations.py @@ -6,7 +6,6 @@ from execution_testing import ( Alloc, Bytecode, - EVMCodeType, Op, Opcode, compute_create_address, @@ -49,11 +48,7 @@ def _compute_selfbalance() -> int: scenarios_list: List[Scenario] = [] keep_gas = 100000 - create_types: List[Opcode] = [ - create_code - for create_code, evm_type in scenario_input.fork.create_opcodes() - if evm_type == 
EVMCodeType.LEGACY - ] + create_types: List[Opcode] = list(scenario_input.fork.create_opcodes()) env: ScenarioEnvironment balance: AddressBalance = AddressBalance() @@ -111,11 +106,7 @@ def _compute_selfbalance() -> int: + Op.RETURN(0, Op.EXTCODESIZE(operation_contract)) ) deploy_code_size: int = int(len(deploy_code.hex()) / 2) - call_types: List[Opcode] = [ - callcode - for callcode, evm_type in scenario_input.fork.call_opcodes() - if evm_type == EVMCodeType.LEGACY - ] + call_types: List[Opcode] = list(scenario_input.fork.call_opcodes()) pre: Alloc = scenario_input.pre for create in create_types: diff --git a/tests/paris/security/test_selfdestruct_balance_bug.py b/tests/paris/security/test_selfdestruct_balance_bug.py index e0bec66e12..994003c2c5 100644 --- a/tests/paris/security/test_selfdestruct_balance_bug.py +++ b/tests/paris/security/test_selfdestruct_balance_bug.py @@ -54,8 +54,6 @@ def test_tx_selfdestruct_balance_bug( - The balances of `0xaa` after each tx are correct. - During tx 2, code in `0xaa` does not execute, hence self-destruct mechanism does not trigger. - - TODO: EOF - This test could be parametrized for EOFCREATE """ deploy_code = Switch( default_action=Op.REVERT(0, 0), diff --git a/tests/prague/eip7702_set_code_tx/test_set_code_txs.py b/tests/prague/eip7702_set_code_tx/test_set_code_txs.py index 84f9982b0d..ac5685a759 100644 --- a/tests/prague/eip7702_set_code_tx/test_set_code_txs.py +++ b/tests/prague/eip7702_set_code_tx/test_set_code_txs.py @@ -26,7 +26,6 @@ Conditional, EIPChecklist, Environment, - EVMCodeType, Fork, Hash, Initcode, @@ -43,7 +42,6 @@ ) from execution_testing import Macros as Om from execution_testing.base_types import HexNumber -from execution_testing.test_types.eof.v1 import Container, Section from ...cancun.eip4844_blobs.spec import Spec as Spec4844 from ..eip6110_deposits.helpers import DepositRequest @@ -372,7 +370,6 @@ def test_set_code_to_tstore_reentry( pre: Alloc, call_opcode: Op, return_opcode: Op, - evm_code_type: EVMCodeType, ) -> None: """ Test the executing a simple TSTORE in a set-code transaction, which also @@ -388,7 +385,6 @@ def test_set_code_to_tstore_reentry( + Op.RETURNDATACOPY(0, 0, 32) + Op.SSTORE(2, Op.MLOAD(0)), if_false=Op.MSTORE(0, Op.TLOAD(1)) + return_opcode(size=32), - evm_code_type=evm_code_type, ) set_code_to_address = pre.deploy_contract(set_code) @@ -426,8 +422,6 @@ def test_set_code_to_tstore_reentry( Op.DELEGATECALL, Op.CALLCODE, Op.STATICCALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, ] ) @pytest.mark.parametrize("call_eoa_first", [True, False]) @@ -720,7 +714,6 @@ def test_set_code_to_contract_creator( state_test: StateTestFiller, pre: Alloc, create_opcode: Op, - evm_code_type: EVMCodeType, ) -> None: """ Test the executing a contract-creating opcode in a set-code transaction. 
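    The EOA is delegated to a creator contract that copies the transaction
    calldata (the initcode) into memory and runs the parametrized create
    opcode on it; because the create executes in the EOA's context, the
    deployed contract's address is derived from the EOA itself.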
@@ -728,16 +721,8 @@ def test_set_code_to_contract_creator( storage = Storage() auth_signer = pre.fund_eoa(auth_account_start_balance) - deployed_code: Bytecode | Container = Op.STOP - initcode: Bytecode | Container - - if evm_code_type == EVMCodeType.LEGACY: - initcode = Initcode(deploy_code=deployed_code) - elif evm_code_type == EVMCodeType.EOF_V1: - deployed_code = Container.Code(deployed_code) - initcode = Container.Init(deploy_container=deployed_code) - else: - raise ValueError(f"Unsupported EVM code type: {evm_code_type}") + deployed_code: Bytecode = Op.STOP + initcode: Bytecode = Initcode(deploy_code=deployed_code) deployed_contract_address = compute_create_address( address=auth_signer, @@ -746,25 +731,12 @@ def test_set_code_to_contract_creator( opcode=create_opcode, ) - creator_code: Bytecode | Container - if evm_code_type == EVMCodeType.LEGACY: - creator_code = Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) + Op.SSTORE( - storage.store_next(deployed_contract_address), - create_opcode(value=0, offset=0, size=Op.CALLDATASIZE), - ) - elif evm_code_type == EVMCodeType.EOF_V1: - creator_code = Container( - sections=[ - Section.Code( - code=Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP(), - ), - Section.Container( - container=initcode, - ), - ] - ) - else: - raise ValueError(f"Unsupported EVM code type: {evm_code_type}") + creator_code: Bytecode = Op.CALLDATACOPY( + 0, 0, Op.CALLDATASIZE + ) + Op.SSTORE( + storage.store_next(deployed_contract_address), + create_opcode(value=0, offset=0, size=Op.CALLDATASIZE), + ) creator_code_address = pre.deploy_contract(creator_code) @@ -772,7 +744,7 @@ def test_set_code_to_contract_creator( gas_limit=10_000_000, to=auth_signer, value=0, - data=initcode if evm_code_type == EVMCodeType.LEGACY else b"", + data=initcode, authorization_list=[ AuthorizationTuple( address=creator_code_address, @@ -812,7 +784,6 @@ def test_set_code_to_self_caller( pre: Alloc, call_opcode: Op, value: int, - evm_code_type: EVMCodeType, ) -> None: """Test the executing a self-call in a set-code transaction.""" if "value" not in call_opcode.kwargs and value != 0: @@ -821,7 +792,7 @@ def test_set_code_to_self_caller( storage = Storage() auth_signer = pre.fund_eoa(auth_account_start_balance) - static_call = call_opcode in [Op.STATICCALL, Op.EXTSTATICCALL] + static_call = call_opcode == Op.STATICCALL first_entry_slot = storage.store_next(True) re_entry_success_slot = storage.store_next(not static_call) @@ -838,7 +809,6 @@ def test_set_code_to_self_caller( + Op.SSTORE(re_entry_call_return_code_slot, call_bytecode) + Op.STOP, if_false=Op.SSTORE(re_entry_success_slot, 1) + Op.STOP, - evm_code_type=evm_code_type, ) set_code_to_address = pre.deploy_contract(set_code) @@ -954,7 +924,7 @@ def test_set_code_call_set_code( auth_signer_1 = pre.fund_eoa(auth_account_start_balance) storage_1 = Storage() - static_call = call_opcode in [Op.STATICCALL, Op.EXTSTATICCALL] + static_call = call_opcode == Op.STATICCALL set_code_1_call_result_slot = storage_1.store_next( call_return_code(opcode=call_opcode, success=not static_call) @@ -1015,20 +985,17 @@ def test_set_code_call_set_code( code=Spec.delegation_designation(set_code_to_address_1), storage=( storage_1 - if call_opcode - in [Op.CALL, Op.STATICCALL, Op.EXTCALL, Op.EXTSTATICCALL] + if call_opcode in [Op.CALL, Op.STATICCALL] else storage_1 + storage_2 ), - balance=(0 if call_opcode in [Op.CALL, Op.EXTCALL] else value) + balance=(0 if call_opcode == Op.CALL else value) + auth_account_start_balance, ), auth_signer_2: Account( nonce=1, 
code=Spec.delegation_designation(set_code_to_address_2), - storage=storage_2 - if call_opcode in [Op.CALL, Op.EXTCALL] - else {}, - balance=(value if call_opcode in [Op.CALL, Op.EXTCALL] else 0) + storage=storage_2 if call_opcode == Op.CALL else {}, + balance=(value if call_opcode == Op.CALL else 0) + auth_account_start_balance, ), }, @@ -1173,7 +1140,6 @@ def test_call_into_self_delegating_set_code( call_return_code( opcode=call_opcode, success=False, - revert=(call_opcode == Op.EXTDELEGATECALL), ) ), call_opcode(address=auth_signer), @@ -1229,7 +1195,6 @@ def test_call_into_chain_delegating_set_code( call_return_code( opcode=call_opcode, success=False, - revert=(call_opcode == Op.EXTDELEGATECALL), ) ), call_opcode(address=auth_signer_1), @@ -1434,7 +1399,6 @@ def test_ext_code_on_self_set_code( ) -@pytest.mark.with_all_evm_code_types() @pytest.mark.parametrize( "set_code_address_first", [ @@ -1488,9 +1452,7 @@ def test_set_code_address_and_authority_warm_state( callee_code += code_gas_measure_authority + code_gas_measure_set_code callee_code += Op.SSTORE(slot_call_success, 1) + Op.STOP - callee_address = pre.deploy_contract( - callee_code, evm_code_type=EVMCodeType.LEGACY - ) + callee_address = pre.deploy_contract(callee_code) callee_storage = Storage() callee_storage[slot_call_success] = 1 callee_storage[slot_set_code_to_warm_state] = ( @@ -1545,7 +1507,7 @@ def test_set_code_address_and_authority_warm_state_call_types( """ Test set to code address and authority warm status after a call to authority address, or vice-versa, using all available call opcodes without - using `GAS` opcode (unavailable in EOF). + using `GAS` opcode. """ auth_signer = pre.fund_eoa(auth_account_start_balance) @@ -1845,7 +1807,6 @@ def test_set_code_to_account_deployed_in_same_tx( state_test: StateTestFiller, pre: Alloc, create_opcode: Op, - evm_code_type: EVMCodeType, ) -> None: """ Test setting the code of an account to an address that is deployed in the @@ -1856,30 +1817,17 @@ def test_set_code_to_account_deployed_in_same_tx( success_slot = 1 - deployed_code: Bytecode | Container = Op.SSTORE(success_slot, 1) + Op.STOP - initcode: Bytecode | Container - - if evm_code_type == EVMCodeType.LEGACY: - initcode = Initcode(deploy_code=deployed_code) - elif evm_code_type == EVMCodeType.EOF_V1: - deployed_code = Container.Code(deployed_code) - initcode = Container.Init(deploy_container=deployed_code) - else: - raise ValueError(f"Unsupported EVM code type: {evm_code_type}") + deployed_code: Bytecode = Op.SSTORE(success_slot, 1) + Op.STOP + initcode: Bytecode = Initcode(deploy_code=deployed_code) deployed_contract_address_slot = 1 signer_call_return_code_slot = 2 deployed_contract_call_return_code_slot = 3 - call_opcode = ( - Op.CALL if evm_code_type == EVMCodeType.LEGACY else Op.EXTCALL - ) - - if create_opcode == Op.EOFCREATE: - create_opcode = Op.EOFCREATE[0] # type: ignore + call_opcode = Op.CALL - contract_creator_code: Bytecode | Container = ( - Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) # NOOP on EOF + contract_creator_code: Bytecode = ( + Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) + Op.SSTORE( deployed_contract_address_slot, create_opcode(offset=0, size=Op.CALLDATASIZE), @@ -1894,14 +1842,6 @@ def test_set_code_to_account_deployed_in_same_tx( + Op.STOP() ) - if evm_code_type == EVMCodeType.EOF_V1: - contract_creator_code = Container( - sections=[ - Section.Code(contract_creator_code), - Section.Container(container=initcode), - ], - ) - contract_creator_address = pre.deploy_contract(contract_creator_code) 
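    # The deployment address is pre-computed here so that the authorization
    # tuple below can delegate the EOA to an account that only comes into
    # existence later in this same transaction.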
deployed_contract_address = compute_create_address( @@ -1916,7 +1856,7 @@ def test_set_code_to_account_deployed_in_same_tx( gas_limit=10_000_000, to=contract_creator_address, value=0, - data=initcode if evm_code_type == EVMCodeType.LEGACY else b"", + data=initcode, authorization_list=[ AuthorizationTuple( address=deployed_contract_address, @@ -1960,9 +1900,7 @@ def test_set_code_to_account_deployed_in_same_tx( [0, 1], ) @pytest.mark.parametrize("call_set_code_first", [False, True]) -@pytest.mark.parametrize( - "create_opcode", [Op.CREATE, Op.CREATE2] -) # EOF code does not support SELFDESTRUCT +@pytest.mark.parametrize("create_opcode", [Op.CREATE, Op.CREATE2]) def test_set_code_to_self_destructing_account_deployed_in_same_tx( state_test: StateTestFiller, pre: Alloc, @@ -2843,7 +2781,6 @@ def test_nonce_overflow_after_first_authorization( Op.LOG4, ], ) -@pytest.mark.with_all_evm_code_types def test_set_code_to_log( state_test: StateTestFiller, pre: Alloc, @@ -3030,8 +2967,6 @@ def deposit_contract_initial_storage() -> Storage: Op.STATICCALL, Op.CALLCODE, Op.DELEGATECALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, ] ) ) @@ -3194,7 +3129,6 @@ def test_set_code_to_system_contract( ) -@pytest.mark.with_all_evm_code_types @pytest.mark.with_all_tx_types( selector=lambda tx_type: tx_type != 4, marks=lambda tx_type: pytest.mark.execute( @@ -3223,7 +3157,6 @@ def test_eoa_tx_after_set_code( pre: Alloc, tx_type: int, fork: Fork, - evm_code_type: EVMCodeType, same_block: bool, ) -> None: """ diff --git a/tests/prague/eip7702_set_code_tx/test_set_code_txs_2.py b/tests/prague/eip7702_set_code_tx/test_set_code_txs_2.py index d988da7fbc..eca78f7e57 100644 --- a/tests/prague/eip7702_set_code_tx/test_set_code_txs_2.py +++ b/tests/prague/eip7702_set_code_tx/test_set_code_txs_2.py @@ -29,7 +29,6 @@ TransactionException, compute_create_address, ) -from execution_testing.test_types.eof.v1 import Container, Section from .spec import Spec, ref_spec_7702 @@ -1144,53 +1143,6 @@ def test_static_to_pointer( ) -@pytest.mark.valid_from("EOFv1") -def test_pointer_to_eof(state_test: StateTestFiller, pre: Alloc) -> None: - """ - Tx -> call -> pointer A -> EOF. - - Pointer to eof contract works. - """ - env = Environment() - storage: Storage = Storage() - sender = pre.fund_eoa() - pointer_a = pre.fund_eoa() - - contract_a = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.SSTORE(storage.store_next(5, "eof_call_result"), 5) - + Op.STOP, - ) - ] - ) - ) - - tx = Transaction( - to=pointer_a, - gas_limit=3_000_000, - data=b"", - value=0, - sender=sender, - authorization_list=[ - AuthorizationTuple( - address=contract_a, - nonce=0, - signer=pointer_a, - ) - ], - ) - - post = {pointer_a: Account(storage=storage)} - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - @pytest.mark.valid_from("Prague") @pytest.mark.parametrize("sender_delegated", [True, False]) @pytest.mark.parametrize("sender_is_auth_signer", [True, False]) diff --git a/tests/shanghai/eip3860_initcode/test_with_eof.py b/tests/shanghai/eip3860_initcode/test_with_eof.py deleted file mode 100644 index 430d4fbc79..0000000000 --- a/tests/shanghai/eip3860_initcode/test_with_eof.py +++ /dev/null @@ -1,104 +0,0 @@ -""" -Test CREATE / CREATE2 and EOF interaction for EIP-3860 initcode limits. 
-""" - -import itertools - -import pytest -from execution_testing import ( - Account, - Alloc, - Bytecode, - Environment, - Initcode, - Op, - StateTestFiller, - Transaction, - compute_create_address, -) -from execution_testing.test_types.eof.v1.constants import ( - MAX_BYTECODE_SIZE, - MAX_INITCODE_SIZE, -) - -from .spec import ref_spec_3860 - -REFERENCE_SPEC_GIT_PATH = ref_spec_3860.git_path -REFERENCE_SPEC_VERSION = ref_spec_3860.version - -pytestmark = pytest.mark.valid_from("Shanghai") - -_slot = itertools.count() -next(_slot) # don't use slot 0 -slot_code_worked = next(_slot) -slot_create_address = next(_slot) -value_code_worked = 0x2015 - - -@pytest.mark.parametrize( - "opcode", - [ - Op.CREATE, - Op.CREATE2, - ], -) -@pytest.mark.parametrize( - "init_code", - [ - pytest.param(Bytecode(), id="empty_initcode"), - pytest.param( - Initcode(initcode_length=MAX_INITCODE_SIZE), id="max_initcode" - ), - pytest.param(Initcode(deploy_code=Bytecode()), id="empty_code"), - pytest.param( - Initcode(deploy_code=Op.STOP * MAX_BYTECODE_SIZE), id="max_code" - ), - ], -) -def test_legacy_create_edge_code_size( - state_test: StateTestFiller, - pre: Alloc, - opcode: Op, - init_code: Bytecode, -) -> None: - """ - Test legacy initcode and deployed code edge cases with EOF enabled. - - Verify that legacy initcode/deploycode having 0 or max size continues to - work in the fork where EOF is enabled. Handling of EOF magic prefix and - version interferes with the handling of legacy creation, so a specific test - was proposed to test behavior doesn't change. - """ - env = Environment() - - salt_param = [0] if opcode == Op.CREATE2 else [] - factory_code = ( - Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.SSTORE( - slot_create_address, opcode(0, 0, Op.CALLDATASIZE, *salt_param) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - ) - - contract_address = pre.deploy_contract(code=factory_code) - - new_address = compute_create_address( - address=contract_address, initcode=init_code, nonce=1, opcode=opcode - ) - - post = { - contract_address: Account( - storage={ - slot_create_address: new_address, - slot_code_worked: value_code_worked, - } - ) - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - data=init_code, - sender=pre.fund_eoa(), - ) - - state_test(env=env, pre=pre, post=post, tx=tx) diff --git a/tests/unscheduled/eip7692_eof_v1/__init__.py b/tests/unscheduled/eip7692_eof_v1/__init__.py deleted file mode 100644 index 47bb9a0684..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -""" -Test cases for [EIP-7692: EVM Object Format (EOFv1) Meta](https://eips.ethereum.org/EIPS/eip-7692). - Test cases for the EIPs included in EIP-7692 EOFv1 Meta. - - * [EIP-663: SWAPN, DUPN and EXCHANGE instructions](https://eips.ethereum.org/EIPS/eip-663). - * [EIP-3540: EOF - EVM Object Format v1](https://eips.ethereum.org/EIPS/eip-3540). - * [EIP-3670: EOF - Code Validation](https://eips.ethereum.org/EIPS/eip-3670). - * [EIP-4200: EOF - Static relative jumps](https://eips.ethereum.org/EIPS/eip-4200). - * [EIP-4750: EOF - Functions](https://eips.ethereum.org/EIPS/eip-4750). - * [EIP-5450: EOF - Stack Validation](https://eips.ethereum.org/EIPS/eip-5450). - * [EIP-6206: EOF - JUMPF and non-returning functions](https://eips.ethereum.org/EIPS/eip-6206). - * [EIP-7069: Revamped CALL instructions](https://eips.ethereum.org/EIPS/eip-7069). - * [EIP-7480: EOF - Data section access instructions](https://eips.ethereum.org/EIPS/eip-7480). 
- * [EIP-7620: EOF Contract Creation](https://eips.ethereum.org/EIPS/eip-7620). - * [EIP-7873: EOF - TXCREATE and InitcodeTransaction type](https://eips.ethereum.org/EIPS/eip-7873). - -## Devnet Specifications - - - [ethpandaops/eof-devnet-0](https://notes.ethereum.org/@ethpandaops/eof-devnet-0). -""" - -EOF_FORK_NAME = "EOFv1" diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/__init__.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/__init__.py deleted file mode 100644 index 3d69914537..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -""" -EOF - EVM Object Format v1 tests. - -Test cases for -[EIP-3540: EOF - EVM Object Format v1](https://eips.ethereum.org/EIPS/eip-3540). - -EIP-3540 introduces a structured format for EVM bytecode, with separate -sections for code and data. Opcodes introduced: None (defines a new -bytecode structure but no new opcodes). -""" diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/opcodes.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/opcodes.py deleted file mode 100644 index 8137b8b1f5..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/opcodes.py +++ /dev/null @@ -1,250 +0,0 @@ -"""EOF Valid Opcodes.""" - -from typing import List - -from execution_testing import Op - -V1_EOF_OPCODES: List[Op] = [ - # new eof ops - # EIP-663 Swap and Dup - Op.DUPN, - Op.SWAPN, - Op.EXCHANGE, - # EIP-4200 Relative Jumps - Op.RJUMP, - Op.RJUMPI, - Op.RJUMPV, - # EIP-4750 functions - Op.CALLF, - Op.RETF, - # EIP-6209 JUMPF Instruction - Op.JUMPF, - # EIP-7069 Revamped EOF Call - Op.EXTCALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, - Op.RETURNDATALOAD, - # EIP-7480 EOF Data Section Access - Op.DATALOAD, - Op.DATALOADN, - Op.DATASIZE, - Op.DATACOPY, - # EIP-7620 EOF Create and Return Contract operation - Op.EOFCREATE, - # EIP-7873 TXCREATE and InitcodeTransaction - Op.TXCREATE, - # EIP-7620 EOF Create and Return Contract operation - Op.RETURNCODE, - # Non-deprecated Legacy Opcodes - Op.STOP, - Op.ADD, - Op.MUL, - Op.SUB, - Op.DIV, - Op.SDIV, - Op.MOD, - Op.SMOD, - Op.ADDMOD, - Op.MULMOD, - Op.EXP, - Op.SIGNEXTEND, - Op.LT, - Op.GT, - Op.SLT, - Op.SGT, - Op.EQ, - Op.ISZERO, - Op.AND, - Op.OR, - Op.XOR, - Op.NOT, - Op.BYTE, - Op.SHL, - Op.SHR, - Op.SAR, - Op.SHA3, - Op.ADDRESS, - Op.BALANCE, - Op.ORIGIN, - Op.CALLER, - Op.CALLVALUE, - Op.CALLDATALOAD, - Op.CALLDATASIZE, - Op.CALLDATACOPY, - Op.CODESIZE, - Op.CODECOPY, - Op.GASPRICE, - Op.EXTCODESIZE, - Op.EXTCODECOPY, - Op.RETURNDATASIZE, - Op.RETURNDATACOPY, - Op.EXTCODEHASH, - Op.BLOCKHASH, - Op.COINBASE, - Op.TIMESTAMP, - Op.NUMBER, - Op.PREVRANDAO, - Op.GASLIMIT, - Op.CHAINID, - Op.SELFBALANCE, - Op.BASEFEE, - Op.POP, - Op.MLOAD, - Op.MSTORE, - Op.MSTORE8, - Op.SLOAD, - Op.SSTORE, - Op.MSIZE, - Op.GAS, - Op.JUMPDEST, - Op.PUSH1, - Op.PUSH2, - Op.PUSH3, - Op.PUSH4, - Op.PUSH5, - Op.PUSH6, - Op.PUSH7, - Op.PUSH8, - Op.PUSH9, - Op.PUSH10, - Op.PUSH11, - Op.PUSH12, - Op.PUSH13, - Op.PUSH14, - Op.PUSH15, - Op.PUSH16, - Op.PUSH17, - Op.PUSH18, - Op.PUSH19, - Op.PUSH20, - Op.PUSH21, - Op.PUSH22, - Op.PUSH23, - Op.PUSH24, - Op.PUSH25, - Op.PUSH26, - Op.PUSH27, - Op.PUSH28, - Op.PUSH29, - Op.PUSH30, - Op.PUSH31, - Op.PUSH32, - Op.DUP1, - Op.DUP2, - Op.DUP3, - Op.DUP4, - Op.DUP5, - Op.DUP6, - Op.DUP7, - Op.DUP8, - Op.DUP9, - Op.DUP10, - Op.DUP11, - Op.DUP12, - Op.DUP13, - Op.DUP14, - Op.DUP15, - Op.DUP16, - Op.SWAP1, - Op.SWAP2, - Op.SWAP3, - Op.SWAP4, - Op.SWAP5, - Op.SWAP6, - Op.SWAP7, - Op.SWAP8, - Op.SWAP9, - Op.SWAP10, - Op.SWAP11, 
- Op.SWAP12, - Op.SWAP13, - Op.SWAP14, - Op.SWAP15, - Op.SWAP16, - Op.LOG0, - Op.LOG1, - Op.LOG2, - Op.LOG3, - Op.LOG4, - Op.CREATE, - Op.CALL, - # Op.CALLCODE, - Op.RETURN, - Op.DELEGATECALL, - Op.CREATE2, - Op.STATICCALL, - Op.REVERT, - Op.INVALID, - # Op.SELFDESTRUCT, -] -""" -List of all valid EOF V1 opcodes for Shanghai. -""" - -V1_EOF_DEPRECATED_OPCODES = [ - Op.SELFDESTRUCT, - Op.CALLCODE, - Op.JUMP, - Op.JUMPI, - Op.PC, -] -""" -List of opcodes that will be deprecated for EOF V1. - -For these opcodes we will also add the correct expected amount of stack items -so the container is not considered invalid due to buffer underflow. -""" - -V1_EOF_ONLY_OPCODES = [ - Op.DUPN, - Op.SWAPN, - Op.EXCHANGE, - # EIP-4200 Relative Jumps - Op.RJUMP, - Op.RJUMPI, - Op.RJUMPV, - # EIP-4750 functions - Op.CALLF, - Op.RETF, - # EIP-6209 JUMPF Instruction - Op.JUMPF, - # EIP-7069 Revamped EOF Call - Op.EXTCALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, - Op.RETURNDATALOAD, - # EIP-7480 EOF Data Section Access - Op.DATALOAD, - Op.DATALOADN, - Op.DATASIZE, - Op.DATACOPY, - # EIP-7620 EOF Create and Return Contract operation - Op.EOFCREATE, - Op.RETURNCODE, - # EIP-7873 TXCREATE and InitcodeTransaction - Op.TXCREATE, -] -""" -List of valid EOF V1 opcodes that are disabled in legacy bytecode. -""" - -VALID_TERMINATING_OPCODES = [ - Op.STOP, - Op.RETURN, - Op.REVERT, - Op.INVALID, - Op.RETF, - Op.JUMPF, -] - -INVALID_TERMINATING_OPCODES = [ - op for op in V1_EOF_OPCODES if op not in VALID_TERMINATING_OPCODES -] - -INVALID_OPCODES = [ - bytes([i]) - for i in range(256) - if i - not in [x.int() for x in V1_EOF_OPCODES] - + [x.int() for x in V1_EOF_DEPRECATED_OPCODES] -] diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/spec.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/spec.py deleted file mode 100644 index d382a9ca43..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/spec.py +++ /dev/null @@ -1 +0,0 @@ -"""EOF V1 Constants used throughout all tests.""" diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_all_opcodes_in_container.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_all_opcodes_in_container.py deleted file mode 100644 index f8e77563eb..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_all_opcodes_in_container.py +++ /dev/null @@ -1,484 +0,0 @@ -""" -EOF Container: check how every opcode behaves in the middle of the valid eof -container code. -""" - -import itertools -from typing import Any, Dict, Generator, List, Tuple - -import pytest -from execution_testing import ( - Bytecode, - EOFException, - EOFTestFiller, - Op, - Opcode, - UndefinedOpcodes, -) -from execution_testing.test_types.eof.constants import ( - MAX_RUNTIME_STACK_HEIGHT, -) -from execution_testing.test_types.eof.v1 import ( - Container, - ContainerKind, - Section, -) -from execution_testing.test_types.eof.v1.constants import ( - MAX_STACK_INCREASE_LIMIT, -) - -from .. 
import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-3540.md" -REFERENCE_SPEC_VERSION = "8dcb0a8c1c0102c87224308028632cc986a61183" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - -all_opcodes = set(Op) -undefined_opcodes = set(UndefinedOpcodes) - -# Invalid Opcodes will produce EOFException.UNDEFINED_INSTRUCTION when used in -# EOFContainer -invalid_eof_opcodes = { - Op.CODESIZE, - Op.SELFDESTRUCT, - Op.CREATE2, - Op.CODECOPY, - Op.EXTCODESIZE, - Op.EXTCODECOPY, - Op.EXTCODEHASH, - Op.JUMP, - Op.JUMPI, - Op.PC, - Op.GAS, - Op.CREATE, - Op.CALL, - Op.CALLCODE, - Op.DELEGATECALL, - Op.STATICCALL, -} - -valid_eof_opcodes = all_opcodes - invalid_eof_opcodes - -# Halting the execution opcodes can be placed without STOP instruction at the -# end -halting_opcodes = { - Op.STOP, - Op.RETURNCODE, - Op.RETURN, - Op.REVERT, - Op.INVALID, -} - -# Opcodes that end the code section and can be placed without STOP instruction -# at the end -section_terminating_opcodes = { - Op.RETF, - Op.JUMPF, -} - -data_portion_opcodes = {op for op in all_opcodes if op.has_data_portion()} - - -# NOTE: `sorted` is used to ensure that the tests are collected in a -# deterministic order. - - -@pytest.mark.parametrize( - "opcode", - sorted((all_opcodes | undefined_opcodes) - {Op.RETF}), -) -def test_all_opcodes_in_container( - eof_test: EOFTestFiller, - opcode: Opcode, -) -> None: - """ - Test all opcodes inside valid container 257 because 0x5B is duplicated. - """ - data_portion = 1 if opcode == Op.CALLF else 0 - opcode_with_data_portion = ( - opcode[data_portion] if opcode.has_data_portion() else opcode - ) - - # opcode_with_data_portion has the correct minimum stack height - bytecode = ( - Op.PUSH0 * opcode_with_data_portion.min_stack_height - + opcode_with_data_portion - ) - - if opcode not in (halting_opcodes | section_terminating_opcodes): - bytecode += Op.STOP - - sections = [Section.Code(code=bytecode)] - - match opcode: - case Op.EOFCREATE | Op.RETURNCODE: - sections.append( - Section.Container( - container=Container( - sections=[ - Section.Code(code=Op.REVERT(0, 0)), - ] - ) - ) - ) - case Op.CALLF: - sections.append( - Section.Code( - code=Op.RETF, - code_outputs=0, - ) - ) - sections.append(Section.Data("1122334455667788" * 4)) - - if opcode == Op.RETURNCODE: - eof_code = Container(sections=sections, kind=ContainerKind.INITCODE) - else: - eof_code = Container(sections=sections) - - eof_test( - container=eof_code, - expect_exception=( - None - if opcode in valid_eof_opcodes - else EOFException.UNDEFINED_INSTRUCTION - ), - ) - - -@pytest.mark.parametrize( - "opcode", - sorted(invalid_eof_opcodes | undefined_opcodes), -) -@pytest.mark.parametrize( - "terminating_opcode", - sorted(halting_opcodes) + [Op.RJUMP], -) -def test_invalid_opcodes_after_stop( - eof_test: EOFTestFiller, - opcode: Opcode, - terminating_opcode: Opcode, -) -> None: - """ - Test that an invalid opcode placed after STOP (terminating instruction) - invalidates EOF. - """ - terminating_code = Bytecode(terminating_opcode) - match terminating_opcode: # Enhance the code for complex opcodes. 
- case Op.RETURNCODE: - terminating_code = Op.RETURNCODE[0] - case Op.RETURN | Op.REVERT: - terminating_code = Op.PUSH0 + Op.PUSH0 + terminating_opcode - case Op.RJUMP: - terminating_code = Op.RJUMP[-3] - - eof_code = Container( - kind=ContainerKind.INITCODE - if terminating_opcode == Op.RETURNCODE - else ContainerKind.RUNTIME, - sections=[ - Section.Code(code=terminating_code + opcode), - Section.Data("00" * 32), - ] - + ( - [Section.Container(container=Container.Code(Op.INVALID))] - if terminating_opcode == Op.RETURNCODE - else [] - ), - ) - - eof_test( - container=eof_code, - expect_exception=EOFException.UNDEFINED_INSTRUCTION, - ) - - -@pytest.mark.parametrize( - "opcode", - sorted( - valid_eof_opcodes - - halting_opcodes - - section_terminating_opcodes - - {Op.RJUMP, Op.RJUMPI, Op.RJUMPV} - ), -) -def test_all_invalid_terminating_opcodes( - eof_test: EOFTestFiller, - opcode: Opcode, -) -> None: - """Test all opcodes that are invalid as the last opcode in a container.""" - if opcode.has_data_portion(): - # Add the appropriate data portion to the opcode by using the get_item - # method. On the CALLF opcode we need to reference the second code - # section, hence the [1] index. - opcode = opcode[0] if opcode != Op.CALLF else opcode[1] - - bytecode = (Op.PUSH0 * opcode.min_stack_height) + opcode - - sections = [Section.Code(code=bytecode)] - - if opcode == Op.CALLF[1]: - sections += [Section.Code(code=Op.RETF, code_outputs=0)] - elif opcode == Op.EOFCREATE[0]: - sections += [ - Section.Container( - container=Container( - sections=[ - Section.Code(code=Op.RETURNCODE[0](0, 0)), - Section.Container(Container.Code(code=Op.STOP)), - ] - ) - ) - ] - - sections += [Section.Data(b"\0" * 32)] - - eof_test( - container=Container( - sections=sections, - ), - expect_exception=EOFException.MISSING_STOP_OPCODE, - ) - - -@pytest.mark.parametrize( - "opcode", - sorted(halting_opcodes | section_terminating_opcodes), -) -def test_all_unreachable_terminating_opcodes_after_stop( - eof_test: EOFTestFiller, - opcode: Opcode, -) -> None: - """Test all terminating opcodes after stop.""" - match opcode: - case Op.STOP: - sections = [Section.Code(code=Op.STOP + Op.STOP)] - case Op.RETF: - sections = [ - Section.Code(code=Op.CALLF[1] + Op.STOP), - Section.Code(code=Op.STOP + Op.RETF, code_outputs=0), - ] - case Op.JUMPF: - sections = [ - Section.Code(code=Op.STOP + Op.JUMPF[1]), - Section.Code(code=Op.STOP), - ] - case Op.RETURNCODE: - sections = [ - Section.Code(code=Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP), - Section.Container( - container=Container( - sections=[ - Section.Code( - code=Op.STOP + Op.RETURNCODE[0](0, 0) - ), - Section.Container(Container.Code(code=Op.STOP)), - ] - ) - ), - ] - case Op.RETURN | Op.REVERT | Op.INVALID: - sections = [ - Section.Code(code=Op.PUSH0 + Op.PUSH0 + Op.STOP + opcode), - ] - case _: - raise NotImplementedError(f"Opcode {opcode} is not implemented") - - eof_test( - container=Container( - sections=sections, - ), - expect_exception=EOFException.UNREACHABLE_INSTRUCTIONS - if opcode != Op.RETURNCODE - else EOFException.INCOMPATIBLE_CONTAINER_KIND, - ) - - -@pytest.mark.parametrize( - "opcode", - sorted((halting_opcodes | section_terminating_opcodes) - {Op.STOP}), -) -def test_all_unreachable_terminating_opcodes_before_stop( - eof_test: EOFTestFiller, - opcode: Opcode, -) -> None: - """Test all opcodes terminating opcodes before.""" - match opcode: - case Op.RETF: - sections = [ - Section.Code(code=Op.CALLF[1] + Op.STOP), - Section.Code(code=Op.RETF + Op.STOP, code_outputs=0), - ] - 
case Op.JUMPF: - sections = [ - Section.Code(code=Op.JUMPF[1] + Op.STOP), - Section.Code(code=Op.STOP), - ] - case Op.RETURNCODE: - sections = [ - Section.Code(code=Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP), - Section.Container( - container=Container( - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, 0) + Op.STOP - ), - Section.Container(Container.Code(code=Op.STOP)), - ] - ) - ), - ] - case Op.RETURN | Op.REVERT | Op.INVALID: - sections = [ - Section.Code( - code=Op.PUSH1(0) + Op.PUSH1(0) + opcode + Op.STOP - ), - ] - case _: - raise NotImplementedError(f"Opcode {opcode} is not implemented") - - eof_test( - container=Container( - sections=sections, - ), - expect_exception=EOFException.UNREACHABLE_INSTRUCTIONS - if opcode != Op.RETURNCODE - else EOFException.INCOMPATIBLE_CONTAINER_KIND, - ) - - -@pytest.mark.parametrize( - "opcode", - sorted( - op - for op in valid_eof_opcodes - if op.pushed_stack_items > op.popped_stack_items - ) - + [ - Op.DUPN[0xFF], - ], -) -@pytest.mark.parametrize( - "exception", - # We test two types of exceptions here: - # 1. Invalid max stack height, where we modify the `max_stack_height` - # field of the code section to the maximum stack height allowed by - # the EIP-3540, so the code still has to be checked for stack overflow. - # - # 2. Max stack height above limit, where we don't modify the - # `max_stack_height` field of the code section, so the actual - # code doesn't have to be verified for the stack overflow. - [ - EOFException.INVALID_MAX_STACK_INCREASE, - EOFException.MAX_STACK_INCREASE_ABOVE_LIMIT, - ], -) -def test_all_opcodes_stack_overflow( - eof_test: EOFTestFiller, - opcode: Opcode, - exception: EOFException, -) -> None: - """ - Test stack overflow on all opcodes that push more items than they pop. - """ - opcode = opcode[0] if opcode.has_data_portion() else opcode - - assert opcode.pushed_stack_items - opcode.popped_stack_items == 1 - opcode_count = MAX_RUNTIME_STACK_HEIGHT - opcode.min_stack_height - - bytecode = Op.PUSH0 * opcode.min_stack_height - bytecode += opcode * opcode_count - bytecode += Op.STOP - - kwargs: Dict[str, Any] = {"code": bytecode} - - if exception == EOFException.INVALID_MAX_STACK_INCREASE: - # Lie about the max stack height to make the code be checked for stack - # overflow. - kwargs["max_stack_height"] = MAX_STACK_INCREASE_LIMIT - - sections = [Section.Code(**kwargs)] - - if opcode == Op.DATALOADN[0]: - sections.append(Section.Data(b"\0" * 32)) - eof_code = Container(sections=sections) - - eof_test( - container=eof_code, - expect_exception=exception, - ) - - -def valid_opcode_combinations( - compute_max_stack_height_options: List[bool], - truncate_all_options: List[bool], - opcodes: List[Opcode], -) -> Generator[Tuple[bool, bool, Opcode], None, None]: - """ - Create valid parameter combinations for - test_truncated_data_portion_opcodes(). 
- """ - for opcode, truncate_all, compute_max_stack_height in itertools.product( - opcodes, truncate_all_options, compute_max_stack_height_options - ): - opcode_with_data_portion: bytes = bytes(opcode[1]) - - # Skip invalid or redundant combinations to avoid using pytest.skip in - # the test - if len(opcode_with_data_portion) == 2 and truncate_all: - continue - if ( - compute_max_stack_height - and max(opcode.min_stack_height, opcode.pushed_stack_items) == 0 - ): - continue - - yield compute_max_stack_height, truncate_all, opcode - - -@pytest.mark.parametrize( - "compute_max_stack_height, truncate_all, opcode", - valid_opcode_combinations( - [False, True], [False, True], sorted(data_portion_opcodes) - ), -) -def test_truncated_data_portion_opcodes( - eof_test: EOFTestFiller, - opcode: Opcode, - truncate_all: bool, - compute_max_stack_height: bool, -) -> None: - """ - Test that an instruction with data portion and truncated immediate bytes - (therefore a terminating instruction is also missing) invalidates EOF. - """ - opcode_with_data_portion: bytes = bytes(opcode[1]) - - # Compose instruction bytes with empty imm bytes (truncate_all) or 1 byte - # shorter imm bytes. - opcode_bytes = ( - opcode_with_data_portion[0:1] - if truncate_all - else opcode_with_data_portion[:-1] - ) - - if opcode.min_stack_height > 0: - opcode_bytes = bytes(Op.PUSH0 * opcode.min_stack_height) + opcode_bytes - - max_stack_height = ( - max(opcode.min_stack_height, opcode.pushed_stack_items) - if compute_max_stack_height - else 0 - ) - - eof_code = Container( - sections=[ - Section.Code(opcode_bytes, max_stack_height=max_stack_height), - # Provide data section potentially confused with missing imm bytes. - Section.Data(b"\0" * 64), - ] - ) - eof_test( - container=eof_code, - expect_exception=EOFException.TRUNCATED_INSTRUCTION, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_size.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_size.py deleted file mode 100644 index 51021b8797..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_size.py +++ /dev/null @@ -1,126 +0,0 @@ -"""EOF validation tests for EIP-3540 container size.""" - -import pytest -from execution_testing import EOFException, EOFTestFiller, Op -from execution_testing.test_types.eof.v1 import Container, Section -from execution_testing.test_types.eof.v1.constants import ( - MAX_INITCODE_SIZE, -) - -from .. import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-3540.md" -REFERENCE_SPEC_VERSION = "6b313505c75afa49a4f34de39c609ebebc7be87f" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - -VALID_CONTAINER = Container(sections=[Section.Code(code=Op.STOP)]) - - -@pytest.mark.parametrize( - "over_limit", - [0, 1, 2, 2**16 - MAX_INITCODE_SIZE], -) -@pytest.mark.eof_test_only -def test_max_size( - eof_test: EOFTestFiller, - over_limit: int, -) -> None: - """Verify EOF container valid at maximum size, invalid above.""" - # Expand the minimal EOF code by more noop code, reaching the desired - # target container size. 
- code = Container( - sections=[ - Section.Code( - code=Op.JUMPDEST - * (MAX_INITCODE_SIZE - len(VALID_CONTAINER) + over_limit) - + Op.STOP - ) - ] - ) - assert len(code) == MAX_INITCODE_SIZE + over_limit - eof_test( - container=code, - expect_exception=None - if over_limit == 0 - else EOFException.CONTAINER_SIZE_ABOVE_LIMIT, - ) - - -@pytest.mark.parametrize( - "size", - [MAX_INITCODE_SIZE + 1, MAX_INITCODE_SIZE * 2], -) -@pytest.mark.eof_test_only -def test_above_max_size_raw( - eof_test: EOFTestFiller, - size: int, -) -> None: - """ - Verify EOF container invalid above maximum size, regardless of header - contents. - """ - code = Op.INVALID * size - eof_test( - container=Container(raw_bytes=code), - expect_exception=EOFException.CONTAINER_SIZE_ABOVE_LIMIT, - ) - - -@pytest.mark.parametrize( - "code", - [ - pytest.param( - Container( - sections=[ - Section.Code(code=Op.STOP, custom_size=MAX_INITCODE_SIZE) - ] - ), - id="1st_code_section", - ), - pytest.param( - Container( - sections=[ - Section.Code(code=Op.STOP), - Section.Code(code=Op.STOP, custom_size=MAX_INITCODE_SIZE), - ] - ), - id="2nd_code_section", - ), - pytest.param( - Container( - sections=[ - Section.Code(code=Op.STOP), - Section.Container( - container=Op.STOP, custom_size=MAX_INITCODE_SIZE - ), - ] - ), - id="1st_container_section", - ), - pytest.param( - Container( - sections=[ - Section.Code(code=Op.STOP), - Section.Container(container=Op.STOP), - Section.Container( - container=Op.STOP, custom_size=MAX_INITCODE_SIZE - ), - ] - ), - id="2nd_container_section", - ), - ], -) -def test_section_after_end_of_container( - eof_test: EOFTestFiller, - code: Container, -) -> None: - """ - Verify EOF container is invalid if any of sections declares above container - size. - """ - eof_test( - container=code, - expect_exception=EOFException.INVALID_SECTION_BODIES_SIZE, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py deleted file mode 100644 index a0e6e7f4a6..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py +++ /dev/null @@ -1,1461 +0,0 @@ -"""EOF validation tests for EIP-3540 container format.""" - -import itertools - -import pytest -from execution_testing import EOFException, EOFTestFiller, Op -from execution_testing.test_types.eof.v1 import ( - AutoSection, - Container, - ContainerKind, - Section, - SectionKind, -) -from execution_testing.test_types.eof.v1.constants import ( - MAX_CODE_INPUTS, - MAX_CODE_OUTPUTS, - MAX_CODE_SECTIONS, - MAX_STACK_INCREASE_LIMIT, -) - -from .. 
import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-3540.md" -REFERENCE_SPEC_VERSION = "8dcb0a8c1c0102c87224308028632cc986a61183" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - -VALID_CONTAINER = Container(sections=[Section.Code(code=Op.STOP)]) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="single_code_section_max_stack_size", - sections=[ - Section.Code( - code=(Op.CALLER * MAX_STACK_INCREASE_LIMIT) - + (Op.POP * MAX_STACK_INCREASE_LIMIT) - + Op.STOP, - max_stack_height=MAX_STACK_INCREASE_LIMIT, - ), - ], - ), - Container( - name="code_section_with_inputs_outputs", - sections=[ - Section.Code( - code=(Op.PUSH0 + Op.CALLF[1] + Op.STOP), - ), - Section.Code( - code=Op.POP + Op.PUSH0 + Op.RETF, - code_inputs=1, - code_outputs=1, - ), - ], - ), - Container( - name="code_section_input_maximum", - sections=[ - Section.Code( - code=( - (Op.PUSH0 * MAX_CODE_INPUTS) + Op.CALLF[1] + Op.STOP - ), - max_stack_height=MAX_CODE_INPUTS, - ), - Section.Code( - code=(Op.POP * MAX_CODE_INPUTS) + Op.RETF, - code_inputs=MAX_CODE_INPUTS, - code_outputs=0, - max_stack_height=MAX_CODE_INPUTS, - ), - ], - ), - Container( - name="code_section_output_maximum", - sections=[ - Section.Code( - code=(Op.CALLF[1] + Op.STOP), - max_stack_height=MAX_CODE_OUTPUTS, - ), - Section.Code( - code=(Op.PUSH0 * MAX_CODE_OUTPUTS) + Op.RETF, - code_inputs=0, - code_outputs=MAX_CODE_OUTPUTS, - max_stack_height=MAX_CODE_OUTPUTS, - ), - ], - ), - Container( - name="multiple_code_sections", - sections=[ - Section.Code( - Op.CALLF[1] + Op.STOP, - ), - Section.Code( - code=Op.RETF, - code_inputs=0, - code_outputs=0, - ), - ], - ), - Container( - name="multiple_code_sections_max_inputs_max_outputs", - sections=[ - Section.Code( - (Op.PUSH0 * MAX_CODE_OUTPUTS) + Op.CALLF[1] + Op.STOP, - max_stack_height=MAX_CODE_OUTPUTS, - ), - Section.Code( - code=Op.RETF, - code_inputs=MAX_CODE_INPUTS, - code_outputs=MAX_CODE_OUTPUTS, - max_stack_height=MAX_CODE_INPUTS, - ), - ], - ), - Container( - name="single_subcontainer_without_data", - sections=[ - Section.Code(Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP), - Section.Container(Container.Code(Op.INVALID)), - ], - ), - Container( - name="single_subcontainer_with_data", - sections=[ - Section.Code(Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP), - Section.Container(Container.Code(Op.INVALID)), - Section.Data(data="0xAA"), - ], - ), - ], - ids=lambda c: c.name, -) -def test_valid_containers( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """Test various types of valid containers.""" - assert container.validity_error is None, ( - f"Valid container with validity error: {container.validity_error}" - ) - eof_test(container=container) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="empty_container", - raw_bytes=b"", - validity_error=[ - EOFException.INVALID_MAGIC, - ], - ), - Container( - name="single_code_section_no_data_section", - sections=[ - Section.Code(Op.STOP), - ], - auto_data_section=False, - validity_error=[ - EOFException.MISSING_DATA_SECTION, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="incomplete_magic", - raw_bytes="ef", - validity_error=EOFException.INVALID_MAGIC, - ), - Container( - name="no_version", - raw_bytes="ef00", - validity_error=[ - EOFException.INVALID_VERSION, - EOFException.INVALID_MAGIC, - ], - ), - Container( - name="no_type_header", - raw_bytes="ef00 01", - validity_error=EOFException.MISSING_HEADERS_TERMINATOR, - ), - Container( - name="no_type_section_size", - raw_bytes="ef00 01 01", - 
validity_error=[ - EOFException.MISSING_HEADERS_TERMINATOR, - EOFException.INVALID_TYPE_SECTION_SIZE, - ], - ), - Container( - name="incomplete_type_section_size", - raw_bytes="ef00010100", - validity_error=[ - EOFException.INCOMPLETE_SECTION_SIZE, - EOFException.INVALID_TYPE_SECTION_SIZE, - ], - ), - Container( - name="no_code_header", - raw_bytes=bytes([0xEF, 0x00, 0x01, 0x01, 0x00, 0x04]), - validity_error=[ - EOFException.MISSING_CODE_HEADER, - EOFException.MISSING_HEADERS_TERMINATOR, - ], - ), - Container( - name="no_code_header_2", - raw_bytes=bytes([0xEF, 0x00, 0x01, 0x01, 0x00, 0x04, 0xFE]), - validity_error=[ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="no_code_header_3", - raw_bytes=bytes([0xEF, 0x00, 0x01, 0x01, 0x00, 0x04, 0x00]), - validity_error=[ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="no_code_header_4", - sections=[ - Section(kind=SectionKind.TYPE, data="00800000"), - Section.Data("da"), - ], - expected_bytecode="ef0001 010004 ff0001 00 00800000 da", - validity_error=[ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="code_section_count_missing", - raw_bytes=bytes([0xEF, 0x00, 0x01, 0x01, 0x00, 0x04, 0x02]), - validity_error=EOFException.INCOMPLETE_SECTION_NUMBER, - ), - Container( - name="code_section_count_incomplete", - raw_bytes="ef00 01 01 0004 02 00", - validity_error=EOFException.INCOMPLETE_SECTION_NUMBER, - ), - Container( - name="code_section_size_missing", - raw_bytes=bytes( - [0xEF, 0x00, 0x01, 0x01, 0x00, 0x04, 0x02, 0x00, 0x01] - ), - validity_error=[ - EOFException.MISSING_HEADERS_TERMINATOR, - EOFException.ZERO_SECTION_SIZE, - ], - ), - Container( - name="code_section_size_incomplete", - raw_bytes="ef00 01 01 0004 02 0001 00", - validity_error=[ - EOFException.INCOMPLETE_SECTION_SIZE, - EOFException.ZERO_SECTION_SIZE, - ], - ), - Container( - name="code_section_count_0x8000_truncated", - raw_bytes=bytes( - [0xEF, 0x00, 0x01, 0x01, 0x00, 0x04, 0x02, 0x80, 0x00] - ), - validity_error=EOFException.TOO_MANY_CODE_SECTIONS, - ), - Container( - name="code_section_count_0xFFFF_truncated", - raw_bytes=bytes( - [0xEF, 0x00, 0x01, 0x01, 0x00, 0x04, 0x02, 0xFF, 0xFF] - ), - validity_error=EOFException.TOO_MANY_CODE_SECTIONS, - ), - pytest.param( - Container( - name="code_section_count_0x8000", - raw_bytes=bytes( - [0xEF, 0x00, 0x01, 0x01, 0x00, 0x04, 0x02, 0x80, 0x00] - + [0x00, 0x01] * 0x8000 - ), - validity_error=EOFException.CONTAINER_SIZE_ABOVE_LIMIT, - ), - marks=pytest.mark.eof_test_only(reason="initcode too large"), - ), - pytest.param( - Container( - name="code_section_count_0xFFFF", - raw_bytes=bytes( - [0xEF, 0x00, 0x01, 0x01, 0x00, 0x04, 0x02, 0xFF, 0xFF] - + [0x00, 0x01] * 0xFFFF - ), - validity_error=EOFException.CONTAINER_SIZE_ABOVE_LIMIT, - ), - marks=pytest.mark.eof_test_only(reason="initcode too large"), - ), - Container( - name="code_section_size_0x8000_truncated", - raw_bytes=bytes( - [ - 0xEF, - 0x00, - 0x01, - 0x01, - 0x00, - 0x04, - 0x02, - 0x00, - 0x01, - 0x80, - 0x00, - ] - ), - validity_error=EOFException.MISSING_HEADERS_TERMINATOR, - ), - Container( - name="code_section_size_0xFFFF_truncated", - raw_bytes=bytes( - [ - 0xEF, - 0x00, - 0x01, - 0x01, - 0x00, - 0x04, - 0x02, - 0x00, - 0x01, - 0xFF, - 0xFF, - ] - ), - validity_error=EOFException.MISSING_HEADERS_TERMINATOR, - ), - Container( - name="terminator_incomplete", - header_terminator=b"", - sections=[ - 
Section(kind=SectionKind.TYPE, data=b"", custom_size=4), - Section.Code(code=b"", custom_size=0x01), - ], - expected_bytecode="ef00 01 01 0004 02 0001 0001 ff 0000", - validity_error=EOFException.MISSING_HEADERS_TERMINATOR, - ), - Container( - name="truncated_header_data_section", - raw_bytes="ef00 01 01 0004 02 0001 0001", - validity_error=EOFException.MISSING_HEADERS_TERMINATOR, - ), - Container( - name="no_data_section_size", - raw_bytes="ef00 01 01 0004 02 0001 0001 ff", - validity_error=EOFException.MISSING_HEADERS_TERMINATOR, - ), - Container( - name="data_section_size_incomplete", - raw_bytes="ef00 01 01 0004 02 0001 0001 ff 00", - validity_error=EOFException.INCOMPLETE_SECTION_SIZE, - ), - Container( - name="no_container_section_count", - raw_bytes="ef00 01 01 0004 02 0001 0001 03", - validity_error=EOFException.INCOMPLETE_SECTION_NUMBER, - ), - Container( - name="incomplete_container_section_count", - raw_bytes="ef00 01 01 0004 02 0001 0001 03 00", - validity_error=EOFException.INCOMPLETE_SECTION_NUMBER, - ), - Container( - name="zero_container_section_count", - raw_bytes="ef00 01 01 0004 02 " - "0001 0001 03 0000 ff 0000 00 00800000 00", - validity_error=EOFException.ZERO_SECTION_SIZE, - ), - Container( - name="no_container_section_size", - raw_bytes="ef00 01 01 0004 02 0001 0001 03 0001", - validity_error=EOFException.MISSING_HEADERS_TERMINATOR, - ), - Container( - name="incomplete_container_section_size", - raw_bytes="ef00 01 01 0004 02 0001 0001 03 0001 00", - validity_error=EOFException.INCOMPLETE_SECTION_SIZE, - ), - Container( - name="incomplete_container_section_size_2", - raw_bytes="ef00 01 01 0004 02 0001 0001 03 0001 0000", - validity_error=EOFException.INCOMPLETE_SECTION_SIZE, - ), - Container( - name="incomplete_container_section_size_3", - raw_bytes="ef00 01 01 0004 02 0001 0001 03 0001 000000", - validity_error=EOFException.INCOMPLETE_SECTION_SIZE, - ), - Container( - name="incomplete_container_section_size_4", - raw_bytes="ef00 01 01 0004 02 0001 0001 03 0002 00000001", - validity_error=EOFException.INCOMPLETE_SECTION_SIZE, - ), - Container( - name="incomplete_container_section_size_5", - raw_bytes="ef00 01 01 0004 02 0001 0001 03 0002 00000001 00", - validity_error=EOFException.INCOMPLETE_SECTION_SIZE, - ), - Container( - name="incomplete_container_section_size_6", - raw_bytes="ef00 01 01 0004 02 0001 0001 03 0002 00000001 0000", - validity_error=EOFException.INCOMPLETE_SECTION_SIZE, - ), - Container( - name="incomplete_container_section_size_7", - raw_bytes="ef00 01 01 0004 02 0001 0001 03 0002 00000001 000000", - validity_error=EOFException.INCOMPLETE_SECTION_SIZE, - ), - Container( - name="zero_size_container_section", - raw_bytes="ef00 01 01 0004 02 0001 0001 03 0001 00000000 ff 0000 " - "00 00800000 00", - validity_error=EOFException.ZERO_SECTION_SIZE, - ), - Container( - name="container_section_size_2bytes", - raw_bytes="ef00 01 01 0004 02 0001 000b 03 0001 0014 ff 0000 00 " - "00800004" - "6000600060006000ec0000" - "ef00 01 01 0004 02 0001 0001 ff 0000 00 00800000 fe", - validity_error=EOFException.MISSING_DATA_SECTION, - ), - Container( - name="truncated_header_data_section_with_container_section", - raw_bytes="ef00 01 01 0004 02 0001 0001 03 0001 00000001", - validity_error=EOFException.MISSING_HEADERS_TERMINATOR, - ), - Container( - name="no_data_section_size_with_container_section", - raw_bytes="ef00 01 01 0004 02 0001 0001 03 0001 00000001 ff", - validity_error=EOFException.MISSING_HEADERS_TERMINATOR, - ), - Container( - 
name="data_section_size_incomplete_with_container_section", - raw_bytes="ef00 01 01 0004 02 0001 0001 03 0001 00000001 ff 00", - validity_error=EOFException.INCOMPLETE_SECTION_SIZE, - ), - Container( - # EOF code missing mandatory type section - name="EOF1I4750_0001", - raw_bytes="ef00010200010001ff00000000800000fe", - validity_error=EOFException.MISSING_TYPE_HEADER, - ), - Container( - # EOF code containing multiple type headers - name="multiple_type_headers_1", # EOF1I4750_0002 - raw_bytes="ef0001010004010004ff00000000800000fe", - validity_error=EOFException.MISSING_CODE_HEADER, - ), - Container( - # EOF code containing multiple type headers, second one matches - # code length - name="multiple_type_headers_2", - raw_bytes="ef0001010004010001ff00000000800000fe", - validity_error=EOFException.MISSING_CODE_HEADER, - ), - Container( - # EOF code containing multiple type headers followed by 2 code - # sections - name="multiple_type_headers_3", - sections=[ - Section(kind=SectionKind.TYPE, data="00800000"), - Section(kind=SectionKind.TYPE, data="00800000"), - Section.Code(Op.JUMPF[1]), - Section.Code(Op.INVALID), - ], - validity_error=EOFException.MISSING_CODE_HEADER, - ), - Container( - # EOF code containing type section size (Size 1) - name="EOF1I4750_0003", - raw_bytes="ef00010100010200010001ff00000000800000fe", - validity_error=[ - EOFException.INVALID_TYPE_SECTION_SIZE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ], - ), - Container( - # EOF code containing type section size (Size 8 - 1 Code section) - name="EOF1I4750_0004", - raw_bytes="ef00010100080200010001ff00000000800000fe", - validity_error=EOFException.INVALID_SECTION_BODIES_SIZE, - ), - Container( - # EOF code containing type section size (Size 8 - 3 Code sections) - name="EOF1I4750_0005", - raw_bytes="ef0001010008020003000100010001ff0000000080000000800000fefefe", - validity_error=EOFException.INVALID_TYPE_SECTION_SIZE, - ), - Container( - name="no_sections", - sections=[], - auto_data_section=False, - auto_type_section=AutoSection.NONE, - expected_bytecode="ef0001 00", - validity_error=[ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="no_code_section_header", - sections=[ - Section(kind=SectionKind.TYPE, data=b"\0\x80\0\0"), - Section.Data("0x00"), - ], - expected_bytecode="ef00 01 01 0004 ff 0001 00 00800000 00", - auto_type_section=AutoSection.NONE, - validity_error=[ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="too_many_code_sections", - sections=[ - Section.Code( - Op.JUMPF[i + 1] if i < MAX_CODE_SECTIONS else Op.STOP - ) - for i in range(MAX_CODE_SECTIONS + 1) - ], - validity_error=EOFException.TOO_MANY_CODE_SECTIONS, - ), - Container( - name="zero_code_sections_header", - raw_bytes="ef00 01 01 0004 02 0000 ff 0000 00 00800000", - validity_error=[ - EOFException.ZERO_SECTION_SIZE, - EOFException.INCOMPLETE_SECTION_NUMBER, - ], - ), - Container( - name="zero_code_sections_header_empty_type_section", - raw_bytes="ef00 01 01 0000 02 0000 ff 0000 00", - validity_error=[ - EOFException.ZERO_SECTION_SIZE, - EOFException.INCOMPLETE_SECTION_NUMBER, - ], - ), - # The basic `no_section_terminator` cases just remove the terminator - # and the `00` for zeroth section inputs looks like one. Error is - # because the sections are wrongly sized. 
- Container( - name="no_section_terminator", - header_terminator=bytes(), - sections=[Section.Code(code=Op.STOP)], - validity_error=[ - EOFException.INVALID_SECTION_BODIES_SIZE, - EOFException.INVALID_FIRST_SECTION_TYPE, - ], - ), - Container( - name="no_section_terminator_1", - header_terminator=bytes(), - sections=[Section.Code(code=Op.STOP, custom_size=2)], - validity_error=[ - EOFException.INVALID_SECTION_BODIES_SIZE, - EOFException.INVALID_FIRST_SECTION_TYPE, - ], - ), - Container( - name="no_section_terminator_2", - header_terminator=bytes(), - sections=[Section.Code(code="0x", custom_size=3)], - validity_error=EOFException.INVALID_SECTION_BODIES_SIZE, - ), - Container( - name="no_section_terminator_3", - header_terminator=bytes(), - sections=[Section.Code(code=Op.PUSH1(0) + Op.STOP)], - validity_error=[ - EOFException.INVALID_SECTION_BODIES_SIZE, - EOFException.INVALID_FIRST_SECTION_TYPE, - ], - ), - # The following cases just remove the terminator and the `00` for - # zeroth section inputs looks like one. Section bodies are as the size - # prescribes here, so the error is about the inputs of zeroth section. - Container( - name="no_section_terminator_section_bodies_ok_1", - header_terminator=bytes(), - sections=[Section.Code(code=Op.JUMPDEST + Op.STOP, custom_size=1)], - validity_error=EOFException.INVALID_FIRST_SECTION_TYPE, - ), - Container( - name="no_section_terminator_section_bodies_ok_2", - header_terminator=bytes(), - sections=[ - Section.Code(code=Op.JUMPDEST * 2 + Op.STOP, custom_size=2) - ], - validity_error=EOFException.INVALID_FIRST_SECTION_TYPE, - ), - # Here the terminator is missing but made to look like a different - # section or arbitrary byte - Container( - name="no_section_terminator_nonzero", - header_terminator=b"01", - sections=[Section.Code(code=Op.STOP)], - validity_error=[ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="no_section_terminator_nonzero_1", - header_terminator=b"02", - sections=[Section.Code(code=Op.STOP, custom_size=2)], - validity_error=[ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="no_section_terminator_nonzero_2", - header_terminator=b"03", - sections=[Section.Code(code="0x", custom_size=3)], - validity_error=[ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="no_section_terminator_nonzero_3", - header_terminator=b"04", - sections=[Section.Code(code=Op.PUSH1(0) + Op.STOP)], - validity_error=[ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="no_section_terminator_nonzero_4", - header_terminator=b"fe", - sections=[Section.Code(code=Op.PUSH1(0) + Op.STOP)], - validity_error=[ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="truncated_before_type_section", - sections=[ - Section(kind=SectionKind.TYPE, data=b"", custom_size=4), - Section.Code(code=b"", custom_size=0x01), - ], - expected_bytecode="ef00 01 01 0004 02 0001 0001 ff 0000 00", - validity_error=EOFException.INVALID_SECTION_BODIES_SIZE, - ), - Container( - name="truncated_type_section_before_outputs", - sections=[ - Section(kind=SectionKind.TYPE, data=b"\0", custom_size=4), - Section.Code(code=b"", custom_size=0x01), - ], - expected_bytecode="ef00 01 01 0004 02 0001 0001 ff 0000 00 00", - validity_error=EOFException.INVALID_SECTION_BODIES_SIZE, - ), - Container( - 
name="truncated_type_section_before_max_stack_height", - sections=[ - Section(kind=SectionKind.TYPE, data=b"\0\x80", custom_size=4), - Section.Code(code=b"", custom_size=0x01), - ], - expected_bytecode="ef00 01 01 0004 02 0001 0001 ff 0000 00 0080", - validity_error=EOFException.INVALID_SECTION_BODIES_SIZE, - ), - Container( - name="truncated_type_section_truncated_max_stack_height", - sections=[ - Section( - kind=SectionKind.TYPE, data=b"\0\x80\0", custom_size=4 - ), - Section.Code(code=b"", custom_size=0x01), - ], - expected_bytecode="ef00 01 01 0004 02 0001 0001 ff 0000 00 008000", - validity_error=EOFException.INVALID_SECTION_BODIES_SIZE, - ), - Container( - name="no_code_section_contents", - sections=[Section.Code(code="0x", custom_size=0x01)], - validity_error=EOFException.INVALID_SECTION_BODIES_SIZE, - ), - Container( - name="incomplete_code_section_contents", - sections=[ - Section.Code(code=Op.STOP, custom_size=0x02), - ], - validity_error=EOFException.INVALID_SECTION_BODIES_SIZE, - ), - Container( - name="trailing_bytes_after_code_section", - sections=[Section.Code(code=Op.PUSH1(0) + Op.STOP)], - extra=bytes([0xDE, 0xAD, 0xBE, 0xEF]), - validity_error=EOFException.INVALID_SECTION_BODIES_SIZE, - ), - Container( - name="empty_code_section", - sections=[Section.Code(code="0x")], - # TODO the exception must be about code section - # EOFException.INVALID_CODE_SECTION, - validity_error=EOFException.ZERO_SECTION_SIZE, - ), - Container( - name="empty_code_section_with_non_empty_data", - sections=[ - Section.Code(code="0x"), - Section.Data(data="0xDEADBEEF"), - ], - # TODO the exception must be about code section - # EOFException.INVALID_CODE_SECTION, - validity_error=EOFException.ZERO_SECTION_SIZE, - ), - Container( - name="no_container_section_contents", - sections=[ - Section.Code(Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP), - Section(kind=SectionKind.CONTAINER, data=b"", custom_size=20), - ], - validity_error=EOFException.INVALID_SECTION_BODIES_SIZE, - ), - Container( - name="no_container_section_contents_with_data", - sections=[ - Section.Code(Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP), - Section(kind=SectionKind.CONTAINER, data=b"", custom_size=20), - Section.Data(b"\0" * 20), - ], - validity_error=EOFException.TOPLEVEL_CONTAINER_TRUNCATED, - ), - Container( - name="no_data_section_contents", - sections=[ - Section.Code(Op.STOP), - Section.Data(data="0x", custom_size=1), - ], - code="ef0001 010004 0200010001 ff0001 00 00800000 00", - validity_error=EOFException.TOPLEVEL_CONTAINER_TRUNCATED, - ), - Container( - name="data_section_contents_incomplete", - sections=[ - Section.Code(Op.STOP), - Section.Data(data="0xAABBCC", custom_size=4), - ], - validity_error=EOFException.TOPLEVEL_CONTAINER_TRUNCATED, - ), - Container( - name="data_section_preceding_code_section", - auto_data_section=False, - auto_sort_sections=AutoSection.NONE, - sections=[ - Section.Data(data="0xDEADBEEF"), - Section.Code(Op.STOP), - ], - validity_error=[ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="data_section_without_code_section", - sections=[Section.Data(data="0xDEADBEEF")], - # TODO the actual exception should be - # EOFException.MISSING_CODE_HEADER - validity_error=[ - EOFException.ZERO_SECTION_SIZE, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="no_section_terminator_3a", - header_terminator=bytes(), - sections=[Section.Code(code="0x030004")], - # TODO the exception must be about terminator - validity_error=[ - 
EOFException.INVALID_SECTION_BODIES_SIZE, - EOFException.INVALID_FIRST_SECTION_TYPE, - ], - ), - Container( - name="no_section_terminator_4a", - header_terminator=bytes(), - sections=[ - Section.Code(Op.STOP), - Section.Data(data="0xAABBCCDD"), - ], - # TODO: The error of this validation can be random. - validity_error=EOFException.INVALID_FIRST_SECTION_TYPE, - ), - Container( - name="trailing_bytes_after_data_section", - extra=bytes([0xEE]), - sections=[ - Section.Code(code=Op.PUSH1(0) + Op.STOP), - Section.Data(data="0xAABBCCDD"), - ], - # TODO should be more specific exception about trailing bytes - validity_error=EOFException.INVALID_SECTION_BODIES_SIZE, - ), - Container( - name="multiple_data_sections", - sections=[ - Section.Code(code=Op.PUSH1(0) + Op.STOP), - Section.Data(data="0xAABBCC"), - Section.Data(data="0xAABBCC"), - ], - expected_bytecode=( - "ef00 01 01 0004 02 0001 0003 ff 0003 ff 0003 00 00800001 " - "600000 AABBCC AABBCC" - ), - validity_error=[ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="multiple_code_headers", - sections=[ - Section.Code(Op.JUMPF[1]), - Section.Data(data="0xAA"), - Section.Code(Op.STOP), - ], - auto_sort_sections=AutoSection.ONLY_BODY, - expected_bytecode=( - "ef00 01 01 0008 02 0001 0003 ff 0001 02 0001 0001 00" - "00800000 00800000 E50001 00 AA" - ), - validity_error=[ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="multiple_code_headers_2", - sections=[ - Section.Code(Op.JUMPF[1]), - Section.Code(Op.STOP), - Section.Data(data="0xAA"), - ], - skip_join_concurrent_sections_in_header=True, - expected_bytecode=( - "ef00 01 01 0008 02 0001 0003 02 0001 0001 ff 0001 00" - "00800000 00800000 E50001 00 AA" - ), - validity_error=[ - EOFException.MISSING_DATA_SECTION, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="duplicated_code_header", - sections=[ - Section.Code(Op.STOP), - Section.Code( - b"", - custom_size=1, - skip_types_header_listing=True, - skip_types_body_listing=True, - ), - Section.Data(data="0xAA"), - ], - skip_join_concurrent_sections_in_header=True, - expected_bytecode=( - "ef00 01 01 0004 02 0001 0001 02 0001 0001 ff 0001 00 " - "00800000 00 AA" - ), - validity_error=[ - EOFException.MISSING_DATA_SECTION, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="multiple_code_and_data_sections", - sections=[ - Section.Code(Op.JUMPF[1]), - Section.Code(Op.STOP), - Section.Data(data="0xAA"), - Section.Data(data="0xAA"), - ], - expected_bytecode=( - "ef00 01 01 0008 02 0002 0003 0001 ff 0001 ff 0001 00" - "00800000 00800000 E50001 00 AA AA" - ), - validity_error=[ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="multiple_code_and_data_sections_2", - sections=[ - Section.Code(Op.JUMPF[1]), - Section.Code(Op.STOP), - Section.Data(data="0xAA"), - Section.Data(data="0xAA"), - ], - skip_join_concurrent_sections_in_header=True, - expected_bytecode=( - "ef00 01 01 0008 02 0001 0003 02 0001 0001 ff 0001 ff 0001 00" - "00800000 00800000 E50001 00 AA AA" - ), - validity_error=[ - EOFException.MISSING_DATA_SECTION, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="multiple_container_headers", - sections=[ - Section.Code( - Op.EOFCREATE[0](0, 0, 0, 0) - + Op.EOFCREATE[1](0, 0, 0, 0) - + Op.STOP - ), - Section.Container(Container.Code(code=Op.INVALID)), - Section.Data(data="0xAA"), - 
Section.Container(Container.Code(code=Op.INVALID)), - ], - auto_sort_sections=AutoSection.ONLY_BODY, - expected_bytecode=( - "ef00 01 01 0004 02 0001 0015 03 0001 00000014 ff 0001 03 " - "0001 00000014 00" - "00800005 6000600060006000ec00 6000600060006000ec01 00" - "ef00 01 01 0004 02 0001 0001 ff 0000 00 00800000 fe" - "ef00 01 01 0004 02 0001 0001 ff 0000 00 00800000 fe" - "aa" - ), - validity_error=[ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="multiple_container_headers_2", - sections=[ - Section.Code( - Op.EOFCREATE[0](0, 0, 0, 0) - + Op.EOFCREATE[1](0, 0, 0, 0) - + Op.STOP - ), - Section.Container(Container.Code(code=Op.INVALID)), - Section.Container(Container.Code(code=Op.INVALID)), - Section.Data(data="0xAA"), - ], - skip_join_concurrent_sections_in_header=True, - expected_bytecode=( - "ef00 01 01 0004 02 0001 0015 03 0001 00000014 03 0001 " - "00000014 ff 0001 00" - "00800005 6000600060006000ec00 6000600060006000ec01 00" - "ef00 01 01 0004 02 0001 0001 ff 0000 00 00800000 fe" - "ef00 01 01 0004 02 0001 0001 ff 0000 00 00800000 fe" - "aa" - ), - validity_error=[ - EOFException.MISSING_DATA_SECTION, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="duplicated_container_header", - sections=[ - Section.Code(Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP), - Section.Container(Container.Code(code=Op.INVALID)), - Section(kind=SectionKind.CONTAINER, data=b"", custom_size=20), - Section.Data(data="0xAA"), - ], - skip_join_concurrent_sections_in_header=True, - expected_bytecode=( - "ef00 01 01 0004 02 0001 000b 03 0001 00000014 03 0001 " - "00000014 ff 0001 00" - "00800004 6000600060006000ec00 00" - "ef00 01 01 0004 02 0001 0001 ff 0000 00 00800000 fe" - "aa" - ), - validity_error=[ - EOFException.MISSING_DATA_SECTION, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="unknown_section_1", - sections=[ - Section.Code(Op.STOP), - Section.Data(data="0x"), - Section(kind=4, data="0x01"), - ], - auto_sort_sections=AutoSection.NONE, - validity_error=[ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="unknown_section_2", - sections=[ - Section(kind=4, data="0x01"), - Section.Data(data="0x"), - Section.Code(Op.STOP), - ], - auto_sort_sections=AutoSection.NONE, - # TODO the exception should be about unknown section definition - validity_error=[ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="unknown_section_empty", - sections=[ - Section.Code(Op.STOP), - Section.Data(data="0x"), - Section(kind=4, data="0x"), - ], - auto_sort_sections=AutoSection.NONE, - validity_error=[ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="no_type_section", - sections=[ - Section.Code(code=Op.STOP), - Section.Data("0x00"), - ], - auto_type_section=AutoSection.NONE, - validity_error=[ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="no_type_section_2_codes", - sections=[ - Section.Code(Op.INVALID), - Section.Code(Op.INVALID), - ], - auto_type_section=AutoSection.NONE, - auto_data_section=False, - expected_bytecode="ef0001 020002 0001 0001 00 fefe", - validity_error=[ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="no_type_section_data_section", - sections=[ - Section.Code(Op.INVALID), - Section.Data("0xda"), - ], - auto_type_section=AutoSection.NONE, - 
expected_bytecode="ef0001 020001 0001 ff0001 00 feda", - validity_error=[ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="no_type_section_container_section", - sections=[ - Section.Code(Op.INVALID), - Section.Container( - Container( - sections=[ - Section.Code(code=Op.RETURNCODE[0](0, 0)), - Section.Container( - container=Container.Code(code=Op.STOP) - ), - ], - ) - ), - ], - auto_type_section=AutoSection.NONE, - expected_bytecode="ef0001 020001 0001 030001 00000034 ff0000 00 fe" - "ef0001 010004 020001 0006 030001 00000014 ff0000 00 00800002 " - "60006000ee00" - "ef0001 010004 020001 0001 ff0000 00 0080000000", - validity_error=[ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="too_many_type_sections", - sections=[ - Section(kind=SectionKind.TYPE, data="0x00000000"), - Section(kind=SectionKind.TYPE, data="0x00000000"), - Section.Code(Op.STOP), - ], - auto_type_section=AutoSection.NONE, - validity_error=[ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="too_many_type_sections_2", - sections=[ - Section(kind=SectionKind.TYPE, data="0x00800000"), - Section(kind=SectionKind.TYPE, data="0x00800000"), - Section.Code(Op.STOP), - ], - auto_type_section=AutoSection.NONE, - validity_error=[ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="empty_type_section_empty_code", - sections=[ - Section(kind=SectionKind.TYPE), - Section.Code(), - ], - expected_bytecode="ef00 01 01 0000 02 0001 0000 ff 0000 00", - validity_error=[ - EOFException.ZERO_SECTION_SIZE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ], - ), - Container( - name="empty_type_section_with_code", - sections=[ - Section(kind=SectionKind.TYPE), - Section.Code(Op.STOP), - ], - expected_bytecode="ef00 01 01 0000 02 0001 0001 ff 0000 00 00", - validity_error=[ - EOFException.ZERO_SECTION_SIZE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ], - ), - Container( - name="type_section_too_small_single_code_section_1", - sections=[ - Section(kind=SectionKind.TYPE, data="0x00"), - Section.Code(Op.STOP), - ], - auto_type_section=AutoSection.NONE, - validity_error=EOFException.INVALID_TYPE_SECTION_SIZE, - ), - Container( - name="type_section_too_small_single_code_section_2", - sections=[ - Section(kind=SectionKind.TYPE, data="0x008000"), - Section.Code(Op.STOP), - ], - auto_type_section=AutoSection.NONE, - validity_error=EOFException.INVALID_TYPE_SECTION_SIZE, - ), - Container( - name="type_section_too_big_single_code_section", - sections=[ - Section(kind=SectionKind.TYPE, data="0x0080000000"), - Section.Code(Op.STOP), - ], - auto_type_section=AutoSection.NONE, - validity_error=EOFException.INVALID_TYPE_SECTION_SIZE, - ), - Container( - name="type_section_too_small_multiple_code_sections_1", - sections=[ - Section(kind=SectionKind.TYPE, data="0x0080000000"), - Section.Code(Op.STOP), - Section.Code(Op.STOP), - ], - auto_type_section=AutoSection.NONE, - validity_error=EOFException.INVALID_TYPE_SECTION_SIZE, - ), - Container( - name="type_section_too_small_multiple_code_sections_2", - sections=[ - Section(kind=SectionKind.TYPE, data="0x008000000080"), - Section.Code(Op.STOP), - Section.Code(Op.STOP), - ], - auto_type_section=AutoSection.NONE, - validity_error=EOFException.INVALID_TYPE_SECTION_SIZE, - ), - Container( - name="type_section_too_big_multiple_code_sections", - sections=[ - Section(kind=SectionKind.TYPE, 
data="0x008000000080000000"), - Section.Code(Op.STOP), - Section.Code(Op.STOP), - ], - auto_type_section=AutoSection.NONE, - validity_error=EOFException.INVALID_TYPE_SECTION_SIZE, - ), - Container( - name="multiple_code_section_non_zero_inputs", - sections=[ - Section.Code(code=Op.POP + Op.RETF, code_inputs=1), - Section.Code(Op.STOP), - ], - # TODO the actual exception should be - # EOFException.INVALID_TYPE_BODY, - validity_error=EOFException.INVALID_FIRST_SECTION_TYPE, - ), - Container( - name="multiple_code_section_non_zero_outputs", - sections=[ - Section.Code(code=Op.PUSH0, code_outputs=1), - Section.Code(Op.STOP), - ], - # TODO the actual exception should be - # EOFException.INVALID_TYPE_BODY, - validity_error=EOFException.INVALID_FIRST_SECTION_TYPE, - ), - Container( - name="data_section_before_code_with_type", - sections=[ - Section.Data(data="0xAA"), - Section.Code(Op.STOP), - ], - auto_sort_sections=AutoSection.NONE, - validity_error=[ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ), - Container( - name="data_section_listed_in_type", - sections=[ - Section.Data(data="0x00", force_type_listing=True), - Section.Code(Op.STOP), - ], - validity_error=[ - EOFException.INVALID_TYPE_SECTION_SIZE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ], - ), - Container( - name="single_code_section_incomplete_type", - sections=[ - Section(kind=SectionKind.TYPE, data="0x00", custom_size=2), - Section.Code(Op.STOP), - ], - validity_error=[ - EOFException.INVALID_SECTION_BODIES_SIZE, - EOFException.INVALID_TYPE_SECTION_SIZE, - ], - ), - Container( - name="code_section_input_too_large", - sections=[ - Section.Code( - code=( - (Op.PUSH0 * (MAX_CODE_INPUTS + 1)) - + Op.CALLF[1] - + Op.STOP - ), - max_stack_height=(MAX_CODE_INPUTS + 1), - ), - Section.Code( - code=(Op.POP * (MAX_CODE_INPUTS + 1)) + Op.RETF, - code_inputs=(MAX_CODE_INPUTS + 1), - code_outputs=0, - max_stack_height=(MAX_CODE_INPUTS + 1), - ), - ], - validity_error=EOFException.INPUTS_OUTPUTS_NUM_ABOVE_LIMIT, - ), - Container( - name="invalid_inputs_to_non_returning_code_section_2", - sections=[ - Section.Code( - code=Op.PUSH1(0) * 128 + Op.CALLF[1] + Op.STOP, - max_stack_height=128, - ), - Section.Code( - Op.STOP, - code_inputs=128, - code_outputs=0, - max_stack_height=128, - ), - ], - validity_error=EOFException.INPUTS_OUTPUTS_NUM_ABOVE_LIMIT, - ), - Container( - name="code_section_output_too_large", - sections=[ - Section.Code( - code=(Op.CALLF[1] + Op.STOP), - max_stack_height=(MAX_CODE_OUTPUTS + 2), - ), - Section.Code( - code=(Op.PUSH0 * (MAX_CODE_OUTPUTS + 2)) + Op.RETF, - code_inputs=0, - code_outputs=(MAX_CODE_OUTPUTS + 2), - max_stack_height=(MAX_CODE_OUTPUTS + 2), - ), - ], - validity_error=EOFException.INPUTS_OUTPUTS_NUM_ABOVE_LIMIT, - ), - Container( - name="single_code_section_max_stack_size_too_large", - sections=[ - Section.Code( - code=Op.CALLER * 1024 + Op.POP * 1024 + Op.STOP, - max_stack_height=1024, - ), - ], - # TODO auto types section generation probably failed, the exception - # must be about code - validity_error=EOFException.MAX_STACK_INCREASE_ABOVE_LIMIT, - ), - ], - ids=lambda c: c.name, -) -def test_invalid_containers( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """Test invalid containers.""" - assert container.validity_error is not None, ( - "Invalid container without validity error" - ) - eof_test( - container=container, - expect_exception=container.validity_error, - ) - - -@pytest.mark.parametrize( - "magic", - set( - itertools.product( - [0, 1, 0x60, 
0xEE, 0xEF, 0xF0, 0xFF], [0, 1, 2, 0xFE, 0xFF] - ) - ) - - {(0xEF, 0)}, -) -def test_magic_validation( - eof_test: EOFTestFiller, - magic: tuple[int, int], -) -> None: - """Verify EOF container 2-byte magic.""" - code = bytearray(bytes(VALID_CONTAINER)) - code[0:2] = magic - eof_test( - container=Container(raw_bytes=bytes(code)), - expect_exception=EOFException.INVALID_MAGIC, - ) - - -@pytest.mark.parametrize("version", [0, 2, 0xEF, 0xFE, 0xFF]) -def test_version_validation( - eof_test: EOFTestFiller, - version: int, -) -> None: - """Verify EOF container version.""" - code = bytearray(bytes(VALID_CONTAINER)) - code[2] = version - eof_test( - container=Container(raw_bytes=bytes(code)), - expect_exception=EOFException.INVALID_VERSION, - ) - - -@pytest.mark.parametrize("plus_data", [False, True]) -@pytest.mark.parametrize("plus_container", [False, True]) -def test_single_code_section( - eof_test: EOFTestFiller, - plus_data: bool, - plus_container: bool, -) -> None: - """Verify EOF container single code section.""" - sections = [ - Section.Code(Op.RETURNCODE[0](0, 0) if plus_container else Op.STOP) - ] - if plus_container: - sections.append( - Section.Container( - container=Container( - sections=[ - Section.Code( - Op.JUMPF[i + 1] - if i < (MAX_CODE_SECTIONS - 1) - else Op.STOP - ) - for i in range(MAX_CODE_SECTIONS) - ], - ) - ) - ) - if plus_data: - sections.append(Section.Data(data=b"\0")) - eof_test( - container=Container( - name="single_code_section", - sections=sections, - kind=ContainerKind.INITCODE - if plus_container - else ContainerKind.RUNTIME, - ), - ) - - -@pytest.mark.parametrize("plus_data", [False, True]) -@pytest.mark.parametrize("plus_container", [False, True]) -def test_max_code_sections( - eof_test: EOFTestFiller, - plus_data: bool, - plus_container: bool, -) -> None: - """Verify EOF container maximum number of code sections.""" - if plus_container: - sections = [ - Section.Code( - Op.JUMPF[i + 1] - if i < (MAX_CODE_SECTIONS - 1) - else Op.RETURNCODE[0](0, 0) - ) - for i in range(MAX_CODE_SECTIONS) - ] - sections.append( - Section.Container( - container=Container( - sections=[ - Section.Code( - Op.JUMPF[i + 1] - if i < (MAX_CODE_SECTIONS - 1) - else Op.STOP - ) - for i in range(MAX_CODE_SECTIONS) - ], - ) - ) - ) - else: - sections = [ - Section.Code( - Op.JUMPF[i + 1] if i < (MAX_CODE_SECTIONS - 1) else Op.STOP - ) - for i in range(MAX_CODE_SECTIONS) - ] - if plus_data: - sections.append(Section.Data(data=b"\0")) - eof_test( - container=Container( - name="max_code_sections", - sections=sections, - kind=ContainerKind.INITCODE - if plus_container - else ContainerKind.RUNTIME, - ), - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_eof_example.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_eof_example.py deleted file mode 100644 index dd986215d9..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_eof_example.py +++ /dev/null @@ -1,173 +0,0 @@ -"""EOF Classes example use.""" - -import pytest -from execution_testing import ( - Bytecode, - EOFException, - EOFTestFiller, - Op, -) -from execution_testing.test_types.eof.v1 import ( - AutoSection, - Container, - Section, -) - -from .. 
import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-3540.md" -REFERENCE_SPEC_VERSION = "8dcb0a8c1c0102c87224308028632cc986a61183" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -def test_eof_example(eof_test: EOFTestFiller) -> None: - """Example of python EOF classes.""" - # Lets construct an EOF container code - eof_code = Container( - name="valid_container_example", - sections=[ - # TYPES section is constructed automatically based on CODE - # CODE section - Section.Code( - code=Op.CALLF[1](Op.PUSH0) + Op.STOP, # bytecode to be - # deployed in the body - # Code: call section 1 with a single zero as input, then stop. - max_stack_increase=1, # define code header (in body) stack - # size - ), - # There can be multiple code sections - Section.Code( - # Remove input and call section 2 with no inputs, then remove - # output and return - code=Op.POP + Op.CALLF[2]() + Op.POP + Op.RETF, - code_inputs=1, - code_outputs=0, - max_stack_increase=0, - ), - Section.Code( - # Call section 3 with two inputs (address twice), return - code=Op.CALLF[3](Op.DUP1, Op.ADDRESS) - + Op.POP - + Op.POP - + Op.RETF, - code_outputs=1, - max_stack_increase=3, - ), - Section.Code( - # Duplicate one input and return - code=Op.DUP1 + Op.RETF, - code_inputs=2, - code_outputs=3, - max_stack_increase=1, - ), - # DATA section - Section.Data("0xef"), - ], - expected_bytecode="ef00010100100200040005000600080002ff000100" - "00800001 01000000 00010003 02030001" - "5fe300010050e3000250e43080e300035050e480e4ef", - ) - - eof_test( - container=eof_code, - expect_exception=eof_code.validity_error, - ) - - -def test_eof_example_custom_fields(eof_test: EOFTestFiller) -> None: - """Example of python EOF container class tuning.""" - # if you need to overwrite certain structure bytes, you can use - # customization. this is useful for unit testing the eof structure format, - # you can reorganize sections and overwrite the header bytes for testing - # purposes. 
most of the combinations are covered by the unit tests - - # This features are subject for development and will change in the future - - eof_code = Container( - name="valid_container_example_2", - magic=b"\xef\x00", # magic can be overwritten for test purposes, - # (default is 0xEF00) - version=b"\x01", # version can be overwritten for testing purposes - # (default is 0x01) - header_terminator=b"\x00", # terminator byte can be overwritten - # (default is 0x00) - extra=b"", # extra bytes to be trailed after the container body bytes - # (default is None) - sections=[ - # TYPES section is constructed automatically based on CODE - # CODE section - Section.Code( - code=Op.PUSH1(2) - + Op.STOP, # this is the actual bytecode to be deployed in the - # body - max_stack_height=1, # define code header (in body) stack size - ), - # DATA section - Section.Data( - data="0xef", - # custom_size overrides the size bytes, so you can put only 1 - # byte into data but still make the header size of 2 to produce - # invalid section - # - # if custom_size != len(data), the section will be invalid - custom_size=1, - ), - ], - # auto generate types section based on provided code sections - # AutoSection.ONLY_BODY - means the section will be generated only for - # the body bytes - # - # AutoSection.ONLY_BODY - means the section will be generated only for - # the header bytes - auto_type_section=AutoSection.AUTO, - # auto generate default data section (0x empty), by default is True - auto_data_section=True, - # auto sort section by order 01 02 03 04 - # AutoSection.ONLY_BODY - means the sorting will be done only for the - # body bytes - # AutoSection.ONLY_BODY - means the section will be done only for the - # header bytes - auto_sort_sections=AutoSection.AUTO, - ) - - eof_test( - container=eof_code, - expect_exception=eof_code.validity_error, - ) - - -@pytest.mark.parametrize( - "data_section_bytes", - (b"\x01", b"\xef"), -) -@pytest.mark.parametrize( - "code_section_code, exception", - [ - (Op.PUSH1(10) + Op.STOP, None), - (Op.PUSH1(14), EOFException.MISSING_STOP_OPCODE), - ], -) -def test_eof_example_parameters( - eof_test: EOFTestFiller, - data_section_bytes: bytes, - code_section_code: Bytecode, - exception: EOFException, -) -> None: - """Example of EOF example parameters.""" - eof_code = Container( - name="parametrized_eof_example", - sections=[ - Section.Code( - code=code_section_code, - max_stack_height=1, - ), - Section.Data(data_section_bytes), - ], - validity_error=exception, - ) - - eof_test( - container=eof_code, - expect_exception=eof_code.validity_error, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_execution.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_execution.py deleted file mode 100644 index 16bfb63af5..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_execution.py +++ /dev/null @@ -1,83 +0,0 @@ -"""Execution of basic EOF containers.""" - -import pytest -from execution_testing import ( - Account, - Alloc, - Environment, - Op, - StateTestFiller, - Storage, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. 
import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-3540.md" -REFERENCE_SPEC_VERSION = "8dcb0a8c1c0102c87224308028632cc986a61183" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - -EXPECTED_STORAGE = (bytes.fromhex("EF"), bytes.fromhex("BADDCAFE")) -"""Expected storage (key => value) to be produced by the EOF containers""" - - -@pytest.mark.parametrize( - "container", - ( - Container( - name="store_from_push", - sections=[Section.Code(Op.SSTORE(*EXPECTED_STORAGE) + Op.STOP)], - ), - Container( - name="store_with_data", - sections=[ - Section.Code( - Op.SSTORE(Op.DATALOADN[0], Op.DATALOADN[32]) + Op.STOP - ), - Section.Data( - EXPECTED_STORAGE[0].rjust(32, b"\x00") - + EXPECTED_STORAGE[1].rjust(32, b"\x00") - ), - ], - ), - ), - ids=lambda x: x.name, -) -def test_eof_execution( - state_test: StateTestFiller, - pre: Alloc, - container: Container, -) -> None: - """Test simple contracts that are expected to succeed on call.""" - env = Environment() - - storage = Storage() - sender = pre.fund_eoa() - container_address = pre.deploy_contract(container) - caller_contract = Op.SSTORE( - storage.store_next(1), - Op.CALL(Op.GAS, container_address, 0, 0, 0, 0, 0), - ) - caller_address = pre.deploy_contract(caller_contract) - - tx = Transaction( - to=caller_address, - gas_limit=1_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - - post = { - caller_address: Account(storage=storage), - container_address: Account(storage=dict([EXPECTED_STORAGE])), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_execution_function.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_execution_function.py deleted file mode 100644 index d9e08affec..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_execution_function.py +++ /dev/null @@ -1,461 +0,0 @@ -"""Execution of CALLF, RETF opcodes within EOF V1 containers tests.""" - -from typing import List - -import pytest -from execution_testing import ( - Account, - Alloc, - Environment, - Op, - StateTestFiller, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container, Section -from execution_testing.test_types.eof.v1.constants import ( - MAX_CODE_SECTIONS, - MAX_RETURN_STACK_HEIGHT, -) - -from .. 
import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-4750.md" -REFERENCE_SPEC_VERSION = "90f716078d0b08ce508a1e57803f885cc2f2e15e" - -# List all containers used within execution tests, since they will need to be -# valid EOF V1 containers too - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - -contract_call_within_deep_nested_callf = Container( - name="contract_call_within_deep_nested_callf", - sections=[ - Section.Code( - code=Op.CALLF[1] + Op.SSTORE(0, 1) + Op.STOP, - ) - ] - + [ - # All sections call next section and on return, store a 1 - # to their call stack height key - Section.Code( - code=(Op.CALLF[i] + Op.SSTORE(i - 1, 1) + Op.RETF), - code_inputs=0, - code_outputs=0, - ) - for i in range(2, MAX_CODE_SECTIONS) - ] - + [ - # Last section makes external contract call - Section.Code( - code=( - Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH2(0x200) - + Op.EXTCALL - + Op.ISZERO - + Op.PUSH2(MAX_CODE_SECTIONS - 1) - + Op.SSTORE - + Op.RETF - ), - code_inputs=0, - code_outputs=0, - ) - ], -) - -recursive_contract_call_within_deep_nested_callf = Container( - name="recursive_contract_call_within_deep_nested_callf", - sections=[ - # All sections call next section and on return, store a 1 - # to their call stack height key - Section.Code( - code=Op.CALLF[i + 1] + Op.SSTORE(i, 1) + Op.STOP, - ) - for i in range(MAX_CODE_SECTIONS - 1) - ] - + [ - # Last section makes external contract call - Section.Code( - code=( - Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH2(0x200) - + Op.GAS - + Op.CALL - + Op.PUSH2(MAX_CODE_SECTIONS - 1) - + Op.SSTORE - + Op.RETF - ), - code_inputs=0, - code_outputs=0, - ) - ], -) - -CALL_SUCCEED_CONTRACTS: List[Container] = [ - Container( - name="function_finishes_contract_execution", - sections=[ - Section.Code( - code=(Op.CALLF[1] + Op.STOP), - ), - Section.Code( - code=(Op.RETF), - code_inputs=0, - code_outputs=0, - ), - ], - ), - Container( - name="max_recursive_callf", - sections=[ - Section.Code( - code=(Op.PUSH1(1) + Op.CALLF[1] + Op.STOP), - ), - Section.Code( - code=( - Op.DUP1 - + Op.PUSH2(MAX_RETURN_STACK_HEIGHT) - + Op.SUB - + Op.RJUMPI[len(Op.POP) + len(Op.RETF)] - + Op.POP - + Op.RETF - + Op.PUSH1(1) - + Op.ADD - + Op.CALLF[1] - + Op.RETF - ), - code_inputs=1, - code_outputs=0, - max_stack_height=3, - ), - ], - ), - Container( - name="max_recursive_callf_sstore", - sections=[ - Section.Code( - code=Op.SSTORE(0, 1) + Op.CALLF[1] + Op.STOP, - max_stack_height=2, - ), - Section.Code( - code=( - Op.PUSH0 - + Op.SLOAD - + Op.DUP1 - + Op.PUSH2(MAX_RETURN_STACK_HEIGHT) - + Op.SUB - + Op.RJUMPI[len(Op.POP) + len(Op.STOP)] - + Op.POP - + Op.RETF - + Op.PUSH1(1) - + Op.ADD - + Op.PUSH0 - + Op.SSTORE - + Op.CALLF[1] - + Op.RETF - ), - code_inputs=0, - code_outputs=0, - max_stack_height=3, - ), - ], - ), - Container( - name="max_recursive_callf_memory", - sections=[ - Section.Code( - code=( - Op.PUSH1(1) + Op.PUSH0 + Op.MSTORE + Op.CALLF[1] + Op.STOP - ), - ), - Section.Code( - code=( - Op.PUSH0 - + Op.MLOAD - + Op.DUP1 - + Op.PUSH2(MAX_RETURN_STACK_HEIGHT) - + Op.SUB - + Op.RJUMPI[len(Op.POP) + len(Op.RETF)] - + Op.POP - + Op.RETF - + Op.PUSH1(1) - + Op.ADD - + Op.PUSH0 - + Op.MSTORE - + Op.CALLF[1] - + Op.RETF - ), - code_inputs=0, - code_outputs=0, - max_stack_height=3, - ), - ], - ), -] -""" -List of all EOF V1 Containers that simply need to succeed on execution. 
-""" - -CALL_FAIL_CONTRACTS: List[Container] = [ - Container( - name="invalid_opcode", - sections=[ - Section.Code( - code=(Op.INVALID), - ), - ], - ), - Container( - name="overflow_recursive_callf", - sections=[ - Section.Code( - code=(Op.PUSH1(1) + Op.CALLF[1] + Op.STOP), - ), - Section.Code( - code=( - Op.DUP1 - + Op.PUSH2(MAX_RETURN_STACK_HEIGHT + 1) - + Op.SUB - + Op.RJUMPI[len(Op.POP) + len(Op.RETF)] - + Op.POP - + Op.RETF - + Op.PUSH1(1) - + Op.ADD - + Op.CALLF[1] - + Op.RETF - ), - code_inputs=1, - code_outputs=0, - max_stack_height=3, - ), - ], - ), - Container( - name="overflow_recursive_callf_sstore", - sections=[ - Section.Code( - code=Op.SSTORE(0, 1) + Op.CALLF[1] + Op.STOP, - max_stack_height=2, - ), - Section.Code( - code=( - Op.PUSH0 - + Op.SLOAD - + Op.DUP1 - + Op.PUSH2(MAX_RETURN_STACK_HEIGHT + 1) - + Op.SUB - + Op.RJUMPI[len(Op.POP) + len(Op.RETF)] - + Op.POP - + Op.RETF - + Op.PUSH1(1) - + Op.ADD - + Op.PUSH0 - + Op.SSTORE - + Op.CALLF[1] - + Op.RETF - ), - code_inputs=0, - code_outputs=0, - max_stack_height=3, - ), - ], - ), - Container( - name="overflow_recursive_callf_memory", - sections=[ - Section.Code( - code=Op.MSTORE(0, 1) + Op.CALLF[1] + Op.STOP, - max_stack_height=2, - ), - Section.Code( - code=( - Op.PUSH0 - + Op.MLOAD - + Op.DUP1 - + Op.PUSH2(MAX_RETURN_STACK_HEIGHT + 1) - + Op.SUB - + Op.RJUMPI[len(Op.POP) + len(Op.RETF)] - + Op.POP - + Op.RETF - + Op.PUSH1(1) - + Op.ADD - + Op.PUSH0 - + Op.MSTORE - + Op.CALLF[1] - + Op.RETF - ), - code_inputs=0, - code_outputs=0, - max_stack_height=3, - ), - ], - ), -] -""" -List of all EOF V1 Containers that simply need to fail (exceptional halt) on -execution. -These contracts have a valid EOF V1 container format but fail when executed. -""" - -VALID: List[Container] = CALL_SUCCEED_CONTRACTS + CALL_FAIL_CONTRACTS -""" -List of all EOF V1 Containers used during execution tests. 
-""" - - -@pytest.mark.parametrize( - "container", CALL_SUCCEED_CONTRACTS, ids=lambda x: x.name -) -def test_eof_functions_contract_call_succeed( - state_test: StateTestFiller, - pre: Alloc, - container: Container, -) -> None: - """Test simple contracts that are simply expected to succeed on call.""" - env = Environment() - - sender = pre.fund_eoa() - container_address = pre.deploy_contract(container) - caller_contract = ( - Op.SSTORE(0, Op.CALL(Op.GAS, container_address, 0, 0, 0, 0, 0)) - + Op.STOP() - ) - caller_address = pre.deploy_contract(caller_contract) - - tx = Transaction( - to=caller_address, - gas_limit=50000000, - gas_price=10, - protected=False, - data="", - sender=sender, - ) - - post = {caller_address: Account(storage={0: 1})} - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "container", CALL_FAIL_CONTRACTS, ids=lambda x: x.name -) -def test_eof_functions_contract_call_fail( - state_test: StateTestFiller, - pre: Alloc, - container: Container, -) -> None: - """Test simple contracts that are simply expected to fail on call.""" - env = Environment() - - sender = pre.fund_eoa() - container_address = pre.deploy_contract(container) - caller_contract = ( - Op.SSTORE(Op.CALL(Op.GAS, container_address, 0, 0, 0, 0, 0), 1) - + Op.STOP() - ) - caller_address = pre.deploy_contract(caller_contract) - - tx = Transaction( - to=caller_address, - gas_limit=50000000, - gas_price=10, - protected=False, - data="", - sender=sender, - ) - - post = {caller_address: Account(storage={0: 1})} - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -def test_eof_functions_contract_call_within_deep_nested( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Test performing a call within a nested callf and verify correct behavior of - return stack in calling contract. - - TODO: This test belongs in EIP-7069 test folder, not code validation. 
- """ - env = Environment() - - nested_callee_address = pre.deploy_contract( - code=Op.SSTORE(0, 1) + Op.STOP() - ) - contract_call_within_deep_nested_callf = Container( - name="contract_call_within_deep_nested_callf", - sections=[ - Section.Code( - code=Op.CALLF[1] + Op.SSTORE(0, 1) + Op.STOP, - ) - ] - + [ - # All sections call next section and on return, store a 1 - # to their call stack height key - Section.Code( - code=(Op.CALLF[i] + Op.SSTORE(i - 1, 1) + Op.RETF), - code_outputs=0, - ) - for i in range(2, MAX_CODE_SECTIONS) - ] - + [ - # Last section makes external contract call - Section.Code( - code=( - Op.EXTCALL(nested_callee_address, 0, 0, 0) - + Op.ISZERO - + Op.PUSH2(MAX_CODE_SECTIONS - 1) - + Op.SSTORE - + Op.RETF - ), - code_outputs=0, - ) - ], - ) - callee_address = pre.deploy_contract( - contract_call_within_deep_nested_callf - ) - sender = pre.fund_eoa() - - tx = Transaction( - to=callee_address, - gas_limit=50000000, - gas_price=10, - protected=False, - data="", - sender=sender, - ) - post = { - callee_address: Account( - storage=dict.fromkeys(range(MAX_CODE_SECTIONS), 1) - ), - nested_callee_address: Account( - storage={ - 0: 1, - } - ), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_extcode.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_extcode.py deleted file mode 100644 index f6fdb640ae..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_extcode.py +++ /dev/null @@ -1,93 +0,0 @@ -"""test execution semantics changes.""" - -import pytest -from execution_testing import ( - Account, - Alloc, - Environment, - Op, - StateTestFiller, - Storage, - Transaction, - keccak256, -) -from execution_testing.test_types.eof.v1 import Container - -from .. 
import EOF_FORK_NAME - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-4750.md" -REFERENCE_SPEC_VERSION = "90f716078d0b08ce508a1e57803f885cc2f2e15e" - - -def test_legacy_calls_eof_sstore( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """Test EXTCODE* opcodes calling EOF and legacy contracts.""" - env = Environment() - eof_code = Container.Code(Op.RJUMP[0] + Op.STOP) - address_eof_contract = pre.deploy_contract(eof_code) - legacy_code = Op.PUSH1(2) + Op.JUMPDEST + Op.STOP - address_legacy_contract = pre.deploy_contract(legacy_code) - - storage_test = Storage() - test_contract_code = ( - Op.SSTORE( - storage_test.store_next(4), Op.EXTCODESIZE(address_legacy_contract) - ) - + Op.EXTCODECOPY( - address_legacy_contract, - 0, - 0, - Op.EXTCODESIZE(address_legacy_contract), - ) - + Op.SSTORE( - storage_test.store_next(bytes(legacy_code).ljust(32, b"\0")), - Op.MLOAD(0), - ) - + Op.SSTORE( - storage_test.store_next(legacy_code.keccak256()), - Op.EXTCODEHASH(address_legacy_contract), - ) - + Op.SSTORE( - storage_test.store_next(2), Op.EXTCODESIZE(address_eof_contract) - ) - + Op.EXTCODECOPY(address_eof_contract, 0x20, 0, 6) - + Op.SSTORE( - storage_test.store_next(b"\xef".ljust(32, b"\0")), Op.MLOAD(0x20) - ) - + Op.MSTORE(0x40, b"\xcc" * 32) # clobber memory slot - + Op.EXTCODECOPY( - address_eof_contract, 0x40, len(eof_code) - 4, 8 - ) # out-of-bounds "read" - + Op.SSTORE(storage_test.store_next(b"\xcc" * 24), Op.MLOAD(0x40)) - + Op.SSTORE( - storage_test.store_next(keccak256(b"\xef\x00")), - Op.EXTCODEHASH(address_eof_contract), - ) - ) - address_test_contract = pre.deploy_contract(test_contract_code) - - sender = pre.fund_eoa() - - tx = Transaction( - sender=sender, - to=address_test_contract, - gas_limit=50_000_000, - gas_price=10, - protected=False, - data="", - ) - - post = { - address_test_contract: Account(storage=storage_test), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_migrated_valid_invalid.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_migrated_valid_invalid.py deleted file mode 100644 index b4797a5fb3..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_migrated_valid_invalid.py +++ /dev/null @@ -1,477 +0,0 @@ -""" -EOF validation tests for EIP-3540 migrated from -ethereum/tests/src/EOFTestsFiller/EIP3540/validInvalidFiller.yml. -""" - -import pytest -from execution_testing import EOFException, EOFTestFiller, Op -from execution_testing.exceptions.exceptions import ( - EOFExceptionInstanceOrList, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. 
import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-3540.md" -REFERENCE_SPEC_VERSION = "8dcb0a8c1c0102c87224308028632cc986a61183" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.parametrize( - "eof_code,exception", - [ - pytest.param( - # Deployed code without data section - Container( - name="EOF1V3540_0001", - sections=[ - Section.Code(code=Op.PUSH1[0] + Op.POP + Op.STOP), - ], - ), - None, - id="EOF1V3540_0001_deployed_code_without_data_section", - ), - pytest.param( - # Deployed code with data section - Container( - name="EOF1V3540_0002", - sections=[ - Section.Code(code=Op.PUSH1[0] + Op.POP + Op.STOP), - Section.Data("aabbccdd"), - ], - ), - None, - id="EOF1V3540_0002_deployed_code_with_data_section", - ), - pytest.param( - # Empty code section with non-empty data section - Container( - sections=[Section.Code(code_outputs=0), Section.Data("aabb")], - expected_bytecode="ef00010100040200010000ff00020000000000aabb", - ), - EOFException.ZERO_SECTION_SIZE, - id="EOF1I3540_0012_empty_code_section_with_non_empty_data_section", - ), - pytest.param( - # No section terminator after data section size - Container(raw_bytes=bytes.fromhex("ef00010100040200010001ff0002")), - EOFException.MISSING_HEADERS_TERMINATOR, - id="EOF1I3540_0020_no_section_terminator_after_data_section_size", - ), - pytest.param( - # No type section contents - Container( - raw_bytes=bytes.fromhex("ef00010100040200010001ff000200") - ), - EOFException.INVALID_SECTION_BODIES_SIZE, - id="EOF1I3540_0021_no_type_section_contents", - ), - pytest.param( - # Type section contents (no outputs and max stack) - Container( - raw_bytes=bytes.fromhex("ef00010100040200010001ff00020000") - ), - EOFException.INVALID_SECTION_BODIES_SIZE, - id="EOF1I3540_0022_invalid_type_section_no_outputs_and_max_stack", - ), - pytest.param( - # Type section contents (no max stack) - Container( - raw_bytes=bytes.fromhex("ef00010100040200010001ff0002000000") - ), - EOFException.INVALID_SECTION_BODIES_SIZE, - id="EOF1I3540_0023_invalid_type_section_no_max_stack", - ), - pytest.param( - # Type section contents (max stack incomplete) - Container( - raw_bytes=bytes.fromhex("ef00010100040200010001ff000200000000") - ), - EOFException.INVALID_SECTION_BODIES_SIZE, - id="EOF1I3540_0024_invalid_type_section_max_stack_incomplete", - ), - pytest.param( - # No code section contents - Container( - raw_bytes=bytes.fromhex( - "ef00010100040200010001ff00020000000000" - ) - ), - EOFException.INVALID_SECTION_BODIES_SIZE, - id="EOF1I3540_0025_no_code_section_contents", - ), - pytest.param( - # Code section contents incomplete - Container( - raw_bytes=bytes.fromhex( - "ef00010100040200010029ff000000000000027f" - ) - ), - EOFException.INVALID_SECTION_BODIES_SIZE, - id="EOF1I3540_0026_code_section_contents_incomplete", - ), - pytest.param( - # Trailing bytes after code section - Container( - raw_bytes=bytes.fromhex( - "ef0001 010004 0200010001 ff0000 00 00800000 fe aabbcc" - ) - ), - EOFException.INVALID_SECTION_BODIES_SIZE, - id="EOF1I3540_0027_trailing_bytes_after_code_section", - ), - pytest.param( - # Trailing bytes after code section with wrong first section type - Container( - raw_bytes=bytes.fromhex( - "ef0001 010004 0200010001 ff0000 00 00000000 fe aabbcc" - ) - ), - [ - EOFException.INVALID_FIRST_SECTION_TYPE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ], - id="EOF1I3540_0027_trailing_bytes_after_code_section_with_wrong_first_section_type", - ), - pytest.param( - # Empty code section - Container( - raw_bytes=bytes.fromhex( - 
"ef00010100040200010000ff00000000000000" - ) - ), - EOFException.ZERO_SECTION_SIZE, - id="EOF1I3540_0028_empty_code_section", - ), - pytest.param( - # Code section preceding type section - Container( - raw_bytes=bytes.fromhex( - "ef00010200010001010004ff00020000000000feaabb" - ) - ), - [ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0030_code_section_preceding_type_section", - ), - pytest.param( - # Data section preceding type section - Container( - raw_bytes=bytes.fromhex( - "ef0001ff000201000402000100010000000000feaabb" - ) - ), - [ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0031_data_section_preceding_type_section", - ), - pytest.param( - # Data section preceding code section - Container( - raw_bytes=bytes.fromhex( - "ef0001010004ff000202000100010000000000feaabb" - ) - ), - [ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0032_data_section_preceding_code_section", - ), - pytest.param( - # Data section without code section - Container( - raw_bytes=bytes.fromhex("ef0001010004ff00020000000000aabb") - ), - [ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0033_data_section_without_code_section", - ), - pytest.param( - # No data section - Container( - raw_bytes=bytes.fromhex("ef000101000402000100010000000000fe") - ), - [ - EOFException.MISSING_DATA_SECTION, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0034_no_data_section", - ), - pytest.param( - # Trailing bytes after data section - Container( - sections=[ - Section.Code(Op.INVALID), - Section.Data("aabb"), - ], - extra="ccdd", - expected_bytecode="ef0001 010004 " - "0200010001 ff0002 00 00800000 fe aabbccdd", - ), - EOFException.INVALID_SECTION_BODIES_SIZE, - id="EOF1I3540_0035_trailing_bytes_after_data_section", - ), - pytest.param( - # Trailing bytes after data section with wrong first section type - Container( - raw_bytes=bytes.fromhex( - "ef0001 010004 0200010001 ff0002 00 00000000 fe aabbccdd" - ) - ), - [ - EOFException.INVALID_FIRST_SECTION_TYPE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ], - id="EOF1I3540_0035_trailing_bytes_after_data_section_with_wrong_first_section_type", - ), - pytest.param( - # Multiple data sections - Container( - raw_bytes=bytes.fromhex( - "ef00010100040200010001ff0002ff00020000000000feaabbaabb" - ) - ), - [ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0036_multiple_data_sections", - ), - pytest.param( - # Multiple code and data sections - Container( - raw_bytes=bytes.fromhex( - "ef000101000802" - "000200010001ff0002ff0002000000000000000000fefeaabbaabb" - ) - ), - [ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0037_multiple_code_and_data_sections", - ), - pytest.param( - # Unknown section ID (at the beginning) - Container( - raw_bytes=bytes.fromhex( - "ef00010400010100040200010001ff00000000000000fe" - ) - ), - [ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0038_unknown_section_id_at_the_beginning_05", - ), - pytest.param( - # Unknown section ID (at the beginning) - Container( - raw_bytes=bytes.fromhex( - "ef00010500010100040200010001ff00000000000000fe" - ) - ), - [ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0039_unknown_section_id_at_the_beginning_06", - ), - pytest.param( - # Unknown section 
ID (at the beginning) - Container( - raw_bytes=bytes.fromhex( - "ef0001fe00010100040200010001ff00000000000000fe" - ) - ), - [ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0040_unknown_section_id_at_the_beginning_ff", - ), - pytest.param( - # Unknown section ID (after types section) - Container( - raw_bytes=bytes.fromhex( - "ef00010100040400010200010001ff00000000000000fe" - ) - ), - [ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0041_unknown_section_id_after_types_section_05", - ), - pytest.param( - # Unknown section ID (after types section) - Container( - raw_bytes=bytes.fromhex( - "ef00010100040500010200010001ff00000000000000fe" - ) - ), - [ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0042_unknown_section_id_after_types_section_06", - ), - pytest.param( - # Unknown section ID (after types section) - Container( - raw_bytes=bytes.fromhex( - "ef0001010004fe00010200010001ff00000000000000fe" - ) - ), - [ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0043_unknown_section_id_after_types_section_ff", - ), - pytest.param( - # Unknown section ID (after code section) - Container( - raw_bytes=bytes.fromhex( - "ef00010100040200010001050001ff00000000000000fe" - ) - ), - [ - EOFException.MISSING_DATA_SECTION, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0044_unknown_section_id_after_code_section_05", - ), - pytest.param( - # Unknown section ID (after code section) - Container( - raw_bytes=bytes.fromhex( - "ef00010100040200010001060001ff00000000000000fe" - ) - ), - [ - EOFException.MISSING_DATA_SECTION, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0045_unknown_section_id_after_code_section_06", - ), - pytest.param( - # Unknown section ID (after code section) - Container( - raw_bytes=bytes.fromhex( - "ef00010100040200010001040001ff00000000000000fe" - ) - ), - [ - EOFException.MISSING_DATA_SECTION, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0046_unknown_section_id_after_code_section_04", - ), - pytest.param( - # Unknown section ID (after data section) - Container( - raw_bytes=bytes.fromhex( - "ef00010100040200010001ff00000500010000000000fe" - ) - ), - [ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0047_unknown_section_id_after_data_section_05", - ), - pytest.param( - # Unknown section ID (after data section) - Container( - raw_bytes=bytes.fromhex( - "ef00010100040200010001ff00000600010000000000fe" - ) - ), - [ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0048_unknown_section_id_after_data_section_06", - ), - pytest.param( - # Unknown section ID (after data section) - Container( - raw_bytes=bytes.fromhex( - "ef00010100040200010001ff00000400010000000000fe" - ) - ), - [ - EOFException.MISSING_TERMINATOR, - EOFException.UNEXPECTED_HEADER_KIND, - ], - id="EOF1I3540_0049_unknown_section_id_after_data_section_04", - ), - pytest.param( - Container( - name="EOF1I3540_0002 (Invalid) Invalid magic", - raw_bytes="ef01010100040200010001ff00000000000000fe", - ), - EOFException.INVALID_MAGIC, - id="EOF1I3540_0002_invalid_incorrect_magic_01", - ), - pytest.param( - Container( - name="EOF1I3540_0003", - raw_bytes="ef02010100040200010001ff00000000000000fe", - ), - EOFException.INVALID_MAGIC, - id="EOF1I3540_0003_invalid_incorrect_magic_02", - ), - pytest.param( - Container( - 
name="EOF1I3540_0004", - raw_bytes="efff010100040200010001ff00000000000000fe", - ), - EOFException.INVALID_MAGIC, - id="EOF1I3540_0004_invalid_incorrect_magic_ff", - ), - pytest.param( - Container( - name="EOF1I3540_0006 (Invalid) Invalid version", - raw_bytes="ef00000100040200010001ff00000000000000fe", - ), - EOFException.INVALID_VERSION, - id="EOF1I3540_0006_invalid_incorrect_version_00", - ), - pytest.param( - Container( - name="EOF1I3540_0007", - raw_bytes="ef00020100040200010001ff00000000000000fe", - ), - EOFException.INVALID_VERSION, - id="EOF1I3540_0007_invalid_incorrect_version_02", - ), - pytest.param( - Container( - name="EOF1I3540_0008", - raw_bytes="ef00ff0100040200010001ff00000000000000fe", - ), - EOFException.INVALID_VERSION, - id="EOF1I3540_0008_invalid_incorrect_version_ff", - ), - ], -) -def test_migrated_valid_invalid( - eof_test: EOFTestFiller, - eof_code: Container, - exception: EOFExceptionInstanceOrList | None, -) -> None: - """Verify EOF container construction and exception.""" - eof_test( - container=eof_code, - expect_exception=exception, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_opcodes_in_legacy.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_opcodes_in_legacy.py deleted file mode 100644 index 4d90acdbad..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_opcodes_in_legacy.py +++ /dev/null @@ -1,263 +0,0 @@ -"""Tests all EOF-only opcodes in legacy contracts and expects failure.""" - -import pytest -from execution_testing import ( - Account, - Alloc, - Environment, - Initcode, - Op, - Opcodes, - StateTestFiller, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7692.md" -REFERENCE_SPEC_VERSION = "f0e7661ee0d16e612e0931ec88b4c9f208e9d513" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - -slot_code_executed = b"EXEC" -slot_code_worked = b"WORK" -slot_create_address = b"ADDR" - -value_code_executed = b"exec" -value_code_worked = b"work" -value_non_execution_canary = b"brid" -value_create_failed = 0 - -eof_opcode_blocks = [ - pytest.param(Op.PUSH0 + Op.DUPN[0], id="DUPN"), - pytest.param(Op.PUSH0 + Op.PUSH0 + Op.SWAPN[0], id="SWAPN"), - pytest.param( - Op.PUSH0 + Op.PUSH0 + Op.PUSH0 + Op.EXCHANGE[2, 3], id="EXCHANGE" - ), - pytest.param(Op.RJUMP[0], id="RJUMP"), - pytest.param(Op.PUSH0 + Op.RJUMPI[0], id="RJUMPI"), - pytest.param(Op.PUSH0 + Op.RJUMPV[0, 0], id="RJUMPI"), - pytest.param(Op.CALLF[1], id="CALLF"), - pytest.param(Op.RETF, id="RETF"), - pytest.param(Op.JUMPF[0], id="JUMPF"), - pytest.param( - Op.PUSH0 + Op.PUSH0 + Op.PUSH0 + Op.PUSH1(2) + Op.EXTCALL, id="EXTCALL" - ), - pytest.param( - Op.PUSH0 + Op.PUSH0 + Op.PUSH0 + Op.PUSH1(2) + Op.EXTDELEGATECALL, - id="EXTDELEGATECALL", - ), - pytest.param( - Op.PUSH0 + Op.PUSH0 + Op.PUSH0 + Op.PUSH1(2) + Op.EXTSTATICCALL, - id="EXTSTATICCALL", - ), - pytest.param(Op.DATALOAD(0), id="DATALOAD"), - pytest.param(Op.DATALOADN[0], id="DATALOADN"), - pytest.param(Op.DATASIZE, id="DATASIZE"), - pytest.param(Op.DATACOPY(0, 0, 32), id="DATACOPY"), - pytest.param(Op.EOFCREATE[0](0, 0, 0, 0), id="EOFCREATE"), - # pytest.param(Op.TXCREATE(0, 0, 0, 0, 0), id="TXCREATE"), not EOF-only - # anymore - pytest.param(Op.RETURNCODE[0], id="RETURNCODE"), -] - - -@pytest.mark.parametrize( - "code", - eof_opcode_blocks, -) -def test_opcodes_in_legacy( - state_test: StateTestFiller, pre: Alloc, code: Opcodes -) -> None: - """Test all EOF only opcodes in legacy contracts and 
expects failure.""" - env = Environment() - - address_test_contract = pre.deploy_contract( - code=code + Op.SSTORE(slot_code_executed, value_code_executed), - storage={slot_code_executed: value_non_execution_canary}, - ) - - post = { - # assert the canary is not over-written. If it was written then the EOF - # opcode was valid - address_test_contract: Account( - storage={slot_code_executed: value_non_execution_canary} - ), - } - - sender = pre.fund_eoa() - - tx = Transaction( - sender=sender, - to=address_test_contract, - gas_limit=5_000_000, - gas_price=10, - protected=False, - data="", - ) - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "code", - eof_opcode_blocks, -) -def test_opcodes_in_create_tx( - state_test: StateTestFiller, pre: Alloc, code: Opcodes -) -> None: - """Test all EOF only opcodes in create tx and expects failure.""" - env = Environment() - - sender = pre.fund_eoa() - - tx = Transaction( - sender=sender, - to=None, - gas_limit=5_000_000, - gas_price=10, - protected=False, - data=code, - ) - - post = { - # Should revert in initcode, which results in no contract created - tx.created_contract: Account.NONEXISTENT - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "legacy_create_opcode", - [ - pytest.param(Op.CREATE, id="CREATE"), - pytest.param(Op.CREATE2, id="CREATE2"), - ], -) -@pytest.mark.parametrize( - "code", - eof_opcode_blocks, -) -def test_opcodes_in_create_operation( - state_test: StateTestFiller, - pre: Alloc, - code: Opcodes, - legacy_create_opcode: Opcodes, -) -> None: - """Test opcodes in create operation.""" - env = Environment() - - init_code = Initcode(initcode_prefix=code, deploy_code=Op.RETURN(0, 0)) - factory_code = ( - Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.SSTORE( - slot_create_address, legacy_create_opcode(size=Op.CALLDATASIZE) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - ) - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract(code=factory_code) - - post = { - contract_address: Account( - storage={ - slot_create_address: value_create_failed, - slot_code_worked: value_code_worked, - } - ) - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - gas_price=10, - protected=False, - data=init_code, - sender=sender, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.parametrize( - ("ext_call_opcode"), - [ - pytest.param(Op.EXTCALL, id="EXTCALL"), - pytest.param(Op.EXTDELEGATECALL, id="EXTDELEGATECALL"), - pytest.param(Op.EXTSTATICCALL, id="EXTSTATICCALL"), - ], -) -@pytest.mark.parametrize( - "code", - eof_opcode_blocks, -) -def test_opcodes_in_eof_calling_legacy( - state_test: StateTestFiller, - pre: Alloc, - code: Opcodes, - ext_call_opcode: Op, -) -> None: - """Test all opcodes in eof calling legacy and expects failure.""" - env = Environment() - - address_test_contract = pre.deploy_contract( - code=code + Op.SSTORE(slot_code_executed, value_code_executed), - storage={slot_code_executed: value_non_execution_canary}, - ) - - address_entry_contract = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - ext_call_opcode(address=address_test_contract) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ) - ] - ), - storage={slot_code_executed: value_non_execution_canary}, - ) - - post = { - # assert the canary is not over-written. 
If it was written then the EOF - # opcode was valid - address_test_contract: Account( - storage={slot_code_executed: value_non_execution_canary} - ), - address_entry_contract: Account( - storage={ - slot_code_executed: value_non_execution_canary, - slot_code_worked: value_code_worked, - } - ), - } - - sender = pre.fund_eoa() - - tx = Transaction( - sender=sender, - to=address_entry_contract, - gas_limit=5_000_000, - gas_price=10, - protected=False, - data="", - ) - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_header_body_mismatch.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_header_body_mismatch.py deleted file mode 100644 index 8578452186..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_header_body_mismatch.py +++ /dev/null @@ -1,160 +0,0 @@ -"""EOF Container construction test.""" - -import pytest -from execution_testing import ( - EOFException, - EOFTestFiller, - Op, - extend_with_defaults, -) -from execution_testing.exceptions.exceptions import ( - EOFExceptionInstanceOrList, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-3540.md" -REFERENCE_SPEC_VERSION = "8dcb0a8c1c0102c87224308028632cc986a61183" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.parametrize( - **extend_with_defaults( - defaults={ - # second section is mentioned in code header array - "skip_header_listing": False, - # second section code is in container's body - "skip_body_listing": False, - # code input bytes not listed in container's body - "skip_types_body_listing": False, - # code input bytes size not added to types section size - "skip_types_header_listing": False, - "expected_code": "", - "expected_exception": None, - }, - cases=[ - pytest.param( - { - "skip_header_listing": True, - "skip_body_listing": True, - "expected_code": "ef00010100080200010003ff0004000080000" - "1000000003050000bad60A7", - "expected_exception": [ - EOFException.INVALID_TYPE_SECTION_SIZE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ], - }, - id="drop_code_section_and_header", - ), - pytest.param( - { - "skip_header_listing": True, - "skip_body_listing": False, - "expected_code": "ef00010100080200010003ff0004000080000" - "1000000003050003050000bad60A7", - "expected_exception": [ - EOFException.INVALID_TYPE_SECTION_SIZE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ], - }, - id="drop_code_header", - ), - pytest.param( - { - "skip_header_listing": False, - "skip_body_listing": True, - "expected_code": "ef000101000802000200030003ff000400008" - "00001000000003050000bad60A7", - "expected_exception": [ - EOFException.UNREACHABLE_CODE_SECTIONS, - EOFException.TOPLEVEL_CONTAINER_TRUNCATED, - ], - }, - id="drop_code_section", - ), - pytest.param( - { - "skip_header_listing": False, - "skip_body_listing": False, - "expected_code": "ef000101000802" - "000200030003ff000400008" - "00001000000003050003050000bad60A7", - "expected_exception": ( - EOFException.UNREACHABLE_CODE_SECTIONS - ), - }, - id="layout_ok_code_bad", - ), - pytest.param( - { - "skip_header_listing": True, - "skip_body_listing": True, - "skip_types_body_listing": True, - "expected_code": "ef000101000802" - "00010003ff000400008000013050000bad60a7", - "expected_exception": [ - EOFException.INVALID_TYPE_SECTION_SIZE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ], - }, - id="drop_types_header", - ), - pytest.param( - { - 
"skip_header_listing": True, - "skip_body_listing": True, - "skip_types_body_listing": True, - "skip_types_header_listing": True, - "expected_code": "ef000101000402" - "00010003ff000400008000013050000bad60a7", - "expected_exception": None, - }, - id="drop_everything", - ), - ], - ) -) -def test_code_section_header_body_mismatch( - eof_test: EOFTestFiller, - skip_header_listing: bool, - skip_body_listing: bool, - skip_types_body_listing: bool, - skip_types_header_listing: bool, - expected_code: str, - expected_exception: EOFExceptionInstanceOrList | None, -) -> None: - """Inconsistent number of code sections (between types and code).""" - eof_code = Container( - name="EOF1I0018", - sections=[ - Section.Code( - code=Op.ADDRESS + Op.POP + Op.STOP, - ), - Section.Code( - code=Op.ADDRESS + Op.POP + Op.STOP, - code_inputs=0, - code_outputs=0, - max_stack_height=0, - # whether to not mention it in code section header list - skip_header_listing=skip_header_listing, - # whether to not print its code in containers body - skip_body_listing=skip_body_listing, - # whether to not print its input bytes in containers body - skip_types_body_listing=skip_types_body_listing, - # whether to not calculate its input bytes size in types - # section's header - skip_types_header_listing=skip_types_header_listing, - ), - Section.Data("0x0bad60A7"), - ], - ) - - # TODO remove this after Container class implementation is reliable - assert bytes(eof_code).hex() == bytes.fromhex(expected_code).hex() - - eof_test( - container=eof_code, - expect_exception=expected_exception, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_order.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_order.py deleted file mode 100644 index 243b3145e1..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_order.py +++ /dev/null @@ -1,386 +0,0 @@ -"""Different variations of EOF sections displacement.""" - -from enum import Enum -from typing import List - -import pytest -from execution_testing import EOFException, EOFTestFiller, Op -from execution_testing.exceptions.exceptions import ( - EOFExceptionInstanceOrList, -) -from execution_testing.test_types.eof.v1 import ( - AutoSection, - Container, - Section, - SectionKind, -) - -from .. import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-3540.md" -REFERENCE_SPEC_VERSION = "8dcb0a8c1c0102c87224308028632cc986a61183" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -class SectionTest(Enum): - """Enum for the test type.""" - - MISSING = 1 - WRONG_ORDER = 2 - - -class CasePosition(Enum): - """Enum for the test position.""" - - BODY = 1 - HEADER = 2 - BODY_AND_HEADER = 3 - - -def get_expected_code_exception( - section_kind: SectionKind, - section_test: SectionTest, - test_position: CasePosition, -) -> tuple[str, EOFExceptionInstanceOrList | None]: - """ - Verification vectors with code and exception based on test combinations. 
- """ - match (section_kind, section_test, test_position): - case (SectionKind.TYPE, SectionTest.MISSING, CasePosition.HEADER): - return ( - "ef00010200010003ff00010000800001305000ef", - [ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ) - case (SectionKind.TYPE, SectionTest.MISSING, CasePosition.BODY): - return ( - "ef00010100040200010003ff000100305000ef", - [ - EOFException.INVALID_SECTION_BODIES_SIZE, - EOFException.INVALID_FIRST_SECTION_TYPE, - ], - ) - case ( - SectionKind.TYPE, - SectionTest.MISSING, - CasePosition.BODY_AND_HEADER, - ): - return ( - "ef00010200010003ff000100305000ef", - [ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ) - case (SectionKind.TYPE, SectionTest.WRONG_ORDER, CasePosition.HEADER): - return ( - "ef00010200010003010004ff00010000800001305000ef", - [ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ) - case (SectionKind.TYPE, SectionTest.WRONG_ORDER, CasePosition.BODY): - return ( - "ef00010100040200010003ff00010030500000800001ef", - # TODO why invalid first section type? it should say that the - # body incorrect - EOFException.INVALID_FIRST_SECTION_TYPE, - ) - case ( - SectionKind.TYPE, - SectionTest.WRONG_ORDER, - CasePosition.BODY_AND_HEADER, - ): - return ( - "ef00010200010003010004ff00010030500000800001ef", - [ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ) - case (SectionKind.CODE, SectionTest.MISSING, CasePosition.HEADER): - return ( - "ef0001010004ff00010000800001305000ef", - [ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ) - case (SectionKind.CODE, SectionTest.MISSING, CasePosition.BODY): - return ( - "ef00010100040200010003ff00010000800001ef", - # TODO should be an exception of empty code bytes, because it - # can understand that last byte is data section byte - [ - EOFException.INVALID_SECTION_BODIES_SIZE, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ) - case ( - SectionKind.CODE, - SectionTest.MISSING, - CasePosition.BODY_AND_HEADER, - ): - return ( - "ef0001010004ff00010000800001ef", - [ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ) - case (SectionKind.CODE, SectionTest.WRONG_ORDER, CasePosition.HEADER): - return ( - "ef0001010004ff000102000100030000800001305000ef", - [ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ) - case (SectionKind.CODE, SectionTest.WRONG_ORDER, CasePosition.BODY): - return ( - "ef00010100040200010003ff00010000800001ef305000", - EOFException.UNDEFINED_INSTRUCTION, - ) - case ( - SectionKind.CODE, - SectionTest.WRONG_ORDER, - CasePosition.BODY_AND_HEADER, - ): - return ( - "ef0001010004ff000102000100030000800001ef305000", - [ - EOFException.MISSING_CODE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ) - case (SectionKind.DATA, SectionTest.MISSING, CasePosition.HEADER): - return ( - "ef000101000402000100030000800001305000ef", - [ - EOFException.MISSING_DATA_SECTION, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ) - case (SectionKind.DATA, SectionTest.MISSING, CasePosition.BODY): - return ( - "ef00010100040200010003ff00010000800001305000", - EOFException.TOPLEVEL_CONTAINER_TRUNCATED, - ) - case ( - SectionKind.DATA, - SectionTest.MISSING, - CasePosition.BODY_AND_HEADER, - ): - return ( - "ef000101000402000100030000800001305000", - [ - EOFException.MISSING_DATA_SECTION, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ) - case (SectionKind.DATA, SectionTest.WRONG_ORDER, 
CasePosition.HEADER): - return ( - "ef0001ff000101000402000100030000800001305000ef", - [ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ) - case (SectionKind.DATA, SectionTest.WRONG_ORDER, CasePosition.BODY): - return ( - "ef00010100040200010003ff000100ef00800001305000", - EOFException.INVALID_FIRST_SECTION_TYPE, - ) - case ( - SectionKind.DATA, - SectionTest.WRONG_ORDER, - CasePosition.BODY_AND_HEADER, - ): - return ( - "ef0001ff0001010004020001000300ef00800001305000", - [ - EOFException.MISSING_TYPE_HEADER, - EOFException.UNEXPECTED_HEADER_KIND, - ], - ) - return "", None - - -@pytest.mark.parametrize( - "section_kind", [SectionKind.TYPE, SectionKind.CODE, SectionKind.DATA] -) -@pytest.mark.parametrize( - "section_test", [SectionTest.MISSING, SectionTest.WRONG_ORDER] -) -@pytest.mark.parametrize( - "test_position", - [CasePosition.BODY, CasePosition.HEADER, CasePosition.BODY_AND_HEADER], -) -def test_section_order( - eof_test: EOFTestFiller, - section_kind: SectionKind, - section_test: SectionTest, - test_position: CasePosition, -) -> None: - """Test sections order and it appearance in body and header.""" - - def calculate_skip_flag(kind: SectionKind, position: CasePosition) -> bool: - return ( - False - if (section_kind != kind) - else ( - True - if section_test == SectionTest.MISSING - and ( - test_position == position - or test_position == CasePosition.BODY_AND_HEADER - ) - else False - ) - ) - - def make_section_order(kind: SectionKind) -> List[Section]: - if section_test != SectionTest.WRONG_ORDER: - return [section_type, section_code, section_data] - if kind == SectionKind.TYPE: - return [section_code, section_type, section_data] - if kind == SectionKind.CODE: - return [section_type, section_data, section_code] - if kind == SectionKind.DATA: - return [section_data, section_type, section_code] - return [section_type, section_code, section_data] - - section_code = Section.Code( - code=Op.ADDRESS + Op.POP + Op.STOP, - skip_header_listing=calculate_skip_flag( - SectionKind.CODE, CasePosition.HEADER - ), - skip_body_listing=calculate_skip_flag( - SectionKind.CODE, CasePosition.BODY - ), - ) - section_type = Section( - kind=SectionKind.TYPE, - data=bytes.fromhex("00800001"), - custom_size=4, - skip_header_listing=calculate_skip_flag( - SectionKind.TYPE, CasePosition.HEADER - ), - skip_body_listing=calculate_skip_flag( - SectionKind.TYPE, CasePosition.BODY - ), - ) - section_data = Section.Data( - "ef", - skip_header_listing=calculate_skip_flag( - SectionKind.DATA, CasePosition.HEADER - ), - skip_body_listing=calculate_skip_flag( - SectionKind.DATA, CasePosition.BODY - ), - ) - - expected_code, expected_exception = get_expected_code_exception( - section_kind, section_test, test_position - ) - - eof_code = Container( - sections=make_section_order(section_kind), - auto_type_section=AutoSection.NONE, - auto_sort_sections=( - AutoSection.AUTO - if section_test != SectionTest.WRONG_ORDER - else ( - AutoSection.ONLY_BODY - if test_position == CasePosition.HEADER - else ( - AutoSection.ONLY_HEADER - if test_position == CasePosition.BODY - else AutoSection.NONE - ) - ) - ), - expected_bytecode=expected_code, - ) - - eof_test( - container=eof_code, - expect_exception=expected_exception, - ) - - -@pytest.mark.parametrize("container_position", range(4)) -@pytest.mark.parametrize( - "test_position", - [CasePosition.BODY, CasePosition.HEADER, CasePosition.BODY_AND_HEADER], -) -def test_container_section_order( - eof_test: EOFTestFiller, - container_position: int, - 
test_position: CasePosition, -) -> None: - """ - Test containers section being out of order in the header and/or body. This - extends and follows the convention of the test_section_order() for the - optional container section. - """ - if container_position == 2: - pytest.skip("Skip valid container section position") - - section_code = Section.Code( - code=Op.EOFCREATE[0](0, 0, 0, 0) - # TODO: Migrated tests had the following infinite loop, so it is kept - # here to equalize code coverage. - + Op.RJUMP[0] - + Op.STOP() - ) - section_type = Section( - kind=SectionKind.TYPE, data=bytes.fromhex("00800004") - ) - section_data = Section.Data("ef") - section_container = Section.Container(Container.Code(Op.INVALID)) - - sections = [section_type, section_code, section_data] - sections.insert(container_position, section_container) - eof_code = Container( - sections=sections, - auto_type_section=AutoSection.NONE, - auto_sort_sections=( - AutoSection.ONLY_BODY - if test_position == CasePosition.HEADER - else ( - AutoSection.ONLY_HEADER - if test_position == CasePosition.BODY - else AutoSection.NONE - ) - ), - ) - - def get_expected_exception() -> EOFExceptionInstanceOrList | None: - match container_position, test_position: - case 2, _: - return None # Valid containers section position - case 0, CasePosition.BODY: # Messes up with the type section - return EOFException.INVALID_FIRST_SECTION_TYPE - case 1, CasePosition.BODY: # Messes up with the code section - return EOFException.UNDEFINED_INSTRUCTION - case 3, CasePosition.BODY: # Data section messes up with the - # container section - return EOFException.INVALID_MAGIC - case 0, CasePosition.HEADER | CasePosition.BODY_AND_HEADER: - return EOFException.MISSING_TYPE_HEADER - case 1, CasePosition.HEADER | CasePosition.BODY_AND_HEADER: - return EOFException.MISSING_CODE_HEADER - case 3, CasePosition.HEADER | CasePosition.BODY_AND_HEADER: - return EOFException.MISSING_TERMINATOR - case _: - return None - - eof_test( - container=eof_code, - expect_exception=get_expected_exception(), - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_size.py b/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_size.py deleted file mode 100644 index 886bf29fef..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_size.py +++ /dev/null @@ -1,348 +0,0 @@ -"""EOF Container, test custom_size field for sections.""" - -from enum import IntEnum - -import pytest -from execution_testing import EOFException, EOFTestFiller, Op -from execution_testing.exceptions.exceptions import ( - EOFExceptionInstanceOrList, -) -from execution_testing.test_types.eof.v1 import ( - Container, - Section, - SectionKind, -) - -from .. 
import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-3540.md" -REFERENCE_SPEC_VERSION = "8dcb0a8c1c0102c87224308028632cc986a61183" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -class SectionSize(IntEnum): - """Enum for the section size.""" - - NORMAL = -1 - ZERO = 0 - UNDERSIZE = 2 - OVERSIZE = 100 - HUGE = 0x8000 - MAX = 0xFFFF - CONTAINER_BIG = 0x00010000 - CONTAINER_MAX = 0xFFFFFFFF - - def __str__(self) -> str: - """Return string representation of the section kind.""" - return self.name - - -@pytest.mark.parametrize( - "section_kind, section_size, exception", - [ - pytest.param(SectionKind.DATA, SectionSize.NORMAL, None), - pytest.param( - SectionKind.DATA, - SectionSize.ZERO, - EOFException.INVALID_SECTION_BODIES_SIZE, - ), - pytest.param( - SectionKind.DATA, - SectionSize.UNDERSIZE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ), - pytest.param( - SectionKind.DATA, - SectionSize.OVERSIZE, - EOFException.TOPLEVEL_CONTAINER_TRUNCATED, - ), - pytest.param( - SectionKind.DATA, - SectionSize.HUGE, - EOFException.TOPLEVEL_CONTAINER_TRUNCATED, - ), - pytest.param( - SectionKind.DATA, - SectionSize.MAX, - EOFException.TOPLEVEL_CONTAINER_TRUNCATED, - ), - pytest.param( - SectionKind.CODE, - SectionSize.NORMAL, - None, - marks=pytest.mark.skip(reason="duplicate"), - ), - pytest.param( - SectionKind.CODE, SectionSize.ZERO, EOFException.ZERO_SECTION_SIZE - ), - pytest.param( - SectionKind.CODE, - SectionSize.UNDERSIZE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ), - pytest.param( - SectionKind.CODE, - SectionSize.OVERSIZE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ), - pytest.param( - SectionKind.CODE, - SectionSize.HUGE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ), - pytest.param( - SectionKind.CODE, - SectionSize.MAX, - EOFException.INVALID_SECTION_BODIES_SIZE, - ), - pytest.param( - SectionKind.TYPE, - SectionSize.NORMAL, - None, - marks=pytest.mark.skip(reason="duplicate"), - ), - pytest.param( - SectionKind.TYPE, - SectionSize.ZERO, - [ - EOFException.ZERO_SECTION_SIZE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ], - id="type_size_zero", - ), - pytest.param( - SectionKind.TYPE, - SectionSize.UNDERSIZE, - [ - EOFException.INVALID_SECTION_BODIES_SIZE, - EOFException.INVALID_TYPE_SECTION_SIZE, - ], - id="type_size_undersize", - ), - pytest.param( - SectionKind.TYPE, - SectionSize.OVERSIZE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ), - pytest.param( - SectionKind.TYPE, - SectionSize.HUGE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ), - pytest.param( - SectionKind.TYPE, - SectionSize.MAX, - [ - EOFException.INVALID_SECTION_BODIES_SIZE, - EOFException.INVALID_TYPE_SECTION_SIZE, - ], - id="type_size_max", - ), - pytest.param( - SectionKind.CONTAINER, - SectionSize.NORMAL, - None, - marks=pytest.mark.skip(reason="duplicate"), - ), - pytest.param( - SectionKind.CONTAINER, - SectionSize.ZERO, - EOFException.ZERO_SECTION_SIZE, - ), - pytest.param( - SectionKind.CONTAINER, - SectionSize.UNDERSIZE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ), - pytest.param( - SectionKind.CONTAINER, - SectionSize.OVERSIZE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ), - pytest.param( - SectionKind.CONTAINER, - SectionSize.HUGE, - EOFException.INVALID_SECTION_BODIES_SIZE, - ), - pytest.param( - SectionKind.CONTAINER, - SectionSize.CONTAINER_BIG, - EOFException.INVALID_SECTION_BODIES_SIZE, - ), - pytest.param( - SectionKind.CONTAINER, - SectionSize.CONTAINER_MAX, - EOFException.INVALID_SECTION_BODIES_SIZE, - ), - ], -) -def test_section_size( - eof_test: EOFTestFiller, - 
section_size: SectionSize, - section_kind: SectionKind, - exception: EOFExceptionInstanceOrList, -) -> None: - """ - Test custom_size is auto, more or less than the actual size of the section. - """ - eof_code = Container() - - if section_size != SectionSize.NORMAL and section_kind == SectionKind.TYPE: - eof_code.sections.append( - Section( - kind=SectionKind.TYPE, - data="0x00800001", - custom_size=section_size, - ), - ) - - if section_size != SectionSize.NORMAL and section_kind == SectionKind.CODE: - eof_code.sections.append( - Section.Code( - code=Op.ADDRESS - + Op.POP - + Op.EOFCREATE[0](0, 0, 0, 0) - + Op.STOP, - custom_size=section_size, - ) - ) - else: - eof_code.sections.append( - Section.Code( - code=Op.ADDRESS - + Op.POP - + Op.EOFCREATE[0](0, 0, 0, 0) - + Op.STOP, - ) - ) - - if ( - section_size != SectionSize.NORMAL - and section_kind == SectionKind.CONTAINER - ): - eof_code.sections.append( - Section.Container( - container=Container( - sections=[ - Section.Code(Op.RETURNCODE[0](0, 0)), - Section.Container( - container=Container( - sections=[Section.Code(Op.STOP)] - ) - ), - ] - ), - custom_size=section_size, - ) - ) - else: - eof_code.sections.append( - Section.Container( - container=Container( - sections=[ - Section.Code(Op.RETURNCODE[0](0, 0)), - Section.Container( - container=Container( - sections=[Section.Code(Op.STOP)] - ) - ), - ] - ), - ) - ) - - if section_size != SectionSize.NORMAL and section_kind == SectionKind.DATA: - eof_code.sections.append( - Section.Data("0x00daaa", custom_size=section_size) - ) - else: - eof_code.sections.append(Section.Data("0x00aaaa")) - eof_test( - container=eof_code, - expect_exception=exception, - ) - - -@pytest.mark.parametrize( - "truncation_len, exception", - [ - # The original container is not valid by itself because its 2-byte code - # section starts with the terminating instruction: INVALID. - pytest.param(0, EOFException.UNREACHABLE_INSTRUCTIONS), - pytest.param( - 1, - EOFException.INVALID_SECTION_BODIES_SIZE, - id="EOF1_truncated_section_2", - ), - pytest.param( - 3, - EOFException.INVALID_SECTION_BODIES_SIZE, - id="EOF1_truncated_section_1", - ), - pytest.param( - 6, - EOFException.INVALID_SECTION_BODIES_SIZE, - id="EOF1_truncated_section_0", - ), - ], -) -def test_truncated_container_without_data( - eof_test: EOFTestFiller, - truncation_len: int, - exception: EOFException, -) -> None: - """ - Test takes a semi-valid container and removes some bytes from its tail. - Migrated from EOFTests/efValidation/EOF1_truncated_section_.json (cases - without data section). - """ - container = Container(sections=[Section.Code(Op.INVALID + Op.INVALID)]) - bytecode = bytes(container) - eof_test( - container=Container( - raw_bytes=bytecode[: len(bytecode) - truncation_len] - ), - expect_exception=exception, - ) - - -@pytest.mark.parametrize( - "truncation_len, exception", - [ - pytest.param(0, None), - pytest.param( - 1, - EOFException.TOPLEVEL_CONTAINER_TRUNCATED, - id="EOF1_truncated_section_4", - ), - pytest.param( - 2, - EOFException.TOPLEVEL_CONTAINER_TRUNCATED, - id="EOF1_truncated_section_3", - ), - ], -) -def test_truncated_container_with_data( - eof_test: EOFTestFiller, - truncation_len: int, - exception: EOFException, -) -> None: - """ - Test takes a valid container with data and removes some bytes from its - tail. Migrated from EOFTests/efValidation/EOF1_truncated_section_.json - (cases with data section). 
-    """
-    data = b"\xaa\xbb"
-    container = Container(
-        sections=[
-            Section.Code(Op.INVALID),
-            Section.Data(
-                data[0 : (len(data) - truncation_len)], custom_size=2
-            ),
-        ]
-    )
-    eof_test(
-        container=container,
-        expect_exception=exception,
-    )
diff --git a/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/__init__.py b/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/__init__.py
deleted file mode 100644
index f08cf8e60e..0000000000
--- a/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-"""
-Test cases for [EIP-4200: EOF - Static relative jumps](https://eips.ethereum.org/EIPS/eip-4200).
-
-EIP-4200 replaces dynamic jump instructions with relative jump offsets for
-improved control flow predictability. Opcodes introduced: `RJUMP` (`0xE0`),
-`RJUMPI` (`0xE1`), `RJUMPV` (`0xE2`).
-"""
diff --git a/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/helpers.py b/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/helpers.py
deleted file mode 100644
index 8e81ab78b1..0000000000
--- a/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/helpers.py
+++ /dev/null
@@ -1,23 +0,0 @@
-"""EOF RJump tests helpers."""
-
-import itertools
-from enum import Enum
-
-"""Storage addresses for common testing fields"""
-_slot = itertools.count()
-next(_slot)  # don't use slot 0
-slot_code_worked = next(_slot)
-slot_conditional_result = next(_slot)
-slot_last_slot = next(_slot)
-
-"""Storage values for common testing fields"""
-value_code_worked = 0x2015
-value_calldata_true = 10
-value_calldata_false = 11
-
-
-class JumpDirection(Enum):
-    """Enum for the direction of the jump."""
-
-    FORWARD = 1
-    BACKWARD = -1
diff --git a/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py b/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py
deleted file mode 100644
index c1823194a4..0000000000
--- a/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py
+++ /dev/null
@@ -1,1212 +0,0 @@
-"""EOF RJUMP tests covering stack and code validation rules."""
-
-import pytest
-from execution_testing import (
-    Account,
-    Bytecode,
-    EOFException,
-    EOFStateTestFiller,
-    EOFTestFiller,
-    Op,
-)
-from execution_testing.test_types.eof.v1 import Container, Section
-from execution_testing.test_types.eof.v1.constants import (
-    MAX_BYTECODE_SIZE,
-)
-
-from .. import EOF_FORK_NAME
-from .helpers import JumpDirection, slot_code_worked, value_code_worked
-
-REFERENCE_SPEC_GIT_PATH = "EIPS/eip-4200.md"
-REFERENCE_SPEC_VERSION = "17d4a8d12d2b5e0f2985c866376c16c8c6df7cba"
-
-pytestmark = pytest.mark.valid_from(EOF_FORK_NAME)
-
-RJUMP_LEN = len(Op.RJUMP[0])
-
-
-def test_rjump_negative(
-    eof_state_test: EOFStateTestFiller,
-) -> None:
-    """Test for a forward RJUMPI and backward RJUMP."""
-    eof_state_test(
-        container=Container.Code(
-            Op.PUSH1[1]
-            + Op.RJUMPI[
-                7
-            ]  # RJUMP cannot be used because of the backward jump restriction
-            + Op.SSTORE(slot_code_worked, Op.MLOAD(0))
-            + Op.STOP
-            + Op.MSTORE(0, value_code_worked)
-            + Op.RJUMP[-16]
-        ),
-        container_post=Account(storage={slot_code_worked: value_code_worked}),
-    )
-
-
-def test_rjump_positive_negative(
-    eof_state_test: EOFStateTestFiller,
-) -> None:
-    """
-    EOF1V4200_0001 (Valid) EOF code containing RJUMP (Positive, Negative).
- """ - eof_state_test( - container=Container.Code( - Op.PUSH0 - + Op.RJUMPI[3] - + Op.RJUMP[7] - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - + Op.RJUMP[-10], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_rjump_zero( - eof_state_test: EOFStateTestFiller, -) -> None: - """EOF1V4200_0002 (Valid) EOF code containing RJUMP (Zero).""" - eof_state_test( - container=Container.Code( - Op.RJUMP[0] - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_rjump_maxes( - eof_state_test: EOFStateTestFiller, -) -> None: - """ - EOF1V4200_0003 EOF with RJUMP containing the max positive and negative - offset (32767). - """ - eof_state_test( - container=Container.Code( - Op.PUSH0 - + Op.RJUMPI[ - RJUMP_LEN - ] # The push/jumpi is to allow the NOOPs to be forward referenced - + Op.RJUMP[0x7FFF] - + Op.NOOP * (0x7FFF - 7) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - + Op.RJUMP[0x8000], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_rjump_max_bytecode_size( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1V4200_0003 EOF with RJUMP containing the maximum offset that does not - exceed the maximum bytecode size. - """ - noop_count = MAX_BYTECODE_SIZE - 27 - code = ( - Op.RJUMPI[RJUMP_LEN]( - Op.ORIGIN - ) # The jumpi is to allow the NOOPs to be forward referenced - + Op.RJUMP[len(Op.NOOP) * noop_count] - + (Op.NOOP * noop_count) - + Op.STOP - ) - container = Container.Code(code) - assert len(container) == MAX_BYTECODE_SIZE - eof_test(container=container) - - -def test_rjump_truncated_rjump( - eof_test: EOFTestFiller, -) -> None: - """EOF1I4200_0001 (Invalid) EOF code containing truncated RJUMP.""" - eof_test( - container=Container.Code(Op.RJUMP), - expect_exception=EOFException.TRUNCATED_INSTRUCTION, - ) - - -def test_rjump_truncated_rjump_2( - eof_test: EOFTestFiller, -) -> None: - """EOF1I4200_0002 (Invalid) EOF code containing truncated RJUMP.""" - eof_test( - container=Container.Code(Op.RJUMP + b"\x00"), - expect_exception=EOFException.TRUNCATED_INSTRUCTION, - ) - - -@pytest.mark.parametrize("offset", [-5, -13]) -def test_rjump_into_header( - eof_test: EOFTestFiller, - offset: int, -) -> None: - """ - EOF1I4200_0003 (Invalid) EOF code containing RJUMP with target outside code - bounds (Jumping into header). - """ - eof_test( - container=Container.Code(Op.RJUMP[offset]), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjump_before_header( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1I4200_0004 (Invalid) EOF code containing RJUMP with target outside code - bounds (Jumping before code begin). - """ - eof_test( - container=Container.Code(Op.RJUMP[-23]), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjump_into_data( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1I4200_0005 (Invalid) EOF code containing RJUMP with target outside code - bounds (Jumping into data section). - """ - eof_test( - container=Container( - sections=[ - Section.Code(Op.RJUMP[2]), - Section.Data(data=b"\xaa\xbb\xcc"), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjump_outside_other_section_before( - eof_test: EOFTestFiller, -) -> None: - """ - EOF code containing RJUMP with target outside code bounds (prior code - section). 
- """ - eof_test( - container=Container( - sections=[ - Section.Code(code=Op.JUMPF[1]), - Section.Code(code=Op.RJUMP[-6]), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjump_outside_other_section_after( - eof_test: EOFTestFiller, -) -> None: - """ - EOF code containing RJUMP with target outside code bounds (Subsequent code - section). - """ - eof_test( - container=Container( - sections=[ - Section.Code(code=Op.JUMPF[1]), - Section.Code(code=Op.RJUMP[3] + Op.JUMPF[2]), - Section.Code(code=Op.STOP), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjump_after_container( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1I4200_0006 (Invalid) EOF code containing RJUMP with target outside code - bounds (Jumping after code end). - """ - eof_test( - container=Container.Code(Op.RJUMP[2]), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjump_to_code_end( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1I4200_0007 (Invalid) EOF code containing RJUMP with target outside code - bounds (Jumping to code end). - """ - eof_test( - container=Container.Code(Op.RJUMP[1] + Op.STOP), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize("offset", range(1, Op.RJUMP.data_portion_length + 1)) -def test_rjump_into_self_data_portion( - eof_test: EOFTestFiller, - offset: int, -) -> None: - """ - EOF1I4200_0008 (Invalid) EOF code containing RJUMP with target self RJUMP - immediate. - """ - eof_test( - container=Container.Code(Op.RJUMP[-offset] + Op.STOP), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjump_into_self_remaining_code( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1I4200_0008 (Invalid) EOF code containing RJUMP with target self RJUMP - but remaining unreachable code. - """ - eof_test( - container=Container.Code(Op.RJUMP[-len(Op.RJUMP[0])] + Op.STOP), - expect_exception=EOFException.UNREACHABLE_INSTRUCTIONS, - ) - - -@pytest.mark.parametrize("stack_height_spread", [-1, 0, 1, 2]) -def test_rjump_into_self( - eof_test: EOFTestFiller, - stack_height_spread: int, -) -> None: - """EOF code containing RJUMP with target self RJUMP.""" - # Create variadic stack height by the parametrized spread. - stack_spread_code = Bytecode() - if stack_height_spread >= 0: - stack_spread_code = ( - Op.RJUMPI[stack_height_spread](0) + Op.PUSH0 * stack_height_spread - ) - - eof_test( - container=Container.Code( - stack_spread_code + Op.RJUMP[-len(Op.RJUMP[0])] - ), - ) - - -def test_rjump_into_self_pre_code( - eof_test: EOFTestFiller, -) -> None: - """ - EOF code containing RJUMP with target self RJUMP with non-zero stack before - RJUMP. 
- """ - eof_test( - container=Container.Code(Op.PUSH1[0] + Op.RJUMP[-len(Op.RJUMP[0])]), - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="forwards_rjump_0", - sections=[ - Section.Code( - code=Op.RJUMP[0] + Op.STOP, - max_stack_increase=0, - ), - ], - expected_bytecode="ef00010100040200010004ff00000000800000e0000000", - ), - Container( - name="forwards_rjump_1", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[3] - + Op.RJUMP[1] - + Op.NOT - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef0001010004020001000bff000000008000025f6000e10003e000011900", - ), - Container( - name="forwards_rjump_2", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[8] - + Op.PUSH1[0] - + Op.RJUMPI[6] - + Op.RJUMP[4] - + Op.RJUMP[1] - + Op.NOT - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef00010100040200010013ff000000008000025f6000e100086000e10006e00004e000011900", - ), - Container( - name="forwards_rjump_3", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[3] - + Op.RJUMP[1] - + Op.PUSH0 - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef0001010004020001000bff000000008000025f6000e10003e000015f00", - ), - Container( - name="forwards_rjump_4", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[8] - + Op.PUSH1[0] - + Op.RJUMPI[7] - + Op.RJUMP[5] - + Op.PUSH0 - + Op.RJUMP[1] - + Op.NOT - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef00010100040200010014ff000000008000025f6000e100086000e10007e000055fe000011900", - ), - Container( - name="forwards_rjump_variable_stack_0", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.RJUMP[0] - + Op.STOP, - max_stack_increase=3, - ), - ], - expected_bytecode="ef0001010004020001000cff000000008000035f6000e100025f5fe0000000", - ), - Container( - name="forwards_rjump_variable_stack_1", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[3] - + Op.RJUMP[1] - + Op.NOT - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef00010100040200010013ff000000008000055f6000e100025f5f5f6000e10003e000011900", - ), - Container( - name="forwards_rjump_variable_stack_2", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[8] - + Op.PUSH1[0] - + Op.RJUMPI[6] - + Op.RJUMP[4] - + Op.RJUMP[1] - + Op.NOT - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef0001010004020001001bff000000008000055f6000e100025f5f5f6000e100086000e10006e00004e000011900", - ), - Container( - name="forwards_rjump_variable_stack_3", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[3] - + Op.RJUMP[1] - + Op.PUSH0 - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef00010100040200010013ff000000008000055f6000e100025f5f5f6000e10003e000015f00", - ), - Container( - name="forwards_rjump_variable_stack_4", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[8] - + Op.PUSH1[0] - + Op.RJUMPI[7] - + Op.RJUMP[5] - + Op.PUSH0 - + Op.RJUMP[1] - + Op.NOT - + Op.STOP, - max_stack_increase=4, - ), - ], - 
expected_bytecode="ef0001010004020001001bff000000008000045f6000e100025f5f6000e100086000e10007e000055fe000011900", - ), - ], - ids=lambda x: x.name, -) -def test_rjump_valid_forward( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """ - Validate a valid code section containing at least one forward RJUMP. These - tests exercise the stack height validation. - """ - eof_test(container=container) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="backwards_rjump_1", - sections=[ - Section.Code( - code=Op.PUSH0 + Op.POP + Op.RJUMP[-5], - max_stack_increase=1, - ), - ], - expected_bytecode="ef00010100040200010005ff000000008000015f50e0fffb", - ), - Container( - name="backwards_rjump_2", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.POP - + Op.PUSH1[1] - + Op.RJUMPI[3] - + Op.RJUMP[-8] - + Op.RJUMP[-11], - max_stack_increase=1, - ), - ], - expected_bytecode="ef0001010004020001000dff000000008000015f506001e10003e0fff8e0fff5", - ), - Container( - name="backwards_rjump_variable_stack_0", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.RJUMP[-3], - max_stack_increase=3, - ), - ], - expected_bytecode="ef0001010004020001000bff000000008000035f6000e100025f5fe0fffd", - ), - Container( - name="backwards_rjump_variable_stack_1", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.RJUMP[-5], - max_stack_increase=4, - ), - ], - expected_bytecode="ef0001010004020001000dff000000008000045f6000e100025f5f5f50e0fffb", - ), - Container( - name="backwards_rjump_variable_stack_2", - sections=[ - Section.Code( - code=( - Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.PUSH1[1] - + Op.RJUMPI[3] - + Op.RJUMP[-8] - + Op.RJUMP[-11] - ), - max_stack_increase=4, - ), - ], - expected_bytecode="ef00010100040200010015ff000000008000045f6000e100025f5f5f506001e10003e0fff8e0fff5", - ), - ], - ids=lambda x: x.name, -) -def test_rjump_valid_backward( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """ - Validate a valid code section containing at least one backward RJUMP. These - tests exercise the stack height validation. - """ - eof_test(container=container) - - -def test_rjump_into_stack_height_diff( - eof_test: EOFTestFiller, -) -> None: - """ - EOF code containing RJUMP with target instruction that causes stack height - difference. - """ - eof_test( - container=Container.Code( - Op.PUSH1[0] + Op.RJUMP[-(len(Op.RJUMP[0]) + len(Op.PUSH1[0]))] - ), - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) - - -def test_rjump_into_stack_height_diff_2( - eof_test: EOFTestFiller, -) -> None: - """ - EOF code containing RJUMP with target instruction that cause stack height - difference. 
- """ - eof_test( - container=Container.Code( - Op.PUSH1[0] + Op.POP + Op.RJUMP[-(len(Op.RJUMP[0]) + len(Op.POP))] - ), - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="backwards_rjump_3", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.POP - + Op.PUSH1[1] - + Op.RJUMPI[3] - + Op.RJUMP[-8] - + Op.PUSH0 - + Op.RJUMP[-12], - max_stack_increase=1, - ), - ], - expected_bytecode="ef0001010004020001000eff000000008000015f506001e10003e0fff85fe0fff4", - ), - Container( - name="backwards_rjump_4", - sections=[ - Section.Code( - code=Op.PUSH0 + Op.RJUMP[-4], - max_stack_increase=1, - ), - ], - expected_bytecode="ef00010100040200010004ff000000008000015fe0fffc", - ), - Container( - name="backwards_rjump_5", - sections=[ - Section.Code( - code=Op.PUSH0 + Op.POP + Op.RJUMP[-4], - max_stack_increase=1, - ), - ], - expected_bytecode="ef00010100040200010005ff000000008000015f50e0fffc", - ), - Container( - name="backwards_rjump_8", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPI[-7] - + Op.PUSH0 - + Op.RJUMP[-11], - max_stack_increase=1, - ), - ], - expected_bytecode="ef0001010004020001000bff000000008000015f506000e1fff95fe0fff5", - ), - Container( - name="backwards_rjump_variable_stack_3", - sections=[ - Section.Code( - code=( - Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.PUSH1[1] - + Op.RJUMPI[3] - + Op.RJUMP[-8] - + Op.PUSH0 - + Op.RJUMP[-12] - ), - max_stack_increase=4, - ), - ], - expected_bytecode="ef00010100040200010016ff000000008000045f6000e100025f5f5f506001e10003e0fff85fe0fff4", - ), - Container( - name="backwards_rjump_variable_stack_4", - sections=[ - Section.Code( - code=( - Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.PUSH0 - + Op.RJUMP[-7] - ), - max_stack_increase=4, - ), - ], - expected_bytecode="ef00010100040200010011ff000000008000045f6000e100025f5f6000e100015fe0fff9", - ), - Container( - name="backwards_rjump_variable_stack_5", - sections=[ - Section.Code( - code=( - Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.POP - + Op.RJUMP[-7] - ), - max_stack_increase=4, - ), - ], - expected_bytecode="ef00010100040200010011ff000000008000045f6000e100025f5f6000e1000150e0fff9", - ), - Container( - name="backwards_rjump_variable_stack_6", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.RJUMP[-4], - max_stack_increase=4, - ), - ], - expected_bytecode="ef0001010004020001000cff000000008000045f6000e100025f5f5fe0fffc", - ), - Container( - name="backwards_rjump_variable_stack_7", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.RJUMP[-4], - max_stack_increase=3, - ), - ], - expected_bytecode="ef0001010004020001000dff000000008000035f6000e100025f5f5f50e0fffc", - ), - Container( - name="backwards_rjump_variable_stack_8", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPI[-7] - + Op.PUSH0 - + Op.RJUMP[-11], - max_stack_increase=4, - ), - ], - expected_bytecode="ef00010100040200010013ff000000008000045f6000e100025f5f5f506000e1fff95fe0fff5", - ), - ], - ids=lambda x: x.name, -) -def test_rjump_backward_invalid_max_stack_height( - eof_test: EOFTestFiller, - 
container: Container, -) -> None: - """ - Validate a code section containing at least one backward RJUMP invalid - because of the incorrect max stack height. - """ - eof_test( - container=container, - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) - - -def test_rjump_into_stack_underflow( - eof_test: EOFTestFiller, -) -> None: - """ - EOF code containing RJUMP with target instruction that cause stack - underflow. - """ - eof_test( - container=Container.Code( - Op.ORIGIN - + Op.RJUMPI[len(Op.RJUMP[0])] - + Op.RJUMP[len(Op.STOP)] - + Op.STOP - + Op.POP - + Op.STOP - ), - expect_exception=EOFException.STACK_UNDERFLOW, - ) - - -def test_rjump_into_rjump( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1I4200_0009 (Invalid) EOF code containing RJUMP with target other RJUMP - immediate. - """ - eof_test( - container=Container.Code(Op.RJUMP[1] + Op.RJUMP[0]), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjump_into_rjumpi( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1I4200_0010 (Invalid) EOF code containing RJUMP with target RJUMPI - immediate. - """ - eof_test( - container=Container.Code( - Op.RJUMP[5] + Op.STOP + Op.PUSH1[1] + Op.RJUMPI[-6] + Op.STOP - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "jump", [JumpDirection.FORWARD, JumpDirection.BACKWARD] -) -def test_rjump_into_push_1( - eof_test: EOFTestFiller, jump: JumpDirection -) -> None: - """ - EOF1I4200_0011 (Invalid) EOF code containing RJUMP with target PUSH1 - immediate. - """ - code = ( - Op.PUSH1[1] + Op.RJUMP[-4] - if jump == JumpDirection.BACKWARD - else Op.RJUMP[1] + Op.PUSH1[1] - ) + Op.STOP - eof_test( - container=Container.Code(code), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.PUSH2, - Op.PUSH3, - Op.PUSH4, - Op.PUSH5, - Op.PUSH6, - Op.PUSH7, - Op.PUSH8, - Op.PUSH9, - Op.PUSH10, - Op.PUSH11, - Op.PUSH12, - Op.PUSH13, - Op.PUSH14, - Op.PUSH15, - Op.PUSH16, - Op.PUSH17, - Op.PUSH18, - Op.PUSH19, - Op.PUSH20, - Op.PUSH21, - Op.PUSH22, - Op.PUSH23, - Op.PUSH24, - Op.PUSH25, - Op.PUSH26, - Op.PUSH27, - Op.PUSH28, - Op.PUSH29, - Op.PUSH30, - Op.PUSH31, - Op.PUSH32, - ], -) -@pytest.mark.parametrize( - "jump", [JumpDirection.FORWARD, JumpDirection.BACKWARD] -) -@pytest.mark.parametrize( - "data_portion_end", - [True, False], - ids=["data_portion_end", "data_portion_start"], -) -def test_rjump_into_push_n( - eof_test: EOFTestFiller, - opcode: Op, - jump: JumpDirection, - data_portion_end: bool, -) -> None: - """ - EOF1I4200_0011 (Invalid) EOF code containing RJUMP with target PUSH2+ - immediate. - """ - data_portion_length = int.from_bytes(opcode, byteorder="big") - 0x5F - if jump == JumpDirection.FORWARD: - offset = data_portion_length if data_portion_end else 1 - code = Op.RJUMP[offset] + opcode[0] + Op.STOP - else: - offset = -4 if data_portion_end else -4 - data_portion_length + 1 - code = opcode[0] + Op.RJUMP[offset] - eof_test( - container=Container.Code(code), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize("target_rjumpv_table_size", [1, 256]) -@pytest.mark.parametrize( - "data_portion_end", - [True, False], - ids=["data_portion_end", "data_portion_start"], -) -def test_rjump_into_rjumpv( - eof_test: EOFTestFiller, - target_rjumpv_table_size: int, - data_portion_end: bool, -) -> None: - """ - EOF1I4200_0012 (Invalid) EOF code containing RJUMP with target RJUMPV - immediate. 
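The RJUMPV-target case that follows computes its invalid destinations from RJUMPV's immediate layout: one max_index byte followed by max_index + 1 two-byte offsets, so a table with table_size entries occupies 1 + 2 * table_size bytes. A small sketch of that arithmetic (rjumpv_immediate_size is a hypothetical helper, not part of the framework):

def rjumpv_immediate_size(table_size: int) -> int:
    # One max_index byte plus one two-byte signed offset per table entry.
    return 1 + 2 * table_size

# Measured from the end of the preceding RJUMP: STOP (1) + PUSH1 with its
# immediate (2) + the RJUMPV opcode (1) give offset 4 to the first immediate
# byte, and 4 + 2 * table_size reaches the last byte of the jump table.
assert rjumpv_immediate_size(1) == 3
assert rjumpv_immediate_size(256) == 513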
- """ - invalid_destination = ( - 4 + (2 * target_rjumpv_table_size) if data_portion_end else 4 - ) - target_jump_table = [0 for _ in range(target_rjumpv_table_size)] - eof_test( - container=Container.Code( - Op.RJUMP[invalid_destination] - + Op.STOP - + Op.PUSH1[1] - + Op.RJUMPV[target_jump_table] - + Op.STOP, - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "data_portion_end", - [True, False], - ids=["data_portion_end", "data_portion_start"], -) -def test_rjump_into_callf( - eof_test: EOFTestFiller, - data_portion_end: bool, -) -> None: - """ - EOF1I4200_0013 (Invalid) EOF code containing RJUMP with target CALLF - immediate. - """ - invalid_destination = 2 if data_portion_end else 1 - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.RJUMP[invalid_destination] + Op.CALLF[1] + Op.STOP, - ), - Section.Code( - code=Op.SSTORE(1, 1) + Op.RETF, - code_outputs=0, - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjump_into_dupn( - eof_test: EOFTestFiller, -) -> None: - """EOF code containing RJUMP with target DUPN immediate.""" - eof_test( - container=Container.Code( - Op.PUSH1[1] - + Op.PUSH1[1] - + Op.RJUMP[1] - + Op.DUPN[1] - + Op.SSTORE - + Op.STOP, - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjump_into_swapn( - eof_test: EOFTestFiller, -) -> None: - """EOF code containing RJUMP with target SWAPN immediate.""" - eof_test( - container=Container.Code( - Op.PUSH1[1] - + Op.PUSH1[1] - + Op.RJUMP[1] - + Op.SWAPN[1] - + Op.SSTORE - + Op.STOP, - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjump_into_exchange( - eof_test: EOFTestFiller, -) -> None: - """EOF code containing RJUMP with target EXCHANGE immediate.""" - eof_test( - container=Container.Code( - Op.PUSH1[1] - + Op.PUSH1[2] - + Op.PUSH1[3] - + Op.RJUMP[1] - + Op.EXCHANGE[0x00] - + Op.SSTORE - + Op.STOP, - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjump_into_eofcreate( - eof_test: EOFTestFiller, -) -> None: - """EOF code containing RJUMP with target EOFCREATE immediate.""" - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 * 4 - + Op.RJUMP[1] - + Op.EOFCREATE[0] - + Op.STOP, - ), - Section.Container( - container=Container( - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, 0), - ), - Section.Container( - container=Container.Code(code=Op.STOP), - ), - ] - ) - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjump_into_returncode( - eof_test: EOFTestFiller, -) -> None: - """EOF code containing RJUMP with target RETURNCODE immediate.""" - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP, - ), - Section.Container( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 * 2 - + Op.RJUMP[1] - + Op.RETURNCODE[0], - ), - Section.Container( - container=Container.Code(code=Op.STOP), - ), - ] - ) - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "unreachable_op", - [Op.STOP, Op.PUSH1[0], Op.PUSH2[0], Op.RJUMP[-3], Op.RJUMP[0], Op.INVALID], -) -@pytest.mark.parametrize( - "terminating_op", - [Op.STOP, Op.RJUMP[-3], Op.INVALID], -) -def test_rjump_unreachable_code( - eof_test: EOFTestFiller, - unreachable_op: Op, - terminating_op: Op, -) -> None: - """EOF code containing instructions skipped by RJUMP.""" - container = 
Container.Code( - code=(Op.RJUMP[len(unreachable_op)] + unreachable_op + terminating_op) - ) - eof_test( - container=container, - expect_exception=EOFException.UNREACHABLE_INSTRUCTIONS, - ) - - -def test_rjump_backwards_reference_only( - eof_test: EOFTestFiller, -) -> None: - """EOF code containing instructions only reachable by backwards RJUMP.""" - container = Container.Code( - code=( - Op.RJUMP[RJUMP_LEN] - + Op.RJUMP[RJUMP_LEN] - + Op.RJUMP[-(2 * RJUMP_LEN)] - + Op.STOP - ) - ) - eof_test( - container=container, - expect_exception=EOFException.UNREACHABLE_INSTRUCTIONS, - ) - - -def test_rjump_backwards_illegal_stack_height( - eof_test: EOFTestFiller, -) -> None: - """Invalid backward jump, found via fuzzing coverage.""" - eof_test( - container=Container.Code( - code=( - Op.PUSH0 - + Op.RJUMPI[3] - + Op.RJUMP(7) - + Op.PUSH2[0x2015] - + Op.PUSH3[0x015500] - + Op.RJUMP[-10] - ), - max_stack_increase=0x24, - ), - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) - - -def test_rjump_backwards_infinite_loop( - eof_test: EOFTestFiller, -) -> None: - """Validate that a backwards RJUMP as terminal operation is valid.""" - eof_test( - container=Container( - name="backwards_rjump_terminal", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.RJUMPI[3] - + Op.RJUMP[7] - + Op.SSTORE(1, 0x2015) - + Op.STOP - + Op.RJUMP[-10] - ), - Section.Data(data="0xdeadbeef"), - ], - ), - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py b/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py deleted file mode 100644 index 2a515bfae0..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py +++ /dev/null @@ -1,2024 +0,0 @@ -"""EOF JUMPF tests covering stack and code validation rules.""" - -import pytest -from execution_testing import ( - Account, - Alloc, - Bytecode, - Environment, - EOFException, - EOFStateTestFiller, - EOFTestFiller, - Op, - StateTestFiller, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container, Section -from execution_testing.test_types.eof.v1.constants import ( - MAX_BYTECODE_SIZE, -) - -from .. 
import EOF_FORK_NAME -from .helpers import ( - JumpDirection, - slot_code_worked, - slot_conditional_result, - value_calldata_false, - value_calldata_true, - value_code_worked, -) - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-4200.md" -REFERENCE_SPEC_VERSION = "17d4a8d12d2b5e0f2985c866376c16c8c6df7cba" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - -RJUMP_LEN = len(Op.RJUMP[0]) -RJUMPI_LEN = len(Op.RJUMPI[0]) - - -@pytest.mark.parametrize( - "calldata", - [pytest.param(b"\x00", id="False"), pytest.param(b"\x01", id="True")], -) -def test_rjumpi_condition_forwards( - state_test: StateTestFiller, - pre: Alloc, - calldata: bytes, -) -> None: - """Test RJUMPI contract switching based on external input (forwards).""" - env = Environment() - sender = pre.fund_eoa(10**18) - contract_address = pre.deploy_contract( - code=Container.Code( - Op.PUSH1(0) - + Op.CALLDATALOAD - + Op.RJUMPI[6] - + Op.SSTORE(slot_conditional_result, value_calldata_false) - + Op.STOP - + Op.SSTORE(slot_conditional_result, value_calldata_true) - + Op.STOP, - ) - ) - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - data=calldata, - sender=sender, - ) - post = { - contract_address: Account( - storage={ - slot_conditional_result: value_calldata_false - if calldata == b"\0" - else value_calldata_true - } - ) - } - state_test(env=env, tx=tx, pre=pre, post=post) - - -@pytest.mark.parametrize( - "calldata", - [pytest.param(b"\x00", id="False"), pytest.param(b"\x01", id="True")], -) -def test_rjumpi_condition_backwards( - state_test: StateTestFiller, - pre: Alloc, - calldata: bytes, -) -> None: - """Test RJUMPI contract switching based on external input.""" - env = Environment() - sender = pre.fund_eoa(10**18) - contract_address = pre.deploy_contract( - code=Container.Code( - Op.PUSH1(1) - + Op.RJUMPI[6] - + Op.SSTORE(slot_conditional_result, value_calldata_true) - + Op.STOP - + Op.PUSH0 - + Op.CALLDATALOAD - + Op.RJUMPI[-11] - + Op.SSTORE(slot_conditional_result, value_calldata_false) - + Op.STOP, - ) - ) - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - data=calldata, - sender=sender, - ) - post = { - contract_address: Account( - storage={ - slot_conditional_result: value_calldata_false - if calldata == b"\0" - else value_calldata_true - } - ) - } - state_test(env=env, tx=tx, pre=pre, post=post) - - -@pytest.mark.parametrize( - "calldata", - [pytest.param(b"\x00", id="False"), pytest.param(b"\x01", id="True")], -) -def test_rjumpi_condition_zero( - state_test: StateTestFiller, - pre: Alloc, - calldata: bytes, -) -> None: - """ - Test RJUMPI contract switching based on external input (condition zero). 
- """ - env = Environment() - sender = pre.fund_eoa(10**18) - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.CALLDATALOAD - + Op.RJUMPI[0] - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - ] - ), - ) - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - data=calldata, - sender=sender, - ) - post = { - contract_address: Account( - storage={slot_code_worked: value_code_worked} - ) - } - state_test(env=env, tx=tx, pre=pre, post=post) - - -def test_rjumpi_forwards( - eof_state_test: EOFStateTestFiller, -) -> None: - """EOF1V4200_0004 (Valid) EOF code containing RJUMPI (Positive).""" - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.RJUMPI[3] - + Op.NOOP - + Op.NOOP - + Op.STOP - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_rjumpi_backwards( - eof_state_test: EOFStateTestFiller, -) -> None: - """EOF1V4200_0005 (Valid) EOF code containing RJUMPI (Negative).""" - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.RJUMPI[7] - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - + Op.PUSH1(1) - + Op.RJUMPI[-12] - + Op.STOP, - ) - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_rjumpi_zero( - eof_state_test: EOFStateTestFiller, -) -> None: - """EOF1V4200_0006 (Valid) EOF code containing RJUMPI (Zero).""" - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.RJUMPI[0] - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_rjumpi_max_forward( - eof_state_test: EOFStateTestFiller, -) -> None: - """ - EOF1V4200_0007 (Valid) EOF with RJUMPI containing the maximum offset - (32767). 
- """ - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.RJUMPI[32767] - + Op.NOOP * 32768 - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_rjumpi_max_backward( - eof_state_test: EOFStateTestFiller, -) -> None: - """EOF with RJUMPI containing the maximum negative offset (-32768).""" - ( - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.RJUMPI[0x7FFF] - + Op.NOOP * (0x7FFF - 7) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - + Op.PUSH0 - + Op.RJUMPI[0x8000] - + Op.STOP, - ) - ], - ), - container_post=Account( - storage={slot_code_worked: value_code_worked} - ), - ), - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="forwards_rjumpi_0", - sections=[ - Section.Code( - code=Op.PUSH1[1] + Op.RJUMPI[0] + Op.STOP, - max_stack_increase=1, - ), - ], - expected_bytecode="ef000101000402" - "00010006ff000000008000016001e1000000", - ), - Container( - name="forwards_rjumpi_1", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.NOT - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef000101000402" - "00010008ff000000008000025f6000e100011900", - ), - Container( - name="forwards_rjumpi_10", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[4] - + Op.POP - + Op.RJUMP[1] - + Op.NOT - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef000101000402" - "0001000cff000000008000025f6000e1000450e000011900", - ), - Container( - name="forwards_rjumpi_11", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[3] - + Op.RJUMP[0] - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef000101000402" - "0001000aff000000008000025f6000e10003e0000000", - ), - Container( - name="forwards_rjumpi_12", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[4] - + Op.PUSH0 - + Op.RJUMP[0] - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef000101000402" - "0001000bff000000008000025f6000e100045fe0000000", - ), - Container( - name="forwards_rjumpi_2", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[6] - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.NOT - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef000101000402" - "0001000dff000000008000025f6000e100066000e100011900", - ), - Container( - name="forwards_rjumpi_3", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.PUSH0 - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef000101000402" - "00010008ff000000008000025f6000e100015f00", - ), - Container( - name="forwards_rjumpi_4", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[7] - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.NOT - + Op.STOP, - max_stack_increase=3, - ), - ], - expected_bytecode="ef000101000402" - "0001000eff000000008000035f6000e100075f6000e100011900", - ), - Container( - name="forwards_rjumpi_5", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[1] - + Op.ADD - + Op.DUP1 - + Op.PUSH1[10] - + Op.GT - + Op.RJUMPI[4] - + Op.DUP1 - + Op.RJUMPI[-14] - + Op.STOP, - max_stack_increase=3, - ), - ], - expected_bytecode="ef000101000402" - "00010010ff000000008000035f60010180600a11e1000480e1fff200", - ), - Container( - name="forwards_rjumpi_6", - sections=[ - Section.Code( - code=Op.PUSH0 - + 
Op.PUSH1[1] - + Op.ADD - + Op.DUP1 - + Op.PUSH1[10] - + Op.GT - + Op.RJUMPI[5] - + Op.PUSH0 - + Op.DUP1 - + Op.RJUMPI[-13] - + Op.STOP, - max_stack_increase=3, - ), - ], - expected_bytecode="ef000101000402" - "00010011ff000000008000035f60010180600a11e100055f80e1fff300", - ), - Container( - name="forwards_rjumpi_7", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[4] - + Op.PUSH0 - + Op.RJUMP[1] - + Op.PUSH0 - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef000101000402" - "0001000cff000000008000025f6000e100045fe000015f00", - ), - Container( - name="forwards_rjumpi_8", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[4] - + Op.PUSH0 - + Op.RJUMP[1] - + Op.NOT - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef000101000402" - "0001000cff000000008000025f6000e100045fe000011900", - ), - Container( - name="forwards_rjumpi_9", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[4] - + Op.POP - + Op.RJUMP[1] - + Op.POP - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef000101000402" - "0001000cff000000008000025f6000e1000450e000015000", - ), - Container( - name="forwards_rjumpi_variable_stack_0", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[1] - + Op.RJUMPI[0] - + Op.STOP, - max_stack_increase=4, - ), - ], - expected_bytecode="ef000101000402" - "0001000eff000000008000045f6000e100025f5f6001e1000000", - ), - Container( - name="forwards_rjumpi_variable_stack_1", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.NOT - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef000101000402" - "00010010ff000000008000055f6000e100025f5f5f6000e100011900", - ), - Container( - name="forwards_rjumpi_variable_stack_10", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[4] - + Op.POP - + Op.RJUMP[1] - + Op.NOT - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef000101000402" - "00010014ff000000008000055f6000e100025f5f5f6000e1000450e000011900", - ), - Container( - name="forwards_rjumpi_variable_stack_11", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[3] - + Op.RJUMP[0] - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef000101000402" - "00010012ff000000008000055f6000e100025f5f5f6000e10003e0000000", - ), - Container( - name="forwards_rjumpi_variable_stack_12", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[4] - + Op.PUSH0 - + Op.RJUMP[0] - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef000101000402" - "00010013ff000000008000055f6000e100025f5f5f6000e100045fe0000000", - ), - Container( - name="forwards_rjumpi_variable_stack_2", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[6] - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.NOT - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef000101000402" - "00010015ff000000008000055f6000e1" - "00025f5f5f6000e100066000e100011900", - ), - Container( - name="forwards_rjumpi_variable_stack_3", - sections=[ - 
Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.PUSH0 - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef000101000402" - "00010010ff000000008000055f6000e100025f5f5f6000e100015f00", - ), - Container( - name="forwards_rjumpi_variable_stack_4", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[7] - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.NOT - + Op.STOP, - max_stack_increase=6, - ), - ], - expected_bytecode="ef000101000402" - "00010016ff000000008000065f6000e1" - "00025f5f5f6000e100075f6000e100011900", - ), - Container( - name="forwards_rjumpi_variable_stack_5", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[1] - + Op.ADD - + Op.DUP1 - + Op.PUSH1[10] - + Op.GT - + Op.RJUMPI[4] - + Op.DUP1 - + Op.RJUMPI[-14] - + Op.STOP, - max_stack_increase=6, - ), - ], - expected_bytecode="ef000101000402" - "00010018ff000000008000065f6000e1" - "00025f5f5f60010180600a11e1000480e1fff200", - ), - Container( - name="forwards_rjumpi_variable_stack_6", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[1] - + Op.ADD - + Op.DUP1 - + Op.PUSH1[10] - + Op.GT - + Op.RJUMPI[5] - + Op.PUSH0 - + Op.DUP1 - + Op.RJUMPI[-13] - + Op.STOP, - max_stack_increase=6, - ), - ], - expected_bytecode="ef000101000402" - "00010019ff000000008000065f6000e1" - "00025f5f5f60010180600a11e100055f80e1fff300", - ), - Container( - name="forwards_rjumpi_variable_stack_7", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[4] - + Op.PUSH0 - + Op.RJUMP[1] - + Op.PUSH0 - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef000101000402" - "00010014ff000000008000055f6000e100025f5f5f6000e100045fe000015f00", - ), - Container( - name="forwards_rjumpi_variable_stack_8", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[4] - + Op.PUSH0 - + Op.RJUMP[1] - + Op.NOT - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef000101000402" - "00010014ff000000008000055f6000e1" - "00025f5f5f6000e100045fe000011900", - ), - Container( - name="forwards_rjumpi_variable_stack_9", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[4] - + Op.POP - + Op.RJUMP[1] - + Op.POP - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef000101000402" - "00010014ff000000008000055f6000e1" - "00025f5f5f6000e1000450e000015000", - ), - ], - ids=lambda x: x.name, -) -def test_rjumpi_valid_forward( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """ - Validate a valid code section containing at least one forward RJUMPI. These - tests exercise the stack height validation. 
- """ - eof_test(container=container) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="backwards_rjumpi_0", - sections=[ - Section.Code( - code=Op.PUSH1[0] + Op.RJUMPI[-5] + Op.STOP, - max_stack_increase=1, - ), - ], - expected_bytecode="ef000101000402" - "00010006ff000000008000016000e1fffb00", - ), - Container( - name="backwards_rjumpi_1", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPI[-7] - + Op.STOP, - max_stack_increase=1, - ), - ], - expected_bytecode="ef000101000402" - "00010008ff000000008000015f506000e1fff900", - ), - Container( - name="backwards_rjumpi_2", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPI[-7] - + Op.PUSH1[0] - + Op.RJUMPI[-12] - + Op.STOP, - max_stack_increase=1, - ), - ], - expected_bytecode="ef000101000402" - "0001000dff000000008000015f506000e1fff96000e1fff400", - ), - Container( - name="backwards_rjumpi_4", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[1] - + Op.ADD - + Op.DUP1 - + Op.RJUMPI[-7] - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef000101000402" - "00010009ff00000000800002 5f60010180e1fff900", - ), - Container( - name="backwards_rjumpi_7", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPI[-7] - + Op.RJUMP[-10], - max_stack_increase=1, - ), - ], - expected_bytecode="ef000101000402" - "0001000aff000000008000015f506000e1fff9e0fff6", - ), - Container( - name="backwards_rjumpi_variable_stack_0", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[-5] - + Op.STOP, - max_stack_increase=4, - ), - ], - expected_bytecode="ef000101000402" - "0001000eff000000008000045f6000e100025f5f6000e1fffb00", - ), - Container( - name="backwards_rjumpi_variable_stack_1", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPI[-7] - + Op.STOP, - max_stack_increase=4, - ), - ], - expected_bytecode="ef000101000402" - "00010010ff000000008000045f6000e100025f5f5f506000e1fff900", - ), - Container( - name="backwards_rjumpi_variable_stack_2", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPI[-7] - + Op.PUSH1[0] - + Op.RJUMPI[-12] - + Op.STOP, - max_stack_increase=4, - ), - ], - expected_bytecode="ef000101000402" - "00010015ff000000008000045f6000e1" - "00025f5f5f506000e1fff96000e1fff400", - ), - Container( - name="backwards_rjumpi_variable_stack_4", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[1] - + Op.ADD - + Op.DUP1 - + Op.RJUMPI[-7] - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef000101000402" - "00010011ff000000008000055f6000e1" - "00025f5f5f60010180e1fff900", - ), - Container( - name="backwards_rjumpi_variable_stack_7", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPI[-7] - + Op.RJUMP[-10], - max_stack_increase=4, - ), - ], - expected_bytecode="ef000101000402" - "00010012ff000000008000045f6000e1" - "00025f5f5f506000e1fff9e0fff6", - ), - ], - ids=lambda x: x.name, -) -def test_rjumpi_valid_backward( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """ - Validate a valid code section containing at least 
one backward RJUMPI. - These tests exercise the stack height validation. - """ - eof_test(container=container) - - -def test_rjumpi_max_bytecode_size( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1V4200_0003 EOF with RJUMPI containing the maximum offset that does not - exceed the maximum bytecode size. - """ - noop_count = MAX_BYTECODE_SIZE - 24 - code = ( - Op.RJUMPI[len(Op.NOOP) * noop_count](Op.ORIGIN) - + (Op.NOOP * noop_count) - + Op.STOP - ) - container = Container.Code(code=code) - assert len(container) == MAX_BYTECODE_SIZE - eof_test(container=container) - - -def test_rjumpi_truncated( - eof_test: EOFTestFiller, -) -> None: - """EOF1I4200_0014 (Invalid) EOF code containing truncated RJUMPI.""" - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(0) + Op.RJUMPI, - ) - ], - ), - expect_exception=EOFException.TRUNCATED_INSTRUCTION, - ) - - -def test_rjumpi_truncated_2( - eof_test: EOFTestFiller, -) -> None: - """EOF1I4200_0015 (Invalid) EOF code containing truncated RJUMPI.""" - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(0) + Op.RJUMPI + b"\x00", - ) - ], - ), - expect_exception=EOFException.TRUNCATED_INSTRUCTION, - ) - - -@pytest.mark.parametrize("offset", [-7, -15]) -def test_rjumpi_into_header( - eof_test: EOFTestFiller, - offset: int, -) -> None: - """ - EOF1I4200_0016 (Invalid) EOF code containing RJUMPI with target outside - code bounds (Jumping into header). - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPI[offset] + Op.STOP, - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjumpi_jump_before_header( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1I4200_0017 (Invalid) EOF code containing RJUMPI with target outside - code bounds (Jumping to before code begin). - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPI[-25] + Op.STOP, - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjumpi_into_data( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1I4200_0018 (Invalid) EOF code containing RJUMPI with target outside - code bounds (Jumping into data section). - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPI[2] + Op.STOP, - ), - Section.Data(data=b"\xaa\xbb\xcc"), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjumpi_after_container( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1I4200_0019 (Invalid) EOF code containing RJUMPI with target outside - code bounds (Jumping to after code end). - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPI[2] + Op.STOP, - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjumpi_to_code_end( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1I4200_0020 (Invalid) EOF code containing RJUMPI with target outside - code bounds (Jumping to code end). - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPI[1] + Op.STOP, - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize("offset", range(1, Op.RJUMP.data_portion_length + 1)) -def test_rjumpi_into_self_data_portion( - eof_test: EOFTestFiller, - offset: int, -) -> None: - """ - EOF1I4200_0021 (Invalid) EOF code containing RJUMPI with target same RJUMPI - immediate (with offset). 
- """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPI[-offset] + Op.STOP, - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize("stack_height_spread", [-1, 0, 1, 2]) -def test_rjumpi_into_self( - eof_test: EOFTestFiller, - stack_height_spread: int, -) -> None: - """ - EOF code containing RJUMPI targeting itself (-3). This can never be valid - because this is backward jump and RJUMPI consumes one stack item. - """ - # Create variadic stack height by the parametrized spread. - stack_spread_code = Bytecode() - if stack_height_spread >= 0: - stack_spread_code = ( - Op.RJUMPI[stack_height_spread](0) + Op.PUSH0 * stack_height_spread - ) - - eof_test( - container=Container( - sections=[ - Section.Code( - code=stack_spread_code - + Op.RJUMPI[-len(Op.RJUMPI[0])](0) - + Op.STOP, - ) - ], - ), - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) - - -def test_rjumpi_into_stack_height_diff( - eof_test: EOFTestFiller, -) -> None: - """ - EOF code containing RJUMPI with target instruction that causes stack height - difference. - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(0) - + Op.PUSH1(0) - + Op.RJUMPI[ - -( - len(Op.RJUMPI[0]) - + len(Op.PUSH1(0)) - + len(Op.PUSH1(0)) - ) - ] - + Op.STOP, - ), - ], - ), - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) - - -def test_rjumpi_into_stack_underflow( - eof_test: EOFTestFiller, -) -> None: - """ - EOF code containing RJUMPI with target instruction that cause stack - underflow. - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.ORIGIN - + Op.RJUMPI[len(Op.STOP)] - + Op.STOP - + Op.POP - + Op.STOP - ), - ], - ), - expect_exception=EOFException.STACK_UNDERFLOW, - ) - - -def test_rjumpi_skips_stack_underflow( - eof_test: EOFTestFiller, -) -> None: - """ - EOF code containing RJUMPI where the default path produces a stack - underflow. - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.ORIGIN + Op.RJUMPI[len(Op.POP)] + Op.POP + Op.STOP - ), - ], - ), - expect_exception=EOFException.STACK_UNDERFLOW, - ) - - -def test_rjumpi_into_rjump( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1I4200_0023 (Invalid) EOF code containing RJUMPI with target RJUMP - immediate. - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPI[3] + Op.STOP + Op.RJUMP[-9], - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjumpi_into_rjumpi( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1I4200_0022 (Invalid) EOF code containing RJUMPI with target other - RJUMPI immediate. - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.RJUMPI[5] - + Op.STOP - + Op.PUSH1(1) - + Op.RJUMPI[-11] - + Op.STOP, - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "jump", [JumpDirection.FORWARD, JumpDirection.BACKWARD] -) -def test_rjumpi_into_push_1( - eof_test: EOFTestFiller, - jump: JumpDirection, -) -> None: - """ - EOF1I4200_0024 (Invalid) EOF code containing RJUMPI with target PUSH1 - immediate. 
- """ - code = ( - Op.PUSH1[1] + Op.RJUMPI[-4] - if jump == JumpDirection.BACKWARD - else Op.PUSH1[1] + Op.RJUMPI[1] + Op.PUSH1[1] + Op.POP - ) + Op.STOP - eof_test( - container=Container( - sections=[ - Section.Code(code=code), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.PUSH2, - Op.PUSH3, - Op.PUSH4, - Op.PUSH5, - Op.PUSH6, - Op.PUSH7, - Op.PUSH8, - Op.PUSH9, - Op.PUSH10, - Op.PUSH11, - Op.PUSH12, - Op.PUSH13, - Op.PUSH14, - Op.PUSH15, - Op.PUSH16, - Op.PUSH17, - Op.PUSH18, - Op.PUSH19, - Op.PUSH20, - Op.PUSH21, - Op.PUSH22, - Op.PUSH23, - Op.PUSH24, - Op.PUSH25, - Op.PUSH26, - Op.PUSH27, - Op.PUSH28, - Op.PUSH29, - Op.PUSH30, - Op.PUSH31, - Op.PUSH32, - ], -) -@pytest.mark.parametrize( - "jump", [JumpDirection.FORWARD, JumpDirection.BACKWARD] -) -@pytest.mark.parametrize( - "data_portion_end", - [True, False], - ids=["data_portion_end", "data_portion_start"], -) -def test_rjumpi_into_push_n( - eof_test: EOFTestFiller, - opcode: Op, - jump: JumpDirection, - data_portion_end: bool, -) -> None: - """ - EOF1I4200_0024 (Invalid) EOF code containing RJUMPI with target PUSH2+ - immediate. - """ - data_portion_length = int.from_bytes(opcode, byteorder="big") - 0x5F - if jump == JumpDirection.FORWARD: - offset = data_portion_length if data_portion_end else 1 - code = Op.PUSH1(1) + Op.RJUMPI[offset] + opcode[0] + Op.STOP - else: - offset = -4 if data_portion_end else -4 - data_portion_length + 1 - code = opcode[0] + Op.RJUMPI[offset] + Op.STOP - eof_test( - container=Container( - sections=[ - Section.Code(code=code), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize("target_rjumpv_table_size", [1, 256]) -@pytest.mark.parametrize( - "data_portion_end", - [True, False], - ids=["data_portion_end", "data_portion_start"], -) -def test_rjumpi_into_rjumpv( - eof_test: EOFTestFiller, - target_rjumpv_table_size: int, - data_portion_end: bool, -) -> None: - """ - EOF1I4200_0025 (Invalid) EOF code containing RJUMPI with target RJUMPV - immediate. - """ - invalid_destination = ( - 4 + (2 * target_rjumpv_table_size) if data_portion_end else 4 - ) - target_jump_table = [0 for _ in range(target_rjumpv_table_size)] - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.RJUMPI[invalid_destination] - + Op.STOP - + Op.PUSH1(1) - + Op.RJUMPV[target_jump_table] - + Op.STOP, - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "data_portion_end", - [True, False], - ids=["data_portion_end", "data_portion_start"], -) -def test_rjumpi_into_callf( - eof_test: EOFTestFiller, - data_portion_end: bool, -) -> None: - """ - EOF1I4200_0026 (Invalid) EOF code containing RJUMPI with target CALLF - immediate. 
- """ - invalid_destination = 2 if data_portion_end else 1 - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.RJUMPI[invalid_destination] - + Op.CALLF[1] - + Op.STOP, - ), - Section.Code( - code=Op.SSTORE(1, 1) + Op.RETF, - code_outputs=0, - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjumpi_into_dupn( - eof_test: EOFTestFiller, -) -> None: - """EOF code containing RJUMPI with target DUPN immediate.""" - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.PUSH1(1) - + Op.PUSH1(1) - + Op.RJUMPI[1] - + Op.DUPN[1] - + Op.SSTORE - + Op.STOP, - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjumpi_into_swapn( - eof_test: EOFTestFiller, -) -> None: - """EOF code containing RJUMPI with target SWAPN immediate.""" - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.PUSH1(1) - + Op.PUSH1(1) - + Op.RJUMPI[1] - + Op.SWAPN[1] - + Op.SSTORE - + Op.STOP, - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjumpi_into_exchange( - eof_test: EOFTestFiller, -) -> None: - """EOF code containing RJUMPI with target EXCHANGE immediate.""" - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.PUSH1(2) - + Op.PUSH1(3) - + Op.PUSH1(1) - + Op.RJUMPI[1] - + Op.EXCHANGE[0x00] - + Op.SSTORE - + Op.STOP, - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjumpi_into_eofcreate( - eof_test: EOFTestFiller, -) -> None: - """EOF code containing RJUMPI with target EOFCREATE immediate.""" - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 * 5 - + Op.RJUMPI[1] - + Op.EOFCREATE[0] - + Op.STOP, - ), - Section.Container( - container=Container( - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, 0), - ), - Section.Container( - container=Container.Code(code=Op.STOP), - ), - ] - ) - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjumpi_into_returncode( - eof_test: EOFTestFiller, -) -> None: - """EOF code containing RJUMPI with target RETURNCODE immediate.""" - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP, - ), - Section.Container( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 * 3 - + Op.RJUMPI[1] - + Op.RETURNCODE[0], - ), - Section.Container( - container=Container.Code(code=Op.STOP), - ), - ] - ) - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjumpi_backwards_reference_only( - eof_test: EOFTestFiller, -) -> None: - """EOF code containing instructions only reachable by backwards RJUMPI.""" - container = Container.Code( - code=( - Op.RJUMP[RJUMP_LEN] - + Op.RJUMP[RJUMPI_LEN + len(Op.ORIGIN)] - + Op.ORIGIN - + Op.RJUMPI[-(RJUMP_LEN + RJUMPI_LEN + len(Op.ORIGIN))] - + Op.STOP - ) - ) - eof_test( - container=container, - expect_exception=EOFException.UNREACHABLE_INSTRUCTIONS, - ) - - -def test_rjumpi_stack_validation( - eof_test: EOFTestFiller, -) -> None: - """ - Check that you can get to the same opcode with two different stack heights - Spec now allows this: 4.b in - https://github.com/ipsilon/eof/blob/main/spec/eof.md#stack-validation. 
- """ - container = Container.Code( - code=Op.RJUMPI[1](1) + Op.ADDRESS + Op.NOOP + Op.STOP - ) - eof_test( - container=container, - expect_exception=None, - ) - - -def test_rjumpi_at_the_end( - eof_test: EOFTestFiller, -) -> None: - """ - Test invalid RJUMPI as the end of a code section. - https://github.com/ipsilon/eof/blob/main/spec/eof.md#stack-validation 4.i: - This implies that the last instruction must be a terminating instruction or - RJUMP. - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH0 - + Op.RJUMPI[1] - + Op.STOP - + Op.RJUMPI[-4], - ) - ], - ), - expect_exception=EOFException.MISSING_STOP_OPCODE, - ) - - -def test_tangled_rjumpi( - eof_test: EOFTestFiller, -) -> None: - """EOF code containing tangled RJUMPI paths.""" - container = Container.Code( - code=( - Op.PUSH0 # [0,0] - + Op.PUSH0 # [1,1] - + Op.RJUMPI[8] # [2,2] - + Op.PUSH1(127) # [1,1] - + Op.RJUMPI[7] # [2,2] - + Op.RJUMP[5] # [1,1] - + Op.PUSH0 # [1,1] - + Op.RJUMP[0] # [2,1] - + Op.LT # [1,x] - + Op.STOP # [1,x] - ) - ) - eof_test( - container=container, - expect_exception=EOFException.STACK_UNDERFLOW, - ) - - -def test_rjumpi_backwards_onto_dup( - eof_test: EOFTestFiller, -) -> None: - """Backwards jumpi onto a dup.""" - container = Container.Code( - code=(Op.PUSH0 + Op.DUP1 + Op.RJUMPI[-4] + Op.STOP), - max_stack_increase=2, - ) - eof_test( - container=container, - ) - - -def test_rjumpi_backwards_min_stack_wrong( - eof_test: EOFTestFiller, -) -> None: - """Backwards rjumpi where min_stack does not match.""" - container = Container.Code( - code=( - Op.PUSH0 # (0, 0) - + Op.PUSH1(0) # (1, 1) - + Op.RJUMPI[1] # (2, 2) To PUSH1 - + Op.PUSH0 # (1, 1) - + Op.PUSH1(4) # (1, 2) - + Op.RJUMPI[-9] # (2, 3) To first RJUMPI with (1, 2) - + Op.STOP # (1, 2) - ), - max_stack_increase=3, - ) - eof_test( - container=container, - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) - - -def test_rjumpi_rjumpv_backwards_min_stack_wrong( - eof_test: EOFTestFiller, -) -> None: - """Backwards rjumpi rjumpv where min_stack does not match.""" - container = Container.Code( - code=( - Op.PUSH0 # (0, 0) - + Op.PUSH1(0) # (1, 1) - + Op.RJUMPI[1] # (2, 2) To PUSH1 - + Op.PUSH0 # (1, 1) - + Op.PUSH1(4) # (1, 2) - + Op.RJUMPV[-10] # (2, 3) To first RJUMPI with (1, 2) - + Op.STOP # (1, 2) - ), - max_stack_increase=3, - ) - eof_test( - container=container, - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) - - -def test_double_rjumpi_stack_underflow( - eof_test: EOFTestFiller, -) -> None: - """Two RJUMPIs, causing the min stack to underflow.""" - container = Container.Code( - code=( - Op.PUSH0 # (0, 0) - + Op.PUSH0 # (1, 1) - + Op.RJUMPI[5] # (2, 2) To RETURN - + Op.PUSH0 # (1, 1) - + Op.PUSH0 # (2, 2) - + Op.RJUMPI[0] # (3, 3) - + Op.RETURN # (1, 2) Underflow - ), - max_stack_increase=3, - ) - eof_test( - container=container, - expect_exception=EOFException.STACK_UNDERFLOW, - ) - - -def test_double_rjumpi_stack_height_mismatch( - eof_test: EOFTestFiller, -) -> None: - """ - Test stack height check of the backward RJUMP targeted by two RJUMPIs with - the non-uniform stack height range. 
- """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 # BEGIN: (0, 0) - + Op.PUSH0 # (1, 1) - + Op.RJUMPI[3] # (2, 2) to LAST - + Op.RJUMPI[0] # (1, 1) to LAST - + Op.RJUMP[ - -11 - ], # LAST: (0, 1) to BEGIN; stack height mismatch - max_stack_increase=2, - ), - ], - ), - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) - - -def test_double_rjumpi_invalid_max_stack_height( - eof_test: EOFTestFiller, -) -> None: - """ - Test max stack height of the final block targeted by two RJUMPIs with the - non-uniform stack height range. - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 # (0, 0) - + Op.PUSH0 # (1, 1) - + Op.RJUMPI[3] # (2, 2) to EXIT - + Op.RJUMPI[0] # (1, 1) to EXIT - + Op.PUSH0 # EXIT: (0, 1) - + Op.PUSH0 # (1, 2) - + Op.INVALID, # (2, 3) - max_stack_increase=2, # should be 3 - ), - ], - ), - expect_exception=EOFException.INVALID_MAX_STACK_INCREASE, - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="backwards_rjumpi_10", - sections=[ - Section.Code( - code=Op.PUSH1[190] - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPI[-11] - + Op.STOP, - max_stack_increase=3, - ), - ], - expected_bytecode="ef000101000402" - "0001000eff0000000080000360be6000e10001506000e1fff500", - ), - Container( - name="backwards_rjumpi_3", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPI[-7] - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[-13] - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef000101000402" - "0001000eff000000008000025f506000e1fff95f6000e1fff300", - ), - Container( - name="backwards_rjumpi_5", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[1] - + Op.ADD - + Op.DUP1 - + Op.DUP1 - + Op.RJUMPI[-8] - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef000101000402" - "0001000aff000000008000025f6001018080e1fff800", - ), - Container( - name="backwards_rjumpi_8", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPI[-6] - + Op.PUSH0 - + Op.RJUMP[-10], - max_stack_increase=1, - ), - ], - ), - Container( - name="backwards_rjumpi_9", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[-11] - + Op.STOP, - max_stack_increase=3, - ), - ], - expected_bytecode="ef000101000402" - "0001000dff000000008000035f6000e100015f6000e1fff500", - ), - Container( - name="backwards_rjumpi_variable_stack_3", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPI[-7] - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[-13] - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef000101000402" - "00010016ff000000008000055f6000e1" - "00025f5f5f506000e1fff95f6000e1fff300", - ), - Container( - name="backwards_rjumpi_variable_stack_5", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[1] - + Op.ADD - + Op.DUP1 - + Op.DUP1 - + Op.RJUMPI[-8] - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef000101000402" - "00010012ff000000008000055f6000e1" - "00025f5f5f6001018080e1fff800", - ), - Container( - name="backwards_rjumpi_variable_stack_6", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.RJUMPI[-4] - + Op.STOP, - 
max_stack_increase=5, - ), - ], - expected_bytecode="ef000101000402" - "00010010ff000000008000055f6000e1" - "00025f5f5f5f5f50e1fffc00", - ), - Container( - name="backwards_rjumpi_variable_stack_6a", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.PUSH0 - + Op.RJUMPI[-5] - + Op.STOP, - max_stack_increase=5, - ), - ], - ), - Container( - name="backwards_rjumpi_variable_stack_8", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPI[-8] - + Op.PUSH0 - + Op.RJUMP[-10], - max_stack_increase=4, - ), - ], - ), - ], - ids=lambda x: x.name, -) -def test_rjumpi_backward_invalid_max_stack_height( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """ - Validate a code section containing at least one backward RJUMPI invalid - because of the incorrect max stack height. - """ - eof_test( - container=container, - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py b/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py deleted file mode 100644 index 04087f4ea4..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py +++ /dev/null @@ -1,1986 +0,0 @@ -"""EOF JUMPF tests covering stack and code validation rules.""" - -import pytest -from execution_testing import ( - Account, - Bytecode, - EOFException, - EOFStateTestFiller, - EOFTestFiller, - Op, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. import EOF_FORK_NAME -from .helpers import ( - JumpDirection, - slot_code_worked, - slot_conditional_result, - value_code_worked, -) - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-4200.md" -REFERENCE_SPEC_VERSION = "17d4a8d12d2b5e0f2985c866376c16c8c6df7cba" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - -RJUMP_LEN = len(Op.RJUMP[0]) - - -@pytest.mark.parametrize( - "calldata", - [ - pytest.param(0, id="c0"), - pytest.param(1, id="c1"), - pytest.param(3, id="c3"), - pytest.param(255, id="c255"), - pytest.param(256, id="c256"), - pytest.param(2**256 - 1, id="c2^256-1"), - ], -) -@pytest.mark.parametrize( - "table_size", - [ - pytest.param(1, id="t1"), - pytest.param(3, id="t3"), - pytest.param(256, id="t256"), - ], -) -def test_rjumpv_condition( - eof_state_test: EOFStateTestFiller, - calldata: int, - table_size: int, -) -> None: - """Test RJUMPV contract switching based on external input.""" - value_fall_through = 0xFFFF - value_base = ( - 0x1000 # Force a `PUSH2` instruction to be used on all targets - ) - target_length = 7 - jump_table = [(i + 1) * target_length for i in range(table_size)] - - jump_targets = sum( - (Op.SSTORE(slot_conditional_result, i + value_base) + Op.STOP) - for i in range(table_size) - ) - - fall_through_case = ( - Op.SSTORE(slot_conditional_result, value_fall_through) + Op.STOP - ) - - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.CALLDATALOAD - + Op.RJUMPV[jump_table] - + fall_through_case - + jump_targets, - ) - ] - ), - data=calldata.to_bytes(32, "big"), - container_post=Account( - storage={ - slot_conditional_result: calldata + value_base - if calldata < table_size - else value_fall_through, - } - ), - ) - - -def test_rjumpv_forwards( - eof_state_test: EOFStateTestFiller, -) -> None: - """EOF1V4200_0008 (Valid) EOF with RJUMPV table size 1 (Positive).""" - 
eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(0) - + Op.RJUMPV[3] - + Op.NOOP - + Op.NOOP - + Op.STOP - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_rjumpv_backwards( - eof_state_test: EOFStateTestFiller, -) -> None: - """EOF1V4200_0009 (Valid) EOF with RJUMPV table size 1 (Negative).""" - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(0) - + Op.RJUMPI[7] - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - + Op.PUSH1(0) - + Op.RJUMPV[-13] - + Op.STOP, - ) - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_rjumpv_backwards_onto_dup( - eof_test: EOFTestFiller, -) -> None: - """Backwards jumpv vector onto a dup.""" - container = Container.Code( - code=(Op.PUSH0 + Op.DUP1 + Op.RJUMPV[-5] + Op.STOP), - max_stack_increase=2, - ) - eof_test( - container=container, - ) - - -@pytest.mark.parametrize("length", [8, 9]) -def test_rjumpv_backwards_large_table( - eof_test: EOFTestFiller, - length: int, -) -> None: - """Backwards jump vector with a large table.""" - jump_table = [0] * length - jump_table += [length * -2 - 6] - container = Container.Code( - code=(Op.RJUMPV[jump_table](length) + Op.STOP), - max_stack_increase=1, - ) - eof_test( - container=container, - ) - - -def test_rjumpv_zero( - eof_state_test: EOFStateTestFiller, -) -> None: - """EOF1V4200_0010 (Valid) EOF with RJUMPV table size 1 (Zero).""" - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(0) - + Op.RJUMPV[0] - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_rjumpv_size_3( - eof_state_test: EOFStateTestFiller, -) -> None: - """EOF1V4200_0011 (Valid) EOF with RJUMPV table size 3.""" - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(0) - + Op.RJUMPV[3, 0, -10] - + Op.NOOP - + Op.NOOP - + Op.STOP - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -@pytest.mark.parametrize( - "target", - [0, 1, 100, 254, 255, 256], -) -def test_rjumpv_full_table( - eof_state_test: EOFStateTestFiller, - target: int, -) -> None: - """ - EOF1V4200_0012/13/14/15 (Valid) EOF with RJUMPV table size 256 (target - parameterized). - """ - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH2[target] - + Op.RJUMPV[range(256)] - + Op.NOOP * 256 - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_rjumpv_max_forwards( - eof_state_test: EOFStateTestFiller, -) -> None: - """ - EOF1V4200_0016 (Valid) EOF with RJUMPV containing the maximum offset - (32767). - """ - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.RJUMPV[32767] - + Op.NOOP * 32768 - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_rjumpv_truncated_empty( - eof_test: EOFTestFiller, -) -> None: - """ - EOF1I4200_0027 (Invalid) EOF code containing RJUMPV with max_index 0 but no - immediates. 
- """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPV, - ) - ], - ), - expect_exception=EOFException.TRUNCATED_INSTRUCTION, - ) - - -@pytest.mark.parametrize( - "branches", - [1, 2, 256], -) -@pytest.mark.parametrize( - "byte_count_last_branch", - [0, 1], -) -def test_rjumpv_truncated( - eof_test: EOFTestFiller, - branches: int, - byte_count_last_branch: int, -) -> None: - """EOF1I4200_0028/29/30 (Invalid) EOF code containing truncated RJUMPV.""" - rjumpv_bytes = int.to_bytes(branches - 1, 1, "big") - rjumpv_bytes += b"\0" * ((2 * (branches - 1)) + byte_count_last_branch) - - eof_test( - container=Container.Code(code=Op.PUSH1(1) + Op.RJUMPV[rjumpv_bytes]), - expect_exception=EOFException.TRUNCATED_INSTRUCTION, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -def test_rjumpv_into_header( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, -) -> None: - """ - EOF1I4200_0031 (Invalid) EOF code containing RJUMPV with target outside - code bounds (Jumping into header). - """ - invalid_destination = -5 - (2 * table_size) - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPV[jump_table] + Op.STOP, - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize("offset", [-13, -23]) -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -def test_rjumpv_before_container( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, - offset: int, -) -> None: - """ - EOF1I4200_0032 (Invalid) EOF code containing RJUMPV with target outside - code bounds (Jumping to before code begin). - """ - invalid_destination = offset - (2 * table_size) - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPV[jump_table] + Op.STOP, - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -def test_rjumpv_into_data( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, -) -> None: - """ - EOF1I4200_0033 (Invalid) EOF code containing RJUMPV with target outside - code bounds (Jumping into data section). 
- """ - invalid_destination = 2 - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPV[jump_table] + Op.STOP, - ), - Section.Data(data=b"\xaa\xbb\xcc"), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -def test_rjumpv_after_container( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, -) -> None: - """ - EOF1I4200_0034 (Invalid) EOF code containing RJUMPV with target outside - code bounds (Jumping to after code end). - """ - invalid_destination = 2 - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPV[jump_table] + Op.STOP, - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -def test_rjumpv_at_end( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, -) -> None: - """ - EOF1I4200_0035 (Invalid) EOF code containing RJUMPV with target outside - code bounds (Jumping to code end). - """ - invalid_destination = 1 - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPV[jump_table] + Op.STOP, - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -@pytest.mark.parametrize( - "data_portion_end", - [True, False], - ids=["data_portion_end", "data_portion_start"], -) -def test_rjumpv_into_self_data_portion( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, - data_portion_end: bool, -) -> None: - """ - EOF1I4200_0036 (Invalid) EOF code containing RJUMPV with target same RJUMPV - immediate. - """ - invalid_destination = -1 if data_portion_end else -(2 * table_size) - 1 - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPV[jump_table] + Op.STOP, - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -@pytest.mark.parametrize("stack_height_spread", [-1, 0, 1, 2]) -def test_rjumpv_into_self( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, - stack_height_spread: int, -) -> None: - """ - EOF code containing RJUMPV targeting itself. This can never be valid - because this is backward jump and RJUMPV consumes one stack item. - """ - # Create variadic stack height by the parametrized spread. 
- stack_spread_code = Bytecode() - if stack_height_spread >= 0: - stack_spread_code = ( - Op.RJUMPI[stack_height_spread](0) + Op.PUSH0 * stack_height_spread - ) - - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = -len(Op.RJUMPV[jump_table]) - - eof_test( - container=Container( - sections=[ - Section.Code( - code=stack_spread_code - + Op.RJUMPV[jump_table](0) - + Op.STOP, - # max stack increase is computed correctly - ) - ], - ), - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -def test_rjumpv_into_stack_height_diff( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, -) -> None: - """ - EOF code containing RJUMPV with target instruction that causes stack height - difference. - """ - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = -( - len(Op.RJUMPV[jump_table]) + len(Op.PUSH1[0]) + len(Op.PUSH1[0]) - ) - - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1[0] - + Op.PUSH1[0] - + Op.RJUMPV[jump_table] - + Op.STOP, - ), - ], - ), - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -def test_rjumpv_into_stack_underflow( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, -) -> None: - """ - EOF code containing RJUMPV with target instruction that cause stack - underflow. - """ - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = 1 - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.ORIGIN - + Op.RJUMPV[jump_table] - + Op.STOP - + Op.POP - + Op.STOP - ), - ], - ), - expect_exception=EOFException.STACK_UNDERFLOW, - ) - - -@pytest.mark.parametrize( - "table_size", - [ - pytest.param(1, id="t1"), - pytest.param(256, id="t256"), - ], -) -def test_rjumpv_skips_stack_underflow( - eof_test: EOFTestFiller, - table_size: int, -) -> None: - """ - EOF code containing RJUMPV where the default path produces a stack - underflow. - """ - jump_table = [1 for _ in range(table_size)] - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.ORIGIN + Op.RJUMPV[jump_table] + Op.POP + Op.STOP - ), - ], - ), - expect_exception=EOFException.STACK_UNDERFLOW, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -@pytest.mark.parametrize( - "data_portion_end", - [True, False], - ids=["data_portion_end", "data_portion_start"], -) -def test_rjumpv_into_rjump( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, - data_portion_end: bool, -) -> None: - """ - EOF1I4200_0037 (Invalid) EOF code containing RJUMPV with target RJUMP - immediate. 
- """ - invalid_destination = 3 if data_portion_end else 2 - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - if table_size > 1: - valid_index = 0 - if valid_index == invalid_index: - valid_index += 1 - jump_table[valid_index] = 1 - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.RJUMPV[jump_table] - + Op.STOP - + Op.RJUMP[0] - + Op.STOP, - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -@pytest.mark.parametrize( - "data_portion_end", - [True, False], - ids=["data_portion_end", "data_portion_start"], -) -def test_rjumpv_into_rjumpi( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, - data_portion_end: bool, -) -> None: - """ - EOF1I4200_0038 (Invalid) EOF code containing RJUMPV with target RJUMPI - immediate. - """ - invalid_destination = 5 if data_portion_end else 4 - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - if table_size > 1: - valid_index = 0 - if valid_index == invalid_index: - valid_index += 1 - jump_table[valid_index] = 1 - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.RJUMPV[jump_table] - + Op.STOP - + Op.PUSH1(1) - + Op.RJUMPI[0] - + Op.STOP, - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -@pytest.mark.parametrize( - "jump", [JumpDirection.FORWARD, JumpDirection.BACKWARD] -) -def test_rjumpv_into_push_1( - eof_test: EOFTestFiller, - jump: JumpDirection, - table_size: int, - invalid_index: int, -) -> None: - """ - EOF1I4200_0039 (Invalid) EOF code containing RJUMPV with target PUSH1 - immediate. 
- """ - if jump == JumpDirection.FORWARD: - invalid_destination = 2 - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - code = ( - Op.PUSH1(1) - + Op.RJUMPV[jump_table] - + Op.STOP - + Op.PUSH1(1) - + Op.PUSH1(1) - + Op.SSTORE - + Op.STOP - ) - else: - invalid_destination = -(2 * table_size) - 3 - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - code = Op.PUSH1(1) + Op.RJUMPV[jump_table] + Op.STOP - eof_test( - container=Container( - sections=[Section.Code(code=code)], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.PUSH2, - Op.PUSH3, - Op.PUSH4, - Op.PUSH5, - Op.PUSH6, - Op.PUSH7, - Op.PUSH8, - Op.PUSH9, - Op.PUSH10, - Op.PUSH11, - Op.PUSH12, - Op.PUSH13, - Op.PUSH14, - Op.PUSH15, - Op.PUSH16, - Op.PUSH17, - Op.PUSH18, - Op.PUSH19, - Op.PUSH20, - Op.PUSH21, - Op.PUSH22, - Op.PUSH23, - Op.PUSH24, - Op.PUSH25, - Op.PUSH26, - Op.PUSH27, - Op.PUSH28, - Op.PUSH29, - Op.PUSH30, - Op.PUSH31, - Op.PUSH32, - ], -) -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -@pytest.mark.parametrize( - "data_portion_end", - [True, False], - ids=["data_portion_end", "data_portion_start"], -) -@pytest.mark.parametrize( - "jump", [JumpDirection.FORWARD, JumpDirection.BACKWARD] -) -def test_rjumpv_into_push_n( - eof_test: EOFTestFiller, - opcode: Op, - jump: JumpDirection, - table_size: int, - invalid_index: int, - data_portion_end: bool, -) -> None: - """ - EOF1I4200_0039 (Invalid) EOF code containing RJUMPV with target PUSHN - immediate. - """ - data_portion_length = int.from_bytes(opcode, byteorder="big") - 0x5F - if jump == JumpDirection.FORWARD: - invalid_destination = ( - data_portion_length + 1 if data_portion_end else 2 - ) - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - code = ( - Op.PUSH1(1) - + Op.RJUMPV[jump_table] - + Op.STOP - + opcode[1] - + Op.PUSH1(1) - + Op.SSTORE - + Op.STOP - ) - else: - invalid_destination = ( - -(2 * table_size) - 3 - if data_portion_end - else -(2 * table_size) - 2 - data_portion_length - ) - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - code = opcode[1] + Op.RJUMPV[jump_table] + Op.STOP - eof_test( - container=Container( - sections=[Section.Code(code=code)], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "source_table_size,invalid_index", - [ - pytest.param(1, 0, id="s1i0"), - pytest.param(256, 0, id="s256i0"), - pytest.param(256, 255, id="s256i255"), - ], -) -@pytest.mark.parametrize("target_table_size", [1, 256], ids=["t1", "t256"]) -@pytest.mark.parametrize( - "data_portion_end", - [True, False], - ids=["data_portion_end", "data_portion_start"], -) -def test_rjumpv_into_rjumpv( - eof_test: EOFTestFiller, - source_table_size: int, - target_table_size: int, - invalid_index: int, - data_portion_end: bool, -) -> None: - """ - EOF1I4200_0040 (Invalid) EOF code containing RJUMPV with target other - RJUMPV immediate. 
- """ - invalid_destination = ( - 4 + (2 * target_table_size) if data_portion_end else 4 - ) - source_jump_table = [0 for _ in range(source_table_size)] - source_jump_table[invalid_index] = invalid_destination - target_jump_table = [0 for _ in range(target_table_size)] - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.RJUMPV[source_jump_table] - + Op.STOP - + Op.PUSH1(1) - + Op.RJUMPV[target_jump_table] - + Op.STOP, - ) - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -@pytest.mark.parametrize( - "data_portion_end", - [True, False], - ids=["data_portion_end", "data_portion_start"], -) -def test_rjumpv_into_callf( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, - data_portion_end: bool, -) -> None: - """ - EOF1I4200_0041 (Invalid) EOF code containing RJUMPV with target CALLF - immediate. - """ - invalid_destination = 2 if data_portion_end else 1 - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(0) - + Op.RJUMPV[jump_table] - + Op.CALLF[1] - + Op.STOP, - ), - Section.Code( - code=Op.SSTORE(1, 1) + Op.RETF, - code_outputs=0, - ), - ] - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -def test_rjumpv_into_dupn( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, -) -> None: - """EOF code containing RJUMPV with target DUPN immediate.""" - invalid_destination = 1 - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.PUSH1(1) - + Op.PUSH1(0) - + Op.RJUMPV[jump_table] - + Op.DUPN[1] - + Op.SSTORE - + Op.STOP, - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -def test_rjumpv_into_swapn( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, -) -> None: - """EOF code containing RJUMPV with target SWAPN immediate.""" - invalid_destination = 1 - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(1) - + Op.PUSH1(1) - + Op.PUSH1(0) - + Op.RJUMPV[jump_table] - + Op.SWAPN[1] - + Op.SSTORE - + Op.STOP, - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -def test_rjumpv_into_exchange( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, -) -> None: - """EOF code containing RJUMPV with target EXCHANGE immediate.""" - invalid_destination = 1 - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - eof_test( - container=Container( - sections=[ - Section.Code( - 
code=Op.PUSH1(1) - + Op.PUSH1(2) - + Op.PUSH1(3) - + Op.PUSH1(0) - + Op.RJUMPV[jump_table] - + Op.EXCHANGE[0x00] - + Op.SSTORE - + Op.STOP, - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -def test_rjumpv_into_eofcreate( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, -) -> None: - """EOF code containing RJUMPV with target EOFCREATE immediate.""" - invalid_destination = 1 - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 * 5 - + Op.RJUMPV[jump_table] - + Op.EOFCREATE[0] - + Op.STOP, - ), - Section.Container( - container=Container( - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, 0), - ), - Section.Container( - container=Container.Code(Op.STOP), - ), - ] - ) - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -@pytest.mark.parametrize( - "table_size,invalid_index", - [ - pytest.param(1, 0, id="t1i0"), - pytest.param(256, 0, id="t256i0"), - pytest.param(256, 255, id="t256i255"), - ], -) -def test_rjumpv_into_returncode( - eof_test: EOFTestFiller, - table_size: int, - invalid_index: int, -) -> None: - """EOF code containing RJUMPV with target RETURNCODE immediate.""" - invalid_destination = 1 - jump_table = [0 for _ in range(table_size)] - jump_table[invalid_index] = invalid_destination - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP, - ), - Section.Container( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 * 3 - + Op.RJUMPV[jump_table] - + Op.RETURNCODE[0], - ), - Section.Container( - container=Container.Code(Op.STOP), - ), - ] - ) - ), - ], - ), - expect_exception=EOFException.INVALID_RJUMP_DESTINATION, - ) - - -def test_rjumpv_backwards_reference_only( - eof_test: EOFTestFiller, -) -> None: - """EOF code containing instructions only reachable by backwards RJUMPV.""" - rjumpv_len = len(Op.RJUMPV[0]) - container = Container.Code( - code=( - Op.RJUMP[RJUMP_LEN] - + Op.RJUMP[rjumpv_len + len(Op.ORIGIN)] - + Op.ORIGIN - + Op.RJUMPV[-(RJUMP_LEN + rjumpv_len + len(Op.ORIGIN))] - + Op.STOP - ) - ) - eof_test( - container=container, - expect_exception=EOFException.UNREACHABLE_INSTRUCTIONS, - ) - - -def test_rjumpv_at_the_end( - eof_test: EOFTestFiller, -) -> None: - """ - https://github.com/ipsilon/eof/blob/main/spec/eof.md#stack-validation 4.i: - This implies that the last instruction may be a terminating instruction or - RJUMPV. 
- """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH1(0) - + Op.PUSH1(0) - + Op.RJUMPI[1] - + Op.STOP - + Op.RJUMPV[-7](1), - ) - ], - ), - expect_exception=EOFException.MISSING_STOP_OPCODE, - ) - - -def test_rjumpv_backwards_min_stack_wrong( - eof_test: EOFTestFiller, -) -> None: - """Backwards rjumpv where min_stack does not match.""" - container = Container.Code( - code=( - Op.PUSH0 # (0, 0) - + Op.PUSH1(0) # (1, 1) - + Op.RJUMPV[1] # (2, 2) To PUSH1 - + Op.PUSH0 # (1, 1) - + Op.PUSH1(4) # (1, 2) - + Op.RJUMPV[-11] # (2, 3) To first RJUMPV with (1, 2) - + Op.STOP # (1, 2) - ), - max_stack_increase=3, - ) - eof_test( - container=container, - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) - - -def test_rjumpv_rjumpi_backwards_min_stack_wrong( - eof_test: EOFTestFiller, -) -> None: - """Backwards rjumpv rjumpi where min_stack does not match.""" - container = Container.Code( - code=( - Op.PUSH0 # (0, 0) - + Op.PUSH1(0) # (1, 1) - + Op.RJUMPV[1] # (2, 2) To PUSH1 - + Op.PUSH0 # (1, 1) - + Op.PUSH1(4) # (1, 2) - + Op.RJUMPI[-10] # (2, 3) To first RJUMPV with (1, 2) - + Op.STOP # (1, 2) - ), - max_stack_increase=3, - ) - eof_test( - container=container, - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) - - -def test_double_rjumpv( - eof_test: EOFTestFiller, -) -> None: - """Two RJUMPVs, causing the min stack to underflow.""" - container = Container.Code( - code=( - Op.PUSH0 - + Op.PUSH0 - + Op.RJUMPV[6] - + Op.PUSH0 - + Op.PUSH0 - + Op.RJUMPV[0] - + Op.RETURN - ), - max_stack_increase=3, - ) - eof_test( - container=container, - expect_exception=EOFException.STACK_UNDERFLOW, - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="forwards_rjumpv_0", - sections=[ - Section.Code( - code=Op.PUSH1(1) + Op.RJUMPV[0] + Op.STOP, - max_stack_increase=1, - ), - ], - expected_bytecode="ef00010100040200010007ff000000008000016001e200000000", - ), - Container( - name="forwards_rjumpv_1", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1(0) - + Op.RJUMPV[1] - + Op.NOT - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef00010100040200010009ff000000008000025f6000e20000011900", - ), - Container( - name="forwards_rjumpv_2", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[[2, 3]] - + Op.PUSH0 - + Op.POP - + Op.NOT - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef0001010004020001000dff000000008000025f6000e201000200035f501900", - ), - Container( - name="forwards_rjumpv_3", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[1] - + Op.PUSH0 - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef00010100040200010009ff000000008000025f6000e20000015f00", - ), - Container( - name="forwards_rjumpv_4", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[[1, 2]] - + Op.PUSH0 - + Op.PUSH0 - + Op.NOT - + Op.STOP, - max_stack_increase=3, - ), - ], - expected_bytecode="ef0001010004020001000dff000000008000035f6000e201000100025f5f1900", - ), - Container( - name="forwards_rjumpv_5", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[[5, 10]] - + Op.PUSH1(1) - + Op.RJUMP[7] - + Op.PUSH1(2) - + Op.RJUMP[2] - + Op.PUSH1(3) - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef00010100040200010016ff000000008000025f6000e2010005000a6001e000076002e00002600300", - ), - Container( - name="forwards_rjumpv_6", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[[4, 9]] - + 
Op.PUSH0 - + Op.RJUMP[8] - + Op.PUSH0 - + Op.PUSH0 - + Op.RJUMP[3] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.STOP, - max_stack_increase=4, - ), - ], - expected_bytecode="ef00010100040200010016ff000000008000045f6000e201000400095fe000085f5fe000035f5f5f00", - ), - Container( - name="forwards_rjumpv_7", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[[4, 9]] - + Op.POP - + Op.RJUMP[8] - + Op.POP - + Op.POP - + Op.RJUMP[3] - + Op.POP - + Op.POP - + Op.POP - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef00010100040200010019ff000000008000055f5f5f5f6000e2010004000950e000085050e0000350505000", - ), - Container( - name="forwards_rjumpv_8", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[3] - + Op.RJUMP[0] - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef0001010004020001000bff000000008000025f6000e2000003e0000000", - ), - Container( - name="forwards_rjumpv_9", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[4] - + Op.PUSH0 - + Op.RJUMP[0] - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef0001010004020001000cff000000008000025f6000e20000045fe0000000", - ), - Container( - name="forwards_rjumpv_variable_stack_0", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1(1) - + Op.RJUMPV[0] - + Op.STOP, - max_stack_increase=4, - ), - ], - expected_bytecode="ef0001010004020001000fff000000008000045f6000e100025f5f6001e200000000", - ), - Container( - name="forwards_rjumpv_variable_stack_1", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[1] - + Op.NOT - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef00010100040200010011ff000000008000055f6000e100025f5f5f6000e20000011900", - ), - Container( - name="forwards_rjumpv_variable_stack_2", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[[2, 3]] - + Op.PUSH0 - + Op.POP - + Op.NOT - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef00010100040200010015ff000000008000055f6000e100025f5f5f6000e201000200035f501900", - ), - Container( - name="forwards_rjumpv_variable_stack_3", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[1] - + Op.PUSH0 - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef00010100040200010011ff000000008000055f6000e100025f5f5f6000e20000015f00", - ), - Container( - name="forwards_rjumpv_variable_stack_4", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[[1, 2]] - + Op.PUSH0 - + Op.PUSH0 - + Op.NOT - + Op.STOP, - max_stack_increase=6, - ), - ], - expected_bytecode="ef00010100040200010015ff000000008000065f6000e100025f5f5f6000e201000100025f5f1900", - ), - Container( - name="forwards_rjumpv_variable_stack_5", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[[5, 10]] - + Op.PUSH1(1) - + Op.RJUMP[7] - + Op.PUSH1(2) - + Op.RJUMP[2] - + Op.PUSH1(3) - + Op.STOP, - max_stack_increase=5, - ), - ], - 
expected_bytecode="ef0001010004020001001eff000000008000055f6000e100025f5f5f6000e2010005000a6001e000076002e00002600300", - ), - Container( - name="forwards_rjumpv_variable_stack_6", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[[4, 9]] - + Op.PUSH0 - + Op.RJUMP[8] - + Op.PUSH0 - + Op.PUSH0 - + Op.RJUMP[3] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.STOP, - max_stack_increase=7, - ), - ], - expected_bytecode="ef0001010004020001001eff000000008000075f6000e100025f5f5f6000e201000400095fe000085f5fe000035f5f5f00", - ), - Container( - name="forwards_rjumpv_variable_stack_7", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[[4, 9]] - + Op.POP - + Op.RJUMP[8] - + Op.POP - + Op.POP - + Op.RJUMP[3] - + Op.POP - + Op.POP - + Op.POP - + Op.STOP, - max_stack_increase=8, - ), - ], - expected_bytecode="ef00010100040200010021ff000000008000085f6000e100025f5f5f5f5f5f6000e2010004000950e000085050e0000350505000", - ), - Container( - name="forwards_rjumpv_variable_stack_8", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[3] - + Op.RJUMP[0] - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef00010100040200010013ff000000008000055f6000e100025f5f5f6000e2000003e0000000", - ), - Container( - name="forwards_rjumpv_variable_stack_9", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[4] - + Op.PUSH0 - + Op.RJUMP[0] - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef00010100040200010014ff000000008000055f6000e100025f5f5f6000e20000045fe0000000", - ), - ], - ids=lambda x: x.name, -) -def test_rjumpv_valid_forward( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """ - Validate a valid code section containing at least one forward RJUMPV. These - tests exercise the stack height validation. 
- """ - eof_test(container=container) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="backwards_rjumpv_0", - sections=[ - Section.Code( - code=Op.PUSH1[0] + Op.RJUMPV[-6] + Op.STOP, - max_stack_increase=1, - ), - ], - expected_bytecode="ef00010100040200010007ff000000008000016000e200fffa00", - ), - Container( - name="backwards_rjumpv_1", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPV[-8] - + Op.STOP, - max_stack_increase=1, - ), - ], - expected_bytecode="ef00010100040200010009ff000000008000015f506000e200fff800", - ), - Container( - name="backwards_rjumpv_2", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPV[-8] - + Op.PUSH1[0] - + Op.RJUMPV[-14] - + Op.STOP, - max_stack_increase=1, - ), - ], - expected_bytecode="ef0001010004020001000fff000000008000015f506000e200fff86000e200fff200", - ), - Container( - name="backwards_rjumpv_4", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPV[-8] - + Op.RJUMP[-11], - max_stack_increase=1, - ), - ], - expected_bytecode="ef0001010004020001000bff000000008000015f506000e200fff8e0fff5", - ), - Container( - name="backwards_rjumpv_variable_stack_0", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[-6] - + Op.STOP, - max_stack_increase=4, - ), - ], - expected_bytecode="ef0001010004020001000fff000000008000045f6000e100025f5f6000e200fffa00", - ), - Container( - name="backwards_rjumpv_variable_stack_1", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPV[-8] - + Op.STOP, - max_stack_increase=4, - ), - ], - expected_bytecode="ef00010100040200010011ff000000008000045f6000e100025f5f5f506000e200fff800", - ), - Container( - name="backwards_rjumpv_variable_stack_2", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPV[-8] - + Op.PUSH1[0] - + Op.RJUMPV[-14] - + Op.STOP, - max_stack_increase=4, - ), - ], - expected_bytecode="ef00010100040200010017ff000000008000045f6000e100025f5f5f506000e200fff86000e200fff200", - ), - Container( - name="backwards_rjumpv_variable_stack_4", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPV[-8] - + Op.RJUMP[-11], - max_stack_increase=4, - ), - ], - expected_bytecode="ef00010100040200010013ff000000008000045f6000e100025f5f5f506000e200fff8e0fff5", - ), - ], - ids=lambda x: x.name, -) -def test_rjumpv_valid_backward( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """ - Validate a valid code section containing at least one backward RJUMPV. - These tests exercise the stack height validation. 
- """ - eof_test(container=container) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="backwards_rjumpv_3", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPV[-8] - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[-15] - + Op.STOP, - max_stack_increase=2, - ), - ], - expected_bytecode="ef00010100040200010010ff000000008000025f506000e200fff85f6000e200fff100", - ), - Container( - name="backwards_rjumpv_5", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPV[-7] - + Op.PUSH0 - + Op.RJUMP[-11], - max_stack_increase=1, - ), - ], - ), - Container( - name="backwards_rjumpv_6", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[-12] - + Op.STOP, - max_stack_increase=3, - ), - ], - expected_bytecode="ef0001010004020001000eff000000008000035f6000e100015f6000e200fff400", - ), - Container( - name="backwards_rjumpv_7", - sections=[ - Section.Code( - code=Op.PUSH1[190] - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPV[-12] - + Op.STOP, - max_stack_increase=3, - ), - ], - expected_bytecode="ef0001010004020001000fff0000000080000360be6000e10001506000e200fff400", - ), - Container( - name="backwards_rjumpv_variable_stack_3", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPV[-8] - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[-15] - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef00010100040200010018ff000000008000055f6000e100025f5f5f506000e200fff85f6000e200fff100", - ), - Container( - name="backwards_rjumpv_variable_stack_5", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPV[-7] - + Op.PUSH0 - + Op.RJUMP[-11], - max_stack_increase=4, - ), - ], - ), - Container( - name="backwards_rjumpv_variable_stack_6", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPV[-12] - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef00010100040200010016ff000000008000055f6000e100025f5f5f6000e100015f6000e200fff400", - ), - Container( - name="backwards_rjumpv_variable_stack_7", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.POP - + Op.PUSH1[0] - + Op.RJUMPV[-12] - + Op.STOP, - max_stack_increase=5, - ), - ], - expected_bytecode="ef00010100040200010017ff000000008000055f6000e100025f5f5f5f6000e10001506000e200fff400", - ), - ], - ids=lambda x: x.name, -) -def test_rjumpv_backward_invalid_max_stack_height( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """ - Validate a code section containing at least one backward RJUMPV invalid - because of the incorrect max stack height. 
- """ - eof_test( - container=container, - expect_exception=EOFException.STACK_HEIGHT_MISMATCH, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip4750_functions/__init__.py b/tests/unscheduled/eip7692_eof_v1/eip4750_functions/__init__.py deleted file mode 100644 index 3889e56081..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip4750_functions/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -""" -Test cases for [EIP-4750: EOF - Functions](https://eips.ethereum.org/EIPS/eip-4750). - -EIP-4750 formalizes functions in the EVM object format, introducing -callable units of code. Opcodes introduced: `CALLF` (`0xE3`), `RETF` -(`0xE4`). -""" diff --git a/tests/unscheduled/eip7692_eof_v1/eip4750_functions/helpers.py b/tests/unscheduled/eip7692_eof_v1/eip4750_functions/helpers.py deleted file mode 100644 index e6186de64d..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip4750_functions/helpers.py +++ /dev/null @@ -1,14 +0,0 @@ -"""EOF Functions tests helpers.""" - -import itertools - -"""Storage addresses for common testing fields""" -_slot = itertools.count() -next(_slot) # don't use slot 0 -slot_code_worked = next(_slot) -slot_last_slot = next(_slot) -slot_stack_canary = next(_slot) - -"""Storage values for common testing fields""" -value_code_worked = 0x2015 -value_canary_written = 0xDEADB12D diff --git a/tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_callf_execution.py b/tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_callf_execution.py deleted file mode 100644 index 5c0f3a7cb0..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_callf_execution.py +++ /dev/null @@ -1,707 +0,0 @@ -"""EOF CALLF execution tests.""" - -import math - -import pytest -from execution_testing import ( - Account, - Alloc, - Environment, - EOFStateTestFiller, - Hash, - Op, - StateTestFiller, - Storage, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. import EOF_FORK_NAME -from ..eip7620_eof_create.helpers import ( - value_canary_should_not_change, - value_canary_to_be_overwritten, -) -from .helpers import ( - slot_code_worked, - slot_stack_canary, - value_canary_written, - value_code_worked, -) - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-4750.md" -REFERENCE_SPEC_VERSION = "14400434e1199c57d912082127b1d22643788d11" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.parametrize( - "n,result", - ( - (0, 1), - (1, 1), - (5, 120), - (57, math.factorial(57)), - (58, math.factorial(58) % 2**256), - ), -) -def test_callf_factorial( - eof_state_test: EOFStateTestFiller, n: int, result: int -) -> None: - """Test factorial implementation with recursive CALLF instructions.""" - eof_state_test( - container=Container( - sections=[ - Section.Code( - Op.SSTORE(0, Op.CALLF[1](Op.CALLDATALOAD(0))) + Op.STOP, - ), - Section.Code( - Op.PUSH1[1] - + Op.DUP2 - + Op.GT - + Op.RJUMPI[4] - + Op.POP - + Op.PUSH1[1] - + Op.RETF - + Op.PUSH1[1] - + Op.DUP2 - + Op.SUB - + Op.CALLF[1] - + Op.DUP2 - + Op.MUL - + Op.SWAP1 - + Op.POP - + Op.RETF, - code_inputs=1, - code_outputs=1, - ), - ] - ), - data=Hash(n), - container_post=Account(storage={0: result}), - ) - - -@pytest.mark.parametrize( - "n,result", - ((0, 1), (1, 1), (13, 233), (27, 196418)), -) -def test_callf_fibonacci( - eof_state_test: EOFStateTestFiller, n: int, result: int -) -> None: - """ - Test fibonacci sequence implementation with recursive CALLF instructions. 
- """ - eof_state_test( - container=Container( - sections=[ - Section.Code( - Op.SSTORE(0, Op.CALLF[1](Op.CALLDATALOAD(0))) + Op.STOP, - ), - Section.Code( - Op.PUSH1[2] - + Op.DUP2 - + Op.GT - + Op.RJUMPI[4] - + Op.POP - + Op.PUSH1[1] - + Op.RETF - + Op.PUSH1[2] - + Op.DUP2 - + Op.SUB - + Op.CALLF[1] - + Op.PUSH1[1] - + Op.DUP3 - + Op.SUB - + Op.CALLF[1] - + Op.ADD - + Op.SWAP1 - + Op.POP - + Op.RETF, - code_inputs=1, - code_outputs=1, - ), - ] - ), - gas_limit=15_000_000, - data=Hash(n), - container_post=Account(storage={0: result}), - ) - - -@pytest.mark.parametrize( - "container", - ( - Container( - name="callf_sub_retf", - sections=[ - Section.Code( - Op.SSTORE( - slot_code_worked, - Op.CALLF[1](value_code_worked + 1, 1), - ) - + Op.STOP, - max_stack_increase=2, - ), - Section.Code( - Op.SUB + Op.RETF, - code_inputs=2, - code_outputs=1, - ), - ], - ), - Container( - name="max_code_sections_retf2", - sections=[ - Section.Code( - Op.CALLF[1] + Op.SSTORE + Op.STOP, - ) - ] - + [ - Section.Code( - Op.CALLF[i] + Op.RETF, - code_outputs=2, - max_stack_increase=2, - ) - for i in range(2, 1024) - ] - + [ - Section.Code( - Op.PUSH2[value_code_worked] - + Op.PUSH1[slot_code_worked] - + Op.RETF, - code_outputs=2, - ), - ], - ), - Container( - name="multiple_sections_of_different_types", # EOF1V4750_0005 - sections=[ - Section.Code( - Op.PUSH0 - + Op.CALLF[1] - + Op.CALLF[2] - + Op.PUSH0 - + Op.CALLF[3] - + Op.SSTORE - + Op.STOP, - max_stack_increase=4, - ), - Section.Code( - Op.POP + Op.RETF, - code_inputs=1, - code_outputs=0, - ), - Section.Code( - Op.PUSH2[value_code_worked] + Op.RETF, - code_outputs=1, - ), - Section.Code( - Op.DUP2 + Op.PUSH2[slot_code_worked] + Op.RETF, - code_inputs=2, - code_outputs=4, - ), - ], - ), - ), - ids=lambda x: x.name, -) -def test_callf( - eof_state_test: EOFStateTestFiller, container: Container -) -> None: - """Test basic usage of CALLF and RETF instructions.""" - eof_state_test( - container=container, - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -@pytest.mark.parametrize( - "container", - ( - Container( - name="no_inputs", - sections=[ - Section.Code( - code=Op.PUSH0 * 1023 - + Op.CALLF[1] - + Op.POP * 1023 - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURN(0, 0), - ), - Section.Code( - Op.PUSH0 + Op.POP + Op.RETF, - code_outputs=0, - ), - ], - ), - Container( - name="with_inputs", - sections=[ - Section.Code( - code=Op.PUSH0 * 1023 - + Op.CALLF[1] - + Op.POP * 1023 - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURN(0, 0), - ), - Section.Code( - Op.PUSH0 + Op.POP + Op.RETF, - code_inputs=3, - code_outputs=3, - max_stack_increase=1, - ), - ], - ), - Container( - name="at_callf", - sections=[ - Section.Code( - code=Op.PUSH0 * 1023 - + Op.CALLF[1] - + Op.POP * 1023 - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURN(0, 0), - ), - Section.Code( - Op.CALLF[2] - + - # stack has 1024 items - Op.POP - + Op.RETF, - code_outputs=0, - ), - Section.Code( - Op.PUSH0 + Op.RETF, # stack has 1024 items - code_outputs=1, - ), - ], - ), - Container( - name="at_push0", - sections=[ - Section.Code( - code=Op.PUSH0 * 1022 - + Op.CALLF[1] - + Op.POP * 1022 - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURN(0, 0), - ), - Section.Code( - Op.PUSH0 - + - # stack has 1023 items - Op.CALLF[2] - + Op.POP - + Op.RETF, - code_outputs=0, - ), - Section.Code( - Op.PUSH0 - + - # stack has 1024 items - Op.POP - + Op.RETF, - code_outputs=0, - ), - ], - ), - Container( - name="nested_with_inputs_at_push0", 
- sections=[ - Section.Code( - code=Op.PUSH0 * 1022 - + Op.CALLF[1] - + Op.POP * 1022 - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURN(0, 0), - ), - Section.Code( - Op.PUSH0 - + - # Stack has 1023 items - Op.CALLF[2] - + Op.POP - + Op.RETF, - code_inputs=3, - code_outputs=3, - max_stack_increase=1, - ), - Section.Code( - Op.PUSH0 - + - # Stack has 1024 items - Op.POP - + Op.RETF, - code_inputs=3, - code_outputs=3, - max_stack_increase=1, - ), - ], - ), - Container( - name="store_value_unmodified_by_callf", - sections=[ - Section.Code( - Op.PUSH2[value_code_worked] # to be stored after CALLF - + Op.PUSH0 # input to CALLF - + Op.CALLF[1] - + Op.PUSH1[slot_code_worked] - + Op.SSTORE - + Op.STOP, - max_stack_increase=2, - ), - Section.Code( - Op.POP # clear input - + Op.PUSH0 * 1023 # reach max stack height - + Op.POP * 1023 - + Op.RETF, # return nothing - code_inputs=1, - code_outputs=0, - ), - ], - ), - Container( - name="with_rjumpi", - sections=[ - Section.Code( - Op.PUSH1[1] # input[1] to CALLF - + Op.PUSH0 # input[0] to CALLF - + Op.CALLF[1] - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - max_stack_increase=2, - ), - Section.Code( - Op.POP # clear input[0] - + Op.RJUMPI[2 * 1023] # jump to RETF based on input[1] - + Op.PUSH0 * 1023 # reach max stack height - + Op.POP * 1023 - + Op.RETF, # return nothing - code_inputs=2, - code_outputs=0, - ), - ], - ), - ), - ids=lambda x: x.name, -) -def test_callf_operand_stack_size_max( - eof_state_test: EOFStateTestFiller, container: Container -) -> None: - """Test operand stack reaching 1024 items.""" - eof_state_test( - container=container, - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -@pytest.mark.parametrize( - "container", - ( - Container( - name="no_inputs", - sections=[ - Section.Code( - code=Op.PUSH0 * 1023 - + Op.CALLF[1] - + Op.POP * 1023 - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURN(0, 0), - ), - Section.Code( - Op.PUSH0 # Stack has 1024 items - + Op.CALLF[2] - + Op.POP - + Op.RETF, - code_outputs=0, - ), - Section.Code( - Op.PUSH0 # Runtime stack overflow - + Op.POP - + Op.RETF, - code_outputs=0, - ), - ], - ), - Container( - name="with_inputs", - sections=[ - Section.Code( - code=Op.PUSH0 * 1023 - + Op.CALLF[1] - + Op.POP * 1023 - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURN(0, 0), - ), - Section.Code( - Op.PUSH0 # Stack has 1024 items - + Op.CALLF[2] - + Op.POP - + Op.RETF, - code_inputs=3, - code_outputs=3, - max_stack_increase=1, - ), - Section.Code( - Op.PUSH0 # Runtime stackoverflow - + Op.POP - + Op.RETF, - code_inputs=3, - code_outputs=3, - max_stack_increase=1, - ), - ], - ), - ), - ids=lambda x: x.name, -) -def test_callf_operand_stack_overflow( - eof_state_test: EOFStateTestFiller, container: Container -) -> None: - """Test stack overflowing 1024 items in called function.""" - eof_state_test( - container=container, - container_post=Account(storage={slot_code_worked: 0}), - ) - - -@pytest.mark.parametrize( - ("stack_height", "failure"), - ( - pytest.param(1020, False, id="no_overflow"), - pytest.param(1021, True, id="with_overflow"), - ), -) -def test_callf_sneaky_stack_overflow( - stack_height: int, - failure: bool, - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - CALLF where a normal execution would not overflow, but EIP-4750 CALLF rule - #3 triggers. 
- - Code Section 0 - Mostly fills the stack - Code Section 1 - jumper to 2, so container verification passes (we want a - runtime failure) - Code Section 2 - Could require too much stack, but doesn't as it JUMPFs - to 3 - Code Section 3 - Writes canary values - - The intent is to catch implementations of CALLF that don't enforce rule #3 - """ - env = Environment() - sender = pre.fund_eoa() - inputs = 1 - outputs = 3 - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.PUSH0 * stack_height - + Op.CALLF[1] - + Op.POP * stack_height - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURN(0, 0), - max_stack_increase=stack_height + outputs - inputs, - ), - Section.Code( - Op.CALLF[2] + Op.POP + Op.RETF, - code_inputs=inputs, - code_outputs=outputs, - max_stack_increase=outputs - inputs + 1, - ), - Section.Code( - Op.RJUMPI[4] - + Op.PUSH0 - + Op.JUMPF[3] - + Op.PUSH0 * (outputs - inputs + 3) - + Op.POP - + Op.RETF, - code_inputs=inputs, - code_outputs=outputs + 1, - max_stack_increase=outputs - inputs + 2, - ), - Section.Code( - Op.POP * inputs - + Op.SSTORE(slot_stack_canary, value_canary_written) - + Op.PUSH0 * (outputs + 1) - + Op.RETF, - code_inputs=inputs, - code_outputs=outputs + 1, - max_stack_increase=outputs - inputs + 1, - ), - ], - ), - storage={ - slot_code_worked: ( - value_canary_should_not_change - if failure - else value_canary_to_be_overwritten - ), - slot_stack_canary: ( - value_canary_should_not_change - if failure - else value_canary_to_be_overwritten - ), - }, - ) - - post = { - contract_address: Account( - storage={ - slot_code_worked: ( - value_canary_should_not_change - if failure - else value_code_worked - ), - slot_stack_canary: ( - value_canary_should_not_change - if failure - else value_canary_written - ), - } - ) - } - - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - sender=sender, - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.parametrize( - ("stack_height", "failure"), - ( - pytest.param(1018, False, id="no_max_stack"), - pytest.param(1019, False, id="with_max_stack"), - pytest.param(1020, True, id="over_max_stack"), - ), -) -def test_callf_max_stack( - stack_height: int, - failure: bool, - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - CALLF where a normal execution would not overflow, but EIP-4750 CALLF rule - #4 triggers. - - Code Section 0 - calls #1 with the configured height, but we load some - operands so the return stack does not overflow - Code Section 1 - expands stack, calls #2, THEN recursively calls itself - until input is zero, and returns. - Code Section 2 - Just returns, zero inputs, zero outputs - - This will catch CALLF execution rule #3: always fail if the operand stack - is full. Not checking rule 3 results in a call to section 2 and not - overfilling the stack (as it is just RETF). 
- """ - env = Environment() - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.PUSH0 * 4 # fill the stack up a little bit - + Op.PUSH2(stack_height) - + Op.CALLF[1] - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURN(0, 0), - ), - Section.Code( - Op.PUSH1(1) # arg, 1 - + Op.SWAP1 # 1, arg - + Op.SUB # arg-1, - + Op.DUP1 # arg-1, arg-1 - + Op.CALLF[2] # arg-1, arg-1 - + Op.ISZERO # jump?, arg-1, - + Op.RJUMPI[5] # arg-1 - + Op.DUP1 # arg-1, arg-1 - + Op.CALLF[1] # ret, arg-1 - + Op.POP # arg-1 - + Op.RETF, - code_inputs=1, - code_outputs=1, - ), - Section.Code( - Op.RETF, - code_outputs=0, - ), - ], - ), - storage={ - slot_code_worked: ( - value_canary_should_not_change - if failure - else value_canary_to_be_overwritten - ), - }, - ) - - post = { - contract_address: Account( - storage={ - slot_code_worked: ( - value_canary_should_not_change - if failure - else value_code_worked - ), - } - ) - } - - tx = Transaction( - to=contract_address, - gas_limit=100_000, - sender=sender, - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_callf_retf_memory_context( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """Verifies CALLF and RETF don't corrupt memory.""" - env = Environment() - storage = Storage() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - Op.SSTORE( - storage.store_next(value_code_worked), - value_code_worked, - ) - + Op.MSTORE(0, 1) - + Op.CALLF[1] - + Op.SSTORE(storage.store_next(64), Op.MSIZE()) - + Op.SSTORE(storage.store_next(2), Op.MLOAD(0)) - + Op.SSTORE(storage.store_next(3), Op.MLOAD(32)) - + Op.STOP, - ), - Section.Code( - Op.SSTORE(storage.store_next(32), Op.MSIZE()) - + Op.SSTORE(storage.store_next(1), Op.MLOAD(0)) - + Op.MSTORE(0, 2) - + Op.MSTORE(32, 3) - + Op.RETF, - code_outputs=0, - ), - ], - ), - ) - post = { - contract_address: Account(storage=storage), - } - tx = Transaction( - to=contract_address, - gas_limit=500_000, - sender=pre.fund_eoa(), - ) - state_test(env=env, pre=pre, post=post, tx=tx) diff --git a/tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py b/tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py deleted file mode 100644 index cda77d4417..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py +++ /dev/null @@ -1,1407 +0,0 @@ -"""Code validation of CALLF, RETF opcodes tests.""" - -from typing import List - -import pytest -from execution_testing import EOFException, EOFTestFiller, Op -from execution_testing.test_types.eof.constants import ( - MAX_RUNTIME_STACK_HEIGHT, -) -from execution_testing.test_types.eof.v1 import Container, Section -from execution_testing.test_types.eof.v1.constants import ( - MAX_CODE_OUTPUTS, - MAX_CODE_SECTIONS, - MAX_STACK_INCREASE_LIMIT, -) - -from .. 
import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-4750.md" -REFERENCE_SPEC_VERSION = "14400434e1199c57d912082127b1d22643788d11" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - -VALID: List[Container] = [ - Container( - name="retf_stack_validation_0", - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=2), - Section.Code( - code=Op.PUSH0 * 2 + Op.RETF, - code_outputs=2, - max_stack_height=2, - ), - ], - ), - Container( - name="retf_stack_validation_3", - sections=[ - Section.Code( - code=Op.PUSH0 + Op.CALLF[1] + Op.STOP, - max_stack_height=2, - ), - Section.Code( - code=Op.RJUMPI[7] - + Op.PUSH1[1] * 2 - + Op.RJUMP[2] - + Op.PUSH0 * 2 - + Op.RETF, - code_inputs=1, - code_outputs=2, - max_stack_height=2, - ), - ], - ), - Container( - name="retf_code_input_output", - sections=[ - Section.Code( - code=Op.PUSH0 + Op.CALLF[1] + Op.POP + Op.POP + Op.STOP - ), - Section.Code( - code=Op.PUSH0 + Op.RETF, - code_outputs=1, - ), - ], - ), - Container( - name="stack_height_equal_code_outputs_retf_zero_stop", - sections=[ - Section.Code( - code=Op.CALLF[1] + Op.POP + Op.STOP, - code_inputs=0, - max_stack_height=1, - ), - Section.Code( - code=( - Op.RJUMPI[len(Op.PUSH0) + len(Op.RETF)](Op.ORIGIN) - + Op.PUSH0 - + Op.RETF - + Op.STOP - ), - code_inputs=0, - code_outputs=1, - max_stack_height=1, - ), - ], - ), - Container( - name="callf_max_code_sections_1", - sections=[ - Section.Code( - code=( - sum(Op.CALLF[i] for i in range(1, MAX_CODE_SECTIONS)) - + Op.STOP - ) - ) - ] - + ( - [ - Section.Code( - code=Op.RETF, - code_outputs=0, - ) - ] - * (MAX_CODE_SECTIONS - 1) - ), - ), - Container( - name="callf_max_code_sections_2", - sections=[Section.Code(code=(Op.CALLF[1] + Op.STOP))] - + [ - Section.Code( - code=(Op.CALLF[i + 2] + Op.RETF), - code_outputs=0, - ) - for i in range(MAX_CODE_SECTIONS - 2) - ] - + [ - Section.Code( - code=Op.RETF, - code_outputs=0, - ) - ], - ), -] - -INVALID: List[Container] = [ - Container( - name="retf_stack_validation_1", - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=2), - Section.Code( - code=Op.PUSH0 + Op.RETF, - code_outputs=2, - max_stack_height=1, - ), - ], - validity_error=EOFException.STACK_UNDERFLOW, - ), - Container( - name="retf_variable_stack_0", - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=5), - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 * 2 - + Op.RETF, - code_outputs=5, - max_stack_height=3, - ), - ], - validity_error=EOFException.STACK_UNDERFLOW, - ), - Container( - name="retf_variable_stack_1", - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=3), - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 * 2 - + Op.RETF, - code_outputs=3, - max_stack_height=3, - ), - ], - validity_error=EOFException.STACK_UNDERFLOW, - ), - Container( - name="retf_variable_stack_4", - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=3), - Section.Code( - code=Op.PUSH0 * 3 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.POP * 2 - + Op.RETF, - code_outputs=3, - max_stack_height=4, - ), - ], - validity_error=EOFException.STACK_UNDERFLOW, - ), - Container( - name="callf_inputs_underflow_0", - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=1), - Section.Code( - code=Op.PUSH0 + Op.CALLF[2] + Op.RETF, - code_outputs=1, - max_stack_height=1, - ), - Section.Code( - code=Op.POP + Op.RETF, - code_inputs=2, - code_outputs=1, - max_stack_height=2, - ), - ], - 
validity_error=EOFException.STACK_UNDERFLOW, - ), - Container( - # CALLF to function with incorrectly specified number of inputs - name="code_inputs_underflow_1", # EOF1I4750_0020 - sections=[ - Section.Code(code=(Op.PUSH0 + Op.PUSH0 + Op.CALLF[1] + Op.STOP)), - Section.Code( - code=(Op.ADD + Op.RETF), - code_inputs=0, - code_outputs=0, - ), - ], - validity_error=EOFException.STACK_UNDERFLOW, - ), - Container( - name="code_inputs_underflow_2", - sections=[ - Section.Code(code=(Op.PUSH0 + Op.CALLF[1] + Op.STOP)), - Section.Code( - code=(Op.POP + Op.POP + Op.RETF), - code_inputs=1, - code_outputs=0, - ), - ], - validity_error=EOFException.STACK_UNDERFLOW, - ), - Container( - # CALLF without enough inputs - name="callf_inputs_underflow", # EOF1I4750_0019 - sections=[ - Section.Code(code=(Op.CALLF[1] + Op.STOP)), - Section.Code( - code=(Op.ADD + Op.RETF), - code_inputs=2, - code_outputs=1, - ), - ], - validity_error=EOFException.STACK_UNDERFLOW, - ), - Container( - name="retf_stack_validation_2", - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=2), - Section.Code( - code=Op.PUSH0 * 3 + Op.RETF, - code_outputs=2, - max_stack_height=3, - ), - ], - validity_error=EOFException.STACK_HIGHER_THAN_OUTPUTS, - ), - Container( - name="retf_variable_stack_2", - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=1), - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 * 2 - + Op.RETF, - code_outputs=1, - max_stack_height=3, - ), - ], - validity_error=EOFException.STACK_HIGHER_THAN_OUTPUTS, - ), - Container( - name="retf_variable_stack_5", - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=1), - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.PUSH0 - + Op.RETF, - code_outputs=1, - max_stack_height=3, - ), - ], - validity_error=EOFException.STACK_HIGHER_THAN_OUTPUTS, - ), - Container( - name="retf_variable_stack_6", - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=1), - Section.Code( - code=Op.PUSH0 * 2 - + Op.PUSH1[0] - + Op.RJUMPI[1] - + Op.POP - + Op.RETF, - code_outputs=1, - max_stack_height=3, - ), - ], - validity_error=EOFException.STACK_HIGHER_THAN_OUTPUTS, - ), - Container( - name="retf_variable_stack_3", - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP), - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 * 2 - + Op.RETF, - code_outputs=0, - max_stack_height=3, - ), - ], - validity_error=EOFException.STACK_HIGHER_THAN_OUTPUTS, - ), - Container( - name="stack_higher_than_code_outputs", - sections=[ - Section.Code( - code=(Op.CALLF[1] + Op.STOP), - ), - Section.Code( - code=(Op.PUSH0 + Op.RETF), - code_outputs=0, - ), - ], - validity_error=EOFException.STACK_HIGHER_THAN_OUTPUTS, - ), - Container( - name="stack_shorter_than_code_outputs", - sections=[ - Section.Code( - code=(Op.CALLF[1] + Op.STOP), - ), - Section.Code( - code=(Op.PUSH0 + Op.RETF), - code_outputs=2, - max_stack_height=1, - ), - ], - validity_error=EOFException.INVALID_MAX_STACK_INCREASE, - ), - Container( - name="stack_shorter_than_code_outputs_1", - sections=[ - Section.Code( - code=(Op.CALLF[1] + Op.STOP), - # max_stack_heights of sections aligned with actual stack - max_stack_height=1, - ), - Section.Code( - code=(Op.PUSH0 + Op.RETF), - code_outputs=2, - max_stack_height=1, - ), - ], - validity_error=EOFException.INVALID_MAX_STACK_INCREASE, - ), - Container( - name="stack_shorter_than_code_outputs_2", - sections=[ - Section.Code( - code=(Op.CALLF[1] + Op.STOP), - # 
max_stack_heights of sections aligned with declared outputs - max_stack_height=2, - ), - Section.Code( - code=(Op.PUSH0 + Op.RETF), - code_outputs=2, - max_stack_height=2, - ), - ], - validity_error=EOFException.STACK_UNDERFLOW, - ), - Container( - name="overflow_code_sections_1", - sections=[ - Section.Code( - code=(Op.CALLF[1] + Op.STOP), - ) - ] - + [ - Section.Code( - code=(Op.CALLF[i + 2] + Op.RETF), - code_outputs=0, - ) - for i in range(MAX_CODE_SECTIONS) - ] - + [ - Section.Code( - code=Op.RETF, - code_outputs=0, - ) - ], - validity_error=EOFException.TOO_MANY_CODE_SECTIONS, - ), -] - - -def container_name(c: Container) -> str: - """Return the name of the container for use in pytest ids.""" - if hasattr(c, "name") and c.name is not None: - return c.name - else: - return c.__class__.__name__ - - -@pytest.mark.parametrize( - "container", - [*VALID, *INVALID], - ids=container_name, -) -def test_eof_validity( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """ - Test EOF container validation for features around EIP-4750 / Functions / - Code Sections. - """ - eof_test(container=container) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="imm0", - sections=[ - Section.Code( - code=Op.CALLF, - ) - ], - ), - Container( - name="imm1", - sections=[ - Section.Code( - code=Op.CALLF + b"\x00", - ) - ], - ), - Container( - name="imm_from_next_section", - sections=[ - Section.Code( - code=Op.PUSH0 + Op.PUSH0 + Op.CALLF[1] + Op.STOP, - ), - Section.Code( - code=Op.CALLF - + b"\x00", # would be valid with "02" + Op.RETF. - code_inputs=2, - code_outputs=1, - max_stack_height=2, - ), - Section.Code( - code=Op.SUB - + Op.RETF, # SUB (0x02) can be confused with CALLF[2]. - code_inputs=2, - code_outputs=1, - max_stack_height=2, - ), - ], - ), - ], - ids=container_name, -) -def test_callf_truncated_immediate( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """Test cases for CALLF instructions with truncated immediate bytes.""" - eof_test( - container=container, - expect_exception=EOFException.TRUNCATED_INSTRUCTION, - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="callf1", # EOF1I4750_0010 - sections=[ - Section.Code( - Op.CALLF[1] + Op.STOP, - ) - ], - ), - Container( - name="callf2", # EOF1I0011 - sections=[ - Section.Code( - Op.CALLF[2] + Op.STOP, - ), - Section.Code( - Op.RETF, - code_outputs=0, - ), - ], - ), - Container( - name="callf1_callf2", - sections=[ - Section.Code( - Op.CALLF[1] + Op.STOP, - ), - Section.Code( - Op.CALLF[2] + Op.RETF, - code_outputs=0, - ), - ], - ), - ], - ids=container_name, -) -def test_invalid_code_section_index( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """ - Test cases for CALLF instructions with invalid target code section index. 
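# Illustrative sketch (not from the original file): the index check behind
# test_invalid_code_section_index. CALLF carries a 16-bit big-endian immediate
# naming the target code section; validation rejects any index at or beyond
# the declared section count. The helper name is hypothetical.
def callf_target_is_valid(immediate: bytes, num_code_sections: int) -> bool:
    return int.from_bytes(immediate, "big") < num_code_sections


assert callf_target_is_valid(b"\x00\x01", 2)      # CALLF[1] in a 2-section container
assert not callf_target_is_valid(b"\x00\x02", 2)  # CALLF[2] -> invalid section index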
- """ - eof_test( - container=container, - expect_exception=EOFException.INVALID_CODE_SECTION_INDEX, - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="unreachable1", - sections=[ - Section.Code(Op.INVALID), - Section.Code(Op.INVALID), # unreachable - ], - ), - Container( - name="unreachable1_selfjumpf", - sections=[ - Section.Code(Op.INVALID), - Section.Code(Op.JUMPF[1]), # unreachable - ], - ), - Container( - name="unreachable1_selfcallf", - sections=[ - Section.Code(Op.INVALID), - Section.Code(Op.CALLF[1] + Op.STOP), # unreachable - ], - ), - Container( - name="unreachable1_jumpf0", - sections=[ - Section.Code(Op.INVALID), - Section.Code(Op.JUMPF[0]), # unreachable - ], - ), - Container( - name="unreachable1_callf0", - sections=[ - Section.Code(Op.INVALID), - Section.Code(Op.CALLF[0] + Op.STOP), # unreachable - ], - ), - Container( - name="unreachable1_selfcall_jumpf0", - sections=[ - Section.Code(Op.INVALID), - Section.Code(Op.CALLF[1] + Op.JUMPF[0]), # unreachable - ], - ), - Container( - name="unreachable12_of3_2jumpf1", - sections=[ - Section.Code(Op.INVALID), - Section.Code(Op.STOP), # unreachable - Section.Code(Op.JUMPF[1]), # unreachable - ], - ), - Container( - name="unreachable12_of3_2callf1", - sections=[ - Section.Code(Op.INVALID), - Section.Code(Op.STOP), # unreachable - Section.Code(Op.CALLF[1] + Op.STOP), # unreachable - ], - ), - Container( - name="unreachable12_of3_jumpf_loop", - sections=[ - Section.Code(Op.INVALID), - Section.Code(Op.JUMPF[2]), # unreachable - Section.Code(Op.JUMPF[1]), # unreachable - ], - ), - Container( - name="unreachable12_of3_callf_loop_stop", - sections=[ - Section.Code(Op.INVALID), - Section.Code(Op.CALLF[2] + Op.STOP), # unreachable - Section.Code(Op.CALLF[1] + Op.STOP), # unreachable - ], - ), - Container( - name="unreachable12_of3_callf_loop_retf", - sections=[ - Section.Code(Op.INVALID), - Section.Code( - Op.CALLF[2] + Op.RETF, code_outputs=0 - ), # unreachable - Section.Code( - Op.CALLF[1] + Op.RETF, code_outputs=0 - ), # unreachable - ], - ), - Container( - name="unreachable12_of3_callf_loop_mixed", - sections=[ - Section.Code(Op.INVALID), - Section.Code(Op.CALLF[2] + Op.STOP), # unreachable - Section.Code( - Op.CALLF[1] + Op.RETF, code_outputs=0 - ), # unreachable - ], - ), - Container( - name="selfjumpf0_unreachable1", - sections=[ - Section.Code(Op.JUMPF[0]), # self-reference - Section.Code(Op.JUMPF[1]), # unreachable - ], - ), - Container( - name="unreachable2_of3", - sections=[ - Section.Code(Op.CALLF[1] + Op.STOP), - Section.Code(Op.RETF, code_outputs=0), - Section.Code(Op.INVALID), # unreachable - ], - ), - Container( - name="unreachable1_of3", - sections=[ - Section.Code(Op.CALLF[2] + Op.STOP), - Section.Code(Op.INVALID), # unreachable - Section.Code(Op.RETF, code_outputs=0), - ], - ), - Container( - name="unreachable1_of4", - sections=[ - Section.Code(Op.CALLF[3] + Op.STOP), - Section.Code(Op.INVALID), # unreachable - Section.Code(Op.RETF, code_outputs=0), - Section.Code(Op.CALLF[2] + Op.RETF, code_outputs=0), - ], - ), - Container( - name="unreachable2_of3_retf", - sections=[ - Section.Code(Op.JUMPF[1]), - Section.Code(Op.STOP), - Section.Code(Op.RETF, code_outputs=0), - ], - ), - Container( - name="unreachable2-255", - sections=[ - Section.Code(Op.JUMPF[1]), - Section.Code(Op.JUMPF[1]), # self-reference - ] - + [Section.Code(Op.JUMPF[i]) for i in range(3, 255)] # unreachable - + [Section.Code(Op.STOP)], # unreachable - ), - Container( - name="unreachable255", - sections=[Section.Code(Op.JUMPF[i]) for i 
in range(1, 255)] - + [ - Section.Code(Op.JUMPF[254]), # self-reference - Section.Code(Op.STOP), # unreachable - ], - ), - ], - ids=container_name, -) -def test_unreachable_code_sections( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """ - Test cases for EOF unreachable code sections (i.e. code sections not - reachable from the code section 0). - """ - eof_test( - container=container, - expect_exception=EOFException.UNREACHABLE_CODE_SECTIONS, - ) - - -@pytest.mark.parametrize("callee_outputs", [1, 2, MAX_CODE_OUTPUTS]) -def test_callf_stack_height_limit_exceeded( - eof_test: EOFTestFiller, callee_outputs: int -) -> None: - """ - Test for invalid EOF code containing CALLF instruction exceeding the stack - height limit. The code reaches the maximum runtime stack height (1024) - which is above the EOF limit for the stack height in the type section - (1023). - """ - callf_stack_height = MAX_RUNTIME_STACK_HEIGHT - callee_outputs - container = Container( - sections=[ - Section.Code( - Op.PUSH0 * callf_stack_height + Op.CALLF[1] + Op.STOP, - max_stack_height=MAX_RUNTIME_STACK_HEIGHT, - ), - Section.Code( - Op.PUSH0 * callee_outputs + Op.RETF, - code_outputs=callee_outputs, - max_stack_height=callee_outputs, - ), - ], - ) - eof_test( - container=container, - expect_exception=EOFException.MAX_STACK_INCREASE_ABOVE_LIMIT, - ) - - -@pytest.mark.parametrize("stack_height", [512, 513, 1023]) -def test_callf_stack_overflow( - eof_test: EOFTestFiller, stack_height: int -) -> None: - """ - Test CALLF instruction recursively calling itself causing stack overflow. - """ - container = Container( - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP), - Section.Code( - code=Op.PUSH1[1] * stack_height - + Op.CALLF[1] - + Op.POP * stack_height - + Op.RETF, - code_outputs=0, - max_stack_height=stack_height, - ), - ], - ) - stack_overflow = stack_height > MAX_RUNTIME_STACK_HEIGHT // 2 - eof_test( - container=container, - expect_exception=EOFException.STACK_OVERFLOW - if stack_overflow - else None, - ) - - -@pytest.mark.parametrize("stack_height", [1, 2]) -def test_callf_stack_overflow_after_callf( - eof_test: EOFTestFiller, stack_height: int -) -> None: - """ - Test CALLF instruction calling next function causing stack overflow at - validation time. 
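# Illustrative sketch (an assumption-labelled model, not part of the original
# tests): why test_callf_stack_overflow flips to STACK_OVERFLOW once
# stack_height exceeds half the runtime limit. Two nested activations of the
# recursive section each hold `stack_height` items before the inner CALLF.
MAX_RUNTIME = 1024  # mirrors MAX_RUNTIME_STACK_HEIGHT


def recursive_callf_overflows(stack_height: int) -> bool:
    # Height the validator must assume at the innermost CALLF.
    return 2 * stack_height > MAX_RUNTIME


assert not recursive_callf_overflows(512)  # 1024 exactly, still allowed
assert recursive_callf_overflows(513)      # 1026 > 1024 -> invalid container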
- """ - container = Container( - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP), - Section.Code( - code=Op.PUSH1[1] * 1023 - + Op.CALLF[2] - + Op.POP * 1023 - + Op.RETF, - code_outputs=0, - max_stack_height=1023, - ), - Section.Code( - code=Op.PUSH0 * stack_height + Op.POP * stack_height + Op.RETF, - code_outputs=0, - max_stack_height=stack_height, - ), - ], - ) - stack_overflow = 1023 + stack_height > MAX_RUNTIME_STACK_HEIGHT - eof_test( - container=container, - expect_exception=EOFException.STACK_OVERFLOW - if stack_overflow - else None, - ) - - -@pytest.mark.parametrize("stack_height", [512, 514, 515]) -def test_callf_stack_overflow_variable_stack( - eof_test: EOFTestFiller, stack_height: int -) -> None: - """Test CALLF instruction causing stack overflow.""" - container = Container( - sections=[ - Section.Code( - code=Op.RJUMPI[2](0) - + Op.PUSH0 * (MAX_RUNTIME_STACK_HEIGHT // 2) - + Op.CALLF[1] - + Op.STOP, - max_stack_height=512, - ), - Section.Code( - code=Op.PUSH1[1] * stack_height - + Op.POP * stack_height - + Op.RETF, - code_outputs=0, - max_stack_height=stack_height, - ), - ], - ) - stack_overflow = stack_height > MAX_RUNTIME_STACK_HEIGHT // 2 - eof_test( - container=container, - expect_exception=EOFException.STACK_OVERFLOW - if stack_overflow - else None, - ) - - -@pytest.mark.parametrize("stack_height", [509, 510, 512]) -def test_callf_stack_overflow_variable_stack_2( - eof_test: EOFTestFiller, stack_height: int -) -> None: - """Test CALLF instruction causing stack overflow.""" - container = Container( - sections=[ - Section.Code( - code=Op.PUSH0 * 2 - + Op.RJUMPI[2](0) - + Op.POP * 2 - + Op.PUSH0 * (MAX_RUNTIME_STACK_HEIGHT // 2) - + Op.CALLF[1] - + Op.STOP, - max_stack_height=514, - ), - Section.Code( - code=Op.PUSH1[1] * stack_height - + Op.POP * stack_height - + Op.RETF, - code_outputs=0, - max_stack_height=stack_height, - ), - ], - ) - stack_overflow = stack_height > (MAX_RUNTIME_STACK_HEIGHT // 2) - 2 - eof_test( - container=container, - expect_exception=EOFException.STACK_OVERFLOW - if stack_overflow - else None, - ) - - -@pytest.mark.parametrize("stack_height", [1, 2, 5]) -def test_callf_stack_overflow_variable_stack_3( - eof_test: EOFTestFiller, stack_height: int -) -> None: - """Test CALLF instruction causing stack overflow.""" - container = Container( - sections=[ - Section.Code( - code=Op.RJUMPI[2](0) - + Op.PUSH0 * (MAX_RUNTIME_STACK_HEIGHT - 1) - + Op.CALLF[1] - + Op.STOP, - max_stack_height=1023, - ), - Section.Code( - code=Op.PUSH0 * stack_height + Op.POP * stack_height + Op.RETF, - code_outputs=0, - max_stack_height=stack_height, - ), - ], - ) - assert container.sections[0].max_stack_height is not None - stack_overflow = ( - container.sections[0].max_stack_height + stack_height - > MAX_RUNTIME_STACK_HEIGHT - ) - eof_test( - container=container, - expect_exception=EOFException.STACK_OVERFLOW - if stack_overflow - else None, - ) - - -def test_callf_stack_overflow_variable_stack_4( - eof_test: EOFTestFiller, -) -> None: - """Test reaching stack overflow before CALLF instruction.""" - container = Container( - sections=[ - Section.Code( - code=Op.PUSH0 * 2 - + Op.RJUMPI[2](0) - + Op.POP * 2 - + Op.PUSH0 * (MAX_RUNTIME_STACK_HEIGHT - 1) - + Op.CALLF[1] - + Op.STOP, - max_stack_height=1023, - ), - Section.Code( - code=Op.RETF, - code_outputs=0, - max_stack_height=0, - ), - ], - ) - eof_test( - container=container, - expect_exception=EOFException.STACK_OVERFLOW, - ) - - -@pytest.mark.parametrize("stack_height", [2, 3]) -def test_callf_validate_outputs( - 
eof_test: EOFTestFiller, stack_height: int -) -> None: - """ - Test CALLF instruction when calling a function returning more outputs than - expected. - """ - container = Container( - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=1), - Section.Code( - code=Op.PUSH0 * stack_height + Op.CALLF[2] + Op.RETF, - code_outputs=1, - max_stack_height=stack_height, - ), - Section.Code( - code=Op.POP + Op.RETF, - code_inputs=2, - code_outputs=1, - max_stack_height=2, - ), - ], - ) - # Only 1 item consumed by function 2, if stack height > 2 - # there will be more than 1 item as outputs in function 1 - outputs_error = stack_height > 2 - eof_test( - container=container, - expect_exception=EOFException.STACK_HIGHER_THAN_OUTPUTS - if outputs_error - else None, - ) - - -@pytest.mark.parametrize("push_stack", [1023, 1024]) -@pytest.mark.parametrize("pop_stack", [1019, 1020, 1021]) -@pytest.mark.parametrize( - "code_section", - [ - pytest.param( - Section.Code( - code=Op.POP * 2 + Op.RETF, - code_inputs=2, - code_outputs=0, - max_stack_height=2, - ), - id="pop2", - ), - pytest.param( - Section.Code( - code=Op.PUSH1[1] + Op.POP + Op.RETF, - code_inputs=3, - code_outputs=3, - max_stack_height=4, - ), - id="push_pop", - ), - pytest.param( - Section.Code( - code=Op.PUSH0 * 2 + Op.RETF, - code_inputs=3, - code_outputs=5, - max_stack_height=5, - ), - id="push2", - ), - pytest.param( - Section.Code( - code=Op.PUSH0 * 2 + Op.POP * 2 + Op.RETF, - code_inputs=3, - code_outputs=3, - max_stack_height=5, - ), - id="push2_pop2", - ), - pytest.param( - Section.Code( - code=Op.PUSH0 + Op.POP * 3 + Op.RETF, - code_inputs=2, - code_outputs=0, - max_stack_height=3, - ), - id="push_pop3", - ), - pytest.param( - Section.Code( - code=Op.PUSH0 * 2 + Op.POP * 4 + Op.RETF, - code_inputs=2, - code_outputs=0, - max_stack_height=4, - ), - id="push2_pop4", - ), - ], -) -def test_callf_with_inputs_stack_overflow( - eof_test: EOFTestFiller, - code_section: Section, - push_stack: int, - pop_stack: int, -) -> None: - """Test CALLF to code section with inputs.""" - container = Container( - name="callf_with_inputs_stack_overflow_0", - sections=[ - Section.Code( - code=Op.PUSH1[1] * push_stack - + Op.CALLF[1] - + Op.POP * pop_stack - + Op.RETURN, - max_stack_height=1023, - ), - code_section, - ], - ) - assert code_section.max_stack_height is not None - exception = None - if ( - push_stack + code_section.max_stack_height - code_section.code_inputs - > MAX_RUNTIME_STACK_HEIGHT - ): - exception = EOFException.STACK_OVERFLOW - elif ( - push_stack - - code_section.code_inputs - + code_section.code_outputs - - pop_stack - < 2 - ): - exception = EOFException.STACK_UNDERFLOW - elif push_stack != container.sections[0].max_stack_height: - exception = EOFException.INVALID_MAX_STACK_INCREASE - - eof_test(container=container, expect_exception=exception) - - -@pytest.mark.parametrize( - "code_section", - [ - pytest.param( - Section.Code( - code=Op.POP * 2 + Op.RETF, - code_inputs=2, - code_outputs=0, - max_stack_height=2, - ), - id="pop2", - ), - pytest.param( - Section.Code( - code=Op.PUSH1[1] + Op.POP + Op.RETF, - code_inputs=3, - code_outputs=3, - max_stack_height=4, - ), - id="push_pop", - ), - pytest.param( - Section.Code( - code=Op.PUSH0 * 4 + Op.RETF, - code_inputs=3, - code_outputs=7, - max_stack_height=7, - ), - id="push4", - ), - pytest.param( - Section.Code( - code=Op.PUSH0 * 2 + Op.RETF, - code_inputs=3, - code_outputs=5, - max_stack_height=5, - ), - id="push2", - ), - pytest.param( - Section.Code( - code=Op.PUSH0 * 4 + 
Op.POP * 2 + Op.RETF, - code_inputs=3, - code_outputs=3, - max_stack_height=7, - ), - id="push4_pop2", - ), - pytest.param( - Section.Code( - code=Op.PUSH0 * 2 + Op.POP * 2 + Op.RETF, - code_inputs=3, - code_outputs=3, - max_stack_height=5, - ), - id="push2_pop2", - ), - pytest.param( - Section.Code( - code=Op.PUSH0 * 3 + Op.POP * 5 + Op.RETF, - code_inputs=2, - code_outputs=0, - max_stack_height=5, - ), - id="push3_pop5", - ), - pytest.param( - Section.Code( - code=Op.PUSH0 + Op.POP * 3 + Op.RETF, - code_inputs=2, - code_outputs=0, - max_stack_height=3, - ), - id="push_pop3", - ), - pytest.param( - Section.Code( - code=Op.PUSH0 * 4 + Op.POP * 6 + Op.RETF, - code_inputs=2, - code_outputs=0, - max_stack_height=6, - ), - id="push4_pop6", - ), - pytest.param( - Section.Code( - code=Op.PUSH0 * 2 + Op.POP * 4 + Op.RETF, - code_inputs=2, - code_outputs=0, - max_stack_height=4, - ), - id="push2_pop4", - ), - ], -) -@pytest.mark.parametrize("push_stack", [1020, 1021]) -def test_callf_with_inputs_stack_overflow_variable_stack( - eof_test: EOFTestFiller, code_section: Section, push_stack: int -) -> None: - """Test CALLF to code section with inputs (variable stack).""" - container = Container( - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 * 2 - + Op.PUSH1[1] * push_stack - + Op.CALLF[1] - + Op.STOP, - max_stack_height=1023, - ), - code_section, - ], - ) - initial_stack = 3 # Initial items in the scak - assert code_section.max_stack_height is not None - exception = None - if ( - push_stack - + initial_stack - + code_section.max_stack_height - - code_section.code_inputs - > MAX_RUNTIME_STACK_HEIGHT - ): - exception = EOFException.STACK_OVERFLOW - elif push_stack + initial_stack > 1023: - exception = EOFException.INVALID_MAX_STACK_INCREASE - - eof_test(container=container, expect_exception=exception) - - -@pytest.mark.parametrize( - "callee_outputs", [1, 2, MAX_CODE_OUTPUTS - 1, MAX_CODE_OUTPUTS] -) -@pytest.mark.parametrize( - "max_stack_height", - [0, 1, MAX_STACK_INCREASE_LIMIT - 1, MAX_STACK_INCREASE_LIMIT], -) -def test_callf_stack_overflow_by_outputs( - eof_test: EOFTestFiller, callee_outputs: int, max_stack_height: int -) -> None: - """ - Test for invalid EOF code containing CALLF instruction exceeding the - runtime stack height limit by calling a function with at least one output. - The computed stack height of the code section 0 is always above the maximum - allowed in the EOF type section. Therefore, the test declares an invalid - max_stack_height. - """ - callf_stack_height = (MAX_RUNTIME_STACK_HEIGHT + 1) - callee_outputs - container = Container( - sections=[ - Section.Code( - Op.PUSH0 * callf_stack_height + Op.CALLF[1] + Op.STOP, - max_stack_height=max_stack_height, - ), - Section.Code( - Op.PUSH0 + Op.DUP1 + Op.RETF, - code_outputs=callee_outputs, - max_stack_height=callee_outputs, - ), - ], - ) - eof_test(container=container, expect_exception=EOFException.STACK_OVERFLOW) - - -@pytest.mark.parametrize( - "callee_stack_height", - [2, 3, MAX_STACK_INCREASE_LIMIT - 1, MAX_STACK_INCREASE_LIMIT], -) -def test_callf_stack_overflow_by_height( - eof_test: EOFTestFiller, callee_stack_height: int -) -> None: - """ - Test for invalid EOF code containing CALLF instruction exceeding the - runtime stack height limit by calling a function with 2+ maximum stack - height. The callee with the maximum stack height of 1 is valid because - runtime limit (1024) is 1 bigger than the EOF limit (1023). 
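# Illustrative sketch (not from the original file): the arithmetic behind
# "a callee of maximum stack height 1 still fits" in
# test_callf_stack_overflow_by_height. The caller may already sit at the EOF
# per-section limit of 1023 while the runtime limit is 1024.
EOF_SECTION_LIMIT = 1023  # mirrors MAX_STACK_INCREASE_LIMIT
RUNTIME_LIMIT = 1024      # mirrors MAX_RUNTIME_STACK_HEIGHT


def callf_fits(callee_max_height: int) -> bool:
    return EOF_SECTION_LIMIT + callee_max_height <= RUNTIME_LIMIT


assert callf_fits(1)      # 1023 + 1 == 1024, exactly at the runtime limit
assert not callf_fits(2)  # 1023 + 2 == 1025 -> STACK_OVERFLOW expected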
- """ - container = Container( - sections=[ - Section.Code( - Op.PUSH0 * MAX_STACK_INCREASE_LIMIT + Op.CALLF[1] + Op.STOP, - max_stack_height=MAX_STACK_INCREASE_LIMIT, - ), - Section.Code( - Op.PUSH0 * callee_stack_height - + Op.POP * callee_stack_height - + Op.RETF, - code_outputs=0, - max_stack_height=callee_stack_height, - ), - ], - ) - eof_test(container=container, expect_exception=EOFException.STACK_OVERFLOW) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="underflow_1", - sections=[ - Section.Code( - code=Op.CALLF[1] + Op.STOP, - max_stack_height=1, - ), - Section.Code( - code=Op.PUSH0 + Op.RETF, - code_inputs=1, - code_outputs=2, - max_stack_height=2, - ), - ], - ), - Container( - name="underflow_2", - sections=[ - Section.Code( - code=Op.CALLF[1] + Op.STOP, - max_stack_height=2, - ), - Section.Code( - code=Op.PUSH0 + Op.RETF, - code_inputs=1, - code_outputs=2, - max_stack_height=2, - ), - ], - ), - Container( - name="underflow_variable_stack_2", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.RJUMPI[2](0) - + Op.PUSH0 - + Op.PUSH0 - + Op.CALLF[1] - + Op.STOP, - max_stack_height=4, - ), - Section.Code( - code=Op.PUSH0 + Op.RETF, - code_inputs=4, - code_outputs=5, - max_stack_height=5, - ), - ], - ), - Container( - name="underflow_variable_stack_2a", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH0 - + Op.RJUMPI[2](0) - + Op.PUSH0 - + Op.PUSH0 - + Op.CALLF[1] - + Op.STOP, - max_stack_height=5, - ), - Section.Code( - code=Op.PUSH0 + Op.RETF, - code_inputs=4, - code_outputs=5, - max_stack_height=5, - ), - ], - ), - Container( - name="underflow_variable_stack_3", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.RJUMPI[2](0) - + Op.PUSH0 - + Op.PUSH0 - + Op.CALLF[1] - + Op.STOP, - max_stack_height=4, - ), - Section.Code( - code=Op.PUSH0 + Op.RETF, - code_inputs=3, - code_outputs=4, - max_stack_height=4, - ), - ], - ), - Container( - name="underflow_variable_stack_4", - sections=[ - Section.Code( - code=Op.PUSH0 * 3 - + Op.RJUMPI[1](0) - + Op.POP - + Op.CALLF[1] - + Op.STOP, - max_stack_height=4, - ), - Section.Code( - code=Op.PUSH0 + Op.RETF, - code_inputs=3, - code_outputs=4, - max_stack_height=4, - ), - ], - ), - ], - ids=lambda x: x.name, -) -def test_callf_stack_underflow_examples( - eof_test: EOFTestFiller, container: Container -) -> None: - """Test CALLF instruction causing validation time stack underflow.""" - eof_test( - container=container, expect_exception=EOFException.STACK_UNDERFLOW - ) - - -def test_returning_section_aborts( - eof_test: EOFTestFiller, -) -> None: - """ - Test EOF container validation where in the same code section we have - returning and nonreturning terminating instructions. - """ - container = Container( - name="returning_section_aborts", - sections=[ - Section.Code( - code=Op.PUSH0 + Op.CALLF[1] + Op.POP + Op.POP + Op.STOP - ), - Section.Code( - code=Op.PUSH0 * 2 + Op.RJUMPI[1] + Op.RETF + Op.INVALID, - code_outputs=1, - ), - ], - ) - eof_test(container=container) diff --git a/tests/unscheduled/eip7692_eof_v1/eip5450_stack/__init__.py b/tests/unscheduled/eip7692_eof_v1/eip5450_stack/__init__.py deleted file mode 100644 index 3935e40789..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip5450_stack/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -""" -Tests for -[EIP-5450: EOF - Stack Validation](https://eips.ethereum.org/EIPS/eip-5450). - -EIP-5450 defines stack validation requirements to ensure consistent -behavior during execution. Opcodes introduced: None (specifies validation -rules for stack usage). 
-""" diff --git a/tests/unscheduled/eip7692_eof_v1/eip5450_stack/test_code_validation.py b/tests/unscheduled/eip7692_eof_v1/eip5450_stack/test_code_validation.py deleted file mode 100644 index f2ce035edf..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip5450_stack/test_code_validation.py +++ /dev/null @@ -1,732 +0,0 @@ -""" -Code validation of CALLF, JUMPF, RETF opcodes in conjunction with static -relative jumps. -""" - -import itertools -from enum import Enum, auto, unique -from typing import Generator, Tuple, Union - -import pytest -from execution_testing import ( - Account, - Bytecode, - EOFException, - EOFStateTestFiller, - EOFTestFiller, - Op, - Opcode, -) -from execution_testing.test_types.eof.v1 import Container, Section -from execution_testing.test_types.eof.v1.constants import ( - MAX_STACK_INCREASE_LIMIT, - NON_RETURNING_SECTION, -) - -from .. import EOF_FORK_NAME -from ..eip3540_eof_v1.test_all_opcodes_in_container import valid_eof_opcodes -from ..eip7620_eof_create.helpers import ( - smallest_initcode_subcontainer, - smallest_runtime_subcontainer, -) - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-5450.md" -REFERENCE_SPEC_VERSION = "f20b164b00ae5553f7536a6d7a83a0f254455e09" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@unique -class RjumpKind(Enum): - """Kinds of RJUMP* instruction snippets to generate.""" - - EMPTY_RJUMP = auto() - EMPTY_RJUMPI = auto() - RJUMPI_OVER_PUSH = auto() - RJUMPI_OVER_NOOP = auto() - RJUMPI_OVER_STOP = auto() - RJUMPI_OVER_PUSH_POP = auto() - RJUMPI_OVER_POP = auto() - RJUMPI_OVER_NEXT = auto() - RJUMPI_OVER_NEXT_NESTED = auto() - RJUMPI_TO_START = auto() - RJUMPV_EMPTY_AND_OVER_NEXT = auto() - RJUMPV_OVER_PUSH_AND_TO_START = auto() - RJUMPI_OVER_RETF = auto() - - def __str__(self) -> str: - """Return string representation of the enum.""" - return f"{self.name}" - - -@unique -class RjumpSpot(Enum): - """ - Possible spots in the code section layout where the RJUMP* is injected. - """ - - BEGINNING = auto() - BEFORE_TERMINATION = auto() - - def __str__(self) -> str: - """Return string representation of the enum.""" - return f"{self.name}" - - -def rjump_code_with( - rjump_kind: RjumpKind | None, code_so_far_len: int, next_code: Bytecode -) -> Tuple[Bytecode, bool, bool, bool]: - """ - Unless `rjump_kind` is None generates a code snippet with an RJUMP* - instruction. For some kinds `code_so_far_len` must be code length in bytes - preceding the snippet. For some kinds `next_code_len` must be code length - in bytes of some code which follows. - - It is expected that the snippet and the jump target are valid, but the - resulting code or its stack balance might not. 
- - Also returns some traits of the snippet: `is_backwards`, `pops` and - `pushes` - """ - body = Bytecode() - - is_backwards = False - pops = False - pushes = False - jumps_over_next = False - - if rjump_kind == RjumpKind.EMPTY_RJUMP: - body = Op.RJUMP[0] - elif rjump_kind == RjumpKind.EMPTY_RJUMPI: - body = Op.RJUMPI[0](0) - elif rjump_kind == RjumpKind.RJUMPI_OVER_PUSH: - body = Op.RJUMPI[1](0) + Op.PUSH0 - pushes = True - elif rjump_kind == RjumpKind.RJUMPI_OVER_NOOP: - body = Op.RJUMPI[1](0) + Op.NOOP - elif rjump_kind == RjumpKind.RJUMPI_OVER_STOP: - body = Op.RJUMPI[1](0) + Op.STOP - elif rjump_kind == RjumpKind.RJUMPI_OVER_PUSH_POP: - body = Op.RJUMPI[2](0) + Op.PUSH0 + Op.POP - elif rjump_kind == RjumpKind.RJUMPI_OVER_POP: - body = Op.RJUMPI[1](0) + Op.POP - pops = True - elif rjump_kind == RjumpKind.RJUMPI_OVER_NEXT: - body = Op.RJUMPI[len(next_code)](0) - jumps_over_next = True - elif rjump_kind == RjumpKind.RJUMPI_OVER_NEXT_NESTED: - rjump_inner = Op.RJUMPI[len(next_code)](0) - body = Op.RJUMPI[len(rjump_inner)](0) + rjump_inner - jumps_over_next = True - elif rjump_kind == RjumpKind.RJUMPI_TO_START: - rjumpi_len = len(Op.RJUMPI[0](0)) - body = Op.RJUMPI[-code_so_far_len - rjumpi_len](0) - is_backwards = True - elif rjump_kind == RjumpKind.RJUMPV_EMPTY_AND_OVER_NEXT: - body = Op.RJUMPV[[0, len(next_code)]](0) - jumps_over_next = True - elif rjump_kind == RjumpKind.RJUMPV_OVER_PUSH_AND_TO_START: - rjumpv_two_destinations_len = len(Op.RJUMPV[[0, 0]](0)) - body = ( - Op.RJUMPV[[1, -code_so_far_len - rjumpv_two_destinations_len]](0) - + Op.PUSH0 - ) - is_backwards = True - pushes = True - elif rjump_kind == RjumpKind.RJUMPI_OVER_RETF: - body = Op.RJUMPI[1](0) + Op.RETF - elif not rjump_kind: - pass - else: - raise TypeError("unknown rjumps value" + str(rjump_kind)) - - if jumps_over_next: - # This is against intuition, but if the code we're jumping over pushes, - # the path which misses it will be short of stack items, as if the - # RJUMP* popped and vice versa. - if next_code.pushed_stack_items > next_code.popped_stack_items: - pops = True - elif next_code.popped_stack_items > next_code.pushed_stack_items: - pushes = True - - return body, is_backwards, pops, pushes - - -def call_code_with(inputs: int, outputs: int, call: Bytecode) -> Bytecode: - """ - Generate code snippet with the `call` bytecode provided and its respective - input/output management. - - `inputs` and `outputs` are understood as those of the code section we're - generating for. - """ - body = Bytecode() - - if call.popped_stack_items > inputs: - body += Op.PUSH0 * (call.popped_stack_items - inputs) - elif call.popped_stack_items < inputs: - body += Op.POP * (inputs - call.popped_stack_items) - - body += call - if call.pushed_stack_items < outputs: - body += Op.PUSH0 * (outputs - call.pushed_stack_items) - elif call.pushed_stack_items > outputs: - body += Op.POP * (call.pushed_stack_items - outputs) - - return body - - -def section_code_with( - inputs: int, - outputs: int, - rjump_kind: RjumpKind | None, - rjump_spot: RjumpSpot, - call: Bytecode | None, - termination: Bytecode, -) -> Tuple[Bytecode, bool, bool, bool, bool]: - """ - Generate code section with RJUMP* and CALLF/RETF instructions. 
- - Also returns some traits of the section: `has_invalid_back_jump`, - `rjump_snippet_pops`, `rjump_snippet_pushes`, `rjump_falls_off_code` - """ - code = Bytecode() - code.pushed_stack_items, code.max_stack_height = (inputs, inputs) - - if call: - body = call_code_with(inputs, outputs, call) - else: - body = Op.POP * inputs + Op.PUSH0 * outputs - - has_invalid_back_jump = False - rjump_snippet_pushes = False - rjump_snippet_pops = False - rjump_falls_off_code = False - - if rjump_spot == RjumpSpot.BEGINNING: - rjump, is_backwards, rjump_snippet_pops, rjump_snippet_pushes = ( - rjump_code_with(rjump_kind, 0, body) - ) - if rjump_kind == RjumpKind.RJUMPI_OVER_RETF: - if inputs > outputs: - rjump_snippet_pushes = True - elif outputs > inputs: - rjump_snippet_pops = True - code += rjump - - code += body - - if rjump_spot == RjumpSpot.BEFORE_TERMINATION: - rjump, is_backwards, rjump_snippet_pops, rjump_snippet_pushes = ( - rjump_code_with(rjump_kind, len(code), next_code=termination) - ) - code += rjump - - if is_backwards and inputs != outputs: - has_invalid_back_jump = True - - if rjump_spot == RjumpSpot.BEFORE_TERMINATION or ( - rjump_spot == RjumpSpot.BEGINNING and len(termination) == 0 - ): - if rjump_kind in [ - RjumpKind.RJUMPI_OVER_NEXT, - RjumpKind.RJUMPI_OVER_NEXT_NESTED, - RjumpKind.RJUMPV_EMPTY_AND_OVER_NEXT, - ]: - # Jump over termination or jump over body, but there is nothing - # after the body. - rjump_falls_off_code = True - - code += termination - - return ( - code, - has_invalid_back_jump, - rjump_snippet_pops, - rjump_snippet_pushes, - rjump_falls_off_code, - ) - - -num_sections = 3 -possible_inputs_outputs = range(2) - - -@pytest.mark.parametrize( - ["inputs", "outputs"], - itertools.product( - list( - itertools.product( - *([possible_inputs_outputs] * (num_sections - 1)) - ) - ), - list( - itertools.product( - *([possible_inputs_outputs] * (num_sections - 1)) - ) - ), - ), -) -@pytest.mark.parametrize( - "rjump_kind", - RjumpKind, -) -# Parameter value fixed for first iteration, to cover the most important case. -@pytest.mark.parametrize("rjump_section_idx", [0, 1]) -@pytest.mark.parametrize( - "rjump_spot", - RjumpSpot, -) -def test_rjumps_callf_retf( - eof_test: EOFTestFiller, - inputs: Tuple[int, ...], - outputs: Tuple[int, ...], - rjump_kind: RjumpKind, - rjump_section_idx: int, - rjump_spot: RjumpSpot, -) -> None: - """ - Test EOF container validation for EIP-4200 vs EIP-4750 interactions. - - Each test's code consists of `num_sections` code sections, which call into - one another and then return. Code may include RJUMP* snippets of - `rjump_kind` in various `rjump_spots`. 
- """ - # Zeroth section has always 0 inputs and 0 outputs, so is excluded from - # param - inputs = (0,) + inputs - outputs = (0,) + outputs - - assert len(inputs) == len(outputs) == num_sections - - sections = [] - container_has_invalid_back_jump = False - container_has_rjump_pops = False - container_has_rjump_pushes = False - container_has_rjump_off_code = False - container_has_section_0_retf = ( - rjump_section_idx == 0 and rjump_kind == RjumpKind.RJUMPI_OVER_RETF - ) - - for section_idx in range(num_sections): - if section_idx == 0: - call = Op.CALLF[section_idx + 1] - call.popped_stack_items = inputs[section_idx + 1] - call.pushed_stack_items = outputs[section_idx + 1] - call.min_stack_height = call.popped_stack_items - call.max_stack_height = max( - call.popped_stack_items, call.pushed_stack_items - ) - termination = Op.STOP - elif section_idx < num_sections - 1: - call = Op.CALLF[section_idx + 1] - call.popped_stack_items = inputs[section_idx + 1] - call.pushed_stack_items = outputs[section_idx + 1] - call.min_stack_height = call.popped_stack_items - call.max_stack_height = max( - call.popped_stack_items, call.pushed_stack_items - ) - termination = Op.RETF - else: - call = None - termination = Op.RETF - - ( - code, - section_has_invalid_back_jump, - rjump_snippet_pops, - rjump_snippet_pushes, - rjump_falls_off_code, - ) = section_code_with( - inputs[section_idx], - outputs[section_idx], - rjump_kind if rjump_section_idx == section_idx else None, - rjump_spot, - call, - termination, - ) - - if section_has_invalid_back_jump: - container_has_invalid_back_jump = True - if rjump_snippet_pops: - container_has_rjump_pops = True - # Pushes to the stack never affect the zeroth section, because it - # `STOP`s and not `RETF`s. - if rjump_snippet_pushes and section_idx != 0: - container_has_rjump_pushes = True - if rjump_falls_off_code: - container_has_rjump_off_code = True - - if section_idx > 0: - sections.append( - Section.Code( - code, - code_inputs=inputs[section_idx], - code_outputs=outputs[section_idx], - ) - ) - else: - sections.append(Section.Code(code)) - - possible_exceptions = [] - if container_has_invalid_back_jump: - possible_exceptions.append(EOFException.STACK_HEIGHT_MISMATCH) - if container_has_rjump_pops: - possible_exceptions.append(EOFException.STACK_UNDERFLOW) - if container_has_rjump_pushes: - possible_exceptions.append(EOFException.STACK_HIGHER_THAN_OUTPUTS) - if container_has_rjump_off_code: - possible_exceptions.append(EOFException.INVALID_RJUMP_DESTINATION) - if container_has_section_0_retf: - possible_exceptions.append(EOFException.INVALID_NON_RETURNING_FLAG) - - eof_test( - container=Container(sections=sections), - expect_exception=possible_exceptions or None, - ) - - -@pytest.mark.parametrize( - "inputs", - itertools.product(*([possible_inputs_outputs] * (num_sections - 1))), -) -@pytest.mark.parametrize( - "rjump_kind", - RjumpKind, -) -# Parameter value fixed for first iteration, to cover the most important case. -@pytest.mark.parametrize("rjump_section_idx", [0, 1]) -@pytest.mark.parametrize( - "rjump_spot", - # `termination` is empty for JUMPF codes, because JUMPF serves as one. Spot - # `BEFORE_TERMINATION` is unreachable code. 
- [k for k in RjumpSpot if k not in [RjumpSpot.BEFORE_TERMINATION]], -) -def test_rjumps_jumpf_nonreturning( - eof_test: EOFTestFiller, - inputs: Tuple[int, ...], - rjump_kind: RjumpKind, - rjump_section_idx: int, - rjump_spot: RjumpSpot, -) -> None: - """ - Test EOF container validation for EIP-4200 vs EIP-6206 interactions on - non-returning functions. - """ - # Zeroth section has always 0 inputs and 0 outputs, so is excluded from - # param - inputs = (0,) + inputs - - sections = [] - container_has_rjump_pops = False - container_has_rjump_off_code = False - container_has_non_returning_retf = False - - for section_idx in range(num_sections): - if section_idx < num_sections - 1: - call = Op.JUMPF[section_idx + 1] - call.popped_stack_items = inputs[section_idx + 1] - call.pushed_stack_items = 0 - call.min_stack_height = call.popped_stack_items - call.max_stack_height = max( - call.popped_stack_items, call.pushed_stack_items - ) - termination = Bytecode() - else: - call = None - termination = Op.STOP - - # `section_has_invalid_back_jump` - never happens: we excluded RJUMP - # from the end `rjump_snippet_pushes` - never happens: we never RETF - # where too large stack would fail - ( - code, - _section_has_invalid_back_jump, - rjump_snippet_pops, - _rjump_snippet_pushes, - rjump_falls_off_code, - ) = section_code_with( - inputs[section_idx], - 0, - rjump_kind if rjump_section_idx == section_idx else None, - rjump_spot, - call, - termination, - ) - - if rjump_snippet_pops: - container_has_rjump_pops = True - if rjump_falls_off_code: - container_has_rjump_off_code = True - if rjump_kind == RjumpKind.RJUMPI_OVER_RETF: - container_has_non_returning_retf = True - - if section_idx > 0: - sections.append( - Section.Code( - code, - code_inputs=inputs[section_idx], - code_outputs=NON_RETURNING_SECTION, - ) - ) - else: - sections.append(Section.Code(code)) - - possible_exceptions = [] - if container_has_rjump_pops: - possible_exceptions.append(EOFException.STACK_UNDERFLOW) - if container_has_rjump_off_code: - possible_exceptions.append(EOFException.INVALID_RJUMP_DESTINATION) - if container_has_non_returning_retf: - possible_exceptions.append(EOFException.INVALID_NON_RETURNING_FLAG) - - eof_test( - container=Container(sections=sections), - expect_exception=possible_exceptions or None, - ) - - -def gen_stack_underflow_params() -> Generator[ - tuple[Union[Op, Opcode], int], None, None -]: - """Generate parameters for stack underflow tests.""" - opcodes = sorted( - op for op in valid_eof_opcodes if op.min_stack_height > 0 - ) + [ - # Opcodes that have variable min_stack_height - Op.SWAPN[0x00], - Op.SWAPN[0xFF], - Op.DUPN[0x00], - Op.DUPN[0xFF], - Op.EXCHANGE[0x00], - Op.EXCHANGE[0xFF], - ] - for op in opcodes: - yield op, 0 - if op.min_stack_height > 1: - yield op, op.min_stack_height - 1 - - -@pytest.mark.parametrize("spread", [-1, 0, 1, MAX_STACK_INCREASE_LIMIT]) -@pytest.mark.parametrize("op,stack_height", gen_stack_underflow_params()) -def test_all_opcodes_stack_underflow( - eof_test: EOFTestFiller, op: Op, stack_height: int, spread: int -) -> None: - """ - Test EOF validation failing due to stack overflow caused by the specific - instruction `op`. - """ - code = Bytecode() - - if spread >= 0: - # Check if the op increases the stack height (e.g. DUP instructions). - # We need to leave space for this increase not to cause stack overflow. - max_stack_increase = max( - op.pushed_stack_items - op.popped_stack_items, 0 - ) - # Cap the spread if it would exceed the maximum stack height. 
- spread = min( - spread, - MAX_STACK_INCREASE_LIMIT - (stack_height + max_stack_increase), - ) - # Create a range stack height of 0-spread. - code += Op.RJUMPI[spread](Op.CALLVALUE) + Op.PUSH0 * spread - - # Create the desired stack height. - code += Op.PUSH0 * stack_height - - if op.has_data_portion(): - code += op[0] # produce required imm bytes - else: - code += op - - if not op.terminating: - code += Op.STOP - - sections = [ - Section.Code( - code, - # Set reasonable stack height. Don't rely on automatic calculation, - # because we are in the invalid stack height scenario. - max_stack_height=max(spread, stack_height, int(spread >= 0)), - ) - ] - - if op == Op.EOFCREATE: - # Make EOFCREATE valid by adding the target subcontainer. - sections.append(Section.Container(smallest_initcode_subcontainer)) - elif op == Op.RETURNCODE: - # Make RETURNCODE valid by wrapping it with a container with EOFCREATE. - sections = [ - Section.Code(Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP), - Section.Container( - container=Container( - sections=[ - sections[0], - Section.Container(smallest_runtime_subcontainer), - ] - ) - ), - ] - - eof_test( - container=Container( - sections=sections, - validity_error=EOFException.STACK_UNDERFLOW, - ) - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="underflow_0", - sections=[ - Section.Code( - code=Op.ADD + Op.STOP, - max_stack_height=1, - ), - ], - ), - Container( - name="underflow_variable_stack_0", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.RJUMPI[2](0) - + Op.PUSH0 - + Op.PUSH0 - + Op.LOG2 - + Op.STOP, - max_stack_height=3, - ), - ], - ), - Container( - name="underflow_variable_stack_1", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.RJUMPI[2](0) - + Op.PUSH0 - + Op.PUSH0 - + Op.ADD - + Op.STOP, - max_stack_height=3, - ), - ], - ), - Container( - name="underflow_variable_stack_2", - sections=[ - Section.Code( - code=Op.PUSH0 * 2 - + Op.RJUMPI[1](0) - + Op.POP - + Op.ADD - + Op.STOP, - max_stack_height=3, - ), - ], - ), - Container( - name="underflow_variable_stack_3", - sections=[ - Section.Code( - Op.RJUMPI[8](0) - + Op.PUSH0 * 2 - + Op.RJUMPI[1](0) - + Op.POP * 2 - + Op.PUSH0 * 2 - + Op.REVERT, - max_stack_height=3, - ), - ], - ), - ], - ids=lambda x: x.name, -) -def test_stack_underflow_examples( - eof_test: EOFTestFiller, container: Container -) -> None: - """ - Test EOF validation failing due to stack underflow at basic instructions. 
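# Illustrative sketch (not part of the original tests): the underflow condition
# these containers probe. EIP-5450 validation tracks the minimum possible stack
# height at each instruction; if that minimum is below the instruction's
# required inputs, the container is rejected with STACK_UNDERFLOW. The helper
# name is hypothetical.
def underflows(min_stack_height: int, required_inputs: int) -> bool:
    return min_stack_height < required_inputs


assert underflows(1, 2)      # e.g. ADD with only one item on the stack
assert not underflows(2, 2)  # enough operands on every path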
- """ - eof_test( - container=container, expect_exception=EOFException.STACK_UNDERFLOW - ) - - -@pytest.mark.parametrize("initial_stack", [0, 1, 2]) -@pytest.mark.parametrize("calldata_1", [0, 1]) -@pytest.mark.parametrize("calldata_2", [0, 1]) -def test_valid_non_constant_stack_examples( - eof_state_test: EOFStateTestFiller, - initial_stack: int, - calldata_1: int, - calldata_2: int, -) -> None: - """Test valid containers with non constant stack items.""" - # Stores the number of added items to the stack in storage slot 0 - # calldata_1 == 1: number of items = 2 - # calldata_1 == 0: - # calldata_2 == 0: number of items: 3 - # calldata_2 == 1: number of items: 4 - expected_storage = ( - {0: 2} if calldata_1 == 1 else ({0: 3} if calldata_2 == 0 else {0: 4}) - ) - data = calldata_1.to_bytes(32, "big") + calldata_2.to_bytes(32, "big") - container = Container( - sections=[ - Section.Code( - code=Op.PUSH0 * initial_stack - + Op.CALLDATALOAD(0) - + Op.RJUMPI[19] - + Op.PUSH0 * 2 - + Op.SSTORE(0, 2) # Stores added items (2) - + Op.CALLDATALOAD(32) - + Op.RJUMPI[6] - + Op.POP - + Op.SSTORE(0, 1) # Updates number of added items to 1 - + Op.PUSH0 * 2 # <-- RJUMPI[19]/RJUMPI[6] target - + Op.SLOAD(0) - + Op.PUSH1(2) - + Op.ADD # Add latest added items (+2) - + Op.PUSH1(0) - + Op.SSTORE - + Op.STOP, - max_stack_height=6 + initial_stack, - ), - ], - ) - eof_state_test( - container=container, - expect_exception=None, - data=data, - container_post=Account(storage=expected_storage), - ) - - -@pytest.mark.parametrize( - "num_rjumpi", [MAX_STACK_INCREASE_LIMIT, MAX_STACK_INCREASE_LIMIT + 1] -) -def test_stack_range_maximally_broad( - eof_test: EOFTestFiller, num_rjumpi: int -) -> None: - """Test stack range 0-1023 at final instruction.""" - code = Op.STOP() - for i in range(0, num_rjumpi): - offset = i * 5 + 1 - code = Op.PUSH0 + Op.RJUMPI[offset] + Op.PUSH0 + code - - container = Container.Code( - code=code, max_stack_increase=MAX_STACK_INCREASE_LIMIT - ) - - eof_test( - container=container, - expect_exception=None - if num_rjumpi <= MAX_STACK_INCREASE_LIMIT - else EOFException.INVALID_MAX_STACK_INCREASE, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip5450_stack/test_execution.py b/tests/unscheduled/eip7692_eof_v1/eip5450_stack/test_execution.py deleted file mode 100644 index cc1395d104..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip5450_stack/test_execution.py +++ /dev/null @@ -1,82 +0,0 @@ -"""Test execution of EOF code in the context of the operand stack height.""" - -import pytest -from execution_testing import Account, EOFStateTestFiller, Op -from execution_testing.exceptions import EOFException -from execution_testing.test_types.eof.constants import ( - MAX_RUNTIME_STACK_HEIGHT, -) -from execution_testing.test_types.eof.v1 import Container, Section -from execution_testing.test_types.eof.v1.constants import ( - MAX_CODE_INPUTS, - MAX_STACK_INCREASE_LIMIT, - NON_RETURNING_SECTION, -) - -from .. import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-5450.md" -REFERENCE_SPEC_VERSION = "f20b164b00ae5553f7536a6d7a83a0f254455e09" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.parametrize("code_inputs", [0, 1, 16, 127, 128]) -@pytest.mark.parametrize("call_op", [Op.CALLF, Op.JUMPF]) -def test_execution_at_max_stack_height( - eof_state_test: EOFStateTestFiller, - code_inputs: int, - call_op: Op, -) -> None: - """ - Test execution at the maximum runtime operand stack height (1024). 
EOF - doesn't allow to increase the stack height of a single code section more - than 1023. The effect of the maximum runtime stack height is achieved by - using non-zero number of the code section inputs and increasing the runtime - stack to the limit accordingly. The test pushes consecutive numbers - starting from 0 (including inputs). At the maximum stack height SSTORE is - used so it should store 1022 at key 1023. - """ - max_stack_increase = MAX_RUNTIME_STACK_HEIGHT - code_inputs - container = Container( - sections=[ - Section.Code( - ( - sum(Op.PUSH1(x) for x in range(code_inputs)) - + call_op[1] - + (Op.STOP if call_op == Op.CALLF else b"") - ), - ), - Section.Code( - sum( - Op.PUSH2(x) - for x in range(code_inputs, MAX_RUNTIME_STACK_HEIGHT) - ) - + Op.SSTORE - + Op.POP - * (MAX_RUNTIME_STACK_HEIGHT - Op.SSTORE.popped_stack_items) - + (Op.RETF if call_op == Op.CALLF else Op.STOP), - code_inputs=code_inputs, - code_outputs=0 - if call_op == Op.CALLF - else NON_RETURNING_SECTION, - max_stack_increase=max_stack_increase, - ), - ], - ) - - exception = None - if max_stack_increase > MAX_STACK_INCREASE_LIMIT: - exception = EOFException.MAX_STACK_INCREASE_ABOVE_LIMIT - elif code_inputs > MAX_CODE_INPUTS: - exception = EOFException.INPUTS_OUTPUTS_NUM_ABOVE_LIMIT - - eof_state_test( - container=container, - expect_exception=exception, - container_post=Account( - storage={ - MAX_RUNTIME_STACK_HEIGHT - 1: MAX_RUNTIME_STACK_HEIGHT - 2 - } - ), - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/__init__.py b/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/__init__.py deleted file mode 100644 index dcd27c79cd..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -""" -Test cases for [EIP-6206: EOF - JUMPF and non-returning functions](https:// -eips.ethereum.org/EIPS/eip-6206). - -EIP-6206 adds a conditional forward jump instruction and support for -functions without return values. Opcodes introduced: `JUMPF` (`0xE5`). 
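# Illustrative sketch (assumptions noted, not from the original file): how
# test_execution_at_max_stack_height above selects which validation error, if
# any, applies when the runtime stack is pushed to 1024 via code-section
# inputs. Constants mirror the imported limits; error names are given as
# strings purely for illustration.
MAX_RUNTIME = 1024   # mirrors MAX_RUNTIME_STACK_HEIGHT
MAX_INCREASE = 1023  # mirrors MAX_STACK_INCREASE_LIMIT
MAX_INPUTS = 127     # mirrors MAX_CODE_INPUTS


def expected_error(code_inputs: int) -> str | None:
    max_stack_increase = MAX_RUNTIME - code_inputs
    if max_stack_increase > MAX_INCREASE:
        return "MAX_STACK_INCREASE_ABOVE_LIMIT"   # e.g. code_inputs == 0
    if code_inputs > MAX_INPUTS:
        return "INPUTS_OUTPUTS_NUM_ABOVE_LIMIT"   # e.g. code_inputs == 128
    return None                                   # e.g. code_inputs in 1..127


assert expected_error(0) == "MAX_STACK_INCREASE_ABOVE_LIMIT"
assert expected_error(128) == "INPUTS_OUTPUTS_NUM_ABOVE_LIMIT"
assert expected_error(16) is None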
-""" diff --git a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/helpers.py b/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/helpers.py deleted file mode 100644 index f5098389a8..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/helpers.py +++ /dev/null @@ -1,14 +0,0 @@ -"""EOF JumpF tests helpers.""" - -import itertools - -"""Storage addresses for common testing fields""" -_slot = itertools.count() -next(_slot) # don't use slot 0 -slot_code_worked = next(_slot) -slot_last_slot = next(_slot) -slot_stack_canary = next(_slot) - -"""Storage values for common testing fields""" -value_code_worked = 0x2015 -value_canary_written = 0xDEADB12D diff --git a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/spec.py b/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/spec.py deleted file mode 100644 index d382a9ca43..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/spec.py +++ /dev/null @@ -1 +0,0 @@ -"""EOF V1 Constants used throughout all tests.""" diff --git a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_execution.py b/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_execution.py deleted file mode 100644 index 98c5a5246a..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_execution.py +++ /dev/null @@ -1,772 +0,0 @@ -"""EOF JUMPF tests covering simple cases.""" - -import pytest -from execution_testing import ( - Account, - Alloc, - Environment, - EOFException, - EOFStateTestFiller, - Op, - StateTestFiller, - Storage, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container, Section -from execution_testing.test_types.eof.v1.constants import ( - NON_RETURNING_SECTION, -) - -from .. import EOF_FORK_NAME -from .helpers import ( - slot_code_worked, - slot_stack_canary, - value_canary_written, - value_code_worked, -) - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-6206.md" -REFERENCE_SPEC_VERSION = "2f365ea0cd58faa6e26013ea77ce6d538175f7d0" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -def test_jumpf_forward( - eof_state_test: EOFStateTestFiller, -) -> None: - """Test JUMPF jumping forward.""" - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.JUMPF[1], - ), - Section.Code( - Op.SSTORE(slot_code_worked, value_code_worked) + Op.STOP, - ), - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - data=b"\1", - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="forward", - sections=[ - Section.Code( - code=Op.CALLF[1] - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - Section.Code( - code=Op.JUMPF[2], - code_outputs=0, - ), - Section.Code( - code=Op.RETF, - code_outputs=0, - ), - ], - ), - Container( - name="backward", - sections=[ - Section.Code( - code=Op.CALLF[2] - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - Section.Code( - code=Op.RETF, - code_outputs=0, - ), - Section.Code( - code=Op.JUMPF[1], - code_outputs=0, - ), - ], - ), - Container( - name="equal_outputs", - sections=[ - Section.Code( - Op.CALLF[1] + Op.SSTORE + Op.STOP, - max_stack_height=2, - ), - Section.Code( - Op.JUMPF[2], - code_outputs=2, - max_stack_height=0, - ), - Section.Code( - Op.PUSH2[value_code_worked] - + Op.PUSH2[slot_code_worked] - + Op.RETF, - code_outputs=2, - max_stack_height=2, - ), - ], - ), - Container( - name="compatible_outputs", - sections=[ - Section.Code( - code=Op.CALLF[1] + Op.SSTORE + Op.STOP, - max_stack_height=2, - ), - Section.Code( - Op.PUSH2[value_code_worked] + Op.JUMPF[2], - code_outputs=2, - 
max_stack_height=1, - ), - Section.Code( - Op.PUSH2[slot_code_worked] + Op.RETF, - code_outputs=1, - max_stack_height=1, - ), - ], - ), - ], - ids=lambda container: container.name, -) -def test_jumpf_to_retf( - eof_state_test: EOFStateTestFiller, container: Container -) -> None: - """Tests JUMPF to a returning section with RETF.""" - eof_state_test( - container=container, - container_post=Account(storage={slot_code_worked: value_code_worked}), - data=b"\1", - ) - - -def test_jumpf_to_self( - eof_state_test: EOFStateTestFiller, -) -> None: - """Tests JUMPF jumping to self.""" - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.SLOAD(slot_code_worked) - + Op.ISZERO - + Op.RJUMPI[1] - + Op.STOP - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.JUMPF[0], - ) - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - data=b"\1", - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="1_to_2_arg0", - sections=[ - Section.Code( - Op.CALLF[1] + Op.STOP, - ), - Section.Code( - Op.PUSH0 + Op.RJUMPI[3] + Op.JUMPF[2] + Op.RETF, - code_outputs=0, - ), - Section.Code( - Op.SSTORE(slot_code_worked, value_code_worked) + Op.RETF, - code_outputs=0, - ), - ], - ), - Container( - name="1_to_2_arg1", - sections=[ - Section.Code( - Op.PUSH1[1] + Op.CALLF[1] + Op.STOP, - ), - Section.Code( - Op.RJUMPI[1] - + Op.RETF - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.JUMPF[2], - code_inputs=1, - code_outputs=0, - ), - Section.Code( - Op.RETF, - code_outputs=0, - ), - ], - ), - Container( - name="1_to_0_to_1", - sections=[ - Section.Code( - Op.ISZERO(Op.SLOAD(slot_code_worked)) - + Op.CALLF[1] - + Op.STOP, - ), - Section.Code( - Op.RJUMPI[1] - + Op.RETF - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.JUMPF[0], - code_inputs=1, - code_outputs=0, - ), - ], - ), - Container( - name="retf_in_nonreturning", - sections=[ - Section.Code( - Op.PUSH0 + Op.JUMPF[1], - ), - Section.Code( - Op.RJUMPI[1] + Op.RETF + Op.JUMPF[0], - code_inputs=1, - ), - ], - validity_error=EOFException.INVALID_NON_RETURNING_FLAG, - ), - Container( - name="jumpf_to_returning", - sections=[ - Section.Code( - Op.PUSH0 + Op.JUMPF[1], - ), - Section.Code( - Op.RJUMPI[1] + Op.RETF + Op.JUMPF[2], - code_inputs=1, - ), - Section.Code( - Op.RETF, - code_outputs=0, - ), - ], - validity_error=EOFException.INVALID_NON_RETURNING_FLAG, - ), - Container( - name="jumpf_to_returning_2", - sections=[ - Section.Code( - Op.PUSH0 + Op.JUMPF[1], - ), - Section.Code( - Op.RJUMPI[3] + Op.JUMPF[2] + Op.RETF, - code_inputs=1, - ), - Section.Code( - Op.RETF, - code_outputs=0, - ), - ], - validity_error=EOFException.INVALID_NON_RETURNING_FLAG, - ), - ], - ids=lambda container: container.name, -) -def test_jumpf_and_retf( - eof_state_test: EOFStateTestFiller, container: Container -) -> None: - """Tests JUMPF and RETF in the same section.""" - eof_state_test( - container=container, - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_jumpf_too_large( - eof_state_test: EOFStateTestFiller, -) -> None: - """Tests JUMPF jumping to a section outside the max section range.""" - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.JUMPF[1025], - ) - ], - validity_error=EOFException.INVALID_CODE_SECTION_INDEX, - ), - ) - - -def test_jumpf_way_too_large( - eof_state_test: EOFStateTestFiller, -) -> None: - """Tests JUMPF jumping to uint64.MAX.""" - eof_state_test( - container=Container( - sections=[ - Section.Code( - 
code=Op.JUMPF[0xFFFF], - ) - ], - validity_error=EOFException.INVALID_CODE_SECTION_INDEX, - ), - ) - - -def test_jumpf_to_nonexistent_section( - eof_state_test: EOFStateTestFiller, -) -> None: - """ - Tests JUMPF jumping to valid section number but where the section does not - exist. - """ - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.JUMPF[5], - ) - ], - validity_error=EOFException.INVALID_CODE_SECTION_INDEX, - ), - ) - - -def test_callf_to_non_returning_section( - eof_state_test: EOFStateTestFiller, -) -> None: - """Tests CALLF into a non-returning section.""" - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.CALLF[1], - ), - Section.Code( - code=Op.STOP, - code_outputs=0, - ), - ], - validity_error=EOFException.MISSING_STOP_OPCODE, - ), - ) - - -def test_jumpf_stack_size_1024( - eof_state_test: EOFStateTestFiller, -) -> None: - """Test stack reaching 1024 items in target function of JUMPF.""" - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 * 1022 + Op.JUMPF[1], - max_stack_height=1022, - ), - Section.Code( - Op.SSTORE(slot_code_worked, value_code_worked) + Op.STOP, - code_inputs=0, - code_outputs=NON_RETURNING_SECTION, - max_stack_height=2, - ), - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_jumpf_with_inputs_stack_size_1024( - eof_state_test: EOFStateTestFiller, -) -> None: - """ - Test stack reaching 1024 items in target function of JUMPF with inputs. - """ - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 * 1022 + Op.JUMPF[1], - max_stack_height=1022, - ), - Section.Code( - Op.SSTORE(slot_code_worked, value_code_worked) + Op.STOP, - code_inputs=3, - code_outputs=NON_RETURNING_SECTION, - max_stack_height=5, - ), - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_jumpf_stack_size_1024_at_push( - eof_state_test: EOFStateTestFiller, -) -> None: - """ - Test stack reaching 1024 items in JUMPF target function at PUSH0 - instruction. - """ - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 * 1023 - + Op.CALLF[1] - + Op.POP * 1023 - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURN(0, 0), - max_stack_height=1023, - ), - Section.Code( - # stack has 1023 items - Op.JUMPF[2], - code_inputs=0, - code_outputs=0, - max_stack_height=0, - ), - Section.Code( - Op.PUSH0 - + - # stack has 1024 items - Op.POP - + Op.RETF, - code_inputs=0, - code_outputs=0, - max_stack_height=1, - ), - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -@pytest.mark.parametrize( - ("stack_height", "failure"), - ( - pytest.param(1021, False, id="no_overflow"), - pytest.param(1022, True, id="rule_overflow"), - pytest.param(1023, True, id="execution_overflow"), - ), -) -def test_jumpf_stack_overflow( - stack_height: int, - failure: bool, - eof_state_test: EOFStateTestFiller, -) -> None: - """ - Test rule #2 in execution semantics, where we make sure we have enough - stack to guarantee safe execution (the "reserved stack rule") max possible - stack will not exceed 1024. But some executions may not overflow the stack, - so we need to ensure the rule is checked. 
- - `no_overflow` - the stack does not overflow at JUMPF call, executes to end - - `rule_overflow` - reserved stack rule triggers, but execution would not - overflow if allowed - - `execution_overflow` - execution would overflow (but still blocked by - reserved stack rule) - """ - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 * stack_height - + Op.CALLF[1] - + Op.POP * stack_height - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURN(0, 0), - max_stack_height=stack_height, - ), - Section.Code( - # Stack has stack_height items - Op.JUMPF[2], - code_inputs=0, - code_outputs=0, - max_stack_height=0, - ), - Section.Code( - Op.CALLDATALOAD(0) - + Op.ISZERO - + Op.RJUMPI[6] - + Op.PUSH0 * 3 - + Op.POP * 3 - + Op.SSTORE(slot_stack_canary, value_canary_written) - + Op.RETF, - code_inputs=0, - code_outputs=0, - max_stack_height=3, - ), - ], - ), - container_post=Account( - storage={ - slot_code_worked: 0 if failure else value_code_worked, - slot_stack_canary: 0 if failure else value_canary_written, - } - ), - ) - - -def test_jumpf_with_inputs_stack_size_1024_at_push( - eof_state_test: EOFStateTestFiller, -) -> None: - """ - Test stack reaching 1024 items in JUMPF target function with inputs at - PUSH0 instruction. - """ - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 * 1023 - + Op.CALLF[1] - + Op.POP * 1023 - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURN(0, 0), - max_stack_height=1023, - ), - Section.Code( - # Stack has 1023 items - Op.JUMPF[2], - code_inputs=3, - code_outputs=3, - max_stack_height=3, - ), - Section.Code( - Op.PUSH0 - + - # Stack has 1024 items - Op.POP - + Op.RETF, - code_inputs=3, - code_outputs=3, - max_stack_height=4, - ), - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_jumpf_with_inputs_stack_overflow( - eof_state_test: EOFStateTestFiller, -) -> None: - """ - Test stack overflowing 1024 items in JUMPF target function with inputs. 
- """ - eof_state_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 * 1023 - + Op.CALLF[1] - + Op.POP * 1023 - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURN(0, 0), - max_stack_height=1023, - ), - Section.Code( - # Stack has 1023 items - Op.JUMPF[2], - code_inputs=3, - code_outputs=3, - max_stack_height=3, - ), - Section.Code( - Op.PUSH0 - + Op.PUSH0 - + - # Runtime stackoverflow - Op.POP - + Op.POP - + Op.RETF, - code_inputs=3, - code_outputs=3, - max_stack_height=5, - ), - ], - ), - container_post=Account(storage={slot_code_worked: 0}), - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="self", - sections=[ - Section.Code( - Op.SSTORE(slot_code_worked, value_code_worked) - + Op.JUMPF[0], - ), - ], - ), - Container( - name="1_to_0", - sections=[ - Section.Code( - Op.SSTORE(slot_code_worked, value_code_worked) - + Op.JUMPF[1], - ), - Section.Code( - Op.JUMPF[0], - ), - ], - ), - Container( - name="2_to_1", - sections=[ - Section.Code( - Op.SSTORE(slot_code_worked, value_code_worked) - + Op.JUMPF[1], - ), - Section.Code( - Op.JUMPF[2], - ), - Section.Code( - Op.JUMPF[1], - ), - ], - ), - Container( - name="2_to_1_returning", - sections=[ - Section.Code( - Op.SSTORE(slot_code_worked, value_code_worked) - + Op.CALLF[1] - + Op.STOP, - ), - Section.Code( - Op.JUMPF[2], - code_outputs=0, - ), - Section.Code( - Op.JUMPF[1], - code_outputs=0, - ), - ], - ), - Container( - name="1_to_0_invalid", - sections=[ - Section.Code( - Op.JUMPF[1], - ), - Section.Code( - Op.JUMPF[0], - code_outputs=0, - ), - ], - validity_error=EOFException.INVALID_NON_RETURNING_FLAG, - ), - ], - ids=lambda container: container.name, -) -def test_jumpf_infinite_loop( - eof_state_test: EOFStateTestFiller, container: Container -) -> None: - """Tests JUMPF causing an infinite loop.""" - eof_state_test( - container=container, - container_post=Account(storage={slot_code_worked: 0}), - ) - - -def test_jumpf_memory_context( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """Verifies JUMPF doesn't corrupt memory.""" - env = Environment() - storage = Storage() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - Op.SSTORE( - storage.store_next(value_code_worked), - value_code_worked, - ) - + Op.MSTORE(0, 1) - + Op.JUMPF[1], - ), - Section.Code( - Op.SSTORE(storage.store_next(32), Op.MSIZE()) - + Op.SSTORE(storage.store_next(1), Op.MLOAD(0)) - + Op.STOP, - ), - ], - ), - ) - post = { - contract_address: Account( - storage=storage, - ), - } - tx = Transaction( - to=contract_address, - gas_limit=500_000, - sender=pre.fund_eoa(), - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_callf_jumpf_retf_memory_context( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """Verifies CALLF, JUMPF and RETF don't corrupt memory.""" - env = Environment() - storage = Storage() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - Op.SSTORE( - storage.store_next(value_code_worked), - value_code_worked, - ) - + Op.MSTORE(0, 1) - + Op.CALLF[1] - + Op.SSTORE(storage.store_next(96), Op.MSIZE()) - + Op.SSTORE(storage.store_next(2), Op.MLOAD(0)) - + Op.SSTORE(storage.store_next(21), Op.MLOAD(32)) - + Op.SSTORE(storage.store_next(31), Op.MLOAD(64)) - + Op.STOP, - ), - Section.Code( - Op.SSTORE(storage.store_next(32), Op.MSIZE()) - + Op.SSTORE(storage.store_next(1), Op.MLOAD(0)) - + Op.MSTORE(0, 2) - + Op.MSTORE(32, 3) - + Op.JUMPF[2], - code_outputs=0, - ), - Section.Code( - 
Op.SSTORE(storage.store_next(64), Op.MSIZE()) - + Op.SSTORE(storage.store_next(2), Op.MLOAD(0)) - + Op.SSTORE(storage.store_next(3), Op.MLOAD(32)) - + Op.MSTORE(32, 21) - + Op.MSTORE(64, 31) - + Op.RETF, - code_outputs=0, - ), - ], - ), - ) - post = { - contract_address: Account( - storage=storage, - ), - } - tx = Transaction( - to=contract_address, - gas_limit=500_000, - sender=pre.fund_eoa(), - ) - state_test(env=env, pre=pre, post=post, tx=tx) diff --git a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_stack.py b/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_stack.py deleted file mode 100644 index 508e020764..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_stack.py +++ /dev/null @@ -1,424 +0,0 @@ -"""EOF JUMPF tests covering stack validation rules.""" - -import pytest -from execution_testing import ( - Account, - EOFException, - EOFStateTestFiller, - EOFTestFiller, - Op, -) -from execution_testing.test_types.eof.constants import ( - MAX_RUNTIME_STACK_HEIGHT, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. import EOF_FORK_NAME -from .helpers import slot_code_worked, value_code_worked - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-6206.md" -REFERENCE_SPEC_VERSION = "2f365ea0cd58faa6e26013ea77ce6d538175f7d0" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.parametrize( - "target_inputs", - [0, 2, 4], - ids=lambda x: "ti-%d" % x, -) -@pytest.mark.parametrize( - "stack_height", - [0, 2, 4], - ids=lambda x: "h-%d" % x, -) -def test_jumpf_stack_non_returning_rules( - eof_state_test: EOFStateTestFiller, - target_inputs: int, - stack_height: int, -) -> None: - """ - Tests for JUMPF validation stack rules. Non-returning section cases. Valid - cases are executed. - """ - container = Container( - name="stack-non-retuning_h-%d_ti-%d" % (stack_height, target_inputs), - sections=[ - Section.Code( - code=Op.JUMPF[1], - ), - Section.Code( - code=Op.PUSH0 * stack_height + Op.JUMPF[2], - max_stack_height=stack_height, - ), - Section.Code( - code=Op.POP * target_inputs - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - code_inputs=target_inputs, - max_stack_height=max(2, target_inputs), - ), - ], - ) - - if stack_height < target_inputs: - container.validity_error = EOFException.STACK_UNDERFLOW - - eof_state_test( - container=container, - container_post=Account(storage={slot_code_worked: value_code_worked}), - data=b"\1", - ) - - -@pytest.mark.parametrize( - "source_outputs", - [0, 2, 4], - ids=lambda x: "so-%d" % x, -) -@pytest.mark.parametrize( - "target_outputs", - [0, 2, 4], - ids=lambda x: "to-%d" % x, -) -@pytest.mark.parametrize( - "target_inputs", - [0, 2, 4], - ids=lambda x: "ti-%d" % x, -) -@pytest.mark.parametrize( - "stack_diff", [-1, 0, 1], ids=["less-stack", "same-stack", "more-stack"] -) -def test_jumpf_stack_returning_rules( - eof_state_test: EOFStateTestFiller, - source_outputs: int, - target_outputs: int, - target_inputs: int, - stack_diff: int, -) -> None: - """ - Tests for JUMPF validation stack rules. Returning section cases. Valid - cases are executed. - """ - if target_outputs > source_outputs: - # These create invalid containers without JUMPF validation, Don't test. - return - if target_inputs == 0 and stack_diff < 0: - # Code generation is impossible for this configuration. Don't test. 
- return - - target_delta = target_outputs - target_inputs - container = Container( - name="stack-retuning_co-%d_to-%d_ti-%d_diff-%d" - % (source_outputs, target_outputs, target_inputs, stack_diff), - sections=[ - Section.Code( - code=Op.CALLF[1] - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - max_stack_height=2 + source_outputs, - ), - Section.Code( - code=Op.PUSH0 * max(0, target_inputs + stack_diff) - + Op.JUMPF[2], - code_outputs=source_outputs, - max_stack_height=target_inputs, - ), - Section.Code( - code=( - Op.POP * -target_delta - if target_delta < 0 - else Op.PUSH0 * target_delta - ) - + Op.RETF, - code_inputs=target_inputs, - code_outputs=target_outputs, - max_stack_height=max(target_inputs, target_outputs), - ), - ], - ) - - if stack_diff < source_outputs - target_outputs: - container.validity_error = EOFException.STACK_UNDERFLOW - elif stack_diff > source_outputs - target_outputs: - container.validity_error = EOFException.STACK_HIGHER_THAN_OUTPUTS - - eof_state_test( - container=container, - container_post=Account(storage={slot_code_worked: value_code_worked}), - data=b"\1", - ) - - -@pytest.mark.parametrize( - ["target_inputs", "target_outputs", "stack_height", "expected_exception"], - [ - pytest.param(1, 0, 1, EOFException.STACK_UNDERFLOW, id="less_stack"), - pytest.param(2, 1, 2, None, id="same_stack"), - pytest.param( - 3, - 2, - 3, - EOFException.JUMPF_DESTINATION_INCOMPATIBLE_OUTPUTS, - id="more_stack", - ), - pytest.param( - 2, - 2, - 1, - EOFException.JUMPF_DESTINATION_INCOMPATIBLE_OUTPUTS, - id="less_output", - ), - pytest.param(1, 1, 1, None, id="same_output"), - pytest.param(0, 0, 1, None, id="more_output"), - ], -) -def test_jumpf_incompatible_outputs( - eof_test: EOFTestFiller, - target_inputs: int, - target_outputs: int, - stack_height: int, - expected_exception: EOFException, -) -> None: - """Tests JUMPF into a section with incorrect number of outputs.""" - current_section_outputs = 1 - if ( - current_section_outputs + target_inputs - target_outputs - ) != stack_height: - assert expected_exception is not None - eof_test( - container=Container( - sections=[ - Section.Code(Op.CALLF(1) + Op.STOP, max_stack_height=1), - Section.Code( - Op.PUSH0 * stack_height + Op.JUMPF(2), - code_outputs=current_section_outputs, - ), - Section.Code( - Op.POP * (target_inputs - target_outputs) + Op.RETF, - code_inputs=target_inputs, - code_outputs=target_outputs, - max_stack_height=target_inputs, - ), - ] - ), - expect_exception=expected_exception, - ) - - -@pytest.mark.parametrize( - ["target_inputs", "target_outputs", "stack_height", "expected_exception"], - [ - pytest.param(1, 0, 1, EOFException.STACK_UNDERFLOW, id="less_stack"), - pytest.param( - 2, 1, 2, EOFException.STACK_HIGHER_THAN_OUTPUTS, id="same_stack" - ), - pytest.param( - 3, - 2, - 3, - EOFException.JUMPF_DESTINATION_INCOMPATIBLE_OUTPUTS, - id="more_stack", - ), - pytest.param( - 2, - 2, - 1, - EOFException.JUMPF_DESTINATION_INCOMPATIBLE_OUTPUTS, - id="less_output", - ), - pytest.param( - 1, 1, 1, EOFException.STACK_HIGHER_THAN_OUTPUTS, id="same_output" - ), - pytest.param( - 0, 0, 1, EOFException.STACK_HIGHER_THAN_OUTPUTS, id="more_output" - ), - ], -) -def test_jumpf_diff_max_stack_height( - eof_test: EOFTestFiller, - target_inputs: int, - target_outputs: int, - stack_height: int, - expected_exception: EOFException, -) -> None: - """Tests jumpf with a different max stack height.""" - current_section_outputs = 1 - eof_test( - container=Container( - sections=[ - Section.Code(Op.CALLF(1) + Op.STOP, 
max_stack_height=1), - Section.Code( - (Op.PUSH0 * stack_height) # (0, 0) - + Op.PUSH0 # (stack_height, stack_height) - + Op.RJUMPI[1] # (stack_height + 1, stack_height + 1) - + Op.PUSH0 # (stack_height, stack_height) - + Op.JUMPF(2), # (stack_height, stack_height + 1) - code_outputs=current_section_outputs, - ), - Section.Code( - Op.POP * (target_inputs - target_outputs) + Op.RETF, - code_inputs=target_inputs, - code_outputs=target_outputs, - max_stack_height=target_inputs, - ), - ] - ), - expect_exception=expected_exception, - ) - - -@pytest.mark.parametrize( - ["target_inputs", "target_outputs", "stack_height", "expected_exception"], - [ - pytest.param(1, 0, 1, EOFException.STACK_UNDERFLOW, id="less_stack"), - pytest.param(2, 1, 2, EOFException.STACK_UNDERFLOW, id="same_stack"), - pytest.param( - 3, - 2, - 3, - EOFException.JUMPF_DESTINATION_INCOMPATIBLE_OUTPUTS, - id="more_stack", - ), - pytest.param( - 2, - 2, - 1, - EOFException.JUMPF_DESTINATION_INCOMPATIBLE_OUTPUTS, - id="less_output", - ), - pytest.param(1, 1, 1, EOFException.STACK_UNDERFLOW, id="same_output"), - pytest.param(0, 0, 1, EOFException.STACK_UNDERFLOW, id="more_output"), - ], -) -def test_jumpf_diff_min_stack_height( - eof_test: EOFTestFiller, - target_inputs: int, - target_outputs: int, - stack_height: int, - expected_exception: EOFException, -) -> None: - """Tests jumpf with a different min stack height.""" - current_section_outputs = 1 - eof_test( - container=Container( - sections=[ - Section.Code(Op.CALLF(1) + Op.STOP, max_stack_height=1), - Section.Code( - (Op.PUSH0 * (stack_height - 1)) # (0, 0) - + Op.PUSH0 # (stack_height - 1, stack_height - 1) - + Op.RJUMPI[1] # (stack_height, stack_height) - + Op.PUSH0 # (stack_height - 1, stack_height - 1) - + Op.JUMPF(2), # (stack_height - 1, stack_height) - code_outputs=current_section_outputs, - ), - Section.Code( - Op.POP * (target_inputs - target_outputs) + Op.RETF, - code_inputs=target_inputs, - code_outputs=target_outputs, - max_stack_height=target_inputs, - ), - ] - ), - expect_exception=expected_exception, - ) - - -def test_jumpf_self_variadic_stack_overflow(eof_test: EOFTestFiller) -> None: - """Test JUMPF calling self causing EOF validation stack overflow.""" - container = Container( - name="jumpf_stack_overflow_variable_stack_0", - sections=[ - Section.Code( - code=Op.PUSH0 + Op.RJUMPI[2](0) + Op.PUSH0 * 511 + Op.JUMPF[0], - max_stack_height=512, - ), - ], - ) - eof_test(container=container) - - -@pytest.mark.parametrize("stack_height", [512, 1022, 1023]) -@pytest.mark.parametrize("callee_stack_height", [0, 1, 2, 5, 511, 512, 513]) -def test_jumpf_variadic_stack_overflow( - eof_test: EOFTestFiller, stack_height: int, callee_stack_height: int -) -> None: - """ - Test JUMPF stack validation causing stack overflow with variable stack - height. 
- """ - container = Container( - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.RJUMPI[2](0) - + Op.PUSH0 * (stack_height - 1) - + Op.JUMPF[1], - max_stack_height=stack_height, - ), - Section.Code( - code=Op.PUSH0 * callee_stack_height + Op.STOP, - max_stack_height=callee_stack_height, - ), - ], - validity_error=EOFException.STACK_OVERFLOW - if stack_height + callee_stack_height > MAX_RUNTIME_STACK_HEIGHT - else None, - ) - eof_test(container=container) - - -@pytest.mark.parametrize("stack_height", [1022, 1023]) -@pytest.mark.parametrize("callee_stack_increase", [0, 1, 2]) -def test_jumpf_with_inputs_stack_overflow( - eof_test: EOFTestFiller, stack_height: int, callee_stack_increase: int -) -> None: - """Test validation of JUMPF with inputs causing stack overflow.""" - container = Container( - sections=[ - Section.Code( - code=Op.PUSH0 * stack_height + Op.JUMPF[1], - max_stack_height=stack_height, - ), - Section.Code( - code=Op.PUSH0 * callee_stack_increase + Op.STOP, - code_inputs=2, - max_stack_height=2 + callee_stack_increase, - ), - ], - validity_error=EOFException.STACK_OVERFLOW - if stack_height + callee_stack_increase > MAX_RUNTIME_STACK_HEIGHT - else None, - ) - eof_test(container=container) - - -@pytest.mark.parametrize("stack_height", [1022, 1023]) -@pytest.mark.parametrize("callee_stack_increase", [0, 1, 2]) -def test_jumpf_with_inputs_stack_overflow_variable_stack( - eof_test: EOFTestFiller, stack_height: int, callee_stack_increase: int -) -> None: - """ - Test JUMPF with variable stack depending on RJUMPI calling function with - inputs. - """ - container = Container( - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.RJUMPI[2](0) - + Op.PUSH0 * (stack_height - 1) - + Op.JUMPF[1], - max_stack_height=stack_height, - ), - Section.Code( - code=Op.PUSH0 * callee_stack_increase + Op.STOP, - code_inputs=2, - max_stack_height=2 + callee_stack_increase, - ), - ], - validity_error=EOFException.STACK_OVERFLOW - if stack_height + callee_stack_increase > MAX_RUNTIME_STACK_HEIGHT - else None, - ) - eof_test(container=container) diff --git a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_target.py b/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_target.py deleted file mode 100644 index 3bce3fbbd5..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_target.py +++ /dev/null @@ -1,135 +0,0 @@ -"""EOF JUMPF tests covering JUMPF target rules.""" - -import pytest -from execution_testing import ( - Account, - EOFException, - EOFStateTestFiller, - Op, -) -from execution_testing.test_types.eof.v1 import Container, Section -from execution_testing.test_types.eof.v1.constants import ( - NON_RETURNING_SECTION, -) - -from .. import EOF_FORK_NAME -from .helpers import slot_code_worked, value_code_worked - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-6206.md" -REFERENCE_SPEC_VERSION = "2f365ea0cd58faa6e26013ea77ce6d538175f7d0" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.parametrize( - "target_outputs", - [NON_RETURNING_SECTION, 0, 2, 4, 127], - ids=lambda x: "to-%s" % ("N" if x == NON_RETURNING_SECTION else x), -) -@pytest.mark.parametrize( - "source_outputs", - [NON_RETURNING_SECTION, 0, 2, 4, 127], - ids=lambda x: "so-%s" % ("N" if x == NON_RETURNING_SECTION else x), -) -def test_jumpf_target_rules( - eof_state_test: EOFStateTestFiller, - source_outputs: int, - target_outputs: int, -) -> None: - """ - Validate the target section rules of JUMPF, and execute valid cases. 
We are - not testing stack so a lot of the logic is to get correct stack values. - """ - source_non_returning = source_outputs == NON_RETURNING_SECTION - source_height = 0 if source_non_returning else source_outputs - source_section_index = 1 - - target_non_returning = target_outputs == NON_RETURNING_SECTION - target_height = 0 if target_non_returning else target_outputs - target_section_index = 2 - - # Because we are testing the target and not the stack height validation we - # need to do some work to make sure the stack passes validation. - - # `source_extra_push` is how many more pushes we need to match our stack - # commitments - source_extra_push = max(0, source_height - target_height) - source_section = Section.Code( - code=Op.PUSH0 * (source_height) - + Op.CALLDATALOAD(0) - + Op.RJUMPI[1] - + (Op.STOP if source_non_returning else Op.RETF) - + Op.PUSH0 * source_extra_push - + Op.JUMPF[target_section_index], - code_inputs=0, - code_outputs=source_outputs, - max_stack_height=source_height + max(1, source_extra_push), - ) - - # `delta` is how many stack items the target output is from the input - # height, and tracks the number of pushes or (if negative) pops the target - # needs to do to match output commitments - delta = ( - 0 - if target_non_returning or source_non_returning - else target_outputs - source_height - ) - target_section = Section.Code( - code=((Op.PUSH0 * delta) if delta >= 0 else (Op.POP * -delta)) - + Op.CALLF[3] - + (Op.STOP if target_non_returning else Op.RETF), - code_inputs=source_height, - code_outputs=target_outputs, - max_stack_height=max(source_height, source_height + delta), - ) - - base_code = ( - Op.JUMPF[source_section_index] - if source_non_returning - else (Op.CALLF[source_section_index](0, 0) + Op.STOP) - ) - base_height = 0 if source_non_returning else 2 + source_outputs - container = Container( - name="so-%s_to-%s" - % ( - "N" if source_non_returning else source_outputs, - "N" if target_non_returning else target_outputs, - ), - sections=[ - Section.Code( - code=base_code, - max_stack_height=base_height, - ), - source_section, - target_section, - Section.Code( - code=Op.SSTORE(slot_code_worked, value_code_worked) + Op.RETF, - code_outputs=0, - ), - ], - ) - if target_non_returning or source_non_returning: - if not target_non_returning and source_non_returning: - # both as non-returning handled above - container.validity_error = EOFException.INVALID_NON_RETURNING_FLAG - elif source_outputs < target_outputs: - container.validity_error = ( - EOFException.JUMPF_DESTINATION_INCOMPATIBLE_OUTPUTS - ) - - eof_state_test( - container=container, - container_post=Account(storage={slot_code_worked: value_code_worked}), - data=b"\1", - ) - - -@pytest.mark.skip("Not implemented") -def test_jumpf_multi_target_rules( - eof_state_test: EOFStateTestFiller, -) -> None: - """ - NOT IMPLEMENTED: Test a section that contains multiple JUMPF to different - targets with different outputs. 
- """ - pass diff --git a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py b/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py deleted file mode 100644 index 813bd27e15..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py +++ /dev/null @@ -1,554 +0,0 @@ -"""EOF validation tests for JUMPF instruction.""" - -import pytest -from execution_testing import EOFException, EOFTestFiller, Op -from execution_testing.test_types.eof.constants import ( - MAX_RUNTIME_STACK_HEIGHT, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-6206.md" -REFERENCE_SPEC_VERSION = "2f365ea0cd58faa6e26013ea77ce6d538175f7d0" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="to_0", - sections=[ - Section.Code( - Op.CALLF[1] + Op.STOP, - ), - Section.Code( - Op.JUMPF[0], - code_outputs=0, - ), - ], - ), - Container( - name="to_2", - sections=[ - Section.Code( - Op.CALLF[1] + Op.STOP, - ), - Section.Code( - Op.JUMPF[2], - code_outputs=0, - ), - Section.Code( - Op.INVALID, - ), - ], - ), - Container( - name="to_retf", - sections=[ - Section.Code( - Op.CALLF[1] + Op.STOP, - ), - Section.Code( - Op.JUMPF[2], - code_outputs=0, - ), - Section.Code( - Op.RETF, - ), - ], - ), - ], - ids=lambda container: container.name, -) -def test_returning_jumpf( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """Test cases for JUMPF instruction validation in a returning sections.""" - eof_test( - container=container, - expect_exception=EOFException.INVALID_NON_RETURNING_FLAG, - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="jumpf1", - sections=[ - Section.Code( - Op.JUMPF[1], - ) - ], - ), - Container( - name="jumpf2", - sections=[ - Section.Code( - Op.JUMPF[2], - ), - Section.Code( - Op.STOP, - ), - ], - ), - Container( - name="jumpf1_jumpf2", - sections=[ - Section.Code( - Op.JUMPF[1], - ), - Section.Code( - Op.JUMPF[2], - ), - ], - ), - ], - ids=lambda container: container.name, -) -def test_invalid_code_section_index( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """ - Test cases for JUMPF instructions with invalid target code section index. - """ - eof_test( - container=container, - expect_exception=EOFException.INVALID_CODE_SECTION_INDEX, - ) - - -def test_returning_section_aborts_jumpf( - eof_test: EOFTestFiller, -) -> None: - """ - Test EOF container validation where in the same code section we have - returning and nonreturning terminating instructions. - """ - container = Container( - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=1), - Section.Code( - code=Op.PUSH0 * 2 - + Op.RJUMPI[4] - + Op.POP - + Op.JUMPF[2] - + Op.RETF, - code_outputs=1, - ), - Section.Code( - code=Op.PUSH0 * 2 + Op.RJUMPI[1] + Op.RETF + Op.INVALID, - code_inputs=0, - code_outputs=1, - ), - ], - ) - eof_test(container=container) - - -@pytest.mark.parametrize("stack_height", [512, 513, 1023]) -def test_jumpf_self_stack_overflow( - eof_test: EOFTestFiller, stack_height: int -) -> None: - """ - Test JUMPF instruction jumping to itself causing validation time stack - overflow. 
- """ - container = Container( - sections=[ - Section.Code( - code=(Op.PUSH0 * stack_height) + Op.JUMPF[0], - max_stack_height=stack_height, - ), - ], - ) - stack_overflow = stack_height > MAX_RUNTIME_STACK_HEIGHT // 2 - eof_test( - container=container, - expect_exception=EOFException.STACK_OVERFLOW - if stack_overflow - else None, - ) - - -@pytest.mark.parametrize("stack_height_other", [1, 2, 512, 513, 1023]) -@pytest.mark.parametrize("stack_height", [1, 2, 512, 513, 1023]) -def test_jumpf_other_stack_overflow( - eof_test: EOFTestFiller, stack_height: int, stack_height_other: int -) -> None: - """ - Test JUMPF instruction jumping to other section causing validation time - stack overflow. - """ - container = Container( - sections=[ - Section.Code( - code=(Op.PUSH0 * stack_height) + Op.JUMPF[1], - max_stack_height=stack_height, - ), - Section.Code( - code=(Op.PUSH0 * stack_height_other) + Op.STOP, - max_stack_height=stack_height_other, - ), - ], - ) - stack_overflow = ( - stack_height + stack_height_other > MAX_RUNTIME_STACK_HEIGHT - ) - eof_test( - container=container, - expect_exception=EOFException.STACK_OVERFLOW - if stack_overflow - else None, - ) - - -@pytest.mark.parametrize("code_inputs", [0, 3]) -@pytest.mark.parametrize("stack_height", [0, 2, 3, 4]) -def test_jumpf_to_non_returning( - eof_test: EOFTestFiller, stack_height: int, code_inputs: int -) -> None: - """Test JUMPF jumping to a non-returning function.""" - container = Container( - sections=[ - Section.Code( - code=Op.PUSH0 * stack_height + Op.JUMPF[1], - max_stack_height=stack_height, - ), - Section.Code( - code=Op.STOP, - code_inputs=code_inputs, - max_stack_height=code_inputs, - ), - ], - ) - eof_test( - container=container, - expect_exception=EOFException.STACK_UNDERFLOW - if stack_height < code_inputs - else None, - ) - - -@pytest.mark.parametrize("code_inputs", [0, 1, 3, 5]) -def test_jumpf_to_non_returning_variable_stack( - eof_test: EOFTestFiller, code_inputs: int -) -> None: - """ - Test JUMPF jumping to a non-returning function with stack depending on - RJUMPI. 
- """ - container = Container( - sections=[ - Section.Code( - code=Op.PUSH0 + Op.RJUMPI[2](0) + Op.PUSH0 * 2 + Op.JUMPF[1], - max_stack_height=3, - ), - Section.Code( - code=Op.INVALID, - code_inputs=code_inputs, - max_stack_height=code_inputs, - ), - ], - ) - eof_test( - container=container, - expect_exception=EOFException.STACK_UNDERFLOW - if code_inputs >= 3 - else None, - ) - - -@pytest.mark.parametrize("code_inputs", [0, 3]) -@pytest.mark.parametrize("code_outputs", [1, 2]) -@pytest.mark.parametrize("stack_height", [0, 1, 2, 3, 4, 5]) -def test_jumpf_to_returning( - eof_test: EOFTestFiller, - code_inputs: int, - code_outputs: int, - stack_height: int, -) -> None: - """Test JUMPF jumping to a returning function.""" - exceptions = [] - if ( - code_inputs > stack_height - or (stack_height - code_inputs + code_outputs) < 2 - ): - exceptions.append(EOFException.STACK_UNDERFLOW) - if stack_height - code_inputs + code_outputs > 2: - exceptions.append(EOFException.STACK_HIGHER_THAN_OUTPUTS) - - third_cs_stack_height = ( - code_inputs if code_inputs > code_outputs else code_outputs - ) - third_cs = None - if code_outputs < code_inputs: - third_cs = Op.POP * (code_inputs - code_outputs) + Op.RETF - else: - third_cs = Op.PUSH0 * (code_outputs - code_inputs) + Op.RETF - - container = Container( - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=2), - Section.Code( - code=Op.PUSH0 * stack_height + Op.JUMPF[2], code_outputs=2 - ), - Section.Code( - code=third_cs, - code_inputs=code_inputs, - code_outputs=code_outputs, - max_stack_height=third_cs_stack_height, - ), - ], - ) - - eof_test( - container=container, - expect_exception=exceptions if exceptions else None, - ) - - -@pytest.mark.parametrize("code_inputs", [0, 1, 3, 5]) -@pytest.mark.parametrize("code_outputs", [1, 3]) -@pytest.mark.parametrize("stack_increase", [0, 1, 2, 3, 4]) -def test_jumpf_to_returning_variable_stack_1( - eof_test: EOFTestFiller, - code_inputs: int, - code_outputs: int, - stack_increase: int, -) -> None: - """ - Test JUMPF with variable stack jumping to a returning function increasing - the stack. - """ - exception = None - if code_inputs >= 3 or code_outputs + 1 < 3: # 3 = Section 1's max stack - exception = EOFException.STACK_UNDERFLOW - if 3 - code_inputs + code_outputs > 3: - exception = EOFException.STACK_HIGHER_THAN_OUTPUTS - - container = Container( - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=3), - Section.Code( - code=Op.PUSH0 + Op.RJUMPI[2](0) + Op.PUSH0 * 2 + Op.JUMPF[2], - code_outputs=3, - max_stack_height=3, - ), - Section.Code( - code=Op.PUSH0 * stack_increase + Op.RETF, - code_inputs=code_inputs, - code_outputs=code_outputs, - max_stack_height=code_inputs - if code_inputs > code_outputs - else code_outputs, - ), - ], - ) - - eof_test( - container=container, - expect_exception=exception, - ) - - -@pytest.mark.parametrize("code_inputs", [1, 3, 5]) -@pytest.mark.parametrize("code_outputs", [1]) -@pytest.mark.parametrize("stack_decrease", [0, 2, 4]) -def test_jumpf_to_returning_variable_stack_2( - eof_test: EOFTestFiller, - code_inputs: int, - code_outputs: int, - stack_decrease: int, -) -> None: - """ - Test JUMPF with variable stack jumping to a returning function decreasing - the stack. 
- """ - exceptions = [] - if code_inputs >= 3 or code_outputs + 1 < 3: # 3 = Section 1's max stack - exceptions.append(EOFException.STACK_UNDERFLOW) - if 3 - code_inputs + code_outputs > 2: - exceptions.append(EOFException.STACK_HIGHER_THAN_OUTPUTS) - - container = Container( - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=2), - Section.Code( - code=Op.PUSH0 + Op.RJUMPI[2](0) + Op.PUSH0 * 2 + Op.JUMPF[2], - code_outputs=2, - max_stack_height=3, - ), - Section.Code( - code=Op.POP * stack_decrease + Op.RETF, - code_inputs=code_inputs, - code_outputs=code_outputs, - max_stack_height=code_inputs - if code_inputs > code_outputs - else code_outputs, - ), - ], - ) - - eof_test( - container=container, - expect_exception=exceptions, - ) - - -def test_jumpf_to_returning_variable_stack_3(eof_test: EOFTestFiller) -> None: - """ - Test JUMPF with variable stack jumping to a returning function increasing - the stack. - """ - container = Container( - sections=[ - Section.Code(code=Op.CALLF[1] + Op.STOP, max_stack_height=2), - Section.Code( - code=Op.PUSH0 - + Op.PUSH1[0] - + Op.RJUMPI[2] - + Op.PUSH0 * 2 - + Op.JUMPF[2], - code_outputs=2, - max_stack_height=3, - ), - Section.Code( - code=Op.PUSH0 + Op.RETF, - code_outputs=1, - max_stack_height=1, - ), - ], - ) - - eof_test( - container=container, - expect_exception=EOFException.STACK_HIGHER_THAN_OUTPUTS, - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="underflow_2", - sections=[ - Section.Code( - code=Op.CALLF[1] + Op.STOP, - max_stack_height=2, - ), - Section.Code( - code=Op.JUMPF[2], - code_outputs=2, - max_stack_height=0, - ), - Section.Code( - code=Op.PUSH0 + Op.RETF, - code_inputs=1, - code_outputs=2, - max_stack_height=2, - ), - ], - ), - Container( - name="underflow_3", - sections=[ - Section.Code( - code=Op.JUMPF[1], - ), - Section.Code( - code=Op.REVERT(0, 0), - code_inputs=1, - max_stack_height=3, - ), - ], - ), - Container( - name="underflow_variable_stack_4", - sections=[ - Section.Code( - code=Op.CALLF[1] + Op.STOP, - max_stack_height=3, - ), - Section.Code( - code=Op.PUSH0 - + Op.RJUMPI[2](0) - + Op.PUSH0 - + Op.PUSH0 - + Op.JUMPF[2], - code_outputs=3, - max_stack_height=3, - ), - Section.Code( - code=Op.POP + Op.POP + Op.RETF, - code_inputs=5, - code_outputs=3, - max_stack_height=5, - ), - ], - ), - Container( - name="underflow_variable_stack_6", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.RJUMPI[2](0) - + Op.PUSH0 - + Op.PUSH0 - + Op.JUMPF[1], - max_stack_height=3, - ), - Section.Code( - code=Op.REVERT(0, 0), - code_inputs=4, - max_stack_height=6, - ), - ], - ), - Container( - name="underflow_variable_stack_7", - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.RJUMPI[2](0) - + Op.PUSH0 - + Op.PUSH0 - + Op.JUMPF[1], - max_stack_height=3, - ), - Section.Code( - code=Op.REVERT(0, 0), - code_inputs=3, - max_stack_height=5, - ), - ], - ), - Container( - name="underflow_variable_stack_8", - sections=[ - Section.Code( - code=Op.PUSH0 * 3 + Op.RJUMPI[1](0) + Op.POP + Op.JUMPF[1], - max_stack_height=3, - ), - Section.Code( - code=Op.REVERT(0, 0), - code_inputs=3, - max_stack_height=5, - ), - ], - ), - ], - ids=lambda x: x.name, -) -def test_jumpf_stack_underflow_examples( - eof_test: EOFTestFiller, container: Container -) -> None: - """Test JUMPF instruction causing validation time stack underflow.""" - eof_test( - container=container, expect_exception=EOFException.STACK_UNDERFLOW - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_nonreturning_validation.py 
b/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_nonreturning_validation.py deleted file mode 100644 index 5713c4a69f..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_nonreturning_validation.py +++ /dev/null @@ -1,313 +0,0 @@ -"""EOF validation tests for non-returning code sections.""" - -import pytest -from execution_testing import Bytecode, EOFException, EOFTestFiller, Op -from execution_testing.test_types.eof.v1 import ( - NON_RETURNING_SECTION, - Container, - ContainerKind, - Section, -) - -from .. import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-6206.md" -REFERENCE_SPEC_VERSION = "2f365ea0cd58faa6e26013ea77ce6d538175f7d0" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.parametrize( - "code", - [ - pytest.param(Op.STOP, id="STOP"), - pytest.param(Op.INVALID, id="INVALID"), - pytest.param( - Op.ADDRESS + Op.POP + Op.INVALID, id="ADDRESS_POP_INVALID" - ), - pytest.param(Op.RETURN(0, 0), id="RETURN"), - pytest.param(Op.RETF, id="RETF"), - pytest.param(Op.PUSH0 + Op.RETF, id="PUSH0_RETF"), - ], -) -@pytest.mark.parametrize( - "outputs", - [0, 1, 0x7F, 0x81, 0xFF], -) -def test_first_section_returning( - eof_test: EOFTestFiller, code: Bytecode, outputs: int -) -> None: - """ - Test EOF validation failing because the first section is not non-returning. - """ - eof_test( - container=Container( - sections=[Section.Code(code, code_outputs=outputs)], - validity_error=EOFException.INVALID_FIRST_SECTION_TYPE, - ) - ) - - -@pytest.mark.parametrize( - "code", - [ - pytest.param(Op.INVALID, id="INVALID"), - pytest.param(Op.RETF, id="RETF"), - pytest.param(Op.POP + Op.RETF, id="POP_RETF"), - ], -) -@pytest.mark.parametrize( - "inputs", - [1, 2, 0x7F, 0x80, 0x81, 0xFF], -) -@pytest.mark.parametrize( - "outputs", - [ - 0, - NON_RETURNING_SECTION, - ], -) -def test_first_section_with_inputs( - eof_test: EOFTestFiller, code: Bytecode, inputs: int, outputs: int -) -> None: - """ - Test EOF validation failing because the first section has non-zero number - of inputs. - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code, - code_inputs=inputs, - code_outputs=outputs, - max_stack_height=max(inputs, outputs), - ) - ], - validity_error=EOFException.INVALID_FIRST_SECTION_TYPE, - ) - ) - - -@pytest.mark.parametrize( - "code_section", - [ - pytest.param(Section.Code(Op.STOP, code_outputs=0), id="stop0"), - pytest.param( - Section.Code(Op.PUSH0 + Op.STOP, code_outputs=1), id="stop1" - ), - pytest.param(Section.Code(Op.INVALID, code_outputs=0), id="invalid0"), - pytest.param( - Section.Code(Op.PUSH0 + Op.INVALID, code_outputs=1), id="invalid1" - ), - pytest.param( - Section.Code(Op.RETURN(0, 0), code_outputs=0), id="return0" - ), - pytest.param( - Section.Code(Op.PUSH0 + Op.RETURN(0, 0), code_outputs=1), - id="return1", - ), - pytest.param( - Section.Code(Op.REVERT(0, 0), code_outputs=0), id="revert0" - ), - pytest.param( - Section.Code(Op.PUSH0 + Op.REVERT(0, 0), code_outputs=1), - id="revert1", - ), - pytest.param(Section.Code(Op.RJUMP[-3], code_outputs=0), id="rjump0"), - pytest.param( - Section.Code(Op.PUSH0 + Op.RJUMP[-3], code_outputs=1), id="rjump1" - ), - ], -) -def test_returning_section_not_returning( - eof_test: EOFTestFiller, code_section: Section -) -> None: - """ - Test EOF validation failing due to returning section with no RETF or - JUMPF-to-returning. 
- """ - eof_test( - container=Container( - sections=[ - Section.Code( - Op.CALLF[1] + Op.STOP, - max_stack_height=code_section.code_outputs, - ), - code_section, - ], - validity_error=EOFException.INVALID_NON_RETURNING_FLAG, - ), - ) - - -@pytest.mark.parametrize( - "code_section", - [ - pytest.param( - Section.Code(Op.RETURNCODE[0](0, 0), code_outputs=0), - id="returncode0", - ), - pytest.param( - Section.Code(Op.PUSH0 + Op.RETURNCODE[0](0, 0), code_outputs=1), - id="returncode1", - ), - ], -) -def test_returning_section_returncode( - eof_test: EOFTestFiller, code_section: Section -) -> None: - """ - Test EOF validation failing because a returning section has no RETF or - JUMPF-to-returning - RETURNCODE version. - """ - eof_test( - container=Container( - sections=[ - Section.Code( - Op.CALLF[1] + Op.INVALID, - max_stack_height=code_section.code_outputs, - ), - code_section, - ] - + [Section.Container(Container.Code(Op.INVALID))], - validity_error=EOFException.INVALID_NON_RETURNING_FLAG, - kind=ContainerKind.INITCODE, - ) - ) - - -first = pytest.mark.parametrize("first", [True, False]) -code_prefix = pytest.mark.parametrize( - "code_prefix", - [ - Bytecode(), - Op.PUSH0, - pytest.param(Op.PUSH0 * NON_RETURNING_SECTION, id="PUSH0x0x80"), - ], -) - - -@first -@code_prefix -def test_retf_in_nonreturning( - eof_test: EOFTestFiller, first: bool, code_prefix: Bytecode -) -> None: - """ - Test EOF validation failing due to non-returning section with the RETF - instruction. - """ - sections = [ - Section.Code(code_prefix + Op.RETF, code_outputs=NON_RETURNING_SECTION) - ] - if ( - not first - ): # Prefix sections with additional valid JUMPF to invalid section - sections = [Section.Code(Op.JUMPF[1])] + sections - eof_test( - container=Container( - sections=sections, - validity_error=EOFException.INVALID_NON_RETURNING_FLAG, - ) - ) - - -@first -@code_prefix -def test_jumpf_in_nonreturning( - eof_test: EOFTestFiller, first: bool, code_prefix: Bytecode -) -> None: - """ - Test EOF validation failing due to non-returning section with the JUMPF - instruction. 
- """ - invalid_section = Section.Code( - code_prefix + Op.JUMPF[1 if first else 2], - code_outputs=NON_RETURNING_SECTION, - ) - target_section = Section.Code(Op.RETF, code_outputs=0) - sections = [invalid_section, target_section] - if ( - not first - ): # Prefix sections with additional valid JUMPF to invalid section - sections = [Section.Code(Op.JUMPF[1])] + sections - - eof_test( - container=Container( - sections=sections, - validity_error=EOFException.INVALID_NON_RETURNING_FLAG, - ) - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="0_to_1", - sections=[ - Section.Code( - Op.CALLF[1], - ), - Section.Code( - Op.STOP, - ), - ], - ), - Container( - name="self_0", - sections=[ - Section.Code( - Op.CALLF[0] + Op.STOP, - ) - ], - ), - Container( - name="self_1", - sections=[ - Section.Code( - Op.JUMPF[1], - ), - Section.Code( - Op.CALLF[1] + Op.STOP, - ), - ], - ), - Container( - name="1_to_0", - sections=[ - Section.Code( - Op.CALLF[1] + Op.STOP, - ), - Section.Code( - Op.CALLF[0] + Op.RETF, - code_outputs=0, - ), - ], - ), - Container( - name="1_to_2", - sections=[ - Section.Code( - Op.CALLF[1] + Op.STOP, - ), - Section.Code( - Op.CALLF[2] + Op.RETF, - code_outputs=0, - ), - Section.Code( - Op.INVALID, - ), - ], - ), - ], - ids=lambda x: x.name, -) -def test_callf_to_nonreturning( - eof_test: EOFTestFiller, container: Container -) -> None: - """Test EOF validation failing due to CALLF to non-returning section.""" - eof_test( - container=container, - expect_exception=EOFException.CALLF_TO_NON_RETURNING, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/__init__.py b/tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/__init__.py deleted file mode 100644 index 01a78b4734..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -""" -Test cases for EIP-663 SWAPN, DUPN and EXCHANGE instructions - [EIP-663](https://eips.ethereum.org/EIPS/eip-663) defines new stack - manipulation instructions that allow accessing the stack at higher depths. - Opcodes introduced: `DUPN` (`0xE6`), `SWAPN` (`0xE7`), `EXCHANGEN` - (`0xE8`). -""" - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-663.md" -REFERENCE_SPEC_VERSION = "b658bb87fe039d29e9475d5cfaebca9b92e0fca2" diff --git a/tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_dupn.py b/tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_dupn.py deleted file mode 100644 index d466c397a3..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_dupn.py +++ /dev/null @@ -1,174 +0,0 @@ -""" -DUPN instruction tests - Tests for DUPN instruction in - [EIP-663: SWAPN, DUPN and EXCHANGE instructions](https://eips.ethereum.org/EIPS/eip-663). -""" - -import pytest -from execution_testing import ( - Account, - Alloc, - Environment, - EOFException, - EOFStateTestFiller, - EOFTestFiller, - Op, - StateTestFiller, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container, Section -from execution_testing.test_types.eof.v1.constants import ( - MAX_STACK_INCREASE_LIMIT, -) - -from .. import EOF_FORK_NAME -from . 
import REFERENCE_SPEC_GIT_PATH, REFERENCE_SPEC_VERSION - -REFERENCE_SPEC_GIT_PATH = REFERENCE_SPEC_GIT_PATH -REFERENCE_SPEC_VERSION = REFERENCE_SPEC_VERSION - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -def test_dupn_all_valid_immediates(eof_state_test: EOFStateTestFiller) -> None: - """Test case for all valid DUPN immediates.""" - n = 2**8 - values = range(0xD00, 0xD00 + n) - - eof_code = Container( - sections=[ - Section.Code( - code=sum(Op.PUSH2[v] for v in values) - + sum(Op.SSTORE(x, Op.DUPN[x]) for x in range(0, n)) - + Op.STOP, - ) - ], - ) - - post = Account( - storage=dict(zip(range(0, n), reversed(values), strict=False)) - ) - - eof_state_test( - tx_sender_funding_amount=1_000_000_000, - container=eof_code, - container_post=post, - ) - - -@pytest.mark.parametrize( - "stack_height,max_stack_height", - [ - # [0, 0] is tested in test_all_opcodes_stack_underflow() - [0, 1], - [1, 1], - [1, 2], - [2**8 - 1, 2**8 - 1], - [2**8 - 1, 2**8], - ], -) -def test_dupn_stack_underflow( - stack_height: int, - max_stack_height: int, - eof_test: EOFTestFiller, -) -> None: - """Test case out of bounds DUPN immediate.""" - eof_code = Container( - sections=[ - Section.Code( - code=sum(Op.PUSH2[v] for v in range(0, stack_height)) - + Op.DUPN[stack_height] - + Op.STOP, - max_stack_height=max_stack_height, - ) - ], - ) - eof_test( - container=eof_code, - expect_exception=EOFException.STACK_UNDERFLOW, - ) - - -@pytest.mark.parametrize( - "dupn_operand,max_stack_height,expect_exception", - [ - [0, MAX_STACK_INCREASE_LIMIT, EOFException.INVALID_MAX_STACK_INCREASE], - [ - 0, - MAX_STACK_INCREASE_LIMIT + 1, - EOFException.MAX_STACK_INCREASE_ABOVE_LIMIT, - ], - [ - 2**8 - 1, - MAX_STACK_INCREASE_LIMIT, - EOFException.INVALID_MAX_STACK_INCREASE, - ], - [ - 2**8 - 1, - MAX_STACK_INCREASE_LIMIT + 1, - EOFException.MAX_STACK_INCREASE_ABOVE_LIMIT, - ], - ], -) -def test_dupn_stack_overflow( - dupn_operand: int, - max_stack_height: int, - expect_exception: EOFException, - eof_test: EOFTestFiller, -) -> None: - """Test case where DUPN produces an stack overflow.""" - eof_code = Container( - sections=[ - Section.Code( - code=sum( - Op.PUSH2[v] for v in range(0, MAX_STACK_INCREASE_LIMIT) - ) - + Op.DUPN[dupn_operand] - + Op.STOP, - max_stack_height=max_stack_height, - ) - ], - ) - eof_test( - container=eof_code, - expect_exception=expect_exception, - ) - - -@pytest.mark.parametrize( - "dupn_arg,stack_height", - [pytest.param(5, 9, id="5_of_9"), pytest.param(12, 30, id="12_of_30")], -) -def test_dupn_simple( - stack_height: int, - dupn_arg: int, - pre: Alloc, - state_test: StateTestFiller, -) -> None: - """Test case for simple DUPN operations.""" - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=sum(Op.PUSH2[v] for v in range(stack_height, 0, -1)) - + Op.DUPN[dupn_arg] - + sum( - (Op.PUSH1(v) + Op.SSTORE) - for v in range(0, stack_height + 1) - ) - + Op.STOP, - max_stack_height=stack_height + 2, - ) - ], - ) - ) - - storage = {v: v for v in range(1, stack_height + 1)} - storage[0] = dupn_arg + 1 - print(storage) - post = {contract_address: Account(storage=storage)} - - tx = Transaction(to=contract_address, sender=sender, gas_limit=10_000_000) - - state_test(env=Environment(), pre=pre, post=post, tx=tx) diff --git a/tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_exchange.py b/tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_exchange.py deleted file mode 100644 index a4e9057159..0000000000 --- 
a/tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_exchange.py +++ /dev/null @@ -1,148 +0,0 @@ -""" -Tests [EIP-663: SWAPN, DUPN and EXCHANGE instructions](https://eips.ethereum.org/EIPS/eip-663). -""" - -import pytest -from execution_testing import ( - Account, - Alloc, - Environment, - EOFException, - EOFStateTestFiller, - EOFTestFiller, - Op, - StateTestFiller, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. import EOF_FORK_NAME -from . import REFERENCE_SPEC_GIT_PATH, REFERENCE_SPEC_VERSION - -REFERENCE_SPEC_GIT_PATH = REFERENCE_SPEC_GIT_PATH -REFERENCE_SPEC_VERSION = REFERENCE_SPEC_VERSION - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -def test_exchange_all_valid_immediates( - eof_state_test: EOFStateTestFiller, -) -> None: - """Test case for all valid EXCHANGE immediates.""" - n = 256 - s = 34 - values = range(0x3E8, 0x3E8 + s) - - eof_code = Container( - sections=[ - Section.Code( - code=sum(Op.PUSH2[v] for v in values) - + sum(Op.EXCHANGE[x] for x in range(0, n)) - + sum((Op.PUSH1[x] + Op.SSTORE) for x in range(0, s)) - + Op.STOP, - ) - ], - ) - - # this does the same full-loop exchange - values_rotated = list(range(0x3E8, 0x3E8 + s)) - for e in range(0, n): - a = (e >> 4) + 1 - b = (e & 0x0F) + 1 + a - temp = values_rotated[a] - values_rotated[a] = values_rotated[b] - values_rotated[b] = temp - - post = Account( - storage=dict(zip(range(0, s), reversed(values_rotated), strict=False)) - ) - - eof_state_test( - tx_sender_funding_amount=1_000_000_000, - container=eof_code, - container_post=post, - ) - - -@pytest.mark.parametrize( - "stack_height,x,y", - [ - # 2 and 3 are the lowest valid values for x and y, - # which translates to the zero immediate value. - # (0, 2, 3) is tested in test_all_opcodes_stack_underflow() - pytest.param(1, 2, 3, id="stack_height=1_n=1_m=1"), - pytest.param(2, 2, 3, id="stack_height=2_n=1_m=1"), - pytest.param(17, 2, 18, id="stack_height=17_n=1_m=16"), - pytest.param(17, 17, 18, id="stack_height=17_n=16_m=1"), - pytest.param(32, 17, 33, id="stack_height=32_n=16_m=16"), - ], -) -def test_exchange_stack_underflow( - eof_test: EOFTestFiller, - stack_height: int, - x: int, - y: int, -) -> None: - """Test case the EXCHANGE causing stack underflow.""" - eof_code = Container( - sections=[ - Section.Code( - code=sum(Op.PUSH2[v] for v in range(stack_height)) - + Op.EXCHANGE[x, y] - + Op.POP * stack_height - + Op.STOP, - max_stack_height=stack_height, - ) - ], - ) - - eof_test( - container=eof_code, - expect_exception=EOFException.STACK_UNDERFLOW, - ) - - -@pytest.mark.parametrize( - "m_arg,n_arg,extra_stack", - [ - pytest.param(0, 0, 3, id="m0_n0_extra3"), - pytest.param(2, 3, 7, id="m2_n3_extra7"), - ], -) -def test_exchange_simple( - m_arg: int, - n_arg: int, - extra_stack: int, - pre: Alloc, - state_test: StateTestFiller, -) -> None: - """Test case for simple EXCHANGE operations.""" - sender = pre.fund_eoa() - stack_height = m_arg + n_arg + 2 + extra_stack - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=sum(Op.PUSH2[v] for v in range(stack_height, 0, -1)) - + Op.EXCHANGE[m_arg << 4 | n_arg] - + sum( - (Op.PUSH1(v) + Op.SSTORE) - for v in range(1, stack_height + 1) - ) - + Op.STOP, - max_stack_height=stack_height + 1, - ) - ], - ) - ) - - storage = {v: v for v in range(1, stack_height + 1)} - first = m_arg + 2 # one based index, plus m=0 means first non-top item - second = first + n_arg + 1 # n+1 past m - storage[first], storage[second] = 
storage[second], storage[first] - print(storage) - post = {contract_address: Account(storage=storage)} - - tx = Transaction(to=contract_address, sender=sender, gas_limit=10_000_000) - - state_test(env=Environment(), pre=pre, post=post, tx=tx) diff --git a/tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_swapn.py b/tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_swapn.py deleted file mode 100644 index 5f5559a45e..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_swapn.py +++ /dev/null @@ -1,154 +0,0 @@ -"""Tests [EIP-663: SWAPN, DUPN and EXCHANGE instructions](https://eips.ethereum.org/EIPS/eip-663).""" - -import pytest -from execution_testing import ( - Account, - Alloc, - Environment, - EOFException, - EOFStateTestFiller, - EOFTestFiller, - Op, - StateTestFiller, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container, Section -from execution_testing.test_types.eof.v1.constants import ( - MAX_STACK_INCREASE_LIMIT, -) - -from .. import EOF_FORK_NAME -from . import REFERENCE_SPEC_GIT_PATH, REFERENCE_SPEC_VERSION - -REFERENCE_SPEC_GIT_PATH = REFERENCE_SPEC_GIT_PATH -REFERENCE_SPEC_VERSION = REFERENCE_SPEC_VERSION - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -def test_swapn_all_valid_immediates( - eof_state_test: EOFStateTestFiller, -) -> None: - """Test case for all valid SWAPN immediates.""" - n = 256 - values = range(0x500, 0x500 + 257) - - eof_code = Container( - sections=[ - Section.Code( - code=sum(Op.PUSH2[v] for v in values) - + sum(Op.SSTORE(x, Op.SWAPN[0xFF - x]) for x in range(0, n)) - + Op.STOP, - ) - ], - ) - - values_rotated = list(values[1:]) + [values[0]] - post = Account( - storage=dict(zip(range(0, n), reversed(values_rotated), strict=False)) - ) - - eof_state_test( - tx_sender_funding_amount=1_000_000_000, - container=eof_code, - container_post=post, - ) - - -@pytest.mark.parametrize( - "swapn_operand", - [ - 0, - 2**8 - 1, - ], -) -def test_swapn_on_max_stack( - swapn_operand: int, - eof_test: EOFTestFiller, -) -> None: - """Test case out of bounds SWAPN (max stack).""" - eof_code = Container( - sections=[ - Section.Code( - code=sum( - Op.PUSH2[v] for v in range(0, MAX_STACK_INCREASE_LIMIT) - ) - + Op.SWAPN[swapn_operand] - + Op.STOP, - ) - ], - ) - eof_test( - container=eof_code, - ) - - -@pytest.mark.parametrize( - "stack_height", - [ - 0, - 1, - 21, - 2**8 - 1, - ], -) -def test_swapn_stack_underflow( - stack_height: int, - eof_test: EOFTestFiller, -) -> None: - """Test case out of bounds SWAPN (underflow).""" - eof_code = Container( - sections=[ - Section.Code( - code=sum(Op.PUSH2[v] for v in range(0, stack_height)) - + Op.SWAPN[stack_height] - + Op.STOP, - # This is also tested in test_all_opcodes_stack_underflow() - # so make it differ by the declared stack height. 
- max_stack_height=stack_height + 1, - ) - ], - ) - eof_test( - container=eof_code, - expect_exception=EOFException.STACK_UNDERFLOW, - ) - - -@pytest.mark.parametrize( - "swapn_arg,stack_height", - [pytest.param(5, 9, id="5_of_9"), pytest.param(12, 30, id="12_of_30")], -) -def test_swapn_simple( - stack_height: int, - swapn_arg: int, - pre: Alloc, - state_test: StateTestFiller, -) -> None: - """Test case for simple SWAPN operations.""" - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=sum(Op.PUSH2[v] for v in range(stack_height, 0, -1)) - + Op.SWAPN[swapn_arg] - + sum( - (Op.PUSH1(v) + Op.SSTORE) - for v in range(1, stack_height + 1) - ) - + Op.STOP, - max_stack_height=stack_height + 1, - ) - ], - ) - ) - - storage = {v: v for v in range(1, stack_height + 1)} - storage[1], storage[swapn_arg + 2] = storage[swapn_arg + 2], storage[1] - print(storage) - post = {contract_address: Account(storage=storage)} - - tx = Transaction(to=contract_address, sender=sender, gas_limit=10_000_000) - - state_test(env=Environment(), pre=pre, post=post, tx=tx) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/__init__.py b/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/__init__.py deleted file mode 100644 index 672c24a740..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -""" -Test cases for EIP-7069 Revamped CALL instructions - [EIP-7069: Revamped CALL instructions](https://eips.ethereum.org/EIPS/eip-7069) - proposes modifications to `CALL` instructions to align with the - structured EOF format. Opcodes introduced: `EXTCALL` (`0xF8`), - `EXTDELEGATECALL` (`0xF9`), `EXTSTATICCALL` (`0xFB`), `RETURNDATALOAD` - (`0xF7`). -""" - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7069.md" -REFERENCE_SPEC_VERSION = "1795943aeacc86131d5ab6bb3d65824b3b1d4cad" diff --git a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/helpers.py b/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/helpers.py deleted file mode 100644 index 16e1a8b0dc..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/helpers.py +++ /dev/null @@ -1,34 +0,0 @@ -"""EOF extcall tests helpers.""" - -import itertools - -"""Storage addresses for common testing fields""" -_slot = itertools.count() -next(_slot) # don't use slot 0 -slot_code_worked = next(_slot) -slot_eof_target_call_status = next(_slot) -slot_legacy_target_call_status = next(_slot) -slot_eof_target_returndata = next(_slot) -slot_eof_target_returndatasize = next(_slot) -slot_legacy_target_returndatasize = next(_slot) -slot_delegate_code_worked = next(_slot) -slot_call_status = next(_slot) -slot_calldata_1 = next(_slot) -slot_calldata_2 = next(_slot) - -slot_last_slot = next(_slot) - -"""Storage value indicating an abort""" -value_exceptional_abort_canary = 0x1984 - -"""Storage values for common testing fields""" -value_code_worked = 0x2015 - -"""Memory and storage value for calldata""" -value_calldata_1 = ( - 0xC1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1C1 -) -value_calldata_2 = ( - 0xC2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2C2 -) -size_calldata = 32 diff --git a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/spec.py b/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/spec.py deleted file mode 100644 index f11272b7ef..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/spec.py +++ /dev/null @@ -1,7 +0,0 @@ -"""EOF V1 Constants used throughout all tests.""" - -LEGACY_CALL_FAILURE = 0 
-LEGACY_CALL_SUCCESS = 1 -EXTCALL_SUCCESS = 0 -EXTCALL_REVERT = 1 -EXTCALL_FAILURE = 2 diff --git a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_address_space_extension.py b/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_address_space_extension.py deleted file mode 100644 index f0329951bd..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_address_space_extension.py +++ /dev/null @@ -1,258 +0,0 @@ -"""Tests the "Address Space Extension" aspect of EXT*CALL.""" - -import itertools - -import pytest -from execution_testing import ( - Account, - Address, - Alloc, - Environment, - Op, - StateTestFiller, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. import EOF_FORK_NAME -from .helpers import value_exceptional_abort_canary -from .spec import EXTCALL_REVERT, EXTCALL_SUCCESS, LEGACY_CALL_SUCCESS - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7069.md" -REFERENCE_SPEC_VERSION = "1795943aeacc86131d5ab6bb3d65824b3b1d4cad" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - -_slot = itertools.count(1) -slot_top_level_call_status = next(_slot) -slot_target_call_status = next(_slot) -slot_target_returndata = next(_slot) - - -@pytest.mark.parametrize( - "target_address", - ( - pytest.param(b"", id="zero"), - pytest.param(b"\xc0\xde", id="short"), - pytest.param(b"\x78" * 20, id="mid_20"), - pytest.param(b"\xff" * 20, id="max_20"), - pytest.param(b"\x01" + (b"\x00" * 20), id="min_ase"), - pytest.param(b"\x5a" * 28, id="mid_ase"), - pytest.param(b"\x5a" * 32, id="full_ase"), - pytest.param(b"\xff" * 32, id="max_ase"), - ), -) -@pytest.mark.parametrize( - "target_account_type", - ( - "empty", - "EOA", - "LegacyContract", # Hard-codes an address in pre-alloc - "EOFContract", # Hard-codes an address in pre-alloc - ), - ids=lambda x: x, -) -@pytest.mark.parametrize( - "target_opcode", - ( - Op.CALL, - Op.CALLCODE, - Op.STATICCALL, - Op.DELEGATECALL, - Op.EXTCALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, - ), -) -def test_address_space_extension( - state_test: StateTestFiller, - pre: Alloc, - target_address: bytes, - target_opcode: Op, - target_account_type: str, -) -> None: - """ - Test contacts with possibly extended address and fail if address is too - large. 
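# Illustrative sketch (not code from this patch): how the spec.py constants above
# map onto EIP-7069 behaviour, and the address-space-extension rule the test below
# encodes.  Legacy CALL-family opcodes push 1 on success and 0 on failure; EXT*CALL
# refines this to 0 = success, 1 = revert / "light" failure, 2 = callee failure.
# Legacy calls truncate an oversized target to its low 20 bytes, while an EXT*CALL
# whose target has more than 20 significant bytes aborts the calling frame.
# Both helpers below are hypothetical, for illustration only.
def describe_extcall_status(status: int) -> str:
    return {0: "success", 1: "revert / light failure", 2: "callee failure"}.get(status, "unknown")

def is_ase(address: bytes) -> bool:
    """True if the address has significant bytes beyond the low 20 (Address Space Extension)."""
    return len(address) > 20 and any(address[:-20])

assert describe_extcall_status(0) == "success"
assert is_ase(b"\x01" + b"\x00" * 20) and not is_ase(b"\xff" * 20)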
- """ - env = Environment() - - ase_address = len(target_address) > 20 - stripped_address = Address(target_address[-20:], left_padding=True) - if ase_address and target_address[0] == b"00": - raise ValueError( - "Test instrumentation requires target addresses trim leading zeros" - ) - - ase_ready_opcode = ( - False - if target_opcode - in [Op.CALL, Op.CALLCODE, Op.DELEGATECALL, Op.STATICCALL] - else True - ) - - sender = pre.fund_eoa() - - address_caller = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_target_call_status, - target_opcode(address=Op.CALLDATALOAD(0)), - ) - + Op.RETURNDATACOPY(0, 0, Op.RETURNDATASIZE) - + Op.SSTORE(slot_target_returndata, Op.MLOAD(0)) - + Op.STOP, - code_inputs=0, - ) - ], - ) - if ase_ready_opcode - else Op.SSTORE( - slot_target_call_status, - target_opcode(address=Op.CALLDATALOAD(0)), - ) - + Op.RETURNDATACOPY(0, 0, Op.RETURNDATASIZE) - + Op.SSTORE(slot_target_returndata, Op.MLOAD(0)) - + Op.STOP, - storage={ - slot_target_call_status: value_exceptional_abort_canary, - slot_target_returndata: value_exceptional_abort_canary, - }, - ) - - address_entry_point = pre.deploy_contract( - Op.MSTORE(0, Op.PUSH32(target_address)) - + Op.SSTORE( - slot_top_level_call_status, - Op.CALL(50000, address_caller, 0, 0, 32, 0, 0), - ) - + Op.STOP(), - storage={ - slot_top_level_call_status: value_exceptional_abort_canary, - }, - ) - - match target_account_type: - case "empty": - # add no account - pass - case "EOA": - pre.fund_address(stripped_address, 10**18) - # TODO: we could use pre.fund_eoa here with nonce!=0. - case "LegacyContract": - pre[stripped_address] = Account( - code=Op.MSTORE(0, Op.ADDRESS) + Op.RETURN(0, 32), - balance=0, - nonce=0, - ) - case "EOFContract": - pre[stripped_address] = Account( - code=Container( - sections=[ - Section.Code( - code=Op.MSTORE(0, Op.ADDRESS) + Op.RETURN(0, 32), - ) - ], - ), - balance=0, - nonce=0, - ) - - caller_storage: dict[int, int | bytes | Address] = {} - match target_account_type: - case "empty" | "EOA": - if ase_address and ase_ready_opcode: - caller_storage[slot_target_call_status] = ( - value_exceptional_abort_canary - ) - caller_storage[slot_target_returndata] = ( - value_exceptional_abort_canary - ) - elif target_opcode == Op.EXTDELEGATECALL: - caller_storage[slot_target_call_status] = EXTCALL_REVERT - caller_storage[slot_target_returndata] = 0 - else: - caller_storage[slot_target_call_status] = ( - EXTCALL_SUCCESS - if ase_ready_opcode - else LEGACY_CALL_SUCCESS - ) - case "LegacyContract" | "EOFContract": - match target_opcode: - case Op.CALL | Op.STATICCALL: - caller_storage[slot_target_call_status] = ( - LEGACY_CALL_SUCCESS - ) - # CALL and STATICCALL call will call the stripped address - caller_storage[slot_target_returndata] = stripped_address - case Op.CALLCODE | Op.DELEGATECALL: - caller_storage[slot_target_call_status] = ( - LEGACY_CALL_SUCCESS - ) - # CALLCODE and DELEGATECALL call will call the stripped - # address but will change the sender to self - caller_storage[slot_target_returndata] = address_caller - case Op.EXTCALL | Op.EXTSTATICCALL: - # EXTCALL and EXTSTATICCALL will fault if calling an ASE - # address - if ase_address: - caller_storage[slot_target_call_status] = ( - value_exceptional_abort_canary - ) - caller_storage[slot_target_returndata] = ( - value_exceptional_abort_canary - ) - else: - caller_storage[slot_target_call_status] = ( - EXTCALL_SUCCESS - ) - caller_storage[slot_target_returndata] = ( - stripped_address - ) - case Op.EXTDELEGATECALL: - if 
ase_address: - caller_storage[slot_target_call_status] = ( - value_exceptional_abort_canary - ) - caller_storage[slot_target_returndata] = ( - value_exceptional_abort_canary - ) - elif target_account_type == "LegacyContract": - caller_storage[slot_target_call_status] = ( - EXTCALL_REVERT - ) - caller_storage[slot_target_returndata] = 0 - else: - caller_storage[slot_target_call_status] = ( - EXTCALL_SUCCESS - ) - # EXTDELEGATECALL call will call the stripped address - # but will change the sender to self - caller_storage[slot_target_returndata] = address_caller - - post = { - address_entry_point: Account( - storage={ - slot_top_level_call_status: EXTCALL_SUCCESS - if ase_ready_opcode and ase_address - else LEGACY_CALL_SUCCESS - } - ), - address_caller: Account(storage=caller_storage), - } - - tx = Transaction( - sender=sender, - to=address_entry_point, - gas_limit=50_000_000, - data="", - ) - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calldata.py b/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calldata.py deleted file mode 100644 index a877a136f8..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calldata.py +++ /dev/null @@ -1,601 +0,0 @@ -""" -Call data tests for EXT*CALL instructions - Tests for call data handling in - [EIP-7069: Revamped CALL instructions](https://eips.ethereum.org/EIPS/eip-7069). -""" - -import pytest -from execution_testing import ( - Account, - Alloc, - Environment, - Op, - StateTestFiller, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. import EOF_FORK_NAME -from . import REFERENCE_SPEC_GIT_PATH, REFERENCE_SPEC_VERSION -from .helpers import ( - size_calldata, - slot_call_status, - slot_calldata_1, - slot_calldata_2, - slot_code_worked, - slot_delegate_code_worked, - slot_eof_target_returndata, - value_calldata_1, - value_calldata_2, - value_code_worked, - value_exceptional_abort_canary, -) -from .spec import ( - EXTCALL_FAILURE, - EXTCALL_SUCCESS, - LEGACY_CALL_FAILURE, - LEGACY_CALL_SUCCESS, -) - -REFERENCE_SPEC_GIT_PATH = REFERENCE_SPEC_GIT_PATH -REFERENCE_SPEC_VERSION = REFERENCE_SPEC_VERSION - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -def right_pad_32(v: bytes) -> bytes: - """Take bytes and returns a 32 byte version right padded with zeros.""" - return v.ljust(32, b"\0") - - -@pytest.mark.parametrize("value", [0, 1]) -@pytest.mark.parametrize( - "memory", - [ - b"", - b"1234567890abcdef", - b"1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ=-", - b"1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ=-" - * 4, - ], - ids=lambda x: "size_%d" % len(x), -) -@pytest.mark.parametrize("offset", [0, 8, 24, 80]) -@pytest.mark.parametrize("length", [0, 8, 32, 48]) -def test_extcalls_inputdata( - state_test: StateTestFiller, - pre: Alloc, - value: int, - memory: bytes, - offset: int, - length: int, -) -> None: - """ - Tests call data into EXTCALL including multiple offset conditions. - - Caller pushes data into memory, then calls the target. Target writes 64 - bytes of call data to storage and a success byte. 
- """ - env = Environment() - - sender = pre.fund_eoa() - - address_returner = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=Op.SSTORE(slot_calldata_1, Op.CALLDATALOAD(0)) - + Op.SSTORE(slot_calldata_2, Op.CALLDATALOAD(32)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ), - ] - ), - ) - address_caller = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=Op.DATACOPY(0, 0, len(memory)) - + Op.SSTORE( - slot_call_status, - Op.EXTCALL(address_returner, offset, length, value), - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ), - Section.Data(data=memory), - ] - ), - storage={slot_call_status: value_exceptional_abort_canary}, - balance=10**9, - ) - - calldata = memory[offset : offset + length] - post = { - address_returner: Account( - storage={ - slot_code_worked: value_code_worked, - slot_calldata_1: right_pad_32(calldata[0:32]), - slot_calldata_2: right_pad_32(calldata[32:64]), - } - ), - address_caller: Account( - storage={ - slot_code_worked: value_code_worked, - slot_call_status: EXTCALL_SUCCESS, - } - ), - } - - tx = Transaction(to=address_caller, gas_limit=2_000_000, sender=sender) - - state_test( - env=env, - pre=pre, - tx=tx, - post=post, - ) - - -@pytest.mark.parametrize( - "memory", - [ - b"", - b"1234567890abcdef", - b"1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ=-", - b"1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ=-" - * 4, - ], - ids=lambda x: "size_%d" % len(x), -) -@pytest.mark.parametrize("offset", [0, 8, 24, 80]) -@pytest.mark.parametrize("length", [0, 8, 32, 48]) -def test_extdelegatecall_inputdata( - state_test: StateTestFiller, - pre: Alloc, - memory: bytes, - offset: int, - length: int, -) -> None: - """ - Tests call data into EXTDELEGATECALL including multiple offset conditions. - - Caller pushes data into memory, then calls the target. Target writes 64 - bytes of call data to storage and a success byte. 
- """ - env = Environment() - - sender = pre.fund_eoa() - - address_returner = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=Op.SSTORE(slot_calldata_1, Op.CALLDATALOAD(0)) - + Op.SSTORE(slot_calldata_2, Op.CALLDATALOAD(32)) - + Op.SSTORE(slot_delegate_code_worked, value_code_worked) - + Op.STOP - ), - ] - ), - ) - address_caller = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=Op.DATACOPY(0, 0, len(memory)) - + Op.SSTORE( - slot_call_status, - Op.EXTDELEGATECALL(address_returner, offset, length), - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ), - Section.Data(data=memory), - ] - ), - storage={slot_call_status: value_exceptional_abort_canary}, - balance=10**9, - ) - - calldata = memory[offset : offset + length] - post = { - address_returner: Account(storage={}), - address_caller: Account( - storage={ - slot_code_worked: value_code_worked, - slot_delegate_code_worked: value_code_worked, - slot_call_status: EXTCALL_SUCCESS, - slot_calldata_1: right_pad_32(calldata[0:32]), - slot_calldata_2: right_pad_32(calldata[32:64]), - } - ), - } - - tx = Transaction(to=address_caller, gas_limit=2_000_000, sender=sender) - - state_test( - env=env, - pre=pre, - tx=tx, - post=post, - ) - - -@pytest.mark.parametrize( - "memory", - [ - b"", - b"1234567890abcdef", - b"1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ=-", - b"1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ=-" - * 4, - ], - ids=lambda x: "size_%d" % len(x), -) -@pytest.mark.parametrize("offset", [0, 8, 24, 80]) -@pytest.mark.parametrize("length", [0, 8, 32, 48]) -def test_extstaticcall_inputdata( - state_test: StateTestFiller, - pre: Alloc, - memory: bytes, - offset: int, - length: int, -) -> None: - """ - Tests call data into EXTSTATICCALL including multiple offset conditions. - - Caller pushes data into memory, then calls the target. Target writes 64 - bytes of call data to storage and a success byte. - """ - env = Environment() - - sender = pre.fund_eoa() - - address_returner = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.RETURN(0, Op.CALLDATASIZE) - ), - ] - ), - ) - address_caller = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=Op.DATACOPY(0, 0, len(memory)) - + Op.SSTORE( - slot_call_status, - Op.EXTSTATICCALL(address_returner, offset, length), - ) - + Op.SSTORE(slot_calldata_1, Op.RETURNDATALOAD(0)) - + Op.SSTORE(slot_calldata_2, Op.RETURNDATALOAD(32)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ), - Section.Data(data=memory), - ] - ), - storage={slot_call_status: value_exceptional_abort_canary}, - balance=10**9, - ) - - calldata = memory[offset : offset + length] - post = { - address_returner: Account(storage={}), - address_caller: Account( - storage={ - slot_code_worked: value_code_worked, - slot_call_status: EXTCALL_SUCCESS, - slot_calldata_1: right_pad_32(calldata[0:32]), - slot_calldata_2: right_pad_32(calldata[32:64]), - } - ), - } - - tx = Transaction(to=address_caller, gas_limit=2_000_000, sender=sender) - - state_test( - env=env, - pre=pre, - tx=tx, - post=post, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.CALL, - Op.CALLCODE, - Op.DELEGATECALL, - Op.STATICCALL, - Op.EXTCALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, - ], -) -def test_calldata_remains_after_subcall( - state_test: StateTestFiller, - pre: Alloc, - opcode: Op, -) -> None: - """ - Tests call data remains after a call to another contract. 
- - Caller pushes data into memory, then calls the target. Target calls 3rd - contract. 3rd contract returns. Target writes calldata to storage. - """ - env = Environment() - - sender = pre.fund_eoa() - - address_sub_called = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_delegate_code_worked, value_code_worked - ) - + Op.STOP - ) - ] - ), - storage={slot_delegate_code_worked: value_exceptional_abort_canary}, - ) - called_code = ( - Op.MSTORE(0, value_calldata_2) - + Op.SSTORE(slot_call_status, value_exceptional_abort_canary) - + Op.SSTORE(slot_calldata_1, value_exceptional_abort_canary) - + Op.SSTORE(slot_code_worked, value_exceptional_abort_canary) - + Op.SSTORE( - slot_call_status, - opcode( - address=address_sub_called, - args_offset=0, - args_size=size_calldata, - ), - ) - + Op.SSTORE(slot_calldata_1, Op.CALLDATALOAD(0)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ) - match opcode: - case Op.CALL | Op.CALLCODE | Op.DELEGATECALL | Op.STATICCALL: - address_called = pre.deploy_contract(code=called_code) - case Op.EXTCALL | Op.EXTDELEGATECALL | Op.EXTSTATICCALL: - address_called = pre.deploy_contract( - Container( - sections=[ - Section.Code(code=called_code), - ] - ), - ) - address_caller = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=Op.MSTORE(0, value_calldata_1) - + Op.SSTORE( - slot_calldata_1, value_exceptional_abort_canary - ) - + Op.SSTORE( - slot_code_worked, value_exceptional_abort_canary - ) - + Op.SSTORE( - slot_call_status, - Op.EXTCALL(address_called, 0, size_calldata, 0), - ) - + Op.SSTORE(slot_calldata_1, Op.RETURNDATALOAD(0)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ), - ] - ), - storage={slot_call_status: value_exceptional_abort_canary}, - balance=10**9, - ) - - match opcode: - case Op.STATICCALL: - called_storage = { - slot_code_worked: value_code_worked, - slot_call_status: LEGACY_CALL_FAILURE, - slot_calldata_1: value_calldata_1, - } - sub_called_storage = { - slot_delegate_code_worked: value_exceptional_abort_canary, - } - case Op.DELEGATECALL | Op.CALLCODE: - called_storage = { - slot_code_worked: value_code_worked, - slot_delegate_code_worked: value_code_worked, - slot_call_status: LEGACY_CALL_SUCCESS, - slot_calldata_1: value_calldata_1, - } - sub_called_storage = { - slot_delegate_code_worked: value_exceptional_abort_canary, - } - case Op.CALL: - called_storage = { - slot_code_worked: value_code_worked, - slot_call_status: LEGACY_CALL_SUCCESS, - slot_calldata_1: value_calldata_1, - } - sub_called_storage = { - slot_delegate_code_worked: value_code_worked, - } - case Op.EXTSTATICCALL: - called_storage = { - slot_code_worked: value_code_worked, - slot_call_status: EXTCALL_FAILURE, - slot_calldata_1: value_calldata_1, - } - sub_called_storage = { - slot_delegate_code_worked: value_exceptional_abort_canary, - } - case Op.EXTDELEGATECALL: - called_storage = { - slot_code_worked: value_code_worked, - slot_delegate_code_worked: value_code_worked, - slot_call_status: EXTCALL_SUCCESS, - slot_calldata_1: value_calldata_1, - } - sub_called_storage = { - slot_delegate_code_worked: value_exceptional_abort_canary, - } - case Op.EXTCALL: - called_storage = { - slot_code_worked: value_code_worked, - slot_call_status: EXTCALL_SUCCESS, - slot_calldata_1: value_calldata_1, - } - sub_called_storage = { - slot_delegate_code_worked: value_code_worked, - } - case _: - raise ValueError(f"Unexpected opcode: {opcode}") - - post = { - address_caller: 
Account(storage={slot_code_worked: value_code_worked}), - address_called: Account(storage=called_storage), - address_sub_called: Account(storage=sub_called_storage), - } - - tx = Transaction(to=address_caller, gas_limit=4_000_000, sender=sender) - - state_test( - env=env, - pre=pre, - tx=tx, - post=post, - ) - - -@pytest.mark.parametrize( - "operation", [Op.EXTCALL, Op.EXTSTATICCALL, Op.EXTDELEGATECALL] -) -@pytest.mark.parametrize( - "offset_field", - [ - pytest.param(True, id="offset"), - pytest.param(False, id="size"), - ], -) -@pytest.mark.parametrize( - ("test_arg", "success"), - [ - pytest.param(0, True, id="zero"), - pytest.param(0xFF, True, id="8-bit"), - pytest.param(0x100, True, id="9-bit"), - pytest.param(0xFFFF, True, id="16-bit"), - pytest.param(0x10000, True, id="17-bit"), - pytest.param(0x1FFFF20, False, id="32-bit-mem-cost"), - pytest.param(0x2D412E0, False, id="33-bit-mem-cost"), - pytest.param(0xFFFFFFFF, False, id="32-bit"), - pytest.param(0x100000000, False, id="33-bit"), - pytest.param(0x1FFFFFFFF20, False, id="64-bit-mem-cost"), - pytest.param(0x2D413CCCF00, False, id="65-bit-mem-cost"), - pytest.param(0xFFFFFFFFFFFFFFFF, False, id="64-bit"), - pytest.param(0x10000000000000000, False, id="65-bit"), - pytest.param(0xFFFFFFFFFFFFFFFF, False, id="128-bit"), - pytest.param(0x10000000000000000, False, id="129-bit"), - pytest.param(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF, False, id="256-bit"), - ], -) -def test_extcalls_input_offset( - state_test: StateTestFiller, - pre: Alloc, - operation: Op, - offset_field: str, - test_arg: int, - success: bool, -) -> None: - """ - Tests call data into EXT*CALL including multiple offset conditions. - - Returner returns a success value, which caller stores. If memory expansion - cost is less than 2 billion gas call succeeds. Else whole transaction - aborts, leaving canaries in memory. - - The name id of `*-mem-cost` refers to the bit-length of the result of the - calculated memory expansion cost. Their length choice is designed to cause - problems on shorter bit-length representations with native integers. - - The `offset_field` param indicates what part of the input data arguments - are being tested, either the offset of the data in memory or the size of - the data in memory. - - The `test_arg` param is the value passed into the field being tested - (offset or size), intending to trigger integer size bugs for that - particular field. 
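# Illustrative sketch (not code from this patch): the "*-mem-cost" ids above refer
# to the bit length of the memory expansion cost implied by the tested offset/size.
# Using the standard EVM memory cost formula (3 * words + words**2 // 512), the
# chosen values 0x1FFFF20 and 0x2D412E0 land just past 2**31 and 2**32 gas, i.e.
# they produce 32-bit and 33-bit expansion costs respectively.
def memory_expansion_cost(new_bytes: int) -> int:
    words = (new_bytes + 31) // 32
    return 3 * words + words * words // 512

assert memory_expansion_cost(0x1FFFF20).bit_length() == 32
assert memory_expansion_cost(0x2D412E0).bit_length() == 33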
- """ - env = Environment(gas_limit=1_000_000_000) - - sender = pre.fund_eoa() - - address_returner = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=Op.MSTORE(0, value_code_worked) + Op.RETURN(0, 32) - ), - ] - ), - ) - address_caller = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=( - operation( - address=address_returner, - args_offset=test_arg, - args_size=32, - ) - if offset_field - else operation( - address=address_returner, - args_offset=32, - args_size=test_arg, - ) - ) - + Op.SSTORE( - slot_eof_target_returndata, Op.RETURNDATALOAD(0) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ) - ] - ), - storage={ - slot_code_worked: value_exceptional_abort_canary, - slot_eof_target_returndata: value_exceptional_abort_canary, - }, - ) - - post = { - address_caller: Account( - storage={ - slot_eof_target_returndata: value_code_worked - if success - else value_exceptional_abort_canary, - slot_code_worked: value_code_worked - if success - else value_exceptional_abort_canary, - } - ), - } - - tx = Transaction(to=address_caller, gas_limit=1_000_000_000, sender=sender) - - state_test( - env=env, - pre=pre, - tx=tx, - post=post, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py b/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py deleted file mode 100644 index c06f8fe045..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py +++ /dev/null @@ -1,1371 +0,0 @@ -"""test calls across EOF and Legacy.""" - -import itertools -from enum import Enum, auto, unique - -import pytest -from execution_testing import ( - EOA, - Account, - Address, - Alloc, - Bytecode, - Environment, - EVMCodeType, - Op, - StateTestFiller, - Storage, - Transaction, - compute_eofcreate_address, -) -from execution_testing.base_types import HashInt -from execution_testing.test_types.eof.v1 import Container, Section - -from .. 
import EOF_FORK_NAME -from .spec import ( - EXTCALL_FAILURE, - EXTCALL_REVERT, - EXTCALL_SUCCESS, - LEGACY_CALL_FAILURE, - LEGACY_CALL_SUCCESS, -) - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-3540.md" -REFERENCE_SPEC_VERSION = "2f013de4065babde7c02f84a2ce9864a3c5bfbd3" - -"""Storage addresses for common testing fields""" -_slot = itertools.count(1) -slot_code_worked = next(_slot) -slot_call_result = next(_slot) -slot_returndata = next(_slot) -slot_returndatasize = next(_slot) -slot_caller = next(_slot) -slot_returndatasize_before_clear = next(_slot) -slot_max_depth = next(_slot) -slot_last_slot = next(_slot) - -"""Storage values for common testing fields""" -value_code_worked = 0x2015 -value_returndata_magic = b"\x42" - - -contract_eof_sstore = Container( - sections=[ - Section.Code( - code=Op.SSTORE(slot_caller, Op.CALLER()) + Op.STOP, - ) - ] -) - - -@unique -class TargetAccountType(Enum): - """Kinds of target accounts for calls.""" - - EMPTY = auto() - EOA = auto() - LEGACY_CONTRACT = auto() - EOF_CONTRACT = auto() - LEGACY_CONTRACT_INVALID = auto() - EOF_CONTRACT_INVALID = auto() - LEGACY_CONTRACT_REVERT = auto() - EOF_CONTRACT_REVERT = auto() - IDENTITY_PRECOMPILE = auto() - - def __str__(self) -> str: - """Return string representation of the enum.""" - return f"{self.name}" - - -@pytest.fixture -def target_address( - pre: Alloc, target_account_type: TargetAccountType -) -> Address: - """Target address of the call depending on required type of account.""" - match target_account_type: - case TargetAccountType.EMPTY: - return pre.fund_eoa(amount=0) - case TargetAccountType.EOA: - return pre.fund_eoa() - case TargetAccountType.LEGACY_CONTRACT: - return pre.deploy_contract( - code=Op.STOP, - ) - case TargetAccountType.EOF_CONTRACT: - return pre.deploy_contract( - code=Container.Code(Op.STOP), - ) - case TargetAccountType.LEGACY_CONTRACT_INVALID: - return pre.deploy_contract( - code=Op.INVALID, - ) - case TargetAccountType.EOF_CONTRACT_INVALID: - return pre.deploy_contract( - code=Container.Code(Op.INVALID), - ) - case TargetAccountType.LEGACY_CONTRACT_REVERT: - return pre.deploy_contract( - code=Op.REVERT(0, 0), - ) - case TargetAccountType.EOF_CONTRACT_REVERT: - return pre.deploy_contract( - code=Container.Code(Op.REVERT(0, 0)), - ) - case TargetAccountType.IDENTITY_PRECOMPILE: - return identity - - -@pytest.mark.parametrize( - "opcode", - [ - Op.CALL, - Op.DELEGATECALL, - Op.CALLCODE, - Op.STATICCALL, - ], -) -def test_legacy_calls_eof_sstore( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - opcode: Op, -) -> None: - """Test legacy contracts calling EOF contracts that use SSTORE.""" - env = Environment() - destination_contract_address = pre.deploy_contract(contract_eof_sstore) - - caller_contract = Op.SSTORE( - slot_call_result, opcode(address=destination_contract_address) - ) + Op.SSTORE(slot_code_worked, value_code_worked) - - calling_contract_address = pre.deploy_contract(caller_contract) - - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=50000000, - ) - - calling_storage = Storage( - { - slot_code_worked: value_code_worked, # type: ignore - slot_call_result: LEGACY_CALL_SUCCESS, # type: ignore - } - ) - destination_storage = Storage() - - if opcode == Op.CALL: - destination_storage[slot_caller] = calling_contract_address - elif opcode == Op.DELEGATECALL: - calling_storage[slot_caller] = sender - elif opcode == Op.CALLCODE: - calling_storage[slot_caller] = calling_contract_address - elif opcode 
== Op.STATICCALL: - calling_storage[slot_call_result] = LEGACY_CALL_FAILURE - - post = { - calling_contract_address: Account(storage=calling_storage), - destination_contract_address: Account(storage=destination_storage), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.CALL, - Op.DELEGATECALL, - Op.CALLCODE, - Op.STATICCALL, - ], -) -def test_legacy_calls_eof_mstore( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - opcode: Op, -) -> None: - """Test legacy contracts calling EOF contracts that only return data.""" - env = Environment() - destination_contract_code = Container( - sections=[ - Section.Code( - code=Op.MSTORE8( - 0, int.from_bytes(value_returndata_magic, "big") - ) - + Op.RETURN(0, len(value_returndata_magic)), - ) - ] - ) - destination_contract_address = pre.deploy_contract( - destination_contract_code - ) - - caller_contract = ( - Op.SSTORE( - slot_call_result, opcode(address=destination_contract_address) - ) - + Op.SSTORE(slot_returndatasize, Op.RETURNDATASIZE) - + Op.RETURNDATACOPY(31, 0, 1) - + Op.SSTORE(slot_returndata, Op.MLOAD(0)) - + Op.SSTORE(slot_code_worked, value_code_worked) - ) - calling_contract_address = pre.deploy_contract(caller_contract) - - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=50000000, - ) - - calling_storage = { - slot_code_worked: value_code_worked, - slot_call_result: LEGACY_CALL_SUCCESS, - slot_returndatasize: len(value_returndata_magic), - slot_returndata: value_returndata_magic, - } - - post = { - calling_contract_address: Account(storage=calling_storage), - destination_contract_address: Account(storage={}), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.EXTCALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, - ], -) -def test_eof_calls_eof_sstore( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - opcode: Op, -) -> None: - """Test EOF contracts calling EOF contracts that use SSTORE.""" - env = Environment() - destination_contract_address = pre.deploy_contract(contract_eof_sstore) - - caller_contract = Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_call_result, - opcode(address=destination_contract_address), - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - ] - ) - calling_contract_address = pre.deploy_contract(caller_contract) - - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=50000000, - ) - - calling_storage = Storage( - { - HashInt(slot_code_worked): HashInt(value_code_worked), - HashInt(slot_call_result): HashInt(EXTCALL_SUCCESS), - } - ) - destination_storage = Storage() - - if opcode == Op.EXTCALL: - destination_storage[slot_caller] = calling_contract_address - elif opcode == Op.EXTDELEGATECALL: - calling_storage[slot_caller] = sender - elif opcode == Op.EXTSTATICCALL: - calling_storage[slot_call_result] = EXTCALL_FAILURE - - post = { - calling_contract_address: Account(storage=calling_storage), - destination_contract_address: Account(storage=destination_storage), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.EXTCALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, - ], -) -def test_eof_calls_eof_mstore( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - opcode: Op, -) -> None: - """Test EOF contracts calling EOF contracts that return data.""" - env = Environment() - destination_contract_code = 
Container( - sections=[ - Section.Code( - code=Op.MSTORE8( - 0, int.from_bytes(value_returndata_magic, "big") - ) - + Op.RETURN(0, 32), - ) - ] - ) - destination_contract_address = pre.deploy_contract( - destination_contract_code - ) - - caller_contract = Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_call_result, - opcode(address=destination_contract_address), - ) - + Op.SSTORE(slot_returndatasize, Op.RETURNDATASIZE) - + Op.SSTORE(slot_returndata, Op.RETURNDATALOAD(0)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - ] - ) - calling_contract_address = pre.deploy_contract(caller_contract) - - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=50000000, - ) - - calling_storage = { - slot_code_worked: value_code_worked, - slot_call_result: EXTCALL_SUCCESS, - slot_returndatasize: 0x20, - slot_returndata: value_returndata_magic - + b"\0" * (0x20 - len(value_returndata_magic)), - } - - post = { - calling_contract_address: Account(storage=calling_storage), - destination_contract_address: Account(storage={}), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -identity = Address(0x04) -# `blake2f`` is chosen for the test because it fails unless args_size == 213, -# which is what we are interested in. -blake2f = Address(0x09) -# `p256verify` / RIP-7212 has been in and out of prague and osaka. -# Hence we need to test explicitly -p256verify = Address(0x100) - - -@pytest.mark.parametrize( - ["opcode", "precompile", "expected_result"], - [ - pytest.param( - Op.EXTCALL, identity, EXTCALL_SUCCESS, id="extcall_success" - ), - pytest.param( - Op.EXTDELEGATECALL, - identity, - EXTCALL_REVERT, - id="extdelegatecall_blocked1", - ), - pytest.param( - Op.EXTSTATICCALL, - identity, - EXTCALL_SUCCESS, - id="extstaticcall_success", - ), - pytest.param( - Op.EXTCALL, blake2f, EXTCALL_FAILURE, id="extcall_failure" - ), - pytest.param( - Op.EXTDELEGATECALL, - blake2f, - EXTCALL_REVERT, - id="extdelegatecall_blocked2", - ), - pytest.param( - Op.EXTSTATICCALL, - blake2f, - EXTCALL_FAILURE, - id="extstaticcall_failure", - ), - pytest.param( - Op.EXTCALL, p256verify, EXTCALL_SUCCESS, id="extcall_p256verify" - ), - pytest.param( - Op.EXTDELEGATECALL, - p256verify, - EXTCALL_REVERT, - id="extdelegatecall_p256verify", - ), - pytest.param( - Op.EXTSTATICCALL, - p256verify, - EXTCALL_SUCCESS, - id="extstaticcall_p256verify", - ), - ], -) -def test_eof_calls_precompile( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - opcode: Op, - precompile: Address, - expected_result: int, -) -> None: - """Test EOF contracts calling precompiles.""" - env = Environment() - - caller_contract = Container.Code( - Op.MSTORE(0, value_returndata_magic) - + Op.SSTORE( - slot_call_result, - opcode(address=precompile, args_offset=0, args_size=32), - ) - + Op.SSTORE(slot_returndatasize, Op.RETURNDATASIZE) - + Op.SSTORE(slot_returndata, Op.RETURNDATALOAD(0)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - calling_contract_address = pre.deploy_contract(caller_contract) - - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=5000000, - ) - - success_identity = ( - expected_result == EXTCALL_SUCCESS and precompile == identity - ) - - calling_storage = { - slot_code_worked: value_code_worked, - slot_call_result: expected_result, - slot_returndatasize: 32 if success_identity else 0, - slot_returndata: value_returndata_magic if success_identity else 0, - } - - post = { - calling_contract_address: 
Account(storage=calling_storage), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.EXTCALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, - ], -) -def test_eof_calls_legacy_sstore( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - opcode: Op, -) -> None: - """Test EOF contracts calling Legacy contracts that use SSTORE.""" - env = Environment() - destination_contract_code = Op.SSTORE(slot_caller, Op.CALLER()) + Op.STOP - destination_contract_address = pre.deploy_contract( - destination_contract_code - ) - - caller_contract = Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_call_result, - opcode(address=destination_contract_address), - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - ] - ) - calling_contract_address = pre.deploy_contract(caller_contract) - - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=50000000, - ) - - calling_storage = { - slot_code_worked: value_code_worked, - slot_call_result: EXTCALL_SUCCESS, - } - destination_storage = {} - - if opcode == Op.EXTCALL: - destination_storage[slot_caller] = calling_contract_address - elif opcode == Op.EXTDELEGATECALL: - # EOF delegate call to legacy is a light failure by rule - calling_storage[slot_call_result] = EXTCALL_REVERT - elif opcode == Op.EXTSTATICCALL: - calling_storage[slot_call_result] = EXTCALL_FAILURE - - post = { - calling_contract_address: Account(storage=calling_storage), - destination_contract_address: Account(storage=destination_storage), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.EXTCALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, - ], -) -def test_eof_calls_legacy_mstore( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - opcode: Op, -) -> None: - """Test EOF contracts calling Legacy contracts that return data.""" - env = Environment() - destination_contract_code = Op.MSTORE8( - 0, int.from_bytes(value_returndata_magic, "big") - ) + Op.RETURN(0, 32) - destination_contract_address = pre.deploy_contract( - destination_contract_code - ) - - caller_contract = Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_call_result, - opcode(address=destination_contract_address), - ) - + Op.SSTORE(slot_returndatasize, Op.RETURNDATASIZE) - + Op.SSTORE(slot_returndata, Op.RETURNDATALOAD(0)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - ] - ) - calling_contract_address = pre.deploy_contract(caller_contract) - - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=50000000, - ) - - calling_storage = { - slot_code_worked: value_code_worked, - slot_call_result: EXTCALL_SUCCESS, - slot_returndatasize: 0x20, - slot_returndata: value_returndata_magic - + b"\0" * (0x20 - len(value_returndata_magic)), - } - - if opcode == Op.EXTDELEGATECALL: - # EOF delegate call to legacy is a light failure by rule - calling_storage[slot_call_result] = EXTCALL_REVERT - calling_storage[slot_returndatasize] = 0 - calling_storage[slot_returndata] = 0 - - post = { - calling_contract_address: Account(storage=calling_storage), - destination_contract_address: Account(storage={}), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.EXTCALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, - ], -) -@pytest.mark.parametrize( - ["destination_code", "expected_result"], - [ - pytest.param(Op.REVERT(0, 0), 
EXTCALL_REVERT, id="legacy_revert"), - pytest.param(Op.INVALID, EXTCALL_FAILURE, id="legacy_invalid"), - pytest.param(Op.SHA3(0, 2**255), EXTCALL_FAILURE, id="legacy_oog"), - pytest.param( - Op.RETURNDATACOPY(0, 1, 2), - EXTCALL_FAILURE, - id="legacy_oob_returndata", - ), - pytest.param( - Container.Code(Op.REVERT(0, 0)), EXTCALL_REVERT, id="eof_revert" - ), - pytest.param( - Container.Code(Op.INVALID), EXTCALL_FAILURE, id="eof_invalid" - ), - pytest.param( - Container.Code(Op.SHA3(0, 2**255) + Op.STOP), - EXTCALL_FAILURE, - id="eof_oog", - ), - ], -) -def test_callee_fails( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - opcode: Op, - destination_code: Bytecode | Container, - expected_result: int, -) -> None: - """Test EOF contracts calling contracts that fail for various reasons.""" - env = Environment() - - destination_contract_address = pre.deploy_contract(destination_code) - - caller_contract = Container.Code( - Op.SSTORE(slot_code_worked, value_code_worked) - + Op.SSTORE( - slot_call_result, opcode(address=destination_contract_address) - ) - + Op.STOP, - ) - calling_contract_address = pre.deploy_contract(caller_contract) - - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=4000000, - ) - - calling_storage = { - slot_code_worked: value_code_worked, - slot_call_result: EXTCALL_REVERT - if opcode == Op.EXTDELEGATECALL - and not isinstance(destination_code, Container) - else expected_result, - } - - post = { - calling_contract_address: Account(storage=calling_storage), - destination_contract_address: Account(storage={}), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - ["opcode", "destination_code", "expected_result"], - [ - pytest.param( - Op.EXTCALL, Op.ADDRESS, "destination", id="extcall_address" - ), - pytest.param( - Op.EXTDELEGATECALL, - Op.ADDRESS, - "caller", - id="extdelegatecall_address", - ), - pytest.param( - Op.EXTSTATICCALL, - Op.ADDRESS, - "destination", - id="extstaticcall_address", - ), - pytest.param(Op.EXTCALL, Op.CALLER, "caller", id="extcall_caller"), - pytest.param( - Op.EXTDELEGATECALL, - Op.CALLER, - "sender", - id="extdelegatecall_caller", - ), - pytest.param( - Op.EXTSTATICCALL, Op.CALLER, "caller", id="extstaticcall_caller" - ), - pytest.param(Op.EXTCALL, Op.CALLVALUE, 0, id="extcall_call_value"), - pytest.param( - Op.EXTDELEGATECALL, - Op.CALLVALUE, - "tx_value", - id="extdelegatecall_call_value", - ), - pytest.param( - Op.EXTSTATICCALL, Op.CALLVALUE, 0, id="extstaticcall_call_value" - ), - pytest.param(Op.EXTCALL, Op.ORIGIN, "sender", id="extcall_origin"), - pytest.param( - Op.EXTDELEGATECALL, - Op.ORIGIN, - "sender", - id="extdelegatecall_origin", - ), - pytest.param( - Op.EXTSTATICCALL, Op.ORIGIN, "sender", id="extstaticcall_origin" - ), - ], -) -@pytest.mark.with_all_evm_code_types -def test_callee_context( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - opcode: Op, - destination_code: Bytecode, - expected_result: str | int, - evm_code_type: EVMCodeType, -) -> None: - """Test EOF calls' callee context instructions.""" - env = Environment() - tx_value = 0x1123 - - destination_contract_address = pre.deploy_contract( - Op.MSTORE(0, destination_code) + Op.RETURN(0, 32) - ) - - caller_contract = Container.Code( - Op.SSTORE(slot_code_worked, value_code_worked) - + opcode(address=destination_contract_address) - + Op.SSTORE(slot_returndata, Op.RETURNDATALOAD(0)) - + Op.STOP, - ) - calling_contract_address = pre.deploy_contract(caller_contract) - - tx = 
Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=100000, - value=tx_value, - ) - - expected_bytes: Address | int - if expected_result == "destination": - expected_bytes = destination_contract_address - elif expected_result == "caller": - expected_bytes = calling_contract_address - elif expected_result == "sender": - expected_bytes = sender - elif expected_result == "tx_value": - expected_bytes = tx_value - elif isinstance(expected_result, int): - expected_bytes = expected_result - else: - raise TypeError("Unexpected expected_result", expected_result) - - calling_storage = { - slot_code_worked: value_code_worked, - slot_returndata: 0 - if ( - opcode == Op.EXTDELEGATECALL - and evm_code_type == EVMCodeType.LEGACY - ) - else expected_bytes, - } - - post = { - calling_contract_address: Account(storage=calling_storage), - destination_contract_address: Account(storage={}), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.EXTCALL, - Op.EXTDELEGATECALL, - ], -) -@pytest.mark.parametrize("fail_opcode", [Op.REVERT, Op.INVALID]) -def test_eof_calls_eof_then_fails( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - opcode: Op, - fail_opcode: Op, -) -> None: - """Test EOF contracts calling EOF contracts and failing after the call.""" - env = Environment() - destination_contract_address = pre.deploy_contract(contract_eof_sstore) - - caller_contract = Container.Code( - Op.SSTORE( - slot_call_result, opcode(address=destination_contract_address) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + fail_opcode(offset=0, size=0), - ) - calling_contract_address = pre.deploy_contract(caller_contract) - - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=50000000, - ) - - post = { - calling_contract_address: Account(storage=Storage()), - destination_contract_address: Account(storage=Storage()), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.EXTCALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, - ], -) -@pytest.mark.parametrize( - "target_account_type", - TargetAccountType, -) -@pytest.mark.parametrize("value", [0, 1]) -def test_eof_calls_clear_return_buffer( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - opcode: Op, - target_address: Address, - value: int, -) -> None: - """Test EOF contracts calling clears returndata buffer.""" - env = Environment() - filling_contract_code = Container.Code( - Op.MSTORE8(0, int.from_bytes(value_returndata_magic, "big")) - + Op.RETURN(0, 32), - ) - filling_callee_address = pre.deploy_contract(filling_contract_code) - - caller_contract = Container.Code( - # First fill the return buffer and sanity check - Op.EXTCALL(filling_callee_address, 0, 0, 0) - + Op.SSTORE(slot_returndatasize_before_clear, Op.RETURNDATASIZE) - # Then call something that doesn't return and check returndata cleared - + opcode(address=target_address, value=value) - + Op.SSTORE(slot_returndatasize, Op.RETURNDATASIZE) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - - calling_contract_address = pre.deploy_contract(caller_contract) - - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=50000000, - ) - - calling_storage = { - slot_code_worked: value_code_worked, - # Sanity check - slot_returndatasize_before_clear: 0x20, - slot_returndatasize: 0, - } - - post = { - calling_contract_address: Account(storage=calling_storage), - filling_callee_address: 
Account(storage={}), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.CALL, - Op.EXTCALL, - ], -) -def test_eof_calls_static_flag_with_value( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - opcode: Op, -) -> None: - """ - Test EOF contracts calls handle static flag and sending value correctly. - """ - env = Environment() - - noop_callee_address = pre.deploy_contract(Container.Code(Op.STOP)) - - failing_contract_code = ( - opcode(address=noop_callee_address, value=1) + Op.STOP - ) - failing_contract_address = pre.deploy_contract( - Container.Code( - failing_contract_code, - ) - if opcode == Op.EXTCALL - else failing_contract_code - ) - - calling_contract_address = pre.deploy_contract( - Container.Code( - Op.SSTORE( - slot_call_result, - Op.EXTSTATICCALL(address=failing_contract_address), - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ) - ) - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=5_000_000, - ) - - calling_storage = { - slot_code_worked: value_code_worked, - slot_call_result: EXTCALL_FAILURE, - } - - post = { - calling_contract_address: Account(storage=calling_storage), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -min_retained_gas = 2300 -min_callee_gas = 5000 - - -@pytest.mark.parametrize( - ["opcode", "extra_gas_value_transfer", "value"], - [ - [Op.EXTCALL, 0, 0], - [Op.EXTCALL, 9_000, 1], - [Op.EXTSTATICCALL, 0, 0], - [Op.EXTDELEGATECALL, 0, 0], - ], - ids=[ - "extcall_without_value", - "extcall_with_value", - "extstaticcall", - "extdelegatecall", - ], -) -@pytest.mark.parametrize( - ["extra_gas_limit", "reverts"], - [ - [0, False], - [min_retained_gas, False], - [min_callee_gas, False], - [min_retained_gas + min_callee_gas, True], - ], - ids=["no_allowances", "only_retained", "only_callee", "both_allowances"], -) -def test_eof_calls_min_callee_gas( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - opcode: Op, - extra_gas_value_transfer: int, - value: int, - extra_gas_limit: int, - reverts: bool, -) -> None: - """ - Test EOF contracts calls do light failure when retained/callee gas is not - enough. - - Premise of the test is that there exists a range of `gas_limit` values, - which are enough for all instructions to execute, but call's returned value - is 1, meaning not enough for gas allowances (MIN_RETAINED_GAS and - MIN_CALLEE_GAS) - ones marked with `reverts==False`. - - Once we provide both allowances, the RJUMPI condition is no longer met and - `reverts==True`. - """ - env = Environment() - - noop_callee_address = pre.deploy_contract(Container.Code(Op.STOP)) - - revert_block = Op.REVERT(0, 0) - calling_contract_address = pre.deploy_contract( - Container.Code( - Op.SSTORE(slot_code_worked, value_code_worked) - + Op.EQ( - opcode(address=noop_callee_address, value=value), - EXTCALL_REVERT, - ) - # If the return code isn't 1, it means gas was enough to cover the - # allowances. - + Op.RJUMPI[len(revert_block)] - + revert_block - + Op.STOP - ), - balance=value, - ) - - # `no_oog_gas` is minimum amount of gas_limit which makes the transaction - # not go oog. 
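# Illustrative sketch (not code from this patch): the gas-allowance check that
# test_eof_calls_min_callee_gas above exercises.  Under EIP-7069 an EXT*CALL does
# not execute the callee at all and instead pushes 1 (a light failure) when the
# allowances cannot be met: the caller retains at least MIN_RETAINED_GAS (the
# 63/64 rule still applies on top) and the callee must receive at least
# MIN_CALLEE_GAS.  The helper is symbolic; the concrete values are the
# `min_retained_gas` / `min_callee_gas` module constants defined above.
def extcall_light_failure(gas_left: int, min_retained: int, min_callee: int) -> bool:
    retained = max(gas_left // 64, min_retained)
    return gas_left - retained < min_callee

# With only one of the two allowances on top of the bare execution cost the call
# reports a light failure; with both, it executes (flipping the RJUMPI/REVERT branch).
assert extcall_light_failure(gas_left=2300, min_retained=2300, min_callee=5000)
assert not extcall_light_failure(gas_left=2300 + 5000, min_retained=2300, min_callee=5000)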
- push_operations = 3 + len(opcode.kwargs) - no_oog_gas = ( - 21_000 - + 20_000 # SSTORE - + 2_100 # SSTORE COLD_SLOAD_COST - + push_operations * 3 # PUSH operations - + 100 # WARM_STORAGE_READ_COST - + 2500 # COLD_ACCOUNT_ACCESS - WARM_STORAGE_READ_COST - + extra_gas_value_transfer - + 4 # RJUMPI - + 3 # EQ - ) - - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=no_oog_gas + extra_gas_limit, - ) - - calling_storage = { - slot_code_worked: 0 if reverts else value_code_worked, - } - - post = { - calling_contract_address: Account(storage=calling_storage), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "balance", - [0, 1, 2, pytest.param(2**256 - 1, marks=pytest.mark.pre_alloc_modify)], -) -@pytest.mark.parametrize("value", [0, 1, 2, 2**256 - 1]) -def test_eof_calls_with_value( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - balance: int, - value: int, -) -> None: - """ - Test EOF contracts calls handle value calls with and without enough - balance. - """ - env = Environment() - - noop_callee_address = pre.deploy_contract(Container.Code(Op.STOP)) - - calling_contract_address = pre.deploy_contract( - Container.Code( - Op.SSTORE( - slot_call_result, - Op.EXTCALL(address=noop_callee_address, value=value), - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ), - balance=balance, - ) - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=50000000, - ) - - calling_storage = { - slot_code_worked: value_code_worked, - slot_call_result: EXTCALL_REVERT - if balance < value - else EXTCALL_SUCCESS, - } - - post = { - calling_contract_address: Account(storage=calling_storage), - noop_callee_address: Account(balance=0 if balance < value else value), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.EXTCALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, - ], -) -def test_eof_calls_msg_depth( - state_test: StateTestFiller, - pre: Alloc, - sender: EOA, - opcode: Op, -) -> None: - """ - Test EOF contracts calls handle msg depth limit correctly (1024). - - Note: - due to block gas limit and the 63/64th rule this limit is unlikely - to be hit on mainnet. - - """ - # Not a precise gas_limit formula, but enough to exclude risk of gas - # causing the failure. - gas_limit = int(200000 * (64 / 63) ** 1024) - env = Environment(gas_limit=gas_limit) - - # Flow of the test: - # `callee_code` is recursively calling itself, passing msg depth as - # calldata (kept with the `MSTORE(0, ADD(...))`). When maximum msg depth is - # reached the call fails and starts returning. The deep-most frame returns: - # - current reached msg depth (expected to be the maximum 1024), with the - # `MSTORE(32, ADD(...))` - # - the respective return code of the EXT*CALL (expected to be 1 - light - # failure), with the `MSTORE(64, NOOP)`. Note the `NOOP` is just to - # appease the `Op.MSTORE` call, the return code value is actually - # coming from the `Op.DUP1` - # When unwinding the msg call stack, the intermediate frames return - # whatever the deeper callee returned with the `RETURNDATACOPY` - # instruction. 
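# Illustrative sketch (not code from this patch): a toy model of the depth
# behaviour described in the comment above.  Frames recurse until the 1024-frame
# limit; the EXT*CALL made at the limit is refused with a light failure (status 1)
# and every intermediate frame simply forwards that result back up, so the caller
# observes (max_depth == 1024, status == EXTCALL_REVERT).
import sys

sys.setrecursionlimit(2048)  # the toy recursion below needs ~1024 Python frames

def call_frame(depth: int, limit: int = 1024) -> tuple[int, int]:
    if depth == limit:
        return depth, 1                      # the next EXT*CALL would exceed the limit: light failure
    return call_frame(depth + 1, limit)      # RETURNDATACOPY passthrough of the deeper result

assert call_frame(1) == (1024, 1)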
- - # Memory offsets layout: - # - 0 - input - msg depth - # - 32 - output - msg depth - # - 64 - output - call result - returndatacopy_block = Op.RETURNDATACOPY(32, 0, 64) + Op.RETURN(32, 64) - deep_most_result_block = ( - Op.MSTORE(32, Op.ADD(Op.CALLDATALOAD(0), 1)) - + Op.MSTORE(64, Op.NOOP) - + Op.RETURN(32, 64) - ) - rjump_offset = len(returndatacopy_block) - - callee_code = Container.Code( - # current stack depth in memory bytes 0-31 - Op.MSTORE(0, Op.ADD(Op.CALLDATALOAD(0), 1)) - # pass it along deeper as calldata - + opcode(address=Op.ADDRESS, args_size=32) - # duplicate return code for the `returndatacopy_block` below - + Op.DUP1 - # if return code was: - # - 1, we're in the deep-most frame, `deep_most_result_block` returns - # the actual result - # - 0, we're in an intermediate frame, `returndatacopy_block` only - # passes on the result - + Op.RJUMPI[rjump_offset] - + returndatacopy_block - + deep_most_result_block - ) - - callee_address = pre.deploy_contract(callee_code) - - calling_contract_address = pre.deploy_contract( - Container.Code( - Op.MSTORE(0, Op.CALLDATALOAD(0)) - + Op.EXTCALL(address=callee_address, args_size=32) - + Op.SSTORE(slot_max_depth, Op.RETURNDATALOAD(0)) - + Op.SSTORE(slot_call_result, Op.RETURNDATALOAD(32)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ) - ) - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=gas_limit, - ) - - calling_storage = { - slot_max_depth: 1024, - slot_code_worked: value_code_worked, - slot_call_result: EXTCALL_REVERT, - } - - post = { - calling_contract_address: Account(storage=calling_storage), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize("target_account_type", TargetAccountType) -@pytest.mark.parametrize("delegate", [True, False]) -@pytest.mark.parametrize("call_from_initcode", [True, False]) -def test_extdelegate_call_targets( - state_test: StateTestFiller, - pre: Alloc, - target_account_type: TargetAccountType, - target_address: Address, - delegate: bool, - call_from_initcode: bool, -) -> None: - """ - Test EOF contracts extdelegatecalling various targets, especially resolved - via 7702 delegation. 
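# Illustrative sketch (not code from this patch): the target-compatibility rule
# behind test_extdelegate_call_targets below.  EXTDELEGATECALL only executes the
# target if its (possibly EIP-7702-delegated) code is an EOF container; delegating
# to legacy code, an EOA, an empty account or a precompile is a light failure
# (status 1) without execution.  An EOF target that runs but aborts reports
# status 2, while one that reverts also reports 1.  Hypothetical helper; the
# 0xEF00 prefix is the EOF container magic.
def extdelegatecall_status(target_code: bytes, callee_aborts: bool = False) -> int:
    if not target_code.startswith(b"\xef\x00"):
        return 1                      # EXTCALL_REVERT: non-EOF target, callee never runs
    return 2 if callee_aborts else 0  # EXTCALL_FAILURE vs EXTCALL_SUCCESS

assert extdelegatecall_status(b"") == 1                                  # EOA / empty account
assert extdelegatecall_status(b"\x60\x00\x60\x00\xfd") == 1              # legacy REVERT(0, 0) code
assert extdelegatecall_status(b"\xef\x00\x01", callee_aborts=True) == 2  # prefix only, stands in for an EOF container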
- """ - env = Environment() - - if delegate: - target_address = pre.fund_eoa(0, delegation=target_address) - - sender = pre.fund_eoa() - delegate_call_code = Op.SSTORE( - slot_call_result, Op.EXTDELEGATECALL(address=target_address) - ) + Op.SSTORE(slot_code_worked, value_code_worked) - - if call_from_initcode: - # Call from initcode - caller_contract = Container( - sections=[ - Section.Code( - code=delegate_call_code + Op.RETURNCODE[0](0, 0), - ), - Section.Container(Container.Code(Op.STOP)), - ] - ) - initcode_hash = caller_contract.hash - factory_address = pre.deploy_contract( - code=Op.TXCREATE(tx_initcode_hash=initcode_hash) + Op.STOP, - ) - tx = Transaction( - sender=sender, - to=factory_address, - data=caller_contract, - gas_limit=4_000_000, - initcodes=[caller_contract], - ) - calling_contract_address = compute_eofcreate_address( - factory_address, 0 - ) - else: - # Normal call from existing contract - caller_contract = Container.Code( - delegate_call_code + Op.STOP, - ) - calling_contract_address = pre.deploy_contract(caller_contract) - - tx = Transaction( - sender=sender, - to=calling_contract_address, - gas_limit=4_000_000, - ) - - calling_storage = { - slot_code_worked: value_code_worked, - slot_call_result: EXTCALL_SUCCESS - if target_account_type == TargetAccountType.EOF_CONTRACT - else EXTCALL_FAILURE - if target_account_type == TargetAccountType.EOF_CONTRACT_INVALID - else EXTCALL_REVERT, - } - - post = { - calling_contract_address: Account(storage=calling_storage), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_gas.py b/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_gas.py deleted file mode 100644 index 2038217021..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_gas.py +++ /dev/null @@ -1,247 +0,0 @@ -""" -Gas consumption tests for EXT*CALL instructions - Tests for gas consumption in - [EIP-7069: Revamped CALL instructions](https://eips.ethereum.org/EIPS/eip-7069). -""" - -import pytest -from execution_testing import ( - Address, - Alloc, - Environment, - Fork, - Op, - StateTestFiller, -) -from execution_testing.test_types.eof.v1 import Container - -from .. import EOF_FORK_NAME -from ..gas_test import gas_test -from . import REFERENCE_SPEC_GIT_PATH, REFERENCE_SPEC_VERSION - -REFERENCE_SPEC_GIT_PATH = REFERENCE_SPEC_GIT_PATH -REFERENCE_SPEC_VERSION = REFERENCE_SPEC_VERSION - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -COLD_ACCOUNT_ACCESS_GAS = 2600 -WARM_ACCOUNT_ACCESS_GAS = 100 -CALL_WITH_VALUE_GAS = 9000 -ACCOUNT_CREATION_GAS = 25000 - - -@pytest.fixture -def state_env() -> Environment: - """ - Prepare the environment for all state test cases. - - Main difference is that the excess blob gas is not increased by the target, - as there is no genesis block -> block 1 transition, and therefore the - excess blob gas is not decreased by the target. 
- """ - return Environment() - - -@pytest.mark.parametrize( - ["opcode", "pre_setup", "cold_gas", "warm_gas", "new_account"], - [ - pytest.param( - Op.EXTCALL, - Op.PUSH0, - COLD_ACCOUNT_ACCESS_GAS, - WARM_ACCOUNT_ACCESS_GAS, - False, - id="EXTCALL", - ), - pytest.param( - Op.EXTCALL, - Op.PUSH1(1), - COLD_ACCOUNT_ACCESS_GAS + CALL_WITH_VALUE_GAS, - WARM_ACCOUNT_ACCESS_GAS + CALL_WITH_VALUE_GAS, - False, - id="EXTCALL_with_value", - ), - pytest.param( - Op.EXTDELEGATECALL, - Op.NOOP, - COLD_ACCOUNT_ACCESS_GAS, - WARM_ACCOUNT_ACCESS_GAS, - False, - id="EXTDELEGATECALL", - ), - pytest.param( - Op.EXTSTATICCALL, - Op.NOOP, - COLD_ACCOUNT_ACCESS_GAS, - WARM_ACCOUNT_ACCESS_GAS, - False, - id="EXTSTATICCALL", - ), - pytest.param( - Op.EXTCALL, - Op.PUSH0, - COLD_ACCOUNT_ACCESS_GAS, - WARM_ACCOUNT_ACCESS_GAS, - True, - id="EXTCALL_new_acc", - ), - pytest.param( - Op.EXTCALL, - Op.PUSH1(1), - COLD_ACCOUNT_ACCESS_GAS - + ACCOUNT_CREATION_GAS - + CALL_WITH_VALUE_GAS, - WARM_ACCOUNT_ACCESS_GAS - + ACCOUNT_CREATION_GAS - + CALL_WITH_VALUE_GAS, - True, - id="EXTCALL_with_value_new_acc", - ), - pytest.param( - Op.EXTDELEGATECALL, - Op.NOOP, - COLD_ACCOUNT_ACCESS_GAS, - WARM_ACCOUNT_ACCESS_GAS, - True, - id="EXTDELEGATECALL_new_acc", - ), - pytest.param( - Op.EXTSTATICCALL, - Op.NOOP, - COLD_ACCOUNT_ACCESS_GAS, - WARM_ACCOUNT_ACCESS_GAS, - True, - id="EXTSTATICCALL_new_acc", - ), - ], -) -@pytest.mark.parametrize( - "mem_expansion_bytes", - [0, 1, 32, 33], -) -def test_ext_calls_gas( - state_test: StateTestFiller, - pre: Alloc, - fork: Fork, - state_env: Environment, - opcode: Op, - pre_setup: Op, - cold_gas: int, - warm_gas: int, - new_account: bool, - mem_expansion_bytes: int, -) -> None: - """ - Tests variations of EXT*CALL gas, both warm and cold, without and with mem - expansions. - """ - address_target = ( - pre.fund_eoa(0) - if new_account - else pre.deploy_contract(Container.Code(Op.STOP)) - ) - cost_memory_bytes = fork.memory_expansion_gas_calculator() - gas_test( - fork, - state_test, - state_env, - pre, - setup_code=pre_setup - + Op.PUSH1(mem_expansion_bytes) - + Op.PUSH0 - + Op.PUSH20(address_target), - subject_code=opcode, - tear_down_code=Op.STOP, - cold_gas=cold_gas + cost_memory_bytes(new_bytes=mem_expansion_bytes), - warm_gas=warm_gas + cost_memory_bytes(new_bytes=mem_expansion_bytes), - ) - - -@pytest.mark.parametrize( - "opcode", [Op.EXTCALL, Op.EXTDELEGATECALL, Op.EXTSTATICCALL] -) -@pytest.mark.parametrize("value", [0, 1]) -def test_transfer_gas_is_cleared( - state_test: StateTestFiller, - pre: Alloc, - fork: Fork, - state_env: Environment, - opcode: Op, - value: int, -) -> None: - """ - Test that EXT*CALL call doesn't charge for value transfer, even if the - outer call transferred value. - - NOTE: This is particularly possible for EXTDELEGATECALL, which carries over - the value sent in the outer call, however, we extend the test to all 3 - EXT*CALL opcodes for good measure. - """ - noop_callee_address = pre.deploy_contract(Container.Code(Op.STOP)) - - extdelegatecall_contract_address = pre.deploy_contract( - Container.Code(opcode(address=noop_callee_address) + Op.STOP) - ) - - push_gas = (4 if opcode == Op.EXTCALL else 3) * 3 - - gas_test( - fork, - state_test, - state_env, - pre, - setup_code=Op.PUSH1(value) - + Op.PUSH0 * 2 - + Op.PUSH20(extdelegatecall_contract_address), - subject_code=Op.EXTCALL, - subject_balance=5 * value, - tear_down_code=Op.STOP, - # NOTE: CALL_WITH_VALUE_GAS is charged only once on the outer EXTCALL, - # while the base call gas - twice. 
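# Illustrative sketch (not code from this patch): how the EXT*CALL gas charges
# parametrized above combine.  An EXTCALL sending value to a previously untouched,
# non-existent account pays the cold account access cost, the account creation
# surcharge and the value-transfer surcharge; a warm repeat only drops the access
# cost, and the creation surcharge applies only when value is actually sent to an
# empty account.  Constants mirror the module-level values above.
COLD_ACCOUNT_ACCESS_GAS = 2600
WARM_ACCOUNT_ACCESS_GAS = 100
CALL_WITH_VALUE_GAS = 9000
ACCOUNT_CREATION_GAS = 25000

def extcall_access_gas(warm: bool, transfers_value: bool, creates_account: bool) -> int:
    gas = WARM_ACCOUNT_ACCESS_GAS if warm else COLD_ACCOUNT_ACCESS_GAS
    if transfers_value:
        gas += CALL_WITH_VALUE_GAS
        if creates_account:
            gas += ACCOUNT_CREATION_GAS  # charged only on value transfer to an empty account
    return gas

# Matches the "EXTCALL_with_value_new_acc" case above: 36_600 cold, 34_100 warm.
assert extcall_access_gas(warm=False, transfers_value=True, creates_account=True) == 36_600
assert extcall_access_gas(warm=True, transfers_value=True, creates_account=True) == 34_100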
- cold_gas=2 * COLD_ACCOUNT_ACCESS_GAS - + (CALL_WITH_VALUE_GAS if value > 0 else 0) - + push_gas, - warm_gas=2 * WARM_ACCOUNT_ACCESS_GAS - + (CALL_WITH_VALUE_GAS if value > 0 else 0) - + push_gas, - out_of_gas_testing=False, - ) - - -@pytest.mark.parametrize( - "opcode", [Op.EXTCALL, Op.EXTDELEGATECALL, Op.EXTSTATICCALL] -) -def test_late_account_create( - state_test: StateTestFiller, - pre: Alloc, - fork: Fork, - state_env: Environment, - opcode: Op, -) -> None: - """ - Test EXTCALL to a non-existent account after another EXT*CALL has called it - and not created it. - """ - empty_address = Address(0xDECAFC0DE) - - gas_test( - fork, - state_test, - state_env, - pre, - prelude_code=Op.BALANCE(address=empty_address), - setup_code=opcode(address=empty_address) - + Op.PUSH1(1) - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH20(empty_address), - subject_code=Op.EXTCALL, - subject_balance=5, - tear_down_code=Op.STOP, - cold_gas=WARM_ACCOUNT_ACCESS_GAS - + CALL_WITH_VALUE_GAS - + ACCOUNT_CREATION_GAS, - warm_gas=WARM_ACCOUNT_ACCESS_GAS + CALL_WITH_VALUE_GAS, - out_of_gas_testing=False, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_returndatacopy_memory_expansion.py b/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_returndatacopy_memory_expansion.py deleted file mode 100644 index 378f9ec64c..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_returndatacopy_memory_expansion.py +++ /dev/null @@ -1,309 +0,0 @@ -"""Memory expansion tests for RETURNDATACOPY executing in EOF code.""" - -from typing import Mapping, Tuple - -import pytest -from execution_testing import ( - Account, - Address, - Alloc, - Bytecode, - Environment, - Fork, - Op, - StateTestFiller, - Storage, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container - -from .. import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7069.md" -REFERENCE_SPEC_VERSION = "e469fd6c8d736b2a3e1ce632263e3ad36fc8624d" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.fixture -def callee_bytecode(dest: int, src: int, length: int) -> Container: - """Callee performs a single returndatacopy operation and then returns.""" - bytecode = Bytecode() - - # Copy the initial memory - bytecode += Op.CALLDATACOPY(0x00, 0x00, Op.CALLDATASIZE()) - - # Pushes for the return operation - bytecode += Op.PUSH1(0x00) + Op.PUSH1(0x00) - - # Perform the returndatacopy operation - bytecode += Op.RETURNDATACOPY(dest, src, length) - - bytecode += Op.RETURN - - return Container.Code(code=bytecode) - - -@pytest.fixture -def subcall_exact_cost( - fork: Fork, - initial_memory: bytes, - dest: int, - length: int, -) -> int: - """ - Return exact cost of the subcall, based on the initial memory and the - length of the copy. 
- """ - cost_memory_bytes = fork.memory_expansion_gas_calculator() - - returndatacopy_cost = 3 - returndatacopy_cost += 3 * ((length + 31) // 32) - if length > 0 and dest + length > len(initial_memory): - returndatacopy_cost += cost_memory_bytes( - new_bytes=dest + length, previous_bytes=len(initial_memory) - ) - - calldatacopy_cost = 3 - calldatacopy_cost += 3 * ((len(initial_memory) + 31) // 32) - calldatacopy_cost += cost_memory_bytes(new_bytes=len(initial_memory)) - - pushes_cost = 3 * 7 - calldatasize_cost = 2 - return ( - returndatacopy_cost - + calldatacopy_cost - + pushes_cost - + calldatasize_cost - ) - - -@pytest.fixture -def bytecode_storage( - subcall_exact_cost: int, - successful: bool, - memory_expansion_address: Address, -) -> Tuple[Bytecode, Storage.StorageDictType]: - """ - Prepare bytecode and storage for the test, based on the expected result of - the subcall (whether it succeeds or fails depending on the length of the - memory expansion). - """ - bytecode = Bytecode() - storage = {} - - # Pass on the calldata - bytecode += Op.CALLDATACOPY(0x00, 0x00, Op.CALLDATASIZE()) - - subcall_gas = subcall_exact_cost if successful else subcall_exact_cost - 1 - - # Perform the subcall and store a one in the result location - bytecode += Op.SSTORE( - Op.CALL( - subcall_gas, - memory_expansion_address, - 0, - 0, - Op.CALLDATASIZE(), - 0, - 0, - ), - 1, - ) - storage[int(successful)] = 1 - - return (bytecode, storage) - - -@pytest.fixture -def tx_max_fee_per_gas() -> int: # noqa: D103 - return 7 - - -@pytest.fixture -def block_gas_limit() -> int: # noqa: D103 - return 100_000_000 - - -@pytest.fixture -def tx_gas_limit( # noqa: D103 - subcall_exact_cost: int, - block_gas_limit: int, -) -> int: - return min(max(500_000, subcall_exact_cost * 2), block_gas_limit) - - -@pytest.fixture -def env( # noqa: D103 - block_gas_limit: int, -) -> Environment: - return Environment(gas_limit=block_gas_limit) - - -@pytest.fixture -def caller_address( # noqa: D103 - pre: Alloc, bytecode_storage: Tuple[bytes, Storage.StorageDictType] -) -> Address: - return pre.deploy_contract(code=bytecode_storage[0]) - - -@pytest.fixture -def memory_expansion_address(pre: Alloc, callee_bytecode: bytes) -> Address: # noqa: D103 - return pre.deploy_contract(code=callee_bytecode) - - -@pytest.fixture -def sender(pre: Alloc, tx_max_fee_per_gas: int, tx_gas_limit: int) -> Address: # noqa: D103 - return pre.fund_eoa(tx_max_fee_per_gas * tx_gas_limit) - - -@pytest.fixture -def tx( # noqa: D103 - sender: Address, - caller_address: Address, - initial_memory: bytes, - tx_max_fee_per_gas: int, - tx_gas_limit: int, -) -> Transaction: - return Transaction( - sender=sender, - to=caller_address, - data=initial_memory, - gas_limit=tx_gas_limit, - max_fee_per_gas=tx_max_fee_per_gas, - max_priority_fee_per_gas=0, - ) - - -@pytest.fixture -def post( # noqa: D103 - caller_address: Address, - bytecode_storage: Tuple[bytes, Storage.StorageDictType], -) -> Mapping: - return { - caller_address: Account(storage=bytecode_storage[1]), - } - - -@pytest.mark.parametrize( - "dest,src,length", - [ - (0x00, 0x00, 0x01), - (0x100, 0x00, 0x01), - (0x1F, 0x00, 0x01), - (0x20, 0x00, 0x01), - (0x1000, 0x00, 0x01), - (0x1000, 0x00, 0x40), - (0x00, 0x00, 0x00), - (2**256 - 1, 0x00, 0x00), - (0x00, 2**256 - 1, 0x00), - (2**256 - 1, 2**256 - 1, 0x00), - ], - ids=[ - "single_byte_expansion", - "single_byte_expansion_2", - "single_byte_expansion_word_boundary", - "single_byte_expansion_word_boundary_2", - "multi_word_expansion", - "multi_word_expansion_2", - 
"zero_length_expansion", - "huge_dest_zero_length", - "huge_src_zero_length", - "huge_dest_huge_src_zero_length", - ], -) -@pytest.mark.parametrize("successful", [True, False]) -@pytest.mark.parametrize( - "initial_memory", - [ - bytes(range(0x00, 0x100)), - bytes(), - ], - ids=[ - "from_existent_memory", - "from_empty_memory", - ], -) -def test_returndatacopy_memory_expansion( - state_test: StateTestFiller, - env: Environment, - pre: Alloc, - post: Mapping[str, Account], - tx: Transaction, -) -> None: - """ - Perform RETURNDATACOPY operations that expand the memory, and verify the - gas it costs. - """ - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "dest,src,length", - [ - pytest.param( - 2**256 - 1, 0x00, 0x01, id="max_dest_single_byte_expansion" - ), - pytest.param( - 2**256 - 2, - 0x00, - 0x01, - id="max_dest_minus_one_single_byte_expansion", - ), - pytest.param( - 2**255 - 1, 0x00, 0x01, id="half_max_dest_single_byte_expansion" - ), - pytest.param(0x00, 0x00, 2**256 - 1, id="max_length_expansion"), - pytest.param( - 0x00, 0x00, 2**256 - 2, id="max_length_minus_one_expansion" - ), - pytest.param(0x00, 0x00, 2**255 - 1, id="half_max_length_expansion"), - pytest.param(0x1FFFF20, 0x00, 0x01, id="32-bit-mem-cost_offset"), - pytest.param(0x2D412E0, 0x00, 0x01, id="33-bit-mem-cost_offset"), - pytest.param(0x00, 0x00, 0x1FFFF20, id="32-bit-mem-cost_size"), - pytest.param(0x00, 0x00, 0x2D412E0, id="33-bit-mem-cost_size"), - pytest.param(0x1FFFFFFFF20, 0x00, 0x01, id="64-bit-mem-cost_offset"), - pytest.param(0x2D413CCCF00, 0x00, 0x01, id="65-bit-mem-cost_offset"), - pytest.param(0x00, 0x00, 0x1FFFFFFFF20, id="64-bit-mem-cost_size"), - pytest.param(0x00, 0x00, 0x2D413CCCF00, id="65-bit-mem-cost_size"), - ], -) -@pytest.mark.parametrize( - "subcall_exact_cost", - [2**128 - 1], - ids=[""], -) # Limit subcall gas, otherwise it would be impossibly large -@pytest.mark.parametrize("successful", [False]) -@pytest.mark.parametrize( - "initial_memory", - [ - bytes(range(0x00, 0x100)), - bytes(), - ], - ids=[ - "from_existent_memory", - "from_empty_memory", - ], -) -def test_returndatacopy_huge_memory_expansion( - state_test: StateTestFiller, - env: Environment, - pre: Mapping[str, Account], - post: Mapping[str, Account], - tx: Transaction, -) -> None: - """ - Perform RETURNDATACOPY operations that expand the memory by huge amounts, - and verify that it correctly runs out of gas. - """ - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_returndataload.py b/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_returndataload.py deleted file mode 100644 index 8adcd6997b..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_returndataload.py +++ /dev/null @@ -1,410 +0,0 @@ -""" -RETURNDATALOAD instruction tests - Tests for RETURNDATALOAD instruction in - [EIP-7069: Revamped CALL instructions](https://eips.ethereum.org/EIPS/eip-7069). -""" - -from typing import cast - -import pytest -from execution_testing import ( - Account, - Alloc, - Environment, - Op, - StateTestFiller, - Storage, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. import EOF_FORK_NAME -from . 
import REFERENCE_SPEC_GIT_PATH, REFERENCE_SPEC_VERSION -from .helpers import ( - slot_code_worked, - slot_eof_target_call_status, - slot_eof_target_returndata, - slot_eof_target_returndatasize, - slot_legacy_target_call_status, - slot_legacy_target_returndatasize, - value_code_worked, - value_exceptional_abort_canary, -) -from .spec import ( - EXTCALL_FAILURE, - EXTCALL_SUCCESS, - LEGACY_CALL_FAILURE, - LEGACY_CALL_SUCCESS, -) - -REFERENCE_SPEC_GIT_PATH = REFERENCE_SPEC_GIT_PATH -REFERENCE_SPEC_VERSION = REFERENCE_SPEC_VERSION - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.CALL, - Op.CALLCODE, - Op.DELEGATECALL, - Op.STATICCALL, - Op.EXTCALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, - ], -) -@pytest.mark.parametrize( - "return_data", - [ - b"", - b"\x10" * 0x10, - b"\x20" * 0x20, - b"\x30" * 0x30, - ], - ids=lambda x: "len_%x" % len(x), -) -@pytest.mark.parametrize( - "offset", - [ - 0, - 0x10, - 0x20, - 0x30, - ], - ids=lambda x: "offset_%x" % x, -) -@pytest.mark.parametrize( - "size", - [ - 0, - 0x10, - 0x20, - 0x30, - ], - ids=lambda x: "size_%x" % x, -) -def test_returndatacopy_handling( - state_test: StateTestFiller, - pre: Alloc, - opcode: Op, - return_data: bytes, - offset: int, - size: int, -) -> None: - """ - Tests ReturnDataLoad including multiple offset conditions and differing - legacy vs. eof boundary conditions. - - entrypoint creates a "0xff" test area of memory, delegate calls to caller. - Caller is either EOF or legacy, as per parameter. Calls returner and - copies the return data based on offset and size params. Cases are expected - to trigger boundary violations. - - Entrypoint copies the test area to storage slots, and the expected result - is asserted. - """ - env = Environment() - - slot_result_start = 0x1000 - - sender = pre.fund_eoa() - - address_returner = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=Op.DATACOPY(0, 0, Op.DATASIZE) - + Op.RETURN(0, Op.DATASIZE), - ), - Section.Data(data=return_data), - ] - ) - ) - - result = [0xFF] * 0x40 - result[0:size] = [0] * size - extent = size - max(0, size + offset - len(return_data)) - if extent > 0 and len(return_data) > 0: - result[0:extent] = [return_data[0]] * extent - - code_under_test = ( - opcode(address=address_returner) - + Op.RETURNDATACOPY(0, offset, size) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURN(0, size) - ) - match opcode: - case Op.EXTCALL | Op.EXTDELEGATECALL | Op.EXTSTATICCALL: - address_caller = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=code_under_test, - ) - ] - ) - ) - case Op.CALL | Op.CALLCODE | Op.DELEGATECALL | Op.STATICCALL: - address_caller = pre.deploy_contract(code_under_test) - - address_entry_point = pre.deploy_contract( - Op.NOOP - # First, create a "dirty" area, so we can check zero overwrite - + Op.MSTORE(0x00, -1) - + Op.MSTORE(0x20, -1) - # call the contract under test - + Op.DELEGATECALL(1_000_000, address_caller, 0, 0, 0, 0) - + Op.RETURNDATACOPY(0, 0, Op.RETURNDATASIZE) - # store the return data - + Op.SSTORE(slot_result_start, Op.MLOAD(0x0)) - + Op.SSTORE(slot_result_start + 1, Op.MLOAD(0x20)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - - post = { - address_entry_point: Account( - storage={ - slot_code_worked: value_code_worked, - slot_result_start: bytes(result[:0x20]), - (slot_result_start + 0x1): bytes(result[0x20:]), - } - ) - } - if opcode in [Op.CALL, Op.CALLCODE, Op.DELEGATECALL, Op.STATICCALL] and ( - (offset + size) > 
len(return_data) - ): - post[address_entry_point] = Account( - storage={ - slot_code_worked: value_code_worked, - slot_result_start: b"\xff" * 32, - slot_result_start + 1: b"\xff" * 32, - } - ) - - tx = Transaction( - to=address_entry_point, gas_limit=2_000_000, sender=sender - ) - - state_test( - env=env, - pre=pre, - tx=tx, - post=post, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.EXTCALL, - Op.EXTDELEGATECALL, - Op.EXTSTATICCALL, - ], -) -@pytest.mark.parametrize( - "return_data", - [ - b"", - b"\x10" * 0x10, - b"\x20" * 0x20, - b"\x30" * 0x30, - ], - ids=lambda x: "len_%x" % len(x), -) -@pytest.mark.parametrize( - "offset", - [ - 0, - 0x10, - 0x20, - 0x30, - ], - ids=lambda x: "offset_%x" % x, -) -def test_returndataload_handling( - state_test: StateTestFiller, - pre: Alloc, - opcode: Op, - return_data: bytes, - offset: int, -) -> None: - """ - Much simpler than returndatacopy, no memory or boosted call. Returner is - called and results are stored in storage slot, which is asserted for - expected values. The parameters offset and return data are configured to - test boundary conditions. - """ - env = Environment() - - slot_result_start = 0x1000 - - sender = pre.fund_eoa() - address_returner = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=Op.DATACOPY(0, 0, Op.DATASIZE) - + Op.RETURN(0, Op.DATASIZE), - ), - Section.Data(data=return_data), - ] - ) - ) - address_entry_point = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=opcode(address=address_returner) - + Op.SSTORE(slot_result_start, Op.RETURNDATALOAD(offset)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - ] - ) - ) - - result = [0] * 0x20 - extent = 0x20 - max(0, 0x20 + offset - len(return_data)) - if extent > 0 and len(return_data) > 0: - result[0:extent] = [return_data[0]] * extent - post = { - address_entry_point: Account( - storage={ - slot_code_worked: value_code_worked, - slot_result_start: bytes(result), - } - ) - } - - tx = Transaction( - to=address_entry_point, gas_limit=2_000_000, sender=sender - ) - - state_test( - env=env, - pre=pre, - tx=tx, - post=post, - ) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.CALL, - Op.EXTCALL, - ], -) -def test_returndatacopy_oob( - state_test: StateTestFiller, - pre: Alloc, - opcode: Op, -) -> None: - """ - Extends the RETURNDATACOPY test for correct out-of-bounds behavior, by - checking if the caller frame's context being EOF or legacy doesn't impact - the execution logic of the RETURNDATACOPY instance under test. - """ - env = Environment() - - sender = pre.fund_eoa() - - # Both callee codes below make an OOB (out-of-bounds) RETURNDATACOPY of one - # byte, which they then attempt to return (Legacy should exceptionally halt - # on RETURNDATACOPY). - address_callee_eof = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.RETURNDATACOPY(0, 0, 1) + Op.RETURN(0, 1), - max_stack_height=3, - ) - ] - ) - ) - address_callee_legacy = pre.deploy_contract( - Op.RETURNDATACOPY(0, 0, 1) + Op.RETURN(0, 1) - ) - - # Caller code is selected to either be Legacy or EOF using params. 
- code_entry_point = ( - Op.SSTORE( - slot_eof_target_call_status, opcode(address=address_callee_eof) - ) - + Op.SSTORE(slot_eof_target_returndatasize, Op.RETURNDATASIZE) - + Op.SSTORE( - slot_eof_target_returndata, - Op.RETURNDATACOPY(0, 0, 1) + Op.MLOAD(0), - ) - + Op.SSTORE( - slot_legacy_target_call_status, - opcode(address=address_callee_legacy), - ) - + Op.SSTORE(slot_legacy_target_returndatasize, Op.RETURNDATASIZE) - + Op.STOP - ) - - storage_entry_point = Storage( - cast( - Storage.StorageDictType, - { - slot_eof_target_call_status: value_exceptional_abort_canary, - slot_eof_target_returndata: value_exceptional_abort_canary, - slot_eof_target_returndatasize: value_exceptional_abort_canary, - slot_legacy_target_call_status: value_exceptional_abort_canary, - slot_legacy_target_returndatasize: ( - value_exceptional_abort_canary - ), - }, - ) - ) - - address_entry_point = ( - pre.deploy_contract(code=code_entry_point, storage=storage_entry_point) - if opcode == Op.CALL - else pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=code_entry_point, - max_stack_height=4, - storage=storage_entry_point, - ) - ] - ) - ) - ) - - tx = Transaction( - to=address_entry_point, gas_limit=2_000_000, sender=sender - ) - - post = { - address_entry_point: Account( - storage={ - slot_eof_target_call_status: LEGACY_CALL_SUCCESS, - slot_eof_target_returndata: "0x00", - slot_eof_target_returndatasize: "0x01", - slot_legacy_target_call_status: LEGACY_CALL_FAILURE, - slot_legacy_target_returndatasize: "0x00", - } - if opcode == Op.CALL - else { - slot_eof_target_call_status: EXTCALL_SUCCESS, - slot_eof_target_returndata: "0x00", - slot_eof_target_returndatasize: "0x01", - slot_legacy_target_call_status: EXTCALL_FAILURE, - slot_legacy_target_returndatasize: "0x00", - } - ) - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/__init__.py b/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/__init__.py deleted file mode 100644 index ee7f967e8f..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -""" -Test cases for EOF Data section access instructions for EIP-7480. - -EIP-7480 specifies instructions for accessing data stored in the dedicated -data section of the EOF format. Full specification: [EIP-7480: EOF - Data -section access instructions](https://eips.ethereum.org/EIPS/eip-7480). -Opcodes introduced: `DATALOAD` (`0xD0`), `DATALOADN` (`0xD1`), `DATASIZE` -(`0xD2`), `DATACOPY` (`0xD3`). -""" diff --git a/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/spec.py b/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/spec.py deleted file mode 100644 index d382a9ca43..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/spec.py +++ /dev/null @@ -1 +0,0 @@ -"""EOF V1 Constants used throughout all tests.""" diff --git a/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_code_validation.py b/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_code_validation.py deleted file mode 100644 index 76bdcbc65f..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_code_validation.py +++ /dev/null @@ -1,301 +0,0 @@ -"""EOF V1 Code Validation tests.""" - -import pytest -from execution_testing import EOFException, EOFTestFiller, Op -from execution_testing.test_types.eof.v1 import Container, Section -from execution_testing.test_types.eof.v1.constants import ( - MAX_INITCODE_SIZE, -) - -from .. 
import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7480.md" -REFERENCE_SPEC_VERSION = "3ee1334ef110420685f1c8ed63e80f9e1766c251" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - -smallest_runtime_subcontainer = Container( - name="Runtime Subcontainer", - sections=[ - Section.Code(code=Op.STOP), - ], -) - - -def container_name(c: Container) -> str: - """Return the name of the container for use in pytest ids.""" - if hasattr(c, "name") and c.name is not None: - return c.name - else: - return c.__class__.__name__ - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="empty_data_section", - sections=[ - Section.Code( - code=Op.ADDRESS + Op.POP + Op.STOP, - ), - Section.Data(data=""), - ], - ), - Container( - name="small_data_section", - sections=[ - Section.Code( - code=Op.ADDRESS + Op.POP + Op.STOP, - ), - Section.Data(data="1122334455667788" * 4), - ], - ), - pytest.param( - Container( - name="large_data_section", - sections=[ - Section.Code( - code=Op.ADDRESS + Op.POP + Op.STOP, - ), - Section.Data(data="1122334455667788" * 3 * 1024), - ], - ), - marks=pytest.mark.eof_test_only( - reason="initcode exceeds max size" - ), - ), - pytest.param( - Container( - name="max_data_section", - sections=[ - Section.Code(code=Op.STOP), - # Hits the 49152 bytes limit for the entire container - Section.Data( - data=b"\x00" - * ( - MAX_INITCODE_SIZE - - len(smallest_runtime_subcontainer) - ) - ), - ], - ), - marks=pytest.mark.eof_test_only( - reason="initcode exceeds max size" - ), - ), - Container( - name="DATALOADN_zero", - sections=[ - Section.Code( - code=Op.DATALOADN[0] + Op.POP + Op.STOP, - ), - Section.Data(data="1122334455667788" * 16), - ], - ), - Container( - name="DATALOADN_middle", - sections=[ - Section.Code( - code=Op.DATALOADN[16] + Op.POP + Op.STOP, - ), - Section.Data(data="1122334455667788" * 16), - ], - ), - Container( - name="DATALOADN_edge", - sections=[ - Section.Code( - code=Op.DATALOADN[128 - 32] + Op.POP + Op.STOP, - ), - Section.Data(data="1122334455667788" * 16), - ], - ), - ], - ids=container_name, -) -def test_valid_containers_with_data_section( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """Test EOF validation of valid containers with data sections.""" - assert container.validity_error is None, ( - f"Valid container with validity error: {container.validity_error}" - ) - eof_test( - container=container, - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="DATALOADN_0_empty_data", - sections=[ - Section.Code( - code=Op.DATALOADN[0] + Op.POP + Op.STOP, - ), - ], - validity_error=EOFException.INVALID_DATALOADN_INDEX, - ), - Container( - name="DATALOADN_max_empty_data", - sections=[ - Section.Code( - code=Op.DATALOADN[0xFFFF - 32] + Op.POP + Op.STOP, - ), - ], - validity_error=EOFException.INVALID_DATALOADN_INDEX, - ), - Container( - name="DATALOADN_1_over_data", - sections=[ - Section.Code( - code=Op.DATALOADN[1] + Op.POP + Op.STOP, - ), - Section.Data(b"\x00"), - ], - validity_error=EOFException.INVALID_DATALOADN_INDEX, - ), - Container( - name="DATALOADN_32_over_data", - sections=[ - Section.Code( - code=Op.DATALOADN[32] + Op.POP + Op.STOP, - ), - Section.Data(b"\xda" * 32), - ], - validity_error=EOFException.INVALID_DATALOADN_INDEX, - ), - Container( - name="DATALOADN_0_data_31", - sections=[ - Section.Code( - code=Op.DATALOADN[0] + Op.POP + Op.STOP, - ), - Section.Data(b"\xda" * 31), - ], - validity_error=EOFException.INVALID_DATALOADN_INDEX, - ), - Container( - name="DATALOADN_32_data_63", - sections=[ 
- Section.Code( - code=Op.DATALOADN[32] + Op.POP + Op.STOP, - ), - Section.Data(b"\xda" * 63), - ], - validity_error=EOFException.INVALID_DATALOADN_INDEX, - ), - Container( - name="DATALOADN_max_imm", - sections=[ - Section.Code( - code=Op.DATALOADN[0xFFFF] + Op.POP + Op.STOP, - ), - Section.Data(b"\xda" * 32), - ], - validity_error=EOFException.INVALID_DATALOADN_INDEX, - ), - Container( - name="DATALOADN_max_small_data", - sections=[ - Section.Code( - code=Op.DATALOADN[0xFFFF - 32] + Op.POP + Op.STOP, - ), - Section.Data(data="1122334455667788" * 16), - ], - validity_error=EOFException.INVALID_DATALOADN_INDEX, - ), - Container( - name="DATALOADN_max_half_data", - sections=[ - Section.Code( - code=Op.DATALOADN[0xFFFF - 32] + Op.POP + Op.STOP, - ), - Section.Data(data=("1122334455667788" * 4 * 1024)[2:]), - ], - validity_error=EOFException.INVALID_DATALOADN_INDEX, - ), - pytest.param( - Container( - name="data_section_over_container_limit", - sections=[ - Section.Code(code=Op.STOP), - # Over the 49152 bytes limit for the entire container - Section.Data( - data=(b"12345678" * 6 * 1024)[ - len(smallest_runtime_subcontainer) - 1 : - ] - ), - ], - validity_error=EOFException.CONTAINER_SIZE_ABOVE_LIMIT, - ), - marks=pytest.mark.eof_test_only( - reason="initcode exceeds max size" - ), - ), - ], - ids=container_name, -) -def test_invalid_containers_with_data_section( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """Test EOF validation of invalid containers with data sections.""" - assert container.validity_error is not None, ( - "Invalid container without validity error" - ) - eof_test( - container=container, - expect_exception=container.validity_error, - ) - - -@pytest.mark.parametrize( - "container", - [ - Container( - name="imm0", - sections=[ - Section.Code(Op.DATALOADN), - Section.Data(b"\xff" * 32), - ], - ), - Container( - name="imm1", - sections=[ - Section.Code(Op.DATALOADN + b"\x00"), - Section.Data(b"\xff" * 32), - ], - ), - Container( - name="imm_from_next_section", - sections=[ - Section.Code( - Op.CALLF[1] + Op.JUMPF[2], - max_stack_height=1, - ), - Section.Code( - Op.DATALOADN + b"\x00", - code_outputs=1, - ), - Section.Code( - Op.STOP, - ), - Section.Data(b"\xff" * 32), - ], - ), - ], - ids=container_name, -) -def test_dataloadn_truncated_immediate( - eof_test: EOFTestFiller, - container: Container, -) -> None: - """Test cases for DATALOADN instructions with truncated immediate bytes.""" - eof_test( - container=container, - expect_exception=EOFException.TRUNCATED_INSTRUCTION, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_data_opcodes.py b/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_data_opcodes.py deleted file mode 100644 index 041bf3a9bf..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_data_opcodes.py +++ /dev/null @@ -1,177 +0,0 @@ -"""Execution of DATA* opcodes within EOF V1 containers tests.""" - -import pytest -from execution_testing import ( - Account, - Alloc, - Environment, - EOFStateTestFiller, - Op, - StateTestFiller, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. 
import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7480.md" -REFERENCE_SPEC_VERSION = "3ee1334ef110420685f1c8ed63e80f9e1766c251" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.parametrize("index", [0, 1, 31, 32, 33, 63, 64]) -@pytest.mark.parametrize("suffix_len", [0, 1, 31, 32, 24000]) -def test_dataloadn( - eof_state_test: EOFStateTestFiller, index: int, suffix_len: int -) -> None: - """Basic tests for DATALOADN execution.""" - sentinel = ( - 0x8000000000000000000000000000000000000000000000000000000000000001 - ) - eof_state_test( - container=Container( - sections=[ - Section.Code( - Op.SSTORE(0, Op.DATALOADN[index]) + Op.STOP, - ), - Section.Data( - index * b"\xbe" - + sentinel.to_bytes(32, byteorder="big") - + suffix_len * b"\xaf" - ), - ], - ), - container_post=Account(storage={0: sentinel}), - ) - - -def create_data_test( - offset: int, datasize: int -) -> tuple[Container, dict[int, int]]: - """ - Generate data load operators test cases based on load offset and data - section size. - """ - data = b"".join( - i.to_bytes(length=2, byteorder="big") - for i in range(1, datasize // 2 + 1) - ) - assert len(data) == datasize - overhang = min(32, offset + 32 - datasize) - answer = ( - data[offset : offset + 32] - if overhang <= 0 - else data[offset:] + b"\x00" * overhang - ) - dataloadn_op = Op.DATALOADN[offset] if overhang <= 0 else Op.PUSH32[answer] - - return ( - Container( - sections=[ - Section.Code( - code=( - Op.CALLF[1] - + Op.CALLF[2] - + Op.CALLF[3] - + Op.CALLF[4] - + Op.SSTORE(0, 1) - + Op.STOP - ), - ), - Section.Code( - code=( - Op.DATALOAD(offset) + Op.PUSH1(1) + Op.SSTORE + Op.RETF - ), - code_inputs=0, - code_outputs=0, - ), - Section.Code( - code=(dataloadn_op + Op.PUSH1(2) + Op.SSTORE + Op.RETF), - code_inputs=0, - code_outputs=0, - ), - Section.Code( - code=(Op.DATASIZE + Op.PUSH1(3) + Op.SSTORE + Op.RETF), - code_inputs=0, - code_outputs=0, - ), - Section.Code( - code=( - Op.DATACOPY(0, offset, 32) - + Op.SSTORE(4, Op.MLOAD(0)) - + Op.RETF - ), - code_inputs=0, - code_outputs=0, - ), - Section.Data(data), - ], - ), - { - 0: 1, - 1: int.from_bytes(answer, byteorder="big"), - 2: int.from_bytes(answer, byteorder="big"), - 3: datasize, - 4: int.from_bytes(answer, byteorder="big"), - }, - ) - - -@pytest.mark.parametrize( - ["offset", "datasize"], - [ - pytest.param(0, 0, id="empty_zero"), - pytest.param(0, 2, id="short_zero"), - pytest.param(0, 32, id="exact_zero"), - pytest.param(0, 64, id="large_zero"), - pytest.param(32, 0, id="empty_32"), - pytest.param(32, 34, id="short_32"), - pytest.param(32, 64, id="exact_32"), - pytest.param(32, 96, id="large_32"), - pytest.param(0x5BFE, 0, id="empty_23k"), - pytest.param(0x5BFE, 0x5C00, id="short_23k"), - pytest.param(0x5BE0, 0x5D00, id="exact_23k"), - pytest.param(0x2345, 0x5C00, id="large_23k"), - pytest.param(2**16 - 1, 32, id="u16_max"), - pytest.param(2**16, 32, id="u16_max_plus_1"), - pytest.param(2**32 - 1, 32, id="u32_max"), - pytest.param(2**32, 32, id="u32_max_plus_1"), - pytest.param(2**64 - 1, 32, id="u64_max"), - pytest.param(2**64, 32, id="u64_max_plus_1"), - ], -) -def test_data_section_succeed( - state_test: StateTestFiller, - pre: Alloc, - offset: int, - datasize: int, -) -> None: - """Test simple contracts that simply expect data section to succeed.""" - env = Environment() - - (container, expected_storage) = create_data_test(offset, datasize) - callee_contract = pre.deploy_contract(code=container) - entry_point = pre.deploy_contract( - code=Op.SSTORE(0, Op.DELEGATECALL(Op.GAS, 
callee_contract, 0, 0, 0, 0)) - + Op.STOP() - ) - sender = pre.fund_eoa() - - tx = Transaction( - to=entry_point, - gas_limit=50000000, - gas_price=10, - protected=False, - data="", - sender=sender, - ) - - post = {entry_point: Account(storage=expected_storage)} - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_datacopy_memory_expansion.py b/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_datacopy_memory_expansion.py deleted file mode 100644 index 746cd4c64b..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_datacopy_memory_expansion.py +++ /dev/null @@ -1,338 +0,0 @@ -"""Memory expansion tests for DATACOPY.""" - -from typing import Mapping, Tuple - -import pytest -from execution_testing import ( - Account, - Address, - Alloc, - Bytecode, - Environment, - Fork, - Op, - StateTestFiller, - Storage, - Transaction, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. import EOF_FORK_NAME - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7480.md" -REFERENCE_SPEC_VERSION = "3ee1334ef110420685f1c8ed63e80f9e1766c251" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.fixture -def callee_bytecode( - dest: int, src: int, length: int, data_section: bytes -) -> Container: - """Callee performs a single datacopy operation and then returns.""" - bytecode = Bytecode() - - # Copy the initial memory - bytecode += Op.CALLDATACOPY(0x00, 0x00, Op.CALLDATASIZE()) - - # Pushes for the return operation - bytecode += Op.PUSH1(0x00) + Op.PUSH1(0x00) - - # Perform the datacopy operation - bytecode += Op.DATACOPY(dest, src, length) - - bytecode += Op.RETURN - - return Container( - sections=[Section.Code(code=bytecode), Section.Data(data=data_section)] - ) - - -@pytest.fixture -def subcall_exact_cost( - fork: Fork, - initial_memory: bytes, - dest: int, - length: int, -) -> int: - """ - Return exact cost of the subcall, based on the initial memory and the - length of the copy. - """ - cost_memory_bytes = fork.memory_expansion_gas_calculator() - - datacopy_cost = 3 - datacopy_cost += 3 * ((length + 31) // 32) - if length > 0 and dest + length > len(initial_memory): - datacopy_cost += cost_memory_bytes( - new_bytes=dest + length, previous_bytes=len(initial_memory) - ) - - calldatacopy_cost = 3 - calldatacopy_cost += 3 * ((len(initial_memory) + 31) // 32) - calldatacopy_cost += cost_memory_bytes(new_bytes=len(initial_memory)) - - pushes_cost = 3 * 7 - calldatasize_cost = 2 - return datacopy_cost + calldatacopy_cost + pushes_cost + calldatasize_cost - - -@pytest.fixture -def bytecode_storage( - subcall_exact_cost: int, - successful: bool, - memory_expansion_address: Address, -) -> Tuple[Bytecode, Storage.StorageDictType]: - """ - Prepare bytecode and storage for the test, based on the expected result of - the subcall (whether it succeeds or fails depending on the length of the - memory expansion). 
- """ - bytecode = Bytecode() - storage = {} - - # Pass on the calldata - bytecode += Op.CALLDATACOPY(0x00, 0x00, Op.CALLDATASIZE()) - - subcall_gas = subcall_exact_cost if successful else subcall_exact_cost - 1 - - # Perform the subcall and store a one in the result location - bytecode += Op.SSTORE( - Op.CALL( - subcall_gas, - memory_expansion_address, - 0, - 0, - Op.CALLDATASIZE(), - 0, - 0, - ), - 1, - ) - storage[int(successful)] = 1 - - return (bytecode, storage) - - -@pytest.fixture -def tx_max_fee_per_gas() -> int: # noqa: D103 - return 7 - - -@pytest.fixture -def block_gas_limit() -> int: # noqa: D103 - return 100_000_000 - - -@pytest.fixture -def tx_gas_limit( # noqa: D103 - subcall_exact_cost: int, - block_gas_limit: int, -) -> int: - return min(max(500_000, subcall_exact_cost * 2), block_gas_limit) - - -@pytest.fixture -def env( # noqa: D103 - block_gas_limit: int, -) -> Environment: - return Environment(gas_limit=block_gas_limit) - - -@pytest.fixture -def caller_address( # noqa: D103 - pre: Alloc, bytecode_storage: Tuple[bytes, Storage.StorageDictType] -) -> Address: - return pre.deploy_contract(code=bytecode_storage[0]) - - -@pytest.fixture -def memory_expansion_address(pre: Alloc, callee_bytecode: bytes) -> Address: # noqa: D103 - return pre.deploy_contract(code=callee_bytecode) - - -@pytest.fixture -def sender(pre: Alloc, tx_max_fee_per_gas: int, tx_gas_limit: int) -> Address: # noqa: D103 - return pre.fund_eoa(tx_max_fee_per_gas * tx_gas_limit) - - -@pytest.fixture -def tx( # noqa: D103 - sender: Address, - caller_address: Address, - initial_memory: bytes, - tx_max_fee_per_gas: int, - tx_gas_limit: int, -) -> Transaction: - return Transaction( - sender=sender, - to=caller_address, - data=initial_memory, - gas_limit=tx_gas_limit, - max_fee_per_gas=tx_max_fee_per_gas, - max_priority_fee_per_gas=0, - ) - - -@pytest.fixture -def post( # noqa: D103 - caller_address: Address, - bytecode_storage: Tuple[bytes, Storage.StorageDictType], -) -> Mapping: - return { - caller_address: Account(storage=bytecode_storage[1]), - } - - -@pytest.mark.parametrize( - "dest,src,length", - [ - (0x00, 0x00, 0x01), - (0x100, 0x00, 0x01), - (0x1F, 0x00, 0x01), - (0x20, 0x00, 0x01), - (0x1000, 0x00, 0x01), - (0x1000, 0x00, 0x40), - (0x00, 0x00, 0x00), - (2**256 - 1, 0x00, 0x00), - (0x00, 2**256 - 1, 0x00), - (2**256 - 1, 2**256 - 1, 0x00), - ], - ids=[ - "single_byte_expansion", - "single_byte_expansion_2", - "single_byte_expansion_word_boundary", - "single_byte_expansion_word_boundary_2", - "multi_word_expansion", - "multi_word_expansion_2", - "zero_length_expansion", - "huge_dest_zero_length", - "huge_src_zero_length", - "huge_dest_huge_src_zero_length", - ], -) -@pytest.mark.parametrize("successful", [True, False]) -@pytest.mark.parametrize( - "initial_memory", - [ - bytes(range(0x00, 0x100)), - bytes(), - ], - ids=[ - "from_existent_memory", - "from_empty_memory", - ], -) -@pytest.mark.parametrize( - "data_section", - [ - bytes(), - b"\xfc", - bytes(range(0x00, 0x20)), - bytes(range(0x00, 0x100)), - ], - ids=[ - "empty_data_section", - "byte_data_section", - "word_data_section", - "large_data_section", - ], -) -def test_datacopy_memory_expansion( - state_test: StateTestFiller, - env: Environment, - pre: Alloc, - post: Mapping[str, Account], - tx: Transaction, -) -> None: - """ - Perform DATACOPY operations that expand the memory, and verify the gas it - costs to do so. 
- """ - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "dest,src,length", - [ - pytest.param( - 2**256 - 1, 0x00, 0x01, id="max_dest_single_byte_expansion" - ), - pytest.param( - 2**256 - 2, - 0x00, - 0x01, - id="max_dest_minus_one_single_byte_expansion", - ), - pytest.param( - 2**255 - 1, 0x00, 0x01, id="half_max_dest_single_byte_expansion" - ), - pytest.param(0x00, 0x00, 2**256 - 1, id="max_length_expansion"), - pytest.param( - 0x00, 0x00, 2**256 - 2, id="max_length_minus_one_expansion" - ), - pytest.param(0x00, 0x00, 2**255 - 1, id="half_max_length_expansion"), - pytest.param(0x1FFFF20, 0x00, 0x01, id="32-bit-mem-cost_offset"), - pytest.param(0x2D412E0, 0x00, 0x01, id="33-bit-mem-cost_offset"), - pytest.param(0x00, 0x00, 0x1FFFF20, id="32-bit-mem-cost_size"), - pytest.param(0x00, 0x00, 0x2D412E0, id="33-bit-mem-cost_size"), - pytest.param(0x1FFFFFFFF20, 0x00, 0x01, id="64-bit-mem-cost_offset"), - pytest.param(0x2D413CCCF00, 0x00, 0x01, id="65-bit-mem-cost_offset"), - pytest.param(0x00, 0x00, 0x1FFFFFFFF20, id="64-bit-mem-cost_size"), - pytest.param(0x00, 0x00, 0x2D413CCCF00, id="65-bit-mem-cost_size"), - ], -) -@pytest.mark.parametrize( - "subcall_exact_cost", - [2**128 - 1], - ids=[""], -) # Limit subcall gas, otherwise it would be impossibly large -@pytest.mark.parametrize("successful", [False]) -@pytest.mark.parametrize( - "initial_memory", - [ - bytes(range(0x00, 0x100)), - bytes(), - ], - ids=[ - "from_existent_memory", - "from_empty_memory", - ], -) -@pytest.mark.parametrize( - "data_section", - [ - bytes(), - b"\xfc", - bytes(range(0x00, 0x20)), - bytes(range(0x00, 0x100)), - ], - ids=[ - "empty_data_section", - "byte_data_section", - "word_data_section", - "large_data_section", - ], -) -def test_datacopy_huge_memory_expansion( - state_test: StateTestFiller, - env: Environment, - pre: Mapping[str, Account], - post: Mapping[str, Account], - tx: Transaction, -) -> None: - """ - Perform DATACOPY operations that expand the memory by huge amounts, and - verify that it correctly runs out of gas. - """ - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/__init__.py b/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/__init__.py deleted file mode 100644 index 4156e92a7a..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -""" -Test cases for EOF Contract Creation for EIP-7620. - -EIP-7620 replaces `CREATE` and `CREATE2` with `EOFCREATE` for deploying -contracts in the EOF format. - -Full specification: -[EIP-7620: EOF Contract Creation](https://eips.ethereum.org/EIPS/eip-7620). - -Opcodes introduced: `EOFCREATE` (`0xEC`), `RETURNCODE` (`0xEE`). - -EOFCREATE, RETURNCODE, and container tests. - -evmone tests not ported: -- create_tx_with_eof_initcode: This calls it invalid, it is now the way to - add EOF contacts to state -- eofcreate_extcall_returncode: Per the new initcode - mode tests you cannot have RETURNCODE in a - deployed contract -- eofcreate_dataloadn_referring_to_auxdata: covered by - tests.unscheduled.eip7480_data_section. - test_data_opcodes.test_data_section_succeed -- eofcreate_initcontainer_return: RETURN is banned in initcode containers -- eofcreate_initcontainer_stop: STOP is banned in initcode containers -- All TXCREATE tests. 
-""" diff --git a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/helpers.py b/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/helpers.py deleted file mode 100644 index ada21155ca..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/helpers.py +++ /dev/null @@ -1,90 +0,0 @@ -"""A collection of contracts used in 7620 EOF tests.""" - -import itertools - -from execution_testing import Op -from execution_testing.test_types.eof.v1 import Container, Section - -"""Storage addresses for common testing fields""" -_slot = itertools.count() -next(_slot) # don't use slot 0 -slot_code_worked = next(_slot) -slot_code_should_fail = next(_slot) -slot_create_address = next(_slot) -slot_calldata = next(_slot) -slot_call_result = next(_slot) -slot_returndata = next(_slot) -slot_returndata_size = next(_slot) -slot_max_depth = next(_slot) -slot_call_or_create = next(_slot) -slot_counter = next(_slot) -slot_data_load = next(_slot) -slot_all_subcall_gas_gone = next(_slot) -slot_a = next(_slot) -slot_b = next(_slot) - -slot_last_slot = next(_slot) - -value_code_worked = 0x2015 -value_canary_should_not_change = 0x2019 -value_canary_to_be_overwritten = 0x2009 -value_long_value = b"abcdefghijklmnopqrstuvwxyz123456" - -smallest_runtime_subcontainer = Container.Code( - code=Op.STOP, name="Runtime Subcontainer" -) - -smallest_initcode_subcontainer = Container( - name="Initcode Subcontainer", - sections=[ - Section.Code(code=Op.RETURNCODE[0](0, 0)), - Section.Container(container=smallest_runtime_subcontainer), - ], -) -smallest_initcode_subcontainer_gas = 2 * 3 - -aborting_container = Container.Code(Op.INVALID, name="Aborting Container") -reverting_container = Container.Code( - Op.REVERT(0, 0), name="Reverting Container" -) -expensively_reverting_container = Container.Code( - Op.SHA3(0, 32) + Op.REVERT(0, 0), name="Expensively Reverting Container" -) -expensively_reverting_container_gas = 2 * 3 + 30 + 3 + 6 + 2 * 3 -big_runtime_subcontainer = Container.Code( - Op.NOOP * 10000 + Op.STOP, name="Big Subcontainer" -) - -bigger_initcode_subcontainer_gas = 3 + 4 + 2 * 3 -bigger_initcode_subcontainer = Container( - name="Bigger Initcode Subcontainer", - sections=[ - Section.Code( - code=Op.RJUMPI[len(Op.RETURNCODE[0](0, 0))](1) - + Op.RETURNCODE[0](0, 0) - + Op.RETURNCODE[1](0, 0) - ), - Section.Container(container=smallest_runtime_subcontainer), - Section.Container(container=smallest_runtime_subcontainer), - ], -) - -data_runtime_container = smallest_runtime_subcontainer.copy() -data_runtime_container.sections.append(Section.Data("0x00")) - -data_initcode_subcontainer = Container( - name="Data Initcode Subcontainer", - sections=[ - Section.Code(code=Op.RETURNCODE[0](0, 0)), - Section.Container(container=data_runtime_container), - ], -) - -data_appending_initcode_subcontainer = Container( - name="Data Appending Initcode Subcontainer", - sections=[ - Section.Code(code=Op.RETURNCODE[0](0, 1)), - Section.Container(container=smallest_runtime_subcontainer), - ], -) -data_appending_initcode_subcontainer_gas = 2 * 3 + 3 diff --git a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/spec.py b/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/spec.py deleted file mode 100644 index dbaf719958..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/spec.py +++ /dev/null @@ -1,3 +0,0 @@ -"""EOF V1 Constants used throughout all tests.""" - -EOFCREATE_FAILURE = 0 diff --git a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_eofcreate.py 
b/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_eofcreate.py deleted file mode 100644 index a1f4a52dad..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_eofcreate.py +++ /dev/null @@ -1,892 +0,0 @@ -"""Test good and bad EOFCREATE cases.""" - -import pytest -from execution_testing import ( - Account, - Address, - Alloc, - Bytecode, - Environment, - EOFTestFiller, - Op, - StateTestFiller, - Storage, - Transaction, - compute_eofcreate_address, -) -from execution_testing.exceptions import EOFException -from execution_testing.test_types.eof.v1 import Container, Section - -from .. import EOF_FORK_NAME -from ..eip7069_extcall.spec import EXTCALL_SUCCESS -from .helpers import ( - slot_call_result, - slot_calldata, - slot_code_worked, - slot_create_address, - slot_data_load, - slot_last_slot, - slot_returndata_size, - smallest_initcode_subcontainer, - smallest_runtime_subcontainer, - value_canary_to_be_overwritten, - value_code_worked, - value_long_value, -) -from .spec import EOFCREATE_FAILURE - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7620.md" -REFERENCE_SPEC_VERSION = "52ddbcdddcf72dd72427c319f2beddeb468e1737" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -def test_simple_eofcreate( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """Verifies a simple EOFCREATE case.""" - env = Environment() - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.SSTORE(0, Op.EOFCREATE[0](0, 0, 0, 0)) + Op.STOP, - ), - Section.Container(container=smallest_initcode_subcontainer), - ], - ), - storage={0: 0xB17D}, # a canary to be overwritten - ) - # Storage in 0 should have the address, - post = { - contract_address: Account( - storage={0: compute_eofcreate_address(contract_address, 0)} - ) - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_eofcreate_then_dataload( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Verifies that a contract returned with auxdata does not overwrite the - parent data. - """ - env = Environment() - sender = pre.fund_eoa() - small_auxdata_container = Container( - sections=[ - Section.Code(code=Op.RETURNCODE[0](0, 32)), - Section.Container(container=smallest_runtime_subcontainer), - ], - ) - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.SSTORE(0, Op.EOFCREATE[0](0, 0, 0, 0)) - + Op.SSTORE(slot_data_load, Op.DATALOAD(0)) - + Op.STOP, - ), - Section.Container( - container=small_auxdata_container, - ), - Section.Data(data=value_long_value), - ], - ), - storage={slot_data_load: value_canary_to_be_overwritten}, - ) - - post = { - contract_address: Account( - storage={ - 0: compute_eofcreate_address(contract_address, 0), - slot_data_load: value_long_value, - } - ) - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_eofcreate_then_call( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Verifies a simple EOFCREATE case, and then calls the deployed contract. 
- """ - env = Environment() - callable_contract = Container( - sections=[ - Section.Code( - code=Op.SSTORE(slot_code_worked, value_code_worked) + Op.STOP, - ), - ] - ) - callable_contract_initcode = Container( - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, 0), - ), - Section.Container(container=callable_contract), - ] - ) - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.EXTCALL(Op.SLOAD(slot_create_address), 0, 0, 0) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - Section.Container(container=callable_contract_initcode), - ], - ) - ) - - callable_address = compute_eofcreate_address(contract_address, 0) - - # Storage in 0 should have the address, - # - post = { - contract_address: Account( - storage={ - slot_create_address: callable_address, - slot_code_worked: value_code_worked, - } - ), - callable_address: Account( - storage={slot_code_worked: value_code_worked} - ), - } - - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.parametrize( - "auxdata_bytes", - [ - pytest.param(b"", id="zero"), - pytest.param(b"aabbcc", id="short"), - pytest.param(b"aabbccddeef", id="one_byte_short"), - pytest.param(b"aabbccddeeff", id="exact"), - pytest.param(b"aabbccddeeffg", id="one_byte_long"), - pytest.param(b"aabbccddeeffgghhii", id="extra"), - ], -) -def test_auxdata_variations( - state_test: StateTestFiller, pre: Alloc, auxdata_bytes: bytes -) -> None: - """Verifies that auxdata bytes are correctly handled in RETURNCODE.""" - env = Environment() - auxdata_size = len(auxdata_bytes) - pre_deploy_header_data_size = 18 - pre_deploy_data = b"AABBCC" - deploy_success = ( - len(auxdata_bytes) + len(pre_deploy_data) - >= pre_deploy_header_data_size - ) - - runtime_subcontainer = Container( - name="Runtime Subcontainer with truncated data", - sections=[ - Section.Code(code=Op.STOP), - Section.Data( - data=pre_deploy_data, custom_size=pre_deploy_header_data_size - ), - ], - ) - - initcode_subcontainer = Container( - name="Initcode Subcontainer", - sections=[ - Section.Code( - code=Op.MSTORE(0, Op.PUSH32(auxdata_bytes.ljust(32, b"\0"))) - + Op.RETURNCODE[0](0, auxdata_size), - ), - Section.Container(container=runtime_subcontainer), - ], - ) - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.STOP, - ), - Section.Container(container=initcode_subcontainer), - ] - ), - storage={slot_create_address: value_canary_to_be_overwritten}, - ) - - # Storage in 0 should have the address, - post = { - contract_address: Account( - storage={ - slot_create_address: compute_eofcreate_address( - contract_address, 0 - ) - if deploy_success - else b"\0" - } - ) - } - - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_calldata(state_test: StateTestFiller, pre: Alloc) -> None: - """Verifies CALLDATA passing through EOFCREATE.""" - env = Environment() - - initcode_subcontainer = Container( - name="Initcode Subcontainer", - sections=[ - Section.Code( - code=Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.SSTORE(slot_calldata, Op.MLOAD(0)) - + 
Op.RETURNCODE[0](0, Op.CALLDATASIZE), - ), - Section.Container(container=smallest_runtime_subcontainer), - ], - ) - - calldata_size = 32 - calldata = b"\x45" * calldata_size - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.MSTORE(0, Op.PUSH32(calldata)) - + Op.SSTORE( - slot_create_address, - Op.EOFCREATE[0](input_size=calldata_size), - ) - + Op.STOP, - ), - Section.Container(container=initcode_subcontainer), - ] - ) - ) - - # deployed contract is smallest plus data - deployed_contract = Container( - name="deployed contract", - sections=[ - *smallest_runtime_subcontainer.sections, - Section.Data(data=calldata), - ], - ) - # factory contract Storage in 0 should have the created address, - # created contract storage in 0 should have the calldata - created_address = compute_eofcreate_address(contract_address, 0) - post = { - contract_address: Account( - storage={slot_create_address: created_address} - ), - created_address: Account( - code=deployed_contract, storage={slot_calldata: calldata} - ), - } - - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_eofcreate_in_initcode( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Verifies an EOFCREATE occurring within initcode creates that contract. - """ - nested_initcode_subcontainer = Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURNCODE[1](0, 0), - ), - Section.Container(container=smallest_initcode_subcontainer), - Section.Container(container=smallest_runtime_subcontainer), - ] - ) - - env = Environment() - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - Section.Container(container=nested_initcode_subcontainer), - ] - ) - ) - - outer_address = compute_eofcreate_address(contract_address, 0) - inner_address = compute_eofcreate_address(outer_address, 0) - post = { - contract_address: Account( - storage={ - slot_create_address: outer_address, - slot_code_worked: value_code_worked, - } - ), - outer_address: Account( - storage={ - slot_create_address: inner_address, - slot_code_worked: value_code_worked, - } - ), - } - - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_eofcreate_in_initcode_reverts( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Verifies an EOFCREATE occurring in an initcode is rolled back when the - initcode reverts. 
- """ - nested_initcode_subcontainer = Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.REVERT(0, 0), - ), - Section.Container(container=smallest_initcode_subcontainer), - ] - ) - - env = Environment() - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - Section.Container(container=nested_initcode_subcontainer), - ] - ), - storage={slot_create_address: value_canary_to_be_overwritten}, - ) - - outer_address = compute_eofcreate_address(contract_address, 0) - inner_address = compute_eofcreate_address(outer_address, 0) - post = { - contract_address: Account( - storage={ - slot_create_address: 0, - slot_code_worked: value_code_worked, - } - ), - outer_address: Account.NONEXISTENT, - inner_address: Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_return_data_cleared( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Verifies the return data is not reused from a extcall but is cleared upon - eofcreate. - """ - env = Environment() - value_return_canary = 0x4158675309 - value_return_canary_size = 5 - callable_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.MSTORE(0, value_return_canary) - + Op.RETURN(0, value_return_canary_size), - ) - ] - ) - ) - - slot_returndata_size_2 = slot_last_slot * 2 + slot_returndata_size - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_call_result, Op.EXTCALL(callable_address, 0, 0, 0) - ) - + Op.SSTORE(slot_returndata_size, Op.RETURNDATASIZE) - + Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.SSTORE(slot_returndata_size_2, Op.RETURNDATASIZE) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - Section.Container(container=smallest_initcode_subcontainer), - ], - ) - ) - - new_contract_address = compute_eofcreate_address(contract_address, 0) - post = { - contract_address: Account( - storage={ - slot_call_result: EXTCALL_SUCCESS, - slot_returndata_size: value_return_canary_size, - slot_create_address: new_contract_address, - slot_returndata_size_2: 0, - slot_code_worked: value_code_worked, - }, - nonce=2, - ), - callable_address: Account(nonce=1), - new_contract_address: Account(nonce=1), - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_address_collision( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """Tests address collision.""" - env = Environment( - gas_limit=300_000_000_000, - ) - - slot_create_address_2 = slot_last_slot * 2 + slot_create_address - slot_create_address_3 = slot_last_slot * 3 + slot_create_address - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.SSTORE( - slot_create_address_2, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.SSTORE(slot_create_address_3, Op.EOFCREATE[0](salt=1)) - + 
Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - Section.Container(container=smallest_initcode_subcontainer), - ], - ) - ) - salt_zero_address = compute_eofcreate_address(contract_address, 0) - salt_one_address = compute_eofcreate_address(contract_address, 1) - - # Hard-code address for collision, no other way to do this. - # We should mark tests that do this, and fail on unmarked tests. - pre[salt_one_address] = Account(balance=1, nonce=1) - - post = { - contract_address: Account( - storage={ - slot_create_address: salt_zero_address, - # had an in-transaction collision - slot_create_address_2: EOFCREATE_FAILURE, - # had a pre-existing collision - slot_create_address_3: EOFCREATE_FAILURE, - slot_code_worked: value_code_worked, - } - ) - } - - # Multiple create fails is expensive, use an absurd amount of gas - tx = Transaction( - to=contract_address, - gas_limit=300_000_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_eofcreate_revert_eof_returndata( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Verifies the return data is not being deployed, even if happens to be valid - EOF. - """ - env = Environment() - code_reverts_with_calldata = Container( - name="Initcode Subcontainer reverting with its calldata", - sections=[ - Section.Code( - code=Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.REVERT(0, Op.CALLDATASIZE), - ), - ], - ) - - sender = pre.fund_eoa() - salt = 0 - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.SSTORE( - slot_create_address, - Op.EOFCREATE[0](salt=salt, input_size=Op.CALLDATASIZE), - ) - + Op.SSTORE(slot_returndata_size, Op.RETURNDATASIZE) - + Op.STOP, - ), - Section.Container(container=code_reverts_with_calldata), - ], - ), - storage={slot_create_address: value_canary_to_be_overwritten}, - ) - eof_create_address = compute_eofcreate_address(contract_address, salt) - - post = { - contract_address: Account( - storage={ - slot_create_address: 0, - slot_returndata_size: len(smallest_runtime_subcontainer), - }, - ), - eof_create_address: Account.NONEXISTENT, - } - - tx = Transaction( - to=contract_address, - gas_limit=1_000_000, - sender=sender, - # Simplest possible valid EOF container, which is going to be - # revert-returned from initcode and must not end up being deployed. 
- data=smallest_runtime_subcontainer, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.parametrize("index", [0, 1, 255], ids=lambda x: x) -def test_eofcreate_invalid_index( - eof_test: EOFTestFiller, - index: int, -) -> None: - """EOFCREATE referring non-existent container section index.""" - container = Container.Code(code=Op.EOFCREATE[index](0, 0, 0, 0) + Op.STOP) - if index != 0: - container.sections.append( - Section.Container(container=Container.Code(Op.INVALID)) - ) - - eof_test( - container=container, - expect_exception=EOFException.INVALID_CONTAINER_SECTION_INDEX, - ) - - -def test_eofcreate_invalid_truncated_immediate( - eof_test: EOFTestFiller, -) -> None: - """EOFCREATE instruction with missing immediate byte.""" - eof_test( - container=Container( - sections=[ - Section.Code(Op.PUSH0 * 4 + Op.EOFCREATE), - Section.Container(Container.Code(Op.INVALID)), - ], - ), - expect_exception=EOFException.TRUNCATED_INSTRUCTION, - ) - - -@pytest.mark.parametrize( - ["data_len", "data_section_size"], - [ - (0, 1), - (0, 0xFFFF), - (2, 3), - (2, 0xFFFF), - ], -) -def test_eofcreate_truncated_container( - eof_test: EOFTestFiller, - data_len: int, - data_section_size: int, -) -> None: - """ - EOFCREATE instruction targeting a container with truncated data section. - """ - assert data_len < data_section_size - eof_test( - container=Container( - sections=[ - Section.Code(Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP), - Section.Container( - Container( - sections=[ - Section.Code(Op.INVALID), - Section.Data( - b"\xda" * data_len, - custom_size=data_section_size, - ), - ], - ) - ), - ], - ), - expect_exception=EOFException.EOFCREATE_WITH_TRUNCATED_CONTAINER, - ) - - -@pytest.mark.parametrize( - ["destination_code", "expected_result"], - [ - pytest.param(Op.ADDRESS, "destination"), - pytest.param(Op.CALLER, "caller"), - pytest.param(Op.CALLVALUE, "eofcreate_value"), - pytest.param(Op.ORIGIN, "sender"), - pytest.param(Op.SELFBALANCE, "selfbalance"), - pytest.param(Op.BALANCE(Op.CALLER), "factorybalance"), - ], -) -def test_eofcreate_context( - state_test: StateTestFiller, - pre: Alloc, - destination_code: Bytecode, - expected_result: str, -) -> None: - """Test EOFCREATE's initcode context instructions.""" - env = Environment() - sender = pre.fund_eoa() - value = 0x1123 - eofcreate_value = 0x13 - - initcode = Container( - sections=[ - Section.Code( - Op.SSTORE(slot_call_result, destination_code) - + Op.RETURNCODE[0](0, 0) - ), - Section.Container(smallest_runtime_subcontainer), - ] - ) - - factory_contract = Container( - sections=[ - Section.Code( - Op.SSTORE(slot_code_worked, value_code_worked) - + Op.EOFCREATE[0](value=eofcreate_value) - + Op.STOP - ), - Section.Container(initcode), - ] - ) - factory_address = pre.deploy_contract(factory_contract) - - destination_contract_address = compute_eofcreate_address( - factory_address, 0 - ) - - tx = Transaction( - sender=sender, to=factory_address, gas_limit=200_000, value=value - ) - - expected_bytes: Address | int - if expected_result == "destination": - expected_bytes = destination_contract_address - elif expected_result == "caller": - expected_bytes = factory_address - elif expected_result == "sender": - expected_bytes = sender - elif expected_result == "eofcreate_value": - expected_bytes = eofcreate_value - elif expected_result == "selfbalance": - expected_bytes = eofcreate_value - elif expected_result == "factorybalance": - # Factory receives value from sender and passes on eofcreate_value as - # endowment. 
- expected_bytes = value - eofcreate_value - else: - raise TypeError("Unexpected expected_result", expected_result) - - calling_storage = { - slot_code_worked: value_code_worked, - } - destination_contract_storage = { - slot_call_result: expected_bytes, - } - - post = { - factory_address: Account( - storage=calling_storage, balance=value - eofcreate_value - ), - destination_contract_address: Account( - storage=destination_contract_storage, balance=eofcreate_value - ), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -def test_eofcreate_memory_context( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Verifies an EOFCREATE frame enjoys a separate EVM memory from its caller - frame. - """ - env = Environment() - destination_storage = Storage() - contract_storage = Storage() - initcontainer = Container( - sections=[ - Section.Code( - Op.SSTORE( - destination_storage.store_next(value_code_worked), - value_code_worked, - ) - + Op.SSTORE(destination_storage.store_next(0), Op.MSIZE()) - + Op.SSTORE(destination_storage.store_next(0), Op.MLOAD(0)) - + Op.MSTORE(0, 2) - + Op.MSTORE(32, 2) - + Op.RETURNCODE[0](0, 0) - ), - Section.Container(smallest_runtime_subcontainer), - ] - ) - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - Op.SSTORE( - contract_storage.store_next(value_code_worked), - value_code_worked, - ) - + Op.MSTORE(0, 1) - + Op.EOFCREATE[0](0, 0, 0, 0) - + Op.SSTORE(contract_storage.store_next(32), Op.MSIZE()) - + Op.SSTORE(contract_storage.store_next(1), Op.MLOAD(0)) - + Op.SSTORE(contract_storage.store_next(0), Op.MLOAD(32)) - + Op.STOP, - ), - Section.Container(initcontainer), - ], - ), - ) - destination_contract_address = compute_eofcreate_address( - contract_address, 0 - ) - post = { - contract_address: Account(storage=contract_storage), - destination_contract_address: Account(storage=destination_storage), - } - tx = Transaction( - to=contract_address, - gas_limit=200_000, - sender=pre.fund_eoa(), - ) - state_test(env=env, pre=pre, post=post, tx=tx) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_eofcreate_failures.py b/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_eofcreate_failures.py deleted file mode 100644 index 6e89a54b24..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_eofcreate_failures.py +++ /dev/null @@ -1,927 +0,0 @@ -"""Test good and bad EOFCREATE cases.""" - -import pytest -from execution_testing import ( - Account, - Address, - Alloc, - Environment, - Op, - StateTestFiller, - Transaction, - compute_eofcreate_address, -) -from execution_testing.test_types.eof.v1 import Container, Section -from execution_testing.test_types.eof.v1.constants import ( - MAX_BYTECODE_SIZE, - MAX_INITCODE_SIZE, -) - -from .. 
import EOF_FORK_NAME -from ..eip7069_extcall.spec import ( - EXTCALL_FAILURE, - EXTCALL_REVERT, - LEGACY_CALL_FAILURE, -) -from .helpers import ( - aborting_container, - slot_call_or_create, - slot_call_result, - slot_code_should_fail, - slot_code_worked, - slot_counter, - slot_create_address, - slot_max_depth, - slot_returndata, - slot_returndata_size, - smallest_initcode_subcontainer, - smallest_runtime_subcontainer, - value_canary_should_not_change, - value_code_worked, -) -from .spec import EOFCREATE_FAILURE - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7620.md" -REFERENCE_SPEC_VERSION = "52ddbcdddcf72dd72427c319f2beddeb468e1737" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.parametrize( - "revert", - [ - pytest.param(b"", id="empty"), - pytest.param(b"\x08\xc3\x79\xa0", id="Error(string)"), - ], -) -def test_initcode_revert( - state_test: StateTestFiller, pre: Alloc, revert: bytes -) -> None: - """Verifies proper handling of REVERT in initcode.""" - env = Environment() - revert_size = len(revert) - - initcode_subcontainer = Container( - name="Initcode Subcontainer that reverts", - sections=[ - Section.Code( - code=Op.MSTORE(0, Op.PUSH32(revert)) - + Op.REVERT(32 - revert_size, revert_size), - ), - ], - ) - - factory_contract = Container( - name="factory contract", - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.SSTORE(slot_returndata_size, Op.RETURNDATASIZE) - + Op.RETURNDATACOPY( - Op.SUB(32, Op.RETURNDATASIZE), 0, Op.RETURNDATASIZE - ) - + Op.SSTORE(slot_returndata, Op.MLOAD(0)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - Section.Container(container=initcode_subcontainer), - ], - ) - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract(code=factory_contract) - - post = { - contract_address: Account( - storage={ - slot_create_address: EOFCREATE_FAILURE, - slot_returndata_size: revert_size, - slot_returndata: revert, - slot_code_worked: value_code_worked, - } - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_initcode_aborts( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """Verifies correct handling of a halt in EOF initcode.""" - env = Environment() - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - Section.Container(container=aborting_container), - ] - ) - ) - # Storage in slot_create_address should not have the address, - post = { - contract_address: Account( - storage={ - slot_create_address: EOFCREATE_FAILURE, - slot_code_worked: value_code_worked, - } - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -""" -Size of the factory portion of test_eofcreate_deploy_sizes, but as the runtime -code is dynamic, we have to use a pre-calculated size -""" -factory_size = 78 - - -@pytest.mark.parametrize( - "target_deploy_size", - [ - pytest.param(0x4000, id="large"), - pytest.param(MAX_BYTECODE_SIZE, id="max"), - 
pytest.param(MAX_BYTECODE_SIZE + 1, id="overmax"), - pytest.param(MAX_INITCODE_SIZE - factory_size, id="initcodemax"), - pytest.param( - MAX_INITCODE_SIZE - factory_size + 1, - id="initcodeovermax", - marks=pytest.mark.skip("Oversized container in pre-alloc"), - ), - pytest.param( - 0xFFFF - factory_size, - id="64k-1", - marks=pytest.mark.skip("Oversized container in pre-alloc"), - ), - ], -) -def test_eofcreate_deploy_sizes( - state_test: StateTestFiller, - pre: Alloc, - target_deploy_size: int, -) -> None: - """ - Verifies a mix of runtime contract sizes mixing success and multiple size - failure modes. - """ - env = Environment() - - runtime_container = Container( - sections=[ - Section.Code( - code=Op.JUMPDEST - * (target_deploy_size - len(smallest_runtime_subcontainer)) - + Op.STOP, - ), - ] - ) - - initcode_subcontainer = Container( - name="Initcode Subcontainer", - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, 0), - ), - Section.Container(container=runtime_container), - ], - ) - - factory_container = Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - Section.Container(container=initcode_subcontainer), - ] - ) - - assert factory_size == (len(factory_container) - len(runtime_container)), ( - "factory_size is wrong, expected factory_size is %d, calculated is %d" - % ( - factory_size, - len(factory_container) - len(runtime_container), - ) - ) - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract(code=factory_container) - # Storage in 0 should have the address, Storage 1 is a canary of 1 to make - # sure it tried to execute, which also covers cases of data+code being - # greater than initcode_size_max, which is allowed. - success = target_deploy_size <= MAX_BYTECODE_SIZE - post = { - contract_address: Account( - storage={ - slot_create_address: compute_eofcreate_address( - contract_address, 0 - ) - if success - else EOFCREATE_FAILURE, - slot_code_worked: value_code_worked, - } - ), - compute_eofcreate_address(contract_address, 0): Account() - if success - else Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.parametrize( - "target_deploy_size", - [ - pytest.param(0x4000, id="large"), - pytest.param(MAX_BYTECODE_SIZE, id="max"), - pytest.param(MAX_BYTECODE_SIZE + 1, id="overmax"), - pytest.param(MAX_INITCODE_SIZE - factory_size, id="initcodemax"), - pytest.param( - MAX_INITCODE_SIZE - factory_size + 1, id="initcodeovermax" - ), - pytest.param(0xFFFF - factory_size, id="64k-1"), - ], -) -@pytest.mark.skip("Not implemented") -def test_eofcreate_deploy_sizes_tx( - state_test: StateTestFiller, - target_deploy_size: int, -) -> None: - """ - Verifies a mix of runtime contract sizes mixing success and multiple size - failure modes where the initcontainer is included in a transaction. 
- """ - raise NotImplementedError("Not implemented") - - -@pytest.mark.parametrize( - "auxdata_size", - [ - pytest.param( - MAX_BYTECODE_SIZE - len(smallest_runtime_subcontainer), - id="maxcode", - ), - pytest.param( - MAX_BYTECODE_SIZE - len(smallest_runtime_subcontainer) + 1, - id="overmaxcode", - ), - pytest.param(0x10000 - 60, id="almost64k"), - pytest.param(0x10000 - 1, id="64k-1"), - pytest.param(0x10000, id="64k"), - pytest.param(0x10000 + 1, id="over64k"), - ], -) -def test_auxdata_size_failures( - state_test: StateTestFiller, pre: Alloc, auxdata_size: int -) -> None: - """ - Exercises a number of auxdata size violations, and one maxcode success. - """ - env = Environment() - auxdata_bytes = b"a" * auxdata_size - - initcode_subcontainer = Container( - name="Initcode Subcontainer", - sections=[ - Section.Code( - code=Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.RETURNCODE[0](0, Op.CALLDATASIZE), - ), - Section.Container(container=smallest_runtime_subcontainer), - ], - ) - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.SSTORE( - slot_create_address, - Op.EOFCREATE[0](input_size=Op.CALLDATASIZE), - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - Section.Container(container=initcode_subcontainer), - ] - ) - ) - - deployed_container_size = len(smallest_runtime_subcontainer) + auxdata_size - - # Storage in 0 will have address in first test, 0 in all other cases - # indicating failure - # - # Storage 1 in 1 is a canary to see if EOFCREATE opcode - # halted - success = deployed_container_size <= MAX_BYTECODE_SIZE - post = { - contract_address: Account( - storage={ - slot_create_address: compute_eofcreate_address( - contract_address, 0 - ) - if success - else 0, - slot_code_worked: value_code_worked, - } - ), - compute_eofcreate_address(contract_address, 0): Account() - if success - else Account.NONEXISTENT, - } - - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - gas_price=10, - protected=False, - sender=sender, - data=auxdata_bytes, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.parametrize( - "value", - [ - pytest.param(1, id="1_wei"), - pytest.param(10**9, id="1_gwei"), - ], -) -def test_eofcreate_insufficient_stipend( - state_test: StateTestFiller, - pre: Alloc, - value: int, -) -> None: - """ - Exercises an EOFCREATE that fails because the calling account does not have - enough ether to pay the stipend. 
- """ - env = Environment() - initcode_container = Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](value=value) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - Section.Container(container=smallest_initcode_subcontainer), - ] - ) - sender = pre.fund_eoa(10**11) - contract_address = pre.deploy_contract( - code=initcode_container, - balance=value - 1, - ) - # create will fail but not trigger a halt, so canary at storage 1 should be - # set - # - # also validate target created contract fails - post = { - contract_address: Account( - storage={ - slot_create_address: EOFCREATE_FAILURE, - slot_code_worked: value_code_worked, - } - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_insufficient_initcode_gas( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Exercises an EOFCREATE when there is not enough gas for the initcode - charge. - """ - env = Environment() - - initcode_data = b"a" * 0x5000 - initcode_container = Container( - name="Large Initcode Subcontainer", - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, 0), - ), - Section.Container(container=smallest_runtime_subcontainer), - Section.Data(data=initcode_data), - ], - ) - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.SSTORE(slot_code_should_fail, value_code_worked) - + Op.STOP, - ), - Section.Container(container=initcode_container), - ], - ), - storage={ - slot_create_address: value_canary_should_not_change, - slot_code_should_fail: value_canary_should_not_change, - }, - ) - # enough gas for everything but EVM opcodes and EIP-150 reserves - gas_limit = 21_000 + 32_000 + (len(initcode_data) + 31) // 32 * 6 - # out_of_gas is triggered, so canary won't set value - # also validate target created contract fails - post = { - contract_address: Account( - storage={ - slot_create_address: value_canary_should_not_change, - slot_code_should_fail: value_canary_should_not_change, - }, - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=gas_limit, - gas_price=10, - protected=False, - sender=sender, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_insufficient_gas_memory_expansion( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Exercises EOFCREATE when the memory for auxdata has not been expanded but - is requested. 
- """ - env = Environment() - - auxdata_size = 0x5000 - initcode_container = Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, - Op.EOFCREATE[0](input_size=auxdata_size), - ) - + Op.SSTORE(slot_code_should_fail, slot_code_worked) - + Op.STOP, - ), - Section.Container(container=smallest_initcode_subcontainer), - ], - ) - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=initcode_container, - storage={ - slot_create_address: value_canary_should_not_change, - slot_code_should_fail: value_canary_should_not_change, - }, - ) - # enough gas for everything but EVM opcodes and EIP-150 reserves - initcode_container_words = (len(initcode_container) + 31) // 32 - auxdata_size_words = (auxdata_size + 31) // 32 - gas_limit = ( - 21_000 - + 32_000 - + initcode_container_words * 6 - + 3 * auxdata_size_words - + auxdata_size_words * auxdata_size_words // 512 - ) - # out_of_gas is triggered, so canary won't set value - # also validate target created contract fails - post = { - contract_address: Account( - storage={ - slot_create_address: value_canary_should_not_change, - slot_code_should_fail: value_canary_should_not_change, - }, - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=gas_limit, - gas_price=10, - protected=False, - sender=sender, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_insufficient_returncode_auxdata_gas( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Exercises a RETURNCODE when there is not enough gas for the initcode - charge. - """ - env = Environment() - - auxdata_size = 0x5000 - initcode_container = Container( - name="Large Initcode Subcontainer", - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, auxdata_size), - ), - Section.Container(container=smallest_runtime_subcontainer), - ], - ) - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.SSTORE(slot_code_should_fail, value_code_worked) - + Op.STOP, - ), - Section.Container(container=initcode_container), - ], - ), - storage={ - slot_create_address: value_canary_should_not_change, - slot_code_should_fail: value_canary_should_not_change, - }, - ) - # enough gas for everything but EVM opcodes and EIP-150 reserves - initcode_container_words = (len(initcode_container) + 31) // 32 - auxdata_size_words = (auxdata_size + 31) // 32 - gas_limit = ( - 21_000 - + 32_000 - + initcode_container_words * 6 - + 3 * auxdata_size_words - + auxdata_size_words * auxdata_size_words // 512 - ) - # out_of_gas is triggered, so canary won't set value - # also validate target created contract fails - post = { - contract_address: Account( - storage={ - slot_create_address: value_canary_should_not_change, - slot_code_should_fail: value_canary_should_not_change, - }, - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=gas_limit, - gas_price=10, - protected=False, - sender=sender, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.parametrize( - "opcode", - [ - Op.STATICCALL, - Op.EXTSTATICCALL, - ], -) -@pytest.mark.parametrize("endowment", [0, 1]) # included to verify static flag -# check comes first -@pytest.mark.parametrize( - "initcode", - [smallest_initcode_subcontainer, aborting_container], - ids=["working_initcode", 
"aborting_code"], -) -def test_static_flag_eofcreate( - state_test: StateTestFiller, - pre: Alloc, - opcode: Op, - endowment: int, - initcode: Container, -) -> None: - """Verifies correct handling of the static call flag with EOFCREATE.""" - env = Environment() - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.EOFCREATE[0](value=endowment) + Op.STOP, - ), - Section.Container(container=initcode), - ] - ) - ) - calling_code = ( - Op.SSTORE(slot_call_result, opcode(address=contract_address)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ) - calling_address = pre.deploy_contract( - Container.Code(calling_code) - if opcode == Op.EXTSTATICCALL - else calling_code - ) - - post = { - calling_address: Account( - storage={ - slot_call_result: EXTCALL_FAILURE - if opcode == Op.EXTSTATICCALL - else LEGACY_CALL_FAILURE, - slot_code_worked: value_code_worked, - } - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=calling_address, - gas_limit=10_000_000, - protected=False, - sender=sender, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -magic_value_call = 0xCA11 -magic_value_create = 0xCC12EA7E - - -@pytest.mark.parametrize( - "opcode", - [ - Op.EXTCALL, - Op.EXTDELEGATECALL, - ], -) -@pytest.mark.parametrize( - "who_fails", - [magic_value_call, magic_value_create], - ids=["call_fails", "create_fails"], -) -@pytest.mark.pre_alloc_modify -def test_eof_eofcreate_msg_depth( - state_test: StateTestFiller, - pre: Alloc, - opcode: Op, - who_fails: int, -) -> None: - """ - Test EOFCREATE handles msg depth limit correctly (1024). - - NOTE: due to block gas limit and the 63/64th rule this limit is - unlikely to be hit on mainnet. - NOTE: See `tests/unscheduled/eip7692_eof_v1/eip7069_extcall/ - test_calls.py::test_eof_calls_msg_depth` for more explanations and - comments. Most notable deviation from that test is that here calls - and `EOFCREATE`s alternate in order to reach the max depth. - `who_fails` decides whether the failing depth 1024 will be on a call - or on an `EOFCREATE` to happen. - """ - # Not a precise gas_limit formula, but enough to exclude risk of gas - # causing the failure. 
- gas_limit = int(20000000 * (64 / 63) ** 1024) - env = Environment(gas_limit=gas_limit) - sender = pre.fund_eoa() - - callee_address = Address(0x5000) - - # Memory offsets layout: - # - 0 - input - msg depth - # - 32 - output - msg depth - # - 64 - output - call result - # - 96 - output - magic value: create or call - returndatacopy_block = Op.RETURNDATACOPY(32, 0, 96) + Op.REVERT(32, 96) - deep_most_result_block = ( - Op.MSTORE(32, Op.ADD(Op.CALLDATALOAD(0), 1)) - + Op.MSTORE(64, Op.NOOP) - + Op.REVERT(32, 96) - ) - rjump_offset = len(returndatacopy_block) - - callee_code = Container( - sections=[ - Section.Code( - Op.MSTORE(0, Op.ADD(Op.CALLDATALOAD(0), 1)) - + Op.MSTORE(96, magic_value_create) - + Op.EOFCREATE[0](salt=Op.CALLDATALOAD(0), input_size=32) - + Op.RETURNDATASIZE - + Op.ISZERO - + Op.RJUMPI[rjump_offset] - + returndatacopy_block - + deep_most_result_block - ), - Section.Container( - Container.Code( - Op.MSTORE(0, Op.ADD(Op.CALLDATALOAD(0), 1)) - + Op.MSTORE(96, magic_value_call) - + opcode(address=callee_address, args_size=32) - + Op.RETURNDATASIZE - + Op.ISZERO - + Op.RJUMPI[rjump_offset] - + returndatacopy_block - + deep_most_result_block - ) - ), - ] - ) - - pre.deploy_contract(callee_code, address=callee_address) - - calling_contract_address = pre.deploy_contract( - Container.Code( - Op.MSTORE(0, Op.CALLDATALOAD(0)) - + opcode(address=callee_address, args_size=32) - + Op.SSTORE(slot_max_depth, Op.RETURNDATALOAD(0)) - + Op.SSTORE(slot_call_result, Op.RETURNDATALOAD(32)) - + Op.SSTORE(slot_call_or_create, Op.RETURNDATALOAD(64)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ) - ) - - # Only bumps the msg call depth "register" and forwards to the - # `calling_contract_address`. If it is used it makes the "failing" depth of - # 1024 to happen on EOFCREATE, instead of CALL. - passthrough_address = pre.deploy_contract( - Container.Code( - Op.MSTORE(0, 1) - + Op.EXTCALL(address=calling_contract_address, args_size=32) - + Op.STOP - ) - ) - - tx = Transaction( - sender=sender, - to=calling_contract_address - if who_fails == magic_value_call - else passthrough_address, - gas_limit=gas_limit, - data="", - ) - - calling_storage = { - slot_max_depth: 1024, - slot_code_worked: value_code_worked, - slot_call_result: EXTCALL_REVERT - if who_fails == magic_value_call - else EOFCREATE_FAILURE, - slot_call_or_create: who_fails, - } - - post = { - calling_contract_address: Account(storage=calling_storage), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -def test_reentrant_eofcreate( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Verifies a reentrant EOFCREATE case, where EIP-161 prevents conflict via - nonce bump. - """ - env = Environment() - # Calls into the factory contract with 1 as input. - reenter_code = Op.MSTORE(0, 1) + Op.EXTCALL( - address=Op.CALLDATALOAD(32), args_size=32 - ) - # Initcode: if given 0 as 1st word of input will call into the factory - # again. 2nd word of input is the address of the factory. - initcontainer = Container( - sections=[ - Section.Code( - Op.SSTORE(slot_counter, Op.ADD(Op.SLOAD(slot_counter), 1)) - + Op.CALLDATALOAD(0) - + Op.RJUMPI[len(reenter_code)] - + reenter_code - + Op.RETURNCODE[0](0, 0) - ), - Section.Container(smallest_runtime_subcontainer), - ] - ) - # Factory: - # Passes on its input into the initcode. - # It's 0 first time, 1 the second time. - # Saves the result of deployment in slot 0 first time, 1 the - # second time. 
- contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - Op.CALLDATACOPY(0, 0, 32) - + Op.MSTORE(32, Op.ADDRESS) - # 1st word - copied from input (reenter flag) - # 2nd word - `this.address` - + Op.SSTORE( - Op.CALLDATALOAD(0), Op.EOFCREATE[0](input_size=64) - ) - + Op.STOP, - ), - Section.Container(initcontainer), - ], - ), - storage={0: 0xB17D, 1: 0xB17D}, # a canary to be overwritten - ) - # Flow is: - # reenter flag 0 -> factory -> reenter flag 0 -> initcode -> - # reenter -> reenter flag 1 -> factory -> reenter flag 1 -> (!) initcode - # -> stop - # if the EIP-161 nonce bump is not implemented. - # - # If it is, it fails before second inicode marked (!). - # - # Storage in 0 should have the address from the outer EOFCREATE. - # Storage in 1 should have 0 from the inner EOFCREATE. For the created - # contract storage in `slot_counter` should be 1 as initcode - # executes only once. - post = { - contract_address: Account( - storage={ - 0: compute_eofcreate_address(contract_address, 0), - 1: 0, - } - ), - compute_eofcreate_address(contract_address, 0): Account( - nonce=1, - code=smallest_runtime_subcontainer, - storage={slot_counter: 1}, - ), - } - tx = Transaction( - to=contract_address, - gas_limit=500_000, - sender=pre.fund_eoa(), - ) - state_test(env=env, pre=pre, post=post, tx=tx) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_gas.py b/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_gas.py deleted file mode 100644 index de31a907ab..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_gas.py +++ /dev/null @@ -1,170 +0,0 @@ -"""Test good and bad EOFCREATE cases.""" - -import pytest -from execution_testing import ( - Alloc, - Environment, - Fork, - Op, - StateTestFiller, - compute_eofcreate_address, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. 
import EOF_FORK_NAME -from ..gas_test import gas_test -from .helpers import ( - aborting_container, - big_runtime_subcontainer, - bigger_initcode_subcontainer, - bigger_initcode_subcontainer_gas, - data_appending_initcode_subcontainer, - data_appending_initcode_subcontainer_gas, - data_initcode_subcontainer, - data_runtime_container, - expensively_reverting_container, - expensively_reverting_container_gas, - reverting_container, - slot_counter, - smallest_initcode_subcontainer, - smallest_initcode_subcontainer_gas, - smallest_runtime_subcontainer, -) - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7620.md" -REFERENCE_SPEC_VERSION = "52ddbcdddcf72dd72427c319f2beddeb468e1737" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - -EOFCREATE_GAS = 32000 - - -def make_factory(initcode: Container) -> Container: - """Wrap initcontainer into a minimal runtime container.""" - return Container( - name="Factory Subcontainer", - sections=[ - Section.Code(Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP), - Section.Container(initcode), - ], - ) - - -@pytest.mark.parametrize("value", [0, 1]) -@pytest.mark.parametrize("new_account", [True, False]) -@pytest.mark.parametrize( - "mem_expansion_bytes", - [0, 1, 32, 33], -) -@pytest.mark.parametrize( - ["initcode", "initcode_execution_cost", "runtime"], - [ - pytest.param( - smallest_initcode_subcontainer, - smallest_initcode_subcontainer_gas, - smallest_runtime_subcontainer, - id="smallest_code", - ), - pytest.param( - Container.Init(aborting_container), - smallest_initcode_subcontainer_gas, - aborting_container, - id="aborting_runtime", - ), - pytest.param( - reverting_container, - smallest_initcode_subcontainer_gas, - None, - id="reverting_initcode", - ), - pytest.param( - expensively_reverting_container, - expensively_reverting_container_gas, - None, - id="expensively_reverting_initcode", - ), - pytest.param( - Container.Init(big_runtime_subcontainer), - smallest_initcode_subcontainer_gas, - big_runtime_subcontainer, - id="big_runtime", - ), - pytest.param( - Container.Init(make_factory(smallest_initcode_subcontainer)), - smallest_initcode_subcontainer_gas, - make_factory(smallest_initcode_subcontainer), - id="nested_initcode", - ), - pytest.param( - bigger_initcode_subcontainer, - bigger_initcode_subcontainer_gas, - smallest_runtime_subcontainer, - id="bigger_initcode", - ), - pytest.param( - data_initcode_subcontainer, - smallest_initcode_subcontainer_gas, - data_runtime_container, - id="data_initcode", - ), - pytest.param( - data_appending_initcode_subcontainer, - data_appending_initcode_subcontainer_gas, - data_runtime_container, - id="data_appending_initcode", - ), - ], -) -def test_eofcreate_gas( - state_test: StateTestFiller, - pre: Alloc, - fork: Fork, - value: int, - new_account: bool, - mem_expansion_bytes: int, - initcode: Container, - initcode_execution_cost: int, - runtime: Container, -) -> None: - """Tests variations of EOFCREATE gas.""" - deployed_code_cost = 200 * len(runtime) if runtime else 0 - - subject_address = pre.fund_eoa(0) - - salt_addresses = [ - compute_eofcreate_address(subject_address, i) for i in range(4) - ] - - if not new_account: - for a in salt_addresses: - pre.fund_address(a, 1) - - # Using `TLOAD` / `TSTORE` to work around warm/cold gas differences. We - # need a counter to pick a distinct salt on each `EOFCREATE` and avoid - # running into address conflicts. 
- code_increment_counter = ( - Op.TLOAD(slot_counter) - + Op.DUP1 - + Op.TSTORE(slot_counter, Op.PUSH1(1) + Op.ADD) - ) - cost_memory_bytes = fork.memory_expansion_gas_calculator() - gas_test( - fork, - state_test, - Environment(), - pre, - setup_code=Op.PUSH32(value) - + Op.PUSH1(mem_expansion_bytes) - + Op.PUSH0 - + code_increment_counter, - subject_code=Op.EOFCREATE[0], - tear_down_code=Op.STOP, - cold_gas=EOFCREATE_GAS - + cost_memory_bytes(new_bytes=mem_expansion_bytes) - + initcode_execution_cost - + deployed_code_cost, - subject_subcontainer=initcode, - subject_balance=value * 4, - subject_address=subject_address, - oog_difference=initcode_execution_cost + deployed_code_cost + 1, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_legacy_eof_creates.py b/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_legacy_eof_creates.py deleted file mode 100644 index 90af4d8ac7..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_legacy_eof_creates.py +++ /dev/null @@ -1,265 +0,0 @@ -"""Test interactions between CREATE, CREATE2, and EOFCREATE.""" - -import pytest -from execution_testing import ( - Account, - Address, - Alloc, - Bytes, - Environment, - Op, - Opcodes, - StateTestFiller, - Transaction, - compute_create_address, -) -from execution_testing import Initcode as LegacyInitcode -from execution_testing.test_types.eof.v1 import Container - -from ....prague.eip7702_set_code_tx.spec import Spec -from .. import EOF_FORK_NAME -from .helpers import ( - slot_all_subcall_gas_gone, - slot_code_worked, - slot_create_address, - smallest_initcode_subcontainer, - smallest_runtime_subcontainer, - value_code_worked, -) -from .spec import EOFCREATE_FAILURE - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7620.md" -REFERENCE_SPEC_VERSION = "52ddbcdddcf72dd72427c319f2beddeb468e1737" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.parametrize( - "legacy_create_opcode", - [ - pytest.param(Op.CREATE, id="CREATE"), - pytest.param(Op.CREATE2, id="CREATE2"), - ], -) -@pytest.mark.parametrize( - "initcode", - [ - Bytes("0xEF00"), - Bytes("0xEF0001"), - pytest.param( - smallest_initcode_subcontainer, id="deploy_eof_initcontainer" - ), - pytest.param(smallest_runtime_subcontainer, id="deploy_eof_container"), - ], -) -def test_cross_version_creates_fail_light( - state_test: StateTestFiller, - pre: Alloc, - legacy_create_opcode: Opcodes, - initcode: Bytes | Container, -) -> None: - """ - Verifies that CREATE and CREATE2 cannot run EOF initcodes and fail early on - attempt. 
- """ - env = Environment() - - sender = pre.fund_eoa() - - tx_gas_limit = 10_000_000 - - contract_address = pre.deploy_contract( - code=Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.SSTORE( - slot_create_address, legacy_create_opcode(size=Op.CALLDATASIZE) - ) - # Approximates whether code until here consumed the 63/64th gas given - # to subcall - + Op.SSTORE( - slot_all_subcall_gas_gone, Op.LT(Op.GAS, tx_gas_limit // 64) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ) - - post = { - contract_address: Account( - storage={ - slot_create_address: EOFCREATE_FAILURE, - slot_code_worked: value_code_worked, - slot_all_subcall_gas_gone: 0, - }, - nonce=1, - ), - # Double check no accounts were created - compute_create_address( - address=contract_address, nonce=1 - ): Account.NONEXISTENT, - compute_create_address( - address=contract_address, - initcode=initcode, - salt=0, - opcode=Op.CREATE2, - ): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=tx_gas_limit, - sender=sender, - data=initcode, - ) - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "legacy_create_opcode", - [ - pytest.param(Op.CREATE, id="CREATE"), - pytest.param(Op.CREATE2, id="CREATE2"), - ], -) -@pytest.mark.parametrize( - "initcode", - [ - Bytes("0xEF"), - Bytes("0xEF01"), - Bytes("0xEF0101"), - Spec.delegation_designation(Address(0xAA)), - Bytes("0xEF02"), - ], -) -def test_cross_version_creates_fail_hard( - state_test: StateTestFiller, - pre: Alloc, - legacy_create_opcode: Opcodes, - initcode: Bytes, -) -> None: - """ - Verifies that CREATE and CREATE2 fail hard on attempt to run initcode - starting with `EF` but not `EF00`. - """ - env = Environment() - - sender = pre.fund_eoa() - - tx_gas_limit = 10_000_000 - - contract_address = pre.deploy_contract( - code=Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.SSTORE( - slot_create_address, legacy_create_opcode(size=Op.CALLDATASIZE) - ) - # Approximates whether code until here consumed the 63/64th gas given - # to subcall - + Op.SSTORE( - slot_all_subcall_gas_gone, Op.LT(Op.GAS, tx_gas_limit // 64) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ) - - post = { - contract_address: Account( - storage={ - slot_create_address: EOFCREATE_FAILURE, - slot_code_worked: value_code_worked, - slot_all_subcall_gas_gone: 1, - }, - nonce=2, - ), - # Double check no accounts were created - compute_create_address( - address=contract_address, nonce=1 - ): Account.NONEXISTENT, - compute_create_address( - address=contract_address, - initcode=initcode, - salt=0, - opcode=Op.CREATE2, - ): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=tx_gas_limit, - sender=sender, - data=initcode, - ) - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.parametrize( - "legacy_create_opcode", - [ - pytest.param(Op.CREATE, id="CREATE"), - pytest.param(Op.CREATE2, id="CREATE2"), - ], -) -@pytest.mark.parametrize( - "deploy_code", - [ - Bytes("0xEF"), - Bytes("0xEF00"), - Bytes("0xEF0001"), - Bytes("0xEF01"), - pytest.param( - smallest_initcode_subcontainer, id="deploy_eof_initcontainer" - ), - pytest.param(smallest_runtime_subcontainer, id="deploy_eof_container"), - ], -) -def test_legacy_initcode_eof_contract_fails( - state_test: StateTestFiller, - pre: Alloc, - legacy_create_opcode: Opcodes, - deploy_code: Bytes | Container, -) -> None: - """ - Verifies that legacy initcode cannot create EOF. 
- - This tests only ensures EIP-3541 behavior is kept, not altered by EIP-7620. - """ - env = Environment() - init_code = LegacyInitcode(deploy_code=deploy_code) - salt_param = [0] if legacy_create_opcode == Op.CREATE2 else [] - factory_code = ( - Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.SSTORE( - slot_create_address, - legacy_create_opcode(0, 0, Op.CALLDATASIZE, *salt_param), - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - ) - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract(code=factory_code) - - # Storage in 0 should be empty as the final CREATE filed - # and 1 in 1 to show execution continued and did not halt - post = { - contract_address: Account( - storage={ - slot_create_address: EOFCREATE_FAILURE, - slot_code_worked: value_code_worked, - } - ) - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - data=init_code, - sender=sender, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_memory.py b/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_memory.py deleted file mode 100644 index 32fc3502f9..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_memory.py +++ /dev/null @@ -1,145 +0,0 @@ -"""Test good and bad EOFCREATE cases.""" - -import pytest -from execution_testing import ( - Account, - Alloc, - Environment, - Op, - StateTestFiller, - Storage, - Transaction, - compute_eofcreate_address, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. import EOF_FORK_NAME -from .helpers import ( - slot_code_worked, - slot_create_address, - smallest_initcode_subcontainer, - smallest_runtime_subcontainer, - value_canary_to_be_overwritten, - value_code_worked, -) - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7620.md" -REFERENCE_SPEC_VERSION = "52ddbcdddcf72dd72427c319f2beddeb468e1737" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.parametrize( - "offset_field", - [ - pytest.param(True, id="offset"), - pytest.param(False, id="size"), - ], -) -@pytest.mark.parametrize( - ("test_arg", "success"), - [ - pytest.param(0, True, id="zero"), - pytest.param(0xFF, True, id="8-bit"), - pytest.param(0x100, True, id="9-bit"), - pytest.param(0xFFFF, True, id="16-bit"), - pytest.param(0x10000, True, id="17-bit"), - pytest.param(0x1FFFF20, False, id="32-bit-mem-cost"), - pytest.param(0x2D412E0, False, id="33-bit-mem-cost"), - pytest.param(0xFFFFFFFF, False, id="32-bit"), - pytest.param(0x100000000, False, id="33-bit"), - pytest.param(0x1FFFFFFFF20, False, id="64-bit-mem-cost"), - pytest.param(0x2D413CCCF00, False, id="65-bit-mem-cost"), - pytest.param(0xFFFFFFFFFFFFFFFF, False, id="64-bit"), - pytest.param(0x10000000000000000, False, id="65-bit"), - pytest.param(0xFFFFFFFFFFFFFFFF, False, id="128-bit"), - pytest.param(0x10000000000000000, False, id="129-bit"), - pytest.param(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF, False, id="256-bit"), - ], -) -def test_eofcreate_memory( - state_test: StateTestFiller, - pre: Alloc, - offset_field: str, - test_arg: int, - success: bool, -) -> None: - """ - Tests auxdata sizes in EOFCREATE including multiple offset conditions. - - EOFCREATE either succeeds or fails based on memory access cost, resulting - in new address or zero in the create address slot. - - The name id of `*-mem-cost` refers to the bit-length of the result of the - calculated memory expansion cost. Their length choice is designed to cause - problems on shorter bit-length representations with native integers. 
- - The `offset_field` param indicates what part of the input data arguments - are being tested, either the offset of the data in memory or the size of - the data in memory. - - The `test_arg` param is the value passed into the field being tested - (offset or size), intending to trigger integer size bugs for that - particular field. - """ - env = Environment(gas_limit=2_000_000_000) - - sender = pre.fund_eoa(10**27) - - initial_storage = Storage( - { - slot_create_address: value_canary_to_be_overwritten, # type: ignore - slot_code_worked: value_canary_to_be_overwritten, # type: ignore - } - ) - calling_contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, - Op.EOFCREATE[0]( - value=0, - salt=0, - input_offset=test_arg if offset_field else 32, - input_size=32 if offset_field else test_arg, - ), - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - Section.Container(container=smallest_initcode_subcontainer), - ] - ), - storage=initial_storage, - ) - destination_contract_address = compute_eofcreate_address( - calling_contract_address, 0 - ) - - post = { - calling_contract_address: Account( - storage={ - slot_create_address: destination_contract_address, - slot_code_worked: value_code_worked, - } - if success - else initial_storage, - ), - destination_contract_address: Account( - code=smallest_runtime_subcontainer - ) - if success - else Account.NONEXISTENT, - } - - tx = Transaction( - sender=sender, to=calling_contract_address, gas_limit=2_000_000_000 - ) - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_returncode.py b/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_returncode.py deleted file mode 100644 index b47df3fe54..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_returncode.py +++ /dev/null @@ -1,319 +0,0 @@ -"""Tests for RETURNCODE instruction validation.""" - -import pytest -from execution_testing import ( - Account, - Alloc, - Environment, - EOFException, - EOFTestFiller, - Op, - StateTestFiller, - Transaction, - compute_eofcreate_address, -) -from execution_testing.test_types.eof.v1 import ( - Container, - ContainerKind, - Section, -) -from execution_testing.test_types.eof.v1.constants import ( - MAX_BYTECODE_SIZE, -) - -from .. 
import EOF_FORK_NAME -from .helpers import ( - slot_create_address, - smallest_runtime_subcontainer, - value_canary_to_be_overwritten, -) - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7620.md" -REFERENCE_SPEC_VERSION = "f20b164b00ae5553f7536a6d7a83a0f254455e09" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -def test_returncode_valid_index_0( - eof_test: EOFTestFiller, -) -> None: - """Deploy container index 0.""" - eof_test( - container_kind=ContainerKind.INITCODE, - container=Container( - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, 0), - ), - Section.Container( - container=Container( - sections=[Section.Code(code=Op.INVALID)] - ) - ), - ], - ), - ) - - -def test_returncode_valid_index_1( - eof_test: EOFTestFiller, -) -> None: - """Deploy container index 1.""" - eof_test( - container_kind=ContainerKind.INITCODE, - container=Container( - sections=[ - Section.Code( - code=Op.RJUMPI[6](0) - + Op.RETURNCODE[0](0, 0) - + Op.RETURNCODE[1](0, 0), - max_stack_height=2, - ), - Section.Container( - container=Container( - sections=[Section.Code(code=Op.INVALID)] - ) - ), - Section.Container( - container=Container( - sections=[Section.Code(code=Op.INVALID)] - ) - ), - ], - ), - ) - - -def test_returncode_valid_index_255( - eof_test: EOFTestFiller, -) -> None: - """Deploy container index 255.""" - eof_test( - container_kind=ContainerKind.INITCODE, - container=Container( - sections=[ - Section.Code( - sum( - (Op.RJUMPI[6](0) + Op.RETURNCODE[i](0, 0)) - for i in range(256) - ) - + Op.REVERT(0, 0), - max_stack_height=2, - ) - ] - + [ - Section.Container( - container=Container( - sections=[Section.Code(code=Op.INVALID)] - ) - ) - ] - * 256 - ), - ) - - -def test_returncode_invalid_truncated_immediate( - eof_test: EOFTestFiller, -) -> None: - """Truncated immediate.""" - eof_test( - container_kind=ContainerKind.INITCODE, - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 + Op.PUSH0 + Op.RETURNCODE, - ), - ], - ), - expect_exception=EOFException.TRUNCATED_INSTRUCTION, - ) - - -def test_returncode_invalid_index_0( - eof_test: EOFTestFiller, -) -> None: - """Referring to non-existent container section index 0.""" - eof_test( - container_kind=ContainerKind.INITCODE, - container=Container( - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, 0), - ), - ], - ), - expect_exception=EOFException.INVALID_CONTAINER_SECTION_INDEX, - ) - - -def test_returncode_invalid_index_1( - eof_test: EOFTestFiller, -) -> None: - """Referring to non-existent container section index 1.""" - eof_test( - container_kind=ContainerKind.INITCODE, - container=Container( - sections=[ - Section.Code( - code=Op.RETURNCODE[1](0, 0), - ), - Section.Container( - container=Container( - sections=[Section.Code(code=Op.INVALID)] - ) - ), - ], - ), - expect_exception=EOFException.INVALID_CONTAINER_SECTION_INDEX, - ) - - -def test_returncode_invalid_index_255( - eof_test: EOFTestFiller, -) -> None: - """Referring to non-existent container section index 255.""" - eof_test( - container_kind=ContainerKind.INITCODE, - container=Container( - sections=[ - Section.Code( - code=Op.RETURNCODE[255](0, 0), - ), - Section.Container( - container=Container( - sections=[Section.Code(code=Op.INVALID)] - ) - ), - ], - ), - expect_exception=EOFException.INVALID_CONTAINER_SECTION_INDEX, - ) - - -def test_returncode_terminating( - eof_test: EOFTestFiller, -) -> None: - """Unreachable code after RETURNCODE.""" - eof_test( - container_kind=ContainerKind.INITCODE, - container=Container( - sections=[ - Section.Code( - 
code=Op.RETURNCODE[0](0, 0) + Op.REVERT(0, 0), - ), - Section.Container( - container=Container( - sections=[Section.Code(code=Op.INVALID)] - ) - ), - ], - ), - expect_exception=EOFException.UNREACHABLE_INSTRUCTIONS, - ) - - -@pytest.mark.parametrize( - "offset_field", - [ - pytest.param(True, id="offset"), - pytest.param(False, id="size"), - ], -) -@pytest.mark.parametrize( - ("test_arg", "success"), - [ - pytest.param(0, True, id="zero"), - pytest.param(0xFF, True, id="8-bit"), - pytest.param(0x100, True, id="9-bit"), - pytest.param(0xFFFF, True, id="16-bit"), - pytest.param(0x10000, True, id="17-bit"), - pytest.param(0x1FFFF20, False, id="32-bit-mem-cost"), - pytest.param(0x2D412E0, False, id="33-bit-mem-cost"), - pytest.param(0xFFFFFFFF, False, id="32-bit"), - pytest.param(0x100000000, False, id="33-bit"), - pytest.param(0x1FFFFFFFF20, False, id="64-bit-mem-cost"), - pytest.param(0x2D413CCCF00, False, id="65-bit-mem-cost"), - pytest.param(0xFFFFFFFFFFFFFFFF, False, id="64-bit"), - pytest.param(0x10000000000000000, False, id="65-bit"), - pytest.param(0xFFFFFFFFFFFFFFFF, False, id="128-bit"), - pytest.param(0x10000000000000000, False, id="129-bit"), - pytest.param(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF, False, id="256-bit"), - ], -) -def test_returncode_memory_expansion( - state_test: StateTestFiller, - pre: Alloc, - offset_field: str, - test_arg: int, - success: bool, -) -> None: - """ - Attempts an EOFCREATE with a possibly too-large auxdata. Create either - fails due to gas or contract too large, resulting in address or zero on - failure in the create address slot. - - The name id of `*-mem-cost` refers to the bit-length of the result of the - calculated memory expansion cost. Their length choice is designed to cause - problems on shorter bit-length representations with native integers. - - The `offset_field` param indicates what part of the input data arguments - are being tested, either the offset of the data in memory or the size of - the data in memory. - - The `test_arg` param is the value passed into the field being tested - (offset or size), intending to trigger integer size bugs for that - particular field. 
- """ - env = Environment(gas_limit=2_000_000_000) - sender = pre.fund_eoa(10**27) - - eof_size_acceptable = offset_field or test_arg < MAX_BYTECODE_SIZE - - mem_size_initcode_container = Container( - sections=[ - Section.Code( - code=Op.RETURNCODE[0]( - auxdata_offset=test_arg if offset_field else 32, - auxdata_size=32 if offset_field else test_arg, - ) - ), - Section.Container(container=smallest_runtime_subcontainer), - ], - ) - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.SSTORE( - slot_create_address, Op.EOFCREATE[0](0, 0, 0, 0) - ) - + Op.STOP, - ), - Section.Container(container=mem_size_initcode_container), - ], - ), - storage={ - slot_create_address: value_canary_to_be_overwritten, - }, - ) - # Storage in 0 should have the address, - post = { - contract_address: Account( - storage={ - slot_create_address: compute_eofcreate_address( - contract_address, 0 - ) - if success and eof_size_acceptable - else 0, - } - ) - } - tx = Transaction( - to=contract_address, - gas_limit=2_000_000_000, - gas_price=10, - protected=False, - sender=sender, - ) - state_test(env=env, pre=pre, post=post, tx=tx) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_subcontainer_validation.py b/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_subcontainer_validation.py deleted file mode 100644 index da7f21e6b6..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_subcontainer_validation.py +++ /dev/null @@ -1,1009 +0,0 @@ -"""EOF Subcontainer tests covering simple cases.""" - -import pytest -from execution_testing import ( - Account, - Bytecode, - EOFException, - EOFStateTestFiller, - EOFTestFiller, - Op, -) -from execution_testing.test_types.eof.v1 import ( - Container, - ContainerKind, - Section, -) -from execution_testing.test_types.eof.v1.constants import ( - MAX_BYTECODE_SIZE, - MAX_INITCODE_SIZE, -) - -from .. 
import EOF_FORK_NAME -from .helpers import slot_code_worked, value_code_worked - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7620.md" -REFERENCE_SPEC_VERSION = "52ddbcdddcf72dd72427c319f2beddeb468e1737" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - -eofcreate_code_section = Section.Code( - code=Op.EOFCREATE[0](0, 0, 0, 0) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - max_stack_height=4, -) -eofcreate_revert_code_section = Section.Code( - code=Op.EOFCREATE[0](0, 0, 0, 0) + Op.REVERT(0, 0), -) -returncode_code_section = Section.Code( - code=Op.SSTORE(slot_code_worked, value_code_worked) - + Op.RETURNCODE[0](0, 0), - max_stack_height=2, -) -stop_container = Container.Code(Op.STOP) -stop_sub_container = Section.Container(stop_container) -return_sub_container = Section.Container(Container.Code(Op.RETURN(0, 0))) -revert_sub_container = Section.Container(Container.Code(Op.REVERT(0, 0))) -abort_sub_container = Section.Container(Container.Code(Op.INVALID)) -returncode_sub_container = Section.Container( - Container( - sections=[ - Section.Code(Op.RETURNCODE[0](0, 0)), - stop_sub_container, - ], - ) -) - - -def test_simple_create_from_deployed( - eof_state_test: EOFStateTestFiller, -) -> None: - """Simple EOF creation from a deployed EOF container.""" - eof_state_test( - container=Container( - sections=[ - eofcreate_code_section, - returncode_sub_container, - ], - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -def test_simple_create_from_creation( - eof_state_test: EOFStateTestFiller, -) -> None: - """Simple EOF creation from a create transaction container.""" - eof_state_test( - container=Container( - sections=[ - returncode_code_section, - stop_sub_container, - ], - kind=ContainerKind.INITCODE, - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -@pytest.mark.parametrize( - "zero_section", - [eofcreate_code_section, returncode_code_section], - ids=["eofcreate", "returncode"], -) -def test_reverting_container( - eof_state_test: EOFStateTestFiller, - zero_section: Container, -) -> None: - """Test revert containers.""" - eof_state_test( - container=Container( - sections=[ - zero_section, - revert_sub_container, - ], - kind=( - ContainerKind.INITCODE - if zero_section == returncode_code_section - else ContainerKind.RUNTIME - ), - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -@pytest.mark.parametrize( - "code_section,first_sub_container,container_kind", - [ - ( - eofcreate_code_section, - returncode_sub_container, - ContainerKind.RUNTIME, - ), - (returncode_code_section, stop_sub_container, ContainerKind.INITCODE), - ], - ids=["eofcreate", "returncode"], -) -@pytest.mark.parametrize( - "extra_sub_container", - [stop_sub_container, revert_sub_container, returncode_sub_container], - ids=["stop", "revert", "returncode"], -) -def test_orphan_container( - eof_test: EOFTestFiller, - code_section: Section, - first_sub_container: Container, - extra_sub_container: Container, - container_kind: ContainerKind, -) -> None: - """Test orphaned containers.""" - eof_test( - container=Container( - sections=[ - code_section, - first_sub_container, - extra_sub_container, - ], - kind=container_kind, - ), - expect_exception=EOFException.ORPHAN_SUBCONTAINER, - ) - - -@pytest.mark.parametrize( - "code_section,sub_container,container_kind", - [ - pytest.param( - eofcreate_code_section, - returncode_sub_container, - ContainerKind.RUNTIME, - id="EOFCREATE_RETURNCODE", - ), - pytest.param( - 
returncode_code_section, - stop_sub_container, - ContainerKind.INITCODE, - id="RETURNCODE_STOP", - ), - pytest.param( - returncode_code_section, - return_sub_container, - ContainerKind.INITCODE, - id="RETURNCODE_RETURN", - ), - pytest.param( - eofcreate_code_section, - revert_sub_container, - ContainerKind.RUNTIME, - id="EOFCREATE_REVERT", - ), - pytest.param( - returncode_code_section, - revert_sub_container, - ContainerKind.INITCODE, - id="RETURNCODE_REVERT", - ), - ], -) -def test_container_combos_valid( - eof_state_test: EOFStateTestFiller, - code_section: Section, - sub_container: Container, - container_kind: ContainerKind, -) -> None: - """Test valid subcontainer reference / opcode combos.""" - eof_state_test( - container=Container( - sections=[ - code_section, - sub_container, - ], - kind=container_kind, - ), - container_post=Account(storage={slot_code_worked: value_code_worked}), - ) - - -@pytest.mark.parametrize( - "code_section,first_sub_container,container_kind", - [ - pytest.param( - eofcreate_code_section, - stop_sub_container, - ContainerKind.RUNTIME, - id="EOFCREATE_STOP", - ), - pytest.param( - eofcreate_code_section, - return_sub_container, - ContainerKind.RUNTIME, - id="EOFCREATE_RETURN", - ), - pytest.param( - returncode_code_section, - returncode_sub_container, - ContainerKind.INITCODE, - id="RETURNCODE_RETURNCODE", - ), - ], -) -def test_container_combos_invalid( - eof_test: EOFTestFiller, - code_section: Section, - first_sub_container: Container, - container_kind: ContainerKind, -) -> None: - """Test invalid subcontainer reference / opcode combos.""" - eof_test( - container=Container( - sections=[ - code_section, - first_sub_container, - ], - kind=container_kind, - ), - expect_exception=EOFException.INCOMPATIBLE_CONTAINER_KIND, - ) - - -@pytest.mark.parametrize( - "code_section,first_sub_container", - [ - pytest.param( - eofcreate_revert_code_section, - returncode_sub_container, - id="EOFCREATE_RETURNCODE", - ), - pytest.param( - returncode_code_section, - stop_sub_container, - id="RETURNCODE_STOP", - ), - pytest.param( - returncode_code_section, - return_sub_container, - id="RETURNCODE_RETURN", - ), - pytest.param( - eofcreate_revert_code_section, - revert_sub_container, - id="EOFCREATE_REVERT", - ), - pytest.param( - returncode_code_section, - revert_sub_container, - id="RETURNCODE_REVERT", - ), - ], -) -def test_container_combos_deeply_nested_valid( - eof_test: EOFTestFiller, - code_section: Section, - first_sub_container: Container, -) -> None: - """ - Test valid subcontainer reference / opcode combos on a deep container - nesting level. 
- """ - valid_container = Container( - sections=[ - code_section, - first_sub_container, - ], - kind=ContainerKind.INITCODE, - ) - - container = valid_container - while len(container) < MAX_BYTECODE_SIZE: - container = Container( - sections=[ - eofcreate_revert_code_section, - Section.Container(container=container.copy()), - ], - kind=ContainerKind.INITCODE, - ) - - eof_test( - container=container, - deployed_container=None, # Execution reverts before deployment - ) - - -@pytest.mark.parametrize( - "code_section,first_sub_container", - [ - pytest.param( - eofcreate_revert_code_section, - stop_sub_container, - id="EOFCREATE_STOP", - ), - pytest.param( - eofcreate_revert_code_section, - return_sub_container, - id="EOFCREATE_RETURN", - ), - pytest.param( - returncode_code_section, - returncode_sub_container, - id="RETURNCODE_RETURNCODE", - ), - ], -) -def test_container_combos_deeply_nested_invalid( - eof_test: EOFTestFiller, - code_section: Section, - first_sub_container: Container, -) -> None: - """ - Test invalid subcontainer reference / opcode combos on a deep container - nesting level. - """ - invalid_container = Container( - sections=[ - code_section, - first_sub_container, - ], - kind=ContainerKind.INITCODE, - ) - - container = invalid_container - while len(container) < MAX_BYTECODE_SIZE: - container = Container( - sections=[ - eofcreate_revert_code_section, - Section.Container(container=container.copy()), - ], - kind=ContainerKind.INITCODE, - ) - - eof_test( - container=container, - expect_exception=EOFException.INCOMPATIBLE_CONTAINER_KIND, - ) - - -@pytest.mark.parametrize( - "code_section,first_sub_container,container_kind", - [ - pytest.param( - eofcreate_code_section, - returncode_sub_container, - ContainerKind.RUNTIME, - id="EOFCREATE_RETURNCODE", - ), - pytest.param( - returncode_code_section, - stop_sub_container, - ContainerKind.INITCODE, - id="RETURNCODE_STOP", - ), - pytest.param( - returncode_code_section, - return_sub_container, - ContainerKind.INITCODE, - id="RETURNCODE_RETURN", - ), - pytest.param( - eofcreate_code_section, - revert_sub_container, - ContainerKind.RUNTIME, - id="EOFCREATE_REVERT", - ), - pytest.param( - returncode_code_section, - revert_sub_container, - ContainerKind.INITCODE, - id="RETURNCODE_REVERT", - ), - ], -) -def test_container_combos_non_first_code_sections_valid( - eof_test: EOFTestFiller, - code_section: Section, - first_sub_container: Container, - container_kind: ContainerKind, -) -> None: - """ - Test valid subcontainer reference / opcode combos in a non-first code - section. - """ - eof_test( - container=Container( - sections=[Section.Code(Op.JUMPF[i]) for i in range(1, 1024)] - + [code_section, first_sub_container], - kind=container_kind, - ), - ) - - -@pytest.mark.parametrize( - "code_section,first_sub_container,container_kind", - [ - pytest.param( - eofcreate_code_section, - stop_sub_container, - ContainerKind.RUNTIME, - id="EOFCREATE_STOP", - ), - pytest.param( - eofcreate_code_section, - return_sub_container, - ContainerKind.RUNTIME, - id="EOFCREATE_RETURN", - ), - pytest.param( - returncode_code_section, - returncode_sub_container, - ContainerKind.INITCODE, - id="RETURNCODE_RETURNCODE", - ), - ], -) -def test_container_combos_non_first_code_sections_invalid( - eof_test: EOFTestFiller, - code_section: Section, - first_sub_container: Container, - container_kind: ContainerKind, -) -> None: - """ - Test invalid subcontainer reference / opcode combos in a non-first code - section. 
- """ - eof_test( - container=Container( - sections=[Section.Code(Op.JUMPF[i]) for i in range(1, 1024)] - + [code_section, first_sub_container], - kind=container_kind, - ), - expect_exception=EOFException.INCOMPATIBLE_CONTAINER_KIND, - ) - - -def test_container_both_kinds_same_sub(eof_test: EOFTestFiller) -> None: - """ - Test subcontainer conflicts (both EOFCREATE and RETURNCODE Reference). - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.EOFCREATE[0](0, 0, 0, 0) + Op.JUMPF[1], - ), - Section.Code( - code=Op.RETURNCODE[0](0, 0), - ), - revert_sub_container, - ], - ), - expect_exception=EOFException.INCOMPATIBLE_CONTAINER_KIND, - ) - - -@pytest.mark.parametrize("container_idx", [0, 1, 255]) -@pytest.mark.parametrize( - "sub_container", - [ - pytest.param(abort_sub_container, id="abort"), - pytest.param(revert_sub_container, id="revert"), - ], -) -def test_container_ambiguous_kind( - eof_test: EOFTestFiller, container_idx: int, sub_container: Section -) -> None: - """ - Test ambiguous container kind: a single subcontainer reference by both - EOFCREATE and RETURNCODE. - """ - sections = [ - Section.Code( - code=( - sum(Op.EOFCREATE[i](0, 0, 0, 0) for i in range(container_idx)) - + Op.EOFCREATE[container_idx](0, 0, 0, 0) - + Op.RETURNCODE[container_idx](0, 0) - ), - ), - ] - sections += (container_idx + 1) * [sub_container] - - eof_test( - container=Container( - sections=sections, - kind=ContainerKind.INITCODE, - ), - expect_exception=EOFException.AMBIGUOUS_CONTAINER_KIND, - ) - - -def test_container_both_kinds_different_sub(eof_test: EOFTestFiller) -> None: - """Test multiple kinds of subcontainer at the same level.""" - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.EOFCREATE[0](0, 0, 0, 0) + Op.JUMPF[1], - ), - Section.Code( - code=Op.RETURNCODE[1](0, 0), - ), - returncode_sub_container, - stop_sub_container, - ], - kind=ContainerKind.INITCODE, - ), - deployed_container=stop_container, - ) - - -def test_container_multiple_eofcreate_references( - eof_test: EOFTestFiller, -) -> None: - """ - Test multiple references to the same subcontainer from an EOFCREATE - operation. - """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.EOFCREATE[0](0, 0, 0, 0) - + Op.EOFCREATE[0](0, 0, 0, 0) - + Op.STOP, - ), - returncode_sub_container, - ], - ), - ) - - -def test_container_multiple_returncode_references( - eof_test: EOFTestFiller, -) -> None: - """ - Test multiple references to the same subcontainer from a RETURNCONTACT - operation. 
- """ - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.CALLDATALOAD - + Op.RJUMPI[6] - + Op.RETURNCODE[0](0, 0) - + Op.RETURNCODE[0](0, 0) - ), - stop_sub_container, - ], - kind=ContainerKind.INITCODE, - ), - ) - - -@pytest.mark.parametrize("version", [0, 255], ids=lambda x: x) -def test_subcontainer_wrong_eof_version( - eof_test: EOFTestFiller, - version: int, -) -> None: - """Test a subcontainer with the incorrect EOF version.""" - eof_test( - container=Container( - sections=[ - Section.Code( - code=Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP, - ), - Section.Container( - container=Container( - version=[version], - sections=[Section.Code(code=Op.STOP)], - ) - ), - ], - kind=ContainerKind.RUNTIME, - ), - expect_exception=EOFException.INVALID_VERSION, - ) - - -@pytest.mark.parametrize("delta", [-1, 1], ids=["smaller", "larger"]) -@pytest.mark.parametrize( - "kind", [ContainerKind.RUNTIME, ContainerKind.INITCODE] -) -def test_subcontainer_wrong_size( - eof_test: EOFTestFiller, - delta: int, - kind: ContainerKind, -) -> None: - """Test a subcontainer with the incorrect size in the parent's header.""" - eof_test( - container=Container( - sections=[ - Section.Code( - code=(Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP) - if kind == ContainerKind.RUNTIME - else (Op.RETURNCODE[0](0, 0)), - ), - Section.Container( - container=Container(sections=[Section.Code(code=Op.STOP)]), - custom_size=len(stop_sub_container.data) + delta, - ), - ], - kind=kind, - ), - expect_exception=EOFException.INVALID_SECTION_BODIES_SIZE, - ) - - -deep_container_parametrize = pytest.mark.parametrize( - ["deepest_container", "exception"], - [ - pytest.param(Container.Code(Op.STOP), None, id="valid"), - pytest.param( - Container.Code(code=Op.PUSH0), - EOFException.MISSING_STOP_OPCODE, - id="code-error", - ), - pytest.param( - Container( - raw_bytes="EF0100A94F5374FCE5EDBC8E2A8697C15331677E6EBF0B" - ), - EOFException.INVALID_MAGIC, - id="structure-error", - ), - ], -) - - -@deep_container_parametrize -@pytest.mark.eof_test_only(reason="Initcontainer exceeds maximum") -def test_deep_container( - eof_test: EOFTestFiller, - deepest_container: Container, - exception: EOFException | None, -) -> None: - """ - Test a very deeply nested container. - - This test skips generating a state test because the initcode size is too - large. 
- """ - container = deepest_container - last_container = deepest_container - while len(container) < MAX_INITCODE_SIZE: - last_container = container - container = Container( - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.EOFCREATE[0] - + Op.STOP, - ), - Section.Container( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 + Op.PUSH0 + Op.RETURNCODE[0], - ), - Section.Container(container=last_container), - ] - ) - ), - ], - ) - - eof_test(container=last_container, expect_exception=exception) - - -@deep_container_parametrize -def test_deep_container_initcode( - eof_test: EOFTestFiller, - deepest_container: Container, - exception: EOFException | None, -) -> None: - """Test a very deeply nested initcontainer.""" - container = Container( - sections=[ - Section.Code( - code=Op.PUSH0 + Op.PUSH0 + Op.RETURNCODE[0], - ), - Section.Container(container=deepest_container), - ], - kind=ContainerKind.INITCODE, - ) - last_container = container - while len(container) < MAX_INITCODE_SIZE: - last_container = container - container = Container( - sections=[ - Section.Code( - code=Op.PUSH0 + Op.PUSH0 + Op.RETURNCODE[0], - ), - Section.Container( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.PUSH0 - + Op.EOFCREATE[0] - + Op.STOP - ), - Section.Container(container=last_container), - ] - ) - ), - ], - kind=ContainerKind.INITCODE, - ) - eof_test( - container=last_container, - expect_exception=exception, - deployed_container=None, - ) - - -@pytest.mark.parametrize( - ["width", "exception"], - [ - pytest.param(256, None, id="256"), - pytest.param(257, EOFException.TOO_MANY_CONTAINERS, id="257"), - pytest.param( - 0x8000, - EOFException.CONTAINER_SIZE_ABOVE_LIMIT, - marks=pytest.mark.eof_test_only(reason="int too big to convert"), - id="negative_i16", - ), - pytest.param( - 0xFFFF, - EOFException.CONTAINER_SIZE_ABOVE_LIMIT, - marks=pytest.mark.eof_test_only(reason="int too big to convert"), - id="max_u16", - ), - ], -) -def test_wide_container( - eof_test: EOFTestFiller, width: int, exception: EOFException -) -> None: - """Test a container with the maximum number of sub-containers.""" - create_code: Bytecode = Op.STOP - for x in range(0, 256): - create_code = Op.EOFCREATE[x](0, 0, 0, 0) + create_code - eof_test( - container=Container( - sections=[ - Section.Code( - code=create_code, - ), - *( - [ - Section.Container( - container=Container( - sections=[ - Section.Code( - code=Op.PUSH0 - + Op.PUSH0 - + Op.RETURNCODE[0], - ), - stop_sub_container, - ] - ) - ) - ] - * width - ), - ] - ), - expect_exception=exception, - ) - - -@pytest.mark.parametrize( - "container", - [ - pytest.param( - Container( - sections=[ - Section.Code( - Op.CALLDATASIZE - + Op.PUSH1[0] - + Op.PUSH1[255] - + Op.PUSH1[0] - + Op.EOFCREATE[0] - + Op.POP - + Op.STOP - ), - abort_sub_container, - ], - expected_bytecode=""" - ef0001010004020001000b03000100000014ff0000000080000436600060ff6000ec005000ef000101000402 - 00010001ff00000000800000fe""", - ), - id="eofcreate_0", - ), - pytest.param( - Container( - sections=[ - Section.Code(Op.PUSH1[0] + Op.RJUMP[0] + Op.STOP), - abort_sub_container, - ], - expected_bytecode=""" - ef0001010004020001000603000100000014ff000000008000016000e0000000ef000101000402000100 - 01ff00000000800000fe""", - # Originally this test was "valid" because it was created - # before "orphan subcontainer" rule was introduced. 
- validity_error=EOFException.ORPHAN_SUBCONTAINER, - ), - id="orphan_subcontainer_0", - ), - pytest.param( - Container( - sections=[ - Section.Code(Op.PUSH1[0] + Op.RJUMP[0] + Op.STOP), - abort_sub_container, - Section.Data(custom_size=2), - ], - expected_bytecode=""" - ef0001010004020001000603000100000014ff000200008000016000e0000000ef000101000402000100 - 01ff00000000800000fe""", - # Originally this test was "valid" but against the current spec - # it contains two errors: data section truncated and orphan - # subcontainer. - validity_error=EOFException.TOPLEVEL_CONTAINER_TRUNCATED, - ), - id="orphan_subcontainer_0_and_truncated_data", - ), - pytest.param( - Container( - sections=[ - Section.Code(Op.PUSH1[0] + Op.RJUMP[0] + Op.STOP), - abort_sub_container, - Section.Data("aabb"), - ], - expected_bytecode=""" - ef0001010004020001000603000100000014ff000200008000016000e0000000ef000101000402000100 - 01ff00000000800000feaabb""", - # Originally this test was "valid" because it was created - # before "orphan subcontainer" rule was introduced. - validity_error=EOFException.ORPHAN_SUBCONTAINER, - ), - id="orphan_subcontainer_0_and_data", - ), - pytest.param( - Container( - sections=[ - Section.Code(Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP), - Section.Container("aabbccddeeff"), - ], - # The original test has been modified to reference the - # subcontainer by EOFCREATE. - validity_error=EOFException.INVALID_MAGIC, - ), - id="subcontainer_0_with_invalid_prefix", - ), - pytest.param( - Container( - sections=[ - Section.Code( - Op.CALLDATASIZE - + Op.PUSH1[0] - + Op.PUSH1[255] - + Op.PUSH1[0] - + Op.EOFCREATE[1] - + Op.POP - + Op.STOP - ) - ] - + 2 * [abort_sub_container], - expected_bytecode=""" - ef0001010004020001000b0300020000001400000014ff0000000080000436600060ff6000ec015000ef00010100 - 040200010001ff00000000800000feef00010100040200010001ff00000000800000fe""", - # Originally this test was "valid" because it was created - # before "orphan subcontainer" rule was introduced. - validity_error=EOFException.ORPHAN_SUBCONTAINER, - ), - id="eofcreate_1_orphan_subcontainer_0", - ), - pytest.param( - Container( - sections=[ - Section.Code(Op.PUSH1[0] + Op.RJUMP[0] + Op.STOP), - abort_sub_container, - Section.Container( - Container.Code(Op.PUSH0 + Op.PUSH0 + Op.RETURN) - ), - ], - expected_bytecode=""" - ef000101000402000100060300020000001400000016ff000000008000016000e0000000ef000101000402000100 - 01ff00000000800000feef00010100040200010003ff000000008000025f5ff3""", - # Originally this test was "valid" because it was created - # before "orphan subcontainer" rule was introduced. - validity_error=EOFException.ORPHAN_SUBCONTAINER, - ), - id="two_orphan_subcontainers", - ), - pytest.param( - Container( - sections=[ - Section.Code( - Op.CALLDATASIZE - + Op.PUSH1[0] - + Op.PUSH1[255] - + Op.PUSH1[0] - + Op.EOFCREATE[255] - + Op.POP - + Op.STOP - ) - ] - + 256 * [abort_sub_container], - # Originally this test was "valid" because it was created - # before "orphan subcontainer" rule was introduced. - validity_error=EOFException.ORPHAN_SUBCONTAINER, - ), - id="eofcreate_255_max_orphan_subcontainers", - ), - pytest.param( - Container( - sections=[Section.Code(Op.PUSH1[0] + Op.RJUMP[0] + Op.STOP)] - + 256 * [abort_sub_container], - # Originally this test was "valid" because it was created - # before "orphan subcontainer" rule was introduced. 
- validity_error=EOFException.ORPHAN_SUBCONTAINER, - ), - id="max_orphan_subcontainers", - ), - ], -) -def test_migrated_eofcreate( - eof_test: EOFTestFiller, container: Container -) -> None: - """Tests migrated from EOFTests/efValidation/EOF1_eofcreate_valid_.json.""" - eof_test(container=container, expect_exception=container.validity_error) - - -def test_dangling_initcode_subcontainer_bytes( - eof_test: EOFTestFiller, -) -> None: - """ - Initcode mode EOF Subcontainer test with subcontainer containing dangling - bytes. - """ - eof_test( - container=Container( - sections=[ - returncode_code_section, - Section.Container( - container=Container( - raw_bytes=stop_sub_container.data + b"\x99", - ), - ), - ], - kind=ContainerKind.INITCODE, - ), - expect_exception=EOFException.INVALID_SECTION_BODIES_SIZE, - ) - - -def test_dangling_runtime_subcontainer_bytes( - eof_test: EOFTestFiller, -) -> None: - """ - Runtime mode EOF Subcontainer test with subcontainer containing dangling - bytes. - """ - eof_test( - container=Container( - sections=[ - eofcreate_code_section, - Section.Container( - container=Container( - raw_bytes=returncode_sub_container.data + b"\x99", - ), - ), - ], - ), - expect_exception=EOFException.INVALID_SECTION_BODIES_SIZE, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/__init__.py b/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/__init__.py deleted file mode 100644 index ddd4426793..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -Test cases for [EIP-7873: TXCREATE and InitcodeTransaction](https://eips.ethereum.org/EIPS/eip-7873). -""" diff --git a/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/spec.py b/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/spec.py deleted file mode 100644 index b3ec52b587..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/spec.py +++ /dev/null @@ -1,3 +0,0 @@ -"""EOF V1 Constants used throughout all tests.""" - -TXCREATE_FAILURE = 0 diff --git a/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_creation_tx.py b/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_creation_tx.py deleted file mode 100644 index 51761be0c9..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_creation_tx.py +++ /dev/null @@ -1,135 +0,0 @@ -"""Test bad TXCREATE cases.""" - -import pytest -from execution_testing import ( - Account, - Address, - Alloc, - Bytes, - Environment, - StateTestFiller, - Transaction, - TransactionReceipt, -) -from execution_testing import Initcode as LegacyInitcode -from execution_testing.test_types.eof.v1 import Container - -from ....prague.eip7702_set_code_tx.spec import Spec -from .. 
import EOF_FORK_NAME -from ..eip7620_eof_create.helpers import ( - smallest_initcode_subcontainer, - smallest_runtime_subcontainer, -) - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7873.md" -REFERENCE_SPEC_VERSION = "23d96ceff8f0690432ab91089ae257f08f32340f" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.with_all_contract_creating_tx_types( - selector=lambda tx_type: tx_type != 6 -) -@pytest.mark.parametrize( - "deploy_code", - [ - Bytes("0xEF"), - Bytes("0xEF00"), - Bytes("0xEF0001"), - Bytes("0xEF01"), - smallest_runtime_subcontainer, - smallest_initcode_subcontainer, - ], -) -def test_legacy_create_tx_legacy_initcode_eof_bytecode( - state_test: StateTestFiller, - pre: Alloc, - tx_type: int, - deploy_code: Bytes | Container, -) -> None: - """ - Test that a legacy contract creation tx cannot create EOF code. - - This tests only ensures EIP-3541 behavior is kept, not altered by EIP-7873 - """ - env = Environment() - sender = pre.fund_eoa() - - initcode = LegacyInitcode(deploy_code=deploy_code) - - tx = Transaction( - ty=tx_type, - sender=sender, - to=None, - gas_limit=100000, - data=initcode, - ) - - destination_contract_address = tx.created_contract - - post = { - destination_contract_address: Account.NONEXISTENT, - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.with_all_contract_creating_tx_types( - selector=lambda tx_type: tx_type != 6 -) -@pytest.mark.parametrize( - "initcode", - [ - Bytes("0xEF"), - Bytes("0xEF01"), - Bytes("0xEF0101"), - Spec.delegation_designation(Address(0xAA)), - Bytes("0xEF02"), - Bytes("0xEF00"), - Bytes("0xEF0001"), - smallest_runtime_subcontainer, - smallest_initcode_subcontainer, - ], -) -def test_legacy_create_tx_prefix_initcode( - state_test: StateTestFiller, - pre: Alloc, - tx_type: int, - initcode: Bytes, -) -> None: - """ - Test that a legacy contract creation tx behaves as it did before EIP-7873 - for initcode stating with `EF`. The transaction should be valid but fail on - executing of the first byte `EF`. - """ - env = Environment() - sender = pre.fund_eoa() - gas_limit = 100_000 - - tx = Transaction( - ty=tx_type, - sender=sender, - to=None, - gas_limit=gas_limit, - data=initcode, - expected_receipt=TransactionReceipt(gas_used=gas_limit), - ) - - destination_contract_address = tx.created_contract - - post = { - destination_contract_address: Account.NONEXISTENT, - sender: Account(nonce=1), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_txcreate.py b/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_txcreate.py deleted file mode 100644 index 294596af6d..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_txcreate.py +++ /dev/null @@ -1,822 +0,0 @@ -"""Test good TXCREATE cases.""" - -import pytest -from execution_testing import ( - Account, - Address, - Alloc, - Bytecode, - Environment, - EVMCodeType, - Op, - StateTestFiller, - Storage, - Transaction, - compute_eofcreate_address, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from .. 
import EOF_FORK_NAME -from ..eip7069_extcall.spec import EXTCALL_SUCCESS, LEGACY_CALL_SUCCESS -from ..eip7620_eof_create.helpers import ( - slot_call_result, - slot_calldata, - slot_code_worked, - slot_create_address, - slot_data_load, - slot_last_slot, - slot_returndata_size, - smallest_initcode_subcontainer, - smallest_runtime_subcontainer, - value_canary_to_be_overwritten, - value_code_worked, - value_long_value, -) -from .spec import TXCREATE_FAILURE - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7873.md" -REFERENCE_SPEC_VERSION = "1115fe6110fcc0efc823fb7f8f5cd86c42173efe" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.with_all_evm_code_types -@pytest.mark.parametrize("tx_initcode_count", [1, 255, 256]) -def test_simple_txcreate( - state_test: StateTestFiller, pre: Alloc, tx_initcode_count: int -) -> None: - """Verifies a simple TXCREATE case.""" - env = Environment() - sender = pre.fund_eoa() - initcode_hash = smallest_initcode_subcontainer.hash - contract_address = pre.deploy_contract( - code=Op.SSTORE(0, Op.TXCREATE(tx_initcode_hash=initcode_hash)) - + Op.STOP, - storage={0: 0xB17D}, # a canary to be overwritten - ) - # Storage in 0 should have the address, - post = { - contract_address: Account( - storage={0: compute_eofcreate_address(contract_address, 0)} - ) - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - sender=sender, - initcodes=[smallest_initcode_subcontainer] * tx_initcode_count, - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -def test_txcreate_then_dataload( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Verifies that a contract returned with auxdata does not overwrite the - parent data. - """ - env = Environment() - sender = pre.fund_eoa() - small_auxdata_container = Container( - sections=[ - Section.Code(code=Op.RETURNCODE[0](0, 32)), - Section.Container(container=smallest_runtime_subcontainer), - ], - ) - initcode_hash = small_auxdata_container.hash - contract_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.SSTORE( - 0, Op.TXCREATE(tx_initcode_hash=initcode_hash) - ) - + Op.SSTORE(slot_data_load, Op.DATALOAD(0)) - + Op.STOP, - ), - Section.Data(data=value_long_value), - ], - ), - storage={slot_data_load: value_canary_to_be_overwritten}, - ) - - post = { - contract_address: Account( - storage={ - 0: compute_eofcreate_address(contract_address, 0), - slot_data_load: value_long_value, - } - ) - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - sender=sender, - initcodes=[small_auxdata_container], - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -def test_txcreate_then_call( - state_test: StateTestFiller, - pre: Alloc, - evm_code_type: EVMCodeType, -) -> None: - """ - Verifies a simple TXCREATE case, and then calls the deployed contract. 
- """ - env = Environment() - callable_contract = Container( - sections=[ - Section.Code( - code=Op.SSTORE(slot_code_worked, value_code_worked) + Op.STOP, - ), - ] - ) - callable_contract_initcode = Container( - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, 0), - ), - Section.Container(container=callable_contract), - ] - ) - initcode_hash = callable_contract_initcode.hash - - sender = pre.fund_eoa() - opcode = Op.EXTCALL if evm_code_type == EVMCodeType.EOF_V1 else Op.CALL - contract_address = pre.deploy_contract( - code=Op.SSTORE( - slot_create_address, Op.TXCREATE(tx_initcode_hash=initcode_hash) - ) - + opcode(address=Op.SLOAD(slot_create_address)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - - callable_address = compute_eofcreate_address(contract_address, 0) - - # Storage in 0 should have the address, - # - post = { - contract_address: Account( - storage={ - slot_create_address: callable_address, - slot_code_worked: value_code_worked, - } - ), - callable_address: Account( - storage={slot_code_worked: value_code_worked} - ), - } - - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - sender=sender, - initcodes=[callable_contract_initcode], - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -@pytest.mark.parametrize( - "auxdata_bytes", - [ - pytest.param(b"", id="zero"), - pytest.param(b"aabbcc", id="short"), - pytest.param(b"aabbccddeef", id="one_byte_short"), - pytest.param(b"aabbccddeeff", id="exact"), - pytest.param(b"aabbccddeeffg", id="one_byte_long"), - pytest.param(b"aabbccddeeffgghhii", id="extra"), - ], -) -def test_auxdata_variations( - state_test: StateTestFiller, - pre: Alloc, - auxdata_bytes: bytes, -) -> None: - """Verifies that auxdata bytes are correctly handled in RETURNCODE.""" - env = Environment() - auxdata_size = len(auxdata_bytes) - pre_deploy_header_data_size = 18 - pre_deploy_data = b"AABBCC" - deploy_success = ( - len(auxdata_bytes) + len(pre_deploy_data) - >= pre_deploy_header_data_size - ) - - runtime_subcontainer = Container( - name="Runtime Subcontainer with truncated data", - sections=[ - Section.Code(code=Op.STOP), - Section.Data( - data=pre_deploy_data, custom_size=pre_deploy_header_data_size - ), - ], - ) - - initcode_subcontainer = Container( - name="Initcode Subcontainer", - sections=[ - Section.Code( - code=Op.MSTORE(0, Op.PUSH32(auxdata_bytes.ljust(32, b"\0"))) - + Op.RETURNCODE[0](0, auxdata_size), - ), - Section.Container(container=runtime_subcontainer), - ], - ) - initcode_hash = initcode_subcontainer.hash - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Op.SSTORE( - slot_create_address, Op.TXCREATE(tx_initcode_hash=initcode_hash) - ) - + Op.STOP, - storage={slot_create_address: value_canary_to_be_overwritten}, - ) - - # Storage in 0 should have the address, - post = { - contract_address: Account( - storage={ - slot_create_address: compute_eofcreate_address( - contract_address, 0 - ) - if deploy_success - else b"\0" - } - ) - } - - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - sender=sender, - initcodes=[initcode_subcontainer], - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -def test_calldata(state_test: StateTestFiller, pre: Alloc) -> None: - """Verifies CALLDATA passing through TXCREATE.""" - env = Environment() - - initcode_subcontainer = Container( - name="Initcode Subcontainer", - sections=[ - Section.Code( - code=Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + 
Op.SSTORE(slot_calldata, Op.MLOAD(0)) - + Op.RETURNCODE[0](0, Op.CALLDATASIZE), - ), - Section.Container(container=smallest_runtime_subcontainer), - ], - ) - initcode_hash = initcode_subcontainer.hash - - calldata_size = 32 - calldata = b"\x45" * calldata_size - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Op.MSTORE(0, Op.PUSH32(calldata)) - + Op.SSTORE( - slot_create_address, - Op.TXCREATE( - tx_initcode_hash=initcode_hash, input_size=calldata_size - ), - ) - + Op.STOP, - ) - - # deployed contract is smallest plus data - deployed_contract = Container( - name="deployed contract", - sections=[ - *smallest_runtime_subcontainer.sections, - Section.Data(data=calldata), - ], - ) - # factory contract Storage in 0 should have the created address, - # created contract storage in 0 should have the calldata - created_address = compute_eofcreate_address(contract_address, 0) - post = { - contract_address: Account( - storage={slot_create_address: created_address} - ), - created_address: Account( - code=deployed_contract, storage={slot_calldata: calldata} - ), - } - - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - sender=sender, - initcodes=[initcode_subcontainer], - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.parametrize("outer_create_opcode", [Op.TXCREATE, Op.EOFCREATE]) -@pytest.mark.parametrize("inner_create_opcode", [Op.TXCREATE, Op.EOFCREATE]) -@pytest.mark.parametrize("outer_create_reverts", [True, False]) -def test_txcreate_in_initcode( - state_test: StateTestFiller, - pre: Alloc, - outer_create_opcode: Op, - inner_create_opcode: Op, - outer_create_reverts: bool, -) -> None: - """ - Verifies an TXCREATE occurring within initcode creates that contract. - - Via the `outer_create_reverts` also verifies a TXCREATE occurring in an - initcode is rolled back when the initcode reverts. 
- """ - smallest_initcode_subcontainer_hash = smallest_initcode_subcontainer.hash - inner_create_bytecode = ( - Op.TXCREATE(tx_initcode_hash=smallest_initcode_subcontainer_hash) - if inner_create_opcode == Op.TXCREATE - else Op.EOFCREATE[1](0, 0, 0, 0) - ) - # The terminating code of the inner initcontainer, the RJUMPI is a trick to - # not need to deal with the subcontainer indices - revert_code = Op.REVERT(0, 0) - terminating_code = ( - Op.RJUMPI[len(revert_code)](0) + revert_code + Op.RETURNCODE[0](0, 0) - if outer_create_reverts - else Op.RETURNCODE[0](0, 0) - ) - nested_initcode_subcontainer = Container( - sections=[ - Section.Code( - code=Op.SSTORE(slot_create_address, inner_create_bytecode) - + Op.SSTORE(slot_code_worked, value_code_worked) - + terminating_code, - ), - Section.Container(container=smallest_runtime_subcontainer), - ] - + ( - [Section.Container(container=smallest_initcode_subcontainer)] - if inner_create_opcode == Op.EOFCREATE - else [] - ) - ) - nested_initcode_subcontainer_hash = nested_initcode_subcontainer.hash - - outer_create_bytecode = ( - Op.TXCREATE(tx_initcode_hash=nested_initcode_subcontainer_hash) - if outer_create_opcode == Op.TXCREATE - else Op.EOFCREATE[0](0, 0, 0, 0) - ) - - env = Environment() - sender = pre.fund_eoa() - contract_code = Container( - sections=[ - Section.Code( - code=Op.SSTORE(slot_create_address, outer_create_bytecode) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ), - ] - + ( - [Section.Container(container=nested_initcode_subcontainer)] - if outer_create_opcode == Op.EOFCREATE - else [] - ) - ) - contract_address = pre.deploy_contract(code=contract_code) - - outer_address = compute_eofcreate_address(contract_address, 0) - inner_address = compute_eofcreate_address(outer_address, 0) - post = { - contract_address: Account( - storage={ - slot_create_address: outer_address - if not outer_create_reverts - else 0, - slot_code_worked: value_code_worked, - } - ), - outer_address: Account( - storage={ - slot_create_address: inner_address, - slot_code_worked: value_code_worked, - } - ) - if not outer_create_reverts - else Account.NONEXISTENT, - inner_address: Account() - if not outer_create_reverts - else Account.NONEXISTENT, - } - - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - sender=sender, - initcodes=[ - nested_initcode_subcontainer, - smallest_initcode_subcontainer, - ], - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -def test_return_data_cleared( - state_test: StateTestFiller, - pre: Alloc, - evm_code_type: EVMCodeType, -) -> None: - """ - Verifies the return data is not reused from a extcall but is cleared upon - TXCREATE. 
- """ - env = Environment() - value_return_canary = 0x4158675309 - value_return_canary_size = 5 - callable_address = pre.deploy_contract( - code=Container( - sections=[ - Section.Code( - code=Op.MSTORE(0, value_return_canary) - + Op.RETURN(0, value_return_canary_size), - ) - ] - ) - ) - initcode_hash = smallest_initcode_subcontainer.hash - - slot_returndata_size_2 = slot_last_slot * 2 + slot_returndata_size - sender = pre.fund_eoa() - opcode = Op.EXTCALL if evm_code_type == EVMCodeType.EOF_V1 else Op.CALL - contract_address = pre.deploy_contract( - code=Op.SSTORE(slot_call_result, opcode(address=callable_address)) - + Op.SSTORE(slot_returndata_size, Op.RETURNDATASIZE) - + Op.SSTORE( - slot_create_address, Op.TXCREATE(tx_initcode_hash=initcode_hash) - ) - + Op.SSTORE(slot_returndata_size_2, Op.RETURNDATASIZE) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - - new_contract_address = compute_eofcreate_address(contract_address, 0) - post = { - contract_address: Account( - storage={ - slot_call_result: EXTCALL_SUCCESS - if evm_code_type == EVMCodeType.EOF_V1 - else LEGACY_CALL_SUCCESS, - slot_returndata_size: value_return_canary_size, - slot_create_address: new_contract_address, - slot_returndata_size_2: 0, - slot_code_worked: value_code_worked, - }, - nonce=2, - ), - callable_address: Account(nonce=1), - new_contract_address: Account(nonce=1), - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - sender=sender, - initcodes=[smallest_initcode_subcontainer], - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -def test_address_collision( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """Tests address collision.""" - env = Environment( - gas_limit=300_000_000_000, - ) - - slot_create_address_2 = slot_last_slot * 2 + slot_create_address - slot_create_address_3 = slot_last_slot * 3 + slot_create_address - sender = pre.fund_eoa() - initcode_hash = smallest_initcode_subcontainer.hash - contract_address = pre.deploy_contract( - code=Op.SSTORE( - slot_create_address, Op.TXCREATE(tx_initcode_hash=initcode_hash) - ) - + Op.SSTORE( - slot_create_address_2, Op.TXCREATE(tx_initcode_hash=initcode_hash) - ) - + Op.SSTORE( - slot_create_address_3, - Op.TXCREATE(tx_initcode_hash=initcode_hash, salt=1), - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - salt_zero_address = compute_eofcreate_address(contract_address, 0) - salt_one_address = compute_eofcreate_address(contract_address, 1) - - # Hard-code address for collision, no other way to do this. - # We should mark tests that do this, and fail on unmarked tests. - pre[salt_one_address] = Account(balance=1, nonce=1) - - post = { - contract_address: Account( - storage={ - slot_create_address: salt_zero_address, - # had an in-transaction collision - slot_create_address_2: TXCREATE_FAILURE, - # had a pre-existing collision - slot_create_address_3: TXCREATE_FAILURE, - slot_code_worked: value_code_worked, - } - ) - } - - # Multiple create fails is expensive, use an absurd amount of gas - tx = Transaction( - to=contract_address, - gas_limit=300_000_000_000, - sender=sender, - initcodes=[smallest_initcode_subcontainer], - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -def test_txcreate_revert_eof_returndata( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Verifies the return data is not being deployed, even if happens to be valid - EOF. 
- """ - env = Environment() - code_reverts_with_calldata = Container( - name="Initcode Subcontainer reverting with its calldata", - sections=[ - Section.Code( - code=Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.REVERT(0, Op.CALLDATASIZE), - ), - ], - ) - initcode_hash = code_reverts_with_calldata.hash - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.SSTORE( - slot_create_address, - Op.TXCREATE( - tx_initcode_hash=initcode_hash, input_size=Op.CALLDATASIZE - ), - ) - + Op.SSTORE(slot_returndata_size, Op.RETURNDATASIZE) - + Op.STOP, - storage={slot_create_address: value_canary_to_be_overwritten}, - ) - new_address = compute_eofcreate_address(contract_address, 0) - - post = { - contract_address: Account( - storage={ - slot_create_address: 0, - slot_returndata_size: len(smallest_runtime_subcontainer), - }, - ), - new_address: Account.NONEXISTENT, - } - - tx = Transaction( - to=contract_address, - gas_limit=1_000_000, - sender=sender, - initcodes=[code_reverts_with_calldata], - # Simplest possible valid EOF container, which is going to be - # revert-returned from initcode and must not end up being deployed. - data=smallest_runtime_subcontainer, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -@pytest.mark.parametrize( - ["destination_code", "expected_result"], - [ - pytest.param(Op.ADDRESS, "destination"), - pytest.param(Op.CALLER, "caller"), - pytest.param(Op.CALLVALUE, "eofcreate_value"), - pytest.param(Op.ORIGIN, "sender"), - pytest.param(Op.SELFBALANCE, "selfbalance"), - pytest.param(Op.BALANCE(Op.CALLER), "factorybalance"), - ], -) -def test_txcreate_context( - state_test: StateTestFiller, - pre: Alloc, - destination_code: Bytecode, - expected_result: str, -) -> None: - """Test TXCREATE's initcode context instructions.""" - env = Environment() - sender = pre.fund_eoa() - value = 0x1123 - txcreate_value = 0x13 - - initcode = Container( - sections=[ - Section.Code( - Op.SSTORE(slot_call_result, destination_code) - + Op.RETURNCODE[0](0, 0) - ), - Section.Container(smallest_runtime_subcontainer), - ] - ) - initcode_hash = initcode.hash - - factory_address = pre.deploy_contract( - code=Op.SSTORE(slot_code_worked, value_code_worked) - + Op.TXCREATE(tx_initcode_hash=initcode_hash, value=txcreate_value) - + Op.STOP - ) - - destination_contract_address = compute_eofcreate_address( - factory_address, 0 - ) - - tx = Transaction( - sender=sender, - to=factory_address, - gas_limit=200_000, - value=value, - initcodes=[initcode], - ) - - expected_bytes: Address | int - if expected_result == "destination": - expected_bytes = destination_contract_address - elif expected_result == "caller": - expected_bytes = factory_address - elif expected_result == "sender": - expected_bytes = sender - elif expected_result == "eofcreate_value": - expected_bytes = txcreate_value - elif expected_result == "selfbalance": - expected_bytes = txcreate_value - elif expected_result == "factorybalance": - # Factory receives value from sender and passes on eofcreate_value as - # endowment. 
- expected_bytes = value - txcreate_value - else: - raise TypeError("Unexpected expected_result", expected_result) - - calling_storage = { - slot_code_worked: value_code_worked, - } - destination_contract_storage = { - slot_call_result: expected_bytes, - } - - post = { - factory_address: Account( - storage=calling_storage, balance=value - txcreate_value - ), - destination_contract_address: Account( - storage=destination_contract_storage, balance=txcreate_value - ), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.with_all_evm_code_types -def test_txcreate_memory_context( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Verifies an TXCREATE frame enjoys a separate EVM memory from its caller - frame. - """ - env = Environment() - destination_storage = Storage() - contract_storage = Storage() - initcontainer = Container( - sections=[ - Section.Code( - Op.SSTORE( - destination_storage.store_next(value_code_worked), - value_code_worked, - ) - + Op.SSTORE(destination_storage.store_next(0), Op.MSIZE()) - + Op.SSTORE(destination_storage.store_next(0), Op.MLOAD(0)) - + Op.MSTORE(0, 2) - + Op.MSTORE(32, 2) - + Op.RETURNCODE[0](0, 0) - ), - Section.Container(smallest_runtime_subcontainer), - ] - ) - initcode_hash = initcontainer.hash - contract_address = pre.deploy_contract( - code=Op.SSTORE( - contract_storage.store_next(value_code_worked), value_code_worked - ) - + Op.MSTORE(0, 1) - + Op.TXCREATE(tx_initcode_hash=initcode_hash) - + Op.SSTORE(contract_storage.store_next(32), Op.MSIZE()) - + Op.SSTORE(contract_storage.store_next(1), Op.MLOAD(0)) - + Op.SSTORE(contract_storage.store_next(0), Op.MLOAD(32)) - + Op.STOP, - ) - destination_contract_address = compute_eofcreate_address( - contract_address, 0 - ) - post = { - contract_address: Account(storage=contract_storage), - destination_contract_address: Account(storage=destination_storage), - } - tx = Transaction( - to=contract_address, - gas_limit=200_000, - sender=pre.fund_eoa(), - initcodes=[initcontainer], - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -def test_short_data_subcontainer( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Deploy a subcontainer where the data is "short" and filled by deployment - code. 
- """ - env = Environment() - sender = pre.fund_eoa() - - deploy_container = Container( - sections=[ - Section.Code(Op.STOP), - Section.Data(data="001122", custom_size=4), - ] - ) - initcontainer = Container( - sections=[ - Section.Code(code=Op.RETURNCODE[0](0, 5)), - Section.Container(deploy_container), - ], - ) - initcode_hash = initcontainer.hash - contract_address = pre.deploy_contract( - code=Op.SSTORE(0, Op.TXCREATE(tx_initcode_hash=initcode_hash)) - + Op.STOP, - storage={0: 0xB17D}, # a canary to be overwritten - ) - # Storage in 0 should have the address, - destination_address = compute_eofcreate_address(contract_address, 0) - destination_code = deploy_container.copy() - destination_code.sections[1] = Section.Data(data="0011220000000000") - post = { - contract_address: Account( - storage={0: compute_eofcreate_address(contract_address, 0)} - ), - destination_address: Account(code=destination_code), - } - tx = Transaction( - to=contract_address, - gas_limit=100_000, - sender=sender, - initcodes=[initcontainer], - ) - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_txcreate_failures.py b/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_txcreate_failures.py deleted file mode 100644 index d9b05d4249..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_txcreate_failures.py +++ /dev/null @@ -1,1089 +0,0 @@ -"""Test bad TXCREATE cases.""" - -import pytest -from execution_testing import ( - Account, - Address, - Alloc, - Bytecode, - Bytes, - Environment, - EVMCodeType, - Fork, - Hash, - Op, - StateTestFiller, - Transaction, - compute_eofcreate_address, -) -from execution_testing.test_types.eof.v1 import Container, Section -from execution_testing.test_types.eof.v1.constants import ( - MAX_BYTECODE_SIZE, - MAX_INITCODE_SIZE, -) - -from .. 
import EOF_FORK_NAME -from ..eip7069_extcall.spec import ( - EXTCALL_FAILURE, - EXTCALL_REVERT, - LEGACY_CALL_FAILURE, -) -from ..eip7620_eof_create.helpers import ( - aborting_container, - slot_call_or_create, - slot_call_result, - slot_code_should_fail, - slot_code_worked, - slot_counter, - slot_create_address, - slot_max_depth, - slot_returndata, - slot_returndata_size, - smallest_initcode_subcontainer, - smallest_runtime_subcontainer, - value_canary_should_not_change, - value_canary_to_be_overwritten, - value_code_worked, -) -from .spec import TXCREATE_FAILURE - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7873.md" -REFERENCE_SPEC_VERSION = "1115fe6110fcc0efc823fb7f8f5cd86c42173efe" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@pytest.mark.with_all_evm_code_types -@pytest.mark.parametrize( - "revert", - [ - pytest.param(b"", id="empty"), - pytest.param(b"\x08\xc3\x79\xa0", id="Error(string)"), - ], -) -def test_initcode_revert( - state_test: StateTestFiller, pre: Alloc, revert: bytes -) -> None: - """Verifies proper handling of REVERT in initcode.""" - env = Environment() - revert_size = len(revert) - - initcode_subcontainer = Container( - name="Initcode Subcontainer that reverts", - sections=[ - Section.Code( - code=Op.MSTORE(0, Op.PUSH32(revert)) - + Op.REVERT(32 - revert_size, revert_size), - ), - ], - ) - initcode_hash = initcode_subcontainer.hash - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Op.SSTORE( - slot_create_address, Op.TXCREATE(tx_initcode_hash=initcode_hash) - ) - + Op.SSTORE(slot_returndata_size, Op.RETURNDATASIZE) - + Op.RETURNDATACOPY( - Op.SUB(32, Op.RETURNDATASIZE), 0, Op.RETURNDATASIZE - ) - + Op.SSTORE(slot_returndata, Op.MLOAD(0)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ) - - post = { - contract_address: Account( - storage={ - slot_create_address: TXCREATE_FAILURE, - slot_returndata_size: revert_size, - slot_returndata: revert, - slot_code_worked: value_code_worked, - } - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - sender=sender, - initcodes=[initcode_subcontainer], - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -@pytest.mark.parametrize( - "initcode_hash", - [ - Bytes("").keccak256(), - Bytes("00" * 32), - Bytes("FF" * 32), - Bytes("EF01").keccak256(), - smallest_runtime_subcontainer.hash, - ], -) -@pytest.mark.parametrize("tx_initcode_count", [1, 255, 256]) -def test_txcreate_invalid_hash( - state_test: StateTestFiller, - pre: Alloc, - tx_initcode_count: int, - initcode_hash: Hash, -) -> None: - """Verifies proper handling of REVERT in initcode.""" - env = Environment() - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Op.SSTORE( - slot_create_address, Op.TXCREATE(tx_initcode_hash=initcode_hash) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ) - - post = { - contract_address: Account( - storage={ - slot_create_address: TXCREATE_FAILURE, - slot_code_worked: value_code_worked, - } - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - sender=sender, - initcodes=[smallest_initcode_subcontainer] * tx_initcode_count, - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -def test_initcode_aborts(state_test: StateTestFiller, pre: Alloc) -> None: - """Verifies correct handling 
of a halt in EOF initcode.""" - env = Environment() - sender = pre.fund_eoa() - initcode_hash = aborting_container.hash - contract_address = pre.deploy_contract( - code=Op.SSTORE( - slot_create_address, Op.TXCREATE(tx_initcode_hash=initcode_hash) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - # Storage in slot_create_address should not have the address, - post = { - contract_address: Account( - storage={ - slot_create_address: TXCREATE_FAILURE, - slot_code_worked: value_code_worked, - } - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - sender=sender, - initcodes=[aborting_container], - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -""" -Size of the initcode portion of test_txcreate_deploy_sizes, but as the runtime -code is dynamic, we have to use a pre-calculated size -""" -initcode_size = 32 - - -@pytest.mark.with_all_evm_code_types -@pytest.mark.parametrize( - "target_deploy_size", - [ - pytest.param(0x4000, id="large"), - pytest.param(MAX_BYTECODE_SIZE, id="max"), - pytest.param(MAX_BYTECODE_SIZE + 1, id="overmax"), - pytest.param( - MAX_INITCODE_SIZE - initcode_size - 1, id="below_initcodemax" - ), - pytest.param(MAX_INITCODE_SIZE - initcode_size, id="initcodemax"), - ], -) -def test_txcreate_deploy_sizes( - state_test: StateTestFiller, - pre: Alloc, - target_deploy_size: int, -) -> None: - """ - Verify a mix of runtime contract sizes. - - This mixes success and multiple size failure modes. - """ - env = Environment() - - runtime_container = Container( - sections=[ - Section.Code( - code=Op.JUMPDEST - * (target_deploy_size - len(smallest_runtime_subcontainer)) - + Op.STOP, - ), - ] - ) - - initcode_subcontainer = Container( - name="Initcode Subcontainer", - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, 0), - ), - Section.Container(container=runtime_container), - ], - ) - assert initcode_size == len(initcode_subcontainer) - len(runtime_container) - - assert initcode_size == ( - len(initcode_subcontainer) - len(runtime_container) - ), "initcode_size is wrong, expected initcode_size %d, calculated %d" % ( - initcode_size, - len(initcode_subcontainer) - len(runtime_container), - ) - initcode_hash = initcode_subcontainer.hash - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Op.SSTORE( - slot_create_address, Op.TXCREATE(tx_initcode_hash=initcode_hash) - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ) - # Storage in 0 should have the address, Storage 1 is a canary of 1 to make - # sure it tried to execute, which also covers cases of data+code being - # greater than initcode_size_max, which is allowed. 
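# --- editor's illustrative sketch; not part of the original test file ---
# The `success` predicate computed just below mirrors the EIP-170 runtime-code
# cap that this parametrization probes from both sides. A minimal standalone
# restatement, assuming the usual 24576-byte (0x6000) limit that the imported
# MAX_BYTECODE_SIZE constant is expected to encode:
EIP170_CODE_SIZE_LIMIT = 0x6000  # 24576 bytes; assumed equal to MAX_BYTECODE_SIZE

def deploy_expected_to_succeed(deployed_container_size: int) -> bool:
    """True when the deployed container fits under the runtime-code cap."""
    return deployed_container_size <= EIP170_CODE_SIZE_LIMIT

# Boundary behaviour matching the "max" and "overmax" test ids above.
assert deploy_expected_to_succeed(0x6000)
assert not deploy_expected_to_succeed(0x6000 + 1)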
- success = target_deploy_size <= MAX_BYTECODE_SIZE - post = { - contract_address: Account( - storage={ - slot_create_address: compute_eofcreate_address( - contract_address, 0 - ) - if success - else TXCREATE_FAILURE, - slot_code_worked: value_code_worked, - } - ), - compute_eofcreate_address(contract_address, 0): Account() - if success - else Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=20_000_000, - sender=sender, - initcodes=[initcode_subcontainer], - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -@pytest.mark.parametrize( - "auxdata_size", - [ - pytest.param( - MAX_BYTECODE_SIZE - len(smallest_runtime_subcontainer), - id="maxcode", - ), - pytest.param( - MAX_BYTECODE_SIZE - len(smallest_runtime_subcontainer) + 1, - id="overmaxcode", - ), - pytest.param(0x10000 - 60, id="almost64k"), - pytest.param(0x10000 - 1, id="64k-1"), - pytest.param(0x10000, id="64k"), - pytest.param(0x10000 + 1, id="over64k"), - ], -) -def test_auxdata_size_failures( - state_test: StateTestFiller, pre: Alloc, auxdata_size: int -) -> None: - """ - Exercises a number of auxdata size violations, and one maxcode success. - """ - env = Environment() - auxdata_bytes = b"a" * auxdata_size - - initcode_subcontainer = Container( - name="Initcode Subcontainer", - sections=[ - Section.Code( - code=Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.RETURNCODE[0](0, Op.CALLDATASIZE), - ), - Section.Container(container=smallest_runtime_subcontainer), - ], - ) - - sender = pre.fund_eoa() - initcode_hash = initcode_subcontainer.hash - contract_address = pre.deploy_contract( - code=Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) - + Op.SSTORE( - slot_create_address, - Op.TXCREATE( - tx_initcode_hash=initcode_hash, input_size=Op.CALLDATASIZE - ), - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - ) - - deployed_container_size = len(smallest_runtime_subcontainer) + auxdata_size - - # Storage in 0 will have address in first test, 0 in all other cases - # indicating failure - # - # Storage 1 in 1 is a canary to see if TXCREATE opcode halted - success = deployed_container_size <= MAX_BYTECODE_SIZE - post = { - contract_address: Account( - storage={ - slot_create_address: compute_eofcreate_address( - contract_address, 0 - ) - if deployed_container_size <= MAX_BYTECODE_SIZE - else 0, - slot_code_worked: value_code_worked, - } - ), - compute_eofcreate_address(contract_address, 0): Account() - if success - else Account.NONEXISTENT, - } - - tx = Transaction( - to=contract_address, - gas_limit=20_000_000, - sender=sender, - initcodes=[initcode_subcontainer], - data=auxdata_bytes, - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -@pytest.mark.parametrize( - "value", - [ - pytest.param(1, id="1_wei"), - pytest.param(10**9, id="1_gwei"), - ], -) -def test_txcreate_insufficient_stipend( - state_test: StateTestFiller, - pre: Alloc, - value: int, -) -> None: - """ - Exercises an TXCREATE that fails because the calling account does not have - enough ether to pay the stipend. 
- """ - env = Environment() - sender = pre.fund_eoa(10**11) - initcode_hash = smallest_initcode_subcontainer.hash - - contract_address = pre.deploy_contract( - code=Op.SSTORE( - slot_create_address, - Op.TXCREATE(tx_initcode_hash=initcode_hash, value=value), - ) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP, - balance=value - 1, - ) - # create will fail but not trigger a halt, so canary at storage 1 - # should be set - # - # also validate target created contract fails - post = { - contract_address: Account( - storage={ - slot_create_address: TXCREATE_FAILURE, - slot_code_worked: value_code_worked, - } - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=20_000_000, - sender=sender, - initcodes=[smallest_initcode_subcontainer], - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -def test_insufficient_initcode_gas( - state_test: StateTestFiller, pre: Alloc, fork: Fork -) -> None: - """ - Exercises an TXCREATE when there is not enough gas for the constant charge. - """ - env = Environment() - - initcode_container = Container( - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, 0), - ), - Section.Container(container=smallest_runtime_subcontainer), - ], - ) - initcode_hash = initcode_container.hash - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Op.SSTORE( - slot_create_address, Op.TXCREATE(tx_initcode_hash=initcode_hash) - ) - + Op.SSTORE(slot_code_should_fail, value_code_worked) - + Op.STOP, - storage={ - slot_create_address: value_canary_should_not_change, - slot_code_should_fail: value_canary_should_not_change, - }, - ) - # enough gas for everything but EVM opcodes and EIP-150 reserves - # FIXME: should not use that calculator!!! - # FIXME: the -1000 is a wild guess - revisit this - gas_limit = ( - 32_000 - - 1_000 - + fork.transaction_intrinsic_cost_calculator()( - calldata=initcode_container - ) - ) - # out_of_gas is triggered, so canary won't set value - # also validate target created contract fails - post = { - contract_address: Account( - storage={ - slot_create_address: value_canary_should_not_change, - slot_code_should_fail: value_canary_should_not_change, - }, - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=gas_limit, - sender=sender, - initcodes=[initcode_container], - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -def test_insufficient_gas_memory_expansion( - state_test: StateTestFiller, - pre: Alloc, - fork: Fork, -) -> None: - """ - Exercises TXCREATE when the memory for auxdata has not been expanded but is - requested. 
- """ - env = Environment() - - auxdata_size = 0x5000 - initcode_hash = smallest_initcode_subcontainer.hash - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Op.SSTORE( - slot_create_address, - Op.TXCREATE( - tx_initcode_hash=initcode_hash, input_size=auxdata_size - ), - ) - + Op.SSTORE(slot_code_should_fail, slot_code_worked) - + Op.STOP, - storage={ - slot_create_address: value_canary_should_not_change, - slot_code_should_fail: value_canary_should_not_change, - }, - ) - # enough gas for everything but EVM opcodes and EIP-150 reserves - auxdata_size_words = (auxdata_size + 31) // 32 - gas_limit = ( - 32_000 - + 3 * auxdata_size_words - + auxdata_size_words * auxdata_size_words // 512 - + fork.transaction_intrinsic_cost_calculator()( - calldata=smallest_initcode_subcontainer - ) - ) - # out_of_gas is triggered, so canary won't set value - # also validate target created contract fails - post = { - contract_address: Account( - storage={ - slot_create_address: value_canary_should_not_change, - slot_code_should_fail: value_canary_should_not_change, - }, - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=gas_limit, - sender=sender, - initcodes=[smallest_initcode_subcontainer], - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -def test_insufficient_returncode_auxdata_gas( - state_test: StateTestFiller, - pre: Alloc, - fork: Fork, -) -> None: - """ - Exercises a RETURNCODE when there is not enough gas for the initcode - charge. - """ - env = Environment() - - auxdata_size = 0x5000 - initcode_container = Container( - name="Large Initcode Subcontainer", - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0, auxdata_size), - ), - Section.Container(container=smallest_runtime_subcontainer), - ], - ) - initcode_hash = initcode_container.hash - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Op.SSTORE(slot_code_worked, value_code_worked) - + Op.TXCREATE(tx_initcode_hash=initcode_hash) - + Op.STOP, - storage={ - slot_code_worked: value_canary_to_be_overwritten, - }, - ) - # 63/64ths is not enough to cover RETURNCODE memory expansion. 
- # Unfortunately the 1/64th left won't realistically accommodate a SSTORE - auxdata_size_words = (auxdata_size + 31) // 32 - gas_limit = ( - 32_000 - + 2600 # SSTORE - + 3 * auxdata_size_words - + auxdata_size_words * auxdata_size_words // 512 - + fork.transaction_intrinsic_cost_calculator()( - calldata=initcode_container - ) - ) - # out_of_gas is triggered in the initcode context, so canary will set value - # also validate target created contract fails - post = { - contract_address: Account( - storage={ - slot_code_worked: value_code_worked, - }, - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=contract_address, - gas_limit=gas_limit, - sender=sender, - initcodes=[initcode_container], - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -@pytest.mark.parametrize( - "opcode", - [ - Op.STATICCALL, - Op.EXTSTATICCALL, - ], -) -@pytest.mark.parametrize("endowment", [0, 1]) # included to verify static flag -# check comes first -@pytest.mark.parametrize( - "initcode", - [smallest_initcode_subcontainer, aborting_container], - ids=["working_initcode", "aborting_code"], -) -def test_static_flag_txcreate( - state_test: StateTestFiller, - pre: Alloc, - opcode: Op, - endowment: int, - initcode: Container, -) -> None: - """Verifies correct handling of the static call flag with TXCREATE.""" - env = Environment() - initcode_hash = initcode.hash - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=Op.TXCREATE(tx_initcode_hash=initcode_hash, value=endowment) - + Op.STOP, - ) - calling_code = ( - Op.SSTORE(slot_call_result, opcode(address=contract_address)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ) - calling_address = pre.deploy_contract( - calling_code, - # Need to override the global value from the `with_all_evm_code_types` - # marker. - evm_code_type=EVMCodeType.EOF_V1 - if opcode == Op.EXTSTATICCALL - else EVMCodeType.LEGACY, - ) - - post = { - calling_address: Account( - storage={ - slot_call_result: EXTCALL_FAILURE - if opcode == Op.EXTSTATICCALL - else LEGACY_CALL_FAILURE, - slot_code_worked: value_code_worked, - } - ), - compute_eofcreate_address(contract_address, 0): Account.NONEXISTENT, - } - tx = Transaction( - to=calling_address, - gas_limit=10_000_000, - sender=sender, - initcodes=[initcode], - ) - - state_test(env=env, pre=pre, post=post, tx=tx) - - -magic_value_call = 0xCA11 -magic_value_create = 0xCC12EA7E - - -@pytest.mark.with_all_evm_code_types -@pytest.mark.parametrize( - "who_fails", - [magic_value_call, magic_value_create], - ids=["call_fails", "create_fails"], -) -@pytest.mark.pre_alloc_modify -def test_eof_txcreate_msg_depth( - state_test: StateTestFiller, - pre: Alloc, - who_fails: int, - evm_code_type: EVMCodeType, -) -> None: - """ - Test TXCREATE handles msg depth limit correctly (1024). - NOTE: due to block gas limit and the 63/64th rule this limit is - unlikely to be hit on mainnet. - NOTE: See - `tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py:: - test_eof_calls_msg_depth` for more explanations and comments. - Most notable deviation from that test is that here calls and `TXCREATE`s - alternate in order to reach the max depth. `who_fails` decides whether - the failing depth 1024 will be on a call or on an `TXCREATE` to happen. - """ - # Not a precise gas_limit formula, but enough to exclude risk of gas - # causing the failure. 
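The gas limit set on the next line over-approximates what the EIP-150 63/64 rule demands: each nested frame can forward at most 63/64 of its remaining gas, so still having gas to spare at depth 1024 needs roughly a (64/63)**1024 multiple at the top level. A small sketch of that inversion, ignoring the gas each frame itself burns (names are illustrative only):

    def min_top_level_gas(gas_at_depth: int, depth: int) -> int:
        """Rough top-level budget so `gas_at_depth` is still available after `depth` nested calls."""
        budget = gas_at_depth
        for _ in range(depth):
            budget = (budget * 64 + 62) // 63  # invert forwarded = available * 63 // 64
        return budget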
- gas_limit = int(20000000 * (64 / 63) ** 1024) - env = Environment(gas_limit=gas_limit) - - callee_address = Address(0x5000) - - # Memory offsets layout: - # - 0 - input - msg depth - # - 32 - output - msg depth - # - 64 - output - call result - # - 96 - output - magic value: create or call - returndatacopy_block = Op.RETURNDATACOPY(32, 0, 96) + Op.REVERT(32, 96) - deep_most_result_block = ( - Op.MSTORE(32, Op.ADD(Op.CALLDATALOAD(0), 1)) - + Op.MSTORE(64, Op.NOOP) - + Op.REVERT(32, 96) - ) - rjump_offset = len(returndatacopy_block) - initcode = Container.Code( - Op.MSTORE(0, Op.ADD(Op.CALLDATALOAD(0), 1)) - + Op.MSTORE(96, magic_value_call) - + Op.EXTCALL(address=callee_address, args_size=32) - + Op.RETURNDATASIZE - + Op.ISZERO - + Op.RJUMPI[rjump_offset] - + returndatacopy_block - + deep_most_result_block - ) - - initcode_hash = initcode.hash - sender = pre.fund_eoa() - - jump_code = ( - Op.RJUMPI[rjump_offset] - if evm_code_type == EVMCodeType.EOF_V1 - else Op.ADD(Op.PC, rjump_offset + 3) + Op.JUMPI - ) - callee_code = ( - Op.MSTORE(0, Op.ADD(Op.CALLDATALOAD(0), 1)) - + Op.MSTORE(96, magic_value_create) - + Op.TXCREATE( - tx_initcode_hash=initcode_hash, - salt=Op.CALLDATALOAD(0), - input_size=32, - ) - + Op.RETURNDATASIZE - + Op.ISZERO - + jump_code - + returndatacopy_block - + Op.JUMPDEST - + deep_most_result_block - ) - - pre.deploy_contract(callee_code, address=callee_address) - - calling_contract_address = pre.deploy_contract( - Container.Code( - Op.MSTORE(0, Op.CALLDATALOAD(0)) - + Op.EXTCALL(address=callee_address, args_size=32) - + Op.SSTORE(slot_max_depth, Op.RETURNDATALOAD(0)) - + Op.SSTORE(slot_call_result, Op.RETURNDATALOAD(32)) - + Op.SSTORE(slot_call_or_create, Op.RETURNDATALOAD(64)) - + Op.SSTORE(slot_code_worked, value_code_worked) - + Op.STOP - ) - ) - - # Only bumps the msg call depth "register" and forwards to the - # `calling_contract_address`. If it is used it makes the "failing" depth of - # 1024 to happen on TXCREATE, instead of CALL. - passthrough_address = pre.deploy_contract( - Container.Code( - Op.MSTORE(0, 1) - + Op.EXTCALL(address=calling_contract_address, args_size=32) - + Op.STOP - ) - ) - - tx = Transaction( - sender=sender, - initcodes=[initcode], - to=calling_contract_address - if who_fails == magic_value_call - else passthrough_address, - gas_limit=gas_limit, - data="", - ) - - calling_storage = { - slot_max_depth: 1024, - slot_code_worked: value_code_worked, - slot_call_result: EXTCALL_REVERT - if who_fails == magic_value_call - else TXCREATE_FAILURE, - slot_call_or_create: who_fails, - } - - post = { - calling_contract_address: Account(storage=calling_storage), - } - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) - - -@pytest.mark.with_all_evm_code_types -def test_reentrant_txcreate( - state_test: StateTestFiller, - pre: Alloc, -) -> None: - """ - Verifies a reentrant TXCREATE case, where EIP-161 prevents conflict via - nonce bump. - """ - env = Environment() - # Calls into the factory contract with 1 as input. - reenter_code = Op.MSTORE(0, 1) + Op.EXTCALL( - address=Op.CALLDATALOAD(32), args_size=32 - ) - # Initcode: if given 0 as 1st word of input will call into the factory - # again. 2nd word of input is the address of the factory. 
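The reentrancy scenario here hinges on the EIP-161 nonce bump spelled out in the comments below: a newly created contract account starts at nonce 1, so the second, reentrant create targeting the same address sees a non-empty account and fails before its initcode runs. A minimal sketch of that collision check; the account shape is hypothetical, not the framework's Account type:

    from dataclasses import dataclass

    @dataclass
    class _Existing:  # hypothetical stand-in for a world-state lookup result
        nonce: int = 0
        code: bytes = b""

    def create_collides(existing: _Existing | None) -> bool:
        """Creation fails if the target address already carries a nonce or code."""
        return existing is not None and (existing.nonce != 0 or len(existing.code) != 0)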
- initcontainer = Container( - sections=[ - Section.Code( - Op.SSTORE(slot_counter, Op.ADD(Op.SLOAD(slot_counter), 1)) - + Op.CALLDATALOAD(0) - + Op.RJUMPI[len(reenter_code)] - + reenter_code - + Op.RETURNCODE[0](0, 0) - ), - Section.Container(smallest_runtime_subcontainer), - ] - ) - initcode_hash = initcontainer.hash - # Factory: - # Passes on its input into the initcode. - # It's 0 first time, 1 the second time. - # Saves the result of deployment in slot 0 first time, 1 the second time. - contract_address = pre.deploy_contract( - code=Op.CALLDATACOPY(0, 0, 32) - + Op.MSTORE(32, Op.ADDRESS) - # 1st word - copied from input (reenter flag) - # 2nd word - `this.address` - + Op.SSTORE( - Op.CALLDATALOAD(0), - Op.TXCREATE(tx_initcode_hash=initcode_hash, input_size=64), - ) - + Op.STOP, - storage={0: 0xB17D, 1: 0xB17D}, # a canary to be overwritten - ) - # Flow is: reenter flag 0 -> factory -> reenter flag 0 -> initcode - # -> reenter -> reenter flag 1 -> factory -> reenter flag 1 - # -> (!) initcode -> stop, - # if the EIP-161 nonce bump is not implemented. If it is, it fails before - # second initcode marked (!). - # Storage in 0 should have the address from the outer TXCREATE. - # Storage in 1 should have 0 from the inner TXCREATE. - # For the created contract storage in `slot_counter` should be 1 as - # initcode executes only once - post = { - contract_address: Account( - storage={ - 0: compute_eofcreate_address(contract_address, 0), - 1: 0, - } - ), - compute_eofcreate_address(contract_address, 0): Account( - nonce=1, - code=smallest_runtime_subcontainer, - storage={slot_counter: 1}, - ), - } - tx = Transaction( - to=contract_address, - gas_limit=500_000, - initcodes=[initcontainer], - sender=pre.fund_eoa(), - ) - state_test(env=env, pre=pre, post=post, tx=tx) - - -@pytest.mark.with_all_evm_code_types -@pytest.mark.parametrize( - "reason", - [ - "valid", - "invalid_deploy_container", - "invalid_initcode", - "invalid_opcode_during_initcode", - "invalid_opcode_with_sstore_during_initcode", - "revert_opcode_during_initcode", - "out_of_gas_during_initcode", - "out_of_gas_when_returning_contract", - "out_of_gas_when_returning_contract_due_to_memory_expansion", - ], -) -def test_invalid_container_deployment( - state_test: StateTestFiller, - fork: Fork, - pre: Alloc, - reason: str, -) -> None: - """ - Verify contract is not deployed when an invalid container deployment is - attempted. 
- """ - env = Environment() - sender = pre.fund_eoa() - - # Valid defaults - deployed_container = Container( - sections=[ - Section.Code(code=Op.CALLF[1](Op.PUSH0, Op.PUSH0) + Op.STOP), - Section.Code(code=Op.ADD + Op.RETF, code_inputs=2, code_outputs=1), - ] - ) - initcontainer: Container = Container( - sections=[ - Section.Code(code=Op.RETURNCODE[0](0, 0)), - Section.Container(deployed_container), - ], - ) - tx_gas_limit = 100_000 - fork_intrinsic_gas_calculator = ( - fork.transaction_intrinsic_cost_calculator() - ) - fork_gas_costs = fork.gas_costs() - - # Modify defaults based on invalidity reason - if reason == "invalid_deploy_container": - deployed_container = Container( - sections=[ - Section.Code(code=Op.CALLF[1](Op.PUSH0, Op.PUSH0) + Op.STOP), - Section.Code(code=Op.ADD + Op.RETF, code_outputs=0), - ] - ) - initcontainer = Container( - sections=[ - Section.Code(code=Op.RETURNCODE[0](0, 0)), - Section.Container(deployed_container), - ], - ) - elif reason == "invalid_initcode": - initcontainer = Container( - sections=[ - Section.Code(code=Op.RETURNCODE[1](0, 0)), - Section.Container(deployed_container), - ], - ) - elif ( - reason == "invalid_opcode_during_initcode" - or reason == "invalid_opcode_with_sstore_during_initcode" - or reason == "revert_opcode_during_initcode" - or reason == "out_of_gas_during_initcode" - ): - invalid_code_path: Bytecode - if reason == "invalid_opcode_with_sstore_during_initcode": - invalid_code_path = Op.SSTORE(0, 1) + Op.INVALID - elif reason == "revert_opcode_during_initcode": - invalid_code_path = Op.REVERT(0, 0) - elif reason == "out_of_gas_during_initcode": - invalid_code_path = Op.MSTORE(0xFFFFFFFFFFFFFFFFFFFFFFFFFFF, 1) - elif reason == "invalid_opcode_during_initcode": - invalid_code_path = Op.INVALID - else: - raise Exception(f"invalid case: {reason}") - initcontainer = Container( - sections=[ - Section.Code( - code=Op.RJUMPI[len(invalid_code_path)](Op.PUSH0) - + invalid_code_path - + Op.RETURNCODE[0](0, 0) - ), - Section.Container(deployed_container), - ], - ) - elif reason == "out_of_gas_when_returning_contract": - factory_gas_cost = ( - 7 * fork_gas_costs.G_VERY_LOW - + fork_gas_costs.G_STORAGE_SET - + fork_gas_costs.G_COLD_SLOAD - + fork_gas_costs.G_CREATE - ) - initcode_gas_cost = 2 * fork_gas_costs.G_VERY_LOW - tx_gas_limit = ( - fork_intrinsic_gas_calculator(calldata=initcontainer) - + factory_gas_cost - + (initcode_gas_cost - 1) * 64 // 63 - ) - elif ( - reason == "out_of_gas_when_returning_contract_due_to_memory_expansion" - ): - factory_gas_cost = ( - 7 * fork_gas_costs.G_VERY_LOW - + fork_gas_costs.G_STORAGE_SET - + fork_gas_costs.G_COLD_SLOAD - + fork_gas_costs.G_CREATE - ) - initcode_gas_cost = ( - # Code deposit gas cost - len(deployed_container) * fork_gas_costs.G_CODE_DEPOSIT_BYTE - # Two push opcodes - + 2 * fork_gas_costs.G_VERY_LOW - ) - tx_gas_limit = ( - fork_intrinsic_gas_calculator(calldata=initcontainer) - + factory_gas_cost - + initcode_gas_cost * 64 // 63 - ) - initcontainer = Container( - sections=[ - Section.Code( - code=Op.RETURNCODE[0](0xFFFFFFFFFFFFFFFFFFFFFFFFFFF, 0x1) - ), - Section.Container(deployed_container), - ], - ) - elif reason == "valid": - pass - else: - raise TypeError("Unexpected reason", reason) - - initcode_hash = initcontainer.hash - contract_address = pre.deploy_contract( - code=Op.SSTORE(slot_code_worked, value_code_worked) - + Op.TXCREATE(tx_initcode_hash=initcode_hash) - + Op.STOP - ) - - tx = Transaction( - to=contract_address, - sender=sender, - gas_limit=tx_gas_limit, - 
initcodes=[initcontainer], - ) - - destination_contract_address = compute_eofcreate_address( - contract_address, 0 - ) - - post = ( - { - destination_contract_address: Account.NONEXISTENT, - contract_address: Account( - nonce=1 - if reason in ["invalid_initcode", "invalid_deploy_container"] - else 2, - storage={ - slot_code_worked: value_code_worked, - }, - ), - } - if reason != "valid" - else { - destination_contract_address: Account( - nonce=1, code=deployed_container - ), - contract_address: Account( - nonce=2, - storage={ - slot_code_worked: value_code_worked, - }, - ), - } - ) - - state_test( - env=env, - pre=pre, - post=post, - tx=tx, - ) diff --git a/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_txcreate_validates.py b/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_txcreate_validates.py deleted file mode 100644 index 52a3cdbf93..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eip7873_tx_create/test_txcreate_validates.py +++ /dev/null @@ -1,273 +0,0 @@ -"""Test bad TXCREATE cases.""" - -from enum import Enum, auto, unique -from typing import Tuple - -import pytest -from execution_testing import ( - AccessList, - Account, - Address, - Alloc, - Bytecode, - Bytes, - Environment, - EVMCodeType, - Hash, - Op, - StateTestFiller, - Transaction, - compute_eofcreate_address, -) -from execution_testing.test_types.eof.v1 import Container, Section - -from ....prague.eip7702_set_code_tx.spec import Spec -from .. import EOF_FORK_NAME -from ..eip7620_eof_create.helpers import ( - slot_a, - slot_b, - slot_code_worked, - smallest_initcode_subcontainer, - smallest_runtime_subcontainer, - value_code_worked, -) -from .spec import TXCREATE_FAILURE - -REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7873.md" -REFERENCE_SPEC_VERSION = "1115fe6110fcc0efc823fb7f8f5cd86c42173efe" - -pytestmark = pytest.mark.valid_from(EOF_FORK_NAME) - - -@unique -class ValidatedCode(Enum): - """Kinds of valid/invalid EOF.""" - - LEGACY = auto() - EF = auto() - EOFV1_RUNTIME = auto() - EOFV1_RUNTIME_INVALID = auto() - EOFV1_INITCODE = auto() - EOFV2 = auto() - DELEGATION = auto() - SUBCONTAINER_INVALID = auto() - - def bytecode(self) -> Bytecode | Container | Bytes: - """Bytecode for the code to validate.""" - match self: - case ValidatedCode.LEGACY: - return Op.STOP - case ValidatedCode.EF: - return Bytes("0xEF") - case ValidatedCode.EOFV1_RUNTIME: - return smallest_runtime_subcontainer - case ValidatedCode.EOFV1_RUNTIME_INVALID: - return Container.Code(Op.ADD) - case ValidatedCode.EOFV1_INITCODE: - return smallest_initcode_subcontainer - case ValidatedCode.EOFV2: - return Bytes("0xEF0002") - case ValidatedCode.DELEGATION: - return Bytes( - Spec.DELEGATION_DESIGNATION + Bytes("".join(20 * ["ab"])) - ) - case ValidatedCode.SUBCONTAINER_INVALID: - return Container( - sections=[ - Section.Code(Op.RETURNCODE[0](0, 0)), - Section.Container(Container.Code(Op.ADD)), - ] - ) - - def valid(self) -> bool: - """Whether the code is valid in EOF v1.""" - return self in [ValidatedCode.EOFV1_INITCODE] - - def __str__(self) -> str: - """Return string representation of the enum.""" - return f"{self.name}" - - -class Factory(Enum): - """ - Kinds of systems leading up to a call to TXCREATE. - - DIRECT just puts the TXCREATE in the code it generates, while *CALL ones - call into another account which does the TXCREATE. 
- """ - - DIRECT = auto() - WITH_CALL = auto() - WITH_DELEGATECALL = auto() - WITH_STATICCALL = auto() - - def creation_snippet( - self, - initcode_hash: Hash, - pre: Alloc, - salt: int, - evm_code_type: EVMCodeType, - value: int, - input_size: int, - ) -> Tuple[Bytecode, Address | None]: - """ - Return snippet to cause TXCREATE to be called along with an address. - - This will end up in the `compute_eofcreate_address` or None if that - would be the snippet itself. - """ - if evm_code_type not in [EVMCodeType.LEGACY, EVMCodeType.EOF_V1]: - raise Exception(f"Test needs to be updated for {evm_code_type}") - # Snippet which invokes the TXCREATE itself - txcreate_code = Op.TXCREATE( - tx_initcode_hash=initcode_hash, - salt=salt, - value=value, - input_size=input_size, - ) - # Snippet which returns the TXCREATE result to caller - callee_txcreate_code = Op.MSTORE(0, txcreate_code) + Op.RETURN(0, 32) - # Snippet which recovers the TXCREATE result from returndata (wipes - # memory afterwards) - returndataload_code = ( - Op.RETURNDATALOAD - if evm_code_type == EVMCodeType.EOF_V1 - else Op.RETURNDATACOPY(0, 0, Op.RETURNDATASIZE) - + Op.MLOAD(0) - + Op.MSTORE(0, 0) - ) - match self: - case Factory.DIRECT: - return txcreate_code, None - case Factory.WITH_CALL: - callee_address = pre.deploy_contract(callee_txcreate_code) - if evm_code_type == EVMCodeType.EOF_V1: - return Op.EXTCALL( - address=callee_address - ) + returndataload_code, callee_address - else: - return Op.CALL( - address=callee_address - ) + returndataload_code, callee_address - case Factory.WITH_DELEGATECALL: - callee_address = pre.deploy_contract(callee_txcreate_code) - if evm_code_type == EVMCodeType.EOF_V1: - return Op.EXTDELEGATECALL( - address=callee_address - ) + returndataload_code, None - else: - return Op.DELEGATECALL( - address=callee_address - ) + returndataload_code, None - case Factory.WITH_STATICCALL: - callee_address = pre.deploy_contract(callee_txcreate_code) - if evm_code_type == EVMCodeType.EOF_V1: - return Op.EXTSTATICCALL( - address=callee_address - ) + returndataload_code, None - else: - return Op.STATICCALL( - address=callee_address - ) + returndataload_code, None - - def __str__(self) -> str: - """Return string representation of the enum.""" - return f"{self.name}" - - -@pytest.mark.with_all_evm_code_types -# Subset chosen to limit number of test cases -@pytest.mark.parametrize( - "code_a", [ValidatedCode.EOFV1_INITCODE, ValidatedCode.LEGACY] -) -@pytest.mark.parametrize("code_b", ValidatedCode) -# Subset chosen to limit number of test cases -@pytest.mark.parametrize("factory_a", [Factory.DIRECT, Factory.WITH_CALL]) -@pytest.mark.parametrize("factory_b", Factory) -@pytest.mark.parametrize("value", [0, 1]) -@pytest.mark.parametrize("input_size", [0, 31]) -@pytest.mark.parametrize("access_list_a", [True, False]) -def test_txcreate_validates( - state_test: StateTestFiller, - pre: Alloc, - code_a: ValidatedCode, - code_b: ValidatedCode, - factory_a: Factory, - factory_b: Factory, - evm_code_type: EVMCodeType, - value: int, - input_size: int, - access_list_a: bool, -) -> None: - """ - Verifies proper validation of initcode on TXCREATE in various scenarios. 
- """ - env = Environment() - snippet_a, factory_address_a = factory_a.creation_snippet( - Bytes(code_a.bytecode()).keccak256(), - pre, - 0, - evm_code_type, - value, - input_size, - ) - snippet_b, factory_address_b = factory_b.creation_snippet( - Bytes(code_b.bytecode()).keccak256(), - pre, - 1, - evm_code_type, - value, - input_size, - ) - - sender = pre.fund_eoa() - contract_address = pre.deploy_contract( - code=( - Op.SSTORE(slot_code_worked, value_code_worked) - + Op.SSTORE(slot_a, snippet_a) - + Op.SSTORE(slot_b, snippet_b) - + Op.STOP - ) - ) - - create_address_a = ( - factory_address_a if factory_address_a else contract_address - ) - create_address_b = ( - factory_address_b if factory_address_b else contract_address - ) - destination_address_a = compute_eofcreate_address(create_address_a, 0) - destination_address_b = compute_eofcreate_address(create_address_b, 1) - post = { - contract_address: Account( - storage={ - slot_a: destination_address_a - if code_a.valid() - and value == 0 - and factory_a != Factory.WITH_STATICCALL - else TXCREATE_FAILURE, - slot_b: destination_address_b - if code_b.valid() - and value == 0 - and factory_b != Factory.WITH_STATICCALL - else TXCREATE_FAILURE, - slot_code_worked: value_code_worked, - } - ) - } - - if access_list_a: - access_list = [ - AccessList(address=destination_address_a, storage_keys=[Hash(0x0)]) - ] - else: - access_list = [] - - tx = Transaction( - to=contract_address, - gas_limit=10_000_000, - sender=sender, - initcodes=[code_a.bytecode(), code_b.bytecode()], - access_list=access_list, - ) - state_test(env=env, pre=pre, post=post, tx=tx) diff --git a/tests/unscheduled/eip7692_eof_v1/eof_tracker.md b/tests/unscheduled/eip7692_eof_v1/eof_tracker.md deleted file mode 100644 index 98e02aa9c8..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/eof_tracker.md +++ /dev/null @@ -1,502 +0,0 @@ -# EOF Testing Coverage Tracker - -- [ ] Example Test Case 1 -- [x] Example Test Case 2 ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_eof_example.py`](./eip3540_eof_v1/test_eof_example/index.md)) - -## EIP-3540: EOF - EVM Object Format v1 - -### Validation - -- [x] Empty code is not a valid EOF ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k empty_container`) -- [x] Valid container without data section ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k single_code_section_no_data_section`) -- [x] Valid container with data section ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_valid_containers`](./eip3540_eof_v1/test_container_validation/test_valid_containers.md) `-k single_code_section_with_data_section`) -- [x] Valid container with truncated data section ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k data_section_contents_incomplete`, [`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_migrated_valid_invalid.py::test_migrated_valid_invalid`](./eip3540_eof_v1/test_migrated_valid_invalid/test_migrated_valid_invalid.md) `-k data_section_contents_incomplete`) -- [x] Valid container with data section truncated to empty 
([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k no_data_section_contents`, [`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_migrated_valid_invalid.py::test_migrated_valid_invalid`](./eip3540_eof_v1/test_migrated_valid_invalid/test_migrated_valid_invalid.md) `-k no_data_section_contents`) -- [x] Valid containers with multiple code sections ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_valid_containers`](./eip3540_eof_v1/test_container_validation/test_valid_containers.md) `-k multiple_code_sections`) -- [x] Valid containers with max number of code sections ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_max_code_sections`](./eip3540_eof_v1/test_container_validation/test_max_code_sections.md)) -- [x] Too many code sections ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k too_many_code_sections`) -- [x] Truncated magic ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k incomplete_magic`) -- [x] Valid container except magic ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_magic_validation`](./eip3540_eof_v1/test_container_validation/test_magic_validation.md)) -- [x] Truncated before version ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k no_version`) -- [x] Valid container except version ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_version_validation`](./eip3540_eof_v1/test_container_validation/test_version_validation.md)) -- [x] Truncated before type section header ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k no_type_header`) -- [x] Truncated before type section size ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k no_type_section_size`) -- [x] Truncated type section size ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k incomplete_type_section_size`) -- [x] No type section header ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_order.py::test_section_order`](./eip3540_eof_v1/test_section_order/index.md) `-k test_position_CasePosition.HEADER-section_test_SectionTest.MISSING-section_kind_TYPE`) -- [x] Truncated before code section header ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k no_code_header`) -- [x] Truncated before code section number ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k code_section_count_missing`) -- [x] Truncated code 
section number ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k code_section_count_incomplete`) -- [x] Truncated before code section size ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k code_section_size_missing`) -- [x] Truncated code section size ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k code_section_size_incomplete`) -- [x] No code section header ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_order.py::test_section_order`](./eip3540_eof_v1/test_section_order/index.md) `-k test_position_CasePosition.HEADER-section_test_SectionTest.MISSING-section_kind_CODE`) -- [x] Zero code section number ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k zero_code_sections_header`) -- [x] Zero code section size ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k empty_code_section`) -- [x] Zero code section size with non-empty data section ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k empty_code_section_with_non_empty_data`) -- [x] No container sections, truncated before data section header ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k truncated_header_data_section`) -- [x] Container sections present, truncated before data section header ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k truncated_header_data_section_with_container_section`) -- [x] Truncated before data section size ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k no_data_section_size`) -- [x] Truncated data section size ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k data_section_size_incomplete`) -- [x] Truncated before header terminator ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k terminator_incomplete`) -- [x] Truncated before type section ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k truncated_before_type_section`) -- [x] Type section truncated before outputs ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) 
`-k truncated_type_section_before_outputs`) -- [x] Type section truncated before max_stack_height ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k truncated_type_section_before_max_stack_height`) -- [x] Type section truncated max_stack_height ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k truncated_type_section_truncated_max_stack_height`) -- [x] Truncated before code sections ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k no_code_section_contents`) -- [x] Truncated code section ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k incomplete_code_section_contents`) -- [x] Data section empty, trailing bytes ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k no_data_section_contents`) -- [x] Data section non-empty, trailing bytes ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k trailing_bytes_after_data_section`) -- [x] Wrong order of sections ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_order.py`](./eip3540_eof_v1/index.md)) -- [x] No data section header ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_order.py::test_section_order`](./eip3540_eof_v1/test_section_order/index.md) `-k test_position_CasePosition.HEADER-section_test_SectionTest.MISSING-section_kind_DATA`) -- [x] Multiple data sections ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k multiple_data_sections`, [`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k multiple_code_and_data_sections`) -- [x] Unknown section id ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k unknown_section_1`, [`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k unknown_section_2`) -- [x] Type section size != 4 * code section number ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k type_section_too`) -- [x] Code section with max max_stack_height ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_valid_containers`](./eip3540_eof_v1/test_container_validation/test_valid_containers.md) `-k single_code_section_max_stack_size`) -- [x] Code section with max_stack_height above limit 
([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_invalid_containers`](./eip3540_eof_v1/test_container_validation/test_invalid_containers.md) `-k single_code_section_max_stack_size_too_large`) -- [x] Valid code sections with inputs/outputs ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k code_section_with_inputs_outputs`) -- [x] Valid code section with max inputs ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k code_section_input_maximum`) -- [x] Valid code section with max outputs ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k code_section_output_maximum`) -- [x] Code sections with invalid number of inputs/outputs ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k code_section_input_too_large`, [`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k code_section_output_too_large`) -- [x] First section with inputs/outputs ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k invalid_first_code_section`) -- [x] Multiple type section headers ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k too_many_type_sections`) -- [x] Multiple code section headers ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k multiple_code_headers`) -- [x] Multiple data section headers ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k multiple_data_sections`) -- [x] Container without type section ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_order.py::test_section_order`](./eip3540_eof_v1/test_section_order/index.md) `-k 'SectionTest.MISSING-section_kind_TYPE'`) -- [x] Container without code sections ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_order.py::test_section_order`](./eip3540_eof_v1/test_section_order/index.md) `-k 'SectionTest.MISSING-section_kind_CODE'`) -- [x] Container without data section ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_section_order.py::test_section_order`](./eip3540_eof_v1/test_section_order/index.md) `-k 'SectionTest.MISSING-section_kind_DATA'`) -- [x] Valid containers without data section and with subcontainers ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_valid_containers`](./eip3540_eof_v1/test_container_validation/test_valid_containers.md)`[fork_unscheduled-eof_test-single_subcontainer_without_data]`) -- [x] Valid containers with data section and with subcontainers ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_valid_containers`](./eip3540_eof_v1/test_container_validation/test_valid_containers.md)`[fork_unscheduled-eof_test-single_subcontainer_with_data]`) -- [x] Valid container with maximum number of subcontainers 
([`tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_subcontainer_validation.py::test_wide_container`](./eip7620_eof_create/test_subcontainer_validation/test_wide_container.md)`[fork_unscheduled-eof_test-256]`) -- [x] Container with number of subcontainers above the limit ([`tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_subcontainer_validation.py::test_wide_container`](./eip7620_eof_create/test_subcontainer_validation/test_wide_container.md)`[fork_unscheduled-eof_test-257]`) -- [x] Subcontainer section header truncated before subcontainer number ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k no_container_section_count`) -- [x] Subcontainer section header truncated before subcontainer size ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k incomplete_container_section_count`) -- [x] Truncated subcontainer size ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k no_container_section_size`, [`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k incomplete_container_section_size`) -- [x] Zero container section number ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k zero_container_section_count`) -- [x] Zero container section size ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k zero_size_container_section`) -- [x] Truncated container section body ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k no_container_section_contents`) -- [x] Multiple container section headers ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k multiple_container_headers`) -- [x] Invalid subcontainer ([`tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_subcontainer_validation.py`](./eip7620_eof_create/test_subcontainer_validation/index.md)`-k invalid`) -- [x] Invalid subcontainer on a deep nesting level ([`tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_subcontainer_validation.py::test_deep_container`](./eip7620_eof_create/test_subcontainer_validation/test_deep_container.md)) -- [x] Max number of inputs/outputs in a section ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_valid_containers`](./eip3540_eof_v1/test_container_validation/test_valid_containers.md)`[fork_unscheduled-eof_test-code_section_input_maximum]`, [`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py::test_valid_containers`](./eip3540_eof_v1/test_container_validation/test_valid_containers.md)`[fork_unscheduled-eof_test-code_section_output_maximum]`) -- [x] Number of inputs/outputs in a section above the limit ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k code_section_input_too_large`, [`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_container_validation.py`](./eip3540_eof_v1/test_container_validation/index.md)`-k code_section_output_too_large`) - -### Execution - -- [x] 
Execution of EOF contracts ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_execution.py::test_eof_execution`](./eip3540_eof_v1/test_execution/test_eof_execution.md)) -- [x] Legacy executing EXTCODESIZE of EOF contract ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_extcode.py::test_legacy_calls_eof_sstore`](./eip3540_eof_v1/test_extcode/test_legacy_calls_eof_sstore.md)) -- [x] Legacy executing EXTCODEHASH of EOF contract ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_extcode.py::test_legacy_calls_eof_sstore`](./eip3540_eof_v1/test_extcode/test_legacy_calls_eof_sstore.md)) -- [x] Legacy executing EXTCODECOPY of EOF contract ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_extcode.py::test_legacy_calls_eof_sstore`](./eip3540_eof_v1/test_extcode/test_legacy_calls_eof_sstore.md)) - -## EIP-3670: EOF - Code Validation - -### Validation - -- [x] Code section with invalid opcodes is rejected ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_all_opcodes_in_container.py::test_all_opcodes_in_container`](./eip3540_eof_v1/test_all_opcodes_in_container/test_all_opcodes_in_container.md)) -- [x] INVALID opcode is valid ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_all_opcodes_in_container.py::test_all_opcodes_in_container`](./eip3540_eof_v1/test_all_opcodes_in_container/test_all_opcodes_in_container.md)) -- [x] Truncated PUSH data ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_all_opcodes_in_container.py::test_truncated_data_portion_opcodes`](./eip3540_eof_v1/test_all_opcodes_in_container/test_truncated_data_portion_opcodes.md)) -- [x] Opcodes deprecated in EOF are rejected ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_all_opcodes_in_container.py::test_all_opcodes_in_container`](./eip3540_eof_v1/test_all_opcodes_in_container/test_all_opcodes_in_container.md)) -- [x] Codes with each valid opcodes ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_all_opcodes_in_container.py::test_all_opcodes_in_container`](./eip3540_eof_v1/test_all_opcodes_in_container/test_all_opcodes_in_container.md)) -- [x] Undefined instruction after terminating instruction ([`tests/unscheduled/eip7692_eof_v1/eip3540_eof_v1/test_all_opcodes_in_container.py::test_invalid_opcodes_after_stop`](./eip3540_eof_v1/test_all_opcodes_in_container/test_invalid_opcodes_after_stop.md)) - -## EIP-4200: EOF - Static relative jumps - -### Validation - -- [x] Valid RJUMP with various offsets ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_positive_negative`](./eip4200_relative_jumps/test_rjump/test_rjump_positive_negative.md)) -- [x] Valid RJUMP with maximum offset ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_maxes`](./eip4200_relative_jumps/test_rjump/test_rjump_maxes.md)) -- [x] Valid RJUMP with minimum offset ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_maxes`](./eip4200_relative_jumps/test_rjump/test_rjump_maxes.md)) -- [x] Valid RJUMPI with various offsets ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_forwards`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_forwards.md)) -- [x] Valid RJUMPI with maximum offset ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_max_forward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_max_forward.md)) -- [x] Valid RJUMPI with minimum offset 
([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_max_backward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_max_backward.md)) -- [x] Valid RJUMPV with various number of offsets and various offsets ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_forwards`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_forwards.md)) -- [x] Valid RJUMPV with table size 256 ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_full_table`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_full_table.md)) -- [x] Valid RJUMPV containing maximum offset ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_max_forwards`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_max_forwards.md)) -- [ ] Valid RJUMPV containing minimum offset -- [x] Truncated before RJUMP immediate ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_truncated_rjump`](./eip4200_relative_jumps/test_rjump/test_rjump_truncated_rjump.md)) -- [x] Truncated RJUMP immediate ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_truncated_rjump_2`](./eip4200_relative_jumps/test_rjump/test_rjump_truncated_rjump_2.md)) -- [x] RJUMP out of container bounds ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_after_container`](./eip4200_relative_jumps/test_rjump/test_rjump_after_container.md)) -- [x] RJUMP out of section bounds ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_into_data`](./eip4200_relative_jumps/test_rjump/test_rjump_into_data.md)) -- [x] RJUMP into immediate ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_into_push_1`](./eip4200_relative_jumps/test_rjump/test_rjump_into_push_1.md)) -- [x] Truncated before RJUMPI immediate ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_truncated`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_truncated.md)) -- [x] Truncated RJUMPI immediate ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_truncated_2`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_truncated_2.md)) -- [x] RJUMPI out of container bounds ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_after_container`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_after_container.md)) -- [x] RJUMPI out of section bounds ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_into_data`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_into_data.md)) -- [x] RJUMPI into immediate ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_into_push_1`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_into_push_1.md)) -- [x] Truncated before RJUMPV immediate ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_truncated_empty`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_truncated_empty.md)) -- [x] Truncated RJUMPV immediate ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_truncated`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_truncated.md)) -- [x] RJUMPV out of container bounds ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_after_container`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_after_container.md)) -- [x] RJUMPV out of section bounds 
([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_into_data`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_into_data.md)) -- [x] RJUMPV into immediate ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_into_push_1`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_into_push_1.md)) - -### Execution - -- [x] RJUMP forwards ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_positive_negative`](./eip4200_relative_jumps/test_rjump/test_rjump_positive_negative.md)) -- [x] RJUMP backwards ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_positive_negative`](./eip4200_relative_jumps/test_rjump/test_rjump_positive_negative.md)) -- [x] RJUMP with 0 offset ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_zero`](./eip4200_relative_jumps/test_rjump/test_rjump_zero.md)) -- [x] RJUMPI forwards with condition true/false ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_condition_forwards`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_condition_forwards.md)) -- [x] RJUMPI backwards with condition true/false ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_condition_backwards`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_condition_backwards.md)) -- [x] RJUMPI with 0 offset with condition true/false ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_condition_zero`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_condition_zero.md)) -- [x] RJUMPV with different case values ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_condition`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_condition.md)) -- [x] RJUMPV with case value out of table bounds ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_condition`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_condition.md)) -- [x] RJUMPV with max cases number ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_condition`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_condition.md), [`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py`](./eip4200_relative_jumps/test_rjumpv/index.md)`-k test_rjumpv_full_table`) - -## EIP-4750: EOF - Functions - -### Validation - -- [x] Valid CALLFs ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_callf_execution.py::test_callf`](./eip4750_functions/test_callf_execution/test_callf.md)) -- [x] CALLFs to non-existing sections ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_invalid_code_section_index`](./eip4750_functions/test_code_validation/test_invalid_code_section_index.md)) -- [x] Truncated CALLF immediate ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_callf_truncated_immediate`](./eip4750_functions/test_code_validation/test_callf_truncated_immediate.md)) -- [x] Unreachable code sections ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_unreachable_code_sections`](./eip4750_functions/test_code_validation/test_unreachable_code_sections.md)) -- [x] Sections reachable from other sections, but not reachable from section 0 
([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_unreachable_code_sections`](./eip4750_functions/test_code_validation/test_unreachable_code_sections.md)) -- [x] Unreachable code section that calls itself with JUMPF ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_unreachable_code_sections`](./eip4750_functions/test_code_validation/test_unreachable_code_sections.md)) -- [x] Unreachable code section that calls itself with CALLF ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_unreachable_code_sections`](./eip4750_functions/test_code_validation/test_unreachable_code_sections.md)) -- [ ] RETF with maximum number of outputs (ethereum/tests: src/EOFTestsFiller/EIP5450/validInvalidFiller.yml) - -### Execution - -- [ ] CALLF/RETF execution (ethereum/tests: src/EIPTestsFiller/StateTests/stEOF/stEIP4200/CALLF_RETF_ExecutionFiller.yml) -- [ ] Dispatch to CALLF to different functions based on calldata (ethereum/tests: src/EIPTestsFiller/StateTests/stEOF/stEIP4200/CALLF_RETF_ExecutionFiller.yml) -- [ ] Maximum number of code sections, calling each section with CALLF (ethereum/tests: src/EIPTestsFiller/StateTests/stEOF/stEIP4200/CALLF_RETF_ExecutionFiller.yml) - -## EIP-5450: EOF - Stack Validation - -### Validation - -#### Terminating instructions - -- [ ] Check all terminating opcodes (ethereum/tests: ./src/EOFTestsFiller/efExample/validInvalidFiller.yml src/EOFTestsFiller/EIP5450/validInvalidFiller.yml) -- [ ] Code section not terminating (executing beyond section end) (ethereum/tests: ./src/EOFTestsFiller/efExample/validInvalidFiller.yml src/EOFTestsFiller/EIP5450/validInvalidFiller.yml src/EOFTestsFiller/efStack/no_terminating_instruction_Copier.json) -- [ ] Code section ending with NOP (not terminating) (src/EOFTestsFiller/EIP5450/validInvalidFiller.yml) -- [ ] Check that unreachable code is invalid after all terminating instructions (ethereum/tests: src/EOFTestsFiller/EIP5450/validInvalidFiller.yml) - -#### Jumps - -##### RJUMP - -- [x] Valid RJUMP backwards in a constant stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_valid_backward`](./eip4200_relative_jumps/test_rjump/test_rjump_valid_backward.md) -- [x] Invalid RJUMP backwards with mismatching stack in a constant stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_backward_invalid_max_stack_height`](./eip4200_relative_jumps/test_rjump/test_rjump_backward_invalid_max_stack_height.md) -- [x] Valid RJUMP backwards in a variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_valid_backward`](./eip4200_relative_jumps/test_rjump/test_rjump_valid_backward.md) -- [x] Invalid RJUMP backwards with mismatching stack in a variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_backward_invalid_max_stack_height`](./eip4200_relative_jumps/test_rjump/test_rjump_backward_invalid_max_stack_height.md) -- [x] Valid RJUMP forwards ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_valid_forward`](./eip4200_relative_jumps/test_rjump/test_rjump_valid_forward.md) -- [x] Valid RJUMP forwards from different stack ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_valid_forward`](./eip4200_relative_jumps/test_rjump/test_rjump_valid_forward.md) -- [x] Valid RJUMP forwards in variable stack segment 
([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_valid_forward`](./eip4200_relative_jumps/test_rjump/test_rjump_valid_forward.md) -- [x] Valid RJUMP forwards from different stack in variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_valid_forward`](./eip4200_relative_jumps/test_rjump/test_rjump_valid_forward.md) -- [ ] Valid empty infinite loop with RJUMP (ethereum/tests: src/EOFTestsFiller/EIP5450/validInvalidFiller.yml) -- [ ] Valid balanced infinite loop (ethereum/tests: src/EOFTestsFiller/EIP5450/validInvalidFiller.yml) -- [x] RJUMP to self (including variadic stack height) ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjump.py::test_rjump_into_self`](./eip4200_relative_jumps/test_rjump/test_rjump_into_self.md)) - -##### RJUMPI - -- [x] Valid RJUMPI backwards in a constant stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_backward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_backward.md) -- [x] Invalid RJUMPI backwards with mismatching stack in a constant stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_backward_invalid_max_stack_height`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_backward_invalid_max_stack_height.md) -- [x] Valid RJUMPI backwards in a variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_backward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_backward.md) -- [x] Invalid RJUMPI backwards with mismatching stack in a variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_backward_invalid_max_stack_height`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_backward_invalid_max_stack_height.md) -- [x] RJUMPI forward with branches of equal stack height ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_forward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_forward.md) -- [x] RJUMPI forward with branches of equal stack height in a variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_forward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_forward.md) -- [x] RJUMPI forward with branches of different stack height ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_forward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_forward.md) -- [x] RJUMPI forward with branches of different stack height in a variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_forward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_forward.md) -- [x] Valid loop using RJUMPI ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_forward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_forward.md) -- [x] Valid loop with a break using RJUMPI - equal stack after break and normal loop end ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_forward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_forward.md) -- [x] Valid loop with a break using RJUMPI - equal stack after break and normal loop end, variable stack segment 
([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_forward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_forward.md) -- [x] Valid loop with a break using RJUMPI - different stack after break and normal loop end ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_forward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_forward.md) -- [x] Valid loop with a break using RJUMPI - different stack after break and normal loop end, variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_forward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_forward.md) -- [x] If-then-else with equal stack height in branches ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_forward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_forward.md) -- [x] If-then-else with equal stack height in branches, variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_forward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_forward.md) -- [x] If-then-else with different stack height in branches ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_forward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_forward.md) -- [x] If-then-else with different stack height in branches, variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_valid_forward`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_valid_forward.md) -- [x] RJUMPI to self (including variadic stack height) ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpi.py::test_rjumpi_into_self`](./eip4200_relative_jumps/test_rjumpi/test_rjumpi_into_self.md)) - -##### RJUMPV - -- [x] Valid RJUMPV backwards in a constant stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_valid_backward`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_valid_backward.md) -- [x] Invalid RJUMPV backwards with mismatching stack in a constant stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_backward_invalid_max_stack_height`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_backward_invalid_max_stack_height.md) -- [x] Valid RJUMPV backwards in a variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_valid_backward`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_valid_backward.md) -- [x] Invalid RJUMPV backwards with mismatching stack in a variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_backward_invalid_max_stack_height`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_backward_invalid_max_stack_height.md) -- [x] RJUMPV forward with branches of equal stack height ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_valid_forward`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_valid_forward.md) -- [x] RJUMPV forward with branches of equal stack height in a variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_valid_forward`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_valid_forward.md) -- [x] RJUMPV forward with branches of different stack height 
([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_valid_forward`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_valid_forward.md) -- [x] RJUMPV forward with branches of different stack height in a variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_valid_forward`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_valid_forward.md) -- [ ] Valid infinite loop using RJUMPV (ethereum/tests: src/EOFTestsFiller/EIP5450/validInvalidFiller.yml) -- [x] Switch with equal stack height in branches ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_valid_forward`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_valid_forward.md) -- [x] Switch with equal stack height in branches, variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_valid_forward`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_valid_forward.md) -- [x] Switch with different stack height in branches ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_valid_forward`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_valid_forward.md) -- [x] Switch with different stack height in branches, variable stack segment ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_valid_forward`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_valid_forward.md) -- [x] RJUMPV to self (including variadic stack height) ([`tests/unscheduled/eip7692_eof_v1/eip4200_relative_jumps/test_rjumpv.py::test_rjumpv_into_self`](./eip4200_relative_jumps/test_rjumpv/test_rjumpv_into_self.md)) - -##### Combinations - -- [ ] RJUMP and RJUMPI with the same target and equal stack height (ethereum/tests: src/EOFTestsFiller/efStack/forwards_rjumpi_Copier.json) -- [ ] RJUMP and RJUMPI with the same target and equal stack height in a variable stack segment (ethereum/tests: src/EOFTestsFiller/efStack/forwards_rjumpi_variable_stack_Copier.json) -- [ ] RJUMP and RJUMPI with the same target and different stack height (ethereum/tests: src/EOFTestsFiller/efStack/forwards_rjumpi_Copier.json) -- [ ] RJUMP and RJUMPI with the same target and different stack height in a variable stack segment (ethereum/tests: src/EOFTestsFiller/efStack/forwards_rjumpi_variable_stack_Copier.json) -- [ ] RJUMP and RJUMPV with the same target and equal stack height (ethereum/tests: src/EOFTestsFiller/efStack/forwards_rjumpv_Copier.json) -- [ ] RJUMP and RJUMPV with the same target and equal stack height in a variable stack segment (ethereum/tests: src/EOFTestsFiller/efStack/forwards_rjumpv_variable_stack_Copier.json) -- [ ] RJUMP and RJUMPV with the same target and different stack height (ethereum/tests: src/EOFTestsFiller/efStack/forwards_rjumpv_Copier.json) -- [ ] RJUMP and RJUMPV with the same target and different stack height in a variable stack segment (ethereum/tests: src/EOFTestsFiller/efStack/forwards_rjumpv_variable_stack_Copier.json) -- [ ] RJUMPI and RJUMPV with the same target - -#### Stack underflow - -- [x] Stack underflows ([`tests/unscheduled/eip7692_eof_v1/eip5450_stack/test_code_validation.py::test_all_opcodes_stack_underflow`](./eip5450_stack/test_code_validation/test_all_opcodes_stack_underflow.md)) -- [x] Stack underflow with enough items available in caller stack - can't dig into caller frame 
([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_eof_validity`](./eip4750_functions/test_code_validation/test_eof_validity.md)) -- [x] Stack underflow in variable stack segment, only min underflow ([`tests/unscheduled/eip7692_eof_v1/eip5450_stack/test_code_validation.py::test_all_opcodes_stack_underflow`](./eip5450_stack/test_code_validation/test_all_opcodes_stack_underflow.md)) -- [x] Stack underflow in variable stack segment, both min and max underflow ([`tests/unscheduled/eip7692_eof_v1/eip5450_stack/test_code_validation.py::test_all_opcodes_stack_underflow`](./eip5450_stack/test_code_validation/test_all_opcodes_stack_underflow.md)) - -#### CALLF - -- [x] Valid CALLFs to functions with inputs ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_eof_validity`](./eip4750_functions/test_code_validation/test_eof_validity.md)) -- [ ] CALLF stack underflows ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_eof_validity`](./eip4750_functions/test_code_validation/test_eof_validity.md)) - - [ ] ./src/EOFTestsFiller/efExample/validInvalidFiller.yml - - [ ] src/EOFTestsFiller/EIP5450/validInvalidFiller.yml -- [x] CALLF stack underflow in variable stack segment, only min underflow ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_callf_stack_underflow_examples`](./eip4750_functions/test_code_validation/test_callf_stack_underflow_examples.md)) -- [x] CALLF stack underflow in variable stack segment, both min and max underflow ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_callf_stack_underflow_examples`](./eip4750_functions/test_code_validation/test_callf_stack_underflow_examples.md)) -- [ ] Branching to CALLFs with the same number of outputs (ethereum/tests: src/EOFTestsFiller/EIP5450/validInvalidFiller.yml) -- [ ] Check that CALLF stack inputs/outputs equal to target section type definition - -#### RETF - -- [ ] Valid RETF with correct number of items on stack ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_eof_validity`](./eip4750_functions/test_code_validation/test_eof_validity.md)) - - [ ] src/EOFTestsFiller/EIP5450/validInvalidFiller.yml -- [ ] Invalid RETF with extra items on stack ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_eof_validity`](./eip4750_functions/test_code_validation/test_eof_validity.md)) - - [ ] ./src/EOFTestsFiller/efExample/validInvalidFiller.yml -- [x] RETF stack underflow ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_eof_validity`](./eip4750_functions/test_code_validation/test_eof_validity.md)) -- [x] RETF reached via different paths ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_eof_validity`](./eip4750_functions/test_code_validation/test_eof_validity.md)) -- [x] RETF in variable stack segment is not allowed ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_eof_validity`](./eip4750_functions/test_code_validation/test_eof_validity.md)) -- [ ] Extra items on stack allowed for terminating instructions other than RETF (ethereum/tests: src/EOFTestsFiller/EIP5450/validInvalidFiller.yml) -- [x] Invalid RETF in a non-returning function ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_nonreturning_validation.py::test_first_section_returning`](./eip6206_jumpf/test_nonreturning_validation/test_first_section_returning.md)) - -#### 
JUMPF - -- [x] Extra items on stack are allowed for JUMPF to non-returning function ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_jumpf_to_non_returning`](./eip6206_jumpf/test_jumpf_validation/test_jumpf_to_non_returning.md) [`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_jumpf_to_non_returning_variable_stack`](./eip6206_jumpf/test_jumpf_validation/test_jumpf_to_non_returning_variable_stack.md)) -- [x] JUMPF stack underflows ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_jumpf_to_non_returning`](./eip6206_jumpf/test_jumpf_validation/test_jumpf_to_non_returning.md) [`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_jumpf_to_returning`](./eip6206_jumpf/test_jumpf_validation/test_jumpf_to_returning.md)) -- [x] JUMPF stack underflow in a variable stack segment - only min underflow ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_jumpf_to_non_returning_variable_stack`](./eip6206_jumpf/test_jumpf_validation/test_jumpf_to_non_returning_variable_stack.md)) -- [x] JUMPF stack underflow in a variable stack segment - both min and max underflow ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_jumpf_to_non_returning_variable_stack`](./eip6206_jumpf/test_jumpf_validation/test_jumpf_to_non_returning_variable_stack.md)) -- [x] JUMPF into function with the same number of outputs ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_jumpf_to_returning`](./eip6206_jumpf/test_jumpf_validation/test_jumpf_to_returning.md)) -- [x] JUMPF into function with fewer outputs than current one ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_jumpf_to_returning`](./eip6206_jumpf/test_jumpf_validation/test_jumpf_to_returning.md)) -- [x] Extra items on stack are allowed for JUMPF to returning function ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_jumpf_to_returning`](./eip6206_jumpf/test_jumpf_validation/test_jumpf_to_returning.md)) -- [x] JUMPF to returning in a variable stack segment is not allowed ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_jumpf_to_returning_variable_stack_1`](./eip6206_jumpf/test_jumpf_validation/test_jumpf_to_returning_variable_stack_1.md) [`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_jumpf_to_returning_variable_stack_2`](./eip6206_jumpf/test_jumpf_validation/test_jumpf_to_returning_variable_stack_2.md) [`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_jumpf_to_returning_variable_stack_3`](./eip6206_jumpf/test_jumpf_validation/test_jumpf_to_returning_variable_stack_3.md)) -- (ethereum/tests: src/EOFTestsFiller/efStack/jumpf_to_returning_variable_stack_Copier.json) -- [x] Invalid JUMPF in a non-returning function ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_nonreturning_validation.py::test_retf_in_nonreturning`](./eip6206_jumpf/test_nonreturning_validation/test_retf_in_nonreturning.md)) -- [ ] Truncated JUMPF immediate - -#### Stack overflow - -##### CALLF - -- [x] Max allowed stack height reached in CALLF-ed function ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_callf_stack_overflow`](./eip4750_functions/test_code_validation/test_callf_stack_overflow.md)) -- [x] CALLF validation time stack overflow 
([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_callf_stack_overflow_after_callf`](./eip4750_functions/test_code_validation/test_callf_stack_overflow_after_callf.md)) -- [x] Max allowed stack height reached in CALLF-ed function with inputs ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_callf_with_inputs_stack_overflow`](./eip4750_functions/test_code_validation/test_callf_with_inputs_stack_overflow.md)) -- [x] CALLF validation time stack overflow in function with inputs ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_callf_with_inputs_stack_overflow`](./eip4750_functions/test_code_validation/test_callf_with_inputs_stack_overflow.md)) -- [x] Max allowed stack height reached in CALLF-ed function. CALLF in variable stack segment. ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_callf_stack_overflow_variable_stack`](./eip4750_functions/test_code_validation/test_callf_stack_overflow_variable_stack.md) [`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_callf_stack_overflow_variable_stack_2`](./eip4750_functions/test_code_validation/test_callf_stack_overflow_variable_stack_2.md))) -- [x] CALLF validation time stack overflow in variable stack segment. ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_callf_stack_overflow_variable_stack_3`](./eip4750_functions/test_code_validation/test_callf_stack_overflow_variable_stack_3.md) [`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_callf_stack_overflow_variable_stack_4`](./eip4750_functions/test_code_validation/test_callf_stack_overflow_variable_stack_4.md))) -- [x] Max allowed stack height reached in CALLF-ed function with inputs. CALLF in variable stack segment. ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_callf_with_inputs_stack_overflow_variable_stack`](./eip4750_functions/test_code_validation/test_callf_with_inputs_stack_overflow_variable_stack.md)) -- [x] CALLF validation time stack overflow in function with inputs in variable stack segment. 
([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_code_validation.py::test_callf_with_inputs_stack_overflow_variable_stack`](./eip4750_functions/test_code_validation/test_callf_with_inputs_stack_overflow_variable_stack.md)) -- [ ] Function inputs are accessible and accounted for (no stack underflow if they are popped) (ethereum/tests: src/EOFTestsFiller/EIP5450/validInvalidFiller.yml) - -##### JUMPF - -- [x] Max allowed stack height reached in JUMPF-ed function ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_jumpf_other_stack_overflow`](./eip6206_jumpf/test_jumpf_validation/test_jumpf_other_stack_overflow.md)) -- [x] JUMPF validation time stack overflow ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_jumpf_other_stack_overflow`](./eip6206_jumpf/test_jumpf_validation/test_jumpf_other_stack_overflow.md)) -- [x] Max allowed stack height reached in JUMPF-ed function with inputs ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_stack.py::test_jumpf_with_inputs_stack_overflow`](./eip6206_jumpf/test_jumpf_stack/test_jumpf_with_inputs_stack_overflow.md)) -- [x] JUMPF validation time stack overflow in function with inputs ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_stack.py::test_jumpf_with_inputs_stack_overflow`](./eip6206_jumpf/test_jumpf_stack/test_jumpf_with_inputs_stack_overflow.md)) -- [x] JUMPF validation time stack overflow in function with inputs, variable stack segment, only max overflow ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_stack.py::test_jumpf_with_inputs_stack_overflow_variable_stack`](./eip6206_jumpf/test_jumpf_stack/test_jumpf_with_inputs_stack_overflow_variable_stack.md)) -- [x] JUMPF validation time stack overflow in function with inputs, variable stack segment, both max and min overflow ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_stack.py::test_jumpf_with_inputs_stack_overflow_variable_stack`](./eip6206_jumpf/test_jumpf_stack/test_jumpf_with_inputs_stack_overflow_variable_stack.md)) -- [x] Max allowed stack height reached in JUMPF-ed function. JUMPF in variable stack segment. ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_stack.py::test_jumpf_variadic_stack_overflow`](./eip6206_jumpf/test_jumpf_stack/test_jumpf_variadic_stack_overflow.md)) -- [x] JUMPF validation time stack overflow in variable stack segment - only max overflow. ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_stack.py::test_jumpf_variadic_stack_overflow`](./eip6206_jumpf/test_jumpf_stack/test_jumpf_variadic_stack_overflow.md)) -- [x] JUMPF validation time stack overflow in variable stack segment - both min and max overflow. ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_stack.py::test_jumpf_variadic_stack_overflow`](./eip6206_jumpf/test_jumpf_stack/test_jumpf_variadic_stack_overflow.md)) -- [x] Max allowed stack height reached in JUMPF-ed function with inputs. JUMPF in variable stack segment. ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_stack.py::test_jumpf_with_inputs_stack_overflow_variable_stack`](./eip6206_jumpf/test_jumpf_stack/test_jumpf_with_inputs_stack_overflow_variable_stack.md)) -- [x] JUMPF validation time stack overflow in function with inputs in variable stack segment. 
([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_stack.py::test_jumpf_with_inputs_stack_overflow_variable_stack`](./eip6206_jumpf/test_jumpf_stack/test_jumpf_with_inputs_stack_overflow_variable_stack.md)) - -#### SWAPN/DUPN/EXCHANGE - -- [ ] Valid DUPN with enough items on stack (ethereum/tests: src/EOFTestsFiller/efStack/dupn_stack_validation_Copier.json) -- [ ] DUPN stack underflow (ethereum/tests: src/EOFTestsFiller/efStack/dupn_stack_validation_Copier.json) -- [ ] Valid SWAPN with enough items on stack (ethereum/tests: src/EOFTestsFiller/efStack/swapn_stack_validation_Copier.json) -- [ ] SWAPN stack underflow (ethereum/tests: src/EOFTestsFiller/efStack/swapn_stack_validation_Copier.json) -- [ ] Valid EXCHANGE with enough items on stack (ethereum/tests: src/EOFTestsFiller/efStack/exchange_deep_stack_validation_Copier.json src/EOFTestsFiller/efStack/exchange_stack_validation_Copier.json) -- [ ] EXCHANGE stack underflow (ethereum/tests: src/EOFTestsFiller/efStack/exchange_stack_validation_Copier.json src/EOFTestsFiller/efStack/exchange_empty_stack_validation_Copier.json) - -#### Other - -- [ ] Wrong max_stack_height (ethereum/tests: ./src/EOFTestsFiller/efExample/validInvalidFiller.yml src/EOFTestsFiller/efValidation/max_stack_height_Copier.json) -- [ ] All opcodes correctly account for stack inputs/outputs (ethereum/tests: src/EOFTestsFiller/EIP5450/validInvalidFiller.yml) -- [ ] Code reachable only via backwards jump is invalid -- [x] Maximally broad [0, 1023] stack range ([`tests/unscheduled/eip7692_eof_v1/eip_5450_stack/test_code_validation.py::test_stack_range_maximally_broad`](./eip5450_stack/test_code_validation/test_stack_range_maximally_broad.md)) - -### Execution - -- [x] Max stack size (1024) in CALLF-ed function ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_callf_execution.py::test_callf_operand_stack_size_max`](./eip4750_functions/test_callf_execution/test_callf_operand_stack_size_max.md) - - -## EIP-6206: EOF - JUMPF and non-returning functions - -### Validation - -- [x] Zero section returning ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_nonreturning_validation.py::test_first_section_returning`](./eip6206_jumpf/test_nonreturning_validation/test_first_section_returning.md), ethereum/tests: ./src/EOFTestsFiller/efExample/validInvalidFiller.yml src/EOFTestsFiller/EIP4750/validInvalidFiller.yml) -- [x] Zero section declared non-returning but ends with RETF ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_nonreturning_validation.py::test_retf_in_nonreturning`](./eip6206_jumpf/test_nonreturning_validation/test_retf_in_nonreturning.md), ethereum/tests: src/EOFTestsFiller/EIP4750/validInvalidFiller.yml) -- [x] CALLF into non-returning function ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_nonreturning_validation.py::test_callf_to_nonreturning`](./eip6206_jumpf/test_nonreturning_validation/test_callf_to_nonreturning.md)) -- [x] Valid JUMPF into sections with equal number of outputs ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_execution.py::test_jumpf_to_retf`](./eip6206_jumpf/test_jumpf_execution/test_jumpf_to_retf.md)) -- [x] Valid JUMPF into sections with different but compatible number of outputs ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_execution.py::test_jumpf_to_retf`](./eip6206_jumpf/test_jumpf_execution/test_jumpf_to_retf.md)) -- [x] JUMPF into sections with incompatible outputs 
([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_stack.py::test_jumpf_incompatible_outputs`](./eip6206_jumpf/test_jumpf_stack/test_jumpf_incompatible_outputs.md)) -- [x] Non-returning section without JUMPF ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_execution.py::test_jumpf_forward`](./eip6206_jumpf/test_jumpf_execution/test_jumpf_forward.md)) -- [x] Non-returning section with JUMPF ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_nonreturning_validation.py::test_jumpf_in_nonreturning`](./eip6206_jumpf/test_nonreturning_validation/test_jumpf_in_nonreturning.md)) -- [x] Returning section with RETF ([`tests/unscheduled/eip7692_eof_v1/eip4750_functions/test_callf_execution.py::test_callf`](./eip4750_functions/test_callf_execution/test_callf.md)) -- [x] Returning section with JUMPF ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_returning_jumpf`](./eip6206_jumpf/test_jumpf_validation/test_returning_jumpf.md)) -- [x] Returning section with JUMPF to returning and RETF ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_execution.py::test_jumpf_to_retf`](./eip6206_jumpf/test_jumpf_execution/test_jumpf_to_retf.md)) -- [x] Returning section with JUMPF to non-returning and RETF ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_returning_jumpf`](./eip6206_jumpf/test_jumpf_validation/test_returning_jumpf.md)) -- [x] Returning section without JUMPF nor RETF ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_nonreturning_validation.py::test_returning_section_not_returning`](./eip6206_jumpf/test_nonreturning_validation/test_returning_section_not_returning.md)) -- [x] Invalid non-returning flag ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_nonreturning_validation.py::test_returning_section_not_returning`](./eip6206_jumpf/test_nonreturning_validation/test_returning_section_not_returning.md)) -- [x] Circular JUMPF between two sections ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_execution.py::test_jumpf_infinite_loop`](./eip6206_jumpf/test_jumpf_execution/test_jumpf_infinite_loop.md)) -- [x] JUMPF into non-existing section ([`tests/unscheduled/eip7692_eof_v1/eip6206_jumpf/test_jumpf_validation.py::test_invalid_code_section_index`](./eip6206_jumpf/test_jumpf_validation/test_invalid_code_section_index.md)) - -## EIP-7480: EOF - Data section access instructions - -### Validation - -- [x] Valid DATALOADN with various offsets ([`tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_data_opcodes.py::test_dataloadn`](./eip7480_data_section/test_data_opcodes/test_dataloadn.md) -- [x] Truncated DATALOADN immediate ([`tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_code_validation.py::test_dataloadn_truncated_immediate`](./eip7480_data_section/test_code_validation/test_dataloadn_truncated_immediate.md) -- [x] DATALOADN offset out of bounds ([`tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_code_validation.py::test_invalid_containers_with_data_section`](./eip7480_data_section/test_code_validation/test_invalid_containers_with_data_section.md) -- [x] DATALOADN accessing not full word ([`tests/unscheduled/eip7692_eof_v1/eip7480_data_section/test_code_validation.py::test_invalid_containers_with_data_section`](./eip7480_data_section/test_code_validation/test_invalid_containers_with_data_section.md) - -## EIP-663: SWAPN, DUPN and EXCHANGE instructions - -### Validation - -- [ ] A DUPN instruction causes stack overflow -- [ ] A DUPN instruction causes stack 
underflow -- [ ] A DUPN instruction causes max stack height mismatch -- [ ] A SWAPN instruction causes stack underflow - -### Execution - -- [x] Positive tests for DUPN instructions ([`./tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_dupn.py::test_dupn_all_valid_immediates`](./eip663_dupn_swapn_exchange/test_dupn/test_dupn_all_valid_immediates.md)) -- [x] Positive tests for SWAPN instructions ([`./tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_swapn.py::test_swapn_all_valid_immediates`](./eip663_dupn_swapn_exchange/test_swapn/test_swapn_all_valid_immediates.md)) -- [x] Positive tests for EXCHANGE instruction ([`./tests/unscheduled/eip7692_eof_v1/eip663_dupn_swapn_exchange/test_exchange.py::test_exchange_all_valid_immediates`](./eip663_dupn_swapn_exchange/test_exchange/test_exchange_all_valid_immediates.md)) - -## EIP-7069: Revamped CALL instructions - -### Execution - -- [x] EXTDELEGATECALL from EOF to EOF ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_eof_sstore`](./eip7069_extcall/test_calls/test_eof_calls_eof_sstore.md)) -- [x] EXTDELEGATECALL from EOF to legacy fails ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_legacy_sstore`](./eip7069_extcall/test_calls/test_eof_calls_legacy_sstore.md)) -- [ ] EXTDELEGATECALL forwards static mode (evmone-tests: state_tests/state_transition/eof_calls/extdelegatecall_static.json) -- [x] EXTCALL with value success ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_with_value`](./eip7069_extcall/test_calls/test_eof_calls_with_value.md)) -- [x] EXTCALL with value from EXTSTATICCALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_static_flag_with_value`](./eip7069_extcall/test_calls/test_eof_calls_static_flag_with_value.md)) -- [x] EXTCALL with value, not enough balance ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_with_value`](./eip7069_extcall/test_calls/test_eof_calls_with_value.md)) -- [x] EXTCALL with value, check additional charge for value ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_gas.py::test_ext_calls_gas`](./eip7069_extcall/test_gas/test_ext_calls_gas.md)) -- [x] EXTCALL with gas not enough for callee to get 5000 gas ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_min_callee_gas`](./eip7069_extcall/test_calls/test_eof_calls_min_callee_gas.md)) -- [x] RETURNDATA* after EXTCALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_returndataload.py`](./eip7069_extcall/test_returndataload/index.md)) -- [x] RETURNDATA* after EXTDELEGATECALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_returndataload.py`](./eip7069_extcall/test_returndataload/index.md)) -- [x] RETURNDATA* after EXTSTATICCALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_returndataload.py`](./eip7069_extcall/test_returndataload/index.md)) -- [x] RETURNDATA* after aborted EXT*CALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_clear_return_buffer`](./eip7069_extcall/test_calls/test_eof_calls_clear_return_buffer.md)) -- [x] Failed EXTCALL clears returndata from previous EXTCALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_clear_return_buffer`](./eip7069_extcall/test_calls/test_eof_calls_clear_return_buffer.md)) -- [x] EXTCALL not enough gas for input memory charge 
([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_gas.py::test_ext_calls_gas`](./eip7069_extcall/test_gas/test_ext_calls_gas.md)) -- [x] EXTDELEGATECALL not enough gas for input memory charge ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_gas.py::test_ext_calls_gas`](./eip7069_extcall/test_gas/test_ext_calls_gas.md)) -- [x] EXTSTATICCALL not enough gas for input memory charge ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_gas.py::test_ext_calls_gas`](./eip7069_extcall/test_gas/test_ext_calls_gas.md)) -- [x] EXTCALL exception due to target address overflow (bits set in high 12 bytes) ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_address_space_extension.py`](./eip7069_extcall/test_address_space_extension/index.md)) -- [x] EXTDELEGATECALL exception due to target address overflow (bits set in high 12 bytes) ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_address_space_extension.py`](./eip7069_extcall/test_address_space_extension/index.md)) -- [x] EXTSTATICCALL exception due to target address overflow (bits set in high 12 bytes) ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_address_space_extension.py`](./eip7069_extcall/test_address_space_extension/index.md)) -- [x] EXTCALL not enough gas for warming up target address ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_gas.py::test_ext_calls_gas`](./eip7069_extcall/test_gas/test_ext_calls_gas.md)) -- [x] EXTDELEGATECALL not enough gas for warming up target address ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_gas.py::test_ext_calls_gas`](./eip7069_extcall/test_gas/test_ext_calls_gas.md)) -- [x] EXTSTATICCALL not enough gas for warming up target address ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_gas.py::test_ext_calls_gas`](./eip7069_extcall/test_gas/test_ext_calls_gas.md)) -- [x] EXTCALL not enough gas for account creation cost (transfer value to non-existing account) ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_gas.py::test_ext_calls_gas`](./eip7069_extcall/test_gas/test_ext_calls_gas.md)) -- [x] OOG after EXTCALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_eof_then_fails`](./eip7069_extcall/test_calls/test_eof_calls_eof_then_fails.md)) -- [x] OOG after EXTDELEGATECALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_eof_then_fails`](./eip7069_extcall/test_calls/test_eof_calls_eof_then_fails.md)) -- [x] OOG after EXTSTATICCALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_eof_then_fails`](./eip7069_extcall/test_calls/test_eof_calls_eof_then_fails.md)) -- [x] REVERT inside EXTCALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_callee_fails`](./eip7069_extcall/test_calls/test_callee_fails.md)) -- [x] REVERT inside EXTDELEGATECALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_callee_fails`](./eip7069_extcall/test_calls/test_callee_fails.md)) -- [x] REVERT inside EXTSTATICCALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_callee_fails`](./eip7069_extcall/test_calls/test_callee_fails.md)) -- [x] EXTCALL with input (`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calldata.py`) -- [x] EXTDELEGATECALL with input (`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calldata.py`) -- [x] EXTSTATICCALL with input (`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calldata.py`) -- [x] EXTCALL with just enough gas for 
MIN_RETAINED_GAS and MIN_CALLEE_GAS ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_min_callee_gas`](./eip7069_extcall/test_calls/test_eof_calls_min_callee_gas.md)) -- [x] EXTCALL with not enough gas for MIN_CALLEE_GAS ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_min_callee_gas`](./eip7069_extcall/test_calls/test_eof_calls_min_callee_gas.md)) -- [x] ADDRESS and CALLER inside EXTCALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_callee_context`](./eip7069_extcall/test_calls/test_callee_context.md)) -- [x] ADDRESS and CALLER inside EXTDELEGATECALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_callee_context`](./eip7069_extcall/test_calls/test_callee_context.md)) -- [x] ADDRESS and CALLER inside EXTSTATICCALL ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_callee_context`](./eip7069_extcall/test_calls/test_callee_context.md)) -- [ ] Refund inside EXTCALL is applied after the transaction (evmone-tests: state_tests/state_transition/eof_calls/extcall_gas_refund_propagation.json) -- [ ] Refund inside EXTDELEGATECALL is applied after the transaction (evmone-tests: state_tests/state_transition/eof_calls/extdelegatecall_gas_refund_propagation.json) -- [x] EXTSTATICCALL from EOF to non-pure legacy contract failing ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_legacy_sstore`](./eip7069_extcall/test_calls/test_eof_calls_legacy_sstore.md)) -- [x] EXTSTATICCALL from EOF to pure EOF contract ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_legacy_mstore`](./eip7069_extcall/test_calls/test_eof_calls_legacy_mstore.md)) -- [x] EXTSTATICCALL from EOF to non-pure EOF contract failing ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_eof_sstore`](./eip7069_extcall/test_calls/test_eof_calls_eof_sstore.md)) -- [x] `*CALLs` from legacy contracts to EOF contracts (ethereum/tests: ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_legacy_calls_eof_sstore`](./eip7069_extcall/test_calls/test_eof_calls_eof_sstore.md)) -- [x] `EXT*CALLs` from EOF to legacy contracts ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_legacy_sstore`](./eip7069_extcall/test_calls/test_eof_calls_eof_sstore.md)) -- [x] EXTDELEGATECALL from EOF to EOF contract ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_eof_calls_eof_sstore`](./eip7069_extcall/test_calls/test_eof_calls_eof_sstore.md)) -- [x] EXTDELEGATECALL from EOF to legacy contract failing ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_extdelegate_call_targets`](./eip7069_extcall/test_calls/test_extdelegate_call_targets.md)) -- [x] EXTDELEGATECALL from EOF to EOA failing ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_extdelegate_call_targets`](./eip7069_extcall/test_calls/test_extdelegate_call_targets.md)) -- [x] EXTDELEGATECALL from EOF to empty account failing ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_extdelegate_call_targets`](./eip7069_extcall/test_calls/test_extdelegate_call_targets.md)) -- [x] EXTDELEGATECALL to EIP-7702 delegate ([`./tests/unscheduled/eip7692_eof_v1/eip7069_extcall/test_calls.py::test_extdelegate_call_targets`](./eip7069_extcall/test_calls/test_extdelegate_call_targets.md)) - - -## EIP-7620: EOF Contract Creation - -### Validation - -- [ 
] Valid EOFCREATEs referring to various container numbers (ethereum/tests: ./src/EOFTestsFiller/efValidation/EOF1_eofcreate_valid_Copier.json) -- [x] Truncated before EOFCREATE immediate ([`tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_eofcreate.py`](./eip7620_eof_create/test_eofcreate/index.md)`-k test_eofcreate_invalid_truncated_immediate`) -- [ ] EOFCREATE is not a valid terminating instruction -- [x] EOFCREATE immediate referring to non-existing container ([`tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_eofcreate.py`](./eip7620_eof_create/test_eofcreate/index.md)`-k test_eofcreate_invalid_index`) -- [x] EOFCREATE immediate referring to container with truncated data ([`tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_eofcreate.py`](./eip7620_eof_create/test_eofcreate/index.md)`-k test_eofcreate_truncated_container`) -- [x] Valid RETURNCODEs referring to various container numbers ([`tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_returncode.py`](./eip7620_eof_create/test_returncode/index.md)`-k test_returncode_valid_index`) -- [x] Truncated before RETURNCODE immediate ([`tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_returncode.py::test_returncode_invalid_truncated_immediate`](./eip7620_eof_create/test_returncode/test_returncode_invalid_truncated_immediate.md)) -- [x] RETURNCODE immediate referring to non-existing container ([`tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_returncode.py`](./eip7620_eof_create/test_returncode/index.md)`-k test_returncode_invalid_index`) -- [x] Unreachable code after RETURNCODE, check that RETURNCODE is terminating ([`tests/unscheduled/eip7692_eof_v1/eip7620_eof_create/test_returncode.py::test_returncode_terminating`](./eip7620_eof_create/test_returncode/test_returncode_terminating.md)) - -### Execution - -- [ ] CREATE with EOF initcode fails in Prague (evmone-tests: state_tests/state_transition/eof_create/create_with_eof_initcode.json) -- [ ] CREATE with EOF initcode fails in Cancun (evmone-tests: state_tests/state_transition/eof_create/create_with_eof_initcode_cancun.json) -- [ ] CREATE2 with EOF initcode fails in Prague (evmone-tests: state_tests/state_transition/eof_create/create2_with_eof_initcode.json) -- [ ] CREATE2 with EOF initcode fails in Cancun (evmone-tests: state_tests/state_transition/eof_create/create2_with_eof_initcode_cancun.json) -- [ ] CREATE with legacy initcode and EOF deploy code fails (evmone-tests: state_tests/state_transition/eof_create/create_deploying_eof.json) -- [ ] CREATE2 with legacy initcode and EOF deploy code fails (evmone-tests: state_tests/state_transition/eof_create/create2_deploying_eof.json) -- [ ] EOFCREATE success with empty aux data (evmone-tests: state_tests/state_transition/eof_create/eofcreate_empty_auxdata.json) -- [ ] EOFCREATE success with aux data length equal to declared in deploy container (evmone-tests: state_tests/state_transition/eof_create/eofcreate_auxdata_equal_to_declared.json) -- [ ] EOFCREATE success with aux data longer than size declared in deploy container (evmone-tests: state_tests/state_transition/eof_create/eofcreate_auxdata_longer_than_declared.json) -- [ ] EOFCREATE with aux data shorter than size declared in deploy container fails (evmone-tests: state_tests/state_transition/eof_create/eofcreate_auxdata_shorter_than_declared.json) -- [ ] EOFCREATE success deploying DATALOADN referring to aux data portion of deploy container data (evmone-tests: state_tests/state_transition/eof_create/eofcreate_dataloadn_referring_to_auxdata.json) 
-- [ ] EOFCREATE success with deploy container having aux data and subcontainer (evmone-tests: state_tests/state_transition/eof_create/eofcreate_with_auxdata_and_subcontainer.json) -- [ ] REVERT in initcontainer (evmone-tests: state_tests/state_transition/eof_create/eofcreate_revert_empty_returndata.json) -- [ ] REVERT with return data in initcontainer (evmone-tests: state_tests/state_transition/eof_create/eofcreate_revert_non_empty_returndata.json) -- [ ] Exceptional abort in initcontainer (evmone-tests: state_tests/state_transition/eof_create/eofcreate_initcontainer_aborts.json) -- [ ] EOFCREATE with deploy container of max size 0x6000 (evmone-tests: state_tests/state_transition/eof_create/eofcreate_deploy_container_max_size.json) -- [ ] EOFCREATE with deploy container size above limit (evmone-tests: state_tests/state_transition/eof_create/eofcreate_deploy_container_too_large.json) -- [ ] EOFCREATE with deploy container data size above 64K after appending aux data (evmone-tests: state_tests/state_transition/eof_create/eofcreate_appended_data_size_larger_than_64K.json) -- [ ] EOFCREATE with deploy container size above limit after appending aux data (evmone-tests: state_tests/state_transition/eof_create/eofcreate_deploy_container_with_aux_data_too_large.json) -- [ ] EOFCREATE success nested in EOFCREATE initcode (evmone-tests: state_tests/state_transition/eof_create/eofcreate_nested_eofcreate.json) -- [ ] EOFCREATE success nested in EOFCREATE initcode that reverts (evmone-tests: state_tests/state_transition/eof_create/eofcreate_nested_eofcreate_revert.json) -- [ ] EOFCREATE with value success -- [ ] EOFCREATE with value - not enough caller balance (evmone-tests: state_tests/state_transition/eof_create/eofcreate_caller_balance_too_low.json) -- [ ] EOFCREATE not enough gas for initcode (EIP-3860) charge (evmone-tests: state_tests/state_transition/eof_create/eofcreate_not_enough_gas_for_initcode_charge.json) -- [ ] EOFCREATE not enough gas for input memory expansion (evmone-tests: state_tests/state_transition/eof_create/eofcreate_not_enough_gas_for_mem_expansion.json) -- [ ] RETURNCODE not enough gas for aux data memory expansion (evmone-tests: state_tests/state_transition/eof_create/returncode_not_enough_gas_for_mem_expansion.json) -- [ ] Successful EOFCREATE clears returndata (evmone-tests: state_tests/state_transition/eof_create/eofcreate_clears_returndata.json) -- [ ] Second EOFCREATE with the same container and salt fails (evmone-tests: state_tests/state_transition/eof_create/eofcreate_failure_after_eofcreate_success.json) -- [ ] Call created contract after EOFCREATE (evmone-tests: state_tests/state_transition/eof_create/eofcreate_call_created_contract.json) - -## EIP-7698: EOF - Creation transaction - -TODO: replace with `EIP-7873: EOF - TXCREATE and InitcodeTransaction type` test cases - -### Execution - -- [ ] Creation transaction success with empty deploy container data (evmone-tests: state_tests/state_transition/eof_create/creation_tx.json) -- [ ] Creation transaction success with data in deploy container without aux data (evmone-tests: state_tests/state_transition/eof_create/creation_tx_deploy_data.json) -- [ ] Creation transaction success with data in deploy container with aux data length equal to declared (evmone-tests: state_tests/state_transition/eof_create/creation_tx_static_auxdata_in_calldata.json) -- [ ] Creation transaction success with data in deploy container with aux data longer than declared (evmone-tests: 
state_tests/state_transition/eof_create/creation_tx_dynamic_auxdata_in_calldata.json) -- [ ] Creation transaction success deploying DATALOADN referring to aux data portion of deploy container data (evmone-tests: state_tests/state_transition/eof_create/creation_tx_dataloadn_referring_to_auxdata.json) -- [ ] Exceptional abort in creation transaction initcode (evmone-tests: state_tests/state_transition/eof_create/creation_tx_initcontainer_aborts.json) -- [ ] RETURN in creation transaction initcode fails (evmone-tests: state_tests/state_transition/eof_create/creation_tx_initcontainer_return.json) -- [ ] STOP in creation transaction initcode fails (evmone-tests: state_tests/state_transition/eof_create/creation_tx_initcontainer_stop.json) -- [ ] Creation transaction with initcode of max allowed size 0xc000 (evmone-tests: state_tests/state_transition/eof_create/creation_tx_initcontainer_max_size.json) -- [ ] Creation transaction with initcode size above limit (evmone-tests: state_tests/state_transition/eof_create/creation_tx_initcontainer_too_large.json) -- [ ] Creation transaction deploys container of max allowed size 0x6000 (evmone-tests: state_tests/state_transition/eof_create/creation_tx_deploy_container_max_size.json) -- [ ] Creation transaction deploying container of size above limit fails (evmone-tests: state_tests/state_transition/eof_create/creation_tx_deploy_container_too_large.json) -- [ ] EOFCREATE success nested in creation transaction initcode (evmone-tests: state_tests/state_transition/eof_create/creation_tx_nested_eofcreate.json) -- [ ] Creation transaction with invalid initcontainer (invalid header) (evmone-tests: state_tests/state_transition/eof_create/creation_tx_invalid_initcode_header.json) -- [ ] Creation transaction with invalid initcontainer (invalid EOF version) (evmone-tests: state_tests/state_transition/eof_create/creation_tx_invalid_eof_version.json) -- [ ] Creation transaction with invalid initcontainer (invalid max stack height) (evmone-tests: state_tests/state_transition/eof_create/creation_tx_invalid_initcode.json) -- [ ] Creation transaction fails if initcontainer has truncated data section (declared size > present data size ) (evmone-tests: state_tests/state_transition/eof_create/creation_tx_truncated_data_initcode.json) -- [ ] Creation transaction with invalid deploy container (evmone-tests: state_tests/state_transition/eof_create/creation_tx_invalid_deploycode.json) -- [ ] Create transaction with legacy initcode and EOF deploy code fails (evmone-tests: state_tests/state_transition/eof_create/creation_tx_deploying_eof.json) -- [ ] EOF creation transaction fails before Prague (evmone-tests: state_tests/state_transition/eof_create/initcode_transaction_before_unscheduled.json) diff --git a/tests/unscheduled/eip7692_eof_v1/gas_test.py b/tests/unscheduled/eip7692_eof_v1/gas_test.py deleted file mode 100644 index 184e609184..0000000000 --- a/tests/unscheduled/eip7692_eof_v1/gas_test.py +++ /dev/null @@ -1,215 +0,0 @@ -"""Utility to generate gas usage related state tests automatically.""" - -import itertools - -from execution_testing import ( - Account, - Address, - Alloc, - Bytecode, - Environment, - EVMCodeType, - Op, - StateTestFiller, - Transaction, -) -from execution_testing.forks.forks.forks import Berlin -from execution_testing.forks.helpers import Fork -from execution_testing.test_types.eof.v1 import Container, Section - -from .eip7069_extcall.spec import ( - LEGACY_CALL_FAILURE, - LEGACY_CALL_SUCCESS, -) - -WARM_ACCOUNT_ACCESS_GAS = 100 - -"""Storage addresses 
for common testing fields""" -_slot = itertools.count() -slot_cold_gas = next(_slot) -slot_warm_gas = next(_slot) -slot_oog_call_result = next(_slot) -slot_sanity_call_result = next(_slot) - - -def gas_test( - fork: Fork, - state_test: StateTestFiller, - env: Environment, - pre: Alloc, - setup_code: Bytecode, - subject_code: Bytecode, - tear_down_code: Bytecode, - cold_gas: int, - warm_gas: int | None = None, - subject_subcontainer: Container | None = None, - subject_address: Address | None = None, - subject_balance: int = 0, - oog_difference: int = 1, - out_of_gas_testing: bool = True, - *, - prelude_code: Bytecode | None = None, - eof: bool = True, -) -> None: - """ - Create State Test to check the gas cost of a sequence of EOF code. - - `setup_code` and `tear_down_code` are called multiple times during the - test, and MUST NOT have any side-effects which persist across message - calls, and in particular, any effects on the gas usage of `subject_code`. - """ - if fork < Berlin: - raise ValueError( - "Gas tests before Berlin are not supported due to CALL gas changes" - ) - - if cold_gas <= 0: - raise ValueError( - f"Target gas allocations (cold_gas) must be > 0, got {cold_gas}" - ) - if warm_gas is None: - warm_gas = cold_gas - - sender = pre.fund_eoa() - - address_baseline = pre.deploy_contract( - Container.Code(setup_code + tear_down_code) - if eof - else setup_code + tear_down_code - ) - code_subject = setup_code + subject_code + tear_down_code - address_subject = pre.deploy_contract( - code_subject - if not eof - else ( - Container.Code(code_subject) - if not subject_subcontainer - else Container( - sections=[ - Section.Code(code_subject), - Section.Container(subject_subcontainer), - ] - ) - ), - balance=subject_balance, - address=subject_address, - ) - # 2 times GAS, POP, CALL, 6 times PUSH1 - instructions charged for at every - # gas run - gas_single_gas_run = 2 * 2 + 2 + WARM_ACCOUNT_ACCESS_GAS + 6 * 3 - address_legacy_harness = pre.deploy_contract( - code=( - # warm subject and baseline without executing - ( - Op.BALANCE(address_subject) - + Op.POP - + Op.BALANCE(address_baseline) - + Op.POP - ) - # run any "prelude" code that may have universal side effects - + prelude_code - # Baseline gas run - + ( - Op.GAS - + Op.CALL(address=address_baseline, gas=Op.GAS) - + Op.POP - + Op.GAS - + Op.SWAP1 - + Op.SUB - ) - # cold gas run - + ( - Op.GAS - + Op.CALL(address=address_subject, gas=Op.GAS) - + Op.POP - + Op.GAS - + Op.SWAP1 - + Op.SUB - ) - # warm gas run - + ( - Op.GAS - + Op.CALL(address=address_subject, gas=Op.GAS) - + Op.POP - + Op.GAS - + Op.SWAP1 - + Op.SUB - ) - # Store warm gas: DUP3 is the gas of the baseline gas run - + ( - Op.DUP3 - + Op.SWAP1 - + Op.SUB - + Op.PUSH2(slot_warm_gas) - + Op.SSTORE - ) - # store cold gas: DUP2 is the gas of the baseline gas run - + ( - Op.DUP2 - + Op.SWAP1 - + Op.SUB - + Op.PUSH2(slot_cold_gas) - + Op.SSTORE - ) - + ( - ( - # do an oog gas run, unless skipped with - # `out_of_gas_testing=False`: - # - # - DUP7 is the gas of the baseline gas run, after other - # CALL args were pushed - # - subtract the gas charged by the harness - # - add warm gas charged by the subject - # - subtract `oog_difference` to cause OOG exception - # (1 by default) - Op.SSTORE( - slot_oog_call_result, - Op.CALL( - gas=Op.ADD( - warm_gas - gas_single_gas_run - oog_difference, - Op.DUP7, - ), - address=address_subject, - ), - ) - # sanity gas run: not subtracting 1 to see if enough gas - # makes the call succeed - + Op.SSTORE( - slot_sanity_call_result, - Op.CALL( - 
gas=Op.ADD(warm_gas - gas_single_gas_run, Op.DUP7), - address=address_subject, - ), - ) - + Op.STOP - ) - if out_of_gas_testing - else Op.STOP - ) - ), - evm_code_type=EVMCodeType.LEGACY, # Needs to be legacy to use GAS - # opcode - ) - - post = { - address_legacy_harness: Account( - storage={ - slot_warm_gas: warm_gas, - slot_cold_gas: cold_gas, - }, - ), - } - - if out_of_gas_testing: - post[address_legacy_harness].storage[slot_oog_call_result] = ( - LEGACY_CALL_FAILURE - ) - post[address_legacy_harness].storage[slot_sanity_call_result] = ( - LEGACY_CALL_SUCCESS - ) - - tx = Transaction( - to=address_legacy_harness, gas_limit=env.gas_limit, sender=sender - ) - - state_test(env=env, pre=pre, tx=tx, post=post) diff --git a/whitelist.txt b/whitelist.txt index 171afdf61b..2cf28a9928 100644 --- a/whitelist.txt +++ b/whitelist.txt @@ -482,14 +482,6 @@ env envvar eoa EOAs -eof -EOF1 -eofcreate -EOFException -eofparse -eoftest -EOFv -eofwrap epilog eq ERC @@ -505,7 +497,6 @@ ethereum's EthereumCLI EthereumJS evm -EVMCodeType Evmone evmone exc From e0c64f01aa30903b57fc6f23efa994ce20e880eb Mon Sep 17 00:00:00 2001 From: danceratopz Date: Mon, 19 Jan 2026 14:11:43 +0100 Subject: [PATCH 084/154] fix(tests-execute): relax pydantic checks on `GetPayloadResponse` --- packages/testing/src/execution_testing/rpc/rpc_types.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/testing/src/execution_testing/rpc/rpc_types.py b/packages/testing/src/execution_testing/rpc/rpc_types.py index 2bb6b8681d..512ec68c6a 100644 --- a/packages/testing/src/execution_testing/rpc/rpc_types.py +++ b/packages/testing/src/execution_testing/rpc/rpc_types.py @@ -194,6 +194,8 @@ class BlobAndProofV2(CamelModel): class GetPayloadResponse(CamelModel): """Represents the response of a get payload request.""" + model_config = CamelModel.model_config | {"extra": "ignore"} + execution_payload: FixtureExecutionPayload blobs_bundle: BlobsBundle | None = None execution_requests: List[Bytes] | None = None From 89b3e8a22cc19ee455f1221ddf89b66eeda8a7e5 Mon Sep 17 00:00:00 2001 From: spencer Date: Mon, 19 Jan 2026 15:42:23 +0000 Subject: [PATCH 085/154] chore(ci): skip redundant checks in workflows (#2038) Co-authored-by: danceratopz --- .github/PULL_REQUEST_TEMPLATE.md | 2 -- .github/workflows/benchmark.yaml | 8 ++++++++ .github/workflows/hive-consume.yaml | 8 ++++++++ .github/workflows/test.yaml | 16 ++++++++++++++++ 4 files changed, 32 insertions(+), 2 deletions(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 05cff6d8d6..0d5b8b038d 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -10,11 +10,9 @@ N/A. - [ ] All: Ran fast `tox` checks to avoid unnecessary CI fails, see also [Code Standards](https://eest.ethereum.org/main/getting_started/code_standards/) and [Enabling Pre-commit Checks](https://eest.ethereum.org/main/dev/precommit/): - ```console uvx tox -e static ``` - - [ ] All: PR title adheres to the [repo standard](https://eest.ethereum.org/main/getting_started/contributing/?h=contri#commit-messages-issue-and-pr-titles) - it will be used as the squash commit message and should start `type(scope):`. - [ ] All: Considered updating the online docs in the [./docs/](/ethereum/execution-specs/blob/HEAD/docs/) directory. - [ ] All: Set appropriate labels for the changes (only maintainers can apply labels). 
diff --git a/.github/workflows/benchmark.yaml b/.github/workflows/benchmark.yaml index ee7a816c3d..532f765ce2 100644 --- a/.github/workflows/benchmark.yaml +++ b/.github/workflows/benchmark.yaml @@ -11,6 +11,14 @@ on: - "packages/testing/src/execution_testing/cli/pytest_commands/plugins/**" - ".github/workflows/benchmark.yaml" pull_request: + paths-ignore: + - "**.md" + - "LICENSE*" + - ".gitignore" + - ".vscode/**" + - "whitelist.txt" + - "docs/**" + - "mkdocs.yml" workflow_dispatch: concurrency: diff --git a/.github/workflows/hive-consume.yaml b/.github/workflows/hive-consume.yaml index 13b30cf16d..d2374b1306 100644 --- a/.github/workflows/hive-consume.yaml +++ b/.github/workflows/hive-consume.yaml @@ -4,6 +4,14 @@ on: push: branches: - "forks/**" + paths-ignore: + - "**.md" + - "LICENSE*" + - ".gitignore" + - ".vscode/**" + - "whitelist.txt" + - "docs/**" + - "mkdocs.yml" pull_request: paths: - ".github/workflows/hive-consume.yaml" diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index f5a8af71a6..2e502b7af5 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -6,8 +6,24 @@ on: - master - mainnet - "forks/**" + paths-ignore: + - "**.md" + - "LICENSE*" + - ".gitignore" + - ".vscode/**" + - "whitelist.txt" + - "docs/**" + - "mkdocs.yml" workflow_dispatch: pull_request: + paths-ignore: + - "**.md" + - "LICENSE*" + - ".gitignore" + - ".vscode/**" + - "whitelist.txt" + - "docs/**" + - "mkdocs.yml" concurrency: group: ${{ github.workflow }}-${{ github.ref || github.run_id }} From 4ef381a0f75c96b52da635653ab580e731d3882a Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Tue, 20 Jan 2026 04:51:07 +0100 Subject: [PATCH 086/154] chore(testing/vm): Remove UndefinedOpcodes (#2044) --- .../testing/src/execution_testing/__init__.py | 2 - .../tools/tests/test_code.py | 15 +-- .../src/execution_testing/vm/__init__.py | 2 - .../src/execution_testing/vm/opcodes.py | 111 ------------------ tests/frontier/opcodes/test_all_opcodes.py | 9 +- 5 files changed, 5 insertions(+), 134 deletions(-) diff --git a/packages/testing/src/execution_testing/__init__.py b/packages/testing/src/execution_testing/__init__.py index 0af1799866..b10c00e353 100644 --- a/packages/testing/src/execution_testing/__init__.py +++ b/packages/testing/src/execution_testing/__init__.py @@ -106,7 +106,6 @@ Opcode, OpcodeCallArg, Opcodes, - UndefinedOpcodes, call_return_code, ) @@ -190,7 +189,6 @@ "TransactionTest", "TransactionTestFiller", "TransactionType", - "UndefinedOpcodes", "While", "CoerceBytes", "Withdrawal", diff --git a/packages/testing/src/execution_testing/tools/tests/test_code.py b/packages/testing/src/execution_testing/tools/tests/test_code.py index 45d0e88e26..5da2d3b57b 100644 --- a/packages/testing/src/execution_testing/tools/tests/test_code.py +++ b/packages/testing/src/execution_testing/tools/tests/test_code.py @@ -27,7 +27,7 @@ ) from execution_testing.specs import StateTest from execution_testing.test_types import Alloc, Environment, Transaction -from execution_testing.vm import Op, UndefinedOpcodes +from execution_testing.vm import Op from ..tools_code import CalldataCase, Case, Conditional, Initcode, Switch @@ -699,16 +699,3 @@ def test_switch( t8n=default_t8n, fixture_format=BlockchainFixture, ) - - -def test_full_opcode_range() -> None: - """ - Test that the full opcode range is covered by the opcode set defined by - Opcodes and UndefineOpcodes. 
- """ - assert len(set(Op) & set(UndefinedOpcodes)) == 0 - full_possible_opcode_set = set(Op) | set(UndefinedOpcodes) - assert len(full_possible_opcode_set) == 256 - assert {op.hex() for op in full_possible_opcode_set} == { - f"{i:02x}" for i in range(256) - } diff --git a/packages/testing/src/execution_testing/vm/__init__.py b/packages/testing/src/execution_testing/vm/__init__.py index 40f4410fe0..b07127aabf 100644 --- a/packages/testing/src/execution_testing/vm/__init__.py +++ b/packages/testing/src/execution_testing/vm/__init__.py @@ -13,7 +13,6 @@ Opcode, OpcodeCallArg, Opcodes, - UndefinedOpcodes, ) # Ergonomic alias for the commonly used Opcodes enum @@ -31,6 +30,5 @@ "OpcodeCallArg", "OpcodeGasCalculator", "Opcodes", - "UndefinedOpcodes", "call_return_code", ) diff --git a/packages/testing/src/execution_testing/vm/opcodes.py b/packages/testing/src/execution_testing/vm/opcodes.py index 3fa693e2cc..31f7cf10e9 100644 --- a/packages/testing/src/execution_testing/vm/opcodes.py +++ b/packages/testing/src/execution_testing/vm/opcodes.py @@ -5768,114 +5768,3 @@ class Macros(Macro, Enum): ---- - None """ - - -class UndefinedOpcodes(Opcode, Enum): - """Enum containing all unknown opcodes (107 at the moment).""" - - OPCODE_0C = Opcode(0x0C) - OPCODE_0D = Opcode(0x0D) - OPCODE_0E = Opcode(0x0E) - OPCODE_0F = Opcode(0x0F) - OPCODE_1F = Opcode(0x1F) - OPCODE_21 = Opcode(0x21) - OPCODE_22 = Opcode(0x22) - OPCODE_23 = Opcode(0x23) - OPCODE_24 = Opcode(0x24) - OPCODE_25 = Opcode(0x25) - OPCODE_26 = Opcode(0x26) - OPCODE_27 = Opcode(0x27) - OPCODE_28 = Opcode(0x28) - OPCODE_29 = Opcode(0x29) - OPCODE_2A = Opcode(0x2A) - OPCODE_2B = Opcode(0x2B) - OPCODE_2C = Opcode(0x2C) - OPCODE_2D = Opcode(0x2D) - OPCODE_2E = Opcode(0x2E) - OPCODE_2F = Opcode(0x2F) - OPCODE_4B = Opcode(0x4B) - OPCODE_4C = Opcode(0x4C) - OPCODE_4D = Opcode(0x4D) - OPCODE_4E = Opcode(0x4E) - OPCODE_4F = Opcode(0x4F) - OPCODE_A5 = Opcode(0xA5) - OPCODE_A6 = Opcode(0xA6) - OPCODE_A7 = Opcode(0xA7) - OPCODE_A8 = Opcode(0xA8) - OPCODE_A9 = Opcode(0xA9) - OPCODE_AA = Opcode(0xAA) - OPCODE_AB = Opcode(0xAB) - OPCODE_AC = Opcode(0xAC) - OPCODE_AD = Opcode(0xAD) - OPCODE_AE = Opcode(0xAE) - OPCODE_AF = Opcode(0xAF) - OPCODE_B0 = Opcode(0xB0) - OPCODE_B1 = Opcode(0xB1) - OPCODE_B2 = Opcode(0xB2) - OPCODE_B3 = Opcode(0xB3) - OPCODE_B4 = Opcode(0xB4) - OPCODE_B5 = Opcode(0xB5) - OPCODE_B6 = Opcode(0xB6) - OPCODE_B7 = Opcode(0xB7) - OPCODE_B8 = Opcode(0xB8) - OPCODE_B9 = Opcode(0xB9) - OPCODE_BA = Opcode(0xBA) - OPCODE_BB = Opcode(0xBB) - OPCODE_BC = Opcode(0xBC) - OPCODE_BD = Opcode(0xBD) - OPCODE_BE = Opcode(0xBE) - OPCODE_BF = Opcode(0xBF) - OPCODE_C0 = Opcode(0xC0) - OPCODE_C1 = Opcode(0xC1) - OPCODE_C2 = Opcode(0xC2) - OPCODE_C3 = Opcode(0xC3) - OPCODE_C4 = Opcode(0xC4) - OPCODE_C5 = Opcode(0xC5) - OPCODE_C6 = Opcode(0xC6) - OPCODE_C7 = Opcode(0xC7) - OPCODE_C8 = Opcode(0xC8) - OPCODE_C9 = Opcode(0xC9) - OPCODE_CA = Opcode(0xCA) - OPCODE_CB = Opcode(0xCB) - OPCODE_CC = Opcode(0xCC) - OPCODE_CD = Opcode(0xCD) - OPCODE_CE = Opcode(0xCE) - OPCODE_CF = Opcode(0xCF) - OPCODE_D0 = Opcode(0xD0) - OPCODE_D1 = Opcode(0xD1) - OPCODE_D2 = Opcode(0xD2) - OPCODE_D3 = Opcode(0xD3) - OPCODE_D4 = Opcode(0xD4) - OPCODE_D5 = Opcode(0xD5) - OPCODE_D6 = Opcode(0xD6) - OPCODE_D7 = Opcode(0xD7) - OPCODE_D8 = Opcode(0xD8) - OPCODE_D9 = Opcode(0xD9) - OPCODE_DA = Opcode(0xDA) - OPCODE_DB = Opcode(0xDB) - OPCODE_DC = Opcode(0xDC) - OPCODE_DD = Opcode(0xDD) - OPCODE_DE = Opcode(0xDE) - OPCODE_DF = Opcode(0xDF) - OPCODE_E0 = Opcode(0xE0) - OPCODE_E1 = Opcode(0xE1) - 
OPCODE_E2 = Opcode(0xE2) - OPCODE_E3 = Opcode(0xE3) - OPCODE_E4 = Opcode(0xE4) - OPCODE_E5 = Opcode(0xE5) - OPCODE_E6 = Opcode(0xE6) - OPCODE_E7 = Opcode(0xE7) - OPCODE_E8 = Opcode(0xE8) - OPCODE_E9 = Opcode(0xE9) - OPCODE_EA = Opcode(0xEA) - OPCODE_EB = Opcode(0xEB) - OPCODE_EC = Opcode(0xEC) - OPCODE_ED = Opcode(0xED) - OPCODE_EE = Opcode(0xEE) - OPCODE_EF = Opcode(0xEF) - OPCODE_F6 = Opcode(0xF6) - OPCODE_F7 = Opcode(0xF7) - OPCODE_F8 = Opcode(0xF8) - OPCODE_F9 = Opcode(0xF9) - OPCODE_FB = Opcode(0xFB) - OPCODE_FC = Opcode(0xFC) diff --git a/tests/frontier/opcodes/test_all_opcodes.py b/tests/frontier/opcodes/test_all_opcodes.py index bf87d21836..d9d4eac34c 100644 --- a/tests/frontier/opcodes/test_all_opcodes.py +++ b/tests/frontier/opcodes/test_all_opcodes.py @@ -18,7 +18,6 @@ ParameterSet, StateTestFiller, Transaction, - UndefinedOpcodes, gas_test, ) @@ -78,7 +77,9 @@ def test_all_opcodes( code_worked = 1000 code_contract: Dict[Opcode, Address] = {} - for opcode in sorted(set(Op) | set(UndefinedOpcodes)): + valid_opcodes = set(fork.valid_opcodes()) + all_opcodes = set(Opcode(i) for i in range(0xFF + 1)) + for opcode in sorted(valid_opcodes | all_opcodes): code_contract[opcode] = pre.deploy_contract( balance=10, code=prepare_stack(opcode) + opcode + prepare_suffix(opcode), @@ -116,9 +117,7 @@ def test_all_opcodes( sender=pre.fund_eoa(), gas_limit=9_000_000, to=contract_address, - data=b"", - value=0, - protected=False, + protected=fork.supports_protected_txs(), ) state_test(pre=pre, post=post, tx=tx) From d3c4047f8212695607df1e7c56b55669d3084e54 Mon Sep 17 00:00:00 2001 From: felix Date: Tue, 20 Jan 2026 17:38:55 +0100 Subject: [PATCH 087/154] chore(tooling): do not gitignore source files in `fixtures` packages (#2047) --- .gitignore | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index bec46f4748..6a84f558d6 100644 --- a/.gitignore +++ b/.gitignore @@ -74,10 +74,10 @@ pip-delete-this-directory.txt /doc/autoapi tests/execution-spec-generated-tests -tests/fixtures +/tests/**/fixtures/ tests/t8n_testdata -fixtures/ +/fixtures/ # Trace output (generated by --traces flag) /traces/ From 77c2b9ca064e4bf3430a4f3f0890fc2869263cc0 Mon Sep 17 00:00:00 2001 From: danceratopz Date: Tue, 20 Jan 2026 18:54:49 +0100 Subject: [PATCH 088/154] fix(tooling,test-*): enable project ruff config for packages/testing (#2046) * fix(tooling): enable ruff config by inheriting from root pyproject.toml * style(test-*): fix I001 unsorted-imports * style(test-*): fix D413 missing-blank-line-after-last-section * style(test-*): fix D213 multi-line-summary-second-line * style(test-*): fix D400/D415 docstring punctuation Add trailing periods to docstrings that were missing terminal punctuation. * style(test-*): fix D101/D102/D103 missing docstrings Add docstrings to GenesisState class and serialize_model method. Add noqa comments for test functions where names are self-documenting. * style(test-*): fix N801/N806 naming conventions Rename uppercase variables in functions to lowercase per PEP8. Add noqa for stub class that must match external library API. * style(test-*): fix ARG001/ARG002/ARG003 unused arguments Remove unused `fork` fixture parameters from test functions in test_expect.py - pytest resolves these automatically via state_test. Use `del` for arguments that cannot be removed: - execution/base.py: Base class method signature for subclass overrides. - execute/pre_alloc.py: Method override matching parent class signature. 
- execute.py, filler.py: Pytest fixture parameters (fixture API contract). - forks.py: pytest_collection_modifyitems hook signature (pytest API). - benchmarking.py: Method overrides matching BenchmarkParametrizer interface. - execution_specs.py: Method override matching TransitionTool.is_installed(). * style(test-*): fix E501 in base_types/ * style(test-*): fix E501 in tools/ and forks/ * style(test-*): fix E501 in specs/ * style(test-*): fix E501 in remaining packages Fix line-too-long errors (E501, max 79 chars) in: - conftest.py - checklists/tests/test_checklist_template_consistency.py - config/app.py - exceptions/exceptions.py and exceptions/base.py - execution/base.py, blob_transaction.py, transaction_post.py - rpc/rpc.py, rpc_types.py, tests/test_types.py - vm/tests/test_vm.py * style(test-*): fix E501 in test_types/ * style(test-*): fix E501 in client_clis/ * style(test-cli): fix E501 in cli/ (excluging pytest plugins) * style(test-cli): fix E501 in pytest_commands/plugins/consume * style(test-cli): fix E501 in pytest_commands/plugins/filler * style(test-cli): fix E501 in pytest_commands/plugins/execute * style(test-cli): fix E501 in pytest_commands/plugins (misc) * style(test-fill): remove duplicate return statement in checklist * fix(test-types): avoid shadowing loop counter in block access list validation Rename extracted variables from `actual_idx`/`expected_idx` to `actual_ba_idx`/`expected_ba_idx` to avoid shadowing the outer loop counter `actual_idx`. Co-authored-by: spencer --------- Co-authored-by: spencer --- packages/testing/pyproject.toml | 2 +- packages/testing/src/conftest.py | 10 +- .../base_types/composite_types.py | 3 +- .../base_types/conversions.py | 3 +- .../execution_testing/base_types/mixins.py | 6 +- .../reference_spec/git_reference_spec.py | 3 +- .../base_types/serialization.py | 3 +- .../base_types/tests/test_base_types.py | 3 +- .../base_types/typing_utils.py | 1 + .../checklists/eip_checklist.py | 2 +- .../test_checklist_template_consistency.py | 4 +- .../execution_testing/cli/benchmark_parser.py | 28 +++- .../execution_testing/cli/check_fixtures.py | 8 +- .../cli/diff_opcode_counts.py | 11 +- .../cli/eest/commands/info.py | 3 +- .../cli/eest/make/commands/test.py | 6 +- .../src/execution_testing/cli/eest/quotes.py | 33 +++-- .../src/execution_testing/cli/evm_bytes.py | 4 +- .../execution_testing/cli/extract_config.py | 18 ++- .../cli/fillerconvert/verify_filled.py | 3 +- .../cli/fuzzer_bridge/cli.py | 22 ++- .../cli/fuzzer_bridge/converter.py | 3 +- .../src/execution_testing/cli/gen_index.py | 12 +- .../cli/generate_checklist_stubs.py | 11 +- .../cli/gentest/test_context_providers.py | 3 +- .../src/execution_testing/cli/hasher.py | 3 +- .../cli/modify_static_test_gas_limits.py | 33 +++-- .../cli/pytest_commands/checklist.py | 2 +- .../cli/pytest_commands/fill.py | 2 +- .../plugins/consume/consume.py | 46 +++--- .../plugins/consume/direct/conftest.py | 20 ++- .../plugins/consume/simulators/base.py | 3 +- .../plugins/consume/simulators/exceptions.py | 4 +- .../consume/simulators/helpers/exceptions.py | 5 +- .../consume/simulators/helpers/timing.py | 3 +- .../consume/simulators/rlp/conftest.py | 5 +- .../simulator_logic/test_via_engine.py | 49 ++++--- .../simulator_logic/test_via_rlp.py | 6 +- .../simulator_logic/test_via_sync.py | 94 ++++++++----- .../consume/simulators/single_test_client.py | 16 ++- .../consume/simulators/sync/conftest.py | 3 +- .../simulators/test_case_description.py | 20 ++- .../consume/tests/test_consume_args.py | 5 +- 
.../tests/test_fixtures_source_input_types.py | 6 +- .../plugins/custom_logging/plugin_logging.py | 14 +- .../plugins/execute/contracts.py | 3 +- .../plugins/execute/eth_config/eth_config.py | 62 ++++++--- .../execute/eth_config/execute_eth_config.py | 21 +-- .../tests/test_execute_eth_config.py | 58 +++++--- .../execute/eth_config/tests/test_genesis.py | 6 +- .../plugins/execute/execute.py | 130 +++++++++++------ .../execute_deploy_required_contracts.py | 19 +-- .../execute/execute_flags/execute_flags.py | 6 +- .../plugins/execute/pre_alloc.py | 115 +++++++++------ .../execute/rpc/chain_builder_eth_rpc.py | 3 +- .../plugins/execute/rpc/hive.py | 16 ++- .../plugins/execute/rpc/remote.py | 58 +++++--- .../plugins/execute/rpc/remote_seed_sender.py | 11 +- .../pytest_commands/plugins/execute/sender.py | 66 +++++---- .../plugins/execute/tests/test_pre_alloc.py | 4 +- .../plugins/filler/eip_checklist.py | 30 ++-- .../pytest_commands/plugins/filler/filler.py | 131 ++++++++++-------- .../plugins/filler/fixture_output.py | 15 +- .../filler/gen_test_doc/gen_test_doc.py | 22 +-- .../plugins/filler/gen_test_doc/page_props.py | 9 +- .../plugins/filler/ported_tests.py | 18 +-- .../plugins/filler/pre_alloc.py | 14 +- .../plugins/filler/static_filler.py | 39 +++--- .../plugins/filler/tests/conftest.py | 6 +- .../plugins/filler/tests/test_benchmarking.py | 16 ++- .../filler/tests/test_generate_all_formats.py | 2 +- .../filler/tests/test_prealloc_group.py | 48 ++++--- .../test_prealloc_group_usage_example.py | 3 +- .../tests/test_slow_marker_pre_alloc.py | 8 +- .../filler/tests/test_verify_sync_marker.py | 2 +- .../pytest_commands/plugins/filler/witness.py | 19 +-- .../pytest_commands/plugins/forks/forks.py | 78 +++++++---- .../tests/test_bad_command_line_options.py | 5 +- .../forks/tests/test_bad_validity_markers.py | 12 +- .../forks/tests/test_covariant_markers.py | 14 +- .../plugins/forks/tests/test_markers.py | 17 ++- .../cli/pytest_commands/plugins/help/help.py | 27 +++- .../plugins/pytest_hive/pytest_hive.py | 22 +-- .../plugins/shared/benchmarking.py | 12 +- .../plugins/shared/execute_fill.py | 66 ++++----- .../cli/pytest_commands/plugins/solc/solc.py | 10 +- .../spec_version_checker.py | 22 +-- .../cli/pytest_commands/processors.py | 3 +- .../cli/pytest_commands/watcher.py | 5 +- .../cli/show_pre_alloc_group_stats.py | 31 +++-- .../cli/tests/test_pytest_execute_command.py | 11 +- .../cli/tests/test_pytest_fill_command.py | 10 +- .../src/execution_testing/cli/tox_helpers.py | 18 ++- .../client_clis/cli_types.py | 9 +- .../client_clis/clis/besu.py | 114 ++++++++++----- .../client_clis/clis/erigon.py | 99 +++++++++---- .../client_clis/clis/ethereumjs.py | 63 ++++++--- .../client_clis/clis/ethrex.py | 53 ++++--- .../client_clis/clis/evmone.py | 74 +++++++--- .../client_clis/clis/execution_specs.py | 48 +++++-- .../client_clis/clis/geth.py | 70 +++++++--- .../client_clis/clis/nethermind.py | 122 +++++++++++----- .../client_clis/clis/nimbus.py | 43 ++++-- .../client_clis/clis/reth.py | 56 +++++--- .../client_clis/ethereum_cli.py | 15 +- .../client_clis/tests/test_execution_specs.py | 3 +- .../client_clis/tests/test_transition_tool.py | 5 +- .../client_clis/transition_tool.py | 10 +- .../src/execution_testing/config/app.py | 4 +- .../exceptions/exceptions.py | 3 +- .../exceptions/exceptions/base.py | 3 +- .../src/execution_testing/execution/base.py | 5 +- .../execution/blob_transaction.py | 54 +++++--- .../execution/transaction_post.py | 30 ++-- .../src/execution_testing/forks/base_fork.py | 2 +- 
.../execution_testing/forks/forks/forks.py | 16 +-- .../src/execution_testing/forks/helpers.py | 18 +-- .../forks/tests/test_forks.py | 14 +- .../forks/tests/test_opcode_gas_costs.py | 10 +- .../src/execution_testing/logging/__init__.py | 5 +- .../testing/src/execution_testing/rpc/rpc.py | 68 +++++---- .../src/execution_testing/rpc/rpc_types.py | 5 +- .../execution_testing/rpc/tests/test_types.py | 26 ++-- .../src/execution_testing/specs/base.py | 31 +++-- .../src/execution_testing/specs/benchmark.py | 38 +++-- .../src/execution_testing/specs/blockchain.py | 39 ++++-- .../src/execution_testing/specs/debugging.py | 3 +- .../src/execution_testing/specs/state.py | 28 ++-- .../specs/static_state/account.py | 3 +- .../specs/static_state/common/common.py | 3 +- .../specs/static_state/environment.py | 6 +- .../specs/tests/test_benchmark.py | 16 ++- .../specs/tests/test_expect.py | 6 - .../specs/tests/test_fixtures.py | 9 +- .../test_types/account_types.py | 13 +- .../test_types/blob_types.py | 76 +++++----- .../account_absent_values.py | 58 +++++--- .../block_access_list/expectations.py | 59 +++++--- .../test_types/block_access_list/modifiers.py | 31 +++-- .../test_types/block_access_list/t8n.py | 23 +-- .../test_types/receipt_types.py | 2 +- .../test_types/tests/test_blob_types.py | 23 +-- .../test_block_access_list_serialization.py | 2 +- .../tests/test_block_access_list_t8n.py | 17 ++- .../test_types/tests/test_helpers.py | 4 +- .../test_types/tests/test_post_alloc.py | 16 ++- .../test_types/tests/test_types.py | 33 ++--- .../test_types/transaction_types.py | 7 +- .../tools/tests/test_code.py | 3 +- .../tools/utility/generators.py | 17 +-- .../execution_testing/tools/utility/pytest.py | 3 +- .../tools/utility/tests/test_pytest.py | 4 +- .../tools/utility/versioning.py | 2 +- .../src/execution_testing/vm/tests/test_vm.py | 4 +- .../stubs/requests_unixsocket/__init__.pyi | 7 +- .../stubs/requests_unixsocket/adapters.pyi | 9 +- 156 files changed, 2233 insertions(+), 1276 deletions(-) diff --git a/packages/testing/pyproject.toml b/packages/testing/pyproject.toml index f567d51ea0..dcc4d4af68 100644 --- a/packages/testing/pyproject.toml +++ b/packages/testing/pyproject.toml @@ -114,7 +114,7 @@ exclude = ["*tests*"] "execution_testing.test_types" = ["kzg_trusted_setup.txt"] [tool.ruff] -line-length = 79 +extend = "../../pyproject.toml" [tool.codespell] skip = ".venv,__pycache__,.git,build,dist,*.pyc,*.lock" diff --git a/packages/testing/src/conftest.py b/packages/testing/src/conftest.py index 7258e23c9c..835ef90170 100644 --- a/packages/testing/src/conftest.py +++ b/packages/testing/src/conftest.py @@ -89,13 +89,11 @@ def default_t8n( DEFAULT_TRANSITION_TOOL_FOR_UNIT_TESTS.__name__ ) if instance is None: - raise Exception( - f"Failed to instantiate {DEFAULT_TRANSITION_TOOL_FOR_UNIT_TESTS.__name__}" - ) + tool_name = DEFAULT_TRANSITION_TOOL_FOR_UNIT_TESTS.__name__ + raise Exception(f"Failed to instantiate {tool_name}") if isinstance(instance, Exception): - raise Exception( - f"Failed to instantiate {DEFAULT_TRANSITION_TOOL_FOR_UNIT_TESTS.__name__}" - ) from instance + tool_name = DEFAULT_TRANSITION_TOOL_FOR_UNIT_TESTS.__name__ + raise Exception(f"Failed to instantiate {tool_name}") from instance return instance diff --git a/packages/testing/src/execution_testing/base_types/composite_types.py b/packages/testing/src/execution_testing/base_types/composite_types.py index c78c1e0ee9..1db6e9f7b2 100644 --- a/packages/testing/src/execution_testing/base_types/composite_types.py +++ 
b/packages/testing/src/execution_testing/base_types/composite_types.py @@ -152,9 +152,10 @@ def __str__(self) -> str: label_str = "" if self.address.label is not None: label_str = f" ({self.address.label})" + hint_str = f" ({self.hint})" if self.hint else "" return ( f"incorrect value in address {self.address}{label_str} for " - + f"key {Hash(self.key)}{f' ({self.hint})' if self.hint else ''}:" + + f"key {Hash(self.key)}{hint_str}:" + f" want {HexNumber(self.want)} (dec:{int(self.want)})," + f" got {HexNumber(self.got)} (dec:{int(self.got)})" ) diff --git a/packages/testing/src/execution_testing/base_types/conversions.py b/packages/testing/src/execution_testing/base_types/conversions.py index d22d64fd4e..d7d409d41d 100644 --- a/packages/testing/src/execution_testing/base_types/conversions.py +++ b/packages/testing/src/execution_testing/base_types/conversions.py @@ -62,7 +62,8 @@ def to_fixed_size_bytes( if right_padding: return input_bytes.ljust(size, b"\x00") raise Exception( - f"input is too small for fixed size bytes: {len(input_bytes)} < {size}\n" + f"input is too small for fixed size bytes: " + f"{len(input_bytes)} < {size}\n" "Use `left_padding=True` or `right_padding=True` to allow padding." ) return input_bytes diff --git a/packages/testing/src/execution_testing/base_types/mixins.py b/packages/testing/src/execution_testing/base_types/mixins.py index 7199bb9d40..f691d2a9af 100644 --- a/packages/testing/src/execution_testing/base_types/mixins.py +++ b/packages/testing/src/execution_testing/base_types/mixins.py @@ -78,9 +78,9 @@ def __repr_args__(self) -> Any: # Convert field values based on their type. This ensures consistency # between JSON and Python object representations. Should a custom - # `__repr__` be needed for a specific type, it can be added in the match - # statement below. Otherwise, the default string representation is - # used. + # `__repr__` be needed for a specific type, it can be added in the + # match statement below. Otherwise, the default string representation + # is used. 
repr_attrs: List[Tuple[str, Any]] = [] for a, v in attrs: match v: diff --git a/packages/testing/src/execution_testing/base_types/reference_spec/git_reference_spec.py b/packages/testing/src/execution_testing/base_types/reference_spec/git_reference_spec.py index 69cac9b466..8aaccfdd58 100644 --- a/packages/testing/src/execution_testing/base_types/reference_spec/git_reference_spec.py +++ b/packages/testing/src/execution_testing/base_types/reference_spec/git_reference_spec.py @@ -76,7 +76,8 @@ def _get_latest_spec(self) -> Dict | None: if response.status_code != 200: warnings.warn( - f"Unable to get latest version, status code: {response.status_code} - " + f"Unable to get latest version, " + f"status code: {response.status_code} - " f"text: {response.text}", stacklevel=2, ) diff --git a/packages/testing/src/execution_testing/base_types/serialization.py b/packages/testing/src/execution_testing/base_types/serialization.py index ddc01c2483..251fd4de26 100644 --- a/packages/testing/src/execution_testing/base_types/serialization.py +++ b/packages/testing/src/execution_testing/base_types/serialization.py @@ -122,7 +122,8 @@ def to_list(self, signing: bool = False) -> List[Any]: if signing: if not self.signable: raise Exception( - f'Object "{self.__class__.__name__}" does not support signing' + f'Object "{self.__class__.__name__}" ' + "does not support signing" ) field_list = self.get_rlp_signing_fields() else: diff --git a/packages/testing/src/execution_testing/base_types/tests/test_base_types.py b/packages/testing/src/execution_testing/base_types/tests/test_base_types.py index 78d32a30d8..3bd3551adf 100644 --- a/packages/testing/src/execution_testing/base_types/tests/test_base_types.py +++ b/packages/testing/src/execution_testing/base_types/tests/test_base_types.py @@ -285,7 +285,8 @@ def test_json_deserialization( """Test that to_json returns the expected JSON for the given object.""" if not can_be_deserialized: pytest.skip( - reason="The model instance in this case can not be deserialized" + reason="The model instance in this case can not be " + "deserialized" ) model_type = type(model_instance) assert model_type(**json) == model_instance diff --git a/packages/testing/src/execution_testing/base_types/typing_utils.py b/packages/testing/src/execution_testing/base_types/typing_utils.py index 18412663a3..f20aa933a3 100644 --- a/packages/testing/src/execution_testing/base_types/typing_utils.py +++ b/packages/testing/src/execution_testing/base_types/typing_utils.py @@ -17,6 +17,7 @@ def unwrap_annotation(hint: Any) -> Any: Returns: The unwrapped base type + """ type_args = get_args(hint) if not type_args: diff --git a/packages/testing/src/execution_testing/checklists/eip_checklist.py b/packages/testing/src/execution_testing/checklists/eip_checklist.py index 9fb34ef3a8..df0f7f700f 100644 --- a/packages/testing/src/execution_testing/checklists/eip_checklist.py +++ b/packages/testing/src/execution_testing/checklists/eip_checklist.py @@ -257,7 +257,7 @@ class DataPortionVariables( If the opcode contains variables in its data portion, for each variable `n` of the opcode that accesses the nth stack item, test `n` being: - """ + """ # noqa: D400,D415 class Top(ChecklistItem): """`n` is the top stack item.""" diff --git a/packages/testing/src/execution_testing/checklists/tests/test_checklist_template_consistency.py b/packages/testing/src/execution_testing/checklists/tests/test_checklist_template_consistency.py index 29c9cc6841..7d8c478c1c 100644 --- 
a/packages/testing/src/execution_testing/checklists/tests/test_checklist_template_consistency.py +++ b/packages/testing/src/execution_testing/checklists/tests/test_checklist_template_consistency.py @@ -81,8 +81,8 @@ def test_checklist_template_consistency() -> None: if missing_in_checklist: errors.append( - f"IDs found in markdown template but missing in EIPChecklist class " - f"({len(missing_in_checklist)} items):\n" + f"IDs found in markdown template but missing in EIPChecklist " + f"class ({len(missing_in_checklist)} items):\n" + "\n".join(f" - `{id_}`" for id_ in sorted(missing_in_checklist)) ) diff --git a/packages/testing/src/execution_testing/cli/benchmark_parser.py b/packages/testing/src/execution_testing/cli/benchmark_parser.py index f05612fea6..d8e400952a 100644 --- a/packages/testing/src/execution_testing/cli/benchmark_parser.py +++ b/packages/testing/src/execution_testing/cli/benchmark_parser.py @@ -55,7 +55,8 @@ def visit_FunctionDef(self, node: ast.FunctionDef) -> None: if not self._has_benchmark_test_param(node): return - # Filter for code generator usage (required for fixed-opcode-count mode) + # Filter for code generator usage (required for fixed-opcode-count + # mode) if not self._uses_code_generator(node): return @@ -149,19 +150,37 @@ def _extract_opcode_name(self, node: ast.expr) -> str | None: Supported patterns (opcode must be first element): Case 1 - Direct opcode reference: + + ```python @pytest.mark.parametrize("opcode", [Op.ADD, Op.MUL]) + ``` Result: ["ADD", "MUL"] Case 2a - pytest.param with direct opcode: - @pytest.mark.parametrize("opcode", [pytest.param(Op.ADD, id="add")]) + + ```python + @pytest.mark.parametrize( + "opcode", [pytest.param(Op.ADD, id="add")] + ) + ``` Result: ["ADD"] Case 2b - pytest.param with tuple (opcode first): - @pytest.mark.parametrize("opcode,arg", [pytest.param((Op.ADD, 123))]) + + ```python + @pytest.mark.parametrize( + "opcode,arg", [pytest.param((Op.ADD, 123))] + ) + ``` Result: ["ADD"] Case 3 - Plain tuple (opcode first): - @pytest.mark.parametrize("opcode,arg", [(Op.ADD, 123), (Op.MUL, 456)]) + + ```python + @pytest.mark.parametrize( + "opcode,arg", [(Op.ADD, 123), (Op.MUL, 456)] + ) + ``` Result: ["ADD", "MUL"] """ # Case 1: Direct opcode - Op.ADD @@ -200,6 +219,7 @@ def scan_benchmark_tests( Tuple of (config, pattern_sources) where: - config: mapping of pattern -> opcode counts - pattern_sources: mapping of pattern -> source file path + """ config: dict[str, list[int]] = {} pattern_sources: dict[str, Path] = {} diff --git a/packages/testing/src/execution_testing/cli/check_fixtures.py b/packages/testing/src/execution_testing/cli/check_fixtures.py index f5f13b3a0d..c8f53b367f 100644 --- a/packages/testing/src/execution_testing/cli/check_fixtures.py +++ b/packages/testing/src/execution_testing/cli/check_fixtures.py @@ -56,7 +56,9 @@ def check_json(json_file_path: Path) -> None: raise HashMismatchExceptionError( original_hash, new_hash, - message=f"Fixture hash attributes do not match for {fixture_name}", + message=( + f"Fixture hash attributes do not match for {fixture_name}" + ), ) if "hash" in fixture.info and fixture.info["hash"] != original_hash: info_hash = fixture.info["hash"] @@ -125,7 +127,9 @@ def get_input_files() -> Generator[Path, None, None]: with Progress( TextColumn( - f"[bold cyan]{{task.fields[filename]:<{filename_display_width}}}[/]", + "[bold cyan]" + f"{{task.fields[filename]:<{filename_display_width}}}" + "[/]", justify="left", ), BarColumn( diff --git 
a/packages/testing/src/execution_testing/cli/diff_opcode_counts.py b/packages/testing/src/execution_testing/cli/diff_opcode_counts.py index d256b0e97c..8922f5ff07 100644 --- a/packages/testing/src/execution_testing/cli/diff_opcode_counts.py +++ b/packages/testing/src/execution_testing/cli/diff_opcode_counts.py @@ -135,9 +135,11 @@ def compare_opcode_counts( "--remove-from-fixture-names", "-r", multiple=True, - help="String to be removed from the fixture name, in case the fixture names have changed, " - "in order to make the comparison easier. " - "Can be specified multiple times.", + help=( + "String to be removed from the fixture name, in case the fixture " + "names have changed, in order to make the comparison easier. " + "Can be specified multiple times." + ), ) def main( base: Path, @@ -205,7 +207,8 @@ def main( ) elif show_common: print( - f"\n{common_with_same_counts} fixtures have identical opcode counts" + f"\n{common_with_same_counts} fixtures have identical opcode " + "counts" ) diff --git a/packages/testing/src/execution_testing/cli/eest/commands/info.py b/packages/testing/src/execution_testing/cli/eest/commands/info.py index b09c73a52a..06eeda8a16 100644 --- a/packages/testing/src/execution_testing/cli/eest/commands/info.py +++ b/packages/testing/src/execution_testing/cli/eest/commands/info.py @@ -39,11 +39,12 @@ def info() -> None: version = AppConfig().version + git_commit = get_current_commit_hash_or_tag(shorten_hash=True) info_text = f""" {title} {click.style(f"v{version}", fg="blue", bold=True)} {"─" * 50} - Git commit: {click.style(get_current_commit_hash_or_tag(shorten_hash=True), fg="yellow")} + Git commit: {click.style(git_commit, fg="yellow")} Python: {click.style(platform.python_version(), fg="blue")} uv: {click.style(get_uv_version(), fg="magenta")} OS: {click.style(f"{platform.system()} {platform.release()}", fg="cyan")} diff --git a/packages/testing/src/execution_testing/cli/eest/make/commands/test.py b/packages/testing/src/execution_testing/cli/eest/make/commands/test.py index 1910785429..0e3750d2ec 100644 --- a/packages/testing/src/execution_testing/cli/eest/make/commands/test.py +++ b/packages/testing/src/execution_testing/cli/eest/make/commands/test.py @@ -170,10 +170,12 @@ def test() -> None: if fork in [dev_fork.name() for dev_fork in get_development_forks()]: fork_option = f" --until={fork}" + docs_url = DocsConfig().DOCS_URL__WRITING_TESTS click.echo( click.style( - f"\n 📝 Get started with tests: {DocsConfig().DOCS_URL__WRITING_TESTS}" - f"\n ⛽ To fill this test, run: `uv run fill {module_path}{fork_option}`", + f"\n 📝 Get started with tests: {docs_url}" + f"\n ⛽ To fill this test, run: " + f"`uv run fill {module_path}{fork_option}`", fg="cyan", ) ) diff --git a/packages/testing/src/execution_testing/cli/eest/quotes.py b/packages/testing/src/execution_testing/cli/eest/quotes.py index ea67650e00..d376cb4702 100644 --- a/packages/testing/src/execution_testing/cli/eest/quotes.py +++ b/packages/testing/src/execution_testing/cli/eest/quotes.py @@ -6,22 +6,33 @@ make_something_great = [ "🎨 Simplicity is the ultimate sophistication. - Leonardo D.", "🖌️ Simplicity is an acquired taste. - Katharine G.", - "💡 To create a memorable design you need to start with a thought that’s worth remembering." - " - Thomas M.", + ( + "💡 To create a memorable design you need to start with a thought " + "that's worth remembering. - Thomas M." + ), "🚀 Well begun is half done. - Aristotle", - "🖌️ Designers are crazy and yet sane enough to know where to draw the line. 
- Benjamin W.", + ( + "🖌️ Designers are crazy and yet sane enough to know where to draw " + "the line. - Benjamin W." + ), "🌟 Creativity is piercing the mundane to find the marvelous. - Bill M.", "🔍 Mistakes are the portals of discovery. - James J.", - "🧠 It's extremely difficult to be simultaneously concerned with the end-user experience of" - " whatever it is that you're building and the architecture of the program that delivers that" - " experience. - James H.", + ( + "🧠 It's extremely difficult to be simultaneously concerned with the " + "end-user experience of whatever it is that you're building and the " + "architecture of the program that delivers that experience. - James H." + ), "🧠 Good design is a lot like clear thinking made visual. - Edward T.", - "🚀 Innovation leads one to see the new in the old and distinguishes the ingenious from the" - " ingenuous. - Paul R.", + ( + "🚀 Innovation leads one to see the new in the old and distinguishes " + "the ingenious from the ingenuous. - Paul R." + ), "🔮 The best way to predict the future is to invent it. - Alan K.", - "🌟 Perfection is achieved, not when there is nothing more to add, but when there is nothing" - " left to take away. - Antoine d.", - "📏 You can’t improve what you don’t measure. - Tom D.", + ( + "🌟 Perfection is achieved, not when there is nothing more to add, " + "but when there is nothing left to take away. - Antoine d." + ), + "📏 You can't improve what you don't measure. - Tom D.", ] diff --git a/packages/testing/src/execution_testing/cli/evm_bytes.py b/packages/testing/src/execution_testing/cli/evm_bytes.py index ce5a1c6390..00043451cb 100644 --- a/packages/testing/src/execution_testing/cli/evm_bytes.py +++ b/packages/testing/src/execution_testing/cli/evm_bytes.py @@ -107,9 +107,9 @@ def process_evm_bytes(evm_bytes: bytes) -> List[OpcodeWithOperands]: # noqa: D1 return opcodes -def format_opcodes( +def format_opcodes( # noqa: D103 opcodes: List[OpcodeWithOperands], assembly: bool = False -) -> str: # noqa: D103 +) -> str: if assembly: opcodes_with_empty_lines: List[OpcodeWithOperands] = [] for i, op_with_operands in enumerate(opcodes): diff --git a/packages/testing/src/execution_testing/cli/extract_config.py b/packages/testing/src/execution_testing/cli/extract_config.py index 29397582b5..10cf25c965 100755 --- a/packages/testing/src/execution_testing/cli/extract_config.py +++ b/packages/testing/src/execution_testing/cli/extract_config.py @@ -27,7 +27,7 @@ ) from execution_testing.base_types import Alloc -from execution_testing.cli.pytest_commands.plugins.consume.simulators.helpers.ruleset import ( +from execution_testing.cli.pytest_commands.plugins.consume.simulators.helpers.ruleset import ( # noqa: E501 ruleset, ) from execution_testing.fixtures import ( @@ -130,6 +130,8 @@ def extract_client_files( class GenesisState(BaseModel): + """Model representing genesis state for configuration extraction.""" + header: FixtureHeader alloc: Alloc chain_id: int = Field(exclude=True) @@ -139,6 +141,7 @@ class GenesisState(BaseModel): def serialize_model( self, handler: SerializerFunctionWrapHandler ) -> dict[str, object]: + """Serialize the genesis state model to a dictionary.""" serialized = handler(self) output = serialized["header"] output["alloc"] = { @@ -189,9 +192,7 @@ def from_fixture(cls, fixture_path: Path) -> Self: ) def get_client_environment(self) -> dict: - """ - Get the environment variables for starting a client with the given fixture. 
- """ + """Get the env vars to start a client with a fixture.""" if self.fork not in ruleset: raise ValueError(f"Fork '{self.fork}' not found in hive ruleset") @@ -199,7 +200,8 @@ def get_client_environment(self) -> dict: "HIVE_CHAIN_ID": str(self.chain_id), "HIVE_FORK_DAO_VOTE": "1", "HIVE_NODETYPE": "full", - "HIVE_CHECK_LIVE_PORT": "8545", # Using RPC port for liveness check + # Using RPC port for liveness check + "HIVE_CHECK_LIVE_PORT": "8545", **{k: f"{v:d}" for k, v in ruleset[self.fork].items()}, } @@ -324,14 +326,16 @@ def extract_config( if len(new_containers) != 1: click.echo( - f"Expected exactly 1 new container, found {len(new_containers)}", + f"Expected exactly 1 new container, found " + f"{len(new_containers)}", err=True, ) sys.exit(1) container_id = new_containers.pop() click.echo( - f"Client started successfully (Container ID: {container_id})" + f"Client started successfully " + f"(Container ID: {container_id})" ) # Optionally list files in container diff --git a/packages/testing/src/execution_testing/cli/fillerconvert/verify_filled.py b/packages/testing/src/execution_testing/cli/fillerconvert/verify_filled.py index ed0c17e176..7e0555d3a9 100644 --- a/packages/testing/src/execution_testing/cli/fillerconvert/verify_filled.py +++ b/packages/testing/src/execution_testing/cli/fillerconvert/verify_filled.py @@ -79,7 +79,8 @@ def verify_refilled(refilled: Path, original: Path) -> int: f"test_name: {refilled_test_name}\n" f"original_name: {original}\n" f"refilled_hash: {refilled_result[0].hash}\n" - f"original_hash: {res.hash} f: {refilled_fork}, d: {d}, g: {g}, v: {v}" + f"original_hash: {res.hash} " + f"f: {refilled_fork}, d: {d}, g: {g}, v: {v}" ) found = True verified_vectors += 1 diff --git a/packages/testing/src/execution_testing/cli/fuzzer_bridge/cli.py b/packages/testing/src/execution_testing/cli/fuzzer_bridge/cli.py index 9fbc326853..7d463fd264 100644 --- a/packages/testing/src/execution_testing/cli/fuzzer_bridge/cli.py +++ b/packages/testing/src/execution_testing/cli/fuzzer_bridge/cli.py @@ -330,7 +330,8 @@ def process_directory_parallel( error_file, exception = error if not quiet: progress.console.print( - f"[red]Error processing {error_file}: {exception}[/red]" + f"[red]Error processing {error_file}: " + f"{exception}[/red]" ) # Update progress bar @@ -365,10 +366,11 @@ def process_directory_parallel( # Final status if not quiet: emoji = "✅" if error_count == 0 else "⚠️" + status = f"Done! {success_count} succeeded, {error_count} failed" progress.update( task_id, completed=file_count, - filename=f"Done! {success_count} succeeded, {error_count} failed {emoji}", + filename=f"{status} {emoji}", workers=num_workers, ) @@ -483,10 +485,11 @@ def process_directory( # Final status if not quiet: emoji = "✅" if error_count == 0 else "⚠️" + status = f"Done! {success_count} succeeded, {error_count} failed" progress.update( task_id, completed=file_count, - filename=f"Done! 
{success_count} succeeded, {error_count} failed {emoji}", + filename=f"{status} {emoji}", ) @@ -667,7 +670,9 @@ def batch_mode( "--workers", type=int, default=None, - help="Number of parallel workers (default: auto-detect based on CPU count)", + help=( + "Number of parallel workers (default: auto-detect based on CPU count)" + ), ) @click.option( "-b", @@ -680,8 +685,10 @@ def batch_mode( "--block-strategy", type=click.Choice(["distribute", "first-block"]), default="distribute", - help="Transaction distribution strategy: 'distribute' splits txs evenly, " - "'first-block' puts all txs in first block (default: distribute)", + help=( + "Transaction distribution strategy: 'distribute' splits txs evenly, " + "'first-block' puts all txs in first block (default: distribute)" + ), ) @click.option( "--block-time", @@ -739,7 +746,8 @@ def main( # Standard mode: require input_path and output_path if input_path is None or output_path is None: raise click.UsageError( - "INPUT_PATH and OUTPUT_PATH are required when not using --batch mode" + "INPUT_PATH and OUTPUT_PATH are required when not using " + "--batch mode" ) # Create transition tool t8n: TransitionTool diff --git a/packages/testing/src/execution_testing/cli/fuzzer_bridge/converter.py b/packages/testing/src/execution_testing/cli/fuzzer_bridge/converter.py index 1e62214bbc..ca575b42e5 100644 --- a/packages/testing/src/execution_testing/cli/fuzzer_bridge/converter.py +++ b/packages/testing/src/execution_testing/cli/fuzzer_bridge/converter.py @@ -155,7 +155,8 @@ def create_sender_eoa_map( # Verify private key matches address (safety check) assert Address(sender) == addr, ( - f"Private key for account {addr} does not match derived address {sender}" + f"Private key for account {addr} does not match derived " + f"address {sender}" ) senders[addr] = sender diff --git a/packages/testing/src/execution_testing/cli/gen_index.py b/packages/testing/src/execution_testing/cli/gen_index.py index cd80748c1f..1e4af37cf1 100644 --- a/packages/testing/src/execution_testing/cli/gen_index.py +++ b/packages/testing/src/execution_testing/cli/gen_index.py @@ -46,8 +46,9 @@ def count_json_files_exclude_index(start_path: Path) -> int: @click.command( help=( - "Generate an index file of all the json fixtures in the specified directory. " - "The index file is saved as 'index.json' in the specified directory." + "Generate an index file of all the json fixtures in the specified " + "directory. The index file is saved as 'index.json' in the specified " + "directory." ) ) @click.option( @@ -124,7 +125,8 @@ def generate_fixtures_index( ): if not quiet_mode: rich.print( - f"Index file [bold cyan]{output_file}[/] is up-to-date." + f"Index file [bold cyan]{output_file}[/] " + "is up-to-date." ) return except Exception as e: @@ -136,7 +138,9 @@ def generate_fixtures_index( filename_display_width = 25 with Progress( TextColumn( - f"[bold cyan]{{task.fields[filename]:<{filename_display_width}}}[/]", + "[bold cyan]" + f"{{task.fields[filename]:<{filename_display_width}}}" + "[/]", justify="left", table_column=Column(ratio=1), ), diff --git a/packages/testing/src/execution_testing/cli/generate_checklist_stubs.py b/packages/testing/src/execution_testing/cli/generate_checklist_stubs.py index 300426fc87..af657eeacf 100644 --- a/packages/testing/src/execution_testing/cli/generate_checklist_stubs.py +++ b/packages/testing/src/execution_testing/cli/generate_checklist_stubs.py @@ -132,7 +132,9 @@ class _CallableChecklistItem: @overload def __call__(self, func: F) -> F: ... 
@overload - def __call__(self, *, eip: Any = ..., **kwargs: Any) -> pytest.MarkDecorator: ... + def __call__( + self, *, eip: Any = ..., **kwargs: Any + ) -> pytest.MarkDecorator: ... def __str__(self) -> str: ... ''' @@ -173,11 +175,12 @@ def __str__(self) -> str: ... ) click.echo( - "\n💡 This stub file helps mypy understand that EIPChecklist classes are callable." + "\n💡 This stub file helps mypy understand that EIPChecklist " + "classes are callable." ) click.echo( - " You can now use @EIPChecklist.Opcode.Test.StackComplexOperations() " - "without type errors!" + " You can now use @EIPChecklist.Opcode.Test." + "StackComplexOperations() without type errors!" ) except ImportError as e: diff --git a/packages/testing/src/execution_testing/cli/gentest/test_context_providers.py b/packages/testing/src/execution_testing/cli/gentest/test_context_providers.py index 989796648e..b7163fc7c8 100644 --- a/packages/testing/src/execution_testing/cli/gentest/test_context_providers.py +++ b/packages/testing/src/execution_testing/cli/gentest/test_context_providers.py @@ -48,7 +48,8 @@ def _make_rpc_calls(self) -> None: """Make RPC calls to fetch transaction and block data.""" request = RPCRequest() print( - f"Perform tx request: eth_get_transaction_by_hash({self.transaction_hash})", + f"Perform tx request: eth_get_transaction_by_hash" + f"({self.transaction_hash})", file=stderr, ) self.transaction_response = request.eth_get_transaction_by_hash( diff --git a/packages/testing/src/execution_testing/cli/hasher.py b/packages/testing/src/execution_testing/cli/hasher.py index 894ee45195..ecb49665ac 100644 --- a/packages/testing/src/execution_testing/cli/hasher.py +++ b/packages/testing/src/execution_testing/cli/hasher.py @@ -90,7 +90,8 @@ def from_json_file( if not isinstance(hash_value, str): raise TypeError( - f"Expected hash to be a string in {key}, got {type(hash_value)}" + f"Expected hash to be a string in {key}, " + f"got {type(hash_value)}" ) item_hash_bytes = bytes.fromhex(hash_value[2:]) diff --git a/packages/testing/src/execution_testing/cli/modify_static_test_gas_limits.py b/packages/testing/src/execution_testing/cli/modify_static_test_gas_limits.py index 47087254d9..70b0d1c1ec 100644 --- a/packages/testing/src/execution_testing/cli/modify_static_test_gas_limits.py +++ b/packages/testing/src/execution_testing/cli/modify_static_test_gas_limits.py @@ -16,7 +16,7 @@ HexNumber, ZeroPaddedHexNumber, ) -from execution_testing.cli.pytest_commands.plugins.filler.static_filler import ( +from execution_testing.cli.pytest_commands.plugins.filler.static_filler import ( # noqa: E501 NoIntResolver, ) from execution_testing.specs import StateStaticTest @@ -77,8 +77,9 @@ def _check_fixtures( try: parsed_test_file = StaticTestFile.model_validate(loaded_yaml) except Exception as e: + yaml_dump = json.dumps(loaded_yaml, indent=2) raise Exception( - f"Unable to parse file {test_file}: {json.dumps(loaded_yaml, indent=2)}" + f"Unable to parse file {test_file}: {yaml_dump}" ) from e else: parsed_test_file = StaticTestFile.model_validate_json( @@ -95,7 +96,8 @@ def _check_fixtures( if len(parsed_test.transaction.gas_limit) != 1: if dry_run or verbose: print( - f"Test file {test_file} contains more than one test (after parsing), skipping." + f"Test file {test_file} contains more than one test " + "(after parsing), skipping." ) continue @@ -113,8 +115,8 @@ def _check_fixtures( if gas_value is None: if dry_run or verbose: print( - f"Test file {test_file} contains at least one test that cannot " - "be updated, skipping." 
+ f"Test file {test_file} contains at least one test " + "that cannot be updated, skipping." ) continue else: @@ -134,13 +136,15 @@ def _check_fixtures( if max_gas_limit is not None and new_gas_limit > max_gas_limit: if dry_run or verbose: print( - f"New gas limit ({new_gas_limit}) exceeds max ({max_gas_limit})" + f"New gas limit ({new_gas_limit}) " + f"exceeds max ({max_gas_limit})" ) continue if dry_run or verbose: print( - f"Test file {test_file} requires modification ({new_gas_limit})" + f"Test file {test_file} requires modification " + f"({new_gas_limit})" ) # Find the appropriate pattern to replace the current gas limit @@ -171,7 +175,8 @@ def _check_fixtures( # Validate that a replacement pattern was found assert substitute_pattern is not None, ( - f"Current gas limit ({attempted_patterns}) not found in {test_file}" + f"Current gas limit ({attempted_patterns}) " + f"not found in {test_file}" ) assert substitute_string is not None @@ -212,15 +217,19 @@ def _check_fixtures( exists=True, file_okay=True, dir_okay=False, readable=True ), required=True, - help="The input json file or directory containing json listing the new gas limits for the " - "static test files.", + help=( + "The input json file or directory containing json listing the new " + "gas limits for the static test files." + ), ) @click.option( "--max-gas-limit", default=MAX_GAS_LIMIT, expose_value=True, - help="Gas limit that triggers a test modification, and also the maximum value that a test " - "should have after modification.", + help=( + "Gas limit that triggers a test modification, and also the maximum " + "value that a test should have after modification." + ), ) @click.option( "--dry-run", diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/checklist.py b/packages/testing/src/execution_testing/cli/pytest_commands/checklist.py index e76d09396f..e51f3cc593 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/checklist.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/checklist.py @@ -13,7 +13,7 @@ "-o", type=click.Path(file_okay=False, dir_okay=True, writable=True), default="./checklists", - help="Directory to output the generated checklists (default: ./checklists)", + help="Directory to output checklists (default: ./checklists)", ) @click.option( "--eip", diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/fill.py b/packages/testing/src/execution_testing/cli/pytest_commands/fill.py index 5d58519411..d9296c91a3 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/fill.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/fill.py @@ -100,7 +100,7 @@ def _create_single_phase_with_pre_alloc_groups( ] def _add_default_ignores(self, args: List[str]) -> List[str]: - """Add default ignore paths for directories not used by fill command.""" + """Add default ignore paths for directories not used by fill.""" # Directories to ignore by default default_ignores = [ "tests/evm_tools", diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/consume.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/consume.py index 0f069b61df..f95ad6bf1f 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/consume.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/consume.py @@ -289,7 +289,8 @@ def validate_local_path(path: Path) -> "FixturesSource": ) if not any(path.glob("**/*.json")): pytest.exit( - f"Specified 
fixture directory '{path}' does not contain any JSON files." + f"Specified fixture directory '{path}' does not contain " + "any JSON files." ) return FixturesSource(input_option=str(path), path=path) @@ -361,10 +362,11 @@ def pytest_addoption(parser: pytest.Parser) -> None: # noqa: D103 dest="fixtures_source", default=None, help=( - "Specify the JSON test fixtures source. Can be a local directory, a URL pointing to a " - " fixtures.tar.gz archive, a release name and version in the form of `NAME@v1.2.3` " - "(`stable` and `develop` are valid release names, and `latest` is a valid version), " - "or the special keyword 'stdin'. " + "Specify the JSON test fixtures source. Can be a local " + "directory, a URL pointing to a fixtures.tar.gz archive, a " + "release name and version in the form of `NAME@v1.2.3` " + "(`stable` and `develop` are valid release names, and `latest` " + "is a valid version), or the special keyword 'stdin'. " f"Defaults to the following local directory: '{default_input()}'." ), ) @@ -375,7 +377,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: # noqa: D103 default=CACHED_DOWNLOADS_DIRECTORY, help=( "Specify the path where the downloaded fixtures are cached. " - f"Defaults to the following directory: '{CACHED_DOWNLOADS_DIRECTORY}'." + "Defaults to the following directory: " + f"'{CACHED_DOWNLOADS_DIRECTORY}'." ), ) consume_group.addoption( @@ -384,9 +387,10 @@ def pytest_addoption(parser: pytest.Parser) -> None: # noqa: D103 dest="extract_to_folder", default=None, help=( - "Extract downloaded fixtures to the specified directory. Only valid with 'cache' " - "command. When used, fixtures are extracted directly to this path instead of the " - "user's execution-spec-tests cache directory." + "Extract downloaded fixtures to the specified directory. Only " + "valid with 'cache' command. When used, fixtures are extracted " + "directly to this path instead of the user's execution-spec-" + "tests cache directory." ), ) if "cache" in sys.argv: @@ -408,13 +412,16 @@ def pytest_addoption(parser: pytest.Parser) -> None: # noqa: D103 type=SimLimitBehavior.from_string, default=SimLimitBehavior(".*"), help=( - "Filter tests by either a regex pattern or a literal test case ID. To match a " - "test case by its exact ID, prefix the ID with `id:`. The string following `id:` " - "will be automatically escaped so that all special regex characters are treated as " - "literals. Without the `id:` prefix, the argument is interpreted as a Python regex " - "pattern. To see which test cases are matched, without executing them, prefix with " - '`collectonly:`, e.g. `--sim.limit "collectonly:.*eip4788.*fork_Prague.*"`. ' - "To list all available test case IDs, set the value to `collectonly`." + "Filter tests by either a regex pattern or a literal test case " + "ID. To match a test case by its exact ID, prefix the ID with " + "`id:`. The string following `id:` will be automatically escaped " + "so that all special regex characters are treated as literals. " + "Without the `id:` prefix, the argument is interpreted as a " + "Python regex pattern. To see which test cases are matched, " + "without executing them, prefix with `collectonly:`, e.g. " + '`--sim.limit "collectonly:.*eip4788.*fork_Prague.*"`. ' + "To list all available test case IDs, set the value to " + "`collectonly`." 
), ) @@ -498,7 +505,8 @@ def pytest_configure(config: pytest.Config) -> None: # noqa: D103 for fixture_format in BaseFixture.formats.values(): config.addinivalue_line( "markers", - f"{fixture_format.format_name}: Tests in `{fixture_format.format_name}` format ", + f"{fixture_format.format_name}: " + f"Tests in `{fixture_format.format_name}` format ", ) # All forked defined within EEST @@ -518,8 +526,8 @@ def pytest_configure(config: pytest.Config) -> None: # noqa: D103 if config.option.sim_limit: if config.option.dest_regex != ".*": pytest.exit( - "Both the --sim.limit (via env var?) and the --regex flags are set. " - "Please only set one of them." + "Both the --sim.limit (via env var?) and the --regex flags " + "are set. Please only set one of them." ) config.option.dest_regex = config.option.sim_limit.pattern if config.option.sim_limit.collectonly: diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/direct/conftest.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/direct/conftest.py index 14d4744884..980c421876 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/direct/conftest.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/direct/conftest.py @@ -64,8 +64,9 @@ def pytest_addoption(parser: pytest.Parser) -> None: # noqa: D103 type=Path, default=[], help=( - "Path to a geth evm executable that provides `blocktest` or `statetest`. " - "Flag can be used multiple times to specify multiple fixture consumer binaries." + "Path to a geth evm executable that provides `blocktest` or " + "`statetest`. Flag can be used multiple times to specify " + "multiple fixture consumer binaries." ), ) consume_group.addoption( @@ -73,7 +74,10 @@ def pytest_addoption(parser: pytest.Parser) -> None: # noqa: D103 action="store_true", dest="consumer_collect_traces", default=False, - help="Collect traces of the execution information from the fixture consumer tool.", + help=( + "Collect traces of the execution information from the fixture " + "consumer tool." + ), ) debug_group = parser.getgroup("debug", "Arguments defining debug behavior") debug_group.addoption( @@ -104,16 +108,18 @@ def pytest_configure(config: pytest.Config) -> None: # noqa: D103 elif not fixture_consumers and config.option.collectonly: warnings.warn( ( - "No fixture consumer binaries provided; using a dummy consumer for collect-only; " - "all possible fixture formats will be collected. " - "Specify fixture consumer(s) via `--bin` to see actual collection results." + "No fixture consumer binaries provided; using a dummy " + "consumer for collect-only; all possible fixture formats " + "will be collected. Specify fixture consumer(s) via `--bin` " + "to see actual collection results." ), stacklevel=1, ) fixture_consumers = [CollectOnlyFixtureConsumer()] elif not fixture_consumers: pytest.exit( - "No fixture consumer binaries provided; please specify a binary path via `--bin`." + "No fixture consumer binaries provided; please specify a binary " + "path via `--bin`." 
) config.fixture_consumers = fixture_consumers # type: ignore[attr-defined] diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/base.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/base.py index 09225a01a0..cd80201b90 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/base.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/base.py @@ -33,7 +33,8 @@ def check_live_port(test_suite_name: str) -> Literal[8545, 8551]: elif test_suite_name in {"eels/consume-engine", "eels/consume-sync"}: return 8551 raise ValueError( - f"Unexpected test suite name '{test_suite_name}' while setting HIVE_CHECK_LIVE_PORT." + f"Unexpected test suite name '{test_suite_name}' while setting " + "HIVE_CHECK_LIVE_PORT." ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/exceptions.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/exceptions.py index ca1de25f9f..9201cd2403 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/exceptions.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/exceptions.py @@ -24,8 +24,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="disable_strict_exception_matching", default="", help=( - "Comma-separated list of client names and/or forks which should NOT use strict " - "exception matching." + "Comma-separated list of client names and/or forks which should " + "NOT use strict exception matching." ), ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/exceptions.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/exceptions.py index 5e70675ada..b8c451c072 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/exceptions.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/exceptions.py @@ -73,7 +73,10 @@ def __init__( "\nIs the fork configuration correct?" ) else: - message += "There were no differences in the expected and received genesis block headers." + message += ( + "There were no differences in the expected and received " + "genesis block headers." 
+ ) super().__init__(message) @staticmethod diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/timing.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/timing.py index 1ae83a17b6..5201938b4b 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/timing.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/timing.py @@ -49,9 +49,10 @@ def formatted(self, precision: int = 4, indent: int = 0) -> str: """Recursively format the timing data with correct indentation.""" assert self.start_time is not None assert self.end_time is not None + time_diff = self.end_time - self.start_time formatted = ( f"{' ' * indent}{self.name}: " - f"{TimingData.format_float(self.end_time - self.start_time, precision)}\n" + f"{TimingData.format_float(time_diff, precision)}\n" ) for timing in self.timings: formatted += timing.formatted(precision, indent + 2) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/rlp/conftest.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/rlp/conftest.py index 8fa162f152..a24425d4dd 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/rlp/conftest.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/rlp/conftest.py @@ -38,7 +38,10 @@ def test_suite_name() -> str: @pytest.fixture(scope="module") def test_suite_description() -> str: """The description of the hive test suite used in this simulator.""" - return "Execute blockchain tests by providing RLP-encoded blocks to a client upon start-up." + return ( + "Execute blockchain tests by providing RLP-encoded blocks to a " + "client upon start-up." + ) @pytest.fixture(scope="function") diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/simulator_logic/test_via_engine.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/simulator_logic/test_via_engine.py index f6a1973b12..9abe0c74a0 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/simulator_logic/test_via_engine.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/simulator_logic/test_via_engine.py @@ -77,7 +77,8 @@ def test_blockchain_via_engine( expected = fixture.genesis.block_hash got = genesis_block["hash"] logger.fail( - f"Genesis block hash mismatch. Expected: {expected}, Got: {got}" + f"Genesis block hash mismatch. " + f"Expected: {expected}, Got: {got}" ) raise GenesisBlockMismatchExceptionError( expected_header=fixture.genesis, @@ -98,17 +99,15 @@ def test_blockchain_via_engine( with payload_timing.time( f"engine_newPayloadV{payload.new_payload_version}" ): - logger.info( - f"Sending engine_newPayloadV{payload.new_payload_version}..." 
- ) + version = payload.new_payload_version + logger.info(f"Sending engine_newPayloadV{version}...") try: payload_response = engine_rpc.new_payload( *payload.params, version=payload.new_payload_version, ) - logger.info( - f"Payload response status: {payload_response.status}" - ) + status = payload_response.status + logger.info(f"Payload response status: {status}") expected_validity = ( PayloadStatusEnum.VALID if payload.valid() @@ -121,8 +120,8 @@ def test_blockchain_via_engine( ) if payload.error_code is not None: raise LoggedError( - f"Client failed to raise expected Engine API error code: " - f"{payload.error_code}" + "Client failed to raise expected Engine API " + f"error code: {payload.error_code}" ) elif ( payload_response.status @@ -130,7 +129,8 @@ def test_blockchain_via_engine( ): if payload_response.validation_error is None: raise LoggedError( - "Client returned INVALID but no validation error was provided." + "Client returned INVALID but no " + "validation error was provided." ) if isinstance( payload_response.validation_error, @@ -138,9 +138,12 @@ def test_blockchain_via_engine( ): message = ( "Undefined exception message: " - f'expected exception: "{payload.validation_error}", ' - f'returned exception: "{payload_response.validation_error}" ' - f'(mapper: "{payload_response.validation_error.mapper_name}")' + f"expected exception: " + f'"{payload.validation_error}", ' + f"returned exception: " + f'"{payload_response.validation_error}" ' + f"(mapper: " + f'"{payload_response.validation_error.mapper_name}")' # noqa: E501 ) if strict_exception_matching: raise LoggedError(message) @@ -152,9 +155,12 @@ def test_blockchain_via_engine( not in payload_response.validation_error ): message = ( - "Client returned unexpected validation error: " - f'got: "{payload_response.validation_error}" ' - f'expected: "{payload.validation_error}"' + "Client returned unexpected " + "validation error: " + f"got: " + f'"{payload_response.validation_error}" ' # noqa: E501 + f"expected: " + f'"{payload.validation_error}"' ) if strict_exception_matching: raise LoggedError(message) @@ -163,7 +169,8 @@ def test_blockchain_via_engine( except JSONRPCError as e: logger.info( - f"JSONRPC error encountered: {e.code} - {e.message}" + f"JSONRPC error encountered: " + f"{e.code} - {e.message}" ) if payload.error_code is None: raise LoggedError( @@ -171,7 +178,8 @@ def test_blockchain_via_engine( ) from e if e.code != payload.error_code: raise LoggedError( - f"Unexpected error code: {e.code}, expected: {payload.error_code}" + f"Unexpected error code: {e.code}, " + f"expected: {payload.error_code}" ) from e if payload.valid(): @@ -196,8 +204,9 @@ def test_blockchain_via_engine( forkchoice_response.payload_status.status != PayloadStatusEnum.VALID ): + status = forkchoice_response.payload_status.status raise LoggedError( - f"unexpected status: want {PayloadStatusEnum.VALID}," - f" got {forkchoice_response.payload_status.status}" + f"unexpected status: want " + f"{PayloadStatusEnum.VALID}, got {status}" ) logger.info("All payloads processed successfully.") diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/simulator_logic/test_via_rlp.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/simulator_logic/test_via_rlp.py index ef85b9f55f..b219edf74e 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/simulator_logic/test_via_rlp.py +++ 
b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/simulator_logic/test_via_rlp.py @@ -64,7 +64,8 @@ def test_via_rlp( fixture_value = last_block_header[block_field] if str(block_value) != str(fixture_value): mismatches.append( - f" {block_field}: got `{block_value}`, expected `{fixture_value}`" + f" {block_field}: got `{block_value}`, " + f"expected `{fixture_value}`" ) raise AssertionError( "blockHash mismatch in last block - field mismatches:" @@ -72,6 +73,7 @@ def test_via_rlp( ) except Exception: raise AssertionError( - f"blockHash mismatch in last block: got `{block['hash']}`, " + f"blockHash mismatch in last block: " + f"got `{block['hash']}`, " f"expected `{fixture.last_block_hash}`" ) from None diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/simulator_logic/test_via_sync.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/simulator_logic/test_via_sync.py index 9369b18940..a989ed5b48 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/simulator_logic/test_via_sync.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/simulator_logic/test_via_sync.py @@ -97,7 +97,8 @@ def test_blockchain_via_sync( expected = fixture.genesis.block_hash got = genesis_block["hash"] logger.fail( - f"Genesis block hash mismatch. Expected: {expected}, Got: {got}" + f"Genesis block hash mismatch. " + f"Expected: {expected}, Got: {got}" ) raise GenesisBlockMismatchExceptionError( expected_header=fixture.genesis, @@ -122,18 +123,16 @@ def test_blockchain_via_sync( with payload_timing.time( f"engine_newPayloadV{payload.new_payload_version}" ): - logger.info( - f"Sending engine_newPayloadV{payload.new_payload_version}..." - ) + version = payload.new_payload_version + logger.info(f"Sending engine_newPayloadV{version}...") # Note: This is similar to the logic in test_via_engine.py try: payload_response = engine_rpc.new_payload( *payload.params, version=payload.new_payload_version, ) - logger.info( - f"Payload response status: {payload_response.status}" - ) + status = payload_response.status + logger.info(f"Payload response status: {status}") expected_validity = ( PayloadStatusEnum.VALID if payload.valid() @@ -146,8 +145,8 @@ def test_blockchain_via_sync( ) if payload.error_code is not None: raise LoggedError( - f"Client failed to raise expected Engine API error code: " - f"{payload.error_code}" + "Client failed to raise expected Engine API " + f"error code: {payload.error_code}" ) elif ( payload_response.status @@ -155,7 +154,8 @@ def test_blockchain_via_sync( ): if payload_response.validation_error is None: raise LoggedError( - "Client returned INVALID but no validation error was provided." + "Client returned INVALID but no " + "validation error was provided." 
) if isinstance( payload_response.validation_error, @@ -163,9 +163,12 @@ def test_blockchain_via_sync( ): message = ( "Undefined exception message: " - f'expected exception: "{payload.validation_error}", ' - f'returned exception: "{payload_response.validation_error}" ' - f'(mapper: "{payload_response.validation_error.mapper_name}")' + f"expected exception: " + f'"{payload.validation_error}", ' + f"returned exception: " + f'"{payload_response.validation_error}" ' + f"(mapper: " + f'"{payload_response.validation_error.mapper_name}")' # noqa: E501 ) if strict_exception_matching: raise LoggedError(message) @@ -177,9 +180,12 @@ def test_blockchain_via_sync( not in payload_response.validation_error ): message = ( - "Client returned unexpected validation error: " - f'got: "{payload_response.validation_error}" ' - f'expected: "{payload.validation_error}"' + "Client returned unexpected " + "validation error: " + f"got: " + f'"{payload_response.validation_error}" ' # noqa: E501 + f"expected: " + f'"{payload.validation_error}"' ) if strict_exception_matching: raise LoggedError(message) @@ -188,7 +194,8 @@ def test_blockchain_via_sync( except JSONRPCError as e: logger.info( - f"JSONRPC error encountered: {e.code} - {e.message}" + f"JSONRPC error encountered: " + f"{e.code} - {e.message}" ) if payload.error_code is None: raise LoggedError( @@ -196,7 +203,8 @@ def test_blockchain_via_sync( ) from e if e.code != payload.error_code: raise LoggedError( - f"Unexpected error code: {e.code}, expected: {payload.error_code}" + f"Unexpected error code: {e.code}, " + f"expected: {payload.error_code}" ) from e if payload.valid(): @@ -221,9 +229,10 @@ def test_blockchain_via_sync( forkchoice_response.payload_status.status != PayloadStatusEnum.VALID ): + status = forkchoice_response.payload_status.status raise LoggedError( - f"unexpected status: want {PayloadStatusEnum.VALID}," - f" got {forkchoice_response.payload_status.status}" + f"unexpected status: want " + f"{PayloadStatusEnum.VALID}, got {status}" ) last_valid_block_hash = payload.params[0].block_hash @@ -234,7 +243,8 @@ def test_blockchain_via_sync( # sync_payload creates the final block that the sync client will sync to if not fixture.sync_payload: pytest.fail( - "Sync tests require a syncPayload that is not present in this test." + "Sync tests require a syncPayload that is not present in this " + "test." ) with timing_data.time("Send sync payload to client under test"): @@ -277,7 +287,8 @@ def test_blockchain_via_sync( ) except JSONRPCError as e: logger.error( - f"Error sending sync payload to client under test: {e.code} - {e.message}" + f"Error sending sync payload to client under test: " + f"{e.code} - {e.message}" ) raise @@ -297,12 +308,13 @@ def test_blockchain_via_sync( ) if response.payload_status.status != PayloadStatusEnum.VALID: raise LoggedError( - f"Unexpected status on sync client forkchoice updated to genesis: " - f"{response.payload_status.status}" + "Unexpected status on sync client forkchoice updated to " + f"genesis: {response.payload_status.status}" ) except ForkchoiceUpdateTimeoutError as e: raise LoggedError( - f"Timed out waiting for sync client forkchoice update to genesis: {e}" + "Timed out waiting for sync client forkchoice update to " + f"genesis: {e}" ) from None # Add peer using admin_addPeer This seems to be required... 
TODO: we can @@ -369,13 +381,13 @@ def test_blockchain_via_sync( *last_valid_payload.params, version=last_valid_payload.new_payload_version, ) - logger.info( - f"Sync client newPayload response: {sync_payload_response.status}" - ) + status = sync_payload_response.status + logger.info(f"Sync client newPayload response: {status}") # send forkchoice update pointing to latest block logger.info( - "Sending forkchoice update with last valid block to trigger sync..." + "Sending forkchoice update with last valid block to trigger " + "sync..." ) sync_forkchoice_response = sync_engine_rpc.forkchoice_updated( forkchoice_state=last_valid_block_forkchoice_state, @@ -395,16 +407,18 @@ def test_blockchain_via_sync( == PayloadStatusEnum.ACCEPTED ): logger.info( - "Sync client accepted the block, may start syncing ancestors" + "Sync client accepted the block, may start syncing " + "ancestors" ) - # Wait for P2P connections after sync starts - # Note: Reth does not report peer count but still syncs successfully + # Wait for P2P connections after sync starts. Note: Reth does not + # report peer count but still syncs successfully try: assert sync_net_rpc is not None, "sync_net_rpc is required" sync_net_rpc.wait_for_peer_connection() logger.debug( - "Peer connection verified on sync client after sync trigger" + "Peer connection verified on sync client after sync " + "trigger" ) except PeerConnectionTimeoutError: try: @@ -419,11 +433,13 @@ def test_blockchain_via_sync( except Exception as e: logger.warning( - f"Failed to trigger sync with newPayload/forkchoice update: {e}" + "Failed to trigger sync with newPayload/forkchoice update: " + f"{e}" ) else: logger.warning( - f"Could not find payload for block {last_valid_block_hash} to send to sync client" + f"Could not find payload for block {last_valid_block_hash} to " + "send to sync client" ) # Wait for synchronization with continuous forkchoice updates @@ -449,12 +465,14 @@ def test_blockchain_via_sync( ) if response.payload_status.status != PayloadStatusEnum.VALID: raise LoggedError( - f"Sync client failed to sync to block {last_valid_block_hash}: " - f"unexpected status {response.payload_status.status}" + f"Sync client failed to sync to block " + f"{last_valid_block_hash}: unexpected status " + f"{response.payload_status.status}" ) except ForkchoiceUpdateTimeoutError as e: raise LoggedError( - f"Sync client timed out syncing to block {last_valid_block_hash}: {e}" + f"Sync client timed out syncing to block " + f"{last_valid_block_hash}: {e}" ) from None logger.info("Sync verification successful! 
FCU returned VALID.") diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/single_test_client.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/single_test_client.py index d6cff56670..4045f92221 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/single_test_client.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/single_test_client.py @@ -48,7 +48,8 @@ def environment( chain_id = str(Number(fixture.config.chain_id)) return { "HIVE_CHAIN_ID": chain_id, - "HIVE_NETWORK_ID": chain_id, # Use same value for P2P network compatibility + # Use same value for P2P network compatibility + "HIVE_NETWORK_ID": chain_id, "HIVE_FORK_DAO_VOTE": "1", "HIVE_NODETYPE": "full", "HIVE_CHECK_LIVE_PORT": str(check_live_port), @@ -76,7 +77,8 @@ def genesis_header(fixture: BlockchainFixtureCommon) -> FixtureHeader: @pytest.fixture(scope="function") def client( hive_test: HiveTest, - client_files: dict, # configured within: rlp/conftest.py & engine/conftest.py + # configured within: rlp/conftest.py & engine/conftest.py + client_files: dict, environment: dict, client_type: ClientType, total_timing_data: TimingData, @@ -85,9 +87,8 @@ def client( Initialize the client with the appropriate files and environment variables. """ logger.info(f"Starting client ({client_type.name})...") - logger.debug( - f"Main client Network ID: {environment.get('HIVE_NETWORK_ID', 'NOT SET!')}" - ) + network_id = environment.get("HIVE_NETWORK_ID", "NOT SET!") + logger.debug(f"Main client Network ID: {network_id}") logger.debug( f"Main client Chain ID: {environment.get('HIVE_CHAIN_ID', 'NOT SET!')}" ) @@ -98,8 +99,9 @@ def client( files=client_files, ) error_message = ( - f"Unable to connect to the client container ({client_type.name}) via Hive during test " - "setup. Check the client or Hive server logs for more information." + f"Unable to connect to the client container ({client_type.name}) " + "via Hive during test setup. Check the client or Hive server logs " + "for more information." 
) assert client is not None, error_message logger.info(f"Client ({client_type.name}) ready!") diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/sync/conftest.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/sync/conftest.py index d460280625..ad01454d7c 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/sync/conftest.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/sync/conftest.py @@ -246,7 +246,8 @@ def sync_client( assert sync_client is not None, error_message logger.info( - f"Sync client ({sync_client_type.name}) started with IP: {sync_client.ip}" + f"Sync client ({sync_client_type.name}) started with IP: " + f"{sync_client.ip}" ) yield sync_client diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/test_case_description.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/test_case_description.py index 421b310383..d73c00a0a6 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/test_case_description.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/test_case_description.py @@ -58,13 +58,15 @@ def hive_clients_yaml_generator_command( .yaml() .replace(" ", " ") ) - return f'echo "\\\n{yaml_content}" > {hive_clients_yaml_target_filename}' + target = hive_clients_yaml_target_filename + return f'echo "\\\n{yaml_content}" > {target}' except Exception as e: raise ValueError(f"Failed to generate YAML: {str(e)}") from e except ValueError as e: error_message = str(e) warnings.warn( - f"{error_message}. The Hive clients YAML generator command will not be available.", + f"{error_message}. The Hive clients YAML generator command will " + "not be available.", stacklevel=2, ) @@ -72,11 +74,16 @@ def hive_clients_yaml_generator_command( issue_body = ( f"Error: {error_message}\nHive version: {hive_info.commit}\n" ) - issue_url = f"https://github.com/ethereum/execution-spec-tests/issues/new?title={urllib.parse.quote(issue_title)}&body={urllib.parse.quote(issue_body)}" + issue_url = ( + "https://github.com/ethereum/execution-spec-tests/issues/new" + f"?title={urllib.parse.quote(issue_title)}" + f"&body={urllib.parse.quote(issue_body)}" + ) return ( f"Error: {error_message}\n" - f'Please create an issue to report this problem.' + f'Please create an issue to report ' + "this problem." ) @@ -148,7 +155,10 @@ def hive_dev_command( Return the command used to instantiate hive alongside the `consume` command. 
""" - return f"./hive --dev {hive_client_config_file_parameter} --client {client_type.name}" + return ( + f"./hive --dev {hive_client_config_file_parameter} " + f"--client {client_type.name}" + ) @pytest.fixture(scope="function") diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/tests/test_consume_args.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/tests/test_consume_args.py index f64c1ba896..a601e1353e 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/tests/test_consume_args.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/tests/test_consume_args.py @@ -201,7 +201,10 @@ def test_consume_simlimit_collectonly( pytester.copy_example( name="src/execution_testing/cli/pytest_commands/pytest_ini_files/pytest-consume.ini" ) - consume_test_path = "src/execution_testing/cli/pytest_commands/plugins/consume/direct/test_via_direct.py" + consume_test_path = ( + "src/execution_testing/cli/pytest_commands/plugins/" + "consume/direct/test_via_direct.py" + ) args = [ "-c", "pytest-consume.ini", diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/tests/test_fixtures_source_input_types.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/tests/test_fixtures_source_input_types.py index 34790d2e82..241fda66d3 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/tests/test_fixtures_source_input_types.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/tests/test_fixtures_source_input_types.py @@ -113,7 +113,8 @@ def test_output_formatting_without_release_page_for_direct_urls( elif not config.fixtures_source.is_local: reason += "Fixtures downloaded and cached." reason += f"\nPath: {config.fixtures_source.path}" - reason += f"\nInput: {config.fixtures_source.url or config.fixtures_source.path}" + input_val = config.fixtures_source.url or config.fixtures_source.path + reason += f"\nInput: {input_val}" if config.fixtures_source.release_page: reason += f"\nRelease page: {config.fixtures_source.release_page}" @@ -144,7 +145,8 @@ def test_output_formatting_with_release_page_for_specs(self) -> None: elif not config.fixtures_source.is_local: reason += "Fixtures downloaded and cached." reason += f"\nPath: {config.fixtures_source.path}" - reason += f"\nInput: {config.fixtures_source.url or config.fixtures_source.path}" + input_val = config.fixtures_source.url or config.fixtures_source.path + reason += f"\nInput: {input_val}" if config.fixtures_source.release_page: reason += f"\nRelease page: {config.fixtures_source.release_page}" diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/custom_logging/plugin_logging.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/custom_logging/plugin_logging.py index 6b28640412..3fbba65b27 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/custom_logging/plugin_logging.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/custom_logging/plugin_logging.py @@ -9,8 +9,8 @@ use case, timestamps are essential to verify timing issues against the clients log. -This module provides the pytest plugin hooks that configure logging for -pytest sessions. The core logging functionality is in execution_testing.logging. +This module provides the pytest plugin hooks that configure logging for pytest +sessions. 
The core logging functionality is in execution_testing.logging. """ import functools @@ -55,8 +55,9 @@ def pytest_addoption(parser: pytest.Parser) -> None: # noqa: D103 type=LogLevel.from_cli, dest="eest_log_level", help=( - "The logging level to use in the test session: DEBUG, INFO, WARNING, ERROR or " - "CRITICAL, default - INFO. An integer in [0, 50] may be also provided." + "The logging level to use in the test session: DEBUG, INFO, " + "WARNING, ERROR or CRITICAL, default - INFO. An integer in " + "[0, 50] may be also provided." ), ) logging_group.addoption( @@ -64,7 +65,10 @@ def pytest_addoption(parser: pytest.Parser) -> None: # noqa: D103 action="store", default=None, dest="eest_log_dir", - help="Directory to write log files. Defaults to ./logs if not specified.", + help=( + "Directory to write log files. Defaults to ./logs if not " + "specified." + ), ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/contracts.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/contracts.py index 6f143a8053..b11216fad2 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/contracts.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/contracts.py @@ -89,5 +89,6 @@ def deploy_deterministic_factory_contract( deployment_contract_code = eth_rpc.get_code(DETERMINISTIC_FACTORY_ADDRESS) logger.info(f"Deployment contract code: {deployment_contract_code}") assert deployment_contract_code == DETERMINISTIC_FACTORY_BYTECODE, ( - f"Deployment contract code is not the expected code: {deployment_contract_code} != {DETERMINISTIC_FACTORY_BYTECODE}" + f"Deployment contract code is not the expected code: " + f"{deployment_contract_code} != {DETERMINISTIC_FACTORY_BYTECODE}" ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/eth_config.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/eth_config.py index 3202f20c1b..334d0a53b7 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/eth_config.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/eth_config.py @@ -55,8 +55,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: type=str, default=None, help=( - "Name of the network to verify for the RPC client. Supported networks by default: " - f"{', '.join(DEFAULT_NETWORKS.root.keys())}." + "Name of the network to verify for the RPC client. Supported " + f"networks by default: {', '.join(DEFAULT_NETWORKS.root.keys())}." ), ) eth_config_group.addoption( @@ -66,10 +66,14 @@ def pytest_addoption(parser: pytest.Parser) -> None: required=False, type=Path, default=None, - help="Path to the yml file that contains custom network configuration " - "(e.g. ./src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/networks.yml).\nIf no config is provided " - "then majority mode will be used for devnet testing (clients that have a different " - "response than the majority of clients will fail the test)", + help=( + "Path to the yml file that contains custom network configuration " + "(e.g. ./src/execution_testing/cli/pytest_commands/plugins/" + "execute/eth_config/networks.yml). 
If no config is provided then " + "majority mode will be used for devnet testing (clients that have " + "a different response than the majority of clients will fail the " + "test)" + ), ) eth_config_group.addoption( "--clients", @@ -78,9 +82,11 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="clients", type=str, default=None, - help="Comma-separated list of clients to be tested in majority mode. Example: " - '"besu,erigon,geth,nethermind,nimbusel,reth"\nIf you do not pass a value, majority mode ' - "testing will be disabled.", + help=( + "Comma-separated list of clients to be tested in majority mode. " + 'Example: "besu,erigon,geth,nethermind,nimbusel,reth". If you do ' + "not pass a value, majority mode testing will be disabled." + ), ) eth_config_group.addoption( "--genesis-config-file", @@ -89,8 +95,10 @@ def pytest_addoption(parser: pytest.Parser) -> None: required=False, type=Path, default=None, - help="Path to a genesis JSON file from which a custom network configuration " - "must be derived.", + help=( + "Path to a genesis JSON file from which a custom network " + "configuration must be derived." + ), ) eth_config_group.addoption( "--genesis-config-url", @@ -99,8 +107,10 @@ def pytest_addoption(parser: pytest.Parser) -> None: required=False, type=str, default=None, - help="URL to a genesis JSON file from which a custom network configuration " - "must be derived.", + help=( + "URL to a genesis JSON file from which a custom network " + "configuration must be derived." + ), ) eth_config_group.addoption( "--rpc-endpoint", @@ -127,13 +137,14 @@ def pytest_configure(config: pytest.Config) -> None: if genesis_config_file and genesis_config_url: pytest.exit( - "Cannot specify both the --genesis-config-file and --genesis-config-url flags." + "Cannot specify both the --genesis-config-file and " + "--genesis-config-url flags." ) if (genesis_config_file or genesis_config_url) and network_name: pytest.exit( - "Cannot specify a network name when using the --genesis-config-file or " - "--genesis-config-url flag." + "Cannot specify a network name when using the " + "--genesis-config-file or --genesis-config-url flag." ) # handle the one of the three flags that was passed # case 1: genesis_config_file @@ -153,7 +164,8 @@ def pytest_configure(config: pytest.Config) -> None: network_configs_path = DEFAULT_NETWORK_CONFIGS_FILE if not network_configs_path.exists(): pytest.exit( - f'Specified networks file "{network_configs_path}" does not exist.' + f'Specified networks file "{network_configs_path}" does not ' + "exist." ) try: network_configs = NetworkConfigFile.from_yaml(network_configs_path) @@ -162,7 +174,8 @@ def pytest_configure(config: pytest.Config) -> None: if network_name not in network_configs.root: pytest.exit( - f'Network "{network_name}" could not be found in file "{network_configs_path}".' + f'Network "{network_name}" could not be found in file ' + f'"{network_configs_path}".' ) config.network = network_configs.root[network_name] # type: ignore @@ -181,7 +194,8 @@ def pytest_configure(config: pytest.Config) -> None: config.option.majority_clients = clients # List[str] else: logger.info( - "Majority test mode is disabled because no --clients value was passed." + "Majority test mode is disabled because no --clients value was " + "passed." 
) if config.getoption("collectonly", default=False): @@ -201,7 +215,8 @@ def pytest_configure(config: pytest.Config) -> None: pytest.exit(f"Could not connect to RPC endpoint {rpc_endpoint}: {e}") try: logger.debug( - "Will now briefly check whether eth_config is supported by target rpc.." + "Will now briefly check whether eth_config is supported by " + "target rpc.." ) eth_rpc.config() logger.debug( @@ -271,7 +286,8 @@ def pytest_generate_tests(metafunc: pytest.Metafunc) -> None: # The test function is not run because we only have a single # client, so no majority comparison logger.info( - "Skipping eth_config majority because less than 2 exec clients were passed" + "Skipping eth_config majority because less than 2 exec " + "clients were passed" ) metafunc.parametrize( ["all_rpc_endpoints"], @@ -302,7 +318,9 @@ def pytest_generate_tests(metafunc: pytest.Metafunc) -> None: rpc_endpoint, id=f"{metafunc.definition.name}[{endpoint_name}]", ) - for endpoint_name, rpc_endpoint in all_rpc_endpoints_dict.items() + for endpoint_name, rpc_endpoint in ( + all_rpc_endpoints_dict.items() + ) ], scope="function", ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/execute_eth_config.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/execute_eth_config.py index e6d9e9a460..5edb2bde7f 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/execute_eth_config.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/execute_eth_config.py @@ -235,12 +235,14 @@ def test_eth_config_majority( response = eth_rpc_target.config(timeout=5) if response is None: logger.warning( - f"Got 'None' as eth_config response from {eth_rpc_target}" + f"Got 'None' as eth_config response from " + f"{eth_rpc_target}" ) continue except Exception as e: logger.warning( - f"When trying to get eth_config from {eth_rpc_target} a problem occurred: {e}" + f"When trying to get eth_config from {eth_rpc_target} a " + f"problem occurred: {e}" ) continue @@ -256,12 +258,12 @@ def test_eth_config_majority( break # no need to gather more responses for this client assert len(responses.keys()) == len(all_rpc_endpoints.keys()), ( - "Failed to get an eth_config response " - f" from each specified execution client. Full list of execution clients is " - f"{all_rpc_endpoints.keys()} but we were only able to gather eth_config responses " - f"from: {responses.keys()}\n" - "Will try again with a different consensus-execution client combination for " - "this execution client" + "Failed to get an eth_config response from each specified execution " + f"client. Full list of execution clients is " + f"{all_rpc_endpoints.keys()} but we were only able to gather " + f"eth_config responses from: {responses.keys()}\n" + "Will try again with a different consensus-execution client " + "combination for this execution client" ) # determine hashes of client responses client_to_hash_dict = {} # Dict[exec_client : response hash] # noqa: C408 @@ -298,5 +300,6 @@ def test_eth_config_majority( assert expected_hash != "" logger.info( - "All clients returned the same eth_config response. Test has been passed!" + "All clients returned the same eth_config response. Test has been " + "passed!" 
) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/tests/test_execute_eth_config.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/tests/test_execute_eth_config.py index 2e47ecbfdf..f9bedc4710 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/tests/test_execute_eth_config.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/tests/test_execute_eth_config.py @@ -71,10 +71,12 @@ }, "systemContracts": { "BEACON_ROOTS_ADDRESS": "0x000f3df6d732807ef1319fb7b8bb8522d0beac02", - "CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": "0x0000bbddc7ce488642fb579f8b00f3a590007251", + "CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": + "0x0000bbddc7ce488642fb579f8b00f3a590007251", "DEPOSIT_CONTRACT_ADDRESS": "0x00000000219ab540356cbb839cbe05303d7705fa", "HISTORY_STORAGE_ADDRESS": "0x0000f90827f1c53a10cb7a02335b175320002935", - "WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS": "0x00000961ef480eb55e80d19ad83579a64c007002" + "WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS": + "0x00000961ef480eb55e80d19ad83579a64c007002" } } """) @@ -111,10 +113,12 @@ }, "systemContracts": { "BEACON_ROOTS_ADDRESS": "0x000f3df6d732807ef1319fb7b8bb8522d0beac02", - "CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": "0x0000bbddc7ce488642fb579f8b00f3a590007251", + "CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": + "0x0000bbddc7ce488642fb579f8b00f3a590007251", "DEPOSIT_CONTRACT_ADDRESS": "0x00000000219ab540356cbb839cbe05303d7705fa", "HISTORY_STORAGE_ADDRESS": "0x0000f90827f1c53a10cb7a02335b175320002935", - "WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS": "0x00000961ef480eb55e80d19ad83579a64c007002" + "WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS": + "0x00000961ef480eb55e80d19ad83579a64c007002" } } """) @@ -151,10 +155,12 @@ }, "systemContracts": { "BEACON_ROOTS_ADDRESS": "0x000f3df6d732807ef1319fb7b8bb8522d0beac02", - "CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": "0x0000bbddc7ce488642fb579f8b00f3a590007251", + "CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": + "0x0000bbddc7ce488642fb579f8b00f3a590007251", "DEPOSIT_CONTRACT_ADDRESS": "0x00000000219ab540356cbb839cbe05303d7705fa", "HISTORY_STORAGE_ADDRESS": "0x0000f90827f1c53a10cb7a02335b175320002935", - "WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS": "0x00000961ef480eb55e80d19ad83579a64c007002" + "WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS": + "0x00000961ef480eb55e80d19ad83579a64c007002" } } """) @@ -191,10 +197,12 @@ }, "systemContracts": { "BEACON_ROOTS_ADDRESS": "0x000f3df6d732807ef1319fb7b8bb8522d0beac02", - "CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": "0x0000bbddc7ce488642fb579f8b00f3a590007251", + "CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": + "0x0000bbddc7ce488642fb579f8b00f3a590007251", "DEPOSIT_CONTRACT_ADDRESS": "0x00000000219ab540356cbb839cbe05303d7705fa", "HISTORY_STORAGE_ADDRESS": "0x0000f90827f1c53a10cb7a02335b175320002935", - "WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS": "0x00000961ef480eb55e80d19ad83579a64c007002" + "WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS": + "0x00000961ef480eb55e80d19ad83579a64c007002" } } """) @@ -231,10 +239,12 @@ }, "systemContracts": { "BEACON_ROOTS_ADDRESS": "0x000f3df6d732807ef1319fb7b8bb8522d0beac02", - "CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": "0x0000bbddc7ce488642fb579f8b00f3a590007251", + "CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": + "0x0000bbddc7ce488642fb579f8b00f3a590007251", "DEPOSIT_CONTRACT_ADDRESS": "0x00000000219ab540356cbb839cbe05303d7705fa", "HISTORY_STORAGE_ADDRESS": "0x0000f90827f1c53a10cb7a02335b175320002935", - 
"WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS": "0x00000961ef480eb55e80d19ad83579a64c007002" + "WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS": + "0x00000961ef480eb55e80d19ad83579a64c007002" } } """) @@ -271,10 +281,12 @@ }, "systemContracts": { "BEACON_ROOTS_ADDRESS": "0x000f3df6d732807ef1319fb7b8bb8522d0beac02", - "CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": "0x0000bbddc7ce488642fb579f8b00f3a590007251", + "CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": + "0x0000bbddc7ce488642fb579f8b00f3a590007251", "DEPOSIT_CONTRACT_ADDRESS": "0x00000000219ab540356cbb839cbe05303d7705fa", "HISTORY_STORAGE_ADDRESS": "0x0000f90827f1c53a10cb7a02335b175320002935", - "WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS": "0x00000961ef480eb55e80d19ad83579a64c007002" + "WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS": + "0x00000961ef480eb55e80d19ad83579a64c007002" } } """) @@ -285,7 +297,7 @@ STATIC_NETWORK_CONFIGS = """ -# Static network configs so updates to the network configs don't break the tests. +# Static network configs so network config updates don't break the tests. Mainnet: chainId: 0x1 genesisHash: 0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3 @@ -414,7 +426,7 @@ target: 15 max: 20 baseFeeUpdateFraction: 5007716 -""" # W505 +""" # noqa: E501 @pytest.fixture(scope="session") @@ -532,7 +544,8 @@ def test_fork_config_from_fork( f"{current_config.model_dump_json()}" ) assert current_config.fork_id == expected_eth_config.current.fork_id, ( - f"Expected {expected_eth_config.current.fork_id} but got {current_config.fork_id}" + f"Expected {expected_eth_config.current.fork_id} " + f"but got {current_config.fork_id}" ) if expected_eth_config.next is not None: assert next_config is not None, "Expected next to be not None" @@ -543,7 +556,8 @@ def test_fork_config_from_fork( f"{next_config.model_dump_json()}" ) assert next_config.fork_id == expected_eth_config.next.fork_id, ( - f"Expected {expected_eth_config.next.fork_id} but got {next_config.fork_id}" + f"Expected {expected_eth_config.next.fork_id} " + f"but got {next_config.fork_id}" ) else: assert next_config is None, "Expected next to be None" @@ -556,7 +570,8 @@ def test_fork_config_from_fork( f"{eth_config.last.model_dump_json()}" ) assert eth_config.last.fork_id == expected_eth_config.last.fork_id, ( - f"Expected {expected_eth_config.last.fork_id} but got {eth_config.last.fork_id}" + f"Expected {expected_eth_config.last.fork_id} " + f"but got {eth_config.last.fork_id}" ) else: assert eth_config.last is None, "Expected last to be None" @@ -614,19 +629,22 @@ def test_fork_ids( ) -> None: """Test various configurations of fork Ids for different timestamps.""" assert expected_current_fork_id == eth_config.current.fork_id, ( - f"Unexpected current fork id: {eth_config.current.fork_id} != {expected_current_fork_id}" + f"Unexpected current fork id: " + f"{eth_config.current.fork_id} != {expected_current_fork_id}" ) if expected_next_fork_id is not None: assert eth_config.next is not None, "Expected next to be not None" assert expected_next_fork_id == eth_config.next.fork_id, ( - f"Unexpected next fork id: {eth_config.next.fork_id} != {expected_next_fork_id}" + f"Unexpected next fork id: " + f"{eth_config.next.fork_id} != {expected_next_fork_id}" ) else: assert eth_config.next is None, "Expected next to be None" if expected_last_fork_id is not None: assert eth_config.last is not None, "Expected last to be not None" assert expected_last_fork_id == eth_config.last.fork_id, ( - f"Unexpected last fork id: {eth_config.last.fork_id} != {expected_last_fork_id}" + f"Unexpected last fork id: " + 
f"{eth_config.last.fork_id} != {expected_last_fork_id}" ) else: assert eth_config.last is None, "Expected last to be None" diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/tests/test_genesis.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/tests/test_genesis.py index 5e42440953..b71ba856e5 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/tests/test_genesis.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/eth_config/tests/test_genesis.py @@ -131,9 +131,11 @@ def test_genesis_parsing( """ parsed_genesis = Genesis.model_validate_json(genesis_contents) assert parsed_genesis.hash == expected_hash, ( - f"Unexpected genesis hash: {parsed_genesis.hash}, expected: {expected_hash}" + f"Unexpected genesis hash: {parsed_genesis.hash}, " + f"expected: {expected_hash}" ) network_config = parsed_genesis.network_config() assert network_config == expected_network_config, ( - f"Unexpected network config: {network_config}, expected: {expected_network_config}" + f"Unexpected network config: {network_config}, " + f"expected: {expected_network_config}" ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute.py index 30f357fc93..1bdde10539 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute.py @@ -51,8 +51,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: type=int, default=None, help=( - "Default gas price used for transactions, unless overridden by the test. " - "Default=None (1.5x current network gas price)" + "Default gas price used for transactions, unless overridden by " + "the test. Default=None (1.5x current network gas price)" ), ) execute_group.addoption( @@ -62,8 +62,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: type=int, default=None, help=( - "Default max fee per gas used for transactions, unless overridden by the test. " - "Default=None (1.5x current network max fee per gas)" + "Default max fee per gas used for transactions, unless overridden " + "by the test. Default=None (1.5x current network max fee per gas)" ), ) execute_group.addoption( @@ -85,8 +85,9 @@ def pytest_addoption(parser: pytest.Parser) -> None: type=int, default=None, help=( - "Default max fee per blob gas used for transactions, unless overridden by the test. " - "Default=None (1.5x current network max fee per blob gas)" + "Default max fee per blob gas used for transactions, unless " + "overridden by the test. Default=None (1.5x current network max " + "fee per blob gas)" ), ) execute_group.addoption( @@ -96,9 +97,9 @@ def pytest_addoption(parser: pytest.Parser) -> None: default=EnvironmentDefaults.gas_limit // 4, type=int, help=( - "Maximum gas used to execute a single transaction. " - "Will be used as ceiling for tests that attempt to consume the entire block gas limit. " - f"(Default: {EnvironmentDefaults.gas_limit // 4})" + "Maximum gas used to execute a single transaction. Will be used " + "as ceiling for tests that attempt to consume the entire block " + f"gas limit. 
(Default: {EnvironmentDefaults.gas_limit // 4})" ), ) execute_group.addoption( @@ -118,7 +119,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: type=float, default=0.3, help=( - "Time to wait after sending a forkchoice_updated before getting the payload." + "Time to wait after sending a forkchoice_updated before getting " + "the payload." ), ) execute_group.addoption( @@ -128,7 +130,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: default=None, type=int, help=( - "Maximum gas limit for all transactions in a test. Default=None (No limit)" + "Maximum gas limit for all transactions in a test. Default=None " + "(No limit)" ), ) execute_group.addoption( @@ -136,7 +139,10 @@ def pytest_addoption(parser: pytest.Parser) -> None: action="store_true", dest="dry_run", default=False, - help="Don't send transactions, just print the minimum balance required per test.", + help=( + "Don't send transactions, just print the minimum balance required " + "per test." + ), ) execute_group.addoption( "--max-tx-per-batch", @@ -145,8 +151,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: type=int, default=None, help=( - "Maximum number of transactions to send in a single batch to the RPC. " - "Default=750. Higher values may cause RPC instability." + "Maximum number of transactions to send in a single batch to the " + "RPC. Default=750. Higher values may cause RPC instability." ), ) @@ -221,11 +227,13 @@ def pytest_html_results_table_header(cells: list[str]) -> None: ) cells.insert( 4, - 'Funded Accounts', + '' + "Funded Accounts", ) cells.insert( 5, - 'Deployed Contracts', + '' + "Deployed Contracts", ) del cells[-1] # Remove the "Links" column @@ -324,7 +332,7 @@ def dry_run(request: pytest.FixtureRequest) -> bool: @pytest.fixture(scope="session") def max_transactions_per_batch(request: pytest.FixtureRequest) -> int | None: - """Return the maximum number of transactions per batch, or None for default.""" + """Return max number of transactions per batch, or None for default.""" return request.config.getoption("max_tx_per_batch") @@ -335,12 +343,14 @@ def default_max_fee_per_gas( """Return default max fee per gas used for transactions.""" max_fee_per_gas = request.config.getoption("default_max_fee_per_gas") if max_fee_per_gas is not None: + fee_gwei = max_fee_per_gas / 10**9 logger.debug( - f"Using configured default max fee per gas: {max_fee_per_gas / 10**9:.2f} Gwei" + f"Using configured default max fee per gas: {fee_gwei:.2f} Gwei" ) else: logger.debug( - "No default max fee per gas configured, will use network gas price * 1.5" + "No default max fee per gas configured, " + "will use network gas price * 1.5" ) return max_fee_per_gas @@ -354,12 +364,15 @@ def default_max_priority_fee_per_gas( "default_max_priority_fee_per_gas" ) if max_priority_fee_per_gas is not None: + prio_fee_gwei = max_priority_fee_per_gas / 10**9 logger.debug( - f"Using configured default max priority fee per gas: {max_priority_fee_per_gas / 10**9:.2f} Gwei" + f"Using configured default max priority fee per gas: " + f"{prio_fee_gwei:.2f} Gwei" ) else: logger.debug( - "No default max priority fee per gas configured, will use network max priority fee * 1.5" + "No default max priority fee per gas configured, " + "will use network max priority fee * 1.5" ) return max_priority_fee_per_gas @@ -373,12 +386,15 @@ def default_max_fee_per_blob_gas( "default_max_fee_per_blob_gas" ) if max_fee_per_blob_gas is not None: + blob_fee_gwei = max_fee_per_blob_gas / 10**9 logger.debug( - f"Using configured default max fee per blob gas: 
{max_fee_per_blob_gas / 10**9:.2f} Gwei" + f"Using configured default max fee per blob gas: " + f"{blob_fee_gwei:.2f} Gwei" ) else: logger.debug( - "No default max fee per blob gas configured, will use network blob base fee * 1.5" + "No default max fee per blob gas configured, " + "will use network blob base fee * 1.5" ) return max_fee_per_blob_gas @@ -388,17 +404,21 @@ def max_priority_fee_per_gas( eth_rpc: EthRPC, default_max_priority_fee_per_gas: int | None, ) -> int: - """Return max priority fee per gas used for transactions in a given test.""" + """Return max priority fee per gas for transactions in a given test.""" max_priority_fee_per_gas = default_max_priority_fee_per_gas if max_priority_fee_per_gas is None: network_max_priority_fee = eth_rpc.max_priority_fee_per_gas() max_priority_fee_per_gas = int(network_max_priority_fee * 1.5) + net_gwei = network_max_priority_fee / 10**9 + calc_gwei = max_priority_fee_per_gas / 10**9 logger.info( - f"Calculated max priority fee per gas from network: {network_max_priority_fee / 10**9:.2f} Gwei * 1.5 = {max_priority_fee_per_gas / 10**9:.2f} Gwei" + f"Calculated max priority fee per gas from network: " + f"{net_gwei:.2f} Gwei * 1.5 = {calc_gwei:.2f} Gwei" ) else: + prio_gwei = max_priority_fee_per_gas / 10**9 logger.info( - f"Using default max priority fee per gas: {max_priority_fee_per_gas / 10**9:.2f} Gwei" + f"Using default max priority fee per gas: {prio_gwei:.2f} Gwei" ) return max_priority_fee_per_gas @@ -414,24 +434,33 @@ def max_fee_per_gas( if max_fee_per_gas is None: network_gas_price = eth_rpc.gas_price() max_fee_per_gas = int(network_gas_price * 1.5) + net_gwei = network_gas_price / 10**9 + calc_gwei = max_fee_per_gas / 10**9 logger.info( - f"Calculated max fee per gas from network: {network_gas_price / 10**9:.2f} Gwei * 1.5 = {max_fee_per_gas / 10**9:.2f} Gwei" + f"Calculated max fee per gas from network: " + f"{net_gwei:.2f} Gwei * 1.5 = {calc_gwei:.2f} Gwei" ) else: - logger.info( - f"Using default max fee per gas: {max_fee_per_gas / 10**9:.2f} Gwei" - ) + fee_gwei = max_fee_per_gas / 10**9 + logger.info(f"Using default max fee per gas: {fee_gwei:.2f} Gwei") if max_priority_fee_per_gas > max_fee_per_gas: # Depending on the timing of the request, the priority fee may be # greater than the max fee. This is a workaround to ensure that the # transaction is valid. 
+ prio_gwei = max_priority_fee_per_gas / 10**9 + fee_gwei = max_fee_per_gas / 10**9 + adj_gwei = (max_priority_fee_per_gas + 1) / 10**9 logger.warning( - f"Max priority fee per gas ({max_priority_fee_per_gas / 10**9:.2f} Gwei) is greater than max fee per gas ({max_fee_per_gas / 10**9:.2f} Gwei), " - f"adjusting max fee per gas to {(max_priority_fee_per_gas + 1) / 10**9:.2f} Gwei" + f"Max priority fee per gas ({prio_gwei:.2f} Gwei) is greater " + f"than max fee per gas ({fee_gwei:.2f} Gwei), " + f"adjusting max fee per gas to {adj_gwei:.2f} Gwei" ) max_fee_per_gas = max_priority_fee_per_gas + 1 + final_gwei = max_fee_per_gas / 10**9 + prio_gwei = max_priority_fee_per_gas / 10**9 logger.debug( - f"Final max fee per gas: {max_fee_per_gas / 10**9:.2f} Gwei, max priority fee per gas: {max_priority_fee_per_gas / 10**9:.2f} Gwei" + f"Final max fee per gas: {final_gwei:.2f} Gwei, " + f"max priority fee per gas: {prio_gwei:.2f} Gwei" ) return max_fee_per_gas @@ -446,12 +475,16 @@ def max_fee_per_blob_gas( if max_fee_per_blob_gas is None: network_blob_base_fee = eth_rpc.blob_base_fee() max_fee_per_blob_gas = int(network_blob_base_fee * 1.5) + net_gwei = network_blob_base_fee / 10**9 + calc_gwei = max_fee_per_blob_gas / 10**9 logger.info( - f"Calculated max fee per blob gas from network: {network_blob_base_fee / 10**9:.2f} Gwei * 1.5 = {max_fee_per_blob_gas / 10**9:.2f} Gwei" + f"Calculated max fee per blob gas from network: " + f"{net_gwei:.2f} Gwei * 1.5 = {calc_gwei:.2f} Gwei" ) else: + blob_gwei = max_fee_per_blob_gas / 10**9 logger.info( - f"Using default max fee per blob gas: {max_fee_per_blob_gas / 10**9:.2f} Gwei" + f"Using default max fee per blob gas: {blob_gwei:.2f} Gwei" ) return max_fee_per_blob_gas @@ -460,8 +493,12 @@ def max_fee_per_blob_gas( def gas_price(max_fee_per_gas: int, max_priority_fee_per_gas: int) -> int: """Return gas price used for transactions in a given test.""" calculated_gas_price = max_fee_per_gas + max_priority_fee_per_gas + fee_gwei = max_fee_per_gas / 10**9 + prio_gwei = max_priority_fee_per_gas / 10**9 + total_gwei = calculated_gas_price / 10**9 logger.debug( - f"Calculated gas price: {max_fee_per_gas / 10**9:.2f} Gwei (max fee) + {max_priority_fee_per_gas / 10**9:.2f} Gwei (max priority fee) = {calculated_gas_price / 10**9:.2f} Gwei" + f"Calculated gas price: {fee_gwei:.2f} Gwei (max fee) + " + f"{prio_gwei:.2f} Gwei (max priority fee) = {total_gwei:.2f} Gwei" ) return calculated_gas_price @@ -553,9 +590,8 @@ def gas_limit_accumulator() -> Generator[GasInfoAccumulator, None, None]: gas_limit_accumulator = GasInfoAccumulator() yield gas_limit_accumulator logger.info(f"Total gas limit: {gas_limit_accumulator.total_gas_limit()}") - logger.info( - f"Total minimum balance: {gas_limit_accumulator.total_minimum_balance() / 10**18:.18f}" - ) + total_min_eth = gas_limit_accumulator.total_minimum_balance() / 10**18 + logger.info(f"Total minimum balance: {total_min_eth:.18f}") def base_test_parametrizer(cls: Type[BaseTest]) -> Any: @@ -601,6 +637,7 @@ def base_test_parametrizer_func( When parametrize, indirect must be used along with the fixture format as value. 
""" + del fixed_opcode_count execute_format = request.param assert execute_format in BaseExecute.formats.values() assert issubclass(execute_format, BaseExecute) @@ -655,8 +692,9 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: ) if max_gas_limit_per_test is not None: assert gas_consumption <= max_gas_limit_per_test, ( - f"Test gas consumption ({gas_consumption}) exceeds the gas limit allowed " - f"per test({max_gas_limit_per_test})." + f"Test gas consumption ({gas_consumption}) exceeds " + f"the gas limit allowed per test" + f"({max_gas_limit_per_test})." ) gas_limit_accumulator.add( @@ -666,9 +704,8 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: ) if dry_run: - logger.info( - f"Minimum balance required: {minimum_balance / 10**18:.18f}" - ) + min_eth = minimum_balance / 10**18 + logger.info(f"Minimum balance required: {min_eth:.18f}") logger.info(f"Gas consumption: {gas_consumption}") return @@ -682,8 +719,9 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: actual_code = eth_rpc.get_code(deployed_contract) if actual_code != expected_code: msg = ( - f"Deployed test contract didn't match expected code at address " - f"{deployed_contract} (not enough gas_limit?).\n" + f"Deployed test contract didn't match expected " + f"code at address {deployed_contract} " + f"(not enough gas_limit?).\n" f"Expected: {expected_code}\n" f"Actual: {actual_code}" ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute_deploy_required_contracts.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute_deploy_required_contracts.py index 94d5c2bad8..b3510334f6 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute_deploy_required_contracts.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute_deploy_required_contracts.py @@ -40,20 +40,20 @@ def test_deploy_deterministic_deployment_contract( f"{current_deterministic_deployment_contract_address}" ) if check_only: - print( - f"✓ Contract is already deployed at {current_deterministic_deployment_contract_address}" - ) + addr = current_deterministic_deployment_contract_address + print(f"✓ Contract is already deployed at {addr}") else: - print( - f"Contract already exists at {current_deterministic_deployment_contract_address}, skipping deployment" - ) + addr = current_deterministic_deployment_contract_address + print(f"Contract already exists at {addr}, skipping deployment") return if check_only: + factory_addr = DETERMINISTIC_FACTORY_ADDRESS logger.info( - f"✗ Deterministic deployment contract NOT deployed at {DETERMINISTIC_FACTORY_ADDRESS}" + f"✗ Deterministic deployment contract NOT deployed at " + f"{factory_addr}" ) - print(f"✗ Contract is NOT deployed at {DETERMINISTIC_FACTORY_ADDRESS}") + print(f"✗ Contract is NOT deployed at {factory_addr}") pytest.fail("Contract not deployed (check-only mode)") try: @@ -66,8 +66,9 @@ def test_deploy_deterministic_deployment_contract( # Verify deployment deployed_code = eth_rpc.get_code(DETERMINISTIC_FACTORY_ADDRESS) if deployed_code != Bytes(DETERMINISTIC_FACTORY_BYTECODE): + factory_addr = DETERMINISTIC_FACTORY_ADDRESS pytest.fail( - f"Verification failed: Contract code mismatch at {DETERMINISTIC_FACTORY_ADDRESS}. " + f"Verification failed: Contract code mismatch at {factory_addr}. 
" f"Expected: {DETERMINISTIC_FACTORY_BYTECODE}, " f"Deployed: {deployed_code}" ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute_flags/execute_flags.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute_flags/execute_flags.py index 80699428f2..cb1e2a26fb 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute_flags/execute_flags.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute_flags/execute_flags.py @@ -65,14 +65,14 @@ def pytest_configure(config: pytest.Config) -> None: returncode=4, ) else: - # Use rpc_chain_id if chain_id is not provided (for backwards compatibility) + # Use rpc_chain_id if chain_id is not provided (backwards compat) if not chain_id: chain_id = rpc_chain_id if chain_id is None: pytest.exit( - "Chain ID must be provided with the --chain-id/--rpc-chain-id flags or " - "the CHAIN_ID/RPC_CHAIN_ID environment variables." + "Chain ID must be provided with the --chain-id/--rpc-chain-id " + "flags or the CHAIN_ID/RPC_CHAIN_ID environment variables." ) # write to config diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/pre_alloc.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/pre_alloc.py index caea02a545..59b4ae7f9b 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/pre_alloc.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/pre_alloc.py @@ -183,7 +183,7 @@ def execute_required_contracts( session_temp_folder: Path, ) -> None: """ - Deploy required contracts for the execute command: + Deploy required contracts for the execute command. - Deterministic deployment proxy """ @@ -215,9 +215,10 @@ def execute_required_contracts( class PendingTransaction(Transaction): """ - Custom transaction class that defines a transaction that is yet to be sent. - The value is allowed to be `None` to allow for the value to be set until the - transaction is sent. + Custom transaction class that defines a transaction yet to be sent. + + The value is allowed to be `None` to allow for the value to be set until + the transaction is sent. """ value: HexNumber | None = None # type: ignore @@ -313,6 +314,7 @@ def deterministic_deploy_contract( Deploy a contract to the allocation at a deterministic location using a deterministic deployment proxy. 
""" + del storage gas_costs = self._fork.gas_costs() memory_expansion_gas_calculator = ( self._fork.memory_expansion_gas_calculator() @@ -340,7 +342,8 @@ def deterministic_deploy_contract( f"Current: {chain_code}" ) logger.info( - f"Contract already deployed at {contract_address} (label={label})" + f"Contract already deployed at {contract_address} " + f"(label={label})" ) else: # Assert the deployment contract is already on chain @@ -376,7 +379,8 @@ def deterministic_deploy_contract( tx_gas_limit_cap = self._fork.transaction_gas_limit_cap() if tx_gas_limit_cap and deploy_gas_limit > tx_gas_limit_cap: raise ValueError( - f"deterministic deploy gas limit exceeds the transaction gas limit cap: {deploy_gas_limit} > {tx_gas_limit_cap}" + f"deterministic deploy gas limit exceeds the transaction " + f"gas limit cap: {deploy_gas_limit} > {tx_gas_limit_cap}" ) deploy_tx = self._add_pending_tx( action="deterministic_deploy_contract", @@ -386,10 +390,13 @@ def deterministic_deploy_contract( gas_limit=deploy_gas_limit, value=0, ) + code_size = len(deploy_code) + initcode_size = len(initcode) logger.info( f"Contract deployment tx created (label={label}): " f"tx_nonce={deploy_tx.nonce}, gas_limit={deploy_gas_limit}, " - f"code_size={len(deploy_code)} bytes, initcode_size={len(initcode)} bytes" + f"code_size={code_size} bytes, initcode_size={initcode_size} " + "bytes" ) logger.debug( @@ -458,9 +465,10 @@ def deploy_contract( ) balance = self._eth_rpc.get_balance(contract_address) nonce = self._eth_rpc.get_transaction_count(contract_address) + bal_eth = balance / 10**18 logger.debug( - f"Stub contract {contract_address}: balance={balance / 10**18:.18f} ETH, " - f"nonce={nonce}, code_size={len(code)} bytes" + f"Stub contract {contract_address}: balance={bal_eth:.18f} " + f"ETH, nonce={nonce}, code_size={len(code)} bytes" ) super().__setitem__( contract_address, @@ -504,9 +512,10 @@ def deploy_contract( ) max_initcode_size = self._fork.max_initcode_size() - if len(prepared_initcode) > max_initcode_size: + initcode_len = len(prepared_initcode) + if initcode_len > max_initcode_size: raise ValueError( - f"initcode too large {len(prepared_initcode)} > {max_initcode_size}" + f"initcode too large {initcode_len} > {max_initcode_size}" ) deploy_gas_limit += calldata_gas_calculator(data=prepared_initcode) @@ -515,7 +524,8 @@ def deploy_contract( tx_gas_limit_cap = self._fork.transaction_gas_limit_cap() if tx_gas_limit_cap and deploy_gas_limit > tx_gas_limit_cap: raise ValueError( - f"deploy gas limit exceeds the transaction gas limit cap: {deploy_gas_limit} > {tx_gas_limit_cap}" + f"deploy gas limit exceeds the transaction gas limit cap: " + f"{deploy_gas_limit} > {tx_gas_limit_cap}" ) deploy_tx = self._add_pending_tx( @@ -526,11 +536,15 @@ def deploy_contract( value=balance, gas_limit=deploy_gas_limit, ) + code_sz = len(code) + init_sz = len(prepared_initcode) + bal_eth = Number(balance) / 10**18 + slots = len(storage.root) logger.info( f"Contract deployment tx created (label={label}): " f"tx_nonce={deploy_tx.nonce}, gas_limit={deploy_gas_limit}, " - f"code_size={len(code)} bytes, initcode_size={len(prepared_initcode)} bytes, " - f"balance={Number(balance) / 10**18:.18f} ETH, storage_slots={len(storage.root)}" + f"code_size={code_sz} bytes, initcode_size={init_sz} bytes, " + f"balance={bal_eth:.18f} ETH, storage_slots={slots}" ) contract_address = deploy_tx.created_contract @@ -588,7 +602,8 @@ def fund_eoa( if not isinstance(storage, Storage): storage = Storage.model_validate(storage) logger.debug( - f"Deploying 
storage contract for EOA {eoa} with {len(storage)} storage slots" + f"Deploying storage contract for EOA {eoa} " + f"with {len(storage)} storage slots" ) sstore_address = self.deploy_contract( code=( @@ -600,7 +615,8 @@ def fund_eoa( ) ) logger.debug( - f"Storage contract deployed at {sstore_address} for EOA {eoa}" + f"Storage contract deployed at {sstore_address} " + f"for EOA {eoa}" ) self._add_pending_tx( @@ -715,10 +731,12 @@ def fund_address( if minimum_balance: if current_balance >= fund_amount: + cur_eth = current_balance / 10**18 + min_eth = fund_amount / 10**18 logger.info( - f"Skipping funding for address {address} (label={address.label}): " - f"current balance {current_balance / 10**18:.18f} ETH >= " - f"minimum {fund_amount / 10**18:.18f} ETH" + f"Skipping funding for address {address} " + f"(label={address.label}): current balance " + f"{cur_eth:.18f} ETH >= minimum {min_eth:.18f} ETH" ) if address in self: account = self[address] @@ -729,9 +747,10 @@ def fund_address( address, Account(balance=current_balance) ) return + fund_eth = fund_amount / 10**18 logger.debug( - f"Funding address to minimum balance {address} (label={address.label}): " - f"{fund_amount / 10**18:.18f} ETH" + f"Funding address to minimum balance {address} " + f"(label={address.label}): {fund_eth:.18f} ETH" ) self._add_pending_tx( action="fund_address", @@ -741,9 +760,10 @@ def fund_address( ) new_balance = fund_amount else: + fund_eth = fund_amount / 10**18 logger.debug( f"Funding address {address} (label={address.label}): " - f"{fund_amount / 10**18:.18f} ETH" + f"{fund_eth:.18f} ETH" ) self._add_pending_tx( action="fund_address", @@ -757,9 +777,11 @@ def fund_address( account = self[address] if account is not None: account.balance = ZeroPaddedHexNumber(new_balance) + cur_eth = current_balance / 10**18 + new_eth = new_balance / 10**18 logger.debug( f"Updated balance for existing address {address}: " - f"{current_balance / 10**18:.18f} ETH -> {new_balance / 10**18:.18f} ETH" + f"{cur_eth:.18f} ETH -> {new_eth:.18f} ETH" ) else: super().__setitem__(address, Account(balance=new_balance)) @@ -811,27 +833,30 @@ def minimum_balance_for_pending_transactions( max_fee_per_blob_gas: int, ) -> Tuple[int, int]: """ - Calculate the minimum balance required by the sender to send all pending - transactions. + Calculate the minimum balance required by the sender to send all + pending transactions. """ minimum_balance = 0 gas_consumption = 0 for tx in self._pending_txs: if tx.value is None: - # WARN: This currently fails if there's an account with `pre.fund_eoa()` that - # never sends a transaction during the test. + # WARN: This currently fails if there's an account with + # `pre.fund_eoa()` that never sends a transaction during test. 
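            # Illustrative upper bound only (the exact accounting in this
            # method may differ): the worst-case balance one transaction can
            # require from its sender is roughly
            #
            #   value
            #   + gas_limit * max_fee_per_gas
            #   + blob_count * GAS_PER_BLOB * max_fee_per_blob_gas
            #
            # where GAS_PER_BLOB is 2**17 (EIP-4844) and the blob term only
            # applies to type-3 (blob) transactions.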
if tx.to not in sender_balances: error_message = ( "Sender balance must be set before sending:" f"\nTransaction: {tx.model_dump_json(indent=2)}" ) if tx.metadata is not None: - error_message += f"\nMetadata: {tx.metadata.model_dump_json(indent=2)}" + metadata_json = tx.metadata.model_dump_json(indent=2) + error_message += f"\nMetadata: {metadata_json}" logger.error(error_message) raise ValueError(error_message) sender_balance = sender_balances[tx.to] + bal_eth = sender_balance / 10**18 logger.info( - f"Deferred EOA balance for {tx.to} set to {sender_balance / 10**18:.18f} ETH" + f"Deferred EOA balance for {tx.to} set to " + f"{bal_eth:.18f} ETH" ) tx.value = HexNumber(sender_balance) tx.set_gas_price( @@ -853,12 +878,12 @@ def send_pending_transactions(self) -> List[TransactionByHashResponse]: ) transaction_batches: List[List[PendingTransaction]] = [] last_tx_batch: List[PendingTransaction] = [] - MAX_TXS_PER_BATCH = 100 + max_txs_per_batch = 100 for tx in self._pending_txs: assert tx.value is not None, ( "Transaction value must be set before sending them to the RPC." ) - if len(last_tx_batch) >= MAX_TXS_PER_BATCH: + if len(last_tx_batch) >= max_txs_per_batch: transaction_batches.append(last_tx_batch) last_tx_batch = [] last_tx_batch.append(tx) @@ -869,16 +894,13 @@ def send_pending_transactions(self) -> List[TransactionByHashResponse]: for tx_batch in transaction_batches: txs = [tx.with_signature_and_sender() for tx in tx_batch] tx_hashes = self._eth_rpc.send_transactions(txs) + hash_strs = [str(h) for h in tx_hashes[:5]] + n_hashes = len(tx_hashes) + extra = f" and {n_hashes - 5} more" if n_hashes > 5 else "" + logger.info(f"Sent {n_hashes} transactions: {hash_strs}{extra}") logger.info( - f"Sent {len(tx_hashes)} transactions: {[str(h) for h in tx_hashes[:5]]}" - + ( - f" and {len(tx_hashes) - 5} more" - if len(tx_hashes) > 5 - else "" - ) - ) - logger.info( - f"Waiting for {len(tx_batch)} transactions to be included in blocks" + f"Waiting for {len(tx_batch)} transactions to be included " + "in blocks" ) responses += self._eth_rpc.wait_for_transactions(tx_batch) logger.info( @@ -948,18 +970,23 @@ def pre( refund_gas_limit = 21_000 tx_cost = refund_gas_limit * max_fee_per_gas if remaining_balance < tx_cost: + rem_eth = remaining_balance / 10**18 + cost_eth = tx_cost / 10**18 logger.debug( f"Skipping refund for EOA {eoa} (label={eoa.label}): " - f"insufficient balance {remaining_balance / 10**18:.18f} ETH < " - f"transaction cost {tx_cost / 10**18:.18f} ETH" + f"insufficient balance {rem_eth:.18f} ETH < " + f"transaction cost {cost_eth:.18f} ETH" ) skipped_refunds += 1 continue refund_value = remaining_balance - tx_cost + ref_eth = refund_value / 10**18 + rem_eth = remaining_balance / 10**18 + cost_eth = tx_cost / 10**18 logger.debug( f"Preparing refund transaction for EOA {eoa} (label={eoa.label}): " - f"{refund_value / 10**18:.18f} ETH (remaining: {remaining_balance / 10**18:.18f} ETH, " - f"cost: {tx_cost / 10**18:.18f} ETH)" + f"{ref_eth:.18f} ETH (remaining: {rem_eth:.18f} ETH, " + f"cost: {cost_eth:.18f} ETH)" ) refund_tx = Transaction( sender=eoa, diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/chain_builder_eth_rpc.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/chain_builder_eth_rpc.py index 85ac615e4e..6702c5d0ea 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/chain_builder_eth_rpc.py +++ 
b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/chain_builder_eth_rpc.py @@ -453,8 +453,9 @@ def wait_for_transactions( for tx_hash, tx in pending_responses.items() ] ) + missing_str = ", ".join(missing_txs_strings) raise Exception( - f"Transactions {', '.join(missing_txs_strings)} were not included in a block " + f"Transactions {missing_str} were not included in a block " f"within {self.transaction_wait_timeout} seconds:\n" f"{pending_tx_responses_string}" ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/hive.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/hive.py index 5c2a82cd5a..10c23aa78b 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/hive.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/hive.py @@ -58,8 +58,12 @@ def pytest_addoption(parser: pytest.Parser) -> None: action="store", dest="tx_wait_timeout", type=int, - default=10, # Lowered from Remote RPC because of the consistent block production - help="Maximum time in seconds to wait for a transaction to be included in a block", + # Lowered from Remote RPC because of consistent block production + default=10, + help=( + "Maximum time in seconds to wait for a transaction to be " + "included in a block" + ), ) @@ -251,7 +255,8 @@ def base_hive_test( test = test_suite.start_test( name="Base Hive Test", description=( - "Base test used to deploy the main client to be used throughout all tests." + "Base test used to deploy the main client to be used " + "throughout all tests." ), ) with open(base_file, "w") as f: @@ -337,8 +342,9 @@ def client( ) error_message = ( - f"Unable to connect to the client container ({client_type.name}) via Hive during test " - "setup. Check the client or Hive server logs for more information." + f"Unable to connect to the client container ({client_type.name}) " + "via Hive during test setup. Check the client or Hive server logs " + "for more information." ) assert client is not None, error_message diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/remote.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/remote.py index f8b332dfb9..78609d4fe9 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/remote.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/remote.py @@ -33,7 +33,10 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="tx_wait_timeout", type=int, default=60, - help="Maximum time in seconds to wait for a transaction to be included in a block", + help=( + "Maximum time in seconds to wait for a transaction to be " + "included in a block" + ), ) remote_rpc_group.addoption( "--address-stubs", @@ -41,8 +44,11 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="address_stubs", default=AddressStubs(root={}), type=AddressStubs.model_validate_json_or_file, - help="The address stubs for contracts that have already been placed in the chain and to " - "use for the test. Can be a JSON formatted string or a path to a YAML or JSON file.", + help=( + "The address stubs for contracts that have already been placed " + "in the chain and to use for the test. Can be a JSON formatted " + "string or a path to a YAML or JSON file." 
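        # For example (mirroring the cases exercised in test_pre_alloc.py),
        # an equivalent stub can be given either as inline JSON:
        #   --address-stubs '{"DEPOSIT_CONTRACT_ADDRESS": "0x00000000219ab540356cbb839cbe05303d7705fa"}'
        # or as a YAML file containing:
        #   DEPOSIT_CONTRACT_ADDRESS: 0x00000000219ab540356cbb839cbe05303d7705fa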
+ ), ) engine_rpc_group = parser.getgroup( @@ -54,10 +60,13 @@ def pytest_addoption(parser: pytest.Parser) -> None: action="store", default=None, dest="engine_endpoint", - help="Engine endpoint to an execution client, which implies that the execute command " - "will be used to drive the chain. If not provided, it's assumed that the execution client " - "is connected to a beacon node and the chain progresses automatically. If provided, the " - "JWT secret must be provided as well.", + help=( + "Engine endpoint to an execution client, which implies that the " + "execute command will be used to drive the chain. If not " + "provided, it's assumed that the execution client is connected " + "to a beacon node and the chain progresses automatically. If " + "provided, the JWT secret must be provided as well." + ), ) engine_rpc_group.addoption( "--engine-jwt-secret", @@ -65,8 +74,11 @@ def pytest_addoption(parser: pytest.Parser) -> None: action="store", default=None, dest="engine_jwt_secret", - help="JWT secret to be used to authenticate with the engine endpoint. Provided string " - "will be converted to bytes using the UTF-8 encoding.", + help=( + "JWT secret to be used to authenticate with the engine endpoint. " + "Provided string will be converted to bytes using the UTF-8 " + "encoding." + ), ) engine_rpc_group.addoption( "--engine-jwt-secret-file", @@ -74,14 +86,17 @@ def pytest_addoption(parser: pytest.Parser) -> None: action="store", default=None, dest="engine_jwt_secret_file", - help="Path to a file containing the JWT secret to be used to authenticate with the engine " - "endpoint. The file must contain only the JWT secret as a hex string.", + help=( + "Path to a file containing the JWT secret to be used to " + "authenticate with the engine endpoint. The file must contain " + "only the JWT secret as a hex string." + ), ) def pytest_configure(config: pytest.Config) -> None: """Check if a chain ID configuration is provided.""" - # Verify the chain ID configuration is consistent with the remote RPC endpoint + # Verify chain ID config is consistent with the remote RPC endpoint rpc_endpoint = config.getoption("rpc_endpoint") or os.environ.get( "RPC_ENDPOINT" ) @@ -92,11 +107,13 @@ def pytest_configure(config: pytest.Config) -> None: ) eth_rpc = EthRPC(rpc_endpoint) remote_chain_id = eth_rpc.chain_id() - if remote_chain_id != ChainConfigDefaults.chain_id: + configured_chain_id = ChainConfigDefaults.chain_id + if remote_chain_id != configured_chain_id: pytest.exit( - f"Chain ID obtained from the remote RPC endpoint ({remote_chain_id}) does not match " - f"the configured chain ID ({ChainConfigDefaults.chain_id})." - "Please check if the chain ID is correctly configured with the --chain-id flag." + f"Chain ID obtained from the remote RPC endpoint " + f"({remote_chain_id}) does not match the configured chain ID " + f"({configured_chain_id}). Please check if the chain ID is " + "correctly configured with the --chain-id flag." ) engine_endpoint = config.getoption("engine_endpoint") engine_rpc = None @@ -106,8 +123,9 @@ def pytest_configure(config: pytest.Config) -> None: if jwt_secret is None and jwt_secret_file is None: pytest.exit( "JWT secret must be provided if engine endpoint is provided. " - "Please check if the JWT secret is correctly configured with the " - "--engine-jwt-secret or --engine-jwt-secret-file flag." + "Please check if the JWT secret is correctly configured " + "with the --engine-jwt-secret or --engine-jwt-secret-file " + "flag." 
) elif jwt_secret_file is not None: with open(jwt_secret_file, "r") as f: @@ -119,8 +137,8 @@ def pytest_configure(config: pytest.Config) -> None: except ValueError: pytest.exit( "JWT secret must be a hex string if provided as a file. " - "Please check if the JWT secret is correctly configured with the " - "--engine-jwt-secret-file flag." + "Please check if the JWT secret is correctly configured " + "with the --engine-jwt-secret-file flag." ) if isinstance(jwt_secret, str): jwt_secret = jwt_secret.encode("utf-8") diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/remote_seed_sender.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/remote_seed_sender.py index 8c3569b300..5a0480f08c 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/remote_seed_sender.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/rpc/remote_seed_sender.py @@ -26,11 +26,12 @@ def pytest_addoption(parser: pytest.Parser) -> None: required=False, dest="rpc_seed_key", help=( - "Seed key used to fund all sender keys. This account must have a balance of at least " - "`sender_key_initial_balance` * `workers` + gas fees. It should also be " - "exclusively used by this command because the nonce is only checked once and if " - "it's externally increased, the seed transactions might fail. " - "Can also be set via RPC_SEED_KEY environment variable." + "Seed key used to fund all sender keys. This account must have " + "a balance of at least `sender_key_initial_balance` * `workers` " + "+ gas fees. It should also be exclusively used by this command " + "because the nonce is only checked once and if it's externally " + "increased, the seed transactions might fail. Can also be set " + "via RPC_SEED_KEY environment variable." ), ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/sender.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/sender.py index e1305047b3..fc403f3464 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/sender.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/sender.py @@ -39,8 +39,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: type=Wei, default=None, help=( - "Gas price set for the funding transactions of each worker's sender key. " - "Default=None (1.5x current network gas price)" + "Gas price set for the funding transactions of each worker's " + "sender key. Default=None (1.5x current network gas price)" ), ) @@ -91,8 +91,9 @@ def seed_account_sweep_amount(request: pytest.FixtureRequest) -> int | None: """Get the seed account sweep amount.""" sweep_amount = request.config.option.seed_account_sweep_amount if sweep_amount is not None: + sweep_eth = sweep_amount / 10**18 logger.info( - f"Using specified seed account sweep amount: {sweep_amount / 10**18:.18f} ETH" + f"Using specified seed account sweep amount: {sweep_eth:.18f} ETH" ) else: logger.info( @@ -140,8 +141,10 @@ def worker_key_funding_amount( if base_file.exists(): # Some other worker already did this for us, use that value. 
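        # Worked example for the split computed further down (all numbers
        # are hypothetical): a 10 ETH seed balance shared by 4 workers gives
        # 2.5 ETH per worker; with a 21_000 gas funding transaction at
        # 50 gwei the funding cost is 0.00105 ETH, so each worker key is
        # funded with roughly 2.5 - 0.00105 = 2.49895 ETH.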
cached_amount = int(base_file.read_text()) + cached_eth = cached_amount / 10**18 logger.info( - f"Using cached worker key funding amount: {cached_amount / 10**18:.18f} ETH" + f"Using cached worker key funding amount: " + f"{cached_eth:.18f} ETH" ) return cached_amount @@ -167,10 +170,12 @@ def worker_key_funding_amount( sender_fund_refund_gas_limit * sender_funding_transactions_gas_price ) + tx_cost_eth = funding_tx_cost / 10**18 + gas_gwei = sender_funding_transactions_gas_price / 10**9 logger.info( - f"Funding transaction cost: {funding_tx_cost / 10**18:.18f} ETH " + f"Funding transaction cost: {tx_cost_eth:.18f} ETH " f"(gas_limit={sender_fund_refund_gas_limit}, " - f"gas_price={sender_funding_transactions_gas_price / 10**9:.9f} Gwei)" + f"gas_price={gas_gwei:.9f} Gwei)" ) # Subtract the cost of the transaction that is going to be sent to # the seed sender @@ -178,10 +183,12 @@ def worker_key_funding_amount( seed_sender_balance_per_worker - funding_tx_cost ) if worker_key_funding_amount <= 0: + avail_eth = available_amount / 10**18 + fund_cost_eth = funding_tx_cost / 10**18 logger.error( - f"{amount_source} is too low to distribute to {worker_count} workers. " - f"Available: {available_amount / 10**18:.6f} ETH, " - f"Funding cost: {funding_tx_cost / 10**18:.6f} ETH" + f"{amount_source} is too low to distribute to " + f"{worker_count} workers. Available: {avail_eth:.6f} ETH, " + f"Funding cost: {fund_cost_eth:.6f} ETH" ) raise AssertionError( f""" @@ -194,10 +201,13 @@ def worker_key_funding_amount( negative value. """ ) + wk_fund_eth = worker_key_funding_amount / 10**18 + per_worker_eth = seed_sender_balance_per_worker / 10**18 + tx_cost_eth = funding_tx_cost / 10**18 logger.info( - f"Calculated worker key funding amount: {worker_key_funding_amount / 10**18:.18f} ETH " - f"({seed_sender_balance_per_worker / 10**18:.18f} ETH per worker - " - f"{funding_tx_cost / 10**18:.18f} ETH transaction cost)" + f"Calculated worker key funding amount: {wk_fund_eth:.18f} ETH " + f"({per_worker_eth:.18f} ETH per worker - " + f"{tx_cost_eth:.18f} ETH transaction cost)" ) # Write the value to the file for the rest of the workers to use. base_file.write_text(str(worker_key_funding_amount)) @@ -277,8 +287,9 @@ def session_worker_key( gas_price=sender_funding_transactions_gas_price, value=worker_key_funding_amount, ).with_signature_and_sender() + fund_eth = worker_key_funding_amount / 10**18 logger.info( - f"Preparing funding transaction: {worker_key_funding_amount / 10**18:.18f} ETH " + f"Preparing funding transaction: {fund_eth:.18f} ETH " f"from {seed_key} to {worker_key} (nonce={seed_key.nonce})" ) if not dry_run: @@ -318,15 +329,18 @@ def session_worker_key( # any other transaction that might have been sent by the sender. refund_gas_price = sender_funding_transactions_gas_price * 2 tx_cost = refund_gas_limit * refund_gas_price + tx_cost_eth = tx_cost / 10**18 + gas_gwei = refund_gas_price / 10**9 logger.debug( - f"Refund transaction cost: {tx_cost / 10**18:.18f} ETH " - f"(gas_limit={refund_gas_limit}, gas_price={refund_gas_price / 10**9:.9f} Gwei)" + f"Refund transaction cost: {tx_cost_eth:.18f} ETH " + f"(gas_limit={refund_gas_limit}, gas_price={gas_gwei:.9f} Gwei)" ) if (remaining_balance - 1) < tx_cost: + rem_eth = remaining_balance / 10**18 logger.warning( - f"Insufficient balance for refund: {remaining_balance / 10**18:.18f} ETH < " - f"{tx_cost / 10**18:.18f} ETH (transaction cost). Skipping refund." 
+ f"Insufficient balance for refund: {rem_eth:.18f} ETH < " + f"{tx_cost_eth:.18f} ETH (transaction cost). Skipping refund." ) return @@ -366,17 +380,15 @@ def worker_key( ) ) if rpc_nonce != session_worker_key.nonce: - logger.info( - f"Worker key nonce mismatch: {session_worker_key.nonce} != {rpc_nonce}" - ) + wk_nonce = session_worker_key.nonce + logger.info(f"Worker key nonce mismatch: {wk_nonce} != {rpc_nonce}") logger.info(f"Updating worker key nonce to {rpc_nonce}") session_worker_key.nonce = rpc_nonce # Record the start balance of the worker key worker_key_start_balance = eth_rpc.get_balance(session_worker_key) - logger.debug( - f"Worker key start balance: {worker_key_start_balance / 10**18:.18f} ETH" - ) + start_eth = worker_key_start_balance / 10**18 + logger.debug(f"Worker key start balance: {start_eth:.18f} ETH") yield session_worker_key @@ -385,10 +397,12 @@ def worker_key( ) final_balance = eth_rpc.get_balance(session_worker_key) used_balance = worker_key_start_balance - final_balance + used_eth = used_balance / 10**18 + start_eth = worker_key_start_balance / 10**18 + final_eth = final_balance / 10**18 logger.info( - f"Worker key {session_worker_key} used balance: {used_balance / 10**18:.18f} ETH " - f"(start: {worker_key_start_balance / 10**18:.18f} ETH, " - f"final: {final_balance / 10**18:.18f} ETH)" + f"Worker key {session_worker_key} used balance: {used_eth:.18f} ETH " + f"(start: {start_eth:.18f} ETH, final: {final_eth:.18f} ETH)" ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/tests/test_pre_alloc.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/tests/test_pre_alloc.py index 452e9e61a2..9320020157 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/tests/test_pre_alloc.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/tests/test_pre_alloc.py @@ -52,7 +52,7 @@ def test_address_stubs(input_value: Any, expected: AddressStubs) -> None: ), pytest.param( "one_address.json", - '{"DEPOSIT_CONTRACT_ADDRESS": "0x00000000219ab540356cbb839cbe05303d7705fa"}', + '{"DEPOSIT_CONTRACT_ADDRESS": "0x00000000219ab540356cbb839cbe05303d7705fa"}', # noqa: E501 AddressStubs( { "DEPOSIT_CONTRACT_ADDRESS": Address( @@ -64,7 +64,7 @@ def test_address_stubs(input_value: Any, expected: AddressStubs) -> None: ), pytest.param( "one_address.yaml", - "DEPOSIT_CONTRACT_ADDRESS: 0x00000000219ab540356cbb839cbe05303d7705fa", + "DEPOSIT_CONTRACT_ADDRESS: 0x00000000219ab540356cbb839cbe05303d7705fa", # noqa: E501 AddressStubs( { "DEPOSIT_CONTRACT_ADDRESS": Address( diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/eip_checklist.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/eip_checklist.py index 228e993133..ade7ec8328 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/eip_checklist.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/eip_checklist.py @@ -218,15 +218,17 @@ def from_items( return None details = [ - "The following checklist items were marked both as not applicable and covered:", + "The following checklist items were marked both " + "as not applicable and covered:", "", "| ID | Description | Not Applicable | Tests |", "|---|---|---|---|", ] for item in conflicting_items: + tests_str = ", ".join(sorted(item.tests)) details.append( f"| {item.id} | {item.description} | " - + f"{item.not_applicable_reason} | {', 
'.join(sorted(item.tests))} |" + f"{item.not_applicable_reason} | {tests_str} |" ) return cls(details=details) @@ -252,11 +254,6 @@ def covered_items(self) -> int: for item in self.items.values() if item.covered and not item.not_applicable ) - return sum( - 1 - for item in self.items.values() - if item.covered and not item.not_applicable - ) @property def total_items(self) -> int: @@ -315,7 +312,7 @@ def mark_not_applicable(self) -> None: ids = resolve_id(item_id) if not ids: logger.warning( - f"Item ID {item_id} not found in the checklist template, " + f"Item ID {item_id} not found in checklist template " f"for EIP {self.number}" ) continue @@ -343,7 +340,7 @@ def mark_external_coverage(self) -> None: ids = resolve_id(item_id) if not ids: logger.warning( - f"Item ID {item_id} not found in the checklist template, " + f"Item ID {item_id} not found in checklist template " f"for EIP {self.number}" ) continue @@ -362,9 +359,10 @@ def generate_filled_checklist_lines(self) -> List[str]: # Find the line with this item ID lines[checklist_item.line_number - 1] = str(checklist_item) + emoji = self.completeness_emoji + pct = f"{self.percentage:.2f}%" lines[lines.index(PERCENTAGE_LINE)] = ( - f"| {self.total_items} | {self.covered_items} | {self.completeness_emoji} " - f"{self.percentage:.2f}% |" + f"| {self.total_items} | {self.covered_items} | {emoji} {pct} |" ) # Replace the title line with the EIP number @@ -451,8 +449,8 @@ def collect_from_item( for marker in item.iter_markers("eip_checklist"): if not marker.args: pytest.fail( - f"eip_checklist marker on {item.nodeid} must have at least one argument " - "(item_id)" + f"eip_checklist marker on {item.nodeid} must have " + "at least one argument (item_id)" ) additional_eips = marker.kwargs.get("eip", []) if not isinstance(additional_eips, list): @@ -463,8 +461,8 @@ def collect_from_item( if additional_eips: if any(not isinstance(eip, int) for eip in additional_eips): pytest.fail( - "EIP numbers must be integers. Found non-integer EIPs in " - f"{item.nodeid}: {additional_eips}" + "EIP numbers must be integers. Found non-integer " + f"EIPs in {item.nodeid}: {additional_eips}" ) eips += [self.get_eip(eip) for eip in additional_eips] @@ -473,7 +471,7 @@ def collect_from_item( covered_ids = resolve_id(item_id.strip()) if not covered_ids: logger.warning( - f"Item ID {item_id} not found in the checklist template, " + f"Item ID {item_id} not found in checklist template " f"for test {item.nodeid}" ) continue diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py index e75f01f227..55a8f37c1e 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py @@ -322,9 +322,10 @@ def get_pre_alloc_group(self, hash_key: str) -> PreAllocGroup: self.fixture_output.pre_alloc_groups_folder_path / hash_key ) raise ValueError( - f"Pre-allocation hash {hash_key} not found in pre-allocation groups. " - f"Please check the pre-allocation groups file at: {pre_alloc_path}. " - "Make sure phase 1 (--generate-pre-alloc-groups) was run before phase 2." + f"Pre-allocation hash {hash_key} not found in " + f"pre-allocation groups. Please check the file at: " + f"{pre_alloc_path}. Make sure phase 1 " + "(--generate-pre-alloc-groups) was run before phase 2." 
) return self.pre_alloc_groups[hash_key] @@ -442,8 +443,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: type=Path, default=None, help=( - "Path to an evm executable (or name of an executable in the PATH) that provides `t8n`." - " Default: `ethereum-spec-evm-resolver`." + "Path to an evm executable (or name of an executable in the " + "PATH) that provides `t8n`. Default: `ethereum-spec-evm-resolver`." ), ) evm_group.addoption( @@ -453,8 +454,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: type=str, default=None, help=( - "[INTERNAL USE ONLY] URL of the t8n server to use. Used by framework tests/ci; not " - "intended for regular CLI use." + "[INTERNAL USE ONLY] URL of the t8n server to use. Used by " + "framework tests/ci; not intended for regular CLI use." ), ) evm_group.addoption( @@ -462,7 +463,7 @@ def pytest_addoption(parser: pytest.Parser) -> None: action="store_true", dest="evm_collect_traces", default=None, - help="Collect traces of the execution information from the transition tool.", + help="Collect traces of execution info from the transition tool.", ) evm_group.addoption( "--verify-fixtures", @@ -470,10 +471,12 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="verify_fixtures", default=False, help=( - "Verify generated fixture JSON files using geth's evm blocktest command. " - "By default, the same evm binary as for the t8n tool is used. A different (geth) evm " - "binary may be specified via --verify-fixtures-bin, this must be specified if filling " - "with a non-geth t8n tool that does not support blocktest." + "Verify generated fixture JSON files using geth's evm " + "blocktest command. By default, the same evm binary as for " + "the t8n tool is used. A different (geth) evm binary may be " + "specified via --verify-fixtures-bin, this must be specified " + "if filling with a non-geth t8n tool that does not support " + "blocktest." ), ) evm_group.addoption( @@ -506,11 +509,13 @@ def pytest_addoption(parser: pytest.Parser) -> None: type=Path, default=Path(default_output_directory()), help=( - "Directory path to store the generated test fixtures. Must be empty if it exists. " - "If the specified path ends in '.tar.gz', then the specified tarball is additionally " - "created (the fixtures are still written to the specified path without the '.tar.gz' " - f"suffix). Tarball output automatically enables --generate-all-formats. " - f"Can be deleted. Default: '{default_output_directory()}'." + "Directory path to store the generated test fixtures. " + "Must be empty if it exists. If the specified path ends in " + "'.tar.gz', then the specified tarball is additionally " + "created (the fixtures are still written to the specified " + "path without the '.tar.gz' suffix). Tarball output " + "automatically enables --generate-all-formats. Can be " + f"deleted. Default: '{default_output_directory()}'." ), ) test_group.addoption( @@ -526,8 +531,9 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="single_fixture_per_file", default=False, help=( - "Don't group fixtures in JSON files by test function; write each fixture to its own " - "file. This can be used to increase the granularity of --verify-fixtures." + "Don't group fixtures in JSON files by test function; write " + "each fixture to its own file. This can be used to increase " + "the granularity of --verify-fixtures." 
), ) test_group.addoption( @@ -562,8 +568,9 @@ def pytest_addoption(parser: pytest.Parser) -> None: default=EnvironmentDefaults.gas_limit, type=int, help=( - "Default gas limit used ceiling used for blocks and tests that attempt to " - f"consume an entire block's gas. (Default: {EnvironmentDefaults.gas_limit})" + "Default gas limit ceiling for blocks and tests that attempt " + f"to consume an entire block's gas. " + f"(Default: {EnvironmentDefaults.gas_limit})" ), ) test_group.addoption( @@ -586,9 +593,9 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="generate_all_formats", default=False, help=( - "Generate all fixture formats including BlockchainEngineXFixture. " - "This enables two-phase execution: Phase 1 generates pre-allocation groups, " - "phase 2 generates all supported fixture formats." + "Generate all fixture formats including BlockchainEngineX. " + "Enables two-phase execution: Phase 1 generates pre-allocation " + "groups, phase 2 generates all supported fixture formats." ), ) @@ -602,9 +609,9 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="optimize_gas", default=False, help=( - "Attempt to optimize the gas used in every transaction for the filled tests, " - "then print the minimum amount of gas at which the test still produces a correct " - "post state and the exact same trace." + "Attempt to optimize gas used in every transaction for filled " + "tests, then print the minimum gas at which the test still " + "produces a correct post state and the exact same trace." ), ) optimize_gas_group.addoption( @@ -625,8 +632,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: default=None, type=int, help=( - "Maximum gas limit for gas optimization, if reached the search will stop and " - "fail for that given test. Requires `--optimize-gas`." + "Maximum gas limit for gas optimization, if reached the search " + "will stop and fail for that test. Requires `--optimize-gas`." ), ) optimize_gas_group.addoption( @@ -635,9 +642,9 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="optimize_gas_post_processing", default=False, help=( - "Post process the traces during gas optimization in order to Account for " - "opcodes that put the current gas in the stack, in order to remove " - "remaining-gas from the comparison." + "Post process traces during gas optimization to account for " + "opcodes that put the current gas in the stack, in order to " + "remove remaining-gas from the comparison." 
), ) @@ -755,8 +762,8 @@ def pytest_configure(config: pytest.Config) -> None: and not t8n.supports_xdist ): pytest.exit( - f"The {t8n.__class__.__name__} t8n tool does not work well with the xdist plugin;" - "use -n=0.", + f"The {t8n.__class__.__name__} t8n tool does not work well " + "with the xdist plugin; use -n=0.", returncode=pytest.ExitCode.USAGE_ERROR, ) config.t8n = t8n # type: ignore[attr-defined] @@ -849,8 +856,8 @@ def pytest_terminal_summary( terminalreporter.write_sep( "=", - f" Phase 1 Complete: Generated {total_groups} pre-allocation groups " - f"({total_accounts} total accounts) ", + f" Phase 1 Complete: Generated {total_groups} pre-alloc " + f"groups ({total_accounts} total accounts) ", bold=True, green=True, ) @@ -862,8 +869,8 @@ def pytest_terminal_summary( terminalreporter.write_sep( "=", ( - f' No tests executed - the test fixtures in "{output_dir}" may now be ' - "executed against a client " + f" No tests executed - the test fixtures in " + f'"{output_dir}" may now be executed against a client ' ), bold=True, yellow=True, @@ -879,7 +886,8 @@ def pytest_html_results_table_header(cells: Any) -> None: """Customize the table headers of the HTML report table.""" cells.insert( 3, - 'JSON Fixture File', + '' + "JSON Fixture File", ) cells.insert( 4, @@ -899,7 +907,10 @@ def pytest_html_results_table_row(report: Any, cells: Any) -> None: ): fixture_path_absolute = user_props["fixture_path_absolute"] fixture_path_relative = user_props["fixture_path_relative"] - fixture_path_link = f'{fixture_path_relative}' + fixture_path_link = ( + f'' + f"{fixture_path_relative}" + ) cells.insert(3, f"{fixture_path_link}") elif report.failed: cells.insert(3, "Fixture unavailable") @@ -907,14 +918,18 @@ def pytest_html_results_table_row(report: Any, cells: Any) -> None: if user_props["evm_dump_dir"] is None: cells.insert( 4, - "For t8n debug info use --evm-dump-dir=path --traces", + "For t8n debug info use " + "--evm-dump-dir=path --traces", ) else: evm_dump_dir = user_props.get("evm_dump_dir") if evm_dump_dir == "N/A": evm_dump_entry = "N/A" else: - evm_dump_entry = f'{evm_dump_dir}' + evm_dump_entry = ( + f'' + f"{evm_dump_dir}" + ) cells.insert(4, f"{evm_dump_entry}") del cells[-1] # Remove the "Links" column @@ -984,11 +999,12 @@ def t8n( """Return configured transition tool.""" t8n: TransitionTool = request.config.t8n # type: ignore if not t8n.exception_mapper.reliable: + t8n_name = t8n.__class__.__name__ warnings.warn( - f"The t8n tool that is currently being used to fill tests ({t8n.__class__.__name__}) " - "does not provide reliable exception messages. This may lead to false positives when " - "writing tests and extra care should be taken when writing tests that produce " - "exceptions.", + f"The t8n tool being used to fill tests ({t8n_name}) " + "does not provide reliable exception messages. This may lead to " + "false positives when writing tests and extra care should be " + "taken when writing tests that produce exceptions.", stacklevel=2, ) yield t8n @@ -1039,16 +1055,18 @@ def evm_fixture_verification( except Exception: if reused_evm_bin: pytest.exit( - "The binary specified in --evm-bin could not be recognized as a known " - "FixtureConsumerTool. Either remove --verify-fixtures or set " - "--verify-fixtures-bin to a known fixture consumer binary.", + "The binary specified in --evm-bin could not be recognized " + "as a known FixtureConsumerTool. 
Either remove " + "--verify-fixtures or set --verify-fixtures-bin to a known " + "fixture consumer binary.", returncode=pytest.ExitCode.USAGE_ERROR, ) else: pytest.exit( - "Specified binary in --verify-fixtures-bin could not be recognized as a known " - "FixtureConsumerTool. Please see `GethFixtureConsumer` for an example " - "of how a new fixture consumer can be defined.", + "Specified binary in --verify-fixtures-bin could not be " + "recognized as a known FixtureConsumerTool. Please see " + "`GethFixtureConsumer` for an example of how a new fixture " + "consumer can be defined.", returncode=pytest.ExitCode.USAGE_ERROR, ) yield evm_fixture_verification @@ -1119,7 +1137,8 @@ def create_properties_file( config[key.lower()] = val else: warnings.warn( - f"Fixtures ini file: Skipping metadata key {key} with value {val}.", + f"Fixtures ini file: Skipping metadata key {key} " + f"with value {val}.", stacklevel=2, ) config["environment"] = environment_properties @@ -1274,7 +1293,10 @@ def fixture_source_url( test_module_relative_path, branch_or_commit_or_tag=commit_hash_or_tag, ) - github_url += f" called via `{request.node.originalname}()` in {test_module_github_url}" + github_url += ( + f" called via `{request.node.originalname}()` " + f"in {test_module_github_url}" + ) return github_url @@ -1319,6 +1341,7 @@ def base_test_parametrizer_func( When parametrize, indirect must be used along with the fixture format as value. """ + del fixed_opcode_count if hasattr(request.node, "fixture_format"): fixture_format = request.node.fixture_format else: diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/fixture_output.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/fixture_output.py index d08064b5aa..110f093434 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/fixture_output.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/fixture_output.py @@ -27,7 +27,7 @@ class FixtureOutput(BaseModel): ) clean: bool = Field( default=False, - description="Clean (remove) the output directory before filling fixtures.", + description="Clean (remove) output directory before filling.", ) generate_pre_alloc_groups: bool = Field( default=False, @@ -39,7 +39,7 @@ class FixtureOutput(BaseModel): ) should_generate_all_formats: bool = Field( default=False, - description="Generate all fixture formats including BlockchainEngineXFixture.", + description="Generate all formats including BlockchainEngineXFixture.", ) @property @@ -190,9 +190,10 @@ def create_directories(self, is_master: bool) -> None: if self.generate_pre_alloc_groups: raise ValueError( - f"Output directory '{self.directory}' must be completely empty for " - f"pre-allocation group generation (phase 1). Contains: {summary}. " - "Use --clean to remove all existing files." + f"Output directory '{self.directory}' must be completely " + f"empty for pre-allocation group generation (phase 1). " + f"Contains: {summary}. Use --clean to remove all " + "existing files." ) elif self.use_pre_alloc_groups: if not self.pre_alloc_groups_folder_path.exists(): @@ -204,8 +205,8 @@ def create_directories(self, is_master: bool) -> None: else: raise ValueError( f"Output directory '{self.directory}' is not empty. " - f"Contains: {summary}. Use --clean to remove all existing files " - "or specify a different output directory." + f"Contains: {summary}. Use --clean to remove all " + "existing files or specify a different output directory." 
) # Create directories diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/gen_test_doc.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/gen_test_doc.py index 235c7d49c2..216bc07422 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/gen_test_doc.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/gen_test_doc.py @@ -88,7 +88,9 @@ def pytest_addoption(parser: pytest.Parser) -> None: # noqa: D103 action="store_true", dest="gen_docs", default=False, - help="Generate documentation for all collected tests for use in for mkdocs", + help=( + "Generate documentation for all collected tests for use in mkdocs" + ), ) gen_docs.addoption( "--gen-docs-target-fork", @@ -96,8 +98,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: # noqa: D103 dest="gen_docs_target_fork", default=None, help=( - "The default fork to use generated in generated doc pages. Should be the name of the " - "next upcoming fork." + "The default fork to use generated in generated doc pages. " + "Should be the name of the next upcoming fork." ), ) @@ -193,9 +195,10 @@ def get_docstring_one_liner(item: pytest.Item) -> str: docstring in docstring_test_function_history and docstring_test_function_history[docstring] != test_function_id ): + history_id = docstring_test_function_history[docstring] logger.info( f"Duplicate docstring for {test_function_id}: " - f"{docstring_test_function_history[docstring]} and {test_function_id}" + f"{history_id} and {test_function_id}" ) else: docstring_test_function_history[docstring] = test_function_id @@ -233,7 +236,9 @@ def get_test_function_test_type(item: pytest.Item) -> str: logger.warning( f"Could not determine the test function type for {item.nodeid}" ) - return f"unknown ([📖🐛]({create_github_issue_url('docs(bug): unknown test function type')}))" + issue_title = "docs(bug): unknown test function type" + issue_url = create_github_issue_url(issue_title) + return f"unknown ([📖🐛]({issue_url}))" class TestDocsGenerator: @@ -354,15 +359,16 @@ def get_doc_site_base_url(self) -> str: return f"/execution-spec-tests/{github_ref_name}/" if ci and not github_ref_name: raise Exception( - "Failed to determine target doc version (no GITHUB_REF_NAME env?)." + "Failed to determine target doc version " + "(no GITHUB_REF_NAME env?)." ) if ( "--strict" in sys.argv or "deploy" in sys.argv ) and not doc_version: # assume we're trying to deploy manually via mike (locally) raise Exception( - "Failed to determine target doc version during strict build (set " - "GEN_TEST_DOC_VERSION env var)." + "Failed to determine target doc version during strict build " + "(set GEN_TEST_DOC_VERSION env var)." ) # local test build, e.g. via `uv run mkdocs serve` return "/execution-spec-tests/" diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/page_props.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/page_props.py index 9e44233cfc..c286b10a03 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/page_props.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/gen_test_doc/page_props.py @@ -26,10 +26,11 @@ def apply_name_filters(input_string: str) -> str: Apply a list of capitalizations/regexes to names used in titles & nav menus. 
- Note: As of 2024-10-08, with 634 doc pages, this function constitutes ~2.0s - of the total runtime (~5.5s). This seems to be insignificant with the time - taken by mkdocstrings to include the docstrings in the final output, which - is a separate mkdocs "build-step" that occurs outside the scope of this plugin. + Note: As of 2024-10-08, with 634 doc pages, this function constitutes + ~2.0s of the total runtime (~5.5s). This seems to be insignificant with + the time taken by mkdocstrings to include the docstrings in the final + output, which is a separate mkdocs "build-step" that occurs outside the + scope of this plugin. """ word_replacements = { "acl": "ACL", diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/ported_tests.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/ported_tests.py index 9c9196f567..e6da59b07d 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/ported_tests.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/ported_tests.py @@ -73,8 +73,9 @@ def pytest_addoption(parser: pytest.Parser) -> None: const="paths", help=( "Show information from @pytest.mark.ported_from markers. " - "Use '--show-ported-from' or '--show-ported-from=paths' to show static filler paths. " - "Use '--show-ported-from=prs' to show PR URLs." + "Use '--show-ported-from' or '--show-ported-from=paths' to show " + "static filler paths. Use '--show-ported-from=prs' to show PR " + "URLs." ), ) ported_from_group.addoption( @@ -84,10 +85,11 @@ def pytest_addoption(parser: pytest.Parser) -> None: default=False, help=( "When using --show-ported-from, exclude tests that have " - "coverage_missed_reason in their @pytest.mark.ported_from marker. " - "These are tests that were intentionally not ported from the original " - "static filler files, typically because they are redundant or obsolete. " - "This helps filter out accepted coverage gaps when analyzing test coverage." + "coverage_missed_reason in their @pytest.mark.ported_from " + "marker. These are tests that were intentionally not ported " + "from the original static filler files, typically because they " + "are redundant or obsolete. This helps filter out accepted " + "coverage gaps when analyzing test coverage." ), ) ported_from_group.addoption( @@ -103,8 +105,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="links_as_filled", default=False, help=( - "Convert URLs or paths to filled test file paths for coverage script. " - "Used in combination with --show-ported-from." + "Convert URLs or paths to filled test file paths for coverage " + "script. Used in combination with --show-ported-from." ), ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/pre_alloc.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/pre_alloc.py index ce75f76131..dd60772f9e 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/pre_alloc.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/pre_alloc.py @@ -56,7 +56,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="strict_alloc", default=False, help=( - "[DEBUG ONLY] Disallows deploying a contract in a predefined address." + "[DEBUG ONLY] Disallows deploying a contract in a predefined " + "address." 
), ) pre_alloc_group.addoption( @@ -66,7 +67,7 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="test_contract_start_address", default=f"{CONTRACT_START_ADDRESS_DEFAULT}", type=str, - help="The starting address from which tests will deploy contracts.", + help="Starting address from which tests will deploy contracts.", ) pre_alloc_group.addoption( "--ca-incr", @@ -75,7 +76,7 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="test_contract_address_increments", default=f"{CONTRACT_ADDRESS_INCREMENTS_DEFAULT}", type=str, - help="The address increment value for each deployed contract by a test.", + help="Address increment value for each deployed contract by a test.", ) @@ -511,10 +512,13 @@ def eoa_iterator( ) or request.config.getoption("use_pre_alloc_groups", default=False): # Use a starting address that is derived from the test node eoa_start_pk = sha256_from_string(node_id_for_entropy) + # secp256k1 curve order constant + curve_order = ( # noqa: E501 + 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 + ) return iter( EOA( - key=(eoa_start_pk + i) - % 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141, + key=(eoa_start_pk + i) % curve_order, nonce=0, ) for i in count() diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/static_filler.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/static_filler.py index 85fc21d4c4..dde15b02a1 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/static_filler.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/static_filler.py @@ -213,12 +213,11 @@ def collect(self: "FillerFile") -> Generator["FillerTestItem", None, None]: test_type.pytest_parameter_name() ) session = self.config.filling_session # type: ignore[attr-defined] + supported = test_type.supported_fixture_formats fixture_formats.extend( - fixture_format - for fixture_format in test_type.supported_fixture_formats - if session.should_generate_format( - fixture_format - ) + fmt + for fmt in supported + if session.should_generate_format(fmt) ) test_fork_set = ( @@ -230,9 +229,9 @@ def collect(self: "FillerFile") -> Generator["FillerTestItem", None, None]: pytest.fail( "The test function's " f"'{key}' fork validity markers generate " - "an empty fork range. Please check the arguments to its " - f"markers: @pytest.mark.valid_from and " - f"@pytest.mark.valid_until." + "an empty fork range. Please check the arguments " + "to its markers: @pytest.mark.valid_from and " + "@pytest.mark.valid_until." ) intersection_set = ( test_fork_set & self.config.selected_fork_set # type: ignore @@ -273,7 +272,8 @@ def collect(self: "FillerFile") -> Generator["FillerTestItem", None, None]: for mark in fixture_format_parameter_set.marks if mark.name != "parametrize" ] - test_id = f"fork_{fork.name()}-{fixture_format_parameter_set.id}" + ps_id = fixture_format_parameter_set.id + test_id = f"fork_{fork.name()}-{ps_id}" if "fork" in func_parameters: params["fork"] = fork if "pre" in func_parameters: @@ -409,10 +409,11 @@ def yul(fork: Fork, request: pytest.FixtureRequest) -> Type[Yul]: """ Fixture that allows contract code to be defined with Yul code. - This fixture defines a class that wraps the ::execution_testing.tools.Yul class - so that upon instantiation within the test case, it provides the test - case's current fork parameter. The fork is then available for use in - solc's arguments for the Yul code compilation. 
+ This fixture defines a class that wraps the + ::execution_testing.tools.Yul class so that upon instantiation within + the test case, it provides the test case's current fork parameter. + The fork is then available for use in solc's arguments for the Yul + code compilation. Test cases can override the default value by specifying a fixed version with the @pytest.mark.compile_yul_with(FORK) marker. @@ -424,16 +425,20 @@ def yul(fork: Fork, request: pytest.FixtureRequest) -> Type[Yul]: ) if marker: if not marker.args[0]: + node_name = request.node.name pytest.fail( - f"{request.node.name}: Expected one argument in 'compile_yul_with' marker." + f"{node_name}: Expected one argument in " + "'compile_yul_with' marker." ) for fork in request.config.all_forks: # type: ignore if fork.name() == marker.args[0]: solc_target_fork = fork break else: + node_name = request.node.name + fork_arg = marker.args[0] pytest.fail( - f"{request.node.name}: Fork {marker.args[0]} not found in forks list." + f"{node_name}: Fork {fork_arg} not found in forks list." ) else: solc_target_fork = get_closest_fork(fork) @@ -444,8 +449,10 @@ def yul(fork: Fork, request: pytest.FixtureRequest) -> Type[Yul]: solc_target_fork != fork and request.config.getoption("verbose") >= 1 ): + solc_name = solc_target_fork.name() + fork_name = fork.name() warnings.warn( - f"Compiling Yul for {solc_target_fork.name()}, not {fork.name()}.", + f"Compiling Yul for {solc_name}, not {fork_name}.", stacklevel=2, ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/conftest.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/conftest.py index 381c85f193..4c7920f5f6 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/conftest.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/conftest.py @@ -7,7 +7,11 @@ @pytest.fixture def restore_environment_defaults() -> Generator[None, None, None]: - """Restore EnvironmentDefaults.gas_limit after test runs to prevent side effects.""" + """ + Restore EnvironmentDefaults.gas_limit after tests. + + Restore the gas limit after the test run to prevent side effects. 
+ """ from execution_testing.test_types.block_types import EnvironmentDefaults original_gas_limit = EnvironmentDefaults.gas_limit diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_benchmarking.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_benchmarking.py index 30363d66ff..2138aa6a11 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_benchmarking.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_benchmarking.py @@ -31,7 +31,7 @@ def test_dummy_no_benchmark_test(benchmark_test: BenchmarkTestFiller) -> None: target_opcode=Op.JUMPDEST, code_generator=JumpLoopGenerator(attack_block=Op.JUMPDEST), ) - """ + """ # noqa: E501 ) test_module_with_repricing = textwrap.dedent( @@ -48,12 +48,14 @@ def test_benchmark_with_repricing(benchmark_test: BenchmarkTestFiller) -> None: ) @pytest.mark.valid_at("Prague") - def test_benchmark_without_repricing(benchmark_test: BenchmarkTestFiller) -> None: + def test_benchmark_without_repricing( + benchmark_test: BenchmarkTestFiller + ) -> None: benchmark_test( target_opcode=Op.JUMPDEST, code_generator=JumpLoopGenerator(attack_block=Op.JUMPDEST), ) - """ + """ # noqa: E501 ) test_module_without_benchmark_test_fixture = textwrap.dedent( @@ -73,7 +75,7 @@ def test_with_benchmark_test(benchmark_test: BenchmarkTestFiller) -> None: target_opcode=Op.JUMPDEST, code_generator=JumpLoopGenerator(attack_block=Op.JUMPDEST), ) - """ + """ # noqa: E501 ) test_module_with_repricing_kwargs = textwrap.dedent( @@ -250,7 +252,7 @@ def test_repricing_marker_filter_with_benchmark_options( pytester, test_module_with_repricing, "test_repricing_filter.py" ) - # Test with -m repricing filter - should only collect repricing-marked tests + # Test with -m repricing filter - should only collect repricing tests result = pytester.runpytest( "-c", "pytest-fill.ini", @@ -344,7 +346,7 @@ def test_repricing_marker_with_kwargs_filters_parametrized_tests( ) assert result.ret == 0 - # For test with repricing(opcode=Op.ADD), only ADD variant should be collected + # For repricing(opcode=Op.ADD), only ADD variant should be collected collected_lines = [ line for line in result.outlines if "test_parametrized" in line ] @@ -361,7 +363,7 @@ def test_repricing_marker_with_kwargs_filters_parametrized_tests( assert not any("SUB" in line for line in kwargs_test_lines) assert not any("MUL" in line for line in kwargs_test_lines) - # test_parametrized_with_repricing_no_kwargs should have all variants (ADD and SUB) + # test_parametrized_with_repricing_no_kwargs: all variants (ADD and SUB) no_kwargs_test_lines = [ line for line in collected_lines diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_generate_all_formats.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_generate_all_formats.py index c4254008fd..1f480270a2 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_generate_all_formats.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_generate_all_formats.py @@ -2,7 +2,7 @@ from typing import Any -from execution_testing.cli.pytest_commands.plugins.filler.fixture_output import ( +from execution_testing.cli.pytest_commands.plugins.filler.fixture_output import ( # noqa: E501 FixtureOutput, ) diff --git 
a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_prealloc_group.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_prealloc_group.py index d576807821..650f8fb2a2 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_prealloc_group.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_prealloc_group.py @@ -280,7 +280,7 @@ def test_chainid(state_test: StateTestFiller, pre: Alloc) -> None: }} state_test(env={env}, pre=pre, post=post, tx=tx) - """ + """ # noqa: E501 ) @@ -322,7 +322,7 @@ def test_chainid_blockchain(blockchain_test: BlockchainTestFiller, pre: Alloc) - post=post, blocks=[Block(txs=[tx])], ) - """ + """ # noqa: E501 ) @@ -397,8 +397,9 @@ def test_chainid_blockchain(blockchain_test: BlockchainTestFiller, pre: Alloc) - 2, id="different_extra_data_different_types", marks=pytest.mark.xfail( - reason="Extra data is excluded=True in the Environment model, so it does not " - "propagate correctly to the genesis header without a lot of code changes.", + reason="Extra data is excluded=True in the Environment " + "model, so it does not propagate correctly to the genesis " + "header without a lot of code changes.", ), ), # Environment fields affecting the pre-alloc groups @@ -478,45 +479,49 @@ def test_pre_alloc_grouping_by_test_type( != expected_different_pre_alloc_groups ): error_message = ( - f"Expected {expected_different_pre_alloc_groups} different pre-alloc groups, " - f"but got {len(groups)}" + f"Expected {expected_different_pre_alloc_groups} different " + f"pre-alloc groups, but got {len(groups)}" ) for group_hash, group in groups.items(): error_message += f"\n{group_hash}: \n" error_message += f"tests: {group.test_ids}\n" - error_message += f"env: {group.environment.model_dump_json(indent=2, exclude_none=True)}\n" + env_json = group.environment.model_dump_json( + indent=2, exclude_none=True + ) + error_message += f"env: {env_json}\n" raise AssertionError(error_message) for group_hash, group in groups.items(): assert ( group.environment.fee_recipient == group.genesis.fee_recipient ), ( - f"Fee recipient mismatch for group {group_hash}: {group.environment.fee_recipient} != " + f"Fee recipient mismatch for group {group_hash}: " + f"{group.environment.fee_recipient} != " f"{group.genesis.fee_recipient}" ) assert group.environment.prev_randao == group.genesis.prev_randao, ( - f"Prev randao mismatch for group {group_hash}: {group.environment.prev_randao} != " - f"{group.genesis.prev_randao}" + f"Prev randao mismatch for group {group_hash}: " + f"{group.environment.prev_randao} != {group.genesis.prev_randao}" ) assert group.environment.extra_data == group.genesis.extra_data, ( - f"Extra data mismatch for group {group_hash}: {group.environment.extra_data} != " - f"{group.genesis.extra_data}" + f"Extra data mismatch for group {group_hash}: " + f"{group.environment.extra_data} != {group.genesis.extra_data}" ) assert group.environment.number == group.genesis.number, ( - f"Number mismatch for group {group_hash}: {group.environment.number} != " - f"{group.genesis.number}" + f"Number mismatch for group {group_hash}: " + f"{group.environment.number} != {group.genesis.number}" ) assert group.environment.timestamp == group.genesis.timestamp, ( - f"Timestamp mismatch for group {group_hash}: {group.environment.timestamp} != " - f"{group.genesis.timestamp}" + f"Timestamp mismatch for group {group_hash}: " + f"{group.environment.timestamp} != 
{group.genesis.timestamp}" ) assert group.environment.difficulty == group.genesis.difficulty, ( - f"Difficulty mismatch for group {group_hash}: {group.environment.difficulty} != " - f"{group.genesis.difficulty}" + f"Difficulty mismatch for group {group_hash}: " + f"{group.environment.difficulty} != {group.genesis.difficulty}" ) assert group.environment.gas_limit == group.genesis.gas_limit, ( - f"Gas limit mismatch for group {group_hash}: {group.environment.gas_limit} != " - f"{group.genesis.gas_limit}" + f"Gas limit mismatch for group {group_hash}: " + f"{group.environment.gas_limit} != {group.genesis.gas_limit}" ) assert ( group.environment.base_fee_per_gas @@ -536,7 +541,8 @@ def test_pre_alloc_grouping_by_test_type( assert ( group.environment.blob_gas_used == group.genesis.blob_gas_used ), ( - f"Blob gas used mismatch for group {group_hash}: {group.environment.blob_gas_used} != " + f"Blob gas used mismatch for group {group_hash}: " + f"{group.environment.blob_gas_used} != " f"{group.genesis.blob_gas_used}" ) assert ( diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_prealloc_group_usage_example.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_prealloc_group_usage_example.py index af82827f3d..999d46f4cb 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_prealloc_group_usage_example.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_prealloc_group_usage_example.py @@ -11,7 +11,8 @@ # Example 1: Test that deploys beacon root contract with hardcoded deployer @pytest.mark.pre_alloc_group( "separate", - reason="Deploys beacon root contract using actual hardcoded deployer address", + reason="Deploys beacon root contract using actual hardcoded " + "deployer address", ) def test_beacon_root_contract_deployment() -> None: """ diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_slow_marker_pre_alloc.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_slow_marker_pre_alloc.py index cd51c8f365..0d45f4e762 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_slow_marker_pre_alloc.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_slow_marker_pre_alloc.py @@ -21,7 +21,7 @@ def test_slow_without_benchmark(state_test: StateTestFiller, pre: Alloc) -> None contract = pre.deploy_contract(code=b"") tx = Transaction(sender=sender, to=contract, gas_limit=100000) state_test(pre=pre, tx=tx, post={}) - """ + """ # noqa: E501 ) # Create test directory structure @@ -67,7 +67,7 @@ def test_slow_with_benchmark(state_test: StateTestFiller, pre: Alloc) -> None: contract = pre.deploy_contract(code=b"") tx = Transaction(sender=sender, to=contract, gas_limit=100000) state_test(pre=pre, tx=tx, post={}) - """ + """ # noqa: E501 ) # Create test directory structure @@ -112,7 +112,7 @@ def test_slow_with_existing_pre_alloc(state_test: StateTestFiller, pre: Alloc) - contract = pre.deploy_contract(code=b"") tx = Transaction(sender=sender, to=contract, gas_limit=100000) state_test(pre=pre, tx=tx, post={}) - """ + """ # noqa: E501 ) # Create test directory structure @@ -203,7 +203,7 @@ def test_slow_for_integration(state_test: StateTestFiller, pre: Alloc) -> None: contract = pre.deploy_contract(code=b"") tx = Transaction(sender=sender, to=contract, gas_limit=100000) state_test(pre=pre, 
tx=tx, post={}) - """ + """ # noqa: E501 ) # Create proper directory structure for tests diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_verify_sync_marker.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_verify_sync_marker.py index 0f49f6dfa3..4e4fff3340 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_verify_sync_marker.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_verify_sync_marker.py @@ -59,7 +59,7 @@ def test_verify_sync_with_param_marks(blockchain_test, has_exception) -> None: ], ) - """ + """ # noqa: E501 ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/witness.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/witness.py index 0059667591..f150dbc74a 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/witness.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/witness.py @@ -53,8 +53,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="witness", default=False, help=( - "Generate execution witness data for blockchain test fixtures using the " - "witness-filler tool (must be installed separately)." + "Generate execution witness data for blockchain test fixtures " + "using the witness-filler tool (must be installed separately)." ), ) @@ -69,10 +69,11 @@ def pytest_configure(config: pytest.Config) -> None: if config.getoption("witness"): # Check if witness-filler binary is available in PATH if not shutil.which("witness-filler"): + repo = "https://github.com/kevaundray/reth.git" # noqa: E501 pytest.exit( - "witness-filler tool not found in PATH. Please build and install witness-filler " - "from https://github.com/kevaundray/reth.git before using --witness flag.\n" - "Example: cargo install --git https://github.com/kevaundray/reth.git " + "witness-filler tool not found in PATH. Please build and " + f"install witness-filler from {repo} before using " + f"--witness flag.\nExample: cargo install --git {repo} " "witness-filler", 1, ) @@ -119,8 +120,8 @@ def generate_witness(fixture: BlockchainFixture) -> None: if result.returncode != 0: raise RuntimeError( - f"witness-filler tool failed with exit code {result.returncode}. " - f"stderr: {result.stderr}" + f"witness-filler tool failed with exit code " + f"{result.returncode}. stderr: {result.stderr}" ) try: @@ -135,9 +136,11 @@ def generate_witness(fixture: BlockchainFixture) -> None: if isinstance(block, FixtureBlock): block.execution_witness = witness except Exception as e: + output = result.stdout[:500] + suffix = "..." if len(result.stdout) > 500 else "" raise RuntimeError( f"Failed to parse witness data from witness-filler tool. " - f"Output was: {result.stdout[:500]}{'...' 
if len(result.stdout) > 500 else ''}" + f"Output was: {output}{suffix}" ) from e return generate_witness diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py index 115a94aa78..faabc10500 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py @@ -25,9 +25,6 @@ from pytest import Mark, Metafunc from execution_testing.client_clis import TransitionTool -from execution_testing.logging import ( - get_logger, -) from execution_testing.forks import ( ALL_FORKS, ALL_FORKS_WITH_TRANSITIONS, @@ -39,6 +36,9 @@ get_transition_forks, transition_fork_to, ) +from execution_testing.logging import ( + get_logger, +) logger = get_logger(__name__) @@ -384,22 +384,28 @@ def covariant_decorator( fork_covariant_decorators: List[Type[CovariantDecorator]] = [ covariant_decorator( marker_name="with_all_tx_types", - description="marks a test to be parametrized for all tx types at parameter named tx_type" - " of type int", + description=( + "marks a test to be parametrized for all tx types at parameter " + "named tx_type of type int" + ), fork_attribute_name="tx_types", argnames=["tx_type"], ), covariant_decorator( marker_name="with_all_contract_creating_tx_types", - description="marks a test to be parametrized for all tx types that can create a contract" - " at parameter named tx_type of type int", + description=( + "marks a test to be parametrized for all tx types that can " + "create a contract at parameter named tx_type of type int" + ), fork_attribute_name="contract_creating_tx_types", argnames=["tx_type"], ), covariant_decorator( marker_name="with_all_typed_transactions", - description="marks a test to be parametrized with default typed transactions named " - "typed_transaction", + description=( + "marks a test to be parametrized with default typed " + "transactions named typed_transaction" + ), fork_attribute_name="tx_types", argnames=["typed_transaction"], # indirect means the values from `tx_types` will be passed to the @@ -408,29 +414,37 @@ def covariant_decorator( ), covariant_decorator( marker_name="with_all_precompiles", - description="marks a test to be parametrized for all precompiles at parameter named" - " precompile of type int", + description=( + "marks a test to be parametrized for all precompiles at " + "parameter named precompile of type int" + ), fork_attribute_name="precompiles", argnames=["precompile"], ), covariant_decorator( marker_name="with_all_call_opcodes", - description="marks a test to be parametrized for all *CALL opcodes at parameter named" - " call_opcode", + description=( + "marks a test to be parametrized for all *CALL opcodes at " + "parameter named call_opcode" + ), fork_attribute_name="call_opcodes", argnames=["call_opcode"], ), covariant_decorator( marker_name="with_all_create_opcodes", - description="marks a test to be parametrized for all *CREATE* opcodes at parameter named" - " create_opcode", + description=( + "marks a test to be parametrized for all *CREATE* opcodes at " + "parameter named create_opcode" + ), fork_attribute_name="create_opcodes", argnames=["create_opcode"], ), covariant_decorator( marker_name="with_all_system_contracts", - description="marks a test to be parametrized for all system contracts at parameter named" - " system_contract of type int", + description=( + "marks a test to be parametrized for all 
system contracts at " + "parameter named system_contract of type int" + ), fork_attribute_name="system_contracts", argnames=["system_contract"], ), @@ -468,8 +482,9 @@ def pytest_configure(config: pytest.Config) -> None: config.addinivalue_line( "markers", ( - "parametrize_by_fork(names, values_fn): parametrize a test case by fork using the " - "specified names and values returned by the function values_fn(fork)" + "parametrize_by_fork(names, values_fn): parametrize a test case " + "by fork using the specified names and values returned by the " + "function values_fn(fork)" ), ) for d in fork_covariant_decorators: @@ -526,7 +541,8 @@ def get_fork_option( if single_fork and (forks_from or forks_until): print( - "Error: --fork cannot be used in combination with --from or --until", + "Error: --fork cannot be used in combination " + "with --from or --until", file=sys.stderr, ) pytest.exit( @@ -546,11 +562,13 @@ def get_fork_option( getattr(config, "single_fork_mode", False) and len(selected_fork_set) != 1 ): + fork_count = len(selected_fork_set) pytest.exit( f""" - Expected exactly one fork to be specified, got {len(selected_fork_set)} + Expected exactly one fork to be specified, got {fork_count} ({selected_fork_set}). - Make sure to specify exactly one fork using the --fork command line argument. + Make sure to specify exactly one fork using the --fork + command line argument. """, returncode=pytest.ExitCode.USAGE_ERROR, ) @@ -628,7 +646,8 @@ def session_fork(request: pytest.FixtureRequest) -> Fork | None: ): return list(request.config.selected_fork_set)[0] # type: ignore raise AssertionError( - "Plugin used `session_fork` fixture without the correct configuration (single_fork_mode)." + "Plugin used `session_fork` fixture without the correct " + "configuration (single_fork_mode)." ) @@ -1072,8 +1091,8 @@ def pytest_generate_tests(metafunc: pytest.Metafunc) -> None: marks=[ pytest.mark.skip( reason=( - f"{test_name} is not valid for any of the forks specified on " - "the command-line." + f"{test_name} is not valid for any of the " + "forks specified on the command-line." ) ) ], @@ -1209,11 +1228,12 @@ def pytest_collection_modifyitems( """ Filter tests based on param-level validity markers. - The pytest_generate_tests hook only considers function-level validity markers. - This hook runs after parametrization and can access all markers including - param-level ones, allowing us to properly filter tests based on param-level - valid_from/valid_until markers. + The pytest_generate_tests hook only considers function-level validity + markers. This hook runs after parametrization and can access all markers + including param-level ones, allowing us to properly filter tests based on + param-level valid_from/valid_until markers. 
""" + del config items_to_remove = [] for i, item in enumerate(items): diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_bad_command_line_options.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_bad_command_line_options.py index 5a5b286eb9..2dddbaca09 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_bad_command_line_options.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_bad_command_line_options.py @@ -9,7 +9,10 @@ "from_nonexistent_fork", ( ("--from", "Marge"), # codespell:ignore marge - "Unsupported fork provided to --from: Marge", # codespell:ignore marge + ( + "Unsupported fork provided to --from: " + "Marge" # codespell:ignore marge + ), ), ), ( diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_bad_validity_markers.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_bad_validity_markers.py index 4e8aa2d4be..6c3093a60b 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_bad_validity_markers.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_bad_validity_markers.py @@ -176,7 +176,10 @@ def test_case(state_test): def test_case(state_test): assert 0 """, - "The markers 'valid_from' and 'valid_at_transition_to' can't be combined", + ( + "The markers 'valid_from' and 'valid_at_transition_to' " + "can't be combined" + ), ), ), ( @@ -189,7 +192,10 @@ def test_case(state_test): def test_case(state_test): assert 0 """, - "The markers 'valid_until' and 'valid_at_transition_to' can't be combined", + ( + "The markers 'valid_until' and 'valid_at_transition_to' " + "can't be combined" + ), ), ), ( @@ -274,7 +280,7 @@ def test_case(state_test, value): @pytest.mark.valid_until("Prague") def test_case(state_test, value): assert 1 - """, + """, # noqa: E501 "Too many 'valid_until' markers applied to test", ), ), diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_covariant_markers.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_covariant_markers.py index a256bb539d..ff2cf4f316 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_covariant_markers.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_covariant_markers.py @@ -29,7 +29,7 @@ def test_case(state_test, tx_type): @pytest.mark.state_test_only def test_case(state_test, tx_type): pass - """, + """, # noqa: E501 {"passed": 2, "failed": 0, "skipped": 0, "errors": 0}, None, id="with_all_tx_types_with_selector", @@ -45,7 +45,7 @@ def test_case(state_test, tx_type): @pytest.mark.state_test_only def test_case(state_test, tx_type): assert tx_type != 1 - """, + """, # noqa: E501 { "passed": 2, "xpassed": 0, @@ -115,7 +115,7 @@ def test_case(request, state_test, tx_type): assert "state_test" in mark_names if tx_type == 1: assert "slow" in mark_names - """, + """, # noqa: E501 { "passed": 2, "xpassed": 1, @@ -192,7 +192,7 @@ def test_case(state_test, call_opcode): @pytest.mark.state_test_only def test_case(state_test, call_opcode): pass - """, + """, # noqa: E501 {"passed": 1, "failed": 0, "skipped": 0, "errors": 0}, None, id="with_all_call_opcodes_with_selector", @@ -266,7 +266,7 @@ def test_case(state_test, system_contract): def 
test_case(state_test, typed_transaction): assert isinstance(typed_transaction, Transaction) assert typed_transaction.ty in [0, 1] # Berlin supports types 0 and 1 - """, + """, # noqa: E501 {"passed": 2, "failed": 0, "skipped": 0, "errors": 0}, None, id="with_all_typed_transactions_berlin", @@ -282,7 +282,7 @@ def test_case(state_test, typed_transaction): def test_case(state_test, typed_transaction, pre): assert isinstance(typed_transaction, Transaction) assert typed_transaction.ty in [0, 1, 2] # London supports types 0, 1, 2 - """, + """, # noqa: E501 {"passed": 3, "failed": 0, "skipped": 0, "errors": 0}, None, id="with_all_typed_transactions_london", @@ -404,7 +404,7 @@ def covariant_function(fork): @pytest.mark.state_test_only def test_case(state_test, test_parameter, test_parameter_2): pass - """, + """, # noqa: E501 {"passed": 5, "failed": 0, "skipped": 0, "errors": 0}, None, id="multi_parameter_custom_covariant_marker", diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py index 9379d90b40..e3fd58d50b 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/tests/test_markers.py @@ -106,7 +106,9 @@ def test_case(state_test): ), pytest.param( generate_test( - valid_at_transition_to='"Paris", subsequent_forks=True, until="Cancun"', + valid_at_transition_to=( + '"Paris", subsequent_forks=True, until="Cancun"' + ), ), ["--until=Prague"], {"passed": 2, "failed": 0, "skipped": 0, "errors": 0}, @@ -143,7 +145,9 @@ def test_case(state_test): ), pytest.param( generate_test( - valid_at_transition_to='"Osaka", subsequent_forks=True, until="BPO1"', + valid_at_transition_to=( + '"Osaka", subsequent_forks=True, until="BPO1"' + ), ), ["--until=BPO1"], {"passed": 1, "failed": 0, "skipped": 0, "errors": 0}, @@ -152,7 +156,9 @@ def test_case(state_test): ), pytest.param( generate_test( - valid_at_transition_to='"Osaka", subsequent_forks=True, until="BPO1"', + valid_at_transition_to=( + '"Osaka", subsequent_forks=True, until="BPO1"' + ), valid_for_bpo_forks="", ), ["--until=BPO1"], @@ -358,7 +364,8 @@ def test_mixed_function_and_param_markers(state_test, value): generate_param_level_mixed_test(), ["--from=Berlin", "--until=Prague"], # Function marker: valid_until("Cancun") limits to <= Cancun - # all_forks (TangerineWhistle): Berlin, London, Paris, Shanghai, Cancun = 5 + # all_forks (TangerineWhistle): + # Berlin, London, Paris, Shanghai, Cancun = 5 # paris_only: Paris, Shanghai, Cancun = 3 # Total: 8 tests {"passed": 8, "failed": 0, "skipped": 0, "errors": 0}, @@ -384,7 +391,7 @@ def test_param_level_validity_markers( pytest_args: List[str], ) -> None: """ - Test param-level validity markers (valid_from, valid_until on pytest.param). + Test param-level validity markers (valid_from, valid_until). 
The pytest_collection_modifyitems hook filters tests based on param-level markers after parametrization, allowing different parameter values to have diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/help/help.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/help/help.py index 4ef957ee40..76769f6d9c 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/help/help.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/help/help.py @@ -19,7 +19,10 @@ def pytest_addoption(parser: pytest.Parser) -> None: action="store_true", dest="show_check_eip_versions_help", default=False, - help="Show help options only for the check_eip_versions command and exit.", + help=( + "Show help options only for the check_eip_versions command " + "and exit." + ), ) help_group.addoption( "--fill-help", @@ -40,28 +43,39 @@ def pytest_addoption(parser: pytest.Parser) -> None: action="store_true", dest="show_execute_help", default=False, - help="Show help options specific to the execute remote command and exit.", + help=( + "Show help options specific to the execute remote command " + "and exit." + ), ) help_group.addoption( "--execute-hive-help", action="store_true", dest="show_execute_hive_help", default=False, - help="Show help options specific to the execute hive command and exit.", + help=( + "Show help options specific to the execute hive command and exit." + ), ) help_group.addoption( "--execute-recover-help", action="store_true", dest="show_execute_recover_help", default=False, - help="Show help options specific to the execute recover command and exit.", + help=( + "Show help options specific to the execute recover command " + "and exit." + ), ) help_group.addoption( "--execute-eth-config-help", action="store_true", dest="show_execute_eth_config_help", default=False, - help="Show help options specific to the execute eth_config command and exit.", + help=( + "Show help options specific to the execute eth_config command " + "and exit." + ), ) @@ -163,7 +177,8 @@ def show_specific_help( pytest_ini = Path(config.inifile) # type: ignore if pytest_ini.name != expected_ini: raise ValueError( - f"Unexpected {expected_ini}!={pytest_ini.name} file option generating help." + f"Unexpected {expected_ini}!={pytest_ini.name} file option " + "generating help." ) test_parser = argparse.ArgumentParser() diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/pytest_hive/pytest_hive.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/pytest_hive/pytest_hive.py index 8064e2c48e..62a6b113cd 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/pytest_hive/pytest_hive.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/pytest_hive/pytest_hive.py @@ -48,6 +48,7 @@ from hive.testing import HiveTest, HiveTestResult, HiveTestSuite from execution_testing.logging import get_logger + from .hive_info import ClientFile, HiveInfo logger = get_logger(__name__) @@ -60,7 +61,8 @@ def pytest_configure(config: pytest.Config) -> None: # noqa: D103 "The HIVE_SIMULATOR environment variable is not set.\n\n" "If running locally, start hive in --dev mode, for example:\n" "./hive --dev --client go-ethereum\n\n" - "and set the HIVE_SIMULATOR to the reported URL. For example, in bash:\n" + "and set the HIVE_SIMULATOR to the reported URL. 
For example, " + "in bash:\n" "export HIVE_SIMULATOR=http://127.0.0.1:3000\n" "or in fish:\n" "set -x HIVE_SIMULATOR http://127.0.0.1:3000" @@ -96,8 +98,9 @@ def pytest_addoption(parser: pytest.Parser) -> None: # noqa: D103 dest="hive_simulator", default=os.environ.get("HIVE_SIMULATOR"), help=( - "The Hive simulator endpoint, e.g. http://127.0.0.1:3000. By default, the value is " - "taken from the HIVE_SIMULATOR environment variable." + "The Hive simulator endpoint, e.g. http://127.0.0.1:3000. By " + "default, the value is taken from the HIVE_SIMULATOR environment " + "variable." ), ) @@ -134,9 +137,8 @@ def pytest_report_header( f"hive date: {hive_info.date}", ] for client in hive_info.client_file.root: - header_lines += [ - f"hive client ({client.client}): {client.model_dump_json(exclude_none=True)}", - ] + dump = client.model_dump_json(exclude_none=True) + header_lines += [f"hive client ({client.client}): {dump}"] return header_lines @@ -264,8 +266,8 @@ def hive_test( ) except pytest.FixtureLookupError: pytest.exit( - "Error: The 'test_case_description' fixture has not been defined by the simulator " - "or pytest plugin using this plugin!" + "Error: The 'test_case_description' fixture has not been defined " + "by the simulator or pytest plugin using this plugin!" ) test_parameter_string = request.node.name @@ -343,8 +345,8 @@ def hive_test( else: test_passed = False test_result_details = ( - "Test failed for unknown reason (setup or call status unknown).\n\n" - + captured_output + "Test failed for unknown reason (setup or call status " + "unknown).\n\n" + captured_output ) test.end( diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/benchmarking.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/benchmarking.py index 03e261770c..b75335333f 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/benchmarking.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/benchmarking.py @@ -61,8 +61,8 @@ def pytest_configure(config: pytest.Config) -> None: fixed_opcode_count = OpcodeCountsConfig.from_config(config) if gas_benchmark_values is not None and fixed_opcode_count is not None: raise pytest.UsageError( - f"{GasBenchmarkValues.flag} and --fixed-opcode-count are mutually exclusive. " - "Use only one at a time." + f"{GasBenchmarkValues.flag} and --fixed-opcode-count are mutually " + "exclusive. Use only one at a time." ) if gas_benchmark_values is not None: @@ -137,10 +137,12 @@ def from_parameter_value( cls, config: pytest.Config, value: str ) -> Self | None: """Given the parameter value and config, return the expected object.""" + del config return cls.model_validate(value.split(",")) def get_test_parameters(self, test_name: str) -> list[ParameterSet]: """Get benchmark values. All tests have the same list.""" + del test_name return [ pytest.param( gas_value * 1_000_000, @@ -218,12 +220,14 @@ def pytest_collection_modifyitems( if not gas_benchmark_value and not fixed_opcode_count: return - # In --fixed-opcode-count mode, we only support tests that meet all of the following: + # In --fixed-opcode-count mode, we only support tests that meet all of + # the following: # - The test uses the benchmark_test fixture # - The benchmark test uses a code generator # # Here we filter out tests that do not use the benchmark_test fixture. - # Note: At this stage we cannot filter based on whether a code generator is used. 
+ # Note: At this stage we cannot filter based on whether a code generator + # is used. if fixed_opcode_count is not None: filtered = [] for item in items: diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/execute_fill.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/execute_fill.py index acfb3be9db..b059420838 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/execute_fill.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/execute_fill.py @@ -53,12 +53,9 @@ def pytest_configure(config: pytest.Config) -> None: "execution_testing.cli.pytest_commands.plugins.filler.filler" ): for fixture_format in BaseFixture.formats.values(): - config.addinivalue_line( - "markers", - ( - f"{fixture_format.format_name.lower()}: {fixture_format.description}" - ), - ) + name = fixture_format.format_name.lower() + desc = fixture_format.description + config.addinivalue_line("markers", f"{name}: {desc}") for ( label, labeled_fixture_format, @@ -71,12 +68,9 @@ def pytest_configure(config: pytest.Config) -> None: "execution_testing.cli.pytest_commands.plugins.execute.execute" ): for execute_format in BaseExecute.formats.values(): - config.addinivalue_line( - "markers", - ( - f"{execute_format.format_name.lower()}: {execute_format.description}" - ), - ) + name = execute_format.format_name.lower() + desc = execute_format.description + config.addinivalue_line("markers", f"{name}: {desc}") for ( label, labeled_execute_format, @@ -104,7 +98,8 @@ def pytest_configure(config: pytest.Config) -> None: ) config.addinivalue_line( "markers", - "compile_yul_with(fork): Always compile Yul source using the corresponding evm version.", + "compile_yul_with(fork): Always compile Yul source using the " + "corresponding evm version.", ) config.addinivalue_line( "markers", @@ -124,35 +119,38 @@ def pytest_configure(config: pytest.Config) -> None: ) config.addinivalue_line( "markers", - "exception_test: Negative tests that include an invalid block or transaction.", + "exception_test: Negative tests that include an invalid block or " + "transaction.", ) config.addinivalue_line( "markers", - "eip_checklist(item_id, eip=None): Mark a test as implementing a specific checklist item. " - "The first positional parameter is the checklist item ID. " - "The optional 'eip' keyword parameter specifies additional EIPs covered by the test.", + "eip_checklist(item_id, eip=None): Mark a test as implementing a " + "specific checklist item. The first positional parameter is the " + "checklist item ID. The optional 'eip' keyword parameter specifies " + "additional EIPs covered by the test.", ) config.addinivalue_line( "markers", - "derived_test: Mark a test as a derived test (E.g. a BlockchainTest that is derived " - "from a StateTest).", + "derived_test: Mark a test as a derived test (E.g. a BlockchainTest " + "that is derived from a StateTest).", ) config.addinivalue_line( "markers", - "tagged: Marks a static test as tagged. Tags are used to generate dynamic " - "addresses for static tests at fill time. All tagged tests are compatible with " - "dynamic address generation.", + "tagged: Marks a static test as tagged. Tags are used to generate " + "dynamic addresses for static tests at fill time. All tagged tests " + "are compatible with dynamic address generation.", ) config.addinivalue_line( "markers", - "untagged: Marks a static test as untagged. 
Tags are used to generate dynamic " - "addresses for static tests at fill time. Untagged tests are incompatible with " - "dynamic address generation.", + "untagged: Marks a static test as untagged. Tags are used to generate " + "dynamic addresses for static tests at fill time. Untagged tests are " + "incompatible with dynamic address generation.", ) config.addinivalue_line( "markers", - "verify_sync: Marks a test to be run with `consume sync`, verifying blockchain " - "engine tests and having hive clients sync after payload execution.", + "verify_sync: Marks a test to be run with `consume sync`, verifying " + "blockchain engine tests and having hive clients sync after payload " + "execution.", ) config.addinivalue_line( "markers", @@ -161,7 +159,8 @@ def pytest_configure(config: pytest.Config) -> None: ) config.addinivalue_line( "markers", - "pre_alloc_modify: Marks a test to apply plugin-specific pre_alloc_group modifiers", + "pre_alloc_modify: Marks a test to apply plugin-specific " + "pre_alloc_group modifiers", ) config.addinivalue_line( "markers", @@ -177,7 +176,8 @@ def pytest_configure(config: pytest.Config) -> None: ) config.addinivalue_line( "markers", - "mainnet: Specialty tests crafted for running on mainnet and sanity checking.", + "mainnet: Specialty tests crafted for running on mainnet and sanity " + "checking.", ) config.addinivalue_line( "markers", @@ -191,7 +191,10 @@ def test_case_description(request: pytest.FixtureRequest) -> str: Fixture to extract and combine docstrings from the test class and the test function. """ - description_unavailable = "No description available - add a docstring to the python test class or function." + description_unavailable = ( + "No description available - add a docstring to the python test " + "class or function." + ) test_class_doc = "" test_function_doc = "" if hasattr(request.node, "cls"): @@ -241,7 +244,8 @@ def __init__(self, message: str): and "blockchain_test" in item.fixturenames ): raise InvalidFillerError( - "A filler should only implement either a state test or a blockchain test; not both." + "A filler should only implement either a state test or a " + "blockchain test; not both." ) # Check that the test defines either test type as parameter. diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/solc/solc.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/solc/solc.py index 78ba007c31..1557d18d63 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/solc/solc.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/solc/solc.py @@ -22,7 +22,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: type=str, default=None, help=( - "Path to a solc executable (for Yul source compilation). Default: solc binary in PATH." + "Path to a solc executable (for Yul source compilation). " + "Default: solc binary in PATH." ), ) @@ -43,7 +44,8 @@ def pytest_configure(config: pytest.Config) -> None: solc_bin = which("solc") if not solc_bin: pytest.exit( - "solc binary not found in PATH. Please install solc and ensure it's in your PATH.", + "solc binary not found in PATH. Please install solc and " + "ensure it's in your PATH.", returncode=pytest.ExitCode.USAGE_ERROR, ) @@ -114,8 +116,8 @@ def pytest_configure(config: pytest.Config) -> None: ) if solc_version_semver < SOLC_EXPECTED_MIN_VERSION: pytest.exit( - f"Unsupported solc version: {solc_version_semver}. 
Minimum required version is " - f"{SOLC_EXPECTED_MIN_VERSION}", + f"Unsupported solc version: {solc_version_semver}. Minimum " + f"required version is {SOLC_EXPECTED_MIN_VERSION}", returncode=pytest.ExitCode.USAGE_ERROR, ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/spec_version_checker/spec_version_checker.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/spec_version_checker/spec_version_checker.py index 62b6363eb5..05b4a16f7a 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/spec_version_checker/spec_version_checker.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/spec_version_checker/spec_version_checker.py @@ -19,9 +19,11 @@ ) GITHUB_TOKEN_HELP = textwrap.dedent( - "Either set the GITHUB_TOKEN environment variable or specify one via --github-token. " - "The Github CLI can be used: `--github-token $(gh auth token)` (https://cli.github.com/) " - "or a PAT can be generated at https://github.com/settings/personal-access-tokens/new." + "Either set the GITHUB_TOKEN environment variable or specify one via " + "--github-token. The Github CLI can be used: " + "`--github-token $(gh auth token)` (https://cli.github.com/) " # noqa: E501 + "or a PAT can be generated at " + "https://github.com/settings/personal-access-tokens/new." # noqa: E501 ) @@ -37,8 +39,8 @@ def pytest_addoption(parser: pytest.Parser) -> None: dest="github_token", default=None, help=( - "Specify a Github API personal access token (PAT) to avoid rate limiting. " - f"{GITHUB_TOKEN_HELP}" + "Specify a Github API personal access token (PAT) to avoid rate " + f"limiting. {GITHUB_TOKEN_HELP}" ), ) @@ -53,7 +55,8 @@ def pytest_configure(config: pytest.Config) -> None: """ config.addinivalue_line( "markers", - "eip_version_check: a test that tests the reference spec defined in an EIP test module.", + "eip_version_check: a test that tests the reference spec defined in " + "an EIP test module.", ) github_token = config.getoption("github_token") or os.environ.get( @@ -62,8 +65,8 @@ def pytest_configure(config: pytest.Config) -> None: if not github_token: pytest.exit( - "A Github personal access token (PAT) is required but has not been provided. " - f"{GITHUB_TOKEN_HELP}" + "A Github personal access token (PAT) is required but has not " + f"been provided. {GITHUB_TOKEN_HELP}" ) config.github_token = github_token # type: ignore[attr-defined] @@ -109,7 +112,8 @@ def get_ref_spec_from_module( ) from e else: raise Exception( - "Test doesn't define REFERENCE_SPEC_GIT_PATH and REFERENCE_SPEC_VERSION" + "Test doesn't define REFERENCE_SPEC_GIT_PATH and " + "REFERENCE_SPEC_VERSION" ) return spec_obj diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/processors.py b/packages/testing/src/execution_testing/cli/pytest_commands/processors.py index dd10fac25a..c47f899094 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/processors.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/processors.py @@ -60,7 +60,8 @@ def process_args(self, args: List[str]) -> List[str]: # Check for incompatible xdist plugin if any(arg == "-n" or arg.startswith("-n=") for arg in args): sys.exit( - "error: xdist-plugin not supported with --output=stdout (remove -n args)." + "error: xdist-plugin not supported with --output=stdout " + "(remove -n args)." 
) # Add flags to suppress pytest output when writing to stdout diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/watcher.py b/packages/testing/src/execution_testing/cli/pytest_commands/watcher.py index bb4e5c9fc1..8a2504baa0 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/watcher.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/watcher.py @@ -72,7 +72,7 @@ def run_fill() -> None: file_count = len(file_mtimes) self.console.print( - f"[blue]Watching {file_count} files in tests/ and src/ directories." + f"[blue]Watching {file_count} files in tests/ and src/." "\nPress Ctrl+C to stop.[/blue]" ) @@ -86,7 +86,8 @@ def run_fill() -> None: if not self.verbose: os.system("clear" if os.name != "nt" else "cls") self.console.print( - "[yellow]File changes detected, re-running...[/yellow]\n" + "[yellow]File changes detected, " + "re-running...[/yellow]\n" ) run_fill() file_mtimes = current_mtimes diff --git a/packages/testing/src/execution_testing/cli/show_pre_alloc_group_stats.py b/packages/testing/src/execution_testing/cli/show_pre_alloc_group_stats.py index 55b47ba602..2ec85527c1 100644 --- a/packages/testing/src/execution_testing/cli/show_pre_alloc_group_stats.py +++ b/packages/testing/src/execution_testing/cli/show_pre_alloc_group_stats.py @@ -383,13 +383,17 @@ def display_stats(stats: Dict, console: Console, verbose: int = 0) -> None: ) cumulative_groups_display += groups_in_bin + cumul_pct = cumulative_groups_display / total_groups * 100 + cumulative_str = ( + f"{cumulative_groups_display} ({cumul_pct:.1f}%)" + if total_groups > 0 + else "0" + ) coverage_table.add_row( size_range, str(tests_in_range), f"{coverage_percentage:.1f}%", - f"{cumulative_groups_display} ({cumulative_groups_display / total_groups * 100:.1f}%)" - if total_groups > 0 - else "0", + cumulative_str, ) console.print(coverage_table) @@ -449,11 +453,13 @@ def display_stats(stats: Dict, console: Console, verbose: int = 0) -> None: # Split test functions analysis (only show if there are any) if stats.get("split_functions"): console.print( - "\n[bold yellow]Test Functions Split Across Multiple Groups[/bold yellow]" + "\n[bold yellow]Test Functions Split Across Multiple " + "Groups[/bold yellow]" ) console.print( - "[dim]These test functions create multiple size-1 groups (due to different " - "forks/parameters), preventing pre-allocation group optimization:[/dim]", + "[dim]These test functions create multiple size-1 groups (due to " + "different forks/parameters), preventing pre-allocation group " + "optimization:[/dim]", highlight=False, ) @@ -493,21 +499,22 @@ def display_stats(stats: Dict, console: Console, verbose: int = 0) -> None: total_split_functions = len(stats["split_functions"]) console.print( - f"\n[yellow]Optimization Potential:[/yellow] Excluding these {total_split_functions} " - f"split functions would save {total_split_groups} groups" + f"\n[yellow]Optimization Potential:[/yellow] Excluding these " + f"{total_split_functions} split functions would save " + f"{total_split_groups} groups" ) # Verbosity hint console.print() if verbose == 0: console.print( - "[dim]Hint: Use -v to see detailed group and module statistics, or -vv to see all " - "groups and modules[/dim]" + "[dim]Hint: Use -v to see detailed group and module statistics, " + "or -vv to see all groups and modules[/dim]" ) elif verbose == 1: console.print( - "[dim]Hint: Use -vv to see all groups and modules (currently showing top entries " - "only)[/dim]" + "[dim]Hint: Use -vv to see all groups 
and modules (currently " + "showing top entries only)[/dim]" ) diff --git a/packages/testing/src/execution_testing/cli/tests/test_pytest_execute_command.py b/packages/testing/src/execution_testing/cli/tests/test_pytest_execute_command.py index 1f8179ec0f..c24b473446 100644 --- a/packages/testing/src/execution_testing/cli/tests/test_pytest_execute_command.py +++ b/packages/testing/src/execution_testing/cli/tests/test_pytest_execute_command.py @@ -85,18 +85,19 @@ def test_execute_eth_config_help(runner: CliRunner) -> None: def test_all_execute_subcommands_help_no_conflicts(runner: CliRunner) -> None: - """Test that all execute subcommands --help work without argument conflicts. + """ + Test that all execute subcommands --help work without argument conflicts. - This is a regression test for issue where --chain-id was defined in multiple - plugins, causing argparse.ArgumentError conflicts. + This is a regression test for issue where --chain-id was defined in + multiple plugins, causing argparse.ArgumentError conflicts. """ subcommands = ["remote", "recover", "hive", "eth-config"] for subcommand in subcommands: result = runner.invoke(execute, [subcommand, "--help"]) assert result.exit_code == 0, ( - f"execute {subcommand} --help failed with exit code {result.exit_code}\n" - f"Output: {result.output}" + f"execute {subcommand} --help failed with exit code " + f"{result.exit_code}\nOutput: {result.output}" ) # Ensure no argparse conflicts assert "ArgumentError" not in result.output, ( diff --git a/packages/testing/src/execution_testing/cli/tests/test_pytest_fill_command.py b/packages/testing/src/execution_testing/cli/tests/test_pytest_fill_command.py index dcb3d5a9e0..c663936810 100644 --- a/packages/testing/src/execution_testing/cli/tests/test_pytest_fill_command.py +++ b/packages/testing/src/execution_testing/cli/tests/test_pytest_fill_command.py @@ -21,7 +21,7 @@ def test_function(state_test, pre): @pytest.fixture -def expected_exit_code() -> pytest.ExitCode: +def expected_exit_code() -> pytest.ExitCode: # noqa: D103 return pytest.ExitCode.OK @@ -81,7 +81,8 @@ class TestFillPytester: """ Test fill command using pytester. - This mode skips the fill command's Click CLI and uses pytester to run the command. + This mode skips the fill command's Click CLI and uses pytester to run + the command. Pytester allows actually filling the Python test files. """ @@ -143,7 +144,10 @@ def _run_fill(*args: str) -> RunResult: @pytest.fixture() def default_html_report_file_path(self) -> str: """File path for fill's pytest html report.""" - return execution_testing.cli.pytest_commands.plugins.filler.filler.default_html_report_file_path() + filler_module = ( + execution_testing.cli.pytest_commands.plugins.filler.filler + ) + return filler_module.default_html_report_file_path() @pytest.fixture(scope="function") def default_fixtures_output( diff --git a/packages/testing/src/execution_testing/cli/tox_helpers.py b/packages/testing/src/execution_testing/cli/tox_helpers.py index 765281464e..551e90b627 100644 --- a/packages/testing/src/execution_testing/cli/tox_helpers.py +++ b/packages/testing/src/execution_testing/cli/tox_helpers.py @@ -124,8 +124,8 @@ def pyspelling() -> None: title="Pyspelling Check Failed", tox_env="spellcheck", error_message=( - "aspell is not installed. This tool is required for spell checking " - "documentation." + "aspell is not installed. This tool is required for " + "spell checking documentation." 
), fix_commands=[ "# Install aspell on Ubuntu/Debian", @@ -138,7 +138,8 @@ def pyspelling() -> None: sys.exit(1) else: click.echo( - "********* Install 'aspell' and 'aspell-en' to enable spellcheck *********" + "********* Install 'aspell' and 'aspell-en' to enable " + "spellcheck *********" ) sys.exit(0) @@ -147,7 +148,9 @@ def pyspelling() -> None: write_github_summary( title="Pyspelling Check Failed", tox_env="spellcheck", - error_message="Pyspelling found spelling errors in the documentation.", + error_message=( + "Pyspelling found spelling errors in the documentation." + ), fix_commands=[ "# Check the pyspelling configuration", "cat .pyspelling.yml", @@ -189,8 +192,8 @@ def codespell() -> None: if result.returncode != 0: console.print("\n[bold red]❌ Spellcheck Failed[/bold red]") console.print( - "[yellow]Please review the errors above. For single-suggestion fixes, you can " - "automatically apply them with:[/yellow]" + "[yellow]Please review the errors above. For single-suggestion " + "fixes, you can automatically apply them with:[/yellow]" ) console.print( f"[cyan]uv run codespell {paths_str} --write-changes[/cyan]\n" @@ -253,7 +256,8 @@ def validate_changelog() -> None: if invalid_lines: click.echo( - f"❌ Found bullet points in {changelog_path} without proper punctuation:" + f"❌ Found bullet points in {changelog_path} without proper " + "punctuation:" ) click.echo() for line_num, line in invalid_lines: diff --git a/packages/testing/src/execution_testing/client_clis/cli_types.py b/packages/testing/src/execution_testing/client_clis/cli_types.py index 190192ffcc..ea79c7554b 100644 --- a/packages/testing/src/execution_testing/client_clis/cli_types.py +++ b/packages/testing/src/execution_testing/client_clis/cli_types.py @@ -137,17 +137,20 @@ def are_equivalent( """Return True if the only difference is the gas counter.""" if len(self.traces) != len(other.traces): logger.debug( - f"Traces have different lengths: {len(self.traces)} != {len(other.traces)}." + f"Traces have different lengths: " + f"{len(self.traces)} != {len(other.traces)}." ) return False if self.output != other.output: logger.debug( - f"Traces have different outputs: {self.output} != {other.output}." + f"Traces have different outputs: " + f"{self.output} != {other.output}." ) return False if self.gas_used != other.gas_used and not enable_post_processing: logger.debug( - f"Traces have different gas used: {self.gas_used} != {other.gas_used}." + f"Traces have different gas used: " + f"{self.gas_used} != {other.gas_used}." ) return False own_traces = self.traces.copy() diff --git a/packages/testing/src/execution_testing/client_clis/clis/besu.py b/packages/testing/src/execution_testing/client_clis/clis/besu.py index 824197fef7..3a75c202d4 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/besu.py +++ b/packages/testing/src/execution_testing/client_clis/clis/besu.py @@ -56,7 +56,8 @@ def __init__( result = subprocess.run(args, capture_output=True, text=True) except subprocess.CalledProcessError as e: raise Exception( - f"evm process unexpectedly returned a non-zero status code: {e}." + "evm process unexpectedly returned a non-zero status " + f"code: {e}." 
) from e except Exception as e: raise Exception( @@ -149,7 +150,8 @@ def evaluate( #!/bin/bash # Use $1 as t8n-server port if provided, else default to 3000 PORT=${{1:-3000}} - curl http://localhost:${{PORT}}/ -X POST -H "Content-Type: application/json" \\ + curl http://localhost:${{PORT}}/ -X POST \\ + -H "Content-Type: application/json" \\ --data '{indented_post_data_string}' """ ) @@ -165,7 +167,8 @@ def evaluate( ) response = requests.post(self.server_url, json=post_data, timeout=5) - response.raise_for_status() # exception visible in pytest failure output + # exception visible in pytest failure output + response.raise_for_status() output: TransitionToolOutput = TransitionToolOutput.model_validate( response.json(), context={"exception_mapper": self.exception_mapper}, @@ -177,7 +180,9 @@ def evaluate( { "response.txt": response.text, "status_code.txt": response.status_code, - "time_elapsed_seconds.txt": response.elapsed.total_seconds(), + "time_elapsed_seconds.txt": ( + response.elapsed.total_seconds() + ), }, ) @@ -222,7 +227,8 @@ class BesuExceptionMapper(ExceptionMapper): mapping_substring: ClassVar[Dict[ExceptionBase, str]] = { TransactionException.NONCE_IS_MAX: "invalid Nonce must be less than", TransactionException.INSUFFICIENT_MAX_FEE_PER_BLOB_GAS: ( - "transaction invalid tx max fee per blob gas less than block blob gas fee" + "transaction invalid tx max fee per blob gas less than " + "block blob gas fee" ), TransactionException.GASLIMIT_PRICE_PRODUCT_OVERFLOW: ( "invalid Upfront gas cost cannot exceed 2^256 Wei" @@ -230,13 +236,19 @@ class BesuExceptionMapper(ExceptionMapper): TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: ( "transaction invalid gasPrice is less than the current BaseFee" ), - TransactionException.GAS_ALLOWANCE_EXCEEDED: "provided gas insufficient", + TransactionException.GAS_ALLOWANCE_EXCEEDED: ( + "provided gas insufficient" + ), TransactionException.PRIORITY_GREATER_THAN_MAX_FEE_PER_GAS: ( - "transaction invalid max priority fee per gas cannot be greater than max fee per gas" + "transaction invalid max priority fee per gas cannot be greater " + "than max fee per gas" + ), + TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: ( + "Invalid versionedHash" ), - TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: "Invalid versionedHash", TransactionException.TYPE_3_TX_CONTRACT_CREATION: ( - "transaction invalid transaction blob transactions must have a to address" + "transaction invalid transaction blob transactions must have " + "a to address" ), TransactionException.TYPE_3_TX_WITH_FULL_BLOBS: ( "Failed to decode transactions from block parameter" @@ -248,11 +260,12 @@ class BesuExceptionMapper(ExceptionMapper): "Transaction type BLOB is invalid, accepted transaction types are" ), TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: ( - "transaction invalid transaction code delegation transactions must have a " - "non-empty code delegation list" + "transaction invalid transaction code delegation transactions " + "must have a non-empty code delegation list" ), TransactionException.TYPE_4_TX_CONTRACT_CREATION: ( - "transaction invalid transaction code delegation transactions must have a to address" + "transaction invalid transaction code delegation transactions " + "must have a to address" ), TransactionException.TYPE_4_TX_PRE_FORK: ( "transaction invalid Transaction type DELEGATE_CODE is invalid" @@ -269,70 +282,95 @@ class BesuExceptionMapper(ExceptionMapper): BlockException.INCORRECT_BLOB_GAS_USED: ( "Payload BlobGasUsed does not match 
calculated BlobGasUsed" ), - BlockException.INVALID_GAS_USED_ABOVE_LIMIT: "Header validation failed (FULL)", + BlockException.INVALID_GAS_USED_ABOVE_LIMIT: ( + "Header validation failed (FULL)" + ), BlockException.INVALID_GASLIMIT: "Header validation failed (FULL)", BlockException.EXTRA_DATA_TOO_BIG: "Header validation failed (FULL)", - BlockException.INVALID_BLOCK_NUMBER: "Header validation failed (FULL)", - BlockException.INVALID_BASEFEE_PER_GAS: "Header validation failed (FULL)", - BlockException.INVALID_BLOCK_TIMESTAMP_OLDER_THAN_PARENT: "block timestamp not greater than parent", - BlockException.INVALID_LOG_BLOOM: "failed to validate output of imported block", - BlockException.INVALID_RECEIPTS_ROOT: "failed to validate output of imported block", - BlockException.INVALID_STATE_ROOT: "World State Root does not match expected value", + BlockException.INVALID_BLOCK_NUMBER: ( + "Header validation failed (FULL)" + ), + BlockException.INVALID_BASEFEE_PER_GAS: ( + "Header validation failed (FULL)" + ), + BlockException.INVALID_BLOCK_TIMESTAMP_OLDER_THAN_PARENT: ( + "block timestamp not greater than parent" + ), + BlockException.INVALID_LOG_BLOOM: ( + "failed to validate output of imported block" + ), + BlockException.INVALID_RECEIPTS_ROOT: ( + "failed to validate output of imported block" + ), + BlockException.INVALID_STATE_ROOT: ( + "World State Root does not match expected value" + ), } mapping_regex = { BlockException.INVALID_REQUESTS: ( - r"Invalid execution requests|Requests hash mismatch, calculated: 0x[0-9a-f]+ header: " - r"0x[0-9a-f]+" + r"Invalid execution requests|Requests hash mismatch, " + r"calculated: 0x[0-9a-f]+ header: 0x[0-9a-f]+" ), BlockException.INVALID_BLOCK_HASH: ( - r"Computed block hash 0x[0-9a-f]+ does not match block hash parameter 0x[0-9a-f]+" + r"Computed block hash 0x[0-9a-f]+ does not match block " + r"hash parameter 0x[0-9a-f]+" ), BlockException.SYSTEM_CONTRACT_CALL_FAILED: ( - r"System call halted|System call did not execute to completion" + r"System call halted|" + r"System call did not execute to completion" ), BlockException.SYSTEM_CONTRACT_EMPTY: ( r"(Invalid system call, no code at address)|" r"(Invalid system call address:)" ), BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: ( - r"Invalid (amount|index|pubKey|signature|withdrawalCred) (offset|size): " - r"expected (\d+), but got (-?\d+)|" - r"Invalid deposit log length\. Must be \d+ bytes, but is \d+ bytes" + r"Invalid (amount|index|pubKey|signature|withdrawalCred) " + r"(offset|size): expected (\d+), but got (-?\d+)|" + r"Invalid deposit log length\. 
Must be \d+ bytes, " + r"but is \d+ bytes" ), BlockException.RLP_BLOCK_LIMIT_EXCEEDED: ( r"Block size of \d+ bytes exceeds limit of \d+ bytes" ), TransactionException.INITCODE_SIZE_EXCEEDED: ( - r"transaction invalid Initcode size of \d+ exceeds maximum size of \d+" + r"transaction invalid Initcode size of \d+ exceeds " + r"maximum size of \d+" ), TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: ( - r"transaction invalid transaction up-front cost 0x[0-9a-f]+ exceeds transaction " - r"sender account balance 0x[0-9a-f]+" + r"transaction invalid transaction up-front cost 0x[0-9a-f]+ " + r"exceeds transaction sender account balance 0x[0-9a-f]+" ), TransactionException.INTRINSIC_GAS_TOO_LOW: ( - r"transaction invalid intrinsic gas cost \d+ exceeds gas limit \d+" + r"transaction invalid intrinsic gas cost \d+ " + r"exceeds gas limit \d+" ), TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: ( - r"transaction invalid intrinsic gas cost \d+ exceeds gas limit \d+" + r"transaction invalid intrinsic gas cost \d+ " + r"exceeds gas limit \d+" ), TransactionException.SENDER_NOT_EOA: ( - r"transaction invalid Sender 0x[0-9a-f]+ has deployed code and so is not authorized " - r"to send transactions" + r"transaction invalid Sender 0x[0-9a-f]+ has deployed code " + r"and so is not authorized to send transactions" ), TransactionException.NONCE_MISMATCH_TOO_LOW: ( - r"transaction invalid transaction nonce \d+ below sender account nonce \d+" + r"transaction invalid transaction nonce \d+ " + r"below sender account nonce \d+" ), TransactionException.NONCE_MISMATCH_TOO_HIGH: ( - r"transaction invalid transaction nonce \d+ does not match sender account nonce \d+" + r"transaction invalid transaction nonce \d+ " + r"does not match sender account nonce \d+" ), TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: ( - r"transaction invalid Transaction gas limit must be at most \d+" + r"transaction invalid Transaction gas limit " + r"must be at most \d+" ), TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: ( - r"Blob transaction 0x[0-9a-f]+ exceeds block blob gas limit: \d+ > \d+" + r"Blob transaction 0x[0-9a-f]+ exceeds " + r"block blob gas limit: \d+ > \d+" ), TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: ( - r"Blob transaction has too many blobs: \d+|Invalid Blob Count: \d+" + r"Blob transaction has too many blobs: \d+|" + r"Invalid Blob Count: \d+" ), # BAL Exceptions: TODO - review once all clients completed. 
BlockException.INVALID_BAL_EXTRA_ACCOUNT: ( diff --git a/packages/testing/src/execution_testing/client_clis/clis/erigon.py b/packages/testing/src/execution_testing/client_clis/clis/erigon.py index 8e1f51b851..b346a6f4b2 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/erigon.py +++ b/packages/testing/src/execution_testing/client_clis/clis/erigon.py @@ -12,57 +12,106 @@ class ErigonExceptionMapper(ExceptionMapper): mapping_substring = { TransactionException.SENDER_NOT_EOA: "sender not an eoa", - TransactionException.INITCODE_SIZE_EXCEEDED: "max initcode size exceeded", + TransactionException.INITCODE_SIZE_EXCEEDED: ( + "max initcode size exceeded" + ), TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: ( "insufficient funds for gas * price + value" ), TransactionException.NONCE_IS_MAX: "nonce has max value", TransactionException.INTRINSIC_GAS_TOO_LOW: "intrinsic gas too low", - TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: "intrinsic gas too low", - TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: "fee cap less than block base fee", - TransactionException.PRIORITY_GREATER_THAN_MAX_FEE_PER_GAS: "tip higher than fee cap", - TransactionException.INSUFFICIENT_MAX_FEE_PER_BLOB_GAS: "max fee per blob gas too low", + TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: ( + "intrinsic gas too low" + ), + TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: ( + "fee cap less than block base fee" + ), + TransactionException.PRIORITY_GREATER_THAN_MAX_FEE_PER_GAS: ( + "tip higher than fee cap" + ), + TransactionException.INSUFFICIENT_MAX_FEE_PER_BLOB_GAS: ( + "max fee per blob gas too low" + ), TransactionException.NONCE_MISMATCH_TOO_LOW: "nonce too low", TransactionException.NONCE_MISMATCH_TOO_HIGH: "nonce too high", TransactionException.GAS_ALLOWANCE_EXCEEDED: "gas limit reached", - TransactionException.TYPE_3_TX_PRE_FORK: "blob txn is not supported by signer", + TransactionException.TYPE_3_TX_PRE_FORK: ( + "blob txn is not supported by signer" + ), TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: ( - "invalid blob versioned hash, must start with VERSIONED_HASH_VERSION_KZG" + "invalid blob versioned hash, must start with " + "VERSIONED_HASH_VERSION_KZG" + ), + TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: ( + "blob transaction has too many blobs" + ), + TransactionException.TYPE_3_TX_ZERO_BLOBS: ( + "a blob stx must contain at least one blob" + ), + TransactionException.TYPE_3_TX_WITH_FULL_BLOBS: ( + "rlp: expected String or Byte" + ), + TransactionException.TYPE_3_TX_CONTRACT_CREATION: ( + "wrong size for To: 0" ), - TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: "blob transaction has too many blobs", - TransactionException.TYPE_3_TX_ZERO_BLOBS: "a blob stx must contain at least one blob", - TransactionException.TYPE_3_TX_WITH_FULL_BLOBS: "rlp: expected String or Byte", - TransactionException.TYPE_3_TX_CONTRACT_CREATION: "wrong size for To: 0", TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: ( "blobs/blobgas exceeds max" ), TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: ( "SetCodeTransaction without authorizations is invalid" ), - TransactionException.TYPE_4_TX_CONTRACT_CREATION: "wrong size for To: 0", - TransactionException.TYPE_4_TX_PRE_FORK: "setCode tx is not supported by signer", - BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: "could not parse requests logs", - BlockException.SYSTEM_CONTRACT_EMPTY: "Syscall failure: Empty Code at", - BlockException.SYSTEM_CONTRACT_CALL_FAILED: "Unprecedented Syscall failure", - 
BlockException.INVALID_REQUESTS: "invalid requests root hash in header", + TransactionException.TYPE_4_TX_CONTRACT_CREATION: ( + "wrong size for To: 0" + ), + TransactionException.TYPE_4_TX_PRE_FORK: ( + "setCode tx is not supported by signer" + ), + BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: ( + "could not parse requests logs" + ), + BlockException.SYSTEM_CONTRACT_EMPTY: ( + "Syscall failure: Empty Code at" + ), + BlockException.SYSTEM_CONTRACT_CALL_FAILED: ( + "Unprecedented Syscall failure" + ), + BlockException.INVALID_REQUESTS: ( + "invalid requests root hash in header" + ), BlockException.INVALID_BLOCK_HASH: "invalid block hash", BlockException.RLP_BLOCK_LIMIT_EXCEEDED: "block exceeds max rlp size", - BlockException.INVALID_BASEFEE_PER_GAS: "invalid block: invalid baseFee", - BlockException.INVALID_BLOCK_TIMESTAMP_OLDER_THAN_PARENT: "invalid block: timestamp older than parent", + BlockException.INVALID_BASEFEE_PER_GAS: ( + "invalid block: invalid baseFee" + ), + BlockException.INVALID_BLOCK_TIMESTAMP_OLDER_THAN_PARENT: ( + "invalid block: timestamp older than parent" + ), BlockException.INVALID_BLOCK_NUMBER: "invalid block number", - BlockException.EXTRA_DATA_TOO_BIG: "invalid block: extra-data longer than 32 bytes", + BlockException.EXTRA_DATA_TOO_BIG: ( + "invalid block: extra-data longer than 32 bytes" + ), BlockException.INVALID_GASLIMIT: "invalid block: invalid gas limit", BlockException.INVALID_STATE_ROOT: "invalid block: wrong trie root", BlockException.INVALID_RECEIPTS_ROOT: "receiptHash mismatch", BlockException.INVALID_LOG_BLOOM: "invalid bloom", } mapping_regex = { - BlockException.INVALID_BLOCK_ACCESS_LIST: r"invalid block access list|block access list mismatch", + BlockException.INVALID_BLOCK_ACCESS_LIST: ( + r"invalid block access list|block access list mismatch" + ), TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: ( r"invalid block, txnIdx=\d+,.*gas limit too high" ), - BlockException.INCORRECT_BLOB_GAS_USED: r"blobGasUsed by execution: \d+, in header: \d+", - BlockException.INCORRECT_EXCESS_BLOB_GAS: r"invalid excessBlobGas: have \d+, want \d+", - BlockException.INVALID_GAS_USED: r"gas used by execution: \w+, in header: \w+", - BlockException.INVALID_GAS_USED_ABOVE_LIMIT: r"invalid gasUsed: have \d+, gasLimit \d+", + BlockException.INCORRECT_BLOB_GAS_USED: ( + r"blobGasUsed by execution: \d+, in header: \d+" + ), + BlockException.INCORRECT_EXCESS_BLOB_GAS: ( + r"invalid excessBlobGas: have \d+, want \d+" + ), + BlockException.INVALID_GAS_USED: ( + r"gas used by execution: \w+, in header: \w+" + ), + BlockException.INVALID_GAS_USED_ABOVE_LIMIT: ( + r"invalid gasUsed: have \d+, gasLimit \d+" + ), } diff --git a/packages/testing/src/execution_testing/client_clis/clis/ethereumjs.py b/packages/testing/src/execution_testing/client_clis/clis/ethereumjs.py index 77088cddd0..a781be51f8 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/ethereumjs.py +++ b/packages/testing/src/execution_testing/client_clis/clis/ethereumjs.py @@ -65,7 +65,9 @@ class EthereumJSExceptionMapper(ExceptionMapper): TransactionException.GASLIMIT_PRICE_PRODUCT_OVERFLOW: ( "gas limit * gasPrice cannot exceed MAX_INTEGER" ), - TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: "tx unable to pay base fee", + TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: ( + "tx unable to pay base fee" + ), TransactionException.NONCE_IS_MAX: "nonce cannot equal or exceed", TransactionException.PRIORITY_GREATER_THAN_MAX_FEE_PER_GAS: ( "maxFeePerGas cannot be less than maxPriorityFeePerGas" @@ -74,16 
+76,25 @@ class EthereumJSExceptionMapper(ExceptionMapper): "versioned hash does not start with KZG commitment version" ), # This message is the same as TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED - TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: "exceed maximum allowance", - TransactionException.TYPE_3_TX_ZERO_BLOBS: "tx should contain at least one blob", - TransactionException.TYPE_3_TX_WITH_FULL_BLOBS: "Invalid EIP-4844 transaction", + TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: ( + "exceed maximum allowance" + ), + TransactionException.TYPE_3_TX_ZERO_BLOBS: ( + "tx should contain at least one blob" + ), + TransactionException.TYPE_3_TX_WITH_FULL_BLOBS: ( + "Invalid EIP-4844 transaction" + ), TransactionException.TYPE_3_TX_CONTRACT_CREATION: ( - 'tx should have a "to" field and cannot be used to create contracts' + 'tx should have a "to" field and ' + "cannot be used to create contracts" ), TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: ( "Invalid EIP-7702 transaction: authorization list is empty" ), - TransactionException.INTRINSIC_GAS_TOO_LOW: "is lower than the minimum gas limit of", + TransactionException.INTRINSIC_GAS_TOO_LOW: ( + "is lower than the minimum gas limit of" + ), TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: ( "is lower than the minimum gas limit of" ), @@ -91,37 +102,51 @@ class EthereumJSExceptionMapper(ExceptionMapper): "the initcode size of this transaction is too large" ), TransactionException.TYPE_4_TX_CONTRACT_CREATION: ( - 'tx should have a "to" field and cannot be used to create contracts' + 'tx should have a "to" field and ' + "cannot be used to create contracts" ), TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: ( "sender doesn't have enough funds to send tx" ), - TransactionException.NONCE_MISMATCH_TOO_LOW: "the tx doesn't have the correct nonce", - TransactionException.GAS_ALLOWANCE_EXCEEDED: "tx has a higher gas limit than the block", + TransactionException.NONCE_MISMATCH_TOO_LOW: ( + "the tx doesn't have the correct nonce" + ), + TransactionException.GAS_ALLOWANCE_EXCEEDED: ( + "tx has a higher gas limit than the block" + ), BlockException.INCORRECT_EXCESS_BLOB_GAS: "Invalid 4844 transactions", BlockException.INVALID_RECEIPTS_ROOT: "invalid receipttrie", BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: ( - "Error verifying block while running: error: number exceeds 53 bits" + "Error verifying block while running: " + "error: number exceeds 53 bits" ), } mapping_regex: ClassVar[Dict[ExceptionBase, str]] = { TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: ( - r"tx causes total blob gas of \d+ to exceed maximum blob gas per block of \d+|" - r"tx can contain at most \d+ blobs" + r"tx causes total blob gas of \d+ to exceed maximum " + r"blob gas per block of \d+|tx can contain at most \d+ blobs" ), TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: ( - r"tx causes total blob gas of \d+ to exceed maximum blob gas per block of \d+|" - r"tx can contain at most \d+ blobs" + r"tx causes total blob gas of \d+ to exceed maximum " + r"blob gas per block of \d+|tx can contain at most \d+ blobs" ), TransactionException.TYPE_3_TX_PRE_FORK: ( - r"blob tx used but field env.ExcessBlobGas missing|EIP-4844 not enabled on Common" + r"blob tx used but field env.ExcessBlobGas missing|" + r"EIP-4844 not enabled on Common" + ), + BlockException.BLOB_GAS_USED_ABOVE_LIMIT: ( + r"invalid blobGasUsed expected=\d+ actual=\d+" + ), + BlockException.INCORRECT_BLOB_GAS_USED: ( + r"invalid blobGasUsed expected=\d+ actual=\d+" ), - 
BlockException.BLOB_GAS_USED_ABOVE_LIMIT: r"invalid blobGasUsed expected=\d+ actual=\d+", - BlockException.INCORRECT_BLOB_GAS_USED: r"invalid blobGasUsed expected=\d+ actual=\d+", BlockException.INVALID_BLOCK_HASH: ( - r"Invalid blockHash, expected: 0x[0-9a-f]+, received: 0x[0-9a-f]+" + r"Invalid blockHash, expected: 0x[0-9a-f]+, " + r"received: 0x[0-9a-f]+" + ), + BlockException.INVALID_REQUESTS: ( + r"Unknown request identifier|invalid requestshash" ), - BlockException.INVALID_REQUESTS: r"Unknown request identifier|invalid requestshash", BlockException.INVALID_GAS_USED_ABOVE_LIMIT: ( r"Invalid block: too much gas used. Used: \d+, gas limit: \d+" ), diff --git a/packages/testing/src/execution_testing/client_clis/clis/ethrex.py b/packages/testing/src/execution_testing/client_clis/clis/ethrex.py index 8db6311a3a..66ec2ece33 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/ethrex.py +++ b/packages/testing/src/execution_testing/client_clis/clis/ethrex.py @@ -18,28 +18,41 @@ class EthrexExceptionMapper(ExceptionMapper): "Invalid deposit request layout" ), BlockException.INVALID_REQUESTS: ( - "Requests hash does not match the one in the header after executing" + "Requests hash does not match the one in " + "the header after executing" ), BlockException.INVALID_RECEIPTS_ROOT: ( - "Receipts Root does not match the one in the header after executing" + "Receipts Root does not match the one in " + "the header after executing" ), BlockException.INVALID_STATE_ROOT: ( - "World State Root does not match the one in the header after executing" + "World State Root does not match the one in " + "the header after executing" + ), + BlockException.INVALID_GAS_USED: ( + "Gas used doesn't match value in header" + ), + BlockException.INCORRECT_BLOB_GAS_USED: ( + "Blob gas used doesn't match value in header" + ), + BlockException.INVALID_BASEFEE_PER_GAS: ( + "Base fee per gas is incorrect" ), - BlockException.INVALID_GAS_USED: "Gas used doesn't match value in header", - BlockException.INCORRECT_BLOB_GAS_USED: "Blob gas used doesn't match value in header", - BlockException.INVALID_BASEFEE_PER_GAS: "Base fee per gas is incorrect", } mapping_regex = { TransactionException.PRIORITY_GREATER_THAN_MAX_FEE_PER_GAS: ( r"(?i)priority fee.* is greater than max fee.*" ), - TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: r"(?i)empty authorization list", + TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: ( + r"(?i)empty authorization list" + ), TransactionException.SENDER_NOT_EOA: ( r"reject transactions from senders with deployed code|" r"Sender account .* shouldn't be a contract" ), - TransactionException.NONCE_MISMATCH_TOO_LOW: r"nonce \d+ too low, expected \d+|Nonce mismatch.*", + TransactionException.NONCE_MISMATCH_TOO_LOW: ( + r"nonce \d+ too low, expected \d+|Nonce mismatch.*" + ), TransactionException.NONCE_MISMATCH_TOO_HIGH: r"Nonce mismatch.*", TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: ( r"blob gas used \d+ exceeds maximum allowance \d+" @@ -62,28 +75,35 @@ class EthrexExceptionMapper(ExceptionMapper): # can't decode it. 
TransactionException.TYPE_4_TX_CONTRACT_CREATION: ( r"unexpected length|Contract creation in type 4 transaction|" - r"Error decoding field 'to' of type primitive_types::H160: InvalidLength" + r"Error decoding field 'to' of type primitive_types::H160: " + r"InvalidLength" ), TransactionException.TYPE_3_TX_CONTRACT_CREATION: ( r"unexpected length|Contract creation in type 3 transaction|" - r"Error decoding field 'to' of type primitive_types::H160: InvalidLength" + r"Error decoding field 'to' of type primitive_types::H160: " + r"InvalidLength" ), TransactionException.TYPE_4_TX_PRE_FORK: ( r"eip 7702 transactions present in pre-prague payload|" r"Type 4 transactions are not supported before the Prague fork" ), TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: ( - r"lack of funds \(\d+\) for max fee \(\d+\)|Insufficient account funds" + r"lack of funds \(\d+\) for max fee \(\d+\)|" + r"Insufficient account funds" ), TransactionException.INTRINSIC_GAS_TOO_LOW: ( - r"gas floor exceeds the gas limit|call gas cost exceeds the gas limit|" - r"Transaction gas limit lower than the minimum gas cost to execute the transaction" + r"gas floor exceeds the gas limit|" + r"call gas cost exceeds the gas limit|" + r"Transaction gas limit lower than the minimum gas cost " + r"to execute the transaction" ), TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: ( - r"Transaction gas limit lower than the gas cost floor for calldata tokens" + r"Transaction gas limit lower than the gas cost floor " + r"for calldata tokens" ), TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: ( - r"gas price is less than basefee|Insufficient max fee per gas" + r"gas price is less than basefee|" + r"Insufficient max fee per gas" ), TransactionException.INSUFFICIENT_MAX_FEE_PER_BLOB_GAS: ( r"blob gas price is greater than max fee per blob gas|" @@ -103,7 +123,8 @@ class EthrexExceptionMapper(ExceptionMapper): r"Invalid transaction: Gas limit price product overflow.*" ), TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: ( - r"Invalid transaction: Transaction gas limit exceeds maximum.*" + r"Invalid transaction: " + r"Transaction gas limit exceeds maximum.*" ), BlockException.SYSTEM_CONTRACT_CALL_FAILED: (r"System call failed.*"), BlockException.SYSTEM_CONTRACT_EMPTY: ( diff --git a/packages/testing/src/execution_testing/client_clis/clis/evmone.py b/packages/testing/src/execution_testing/client_clis/clis/evmone.py index 541f88539a..e474e752aa 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/evmone.py +++ b/packages/testing/src/execution_testing/client_clis/clis/evmone.py @@ -144,7 +144,8 @@ def _consume_debug_dump( shutil.copyfile(fixture_path, debug_fixture_path) def _skip_message(self, fixture_format: FixtureFormat) -> str: - return f"Fixture format {fixture_format.format_name} not supported by {self.binary}" + fmt_name = fixture_format.format_name + return f"Fixture format {fmt_name} not supported by {self.binary}" @cache # noqa def consume_test_file( @@ -175,15 +176,18 @@ def consume_test_file( result = self._run_command(command) if result.returncode not in [0, 1]: + cmd_str = " ".join(command) raise Exception( - f"Unexpected exit code:\n{' '.join(command)}\n\n Error:\n{result.stderr}" + f"Unexpected exit code:\n{cmd_str}\n\n Error:\n" + f"{result.stderr}" ) try: output_data = json.load(tempfile_json) except json.JSONDecodeError as e: raise Exception( - f"Failed to parse JSON output from evmone-state/blockchaintest: {e}" + "Failed to parse JSON output from " + f"evmone-state/blockchaintest: {e}" ) from e if 
debug_output_path: @@ -334,16 +338,28 @@ class EvmoneExceptionMapper(ExceptionMapper): "max priority fee per gas higher than max fee per gas" ), TransactionException.NONCE_IS_MAX: "nonce has max value:", - TransactionException.TYPE_4_TX_CONTRACT_CREATION: "set code transaction must ", - TransactionException.TYPE_4_INVALID_AUTHORITY_SIGNATURE: "invalid authorization signature", + TransactionException.TYPE_4_TX_CONTRACT_CREATION: ( + "set code transaction must " + ), + TransactionException.TYPE_4_INVALID_AUTHORITY_SIGNATURE: ( + "invalid authorization signature" + ), TransactionException.TYPE_4_INVALID_AUTHORITY_SIGNATURE_S_TOO_HIGH: ( "authorization signature s value too high" ), - TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: "empty authorization list", + TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: ( + "empty authorization list" + ), TransactionException.INTRINSIC_GAS_TOO_LOW: "intrinsic gas too low", - TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: "intrinsic gas too low", - TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: "blob gas limit exceeded", - TransactionException.INITCODE_SIZE_EXCEEDED: "max initcode size exceeded", + TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: ( + "intrinsic gas too low" + ), + TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: ( + "blob gas limit exceeded" + ), + TransactionException.INITCODE_SIZE_EXCEEDED: ( + "max initcode size exceeded" + ), TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: ( "insufficient funds for gas * price + value" ), @@ -353,23 +369,43 @@ class EvmoneExceptionMapper(ExceptionMapper): TransactionException.INSUFFICIENT_MAX_FEE_PER_BLOB_GAS: ( "max blob fee per gas less than block base fee" ), - TransactionException.TYPE_4_TX_PRE_FORK: "transaction type not supported", - TransactionException.TYPE_3_TX_PRE_FORK: "transaction type not supported", - TransactionException.TYPE_2_TX_PRE_FORK: "transaction type not supported", - TransactionException.TYPE_1_TX_PRE_FORK: "transaction type not supported", - TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: "invalid blob hash version", - TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: "blob gas limit exceeded", + TransactionException.TYPE_4_TX_PRE_FORK: ( + "transaction type not supported" + ), + TransactionException.TYPE_3_TX_PRE_FORK: ( + "transaction type not supported" + ), + TransactionException.TYPE_2_TX_PRE_FORK: ( + "transaction type not supported" + ), + TransactionException.TYPE_1_TX_PRE_FORK: ( + "transaction type not supported" + ), + TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: ( + "invalid blob hash version" + ), + TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: ( + "blob gas limit exceeded" + ), TransactionException.TYPE_3_TX_ZERO_BLOBS: "empty blob hashes list", TransactionException.TYPE_3_TX_CONTRACT_CREATION: ( "blob transaction must not be a create transaction" ), TransactionException.NONCE_MISMATCH_TOO_LOW: "nonce too low", TransactionException.NONCE_MISMATCH_TOO_HIGH: "nonce too high", - TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: "max gas limit exceeded", - BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: "invalid deposit event layout", + TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: ( + "max gas limit exceeded" + ), + BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: ( + "invalid deposit event layout" + ), # TODO EVMONE needs to differentiate when the system contract is # missing or failing - BlockException.SYSTEM_CONTRACT_EMPTY: "system contract empty or failed", - 
BlockException.SYSTEM_CONTRACT_CALL_FAILED: "system contract empty or failed", + BlockException.SYSTEM_CONTRACT_EMPTY: ( + "system contract empty or failed" + ), + BlockException.SYSTEM_CONTRACT_CALL_FAILED: ( + "system contract empty or failed" + ), } mapping_regex: ClassVar[Dict[ExceptionBase, str]] = {} diff --git a/packages/testing/src/execution_testing/client_clis/clis/execution_specs.py b/packages/testing/src/execution_testing/client_clis/clis/execution_specs.py index 70b8a8049c..7f181df9cc 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/execution_specs.py +++ b/packages/testing/src/execution_testing/client_clis/clis/execution_specs.py @@ -7,12 +7,12 @@ from io import StringIO from pathlib import Path from typing import Any, ClassVar, Dict, Optional -from typing_extensions import override import ethereum from ethereum_spec_tools.evm_tools import create_parser from ethereum_spec_tools.evm_tools.t8n import T8N, ForkCache from ethereum_spec_tools.evm_tools.utils import get_supported_forks +from typing_extensions import override from execution_testing.client_clis.cli_types import TransitionToolOutput from execution_testing.client_clis.file_utils import ( @@ -159,6 +159,7 @@ def evaluate( @classmethod def is_installed(cls, binary_path: Optional[Path] = None) -> bool: """ExecutionSpecs is always installed.""" + del binary_path return True @@ -169,13 +170,18 @@ class ExecutionSpecsExceptionMapper(ExceptionMapper): """ mapping_substring: ClassVar[Dict[ExceptionBase, str]] = { - TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: "EmptyAuthorizationListError", + TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: ( + "EmptyAuthorizationListError" + ), TransactionException.SENDER_NOT_EOA: "InvalidSenderError", TransactionException.TYPE_4_TX_CONTRACT_CREATION: ( "TransactionTypeContractCreationError(" - "'transaction type `SetCodeTransaction` not allowed to create contracts')" + "'transaction type `SetCodeTransaction` not allowed to " + "create contracts')" + ), + TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: ( + "InsufficientBalanceError" ), - TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: "InsufficientBalanceError", TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: ( "BlobGasLimitExceededError" ), @@ -186,30 +192,46 @@ class ExecutionSpecsExceptionMapper(ExceptionMapper): "InvalidBlobVersionedHashError" ), # This message is the same as TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED - TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: "BlobCountExceededError", + TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: ( + "BlobCountExceededError" + ), TransactionException.TYPE_3_TX_ZERO_BLOBS: "NoBlobDataError", - TransactionException.INTRINSIC_GAS_TOO_LOW: "InsufficientTransactionGasError", - TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: "InsufficientTransactionGasError", + TransactionException.INTRINSIC_GAS_TOO_LOW: ( + "InsufficientTransactionGasError" + ), + TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: ( + "InsufficientTransactionGasError" + ), TransactionException.INITCODE_SIZE_EXCEEDED: "InitCodeTooLargeError", TransactionException.PRIORITY_GREATER_THAN_MAX_FEE_PER_GAS: ( "PriorityFeeGreaterThanMaxFeeError" ), - TransactionException.NONCE_MISMATCH_TOO_HIGH: "NonceMismatchError('nonce too high')", - TransactionException.NONCE_MISMATCH_TOO_LOW: "NonceMismatchError('nonce too low')", + TransactionException.NONCE_MISMATCH_TOO_HIGH: ( + "NonceMismatchError('nonce too high')" + ), + TransactionException.NONCE_MISMATCH_TOO_LOW: ( + 
"NonceMismatchError('nonce too low')" + ), TransactionException.TYPE_3_TX_CONTRACT_CREATION: ( "TransactionTypeContractCreationError(" - "'transaction type `BlobTransaction` not allowed to create contracts')" + "'transaction type `BlobTransaction` not allowed to " + "create contracts')" ), TransactionException.NONCE_IS_MAX: "NonceOverflowError", - TransactionException.GAS_ALLOWANCE_EXCEEDED: "GasUsedExceedsLimitError", - TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: "TransactionGasLimitExceededError", + TransactionException.GAS_ALLOWANCE_EXCEEDED: ( + "GasUsedExceedsLimitError" + ), + TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: ( + "TransactionGasLimitExceededError" + ), BlockException.SYSTEM_CONTRACT_EMPTY: "System contract address", BlockException.SYSTEM_CONTRACT_CALL_FAILED: "call failed:", BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: "deposit", } mapping_regex: ClassVar[Dict[ExceptionBase, str]] = { + # Temporary solution for issue #1981. TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: ( - r"InsufficientMaxFeePerGasError|InvalidBlock" # Temporary solution for issue #1981. + r"InsufficientMaxFeePerGasError|InvalidBlock" ), TransactionException.TYPE_1_TX_PRE_FORK: ( r"module '.*transactions' has no attribute 'AccessListTransaction'" diff --git a/packages/testing/src/execution_testing/client_clis/clis/geth.py b/packages/testing/src/execution_testing/client_clis/clis/geth.py index 8c41822020..e2e2c439d6 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/geth.py +++ b/packages/testing/src/execution_testing/client_clis/clis/geth.py @@ -63,41 +63,65 @@ class GethExceptionMapper(ExceptionMapper): TransactionException.TYPE_3_TX_PRE_FORK: ( "transaction type not supported" ), - TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: "has invalid hash version", + TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: ( + "has invalid hash version" + ), # This message is the same as TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED - TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: "blob transaction has too many blobs", - TransactionException.TYPE_3_TX_ZERO_BLOBS: "blob transaction missing blob hashes", + TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: ( + "blob transaction has too many blobs" + ), + TransactionException.TYPE_3_TX_ZERO_BLOBS: ( + "blob transaction missing blob hashes" + ), TransactionException.TYPE_3_TX_WITH_FULL_BLOBS: ( "unexpected blob sidecar in transaction at index" ), TransactionException.TYPE_3_TX_CONTRACT_CREATION: ( - "input string too short for common.Address, decoding into (types.BlobTx).To" + "input string too short for common.Address, " + "decoding into (types.BlobTx).To" ), TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: ( "EIP-7702 transaction with empty auth list" ), TransactionException.TYPE_4_TX_CONTRACT_CREATION: ( - "input string too short for common.Address, decoding into (types.SetCodeTx).To" + "input string too short for common.Address, " + "decoding into (types.SetCodeTx).To" + ), + TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: ( + "transaction gas limit too high" ), - TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: "transaction gas limit too high", TransactionException.TYPE_4_TX_PRE_FORK: ( "transaction type not supported" ), - TransactionException.INITCODE_SIZE_EXCEEDED: "max initcode size exceeded", + TransactionException.INITCODE_SIZE_EXCEEDED: ( + "max initcode size exceeded" + ), TransactionException.NONCE_MISMATCH_TOO_LOW: "nonce too low", TransactionException.NONCE_MISMATCH_TOO_HIGH: "nonce too high", 
BlockException.INCORRECT_BLOB_GAS_USED: "blob gas used mismatch", BlockException.INCORRECT_EXCESS_BLOB_GAS: "invalid excessBlobGas", - BlockException.INVALID_VERSIONED_HASHES: "invalid number of versionedHashes", + BlockException.INVALID_VERSIONED_HASHES: ( + "invalid number of versionedHashes" + ), BlockException.INVALID_REQUESTS: "invalid requests hash", - BlockException.SYSTEM_CONTRACT_CALL_FAILED: "system call failed to execute:", + BlockException.SYSTEM_CONTRACT_CALL_FAILED: ( + "system call failed to execute:" + ), BlockException.INVALID_BLOCK_HASH: "blockhash mismatch", - BlockException.RLP_BLOCK_LIMIT_EXCEEDED: "block RLP-encoded size exceeds maximum", - BlockException.INVALID_BAL_EXTRA_ACCOUNT: "BAL change not reported in computed", - BlockException.INVALID_BAL_MISSING_ACCOUNT: "additional mutations compared to BAL", + BlockException.RLP_BLOCK_LIMIT_EXCEEDED: ( + "block RLP-encoded size exceeds maximum" + ), + BlockException.INVALID_BAL_EXTRA_ACCOUNT: ( + "BAL change not reported in computed" + ), + BlockException.INVALID_BAL_MISSING_ACCOUNT: ( + "additional mutations compared to BAL" + ), BlockException.INVALID_BLOCK_ACCESS_LIST: "unequal", BlockException.INVALID_BASEFEE_PER_GAS: "invalid baseFee", - BlockException.INVALID_BLOCK_TIMESTAMP_OLDER_THAN_PARENT: "invalid timestamp", + BlockException.INVALID_BLOCK_TIMESTAMP_OLDER_THAN_PARENT: ( + "invalid timestamp" + ), BlockException.INVALID_GASLIMIT: "invalid gas limit", BlockException.INVALID_BLOCK_NUMBER: "invalid block number", BlockException.EXTRA_DATA_TOO_BIG: "invalid extradata length", @@ -112,7 +136,9 @@ class GethExceptionMapper(ExceptionMapper): BlockException.BLOB_GAS_USED_ABOVE_LIMIT: ( r"blob gas used \d+ exceeds maximum allowance \d+" ), - BlockException.INVALID_GAS_USED_ABOVE_LIMIT: r"invalid gasUsed: have \d+, gasLimit \d+", + BlockException.INVALID_GAS_USED_ABOVE_LIMIT: ( + r"invalid gasUsed: have \d+, gasLimit \d+" + ), BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: ( r"invalid requests hash|failed to parse deposit logs" ), @@ -136,11 +162,12 @@ class GethExceptionMapper(ExceptionMapper): ), BlockException.INVALID_BAL_HASH: (r"invalid block access list:"), BlockException.INVALID_BAL_MISSING_ACCOUNT: ( - r"computed state diff contained mutated accounts which weren't reported in BAL" + r"computed state diff contained mutated accounts " + r"which weren't reported in BAL" ), BlockException.INVALID_BLOCK_ACCESS_LIST: ( - r"difference between computed state diff and BAL entry for account" - r"|invalid block access list:" + r"difference between computed state diff and " + r"BAL entry for account|invalid block access list:" ), BlockException.INCORRECT_BLOCK_FORMAT: (r"invalid block access list:"), } @@ -306,7 +333,8 @@ def consume_blockchain_test( if result.returncode != 0: raise Exception( - f"Unexpected exit code:\n{' '.join(command)}\n\n Error:\n{result.stderr}" + f"Unexpected exit code:\n{' '.join(command)}\n\n" + f"Error:\n{result.stderr}" ) result_json = json.loads(result.stdout) @@ -360,7 +388,8 @@ def consume_state_test_file( if result.returncode != 0: raise Exception( - f"Unexpected exit code:\n{' '.join(command)}\n\n Error:\n{result.stderr}" + f"Unexpected exit code:\n{' '.join(command)}\n\n" + f"Error:\n{result.stderr}" ) result_json = json.loads(result.stdout) @@ -435,5 +464,6 @@ def consume_fixture( ) else: raise Exception( - f"Fixture format {fixture_format.format_name} not supported by {self.binary}" + f"Fixture format {fixture_format.format_name} " + f"not supported by {self.binary}" ) diff --git 
a/packages/testing/src/execution_testing/client_clis/clis/nethermind.py b/packages/testing/src/execution_testing/client_clis/clis/nethermind.py index 5db8488f92..bb23b04aa0 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/nethermind.py +++ b/packages/testing/src/execution_testing/client_clis/clis/nethermind.py @@ -131,7 +131,8 @@ def _build_command_with_options( pass # no additional options needed else: raise Exception( - f"Fixture format {fixture_format.format_name} not supported by {self.binary}" + f"Fixture format {fixture_format.format_name} " + f"not supported by {self.binary}" ) command += ["--input", str(fixture_path)] if debug_output_path: @@ -163,14 +164,16 @@ def consume_state_test_file( if result.returncode != 0: raise Exception( - f"Unexpected exit code:\n{' '.join(command)}\n\n Error:\n{result.stderr}" + f"Unexpected exit code:\n{' '.join(command)}\n\n" + f"Error:\n{result.stderr}" ) try: result_json = json.loads(result.stdout) except json.JSONDecodeError as e: raise Exception( - f"Failed to parse JSON output on stdout from nethtest:\n{result.stdout}" + f"Failed to parse JSON output on stdout from nethtest:\n" + f"{result.stdout}" ) from e if not isinstance(result_json, list): @@ -205,8 +208,8 @@ def consume_state_test( test_result["name"].endswith(nethtest_suffix) for test_result in file_results ), ( - "consume direct with nethtest doesn't support the multi-data statetest format " - "used in ethereum/tests (yet)" + "consume direct with nethtest doesn't support the " + "multi-data statetest format used in ethereum/tests (yet)" ) test_result = [ test_result @@ -221,7 +224,8 @@ def consume_state_test( f"Test result for {fixture_name} missing" ) assert test_result[0]["pass"], ( - f"State test '{fixture_name}' failed, available stderr:\n {stderr}" + f"State test '{fixture_name}' failed, " + f"available stderr:\n {stderr}" ) else: if any(not test_result["pass"] for test_result in file_results): @@ -251,7 +255,8 @@ def consume_blockchain_test( if result.returncode != 0: raise Exception( - f"nethtest exited with non-zero exit code ({result.returncode}).\n" + f"nethtest exited with non-zero exit code " + f"({result.returncode}).\n" f"stdout:\n{result.stdout}\n" f"stderr:\n{result.stderr}\n" f"{' '.join(command)}" @@ -287,7 +292,8 @@ def consume_fixture( ) else: raise Exception( - f"Fixture format {fixture_format.format_name} not supported by {self.binary}" + f"Fixture format {fixture_format.format_name} " + f"not supported by {self.binary}" ) @@ -297,16 +303,28 @@ class NethermindExceptionMapper(ExceptionMapper): mapping_substring = { TransactionException.SENDER_NOT_EOA: "sender has deployed code", TransactionException.INTRINSIC_GAS_TOO_LOW: "intrinsic gas too low", - TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: "intrinsic gas too low", - TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: "miner premium is negative", + TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: ( + "intrinsic gas too low" + ), + TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: ( + "miner premium is negative" + ), TransactionException.PRIORITY_GREATER_THAN_MAX_FEE_PER_GAS: ( "InvalidMaxPriorityFeePerGas: Cannot be higher than maxFeePerGas" ), - TransactionException.GAS_ALLOWANCE_EXCEEDED: "Block gas limit exceeded", + TransactionException.GAS_ALLOWANCE_EXCEEDED: ( + "Block gas limit exceeded" + ), TransactionException.NONCE_IS_MAX: "NonceTooHigh", - TransactionException.INITCODE_SIZE_EXCEEDED: "max initcode size exceeded", - TransactionException.NONCE_MISMATCH_TOO_LOW: 
"wrong transaction nonce", - TransactionException.NONCE_MISMATCH_TOO_HIGH: "wrong transaction nonce", + TransactionException.INITCODE_SIZE_EXCEEDED: ( + "max initcode size exceeded" + ), + TransactionException.NONCE_MISMATCH_TOO_LOW: ( + "wrong transaction nonce" + ), + TransactionException.NONCE_MISMATCH_TOO_HIGH: ( + "wrong transaction nonce" + ), TransactionException.INSUFFICIENT_MAX_FEE_PER_BLOB_GAS: ( "InsufficientMaxFeePerBlobGas: Not enough to cover blob gas fee" ), @@ -319,11 +337,15 @@ class NethermindExceptionMapper(ExceptionMapper): TransactionException.TYPE_3_TX_PRE_FORK: ( "InvalidTxType: Transaction type in Custom is not supported" ), - TransactionException.TYPE_3_TX_ZERO_BLOBS: "blob transaction missing blob hashes", + TransactionException.TYPE_3_TX_ZERO_BLOBS: ( + "blob transaction missing blob hashes" + ), TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: ( "InvalidBlobVersionedHashVersion: Blob version not supported" ), - TransactionException.TYPE_3_TX_CONTRACT_CREATION: "blob transaction of type create", + TransactionException.TYPE_3_TX_CONTRACT_CREATION: ( + "blob transaction of type create" + ), TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: ( "MissingAuthorizationList: Must be set" ), @@ -334,9 +356,12 @@ class NethermindExceptionMapper(ExceptionMapper): "InvalidTxType: Transaction type in Custom is not supported" ), BlockException.INCORRECT_BLOB_GAS_USED: ( - "HeaderBlobGasMismatch: Blob gas in header does not match calculated" + "HeaderBlobGasMismatch: " + "Blob gas in header does not match calculated" + ), + BlockException.INVALID_REQUESTS: ( + "InvalidRequestsHash: Requests hash mismatch in block" ), - BlockException.INVALID_REQUESTS: "InvalidRequestsHash: Requests hash mismatch in block", BlockException.INVALID_GAS_USED_ABOVE_LIMIT: ( "ExceededGasLimit: Gas used exceeds gas limit." 
), @@ -346,23 +371,43 @@ class NethermindExceptionMapper(ExceptionMapper): BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: ( "DepositsInvalid: Invalid deposit event layout:" ), - BlockException.INVALID_BASEFEE_PER_GAS: "InvalidBaseFeePerGas: Does not match calculated", - BlockException.INVALID_BLOCK_TIMESTAMP_OLDER_THAN_PARENT: "InvalidTimestamp: Timestamp in header cannot be lower than ancestor", - BlockException.INVALID_BLOCK_NUMBER: "InvalidBlockNumber: Block number does not match the parent", - BlockException.EXTRA_DATA_TOO_BIG: "InvalidExtraData: Extra data in header is not valid", - BlockException.INVALID_GASLIMIT: "InvalidGasLimit: Gas limit is not correct", - BlockException.INVALID_RECEIPTS_ROOT: "InvalidReceiptsRoot: Receipts root in header does not match", - BlockException.INVALID_LOG_BLOOM: "InvalidLogsBloom: Logs bloom in header does not match", - BlockException.INVALID_STATE_ROOT: "InvalidStateRoot: State root in header does not match", + BlockException.INVALID_BASEFEE_PER_GAS: ( + "InvalidBaseFeePerGas: Does not match calculated" + ), + BlockException.INVALID_BLOCK_TIMESTAMP_OLDER_THAN_PARENT: ( + "InvalidTimestamp: " + "Timestamp in header cannot be lower than ancestor" + ), + BlockException.INVALID_BLOCK_NUMBER: ( + "InvalidBlockNumber: Block number does not match the parent" + ), + BlockException.EXTRA_DATA_TOO_BIG: ( + "InvalidExtraData: Extra data in header is not valid" + ), + BlockException.INVALID_GASLIMIT: ( + "InvalidGasLimit: Gas limit is not correct" + ), + BlockException.INVALID_RECEIPTS_ROOT: ( + "InvalidReceiptsRoot: Receipts root in header does not match" + ), + BlockException.INVALID_LOG_BLOOM: ( + "InvalidLogsBloom: Logs bloom in header does not match" + ), + BlockException.INVALID_STATE_ROOT: ( + "InvalidStateRoot: State root in header does not match" + ), } mapping_regex = { TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: ( - r"insufficient sender balance|insufficient MaxFeePerGas for sender balance" + r"insufficient sender balance|" + r"insufficient MaxFeePerGas for sender balance" + ), + TransactionException.TYPE_3_TX_WITH_FULL_BLOBS: ( + r"Transaction \d+ is not valid" ), - TransactionException.TYPE_3_TX_WITH_FULL_BLOBS: r"Transaction \d+ is not valid", TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: ( - r"BlockBlobGasExceeded: A block cannot have more than \d+ blob gas, blobs count \d+, " - r"blobs gas used: \d+" + r"BlockBlobGasExceeded: A block cannot have more than " + r"\d+ blob gas, blobs count \d+, blobs gas used: \d+" ), TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: ( r"BlobTxGasLimitExceeded: Transaction's totalDataGas=\d+ " @@ -372,11 +417,12 @@ class NethermindExceptionMapper(ExceptionMapper): r"TxGasLimitCapExceeded: Gas limit \d+ \w+ cap of \d+\.?" ), BlockException.INCORRECT_EXCESS_BLOB_GAS: ( - r"HeaderExcessBlobGasMismatch: Excess blob gas in header does not match calculated" - r"|Overflow in excess blob gas" + r"HeaderExcessBlobGasMismatch: Excess blob gas in header " + r"does not match calculated|Overflow in excess blob gas" ), BlockException.INVALID_BLOCK_HASH: ( - r"Invalid block hash 0x[0-9a-f]+ does not match calculated hash 0x[0-9a-f]+" + r"Invalid block hash 0x[0-9a-f]+ does not match " + r"calculated hash 0x[0-9a-f]+" ), BlockException.SYSTEM_CONTRACT_EMPTY: ( r"(Withdrawals|Consolidations)Empty: Contract is not deployed\." @@ -386,17 +432,19 @@ class NethermindExceptionMapper(ExceptionMapper): ), # BAL Exceptions: TODO - review once all clients completed. 
BlockException.INVALID_BAL_EXTRA_ACCOUNT: ( - r"could not be parsed as a block: Could not decode block access list." + r"could not be parsed as a block: " + r"Could not decode block access list." ), BlockException.INVALID_BAL_HASH: (r"InvalidBlockLevelAccessListRoot:"), BlockException.INVALID_BAL_MISSING_ACCOUNT: ( r"InvalidBlockLevelAccessListRoot:" ), BlockException.INVALID_BLOCK_ACCESS_LIST: ( - r"InvalidBlockLevelAccessListRoot:" - r"|could not be parsed as a block: Could not decode block access list." + r"InvalidBlockLevelAccessListRoot:|could not be parsed as a " + r"block: Could not decode block access list." ), BlockException.INCORRECT_BLOCK_FORMAT: ( - r"could not be parsed as a block: Could not decode block access list." + r"could not be parsed as a block: " + r"Could not decode block access list." ), } diff --git a/packages/testing/src/execution_testing/client_clis/clis/nimbus.py b/packages/testing/src/execution_testing/client_clis/clis/nimbus.py index 3452e8f22c..aaacc837d8 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/nimbus.py +++ b/packages/testing/src/execution_testing/client_clis/clis/nimbus.py @@ -44,7 +44,8 @@ def __init__( result = subprocess.run(args, capture_output=True, text=True) except subprocess.CalledProcessError as e: raise Exception( - f"evm process unexpectedly returned a non-zero status code: {e}." + f"evm process unexpectedly returned " + f"a non-zero status code: {e}." ) from e except Exception as e: raise Exception( @@ -80,7 +81,9 @@ class NimbusExceptionMapper(ExceptionMapper): TransactionException.TYPE_4_TX_CONTRACT_CREATION: ( "set code transaction must not be a create transaction" ), - TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: "invalid tx: not enough cash to send", + TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: ( + "invalid tx: not enough cash to send" + ), TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: ( "would exceed maximum allowance" ), @@ -97,22 +100,40 @@ class NimbusExceptionMapper(ExceptionMapper): "invalid tx: one of blobVersionedHash has invalid version" ), # TODO: temp solution until mapper for nimbus is fixed - TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: "zero gasUsed but transactions present", + TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: ( + "zero gasUsed but transactions present" + ), # This message is the same as TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED - TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: "exceeds maximum allowance", - TransactionException.TYPE_3_TX_ZERO_BLOBS: "blob transaction missing blob hashes", - TransactionException.INTRINSIC_GAS_TOO_LOW: "zero gasUsed but transactions present", - TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: "intrinsic gas too low", - TransactionException.INITCODE_SIZE_EXCEEDED: "max initcode size exceeded", + TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: ( + "exceeds maximum allowance" + ), + TransactionException.TYPE_3_TX_ZERO_BLOBS: ( + "blob transaction missing blob hashes" + ), + TransactionException.INTRINSIC_GAS_TOO_LOW: ( + "zero gasUsed but transactions present" + ), + TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: ( + "intrinsic gas too low" + ), + TransactionException.INITCODE_SIZE_EXCEEDED: ( + "max initcode size exceeded" + ), BlockException.RLP_BLOCK_LIMIT_EXCEEDED: ( # TODO: "ExceededBlockSizeLimit: Exceeded block size limit" ), BlockException.INVALID_BASEFEE_PER_GAS: "invalid baseFee", - BlockException.INVALID_BLOCK_NUMBER: "Blocks must be numbered consecutively", - 
BlockException.INVALID_BLOCK_TIMESTAMP_OLDER_THAN_PARENT: "Invalid timestamp", + BlockException.INVALID_BLOCK_NUMBER: ( + "Blocks must be numbered consecutively" + ), + BlockException.INVALID_BLOCK_TIMESTAMP_OLDER_THAN_PARENT: ( + "Invalid timestamp" + ), BlockException.INVALID_GASLIMIT: "invalid gas limit", - BlockException.INVALID_GAS_USED_ABOVE_LIMIT: "gasUsed should be non negative and smaller or equal gasLimit", + BlockException.INVALID_GAS_USED_ABOVE_LIMIT: ( + "gasUsed should be non negative and smaller or equal gasLimit" + ), BlockException.INVALID_BLOCK_HASH: "blockhash mismatch", BlockException.INVALID_STATE_ROOT: "stateRoot mismatch", BlockException.INVALID_RECEIPTS_ROOT: "receiptRoot mismatch", diff --git a/packages/testing/src/execution_testing/client_clis/clis/reth.py b/packages/testing/src/execution_testing/client_clis/clis/reth.py index d20c44c472..f2628205aa 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/reth.py +++ b/packages/testing/src/execution_testing/client_clis/clis/reth.py @@ -15,17 +15,25 @@ class RethExceptionMapper(ExceptionMapper): "reject transactions from senders with deployed code" ), TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: "lack of funds", - TransactionException.INITCODE_SIZE_EXCEEDED: "create initcode size limit", - TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: "gas price is less than basefee", + TransactionException.INITCODE_SIZE_EXCEEDED: ( + "create initcode size limit" + ), + TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: ( + "gas price is less than basefee" + ), TransactionException.PRIORITY_GREATER_THAN_MAX_FEE_PER_GAS: ( "priority fee is greater than max fee" ), TransactionException.GASLIMIT_PRICE_PRODUCT_OVERFLOW: "overflow", TransactionException.TYPE_3_TX_CONTRACT_CREATION: "unexpected length", TransactionException.TYPE_3_TX_WITH_FULL_BLOBS: "unexpected list", - TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: "blob version not supported", + TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: ( + "blob version not supported" + ), TransactionException.TYPE_3_TX_ZERO_BLOBS: "empty blobs", - TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: "empty authorization list", + TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: ( + "empty authorization list" + ), TransactionException.TYPE_4_TX_CONTRACT_CREATION: "unexpected length", TransactionException.TYPE_4_TX_PRE_FORK: ( "eip 7702 transactions present in pre-prague payload" @@ -41,10 +49,15 @@ class RethExceptionMapper(ExceptionMapper): BlockException.INVALID_LOG_BLOOM: "header bloom filter mismatch", } mapping_regex = { - TransactionException.NONCE_MISMATCH_TOO_LOW: r"nonce \d+ too low, expected \d+", - TransactionException.NONCE_MISMATCH_TOO_HIGH: r"nonce \d+ too high, expected \d+", + TransactionException.NONCE_MISMATCH_TOO_LOW: ( + r"nonce \d+ too low, expected \d+" + ), + TransactionException.NONCE_MISMATCH_TOO_HIGH: ( + r"nonce \d+ too high, expected \d+" + ), TransactionException.INSUFFICIENT_MAX_FEE_PER_BLOB_GAS: ( - r"blob gas price \(\d+\) is greater than max fee per blob gas \(\d+\)" + r"blob gas price \(\d+\) is greater than " + r"max fee per blob gas \(\d+\)" ), TransactionException.INTRINSIC_GAS_TOO_LOW: ( r"call gas cost \(\d+\) exceeds the gas limit \(\d+\)" @@ -55,7 +68,9 @@ class RethExceptionMapper(ExceptionMapper): TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: ( r"blob gas used \d+ exceeds maximum allowance \d+" ), - TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: r"too many blobs, have \d+, max 
\d+", + TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: ( + r"too many blobs, have \d+, max \d+" + ), TransactionException.TYPE_3_TX_PRE_FORK: ( r"blob transactions present in pre-cancun payload|empty blobs" ), @@ -65,32 +80,40 @@ class RethExceptionMapper(ExceptionMapper): TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: ( r"transaction gas limit.*is greater than the cap" ), - BlockException.SYSTEM_CONTRACT_CALL_FAILED: r"failed to apply .* requests contract call", + BlockException.SYSTEM_CONTRACT_CALL_FAILED: ( + r"failed to apply .* requests contract call" + ), BlockException.INCORRECT_BLOB_GAS_USED: ( - r"blob gas used mismatch|blob gas used \d+ is not a multiple of blob gas per blob" + r"blob gas used mismatch|" + r"blob gas used \d+ is not a multiple of blob gas per blob" ), BlockException.INCORRECT_EXCESS_BLOB_GAS: ( - r"excess blob gas \d+ is not a multiple of blob gas per blob|invalid excess blob gas" + r"excess blob gas \d+ is not a multiple of blob gas per blob|" + r"invalid excess blob gas" ), BlockException.INVALID_GAS_USED_ABOVE_LIMIT: ( r"block used gas \(\d+\) is greater than gas limit \(\d+\)" ), BlockException.INVALID_GASLIMIT: ( - r"child gas_limit \d+ max .* is .*|child gas limit \d+ is below the minimum allowed limit" + r"child gas_limit \d+ max .* is .*|" + r"child gas limit \d+ is below the minimum allowed limit" ), BlockException.INVALID_BLOCK_TIMESTAMP_OLDER_THAN_PARENT: ( - r"block timestamp \d+ is in the past compared to the parent timestamp \d+" + r"block timestamp \d+ is in the past compared to " + r"the parent timestamp \d+" ), BlockException.INVALID_BLOCK_NUMBER: ( r"block number \d+ does not match parent block number \d+" ), # BAL Exceptions: TODO - review once all clients completed. BlockException.INVALID_BAL_EXTRA_ACCOUNT: ( - r"Block BAL contains an account change that is not present in the computed BAL." + r"Block BAL contains an account change " + r"that is not present in the computed BAL." ), BlockException.INVALID_BAL_HASH: (r"Block's access list is invalid."), BlockException.INVALID_BAL_MISSING_ACCOUNT: ( - r"Block BAL is missing an account change that is present in the computed BAL." + r"Block BAL is missing an account change " + r"that is present in the computed BAL." ), BlockException.INVALID_BLOCK_ACCESS_LIST: ( r"Block's access list is invalid." 
@@ -113,6 +136,7 @@ class RethExceptionMapper(ExceptionMapper): # EELS definition for `is_valid_deposit_event_data`: # https://github.com/ethereum/execution-specs/blob/5ddb904fa7ba27daeff423e78466744c51e8cb6a/src/ethereum/forks/prague/requests.py#L51 BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: ( - r"failed to decode deposit requests from receipts|mismatched block requests hash" + r"failed to decode deposit requests from receipts|" + r"mismatched block requests hash" ), } diff --git a/packages/testing/src/execution_testing/client_clis/ethereum_cli.py b/packages/testing/src/execution_testing/client_clis/ethereum_cli.py index e55e2f602a..1e02634f5b 100644 --- a/packages/testing/src/execution_testing/client_clis/ethereum_cli.py +++ b/packages/testing/src/execution_testing/client_clis/ethereum_cli.py @@ -111,7 +111,8 @@ def from_binary_path( binary = Path(resolved_path) else: logger.debug( - f"Resolved path does not exist: {resolved_path}\nTrying to find it via `which`" + f"Resolved path does not exist: {resolved_path}\n" + "Trying to find it via `which`" ) # it might be that the provided binary exists in path @@ -155,7 +156,8 @@ def from_binary_path( if result.returncode != 0: logger.debug( - f"Subprocess returncode is not 0! It is: {result.returncode}" + "Subprocess returncode is not 0! " + f"It is: {result.returncode}" ) # don't raise exception, you are supposed to keep trying # different version flags @@ -188,12 +190,14 @@ def from_binary_path( continue logger.debug( - f"T8n with version {binary_output} does not belong to subclass {subclass}" + f"T8n with version {binary_output} does not " + f"belong to subclass {subclass}" ) except Exception as e: logger.debug( - f"Trying to determine t8n version with flag `{version_flag}` failed: {e}" + f"Trying to determine t8n version with flag " + f"`{version_flag}` failed: {e}" ) continue @@ -209,7 +213,8 @@ def detect_binary(cls, binary_output: str) -> bool: assert cls.detect_binary_pattern is not None logger.debug( - f"Trying to match {binary_output} against this pattern: {cls.detect_binary_pattern}" + f"Trying to match {binary_output} against this " + f"pattern: {cls.detect_binary_pattern}" ) match_result = cls.detect_binary_pattern.match(binary_output) match_successful: bool = match_result is not None diff --git a/packages/testing/src/execution_testing/client_clis/tests/test_execution_specs.py b/packages/testing/src/execution_testing/client_clis/tests/test_execution_specs.py index df0d35b6a6..498e08deb0 100644 --- a/packages/testing/src/execution_testing/client_clis/tests/test_execution_specs.py +++ b/packages/testing/src/execution_testing/client_clis/tests/test_execution_specs.py @@ -116,7 +116,8 @@ def test_evm_tool_binary_arg( # typing: Path can not take None; but if None, we may # as well fail explicitly. 
raise Exception( - f"Failed to find `{DEFAULT_EVM_T8N_BINARY_NAME}` in the PATH via which" + f"Failed to find `{DEFAULT_EVM_T8N_BINARY_NAME}` " + "in the PATH via which" ) evm_tool(binary=Path(evm_bin)).version() return diff --git a/packages/testing/src/execution_testing/client_clis/tests/test_transition_tool.py b/packages/testing/src/execution_testing/client_clis/tests/test_transition_tool.py index 7e04f3f88e..5a77e80912 100644 --- a/packages/testing/src/execution_testing/client_clis/tests/test_transition_tool.py +++ b/packages/testing/src/execution_testing/client_clis/tests/test_transition_tool.py @@ -49,7 +49,10 @@ def test_default_tool() -> None: "ethereum-spec-evm", ExecutionSpecsTransitionTool, marks=pytest.mark.skip( - reason="ExecutionSpecsTransitionTool through binary path is not supported" + reason=( + "ExecutionSpecsTransitionTool through binary path " + "is not supported" + ) ), ), ( diff --git a/packages/testing/src/execution_testing/client_clis/transition_tool.py b/packages/testing/src/execution_testing/client_clis/transition_tool.py index 1ac1b4cffe..223a0cda71 100644 --- a/packages/testing/src/execution_testing/client_clis/transition_tool.py +++ b/packages/testing/src/execution_testing/client_clis/transition_tool.py @@ -526,9 +526,10 @@ def _evaluate_server( if debug_output_path: with profiler.pause(): + request_data_str = json.dumps(request_data_json, indent=2) request_info = ( f"Server URL: {self.server_url}\n\n" - f"Request Data:\n{json.dumps(request_data_json, indent=2)}\n" + f"Request Data:\n{request_data_str}\n" ) dump_files_to_directory( debug_output_path, @@ -543,7 +544,9 @@ def _evaluate_server( tx.model_dump(mode="json", **model_dump_config) for tx in request_data.input.txs ], - "input/blob_params.json": request_data.input.blob_params, + "input/blob_params.json": ( + request_data.input.blob_params + ), "request_info.txt": request_info, }, ) @@ -570,9 +573,10 @@ def _evaluate_server( if debug_output_path: with profiler.pause(): + headers_str = json.dumps(dict(response.headers), indent=2) response_info = ( f"Status Code: {response.status_code}\n\n" - f"Headers:\n{json.dumps(dict(response.headers), indent=2)}\n\n" + f"Headers:\n{headers_str}\n\n" f"Content:\n{response.text}\n" ) dump_files_to_directory( diff --git a/packages/testing/src/execution_testing/config/app.py b/packages/testing/src/execution_testing/config/app.py index 6c3f302e64..2f7a486790 100644 --- a/packages/testing/src/execution_testing/config/app.py +++ b/packages/testing/src/execution_testing/config/app.py @@ -9,7 +9,9 @@ from pydantic import BaseModel -import execution_testing.cli.pytest_commands.plugins.consume.releases as releases +from execution_testing.cli.pytest_commands.plugins.consume import ( + releases, +) class AppConfig(BaseModel): diff --git a/packages/testing/src/execution_testing/exceptions/exceptions.py b/packages/testing/src/execution_testing/exceptions/exceptions.py index ce5d7de6b7..4243ef3929 100644 --- a/packages/testing/src/execution_testing/exceptions/exceptions.py +++ b/packages/testing/src/execution_testing/exceptions/exceptions.py @@ -51,7 +51,8 @@ def from_str(cls, value: "str | ExceptionBase") -> "ExceptionBase": else: # Otherwise, use the class that the method is called on assert cls.__name__ == class_name, ( - f"Unexpected exception type: {class_name}, expected {cls.__name__}" + f"Unexpected exception type: {class_name}, " + f"expected {cls.__name__}" ) exception_class = cls diff --git a/packages/testing/src/execution_testing/exceptions/exceptions/base.py 
b/packages/testing/src/execution_testing/exceptions/exceptions/base.py index 4d96f9d148..7c9a9d8339 100644 --- a/packages/testing/src/execution_testing/exceptions/exceptions/base.py +++ b/packages/testing/src/execution_testing/exceptions/exceptions/base.py @@ -51,7 +51,8 @@ def from_str(cls, value: "str | ExceptionBase") -> "ExceptionBase": else: # Otherwise, use the class that the method is called on assert cls.__name__ == class_name, ( - f"Unexpected exception type: {class_name}, expected {cls.__name__}" + f"Unexpected exception type: {class_name}, " + f"expected {cls.__name__}" ) exception_class = cls diff --git a/packages/testing/src/execution_testing/execution/base.py b/packages/testing/src/execution_testing/execution/base.py index 71aed55166..dacd8a121b 100644 --- a/packages/testing/src/execution_testing/execution/base.py +++ b/packages/testing/src/execution_testing/execution/base.py @@ -42,8 +42,11 @@ def get_required_sender_balances( fork: Fork, ) -> Dict[Address, int]: """Get the required sender balances.""" + del gas_price, max_fee_per_gas, max_priority_fee_per_gas + del max_fee_per_blob_gas, fork raise Exception( - f"Method `get_required_sender_balances` not implemented for {self.format_name}" + "Method `get_required_sender_balances` not implemented for " + f"{self.format_name}" ) @abstractmethod diff --git a/packages/testing/src/execution_testing/execution/blob_transaction.py b/packages/testing/src/execution_testing/execution/blob_transaction.py index 2dd89cb4c2..7ffe16641f 100644 --- a/packages/testing/src/execution_testing/execution/blob_transaction.py +++ b/packages/testing/src/execution_testing/execution/blob_transaction.py @@ -50,8 +50,8 @@ def versioned_hashes_with_blobs_and_proofs( ) else: raise ValueError( - f"Blob with versioned hash {blob.versioned_hash.hex()} requires a proof " - "that is not None" + f"Blob with versioned hash {blob.versioned_hash.hex()} " + "requires a proof that is not None" ) return versioned_hashes @@ -66,8 +66,8 @@ class BlobTransaction(BaseExecute): format_name: ClassVar[str] = "blob_transaction_test" description: ClassVar[str] = ( - "Send blob transactions to the execution client and validate their availability via " - "`engine_getBlobsV*`" + "Send blob transactions to the execution client and validate their " + "availability via `engine_getBlobsV*`" ) txs: List[NetworkWrappedTransaction | Transaction] @@ -132,7 +132,8 @@ def execute( tx.rlp(), request_id=metadata.to_json() ) assert expected_hash == received_hash, ( - f"Expected hash {expected_hash} does not match received hash {received_hash}." + f"Expected hash {expected_hash} does not match " + f"received hash {received_hash}." 
) if engine_rpc is None: @@ -161,14 +162,14 @@ def execute( if self.nonexisting_blob_hashes is not None: if blob_response is not None: raise ValueError( - f"Non-existing blob hashes were requested and " - "the client was expected to respond with 'null', but instead it replied: " - f"{blob_response.root}" + "Non-existing blob hashes were requested and the client " + "was expected to respond with 'null', but instead it " + f"replied: {blob_response.root}" ) else: logger.info( - "Test was passed (partial responses are not allowed and the client " - "correctly returned 'null')" + "Test was passed (partial responses are not allowed and " + "the client correctly returned 'null')" ) eth_rpc.wait_for_transactions(sent_txs) return @@ -176,7 +177,8 @@ def execute( assert blob_response is not None local_blobs_and_proofs = list(versioned_hashes.values()) assert len(blob_response) == len(local_blobs_and_proofs), ( - f"Expected {len(local_blobs_and_proofs)} blobs and proofs, got {len(blob_response)}." + f"Expected {len(local_blobs_and_proofs)} blobs and proofs, " + f"got {len(blob_response)}." ) for expected_blob, received_blob in zip( @@ -198,8 +200,14 @@ def execute( raise ValueError("Blob mismatch.") if expected_blob.proofs != received_blob.proofs: error_message = "Proofs mismatch." - error_message += f"len(expected_blob.proofs) = {len(expected_blob.proofs)}, " - error_message += f"len(received_blob.proofs) = {len(received_blob.proofs)}\n" + expected_len = len(expected_blob.proofs) + received_len = len(received_blob.proofs) + error_message += ( + f"len(expected_blob.proofs) = {expected_len}, " + ) + error_message += ( + f"len(received_blob.proofs) = {received_len}\n" + ) if len(expected_blob.proofs) == len(received_blob.proofs): index = 0 @@ -212,16 +220,28 @@ def execute( error_message += ( f"Proof length mismatch. index = {index}," ) - error_message += f"expected_proof length = {len(expected_proof)}, " - error_message += f"received_proof length = {len(received_proof)}\n" + exp_len = len(expected_proof) + rcv_len = len(received_proof) + error_message += ( + f"expected_proof length = {exp_len}, " + ) + error_message += ( + f"received_proof length = {rcv_len}\n" + ) index += 1 continue if expected_proof != received_proof: error_message += ( f"Proof mismatch. 
index = {index}," ) - error_message += f"expected_proof hash = {sha256(expected_proof).hexdigest()}, " - error_message += f"received_proof hash = {sha256(received_proof).hexdigest()}\n" + exp_hash = sha256(expected_proof).hexdigest() + rcv_hash = sha256(received_proof).hexdigest() + error_message += ( + f"expected_proof hash = {exp_hash}, " + ) + error_message += ( + f"received_proof hash = {rcv_hash}\n" + ) index += 1 raise ValueError(error_message) else: diff --git a/packages/testing/src/execution_testing/execution/transaction_post.py b/packages/testing/src/execution_testing/execution/transaction_post.py index 716c47793b..5648ddab06 100644 --- a/packages/testing/src/execution_testing/execution/transaction_post.py +++ b/packages/testing/src/execution_testing/execution/transaction_post.py @@ -41,7 +41,8 @@ class TransactionPost(BaseExecute): format_name: ClassVar[str] = "transaction_post_test" description: ClassVar[str] = ( - "Simple transaction sending, then post-check after all transactions are included" + "Simple transaction sending, then post-check after all transactions " + "are included" ) def get_required_sender_balances( @@ -85,7 +86,8 @@ def execute( for tx in block: if not isinstance(tx, NetworkWrappedTransaction): assert tx.ty != 3, ( - "Unwrapped transaction type 3 is not supported in execute mode." + "Unwrapped transaction type 3 is not supported in " + "execute mode." ) # Track transaction hashes for gas validation (benchmarking) @@ -129,7 +131,8 @@ def execute( ) as exc_info: eth_rpc.send_transaction(transaction) logger.info( - f"Transaction rejected as expected: {exc_info.value}" + "Transaction rejected as expected: " + f"{exc_info.value}" ) else: # Send transactions (batching is handled by eth_rpc internally) @@ -153,10 +156,12 @@ def execute( total_gas_used += gas_used # Verify that the total gas consumed matches expectations - assert total_gas_used == self.expected_benchmark_gas_used, ( + expected_gas = self.expected_benchmark_gas_used + diff = total_gas_used - expected_gas + assert total_gas_used == expected_gas, ( f"Total gas used ({total_gas_used}) does not match " - f"expected benchmark gas ({self.expected_benchmark_gas_used}), " - f"difference: {total_gas_used - self.expected_benchmark_gas_used}" + f"expected benchmark gas ({expected_gas}), " + f"difference: {diff}" ) for address, account in self.post.root.items(): @@ -176,15 +181,18 @@ def execute( else: if "balance" in account.model_fields_set: assert balance == account.balance, ( - f"Balance of {address} is {balance}, expected {account.balance}." + f"Balance of {address} is {balance}, " + f"expected {account.balance}." ) if "code" in account.model_fields_set: assert code == account.code, ( - f"Code of {address} is {code}, expected {account.code}." + f"Code of {address} is {code}, " + f"expected {account.code}." ) if "nonce" in account.model_fields_set: assert nonce == account.nonce, ( - f"Nonce of {address} is {nonce}, expected {account.nonce}." + f"Nonce of {address} is {nonce}, " + f"expected {account.nonce}." ) if "storage" in account.model_fields_set: for key, value in account.storage.items(): @@ -192,6 +200,6 @@ def execute( address, Hash(key) ) assert storage_value == value, ( - f"Storage value at {key} of {address} is {storage_value}," - f"expected {value}." + f"Storage value at {key} of {address} is " + f"{storage_value}, expected {value}." 
) diff --git a/packages/testing/src/execution_testing/forks/base_fork.py b/packages/testing/src/execution_testing/forks/base_fork.py index 6c549f2b76..89fa18afc2 100644 --- a/packages/testing/src/execution_testing/forks/base_fork.py +++ b/packages/testing/src/execution_testing/forks/base_fork.py @@ -602,7 +602,7 @@ def get_reward(cls, *, block_number: int = 0, timestamp: int = 0) -> int: @classmethod @abstractmethod def supports_protected_txs(cls) -> bool: - """Return whether the fork implements EIP-155 transaction protection""" + """Return whether the fork implements EIP-155 protection.""" pass @classmethod diff --git a/packages/testing/src/execution_testing/forks/forks/forks.py b/packages/testing/src/execution_testing/forks/forks/forks.py index 5bd10b95f4..c2deb27bae 100644 --- a/packages/testing/src/execution_testing/forks/forks/forks.py +++ b/packages/testing/src/execution_testing/forks/forks/forks.py @@ -1079,9 +1079,7 @@ def get_reward(cls, *, block_number: int = 0, timestamp: int = 0) -> int: @classmethod def supports_protected_txs(cls) -> bool: - """ - At Genesis, fork does not have support for EIP-155 protected transactions. - """ + """At Genesis, fork has no support for EIP-155 protected txs.""" return False @classmethod @@ -1374,20 +1372,22 @@ def build_default_block_header( """ Build a default block header for this fork with the given attributes. - This method automatically detects which header fields are required by the fork - and assigns appropriate default values. It introspects the FixtureHeader model - to find fields with HeaderForkRequirement annotations and automatically includes - them if the fork requires them. + This method automatically detects which header fields are required by + the fork and assigns appropriate default values. It introspects the + FixtureHeader model to find fields with HeaderForkRequirement + annotations and automatically includes them if the fork requires them. Args: block_number: The block number timestamp: The block timestamp Returns: - FixtureHeader instance with default values applied based on fork requirements + FixtureHeader instance with default values applied based on fork + requirements. Raises: TypeError: If the overrides don't have the correct type. + """ from execution_testing.fixtures.blockchain import FixtureHeader diff --git a/packages/testing/src/execution_testing/forks/helpers.py b/packages/testing/src/execution_testing/forks/helpers.py index b86c2bc46f..f78d84468e 100644 --- a/packages/testing/src/execution_testing/forks/helpers.py +++ b/packages/testing/src/execution_testing/forks/helpers.py @@ -66,17 +66,19 @@ def __init__(self, message: str) -> None: def get_forks() -> List[Type[BaseFork]]: """ - Return list of all the fork classes implemented by `execution_testing.forks` - ordered chronologically by deployment. + Return all fork classes implemented by `execution_testing.forks`. + + Ordered chronologically by deployment. """ return all_forks[:] def get_deployed_forks() -> List[Type[BaseFork]]: """ - Return list of all the fork classes implemented by `execution_testing.forks` - that have been deployed to mainnet, chronologically ordered by deployment. - BPO (Blob Parameter Only) forks are excluded as they are handled separately. + Return all fork classes that have been deployed to mainnet. + + Chronologically ordered by deployment. BPO (Blob Parameter Only) forks + are excluded as they are handled separately. 
""" return [ fork @@ -87,9 +89,9 @@ def get_deployed_forks() -> List[Type[BaseFork]]: def get_development_forks() -> List[Type[BaseFork]]: """ - Return list of all the fork classes implemented by `execution_testing.forks` - that have been not yet deployed to mainnet and are currently under - development. The list is ordered by their planned deployment date. + Return all fork classes not yet deployed and under development. + + The list is ordered by their planned deployment date. """ return [fork for fork in get_forks() if not fork.is_deployed()] diff --git a/packages/testing/src/execution_testing/forks/tests/test_forks.py b/packages/testing/src/execution_testing/forks/tests/test_forks.py index f945fad0a6..5967b49e64 100644 --- a/packages/testing/src/execution_testing/forks/tests/test_forks.py +++ b/packages/testing/src/execution_testing/forks/tests/test_forks.py @@ -8,11 +8,11 @@ from execution_testing.base_types import BlobSchedule from ..forks.forks import ( - Amsterdam, BPO1, BPO2, BPO3, BPO4, + Amsterdam, Berlin, Cancun, Frontier, @@ -236,12 +236,12 @@ def test_fork_in_pydantic_model() -> None: "fork_2": "ParisToShanghaiAtTime15k", "fork_3": None, } - assert ( - model.model_dump_json() - == '{"fork_1":"Paris","fork_2":"ParisToShanghaiAtTime15k","fork_3":null}' + assert model.model_dump_json() == ( + '{"fork_1":"Paris","fork_2":"ParisToShanghaiAtTime15k","fork_3":null}' ) model = ForkInPydanticModel.model_validate_json( - '{"fork_1": "Paris", "fork_2": "ParisToShanghaiAtTime15k", "fork_3": null}' + '{"fork_1": "Paris", "fork_2": "ParisToShanghaiAtTime15k", ' + '"fork_3": null}' ) assert model.fork_1 == Paris assert model.fork_2 == ParisToShanghaiAtTime15k @@ -415,9 +415,9 @@ def test_tx_types() -> None: # noqa: D103 "create_tx", [False, True], ) -def test_tx_intrinsic_gas_functions( +def test_tx_intrinsic_gas_functions( # noqa: D103 fork: Fork, calldata: bytes, create_tx: bool -) -> None: # noqa: D103 +) -> None: intrinsic_gas = 21_000 if calldata == b"\0": intrinsic_gas += 4 diff --git a/packages/testing/src/execution_testing/forks/tests/test_opcode_gas_costs.py b/packages/testing/src/execution_testing/forks/tests/test_opcode_gas_costs.py index 6391429a37..11cd3354f9 100644 --- a/packages/testing/src/execution_testing/forks/tests/test_opcode_gas_costs.py +++ b/packages/testing/src/execution_testing/forks/tests/test_opcode_gas_costs.py @@ -4,7 +4,7 @@ from execution_testing.vm import Bytecode, Op -from ..forks.forks import Osaka, Homestead +from ..forks.forks import Homestead, Osaka from ..helpers import Fork @@ -417,7 +417,7 @@ ), ], ) -def test_opcode_gas_costs(fork: Fork, opcode: Op, expected_cost: int) -> None: +def test_opcode_gas_costs(fork: Fork, opcode: Op, expected_cost: int) -> None: # noqa: D103 op_gas_cost_calc = fork.opcode_gas_calculator() assert expected_cost == op_gas_cost_calc(opcode) @@ -445,7 +445,7 @@ def test_opcode_gas_costs(fork: Fork, opcode: Op, expected_cost: int) -> None: ), ], ) -def test_bytecode_gas_costs( +def test_bytecode_gas_costs( # noqa: D103 fork: Fork, bytecode: Bytecode, expected_cost: int ) -> None: assert expected_cost == bytecode.gas_cost(fork) @@ -486,7 +486,7 @@ def test_bytecode_gas_costs( ), ], ) -def test_opcode_refunds(fork: Fork, opcode: Op, expected_refund: int) -> None: +def test_opcode_refunds(fork: Fork, opcode: Op, expected_refund: int) -> None: # noqa: D103 op_refund_calc = fork.opcode_refund_calculator() assert expected_refund == op_refund_calc(opcode) @@ -522,7 +522,7 @@ def test_opcode_refunds(fork: Fork, opcode: Op, 
expected_refund: int) -> None: ), ], ) -def test_bytecode_refunds( +def test_bytecode_refunds( # noqa: D103 fork: Fork, bytecode: Bytecode, expected_refund: int ) -> None: assert expected_refund == bytecode.refund(fork) diff --git a/packages/testing/src/execution_testing/logging/__init__.py b/packages/testing/src/execution_testing/logging/__init__.py index 52a97172cd..b0758ba595 100644 --- a/packages/testing/src/execution_testing/logging/__init__.py +++ b/packages/testing/src/execution_testing/logging/__init__.py @@ -10,13 +10,12 @@ VERBOSE_LEVEL, ColorFormatter, EESTLogger, - UTCFormatter, - get_logger, LogLevel, + UTCFormatter, configure_logging, + get_logger, ) - __all__ = [ "VERBOSE_LEVEL", "FAIL_LEVEL", diff --git a/packages/testing/src/execution_testing/rpc/rpc.py b/packages/testing/src/execution_testing/rpc/rpc.py index ccb60f7b07..65e4643f7b 100644 --- a/packages/testing/src/execution_testing/rpc/rpc.py +++ b/packages/testing/src/execution_testing/rpc/rpc.py @@ -19,6 +19,8 @@ retry_if_exception_type, stop_after_attempt, wait_exponential, +) +from tenacity import ( wait_fixed as wait_fixed_tenacity, ) @@ -92,14 +94,15 @@ def __init__( self.attempts = attempts self.elapsed = elapsed self.interval = interval - super().__init__( + msg = ( f"Block {block_hash} not available after {attempts} attempts " f"over {elapsed:.1f}s (interval: {interval}s)" ) + super().__init__(msg) class ForkchoiceUpdateTimeoutError(Exception): - """Raised when forkchoice update doesn't reach VALID within retry limits.""" + """Raised when forkchoice update doesn't reach VALID in time.""" def __init__( self, @@ -113,10 +116,12 @@ def __init__( self.elapsed = elapsed self.interval = interval self.final_status = final_status - super().__init__( - f"Forkchoice update failed to reach VALID after {attempts} attempts " - f"over {elapsed:.1f}s (interval: {interval}s), final status: {final_status}" + msg = ( + f"Forkchoice update failed to reach VALID after {attempts} " + f"attempts over {elapsed:.1f}s (interval: {interval}s), " + f"final status: {final_status}" ) + super().__init__(msg) class PeerConnectionTimeoutError(Exception): @@ -136,11 +141,12 @@ def __init__( self.interval = interval self.expected_peers = expected_peers self.actual_peers = actual_peers - super().__init__( + msg = ( f"Peer connection not established after {attempts} attempts " f"over {elapsed:.1f}s (interval: {interval}s), " f"expected >= {expected_peers} peers, got {actual_peers}" ) + super().__init__(msg) class BaseRPC: @@ -242,8 +248,8 @@ def post_request( headers = base_header | extra_headers logger.debug( - f"Sending RPC request to {self.url}, method={self.namespace}_{method}, " - f"timeout={timeout}..." + f"Sending RPC request to {self.url}, " + f"method={self.namespace}_{method}, timeout={timeout}..." ) response = self._make_request(self.url, payload, headers, timeout) @@ -333,8 +339,8 @@ def __init__( self.max_transactions_per_batch = max_transactions_per_batch if max_transactions_per_batch > self.OVERLOAD_THRESHOLD: logger.warning( - f"max_transactions_per_batch ({max_transactions_per_batch}) exceeds " - f"the safe threshold ({self.OVERLOAD_THRESHOLD}). " + f"max_transactions_per_batch ({max_transactions_per_batch}) " + f"exceeds the safe threshold ({self.OVERLOAD_THRESHOLD}). " "This may cause RPC service instability or failures." ) @@ -416,6 +422,7 @@ def get_block_by_hash_with_retry( Raises: BlockNotAvailableError: If block not available after max_attempts. 
+ """ attempts = 0 start_time = time.time() @@ -585,9 +592,7 @@ def max_priority_fee_per_gas(self) -> int: return self._get_gas_information(method="maxPriorityFeePerGas") def blob_base_fee(self) -> int: - """ - `eth_blobBaseFee`: Return the current blob base fee per gas of the network. - """ + """Return the current blob base fee per gas of the network.""" return self._get_gas_information(method="blobBaseFee") def send_raw_transaction( @@ -672,8 +677,9 @@ def wait_for_transaction( break time.sleep(self.poll_interval) raise Exception( - f"Transaction {tx_hash} ({transaction.model_dump_json()}) not included in a " - f"block after {self.transaction_wait_timeout} seconds" + f"Transaction {tx_hash} ({transaction.model_dump_json()}) " + f"not included in a block after {self.transaction_wait_timeout} " + "seconds" ) def wait_for_transactions( @@ -711,8 +717,8 @@ def wait_for_transactions( if tx.hash in tx_hashes ] raise Exception( - f"Transactions {', '.join(missing_txs_strings)} not included in a block " - f"after {self.transaction_wait_timeout} seconds" + f"Transactions {', '.join(missing_txs_strings)} not included " + f"in a block after {self.transaction_wait_timeout} seconds" ) def send_wait_transaction(self, transaction: TransactionProtocol) -> Any: @@ -766,10 +772,13 @@ class EngineRPC(BaseRPC): jwt_secret: bytes + # Default secret used in hive + DEFAULT_JWT_SECRET: bytes = b"secretsecretsecretsecretsecretse" + def __init__( self, *args: Any, - jwt_secret: bytes = b"secretsecretsecretsecretsecretse", # Default secret used in hive + jwt_secret: bytes = DEFAULT_JWT_SECRET, **kwargs: Any, ) -> None: """Initialize Engine RPC class with the given JWT secret.""" @@ -902,11 +911,12 @@ def forkchoice_updated_with_retry( on_retry: Callable[[RetryCallState], None] | None = None, ) -> ForkchoiceUpdateResponse: """ - Send forkchoice update, retrying while SYNCING until a terminal status. + Send forkchoice update, retrying while SYNCING until terminal. - Retries only while the client returns SYNCING status. Returns immediately - on any terminal status (VALID, INVALID, ACCEPTED, etc.) - the caller is - responsible for checking if the returned status matches expectations. + Retries only while the client returns SYNCING status. Returns + immediately on any terminal status (VALID, INVALID, ACCEPTED, etc.) + - the caller is responsible for checking if the returned status + matches expectations. Args: forkchoice_state: The forkchoice state to send. @@ -917,10 +927,11 @@ def forkchoice_updated_with_retry( Receives tenacity RetryCallState. If None, logs at debug level. Returns: - ForkchoiceUpdateResponse with a terminal status (VALID, INVALID, etc.). + ForkchoiceUpdateResponse with a terminal status (VALID, etc.). Raises: ForkchoiceUpdateTimeoutError: If still SYNCING after max_attempts. + """ # Track state for exception message in the case of timeout attempts = 0 @@ -928,10 +939,13 @@ def forkchoice_updated_with_retry( last_response: ForkchoiceUpdateResponse | None = None def default_on_retry(retry_state: RetryCallState) -> None: + if last_response: + status = str(last_response.payload_status.status) + else: + status = "N/A" logger.debug( f"Forkchoice update attempt {retry_state.attempt_number}: " - f"status={last_response.payload_status.status if last_response else 'N/A'}, " - f"retrying in {wait_fixed}s..." + f"status={status}, retrying in {wait_fixed}s..." 
) retry_callback = on_retry if on_retry is not None else default_on_retry @@ -996,14 +1010,16 @@ def wait_for_peer_connection( Raises: PeerConnectionTimeoutError: If min_peers not reached within limits. + """ attempts = 0 start_time = time.time() last_peer_count = 0 def default_on_retry(retry_state: RetryCallState) -> None: + attempt = retry_state.attempt_number logger.debug( - f"Waiting for peer connection, attempt {retry_state.attempt_number}: " + f"Waiting for peer connection, attempt {attempt}: " f"{last_peer_count} peers, need >= {min_peers}, " f"retrying in {wait_fixed}s..." ) diff --git a/packages/testing/src/execution_testing/rpc/rpc_types.py b/packages/testing/src/execution_testing/rpc/rpc_types.py index 512ec68c6a..d543fc56ff 100644 --- a/packages/testing/src/execution_testing/rpc/rpc_types.py +++ b/packages/testing/src/execution_testing/rpc/rpc_types.py @@ -49,7 +49,10 @@ def __init__( def __str__(self) -> str: """Return string representation of the JSONRPCError.""" if self.data is not None: - return f"JSONRPCError(code={self.code}, message={self.message}, data={self.data})" + return ( + f"JSONRPCError(code={self.code}, message={self.message}, " + f"data={self.data})" + ) return f"JSONRPCError(code={self.code}, message={self.message})" diff --git a/packages/testing/src/execution_testing/rpc/tests/test_types.py b/packages/testing/src/execution_testing/rpc/tests/test_types.py index 0b1a9f5b84..5e49097041 100644 --- a/packages/testing/src/execution_testing/rpc/tests/test_types.py +++ b/packages/testing/src/execution_testing/rpc/tests/test_types.py @@ -23,13 +23,15 @@ "BN254_PAIRING": "0x0000000000000000000000000000000000000008", "ECREC": "0x0000000000000000000000000000000000000001", "ID": "0x0000000000000000000000000000000000000004", - "KZG_POINT_EVALUATION": "0x000000000000000000000000000000000000000a", + "KZG_POINT_EVALUATION": "0x000000000000000000000000000000000000000a", # noqa: E501 "MODEXP": "0x0000000000000000000000000000000000000005", "RIPEMD160": "0x0000000000000000000000000000000000000003", "SHA256": "0x0000000000000000000000000000000000000002", }, "systemContracts": { - "BEACON_ROOTS_ADDRESS": "0x000f3df6d732807ef1319fb7b8bb8522d0beac02" + "BEACON_ROOTS_ADDRESS": ( # noqa: E501 + "0x000f3df6d732807ef1319fb7b8bb8522d0beac02" + ) }, }, "next": { @@ -47,21 +49,21 @@ "BLS12_G1MSM": "0x000000000000000000000000000000000000000c", "BLS12_G2ADD": "0x000000000000000000000000000000000000000d", "BLS12_G2MSM": "0x000000000000000000000000000000000000000e", - "BLS12_MAP_FP2_TO_G2": "0x0000000000000000000000000000000000000011", - "BLS12_MAP_FP_TO_G1": "0x0000000000000000000000000000000000000010", - "BLS12_PAIRING_CHECK": "0x000000000000000000000000000000000000000f", + "BLS12_MAP_FP2_TO_G2": "0x0000000000000000000000000000000000000011", # noqa: E501 + "BLS12_MAP_FP_TO_G1": "0x0000000000000000000000000000000000000010", # noqa: E501 + "BLS12_PAIRING_CHECK": "0x000000000000000000000000000000000000000f", # noqa: E501 "BN254_ADD": "0x0000000000000000000000000000000000000006", "BN254_MUL": "0x0000000000000000000000000000000000000007", "BN254_PAIRING": "0x0000000000000000000000000000000000000008", "ECREC": "0x0000000000000000000000000000000000000001", "ID": "0x0000000000000000000000000000000000000004", - "KZG_POINT_EVALUATION": "0x000000000000000000000000000000000000000a", + "KZG_POINT_EVALUATION": "0x000000000000000000000000000000000000000a", # noqa: E501 "MODEXP": "0x0000000000000000000000000000000000000005", "RIPEMD160": "0x0000000000000000000000000000000000000003", "SHA256": 
"0x0000000000000000000000000000000000000002", }, "systemContracts": { - "BEACON_ROOTS_ADDRESS": "0x000f3df6d732807ef1319fb7b8bb8522d0beac02", + "BEACON_ROOTS_ADDRESS": "0x000f3df6d732807ef1319fb7b8bb8522d0beac02", # noqa: E501 "CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": ( "0x0000bbddc7ce488642fb579f8b00f3a590007251" ), @@ -91,21 +93,21 @@ "BLS12_G1MSM": "0x000000000000000000000000000000000000000c", "BLS12_G2ADD": "0x000000000000000000000000000000000000000d", "BLS12_G2MSM": "0x000000000000000000000000000000000000000e", - "BLS12_MAP_FP2_TO_G2": "0x0000000000000000000000000000000000000011", - "BLS12_MAP_FP_TO_G1": "0x0000000000000000000000000000000000000010", - "BLS12_PAIRING_CHECK": "0x000000000000000000000000000000000000000f", + "BLS12_MAP_FP2_TO_G2": "0x0000000000000000000000000000000000000011", # noqa: E501 + "BLS12_MAP_FP_TO_G1": "0x0000000000000000000000000000000000000010", # noqa: E501 + "BLS12_PAIRING_CHECK": "0x000000000000000000000000000000000000000f", # noqa: E501 "BN254_ADD": "0x0000000000000000000000000000000000000006", "BN254_MUL": "0x0000000000000000000000000000000000000007", "BN254_PAIRING": "0x0000000000000000000000000000000000000008", "ECREC": "0x0000000000000000000000000000000000000001", "ID": "0x0000000000000000000000000000000000000004", - "KZG_POINT_EVALUATION": "0x000000000000000000000000000000000000000a", + "KZG_POINT_EVALUATION": "0x000000000000000000000000000000000000000a", # noqa: E501 "MODEXP": "0x0000000000000000000000000000000000000005", "RIPEMD160": "0x0000000000000000000000000000000000000003", "SHA256": "0x0000000000000000000000000000000000000002", }, "systemContracts": { - "BEACON_ROOTS_ADDRESS": "0x000f3df6d732807ef1319fb7b8bb8522d0beac02", + "BEACON_ROOTS_ADDRESS": "0x000f3df6d732807ef1319fb7b8bb8522d0beac02", # noqa: E501 "CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": ( "0x0000bbddc7ce488642fb579f8b00f3a590007251" ), diff --git a/packages/testing/src/execution_testing/specs/base.py b/packages/testing/src/execution_testing/specs/base.py index 62c170440c..fc8ce0d9f7 100644 --- a/packages/testing/src/execution_testing/specs/base.py +++ b/packages/testing/src/execution_testing/specs/base.py @@ -59,7 +59,10 @@ def __init__( def __str__(self) -> str: """Return the error message.""" - return f"{self.message}: Expected {self.expected_hash}, got {self.actual_hash}" + return ( + f"{self.message}: Expected {self.expected_hash}, " + f"got {self.actual_hash}" + ) def verify_result(result: Result, env: Environment) -> None: @@ -275,15 +278,15 @@ def check_exception_test( if negative_test_marker != exception: if exception: raise Exception( - "Test produced an invalid block or transaction but was not marked with the " - "`exception_test` marker. Add the `@pytest.mark.exception_test` decorator " - "to the test." + "Test produced an invalid block or transaction but was " + "not marked with the `exception_test` marker. Add the " + "`@pytest.mark.exception_test` decorator to the test." ) else: raise Exception( - "Test didn't produce an invalid block or transaction but was marked with the " - "`exception_test` marker. Remove the `@pytest.mark.exception_test` decorator " - "from the test." + "Test didn't produce an invalid block or transaction but " + "was marked with the `exception_test` marker. Remove the " + "`@pytest.mark.exception_test` decorator from the test." ) def get_genesis_environment(self) -> Environment: @@ -294,8 +297,8 @@ def get_genesis_environment(self) -> Environment: environment. 
""" raise NotImplementedError( - f"{self.__class__.__name__} must implement genesis environment access for use with " - "pre-allocation groups." + f"{self.__class__.__name__} must implement genesis environment " + "access for use with pre-allocation groups." ) def update_pre_alloc_groups( @@ -307,8 +310,9 @@ def update_pre_alloc_groups( """ if not hasattr(self, "pre"): raise AttributeError( - f"{self.__class__.__name__} does not have a 'pre' field. Pre-allocation groups " - "are only supported for test types that define pre-allocation." + f"{self.__class__.__name__} does not have a 'pre' field. " + "Pre-allocation groups are only supported for test types " + "that define pre-allocation." ) pre_alloc_hash = self.compute_pre_alloc_group_hash() pre_alloc_group_builders.add_test_pre( @@ -323,8 +327,9 @@ def compute_pre_alloc_group_hash(self) -> str: """Hash (fork, env) in order to group tests by genesis config.""" if not hasattr(self, "pre"): raise AttributeError( - f"{self.__class__.__name__} does not have a 'pre' field. Pre-allocation group " - "usage is only supported for test types that define pre-allocs." + f"{self.__class__.__name__} does not have a 'pre' field. " + "Pre-allocation group usage is only supported for test " + "types that define pre-allocs." ) fork_digest = hashlib.sha256(self.fork.name().encode("utf-8")).digest() fork_hash = int.from_bytes(fork_digest[:8], byteorder="big") diff --git a/packages/testing/src/execution_testing/specs/benchmark.py b/packages/testing/src/execution_testing/specs/benchmark.py index ce4cb2aca1..0777d69b7c 100644 --- a/packages/testing/src/execution_testing/specs/benchmark.py +++ b/packages/testing/src/execution_testing/specs/benchmark.py @@ -178,21 +178,26 @@ def generate_repeated_code( max_iterations = available_space // len(repeated_code) # Use fixed_opcode_count if provided, otherwise fill to max - # Iteration Logic: The goal is to set the total operation count proportional to a - # 'fixed_opcode_count' multiplied by 1000, across two contracts (Loop M * Target N). + # Iteration Logic: The goal is to set the total operation count + # proportional to a 'fixed_opcode_count' multiplied by 1000, + # across two contracts (Loop M * Target N). # --- 1. Determine Inner Iterations (N) --- - # The Target Contract's loop count is determined by block filling, capped at 1000. + # The Target Contract's loop count is determined by block filling, + # capped at 1000. # # 1a. Calculate 'max_iterations' to fill the block. # 1b. The Inner Iteration count (N) is capped at 1000. - # 1c. If the calculated N is less than 1000, use 500 as the fallback count. + # 1c. If the calculated N is less than 1000, use 500 as the fallback. # --- 2. Determine Outer Iterations (M) --- - # The Loop Contract's call count (M) is set to ensure the final total execution is consistent. + # The Loop Contract's call count (M) is set to ensure the final + # total execution is consistent. # - # 2a. If N is 1000: Set M = fixed_opcode_count. (Total ops: fixed_opcode_count * 1000) - # 2b. If N is 500: Set M = fixed_opcode_count * 2. (Total ops: (fixed_opcode_count * 2) * 500 = fixed_opcode_count * 1000) + # 2a. If N is 1000: Set M = fixed_opcode_count. + # (Total ops: fixed_opcode_count * 1000) + # 2b. If N is 500: Set M = fixed_opcode_count * 2. 
+ # (Total ops: (fixed_opcode_count * 2) * 500) if self.fixed_opcode_count is not None: inner_iterations = 1000 if max_iterations >= 1000 else 500 self._inner_iterations = min(max_iterations, inner_iterations) @@ -221,8 +226,8 @@ def _validate_code_size(self, code: Bytecode, fork: Fork) -> None: """Validate that the generated code fits within size limits.""" if len(code) > fork.max_code_size(): raise ValueError( - f"Generated code size {len(code)} exceeds maximum allowed size " - f"{fork.max_code_size()}" + f"Generated code size {len(code)} exceeds maximum " + f"allowed size {fork.max_code_size()}" ) @@ -268,7 +273,9 @@ class BenchmarkTest(BaseTest): ] supported_markers: ClassVar[Dict[str, str]] = { - "blockchain_test_engine_only": "Only generate a blockchain test engine fixture", + "blockchain_test_engine_only": ( + "Only generate a blockchain test engine fixture" + ), "blockchain_test_only": "Only generate a blockchain test fixture", "repricing": "Mark test as reference test for gas repricing analysis", } @@ -295,7 +302,8 @@ def model_post_init(self, __context: Any, /) -> None: if len(set_props) != 1: raise ValueError( - f"Exactly one must be set, but got {len(set_props)}: {', '.join(set_props)}" + f"Exactly one must be set, but got {len(set_props)}: " + f"{', '.join(set_props)}" ) blocks: List[Block] = self.setup_blocks @@ -335,7 +343,8 @@ def model_post_init(self, __context: Any, /) -> None: else: raise ValueError( - "Cannot create BlockchainTest without a code generator, transactions, or blocks" + "Cannot create BlockchainTest without a code generator, " + "transactions, or blocks" ) self.blocks = blocks @@ -447,8 +456,9 @@ def generate_blockchain_test(self) -> BlockchainTest: def _verify_target_opcode_count( self, opcode_count: OpcodeCount | None ) -> None: - """Verify the target opcode was executed the expected number of times.""" - # Skip validation if opcode count is not available (e.g. currently only supported for evmone filling) + """Verify target opcode was executed the expected number of times.""" + # Skip validation if opcode count is not available + # (e.g. 
currently only supported for evmone filling) if opcode_count is None: return diff --git a/packages/testing/src/execution_testing/specs/blockchain.py b/packages/testing/src/execution_testing/specs/blockchain.py index 06d6bebfc9..b5a6ddffe9 100644 --- a/packages/testing/src/execution_testing/specs/blockchain.py +++ b/packages/testing/src/execution_testing/specs/blockchain.py @@ -223,7 +223,8 @@ def verify(self, target: FixtureHeader) -> None: value = getattr(target, field_name) if baseline_value is Header.EMPTY_FIELD: assert value is None, ( - f"invalid header field {field_name}, got {value}, want None" + f"invalid header field {field_name}, " + f"got {value}, want None" ) continue assert value == baseline_value, ( @@ -498,7 +499,9 @@ class BlockchainTest(BaseTest): ] supported_markers: ClassVar[Dict[str, str]] = { - "blockchain_test_engine_only": "Only generate a blockchain test engine fixture", + "blockchain_test_engine_only": ( + "Only generate a blockchain test engine fixture" + ), "blockchain_test_only": "Only generate a blockchain test fixture", } @@ -586,12 +589,13 @@ def generate_block_data( if failing_tx_count := len([tx for tx in txs if tx.error]) > 0: if failing_tx_count > 1: raise Exception( - "test correctness: only one transaction can produce an exception in a block" + "test correctness: only one transaction can produce " + "an exception in a block" ) if not txs[-1].error: raise Exception( - "test correctness: the transaction that produces an exception " - + "must be the last transaction in the block" + "test correctness: the transaction that produces an " + "exception must be the last transaction in the block" ) transition_tool_output = t8n.evaluate( @@ -664,10 +668,11 @@ def generate_block_data( gas_used = int(transition_tool_output.result.gas_used) if not self.skip_gas_used_validation: + diff = gas_used - expected_benchmark_gas_used assert gas_used == expected_benchmark_gas_used, ( - f"gas_used ({gas_used}) does not match expected_benchmark_gas_used " - f"({expected_benchmark_gas_used})" - f", difference: {gas_used - expected_benchmark_gas_used}" + f"gas_used ({gas_used}) does not match " + f"expected_benchmark_gas_used " + f"({expected_benchmark_gas_used}), difference: {diff}" ) requests_list: List[Bytes] | None = None @@ -683,8 +688,8 @@ def generate_block_data( if Hash(requests) != header.requests_hash: raise Exception( - "Requests root in header does not match the requests root in the transition " - "tool output: " + "Requests root in header does not match the requests " + "root in the transition tool output: " f"{header.requests_hash} != {Hash(requests)}" ) @@ -702,8 +707,8 @@ def generate_block_data( assert ( transition_tool_output.result.block_access_list is not None ), ( - "Block access list is required for this block but was not provided " - "by the transition tool" + "Block access list is required for this block but was not " + "provided by the transition tool" ) rlp = transition_tool_output.result.block_access_list.rlp @@ -727,7 +732,8 @@ def generate_block_data( t8n_bal = transition_tool_output.result.block_access_list bal = t8n_bal - # Always validate BAL structural integrity (ordering, duplicates) if present + # Always validate BAL structural integrity (ordering, duplicates) + # if present if t8n_bal is not None: t8n_bal.validate_structure() @@ -997,7 +1003,8 @@ def make_hive_fixture( elif fixture_format == BlockchainEngineSyncFixture: # Sync fixture format assert genesis.header.block_hash != head_hash, ( - "Invalid payload tests negative test via sync is not 
supported yet." + "Invalid payload tests negative test via sync is not " + "supported yet." ) # Most clients require the header to start the sync process, so we # create an empty block on top of the last block of the test to @@ -1011,7 +1018,9 @@ def make_hive_fixture( ) fixture_data.update( { - "sync_payload": sync_built_block.get_fixture_engine_new_payload(), + "sync_payload": ( + sync_built_block.get_fixture_engine_new_payload() + ), "pre": pre, "post_state": alloc if not self.exclude_full_post_state_in_output diff --git a/packages/testing/src/execution_testing/specs/debugging.py b/packages/testing/src/execution_testing/specs/debugging.py index 742883a3ac..0baf544a4f 100644 --- a/packages/testing/src/execution_testing/specs/debugging.py +++ b/packages/testing/src/execution_testing/specs/debugging.py @@ -9,7 +9,8 @@ def print_traces(traces: List[Traces] | None) -> None: """Print the traces from the transition tool for debugging.""" if traces is None: print( - "Traces not collected. Use `--traces` to see detailed execution information." + "Traces not collected. Use `--traces` to see detailed " + "execution information." ) return print("Printing traces for debugging purposes:") diff --git a/packages/testing/src/execution_testing/specs/state.py b/packages/testing/src/execution_testing/specs/state.py index b6923aae7d..e4d5f9c61c 100644 --- a/packages/testing/src/execution_testing/specs/state.py +++ b/packages/testing/src/execution_testing/specs/state.py @@ -184,7 +184,8 @@ def verify_modified_gas_limit( ) except Exception as e: logger.debug( - f"Transactions are not equivalent (gas_limit={current_gas_limit})" + "Transactions are not equivalent " + f"(gas_limit={current_gas_limit})" ) logger.debug(e) return False @@ -201,14 +202,16 @@ def verify_modified_gas_limit( for k in base_tool_alloc.root.keys(): if k not in modified_tool_alloc: logger.debug( - f"Post alloc is not equivalent (gas_limit={current_gas_limit})" + "Post alloc is not equivalent " + f"(gas_limit={current_gas_limit})" ) return False base_account = base_tool_alloc[k] modified_account = modified_tool_alloc[k] if (modified_account is None) != (base_account is None): logger.debug( - f"Post alloc is not equivalent (gas_limit={current_gas_limit})" + "Post alloc is not equivalent " + f"(gas_limit={current_gas_limit})" ) return False if ( @@ -217,7 +220,8 @@ def verify_modified_gas_limit( and base_account.nonce != modified_account.nonce ): logger.debug( - f"Post alloc is not equivalent (gas_limit={current_gas_limit})" + "Post alloc is not equivalent " + f"(gas_limit={current_gas_limit})" ) return False logger.debug( @@ -247,10 +251,12 @@ def _generate_blockchain_genesis_environment(self) -> Environment: Generate the genesis environment for the BlockchainTest formatted test. """ assert self.env.number >= 1, ( - "genesis block number cannot be negative, set state test env.number to at least 1" + "genesis block number cannot be negative, set state test " + "env.number to at least 1" ) assert self.env.timestamp >= 1, ( - "genesis timestamp cannot be negative, set state test env.timestamp to at least 1" + "genesis timestamp cannot be negative, set state test " + "env.timestamp to at least 1" ) # There's only a handful of values that we need to set in the genesis # for the environment values at block 1 to make sense: @@ -435,7 +441,8 @@ def make_state_test_fixture( ): raise Exception( "Requires more than the minimum " - f"{self._gas_optimization_max_gas_limit} wanted." + f"{self._gas_optimization_max_gas_limit} " + "wanted." 
) assert self.verify_modified_gas_limit( @@ -459,10 +466,11 @@ def make_state_test_fixture( ) gas_used = int(transition_tool_output.result.gas_used) if not self.skip_gas_used_validation: + diff = gas_used - expected_benchmark_gas_used assert gas_used == expected_benchmark_gas_used, ( - f"gas_used ({gas_used}) does not match expected_benchmark_gas_used " - f"({expected_benchmark_gas_used})" - f", difference: {gas_used - expected_benchmark_gas_used}" + f"gas_used ({gas_used}) does not match " + f"expected_benchmark_gas_used " + f"({expected_benchmark_gas_used}), difference: {diff}" ) return StateFixture( diff --git a/packages/testing/src/execution_testing/specs/static_state/account.py b/packages/testing/src/execution_testing/specs/static_state/account.py index d4a2883569..fe5c12aa11 100644 --- a/packages/testing/src/execution_testing/specs/static_state/account.py +++ b/packages/testing/src/execution_testing/specs/static_state/account.py @@ -265,7 +265,8 @@ def setup(self, pre: Alloc, all_dependencies: Dict[str, Tag]) -> TagDict: if extra_dependency not in resolved_accounts: if all_dependencies[extra_dependency].type != "eoa": raise ValueError( - f"Contract dependency {extra_dependency} not found in pre" + f"Contract dependency {extra_dependency} " + "not found in pre" ) # Create new EOA - this will have a dynamically generated key diff --git a/packages/testing/src/execution_testing/specs/static_state/common/common.py b/packages/testing/src/execution_testing/specs/static_state/common/common.py index 4c3f4c602d..fead4be2e6 100644 --- a/packages/testing/src/execution_testing/specs/static_state/common/common.py +++ b/packages/testing/src/execution_testing/specs/static_state/common/common.py @@ -130,7 +130,8 @@ def compiled(self, tags: TagDict) -> bytes: if not isinstance(raw_code, str): raise ValueError( - f"code is of type {type(raw_code)} but expected a string: {raw_code}" + f"code is of type {type(raw_code)} but expected a string: " + f"{raw_code}" ) if len(raw_code) == 0: return b"" diff --git a/packages/testing/src/execution_testing/specs/static_state/environment.py b/packages/testing/src/execution_testing/specs/static_state/environment.py index 6cf7822ddc..32cd3b4e6d 100644 --- a/packages/testing/src/execution_testing/specs/static_state/environment.py +++ b/packages/testing/src/execution_testing/specs/static_state/environment.py @@ -42,7 +42,8 @@ def check_fields(self) -> "EnvironmentInStateTestFiller": if self.current_difficulty is None: if self.current_random is None: raise ValueError( - "If `currentDifficulty` is not set, `currentRandom` must be set!" + "If `currentDifficulty` is not set, " + "`currentRandom` must be set!" 
) return self @@ -51,7 +52,8 @@ def get_environment(self, tags: TagDict) -> Environment: kwargs: Dict[str, Any] = {} if isinstance(self.current_coinbase, Tag): assert self.current_coinbase.name in tags, ( - f"Tag {self.current_coinbase.name} to resolve coinbase not found in tags" + f"Tag {self.current_coinbase.name} to resolve coinbase " + "not found in tags" ) kwargs["fee_recipient"] = self.current_coinbase.resolve(tags) else: diff --git a/packages/testing/src/execution_testing/specs/tests/test_benchmark.py b/packages/testing/src/execution_testing/specs/tests/test_benchmark.py index 215ec36de6..3f859417d0 100644 --- a/packages/testing/src/execution_testing/specs/tests/test_benchmark.py +++ b/packages/testing/src/execution_testing/specs/tests/test_benchmark.py @@ -51,20 +51,22 @@ def test_split_transaction( # Verify the number of transactions assert len(split_txs) == expected_splits, ( - f"Expected {expected_splits} transactions for {gas_benchmark_value_millions}M gas, " - f"got {len(split_txs)}" + f"Expected {expected_splits} transactions for " + f"{gas_benchmark_value_millions}M gas, got {len(split_txs)}" ) # Verify total gas equals the benchmark value total_gas = sum(tx.gas_limit for tx in split_txs) assert total_gas == gas_benchmark_value, ( - f"Total gas {total_gas} doesn't match benchmark value {gas_benchmark_value}" + f"Total gas {total_gas} doesn't match benchmark " + f"value {gas_benchmark_value}" ) # Verify no transaction exceeds the cap for i, tx in enumerate(split_txs): assert tx.gas_limit <= gas_limit_cap, ( - f"Transaction {i} gas limit {tx.gas_limit} exceeds cap {gas_limit_cap}" + f"Transaction {i} gas limit {tx.gas_limit} " + f"exceeds cap {gas_limit_cap}" ) # Verify nonces increment correctly @@ -74,7 +76,8 @@ def test_split_transaction( # Verify gas distribution for i, tx in enumerate(split_txs[:-1]): # All but last should be at cap assert tx.gas_limit == gas_limit_cap, ( - f"Transaction {i} should have gas limit {gas_limit_cap}, got {tx.gas_limit}" + f"Transaction {i} should have gas limit {gas_limit_cap}, " + f"got {tx.gas_limit}" ) # Last transaction should have the remainder @@ -83,7 +86,8 @@ def test_split_transaction( gas_limit_cap * (expected_splits - 1) ) assert split_txs[-1].gas_limit == expected_last_gas, ( - f"Last transaction should have {expected_last_gas} gas, got {split_txs[-1].gas_limit}" + f"Last transaction should have {expected_last_gas} gas, " + f"got {split_txs[-1].gas_limit}" ) diff --git a/packages/testing/src/execution_testing/specs/tests/test_expect.py b/packages/testing/src/execution_testing/specs/tests/test_expect.py index a73478d8fc..5cb5d0519c 100644 --- a/packages/testing/src/execution_testing/specs/tests/test_expect.py +++ b/packages/testing/src/execution_testing/specs/tests/test_expect.py @@ -170,7 +170,6 @@ def test_post_storage_value_mismatch( expected_exception: Storage.KeyValueMismatchError, state_test: StateTest, default_t8n: TransitionTool, - fork: Fork, ) -> None: """ Test post state `Account.storage` exceptions during state test fixture @@ -205,7 +204,6 @@ def test_post_nonce_value_mismatch( post: Alloc, state_test: StateTest, default_t8n: TransitionTool, - fork: Fork, ) -> None: """ Test post state `Account.nonce` verification and exceptions during state @@ -251,7 +249,6 @@ def test_post_code_value_mismatch( post: Alloc, state_test: StateTest, default_t8n: TransitionTool, - fork: Fork, ) -> None: """ Test post state `Account.code` verification and exceptions during state @@ -297,7 +294,6 @@ def test_post_balance_value_mismatch( 
post: Alloc, state_test: StateTest, default_t8n: TransitionTool, - fork: Fork, ) -> None: """ Test post state `Account.balance` verification and exceptions during state @@ -352,7 +348,6 @@ def test_post_balance_value_mismatch( def test_post_account_mismatch( state_test: StateTest, default_t8n: TransitionTool, - fork: Fork, exception_type: Type[Exception] | None, ) -> None: """ @@ -435,7 +430,6 @@ def test_post_account_mismatch( def test_transaction_expectation( state_test: StateTest, default_t8n: TransitionTool, - fork: Fork, exception_type: Type[Exception] | None, fixture_format: FixtureFormat, ) -> None: diff --git a/packages/testing/src/execution_testing/specs/tests/test_fixtures.py b/packages/testing/src/execution_testing/specs/tests/test_fixtures.py index aec091a0ab..90680eb05d 100644 --- a/packages/testing/src/execution_testing/specs/tests/test_fixtures.py +++ b/packages/testing/src/execution_testing/specs/tests/test_fixtures.py @@ -92,9 +92,9 @@ def test_check_helper_fixtures() -> None: Cancun, ], ) -def test_make_genesis( +def test_make_genesis( # noqa: D103 fork: Fork, fixture_hash: bytes, default_t8n: TransitionTool -) -> None: # noqa: D103 +) -> None: env = Environment(gas_limit=100_000_000_000_000_000) pre = Alloc( @@ -202,10 +202,9 @@ def test_fill_state_test( tag="my_chain_id_test", ).generate(t8n=default_t8n, fixture_format=fixture_format) assert generated_fixture.__class__ == fixture_format + fixture_key = f"000/my_chain_id_test/{fork}/tx_type_{tx_type}" fixture = { - f"000/my_chain_id_test/{fork}/tx_type_{tx_type}": generated_fixture.json_dict_with_info( - hash_only=True - ), + fixture_key: generated_fixture.json_dict_with_info(hash_only=True), } format_name = fixture_format.format_name diff --git a/packages/testing/src/execution_testing/test_types/account_types.py b/packages/testing/src/execution_testing/test_types/account_types.py index 42922ba375..04106c4005 100644 --- a/packages/testing/src/execution_testing/test_types/account_types.py +++ b/packages/testing/src/execution_testing/test_types/account_types.py @@ -172,7 +172,10 @@ class UnexpectedAccountError(Exception): def __str__(self) -> str: """Print exception string.""" - return f"unexpected account in allocation {self.address}: {self.account}" + return ( + f"unexpected account in allocation {self.address}: " + f"{self.account}" + ) @dataclass(kw_only=True) class MissingAccountError(Exception): @@ -243,7 +246,8 @@ def merge( if overlapping_keys: if key_collision_mode == cls.KeyCollisionMode.ERROR: raise Exception( - f"Overlapping keys detected: {[key.hex() for key in overlapping_keys]}" + f"Overlapping keys detected: " + f"{[key.hex() for key in overlapping_keys]}" ) elif ( key_collision_mode @@ -410,9 +414,11 @@ def deterministic_deploy_contract( storage: The expected storage state of the deployed contract after initcode execution. label: Label to use for the contract. 
+ """ raise NotImplementedError( - "deterministic_deploy_contract is not implemented in the base class" + "deterministic_deploy_contract is not implemented in the base " + "class" ) def deploy_contract( @@ -466,6 +472,7 @@ def fund_address( minimum_balance: If set to True, account will be checked to have a minimum balance of `amount` and only fund if the balance is insufficient + """ raise NotImplementedError( "fund_address is not implemented in the base class" diff --git a/packages/testing/src/execution_testing/test_types/blob_types.py b/packages/testing/src/execution_testing/test_types/blob_types.py index 6270135940..b4dfc58a9b 100644 --- a/packages/testing/src/execution_testing/test_types/blob_types.py +++ b/packages/testing/src/execution_testing/test_types/blob_types.py @@ -13,10 +13,10 @@ from execution_testing.base_types.base_types import Bytes, Hash from execution_testing.base_types.pydantic import CamelModel +from execution_testing.forks import Fork from execution_testing.logging import ( get_logger, ) -from execution_testing.forks import Fork CACHED_BLOBS_DIRECTORY: Path = ( Path(platformdirs.user_cache_dir("ethereum-execution-spec-tests")) @@ -161,7 +161,8 @@ def get_commitment(data: Bytes) -> Bytes: ) assert len(data) == field_elements * bytes_per_field, ( f"Expected blob of length " - f"{field_elements * bytes_per_field} but got blob of length {len(data)}" + f"{field_elements * bytes_per_field} but got blob of length " + f"{len(data)}" ) # calculate commitment @@ -207,8 +208,9 @@ def get_proof(fork: Fork, data: Bytes) -> List[Bytes] | Bytes: return proofs raise AssertionError( - f"get_proof() has not been implemented yet for fork: {fork.name()}." - f"Got amount of cell proofs {amount_cell_proofs} but expected 128." + f"get_proof() has not been implemented yet for fork: " + f"{fork.name()}. Got amount of cell proofs " + f"{amount_cell_proofs} but expected 128." ) def get_cells(fork: Fork, data: Bytes) -> List[Bytes] | None: @@ -228,8 +230,9 @@ def get_cells(fork: Fork, data: Bytes) -> List[Bytes] | None: return cells # List[bytes] raise AssertionError( - f"get_cells() has not been implemented yet for fork: {fork.name()}. Got amount of " - f"cell proofs {amount_cell_proofs} but expected 128." + f"get_cells() has not been implemented yet for fork: " + f"{fork.name()}. Got amount of cell proofs " + f"{amount_cell_proofs} but expected 128." ) # first, create cached blobs dir if necessary @@ -250,7 +253,8 @@ def get_cells(fork: Fork, data: Bytes) -> List[Bytes] | None: with FileLock(lock_file_path): if blob_location.exists(): logger.debug( - f"Blob exists already, reading it from file {blob_location}" + f"Blob exists already, reading it from file " + f"{blob_location}" ) return Blob.from_file(Blob.get_filename(fork, seed)) @@ -293,8 +297,8 @@ def from_file(file_name: str) -> "Blob": """ # ensure filename was passed assert file_name.startswith("blob_"), ( - f"You provided an invalid blob filename. Expected it to start with 'blob_' " - f"but got: {file_name}" + f"You provided an invalid blob filename. 
Expected it to start " + f"with 'blob_' but got: {file_name}" ) if ".json" not in file_name: @@ -305,7 +309,8 @@ def from_file(file_name: str) -> "Blob": # check whether blob exists assert blob_file_location.exists(), ( - f"Tried to load blob from file but {blob_file_location} does not exist" + f"Tried to load blob from file but {blob_file_location} does not " + "exist" ) # read blob from file @@ -326,7 +331,8 @@ def write_to_file(self) -> None: # warn if existing static_blob gets overwritten if output_location.exists(): logger.debug( - f"Blob {output_location} already exists. It will be overwritten." + f"Blob {output_location} already exists. It will be " + "overwritten." ) # overwrite existing @@ -343,14 +349,16 @@ def verify_cell_kzg_proof_batch(self, cell_indices: list) -> bool: ) assert amount_cell_proofs > 0, ( - f"verify_cell_kzg_proof_batch() is not available for your fork: {self.fork.name()}." + f"verify_cell_kzg_proof_batch() is not available for your fork: " + f"{self.fork.name()}." ) assert self.cells is not None, "self.cells is None, critical error." assert len(cell_indices) == len(self.cells), ( - f"Cell Indices list (detected length {len(cell_indices)}) and Cell list " - f"(detected length {len(self.cells)}) should have same length." + f"Cell Indices list (detected length {len(cell_indices)}) and " + f"Cell list (detected length {len(self.cells)}) should have same " + "length." ) # each cell refers to the same commitment @@ -386,24 +394,26 @@ def delete_cells_then_recover_them( ) assert amount_cell_proofs > 0, ( - f"delete_cells_then_recover_them() is not available for fork: {self.fork.name()}" + f"delete_cells_then_recover_them() is not available for fork: " + f"{self.fork.name()}" ) assert self.cells is not None, "self.cells is None, critical problem." assert isinstance(self.proof, list), ( - "This function only works when self.proof is a list, but it seems to be " - " of type bytes (not a list)" + "This function only works when self.proof is a list, but it seems " + "to be of type bytes (not a list)" ) assert len(self.cells) == 128, ( - f"You are supposed to pass a full cell list with 128 elements to this function, " - f"but got list of length {len(self.cells)}" + f"You are supposed to pass a full cell list with 128 elements to " + f"this function, but got list of length {len(self.cells)}" ) assert len(deletion_indices) < 129, ( - f"You can't delete more than every cell (max len of deletion indices list is 128), " - f"but you passed a deletion indices list of length {len(deletion_indices)}" + f"You can't delete more than every cell (max len of deletion " + f"indices list is 128), but you passed a deletion indices list of " + f"length {len(deletion_indices)}" ) for i in deletion_indices: assert 0 <= i <= 127, ( @@ -425,23 +435,26 @@ def delete_cells_then_recover_them( # determine success/failure assert len(recovered_cells) == len(self.cells), ( - f"Failed to recover cell list. Original cell list had length {len(self.cells)} but " - f"recovered cell list has length {len(recovered_cells)}" + f"Failed to recover cell list. Original cell list had length " + f"{len(self.cells)} but recovered cell list has length " + f"{len(recovered_cells)}" ) assert len(recovered_proofs) == len(self.proof), ( - f"Failed to recover proofs list. Original proofs list had length {len(self.proof)} " - f"but recovered proofs list has length {len(recovered_proofs)}" + f"Failed to recover proofs list. 
Original proofs list had length " + f"{len(self.proof)} but recovered proofs list has length " + f"{len(recovered_proofs)}" ) for i in range(len(recovered_cells)): assert self.cells[i] == recovered_cells[i], ( - f"Failed to correctly restore missing cells. At index {i} original cell was " - f"0x{self.cells[i].hex()} but reconstructed cell does not match: " - f"0x{recovered_cells[i].hex()}" + f"Failed to correctly restore missing cells. At index {i} " + f"original cell was 0x{self.cells[i].hex()} but reconstructed " + f"cell does not match: 0x{recovered_cells[i].hex()}" ) assert self.proof[i] == recovered_proofs[i], ( - f"Failed to correctly restore missing proofs. At index {i} original proof was " - f"0x{self.proof[i].hex()} but reconstructed proof does not match: " + f"Failed to correctly restore missing proofs. At index {i} " + f"original proof was 0x{self.proof[i].hex()} but " + f"reconstructed proof does not match: " f"0x{recovered_proofs[i].hex()}" ) @@ -502,7 +515,8 @@ def corrupt_byte(b: bytes) -> Bytes: # pre-osaka (cancun and prague) assert amount_cell_proofs == 0, ( - f"You need to adjust corrupt_proof to handle fork {self.fork.name()}" + f"You need to adjust corrupt_proof to handle fork " + f"{self.fork.name()}" ) assert isinstance(self.proof, Bytes), ( "proof was expected to be Bytes but it isn't" diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/account_absent_values.py b/packages/testing/src/execution_testing/test_types/block_access_list/account_absent_values.py index aca89076ac..3f57082bd1 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/account_absent_values.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/account_absent_values.py @@ -81,8 +81,10 @@ class BalAccountAbsentValues(CamelModel): ) balance_changes: List[BalBalanceChange] = Field( default_factory=list, - description="List of balance changes that should NOT exist in the BAL. " - "Validates that none of these changes are present.", + description=( + "List of balance changes that should NOT exist in the BAL. " + "Validates that none of these changes are present." + ), ) code_changes: List[BalCodeChange] = Field( default_factory=list, @@ -91,8 +93,10 @@ class BalAccountAbsentValues(CamelModel): ) storage_changes: List[BalStorageSlot] = Field( default_factory=list, - description="List of storage slots/changes that should NOT exist in the BAL. " - "Validates that none of these changes are present.", + description=( + "List of storage slots/changes that should NOT exist in the BAL. " + "Validates that none of these changes are present." + ), ) storage_reads: List[StorageKey] = Field( default_factory=list, @@ -114,8 +118,8 @@ def validate_specific_absences_only(self) -> "BalAccountAbsentValues": ): raise ValueError( "At least one absence field must be specified. " - "`BalAccountAbsentValues` is for checking specific forbidden values. " - f"{EMPTY_LIST_ERROR_MSG}" + "`BalAccountAbsentValues` is for checking specific forbidden " + f"values. {EMPTY_LIST_ERROR_MSG}" ) # check that no fields are explicitly set to empty lists @@ -130,16 +134,17 @@ def validate_specific_absences_only(self) -> "BalAccountAbsentValues": for field_name, field_value in field_checks: if field_name in self.model_fields_set and field_value == []: raise ValueError( - f"`BalAccountAbsentValues.{field_name}` cannot be an empty list. " - f"{EMPTY_LIST_ERROR_MSG}" + f"`BalAccountAbsentValues.{field_name}` cannot be an " + f"empty list. 
{EMPTY_LIST_ERROR_MSG}" ) # validate that storage_changes don't have empty slot_changes for storage_slot in self.storage_changes: if not storage_slot.slot_changes: raise ValueError( - f"`BalAccountAbsentValues.storage_changes[{storage_slot.slot}].slot_changes` " - f"cannot be an empty list. {EMPTY_LIST_ERROR_MSG}" + f"`BalAccountAbsentValues.storage_changes" + f"[{storage_slot.slot}].slot_changes` cannot be an empty " + f"list. {EMPTY_LIST_ERROR_MSG}" ) return self @@ -171,23 +176,35 @@ def validate_against(self, account: BalAccountChange) -> None: self._validate_forbidden_changes( account.nonce_changes, self.nonce_changes, - lambda a, f: a.block_access_index == f.block_access_index - and a.post_nonce == f.post_nonce, - lambda a: f"Unexpected nonce change found at tx {a.block_access_index}", + lambda a, f: ( + a.block_access_index == f.block_access_index + and a.post_nonce == f.post_nonce + ), + lambda a: ( + f"Unexpected nonce change found at tx {a.block_access_index}" + ), ) self._validate_forbidden_changes( account.balance_changes, self.balance_changes, - lambda a, f: a.block_access_index == f.block_access_index - and a.post_balance == f.post_balance, - lambda a: f"Unexpected balance change found at tx {a.block_access_index}", + lambda a, f: ( + a.block_access_index == f.block_access_index + and a.post_balance == f.post_balance + ), + lambda a: ( + f"Unexpected balance change found at tx {a.block_access_index}" + ), ) self._validate_forbidden_changes( account.code_changes, self.code_changes, - lambda a, f: a.block_access_index == f.block_access_index - and a.new_code == f.new_code, - lambda a: f"Unexpected code change found at tx {a.block_access_index}", + lambda a, f: ( + a.block_access_index == f.block_access_index + and a.new_code == f.new_code + ), + lambda a: ( + f"Unexpected code change found at tx {a.block_access_index}" + ), ) for forbidden_storage_slot in self.storage_changes: @@ -202,7 +219,8 @@ def validate_against(self, account: BalAccountChange) -> None: and a.post_value == f.post_value ), lambda a, slot=slot_id: ( - f"Unexpected storage change found at slot {slot} in tx {a.block_access_index}" + f"Unexpected storage change found at slot {slot} " + f"in tx {a.block_access_index}" ), ) diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/expectations.py b/packages/testing/src/execution_testing/test_types/block_access_list/expectations.py index c700eec77b..d2c6ce9430 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/expectations.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/expectations.py @@ -48,7 +48,9 @@ class BalAccountExpectation(CamelModel): ) absent_values: Optional[BalAccountAbsentValues] = Field( default=None, - description="Explicit absent value expectations using BalAccountAbsentValues", + description=( + "Explicit absent value expectations using BalAccountAbsentValues" + ), ) _EMPTY: ClassVar[Optional["BalAccountExpectation"]] = None @@ -108,7 +110,9 @@ class BlockAccessListExpectation(CamelModel): expected_block_access_list = BlockAccessListExpectation( account_expectations={ alice: BalAccountExpectation( - nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)] + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ] ), bob: None, # Bob should NOT be in the BAL } @@ -195,10 +199,10 @@ def verify_against(self, actual_bal: "BlockAccessList") -> None: elif not expectation.model_fields_set: # Disallow ambiguous BalAccountExpectation() with no fields 
set raise BlockAccessListValidationError( - f"Address {address}: BalAccountExpectation() with no fields set is " - f"ambiguous. Use BalAccountExpectation.empty() to validate no changes, " - f"or explicitly set the fields to validate " - f"(e.g., nonce_changes=[...])." + f"Address {address}: BalAccountExpectation() with no " + "fields set is ambiguous. Use BalAccountExpectation." + "empty() to validate no changes, or explicitly set the " + "fields to validate (e.g., nonce_changes=[...])." ) else: # check address is present and validate changes @@ -213,8 +217,9 @@ def verify_against(self, actual_bal: "BlockAccessList") -> None: address ) != BalAccountChange(address=address): raise BlockAccessListValidationError( - f"No account changes expected for {address} but found " - f"changes: {actual_accounts_by_addr[address]}" + f"No account changes expected for {address} but " + f"found changes: " + f"{actual_accounts_by_addr[address]}" ) actual_account = actual_accounts_by_addr[address] @@ -272,7 +277,8 @@ def _compare_account_expectations( # Check if explicitly set to empty but actual has values if not expected_list and actual_list: raise BlockAccessListValidationError( - f"Expected {field_name} to be empty but found {actual_list}" + f"Expected {field_name} to be empty but found " + f"{actual_list}" ) if field_name == "storage_reads": @@ -289,8 +295,8 @@ def _compare_account_expectations( if not found: raise BlockAccessListValidationError( - f"Storage read {expected_read} not found or not in correct order. " - f"Actual reads: {actual_list}" + f"Storage read {expected_read} not found or not " + f"in correct order. Actual reads: {actual_list}" ) elif field_name == "storage_changes": @@ -321,12 +327,20 @@ def _compare_account_expectations( actual_change = actual_slot_changes[ slot_actual_idx ] - if ( + actual_ba_idx = ( actual_change.block_access_index - == expected_change.block_access_index - and actual_change.post_value + ) + expected_ba_idx = ( + expected_change.block_access_index + ) + idx_match = ( + actual_ba_idx == expected_ba_idx + ) + val_match = ( + actual_change.post_value == expected_change.post_value - ): + ) + if idx_match and val_match: slot_found = True slot_actual_idx += 1 break @@ -334,10 +348,12 @@ def _compare_account_expectations( if not slot_found: raise BlockAccessListValidationError( - f"Storage change {expected_change} not found " - f"or not in correct order in slot " - f"{expected_slot.slot}. " - f"Actual slot changes: {actual_slot_changes}" + f"Storage change " + f"{expected_change} not found or " + f"not in correct order in slot " + f"{expected_slot.slot}. Actual " + f"slot changes: " + f"{actual_slot_changes}" ) found = True @@ -402,8 +418,9 @@ def _compare_account_expectations( if not found: raise BlockAccessListValidationError( - f"{item_type.capitalize()} change {exp_tuple} not found " - f"or not in correct order. Actual changes: {actual_tuples}" + f"{item_type.capitalize()} change {exp_tuple} not " + f"found or not in correct order. 
Actual changes: " + f"{actual_tuples}" ) diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py b/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py index ddad9605d4..550c21e8c8 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/modifiers.py @@ -47,7 +47,8 @@ def transform(bal: BlockAccessList) -> BlockAccessList: # sanity check that we found all addresses specified missing = set(addresses) - found_addresses raise ValueError( - f"Some specified addresses were not found in the BAL: {missing}" + f"Some specified addresses were not found in the BAL: " + f"{missing}" ) return BlockAccessList(root=new_root) @@ -93,7 +94,9 @@ def transform(bal: BlockAccessList) -> BlockAccessList: == block_access_index ): kwargs = { - "block_access_index": block_access_index, + "block_access_index": ( + block_access_index + ), value_field: new_value, } storage_slot.slot_changes[j] = ( @@ -298,8 +301,8 @@ def transform(bal: BlockAccessList) -> BlockAccessList: ZeroPaddedHexNumber(idx1) ) - # Note: storage_reads is just a list of StorageKey, no block_access_index to - # swap + # Note: storage_reads is just a list of StorageKey, no + # block_access_index to swap # Swap in code changes if new_account.code_changes: @@ -317,7 +320,8 @@ def transform(bal: BlockAccessList) -> BlockAccessList: new_root.append(new_account) - # Validate that at least one swap occurred for each index across all change types + # Validate at least one swap occurred for each index across all + # change types idx1_found = ( nonce_indices[idx1] or balance_indices[idx1] @@ -333,11 +337,13 @@ def transform(bal: BlockAccessList) -> BlockAccessList: if not idx1_found: raise ValueError( - f"Block access index {idx1} not found in any BAL changes to swap" + f"Block access index {idx1} not found in any BAL changes " + "to swap" ) if not idx2_found: raise ValueError( - f"Block access index {idx2} not found in any BAL changes to swap" + f"Block access index {idx2} not found in any BAL changes " + "to swap" ) return BlockAccessList(root=new_root) @@ -365,8 +371,9 @@ def append_change( """ Append a change to an account's field list. - Generic function to add extraneous entries to nonce_changes, balance_changes, - or code_changes fields. The field is inferred from the change type. + Generic function to add extraneous entries to nonce_changes, + balance_changes, or code_changes fields. The field is inferred from the + change type. 
""" # Infer field name from change type if isinstance(change, BalNonceChange): @@ -396,7 +403,8 @@ def transform(bal: BlockAccessList) -> BlockAccessList: if not found_address: raise ValueError( - f"Address {account} not found in BAL to append change to {field}" + f"Address {account} not found in BAL to append change to " + f"{field}" ) return BlockAccessList(root=new_root) @@ -415,7 +423,8 @@ def append_storage( Generic function for all storage operations: - If read=True: appends to storage_reads - - If change provided and slot exists: appends to existing slot's slot_changes + - If change provided and slot exists: appends to existing slot's + slot_changes - If change provided and slot new: creates new BalStorageSlot """ found_address = False diff --git a/packages/testing/src/execution_testing/test_types/block_access_list/t8n.py b/packages/testing/src/execution_testing/test_types/block_access_list/t8n.py index 19733a240f..7d1cb2bc07 100644 --- a/packages/testing/src/execution_testing/test_types/block_access_list/t8n.py +++ b/packages/testing/src/execution_testing/test_types/block_access_list/t8n.py @@ -63,6 +63,7 @@ def validate_structure(self) -> None: Raises: BlockAccessListValidationError: If validation fails + """ # Check address ordering (ascending) for i in range(1, len(self.root)): @@ -89,8 +90,9 @@ def validate_structure(self) -> None: # Check both ordering and duplicates if bal_indices != sorted(bal_indices): raise BlockAccessListValidationError( - f"Block access indices not in ascending order in {field_name} of account " - f"{account.address}. Got: {bal_indices}, Expected: {sorted(bal_indices)}" + f"Block access indices not in ascending order in " + f"{field_name} of account {account.address}. Got: " + f"{bal_indices}, Expected: {sorted(bal_indices)}" ) if len(bal_indices) != len(set(bal_indices)): @@ -102,8 +104,8 @@ def validate_structure(self) -> None: } ) raise BlockAccessListValidationError( - f"Duplicate transaction indices in {field_name} of account " - f"{account.address}. Duplicates: {duplicates}" + f"Duplicate transaction indices in {field_name} of " + f"account {account.address}. Duplicates: {duplicates}" ) # Check storage slot ordering @@ -114,7 +116,8 @@ def validate_structure(self) -> None: ): raise BlockAccessListValidationError( f"Storage slots not in ascending order in account " - f"{account.address}: {account.storage_changes[i - 1].slot} >= " + f"{account.address}: " + f"{account.storage_changes[i - 1].slot} >= " f"{account.storage_changes[i].slot}" ) @@ -130,9 +133,10 @@ def validate_structure(self) -> None: # Check both ordering and duplicates if bal_indices != sorted(bal_indices): raise BlockAccessListValidationError( - f"Transaction indices not in ascending order in storage slot " - f"{storage_slot.slot} of account {account.address}. " - f"Got: {bal_indices}, Expected: {sorted(bal_indices)}" + f"Transaction indices not in ascending order in " + f"storage slot {storage_slot.slot} of account " + f"{account.address}. 
Got: {bal_indices}, Expected: " + f"{sorted(bal_indices)}" ) if len(bal_indices) != len(set(bal_indices)): @@ -154,6 +158,7 @@ def validate_structure(self) -> None: if account.storage_reads[i - 1] >= account.storage_reads[i]: raise BlockAccessListValidationError( f"Storage reads not in ascending order in account " - f"{account.address}: {account.storage_reads[i - 1]} >= " + f"{account.address}: " + f"{account.storage_reads[i - 1]} >= " f"{account.storage_reads[i]}" ) diff --git a/packages/testing/src/execution_testing/test_types/receipt_types.py b/packages/testing/src/execution_testing/test_types/receipt_types.py index 4343b25d8d..66b1216daf 100644 --- a/packages/testing/src/execution_testing/test_types/receipt_types.py +++ b/packages/testing/src/execution_testing/test_types/receipt_types.py @@ -42,7 +42,7 @@ class TransactionReceipt(CamelModel): @model_validator(mode="before") @classmethod def strip_extra_fields(cls, data: Any) -> Any: - """Strip extra fields from t8n tool output that are not part of the model.""" + """Strip extra fields from t8n tool output not part of model.""" if isinstance(data, dict): # t8n tool returns 'succeeded' which is redundant with 'status' data.pop("succeeded", None) diff --git a/packages/testing/src/execution_testing/test_types/tests/test_blob_types.py b/packages/testing/src/execution_testing/test_types/tests/test_blob_types.py index 60490120f0..bcc2b2d163 100644 --- a/packages/testing/src/execution_testing/test_types/tests/test_blob_types.py +++ b/packages/testing/src/execution_testing/test_types/tests/test_blob_types.py @@ -68,8 +68,8 @@ def wait_until_counter_reached(target: int, poll_interval: float = 0.1) -> int: pytest.fail( f"The blob_unit_test lock counter is too high! " f"Expected {target}, but got {current_value}. " - f"It probably reused an existing file that was not cleared. " - f"Delete {file_path} manually to fix this." + f"It probably reused an existing file that was " + f"not cleared. Delete {file_path} to fix this." ) except Exception: current_value = 0 @@ -132,8 +132,8 @@ def test_blob_proof_corruption( b.corrupt_proof(corruption_mode) assert b.proof != old_valid_proof, ( - f"Proof corruption mode {corruption_mode} for fork {fork.name()} failed, " - "proof is unchanged!" + f"Proof corruption mode {corruption_mode} for fork {fork.name()} " + "failed, proof is unchanged!" ) increment_counter() @@ -169,8 +169,8 @@ def test_transition_fork_blobs( if not pre_transition_fork.supports_blobs() and timestamp < 15000: print( - f"Skipping blob creation because pre-transition fork is {pre_transition_fork} " - f"and timestamp is {timestamp}" + f"Skipping blob creation because pre-transition fork is " + f"{pre_transition_fork} and timestamp is {timestamp}" ) return @@ -180,14 +180,17 @@ def test_transition_fork_blobs( if timestamp == 14999: # case: no transition yet assert b.fork.name() == pre_transition_fork.name(), ( - f"Transition fork failure! Fork {fork.name()} at timestamp: {timestamp} should have " - f"stayed at fork {pre_transition_fork.name()} but has unexpectedly transitioned " + f"Transition fork failure! Fork {fork.name()} at timestamp: " + f"{timestamp} should have stayed at fork " + f"{pre_transition_fork.name()} but has unexpectedly transitioned " f"to {b.fork.name()}" ) elif timestamp == 15000: # case: transition to next fork has happened assert b.fork.name() == post_transition_fork_at_15k.name(), ( - f"Transition fork failure! 
Fork {fork.name()} at timestamp: {timestamp} should have " - f"transitioned to {post_transition_fork_at_15k.name()} but is still at {b.fork.name()}" + f"Transition fork failure! Fork {fork.name()} at timestamp: " + f"{timestamp} should have transitioned to " + f"{post_transition_fork_at_15k.name()} but is still at " + f"{b.fork.name()}" ) # delete counter at last iteration (otherwise re-running all unit tests diff --git a/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_serialization.py b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_serialization.py index a42d86ff65..d454a097ce 100644 --- a/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_serialization.py +++ b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_serialization.py @@ -19,7 +19,7 @@ def test_bal_serialization_roundtrip_zero_padded_hex() -> None: """ - Test that BAL serializes with zero-padded hex format and round-trips correctly. + Test BAL serializes with zero-padded hex format and round-trips correctly. This verifies that values like 12 serialize as "0x0c" (not "0xc"), which is required for consistency with other test vector fields. diff --git a/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py index 942abf6bb2..b959b39b43 100644 --- a/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py +++ b/packages/testing/src/execution_testing/test_types/tests/test_block_access_list_t8n.py @@ -129,7 +129,7 @@ def test_bal_storage_reads_ordering() -> None: ) def test_bal_block_access_indices_ordering(field_name: str) -> None: """ - Test that transaction indices must be in ascending order within change lists. + Test tx indices must be in ascending order within change lists. """ addr = Address(0xA) @@ -281,20 +281,24 @@ def test_bal_duplicate_block_access_indices(field_name: str) -> None: [BalAccountChange(address=addr, **{field_name: changes})] ) + match_pattern = ( + f"Duplicate transaction indices in {field_name}.*Duplicates: \\[1\\]" + ) with pytest.raises( BlockAccessListValidationError, - match=f"Duplicate transaction indices in {field_name}.*Duplicates: \\[1\\]", + match=match_pattern, ): bal.validate_structure() def test_bal_storage_duplicate_block_access_indices() -> None: """ - Test that storage changes must not have duplicate tx indices within same slot. + Test storage changes must not have duplicate tx indices within same slot. 
""" addr = Address(0xA) - # Create storage changes with duplicate block_access_index within the same slot + # Create storage changes with duplicate block_access_index within + # the same slot bal = BlockAccessList( [ BalAccountChange( @@ -322,9 +326,12 @@ def test_bal_storage_duplicate_block_access_indices() -> None: ] ) + match_pattern = ( + "Duplicate transaction indices in storage slot.*Duplicates: \\[1\\]" + ) with pytest.raises( BlockAccessListValidationError, - match="Duplicate transaction indices in storage slot.*Duplicates: \\[1\\]", + match=match_pattern, ): bal.validate_structure() diff --git a/packages/testing/src/execution_testing/test_types/tests/test_helpers.py b/packages/testing/src/execution_testing/test_types/tests/test_helpers.py index f14c58dc6e..f85cb6e1d0 100644 --- a/packages/testing/src/execution_testing/test_types/tests/test_helpers.py +++ b/packages/testing/src/execution_testing/test_types/tests/test_helpers.py @@ -61,7 +61,7 @@ def test_address() -> None: "0x06012c8cf97bead5deae237070f9587f8e7a266d", id="large-nonce-0x-str-address", marks=pytest.mark.xfail( - reason="Nonce too large to convert with hard-coded to_bytes length of 1" + reason="Nonce too large for hard-coded to_bytes length of 1" ), ), ], @@ -145,7 +145,7 @@ def test_compute_create2_address( https://github.com/ethereum/go-ethereum/blob/2189773093b2fe6d161b6477589f964470ff5bce/core/vm/instructions_test.go. Note: `compute_create2_address` does not generate checksum addresses. - """ + """ # noqa: E501 salt_as_int = int(salt, 16) initcode_as_bytes = bytes.fromhex(initcode[2:]) assert ( diff --git a/packages/testing/src/execution_testing/test_types/tests/test_post_alloc.py b/packages/testing/src/execution_testing/test_types/tests/test_post_alloc.py index c36fdf0684..045a056060 100644 --- a/packages/testing/src/execution_testing/test_types/tests/test_post_alloc.py +++ b/packages/testing/src/execution_testing/test_types/tests/test_post_alloc.py @@ -30,7 +30,9 @@ def alloc(request: pytest.FixtureRequest) -> Alloc: # Account should not exist but contained in alloc ( { - "0x0000000000000000000000000000000000000000": Account.NONEXISTENT + "0x0000000000000000000000000000000000000000": ( # noqa: E501 + Account.NONEXISTENT + ) }, { "0x0000000000000000000000000000000000000000": { @@ -45,7 +47,9 @@ def alloc(request: pytest.FixtureRequest) -> Alloc: # Account should not exist but contained in alloc ( { - "0x0000000000000000000000000000000000000000": Account.NONEXISTENT + "0x0000000000000000000000000000000000000000": ( # noqa: E501 + Account.NONEXISTENT + ) }, {"0x0000000000000000000000000000000000000000": {"nonce": "1"}}, Alloc.UnexpectedAccountError, @@ -53,7 +57,9 @@ def alloc(request: pytest.FixtureRequest) -> Alloc: # Account should not exist but contained in alloc ( { - "0x0000000000000000000000000000000000000001": Account.NONEXISTENT + "0x0000000000000000000000000000000000000001": ( # noqa: E501 + Account.NONEXISTENT + ) }, {"0x0000000000000000000000000000000000000001": {"balance": "1"}}, Alloc.UnexpectedAccountError, @@ -61,7 +67,9 @@ def alloc(request: pytest.FixtureRequest) -> Alloc: # Account should not exist but contained in alloc ( { - "0x000000000000000000000000000000000000000a": Account.NONEXISTENT + "0x000000000000000000000000000000000000000a": ( # noqa: E501 + Account.NONEXISTENT + ) }, {"0x000000000000000000000000000000000000000A": {"code": "0x00"}}, Alloc.UnexpectedAccountError, diff --git a/packages/testing/src/execution_testing/test_types/tests/test_types.py 
b/packages/testing/src/execution_testing/test_types/tests/test_types.py index e3e3cf5156..83ba6dc301 100644 --- a/packages/testing/src/execution_testing/test_types/tests/test_types.py +++ b/packages/testing/src/execution_testing/test_types/tests/test_types.py @@ -468,7 +468,7 @@ def test_account_merge( True, Environment(), { - "currentCoinbase": "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", + "currentCoinbase": "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", # noqa: E501 "currentGasLimit": str( ZeroPaddedHexNumber(Environment().gas_limit) ), @@ -507,7 +507,7 @@ def test_account_merge( block_hashes={1: 2, 3: 4}, ), { - "currentCoinbase": "0x0000000000000000000000000000000000001234", + "currentCoinbase": "0x0000000000000000000000000000000000001234", # noqa: E501 "currentGasLimit": str( ZeroPaddedHexNumber(Environment().gas_limit) ), @@ -528,7 +528,7 @@ def test_account_merge( { "index": "0x0", "validatorIndex": "0x1", - "address": "0x0000000000000000000000000000000000001234", + "address": "0x0000000000000000000000000000000000001234", # noqa: E501 "amount": "0x2", }, ], @@ -537,10 +537,10 @@ def test_account_merge( "currentBlobGasUsed": "0x10", "currentExcessBlobGas": "0x11", "blockHashes": { - "0x01": "0x0000000000000000000000000000000000000000000000000000000000000002", - "0x03": "0x0000000000000000000000000000000000000000000000000000000000000004", + "0x01": "0x0000000000000000000000000000000000000000000000000000000000000002", # noqa: E501 + "0x03": "0x0000000000000000000000000000000000000000000000000000000000000004", # noqa: E501 }, - "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000004", + "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000004", # noqa: E501 "ommers": [], }, id="environment_2", @@ -558,8 +558,8 @@ def test_account_merge( "gas": "0x5208", "gasPrice": "0xa", "v": "0x26", - "r": "0xcc61d852649c34cc0b71803115f38036ace257d2914f087bf885e6806a664fbd", - "s": "0x2020cb35f5d7731ab540d62614503a7f2344301a86342f67daf011c1341551ff", + "r": "0xcc61d852649c34cc0b71803115f38036ace257d2914f087bf885e6806a664fbd", # noqa: E501 + "s": "0x2020cb35f5d7731ab540d62614503a7f2344301a86342f67daf011c1341551ff", # noqa: E501 "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", }, id="transaction_t8n_default_args", @@ -579,8 +579,8 @@ def test_account_merge( "gas": "0x5208", "gasPrice": "0xa", "v": "0x25", - "r": "0x1cfe2cbb0c3577f74d9ae192a7f1ee2d670fe806a040f427af9cb768be3d07ce", - "s": "0xcbe2d029f52dbf93ade486625bed0603945d2c7358b31de99fe8786c00f13da", + "r": "0x1cfe2cbb0c3577f74d9ae192a7f1ee2d670fe806a040f427af9cb768be3d07ce", # noqa: E501 + "s": "0xcbe2d029f52dbf93ade486625bed0603945d2c7358b31de99fe8786c00f13da", # noqa: E501 "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", }, id="transaction_t8n_to_none", @@ -600,8 +600,8 @@ def test_account_merge( "gas": "0x5208", "gasPrice": "0xa", "v": "0x25", - "r": "0x1cfe2cbb0c3577f74d9ae192a7f1ee2d670fe806a040f427af9cb768be3d07ce", - "s": "0xcbe2d029f52dbf93ade486625bed0603945d2c7358b31de99fe8786c00f13da", + "r": "0x1cfe2cbb0c3577f74d9ae192a7f1ee2d670fe806a040f427af9cb768be3d07ce", # noqa: E501 + "s": "0xcbe2d029f52dbf93ade486625bed0603945d2c7358b31de99fe8786c00f13da", # noqa: E501 "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", }, id="transaction_t8n_to_empty_str", @@ -629,7 +629,7 @@ def test_account_merge( "to": "0x0000000000000000000000000000000000001234", "accessList": [ { - "address": "0x0000000000000000000000000000000000001234", + "address": 
"0x0000000000000000000000000000000000001234", # noqa: E501 "storageKeys": [ "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", @@ -647,8 +647,8 @@ def test_account_merge( "0x0000000000000000000000000000000000000000000000000000000000000001", ], "v": "0x0", - "r": "0x418bb557c43262375f80556cb09dac5e67396acf0eaaf2c2540523d1ce54b280", - "s": "0x4fa36090ea68a1138043d943ced123c0b0807d82ff3342a6977cbc09230e927c", + "r": "0x418bb557c43262375f80556cb09dac5e67396acf0eaaf2c2540523d1ce54b280", # noqa: E501 + "s": "0x4fa36090ea68a1138043d943ced123c0b0807d82ff3342a6977cbc09230e927c", # noqa: E501 "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", }, id="transaction_3", @@ -677,7 +677,8 @@ def test_json_deserialization( """Test that to_json returns the expected JSON for the given object.""" if not can_be_deserialized: pytest.skip( - reason="The model instance in this case can not be deserialized" + reason="The model instance in this case can not be " + "deserialized" ) model_type = type(model_instance) assert model_type(**json) == model_instance diff --git a/packages/testing/src/execution_testing/test_types/transaction_types.py b/packages/testing/src/execution_testing/test_types/transaction_types.py index aa4ef542fb..cb1267b94d 100644 --- a/packages/testing/src/execution_testing/test_types/transaction_types.py +++ b/packages/testing/src/execution_testing/test_types/transaction_types.py @@ -1,9 +1,9 @@ """Transaction-related types for Ethereum tests.""" +import numbers from dataclasses import dataclass from enum import IntEnum from functools import cached_property -import numbers from typing import Any, ClassVar, Dict, Generic, List, Literal, Self, Sequence import ethereum_rlp as eth_rlp @@ -832,7 +832,8 @@ def signer_minimum_balance(self, *, fork: Fork) -> int: if self.ty == 3 and self.blob_versioned_hashes is not None: max_fee_per_blob_gas = self.max_fee_per_blob_gas assert max_fee_per_blob_gas is not None, ( - "Impossible to calculate minimum balance without max_fee_per_blob_gas" + "Impossible to calculate minimum balance without " + "max_fee_per_blob_gas" ) return ( gas_price * gas_limit @@ -1008,7 +1009,7 @@ def set_gas_price( max_priority_fee_per_gas: int, max_fee_per_blob_gas: int, ) -> None: - """Set the gas price to the appropriate values of the current execution environment.""" + """Set gas price to values of the current execution environment.""" self.tx.set_gas_price( gas_price=gas_price, max_fee_per_gas=max_fee_per_gas, diff --git a/packages/testing/src/execution_testing/tools/tests/test_code.py b/packages/testing/src/execution_testing/tools/tests/test_code.py index 5da2d3b57b..b99a5c0797 100644 --- a/packages/testing/src/execution_testing/tools/tests/test_code.py +++ b/packages/testing/src/execution_testing/tools/tests/test_code.py @@ -496,7 +496,8 @@ def test_opcodes_if(conditional_bytecode: bytes, expected: bytes) -> None: default_action=Op.SSTORE(0, 6), ), {0: 3}, - id="five-cases-multiple-conditions-met", # first in list should be evaluated + # first in list should be evaluated + id="five-cases-multiple-conditions-met", ), pytest.param( Hash(9), diff --git a/packages/testing/src/execution_testing/tools/utility/generators.py b/packages/testing/src/execution_testing/tools/utility/generators.py index 003225f7dc..8819dfe04a 100644 --- a/packages/testing/src/execution_testing/tools/utility/generators.py +++ b/packages/testing/src/execution_testing/tools/utility/generators.py @@ -360,8 +360,9 @@ def 
wrapper( # storage, we need to add some NO-OP (JUMPDEST) to the code # that each consume 1 gas. assert gas_costs.G_JUMPDEST == 1, ( - f"JUMPDEST gas cost should be 1, but got {gas_costs.G_JUMPDEST}. " - "Generator `generate_system_contract_error_test` needs to be updated." + "JUMPDEST gas cost should be 1, but got " + f"{gas_costs.G_JUMPDEST}. Generator " + "`generate_system_contract_error_test` needs updating." ) modified_system_contract_code += sum( Op.JUMPDEST @@ -495,14 +496,14 @@ def gas_test( # 2 times GAS, POP, CALL, 6 times PUSH1 - instructions charged for at every # gas run gas_costs = fork.gas_costs() - OPCODE_GAS_COST = gas_costs.G_BASE - OPCODE_POP_COST = gas_costs.G_BASE - OPCODE_PUSH_COST = gas_costs.G_VERY_LOW + opcode_gas_cost = gas_costs.G_BASE + opcode_pop_cost = gas_costs.G_BASE + opcode_push_cost = gas_costs.G_VERY_LOW gas_single_gas_run = ( - 2 * OPCODE_GAS_COST - + OPCODE_POP_COST + 2 * opcode_gas_cost + + opcode_pop_cost + gas_costs.G_WARM_ACCOUNT_ACCESS - + 6 * OPCODE_PUSH_COST + + 6 * opcode_push_cost ) address_legacy_harness = pre.deploy_contract( code=( diff --git a/packages/testing/src/execution_testing/tools/utility/pytest.py b/packages/testing/src/execution_testing/tools/utility/pytest.py index 2f62fef52b..ac6c708bdd 100644 --- a/packages/testing/src/execution_testing/tools/utility/pytest.py +++ b/packages/testing/src/execution_testing/tools/utility/pytest.py @@ -131,7 +131,8 @@ def test_range(min_value, max_value, average): for i, case in enumerate(cases): if not (len(case.values) == 1 and isinstance(case.values[0], dict)): raise ValueError( - "each case must contain exactly one value; a dict of parameter values" + "each case must contain exactly one value; " + "a dict of parameter values" ) if set(case.values[0].keys()) - set(defaults.keys()): raise UnknownParameterInCasesError() diff --git a/packages/testing/src/execution_testing/tools/utility/tests/test_pytest.py b/packages/testing/src/execution_testing/tools/utility/tests/test_pytest.py index 5ba3d233a7..09ad5c0957 100644 --- a/packages/testing/src/execution_testing/tools/utility/tests/test_pytest.py +++ b/packages/testing/src/execution_testing/tools/utility/tests/test_pytest.py @@ -164,9 +164,9 @@ def test_extend_with_defaults_raises_for_unknown_default() -> None: # noqa: D10 ), ], ) -def test_extend_with_defaults_raises_value_error( +def test_extend_with_defaults_raises_value_error( # noqa: D103 defaults: dict, cases: list -) -> None: # noqa: D103 +) -> None: expected_message = ( "each case must contain exactly one value; a dict of parameter values" ) diff --git a/packages/testing/src/execution_testing/tools/utility/versioning.py b/packages/testing/src/execution_testing/tools/utility/versioning.py index 991474bf6e..d8399e8e3d 100644 --- a/packages/testing/src/execution_testing/tools/utility/versioning.py +++ b/packages/testing/src/execution_testing/tools/utility/versioning.py @@ -33,7 +33,7 @@ def get_current_commit_hash_or_tag( ) except InvalidGitRepositoryError: # Handle the case where the repository is not a valid Git repository - return "Not a git repository; this should only be seen in framework tests." + return "Not a git repository; only seen in framework tests." 
def generate_github_url( diff --git a/packages/testing/src/execution_testing/vm/tests/test_vm.py b/packages/testing/src/execution_testing/vm/tests/test_vm.py index 5e30845f86..837de4b25e 100644 --- a/packages/testing/src/execution_testing/vm/tests/test_vm.py +++ b/packages/testing/src/execution_testing/vm/tests/test_vm.py @@ -80,7 +80,7 @@ + [0xFF] * 32 + [0x55] ), - id="SSTORE(-1, CALL(GAS, ADDRESS, PUSH1(0x20), 0, 0, 0x20, 0x1234))", + id="SSTORE(-1, CALL(GAS, ADDRESS, PUSH1(0x20), 0, 0, 0x20, 0x1234))", # noqa: E501 ), pytest.param( Op.CALL(Op.GAS, Op.PUSH20(0x1234), 0, 0, 0, 0, 32), @@ -395,7 +395,7 @@ def test_opcode_kwargs_validation() -> None: with pytest.raises( ValueError, - match=r"Invalid keyword argument\(s\) \['wrong_arg'\] for opcode MSTORE", + match=r"Invalid keyword argument\(s\) \['wrong_arg'\] for opcode MSTORE", # noqa: E501 ): Op.MSTORE(offset=0, value=1, wrong_arg=2) diff --git a/packages/testing/stubs/requests_unixsocket/__init__.pyi b/packages/testing/stubs/requests_unixsocket/__init__.pyi index 2937cd5481..c05719f59e 100644 --- a/packages/testing/stubs/requests_unixsocket/__init__.pyi +++ b/packages/testing/stubs/requests_unixsocket/__init__.pyi @@ -1,8 +1,9 @@ +from typing import Callable, Self, Tuple + import requests -from typing import Tuple, Callable, Self -from requests.sessions import _Data from _typeshed import Incomplete from requests.models import _JSON, Response +from requests.sessions import _Data DEFAULT_SCHEME: str @@ -11,7 +12,7 @@ class Session(requests.Session): self, url_scheme: str = ..., *args: Incomplete, **kwargs: Incomplete ) -> None: ... -class monkeypatch: +class monkeypatch: # noqa: N801 session: Session methods: Tuple[str | bytes, ...] orig_methods: dict[str | bytes, Callable] diff --git a/packages/testing/stubs/requests_unixsocket/adapters.pyi b/packages/testing/stubs/requests_unixsocket/adapters.pyi index 38325850c3..8a87712e41 100644 --- a/packages/testing/stubs/requests_unixsocket/adapters.pyi +++ b/packages/testing/stubs/requests_unixsocket/adapters.pyi @@ -1,12 +1,13 @@ +from socket import socket +from typing import Mapping, Tuple + import urllib3 -from typing import Tuple, Mapping from _typeshed import Incomplete from requests.adapters import HTTPAdapter from requests.models import PreparedRequest -from urllib3.util import Timeout -from urllib3.connectionpool import HTTPConnectionPool from urllib3._collections import RecentlyUsedContainer -from socket import socket +from urllib3.connectionpool import HTTPConnectionPool +from urllib3.util import Timeout class UnixHTTPConnection(urllib3.connection.HTTPConnection): unix_socket_url: str From c14e9c9a290f7203df1654cb2b8401881acbb011 Mon Sep 17 00:00:00 2001 From: felipe Date: Tue, 20 Jan 2026 12:43:08 -0700 Subject: [PATCH 089/154] fix(tools): fix remaining lint checks after large ruff refactor (#2050) --- .../src/execution_testing/fixtures/base.py | 3 +- .../execution_testing/fixtures/blockchain.py | 52 +++++++++------ .../execution_testing/fixtures/collector.py | 3 +- .../src/execution_testing/fixtures/common.py | 2 +- .../fixtures/pre_alloc_groups.py | 6 +- .../fixtures/tests/test_blockchain.py | 64 +++++++++---------- .../fixtures/tests/test_state.py | 10 ++- 7 files changed, 75 insertions(+), 65 deletions(-) diff --git a/packages/testing/src/execution_testing/fixtures/base.py b/packages/testing/src/execution_testing/fixtures/base.py index 302d7e58b0..07416c7555 100644 --- a/packages/testing/src/execution_testing/fixtures/base.py +++ 
b/packages/testing/src/execution_testing/fixtures/base.py @@ -33,7 +33,8 @@ def fixture_format_discriminator(v: Any) -> str | None: info_dict = v.info if info_dict is None: raise ValueError( - f"Fixture does not have an info field, cannot determine fixture format: {v}" + "Fixture does not have an info field, " + f"cannot determine fixture format: {v}" ) fixture_format = info_dict.get("fixture-format") if not fixture_format: diff --git a/packages/testing/src/execution_testing/fixtures/blockchain.py b/packages/testing/src/execution_testing/fixtures/blockchain.py index 786d6dd34e..23145e6238 100644 --- a/packages/testing/src/execution_testing/fixtures/blockchain.py +++ b/packages/testing/src/execution_testing/fixtures/blockchain.py @@ -97,11 +97,13 @@ def validate_post_state_fields(self: Any) -> Any: if field1_value is None and field2_value is None: raise ValueError( - f"Either {field1_name} or {field2_name} must be provided." + f"Either {field1_name} or {field2_name} " + "must be provided." ) if field1_value is not None and field2_value is not None: raise ValueError( - f"Only one of {field1_name} or {field2_name} must be provided." + f"Only one of {field1_name} or {field2_name} " + "must be provided." ) return self @@ -145,8 +147,8 @@ class FixtureHeader(CamelModel): We combine the `Environment` and `Result` contents to create this model. """ - # Allow extra fields: FixtureHeader is constructed from merged Result and - # Environment data via model_dump(), which includes fields not in this model. + # Allow extra fields: FixtureHeader is constructed from merged Result + # and Environment data via model_dump(), which has extra fields. model_config = CamelModel.model_config | {"extra": "ignore"} parent_hash: Hash = Hash(0) @@ -286,10 +288,11 @@ def get_default_from_annotation( timestamp: int = 0, ) -> Any: """ - Get appropriate default value for a header field based on its type hint. + Get default value for a header field based on its type hint. This method handles: - 1. Fork requirement checking - only returns a default if the fork requires the field + 1. Fork requirement checking - only returns a default if the fork + requires the field 2. Model-defined defaults - uses the field's default value if available 3. Type-based defaults - constructs defaults based on the field type @@ -297,7 +300,8 @@ def get_default_from_annotation( fork: Fork to check requirements against field_name: Name of the field field_hint: Type annotation of the field - block_number: Block number for fork requirement checking (default: 0) + block_number: Block number for fork requirement checking + (default: 0) timestamp: Timestamp for fork requirement checking (default: 0) Returns: @@ -305,9 +309,11 @@ def get_default_from_annotation( the field is not required by the fork Raises: - TypeError: If the field type is not supported and no default value - is defined in the model. This indicates that support for the type - needs to be added or an explicit default must be provided. + TypeError: If the field type is not supported and no default + value is defined in the model. This indicates that support + for the type needs to be added or an explicit default must + be provided. 
+ """ # Check if this field has a HeaderForkRequirement annotation header_fork_requirement = HeaderForkRequirement.get_from_annotation( @@ -315,13 +321,18 @@ def get_default_from_annotation( ) if header_fork_requirement is not None: # Only provide a default if the fork requires this field - if not header_fork_requirement.required(fork, block_number, timestamp): + if not header_fork_requirement.required( + fork, block_number, timestamp + ): return None # Check if the field has a default value defined in the model if field_name in cls.model_fields: field_info = cls.model_fields[field_name] - if field_info.default is not None and field_info.default is not PydanticUndefined: + if ( + field_info.default is not None + and field_info.default is not PydanticUndefined + ): return field_info.default if field_info.default_factory is not None: return field_info.default_factory() # type: ignore[call-arg] @@ -339,11 +350,11 @@ def get_default_from_annotation( elif actual_type == Bytes: return Bytes(b"") else: - # Unsupported type - raise an error to catch this during development + # Unsupported type - raise error to catch this during development raise TypeError( f"Cannot generate default value for field '{field_name}' " f"with unsupported type '{actual_type}'. " - f"Add support for this type or provide a default value explicitly." + "Add support for this type or provide a default explicitly." ) @classmethod @@ -501,7 +512,8 @@ def from_fixture_header( ): if block_access_list is None: raise ValueError( - f"`block_access_list` is required in engine `ExecutionPayload` for >={fork}." + "`block_access_list` is required in engine " + f"`ExecutionPayload` for >={fork}." ) execution_payload = FixtureExecutionPayload.from_fixture_header( @@ -609,8 +621,9 @@ class FixtureBlockBase(CamelModel): @classmethod def strip_block_number_computed_field(cls, data: Any) -> Any: """ - Strip the block_number computed field which gets included in model_dump() - but is not a valid input field. + Strip the block_number computed field included in model_dump(). + + This field is not a valid input field. """ if isinstance(data, dict): data.pop("blocknumber", None) @@ -785,7 +798,7 @@ class BlockchainEngineXFixture(BlockchainEngineFixtureCommon): """ # Allow extra fields: BlockchainEngineXFixture is constructed from shared - # fixture_data that includes fields for other fixture formats (e.g. genesis). + # fixture_data that has fields for other fixture formats (e.g. genesis). model_config = CamelModel.model_config | {"extra": "ignore"} format_name: ClassVar[str] = "blockchain_test_engine_x" @@ -824,7 +837,8 @@ class BlockchainEngineSyncFixture(BlockchainEngineFixture): format_name: ClassVar[str] = "blockchain_test_sync" description: ClassVar[str] = ( - "Tests that generate a blockchain test fixture for Engine API testing with client sync." + "Tests that generate a blockchain test fixture for Engine API " + "testing with client sync." 
) sync_payload: FixtureEngineNewPayload | None = None diff --git a/packages/testing/src/execution_testing/fixtures/collector.py b/packages/testing/src/execution_testing/fixtures/collector.py index 53d5aaf83d..b389609d5d 100644 --- a/packages/testing/src/execution_testing/fixtures/collector.py +++ b/packages/testing/src/execution_testing/fixtures/collector.py @@ -84,7 +84,8 @@ def get_dump_dir_path( str(test_module_relative_dir).replace(os.sep, "__") ) test_name, test_parameter_string = self.get_name_and_parameters() - flat_path = f"{str(test_module_relative_dir).replace(os.sep, '__')}__{test_name}" + dir_str = str(test_module_relative_dir).replace(os.sep, "__") + flat_path = f"{dir_str}__{test_name}" if level == "test_function": return Path(base_dump_dir) / flat_path elif level == "test_parameter": diff --git a/packages/testing/src/execution_testing/fixtures/common.py b/packages/testing/src/execution_testing/fixtures/common.py index 17aa874852..eb381019f9 100644 --- a/packages/testing/src/execution_testing/fixtures/common.py +++ b/packages/testing/src/execution_testing/fixtures/common.py @@ -52,7 +52,7 @@ class FixtureAuthorizationTuple( """Authorization tuple for fixture transactions.""" # Allow extra fields: FixtureAuthorizationTuple is constructed from - # AuthorizationTuple via model_dump(), which includes fields not in this model. + # AuthorizationTuple via model_dump(), which has extra fields. model_config = CamelModel.model_config | {"extra": "ignore"} v: ZeroPaddedHexNumber = Field( diff --git a/packages/testing/src/execution_testing/fixtures/pre_alloc_groups.py b/packages/testing/src/execution_testing/fixtures/pre_alloc_groups.py index 989a10a0bd..41aa1d1150 100644 --- a/packages/testing/src/execution_testing/fixtures/pre_alloc_groups.py +++ b/packages/testing/src/execution_testing/fixtures/pre_alloc_groups.py @@ -178,7 +178,7 @@ class ModelDumpCache: model_dump_mode: Literal["json", "python"] """Mode of the model dump when `model_dump` is called.""" model_dump_type: Literal["string", "dict"] - """Whether `model_dump_json` or `model_dump` was used to generate the data.""" + """Whether `model_dump_json` or `model_dump` was used to generate data.""" data: Any @@ -197,7 +197,7 @@ class GroupPreAlloc(Alloc): _model_dump_cache: ModelDumpCache | None = PrivateAttr(None) def state_root(self) -> Hash: - """On pre-alloc groups, which are normally very big, we always cache.""" + """On pre-alloc groups, which are normally very big, always cache.""" if self._cached_state_root is not None: return self._cached_state_root return super().state_root() @@ -230,7 +230,7 @@ def model_dump( # type: ignore[override] return data def model_dump_json(self, **kwargs: Any) -> str: - """Model dump the pre-allocation group in JSON string format, with caching.""" + """Model dump the pre-allocation group in JSON string, with caching.""" if ( self._model_dump_cache is not None and self._model_dump_cache.model_dump_mode == "json" diff --git a/packages/testing/src/execution_testing/fixtures/tests/test_blockchain.py b/packages/testing/src/execution_testing/fixtures/tests/test_blockchain.py index 966b8359bb..33a153b6a6 100644 --- a/packages/testing/src/execution_testing/fixtures/tests/test_blockchain.py +++ b/packages/testing/src/execution_testing/fixtures/tests/test_blockchain.py @@ -87,8 +87,8 @@ "gasLimit": "0x5208", "gasPrice": "0x0a", "v": "0x26", - "r": "0xcc61d852649c34cc0b71803115f38036ace257d2914f087bf885e6806a664fbd", - "s": "0x2020cb35f5d7731ab540d62614503a7f2344301a86342f67daf011c1341551ff", + "r": 
"0xcc61d852649c34cc0b71803115f38036ace257d2914f087bf885e6806a664fbd", # noqa: E501 + "s": "0x2020cb35f5d7731ab540d62614503a7f2344301a86342f67daf011c1341551ff", # noqa: E501 "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", }, id="fixture_transaction_type_0_default_values", @@ -108,8 +108,8 @@ "gasLimit": "0x5208", "gasPrice": "0x0a", "v": "0x25", - "r": "0x1cfe2cbb0c3577f74d9ae192a7f1ee2d670fe806a040f427af9cb768be3d07ce", - "s": "0x0cbe2d029f52dbf93ade486625bed0603945d2c7358b31de99fe8786c00f13da", + "r": "0x1cfe2cbb0c3577f74d9ae192a7f1ee2d670fe806a040f427af9cb768be3d07ce", # noqa: E501 + "s": "0x0cbe2d029f52dbf93ade486625bed0603945d2c7358b31de99fe8786c00f13da", # noqa: E501 "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", }, id="fixture_transaction_type_0_contract_creation", @@ -130,8 +130,8 @@ "gasPrice": "0x0a", "accessList": [], "v": "0x01", - "r": "0x58b4ddaa529492d32b6bc8327eb8ee0bc8b535c3bfc0f4f1db3d7c16b51d1851", - "s": "0x5ef19167661b14d06dfc785bf62693e6f9e5a44e7c11e0320efed27b27294970", + "r": "0x58b4ddaa529492d32b6bc8327eb8ee0bc8b535c3bfc0f4f1db3d7c16b51d1851", # noqa: E501 + "s": "0x5ef19167661b14d06dfc785bf62693e6f9e5a44e7c11e0320efed27b27294970", # noqa: E501 "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", }, id="fixture_transaction_type_1_default_values", @@ -155,8 +155,8 @@ "maxFeePerGas": "0x07", "accessList": [], "v": "0x00", - "r": "0x33fc39081d01f8e7f0ce5426d4a00a7b07c2edea064d24a8cac8e4b1f0c08298", - "s": "0x4635e1c45238697db38e37070d4fce27fb5684f9dec4046466ea42a9834bad0a", + "r": "0x33fc39081d01f8e7f0ce5426d4a00a7b07c2edea064d24a8cac8e4b1f0c08298", # noqa: E501 + "s": "0x4635e1c45238697db38e37070d4fce27fb5684f9dec4046466ea42a9834bad0a", # noqa: E501 "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", }, id="fixture_transaction_type_2_default_values", @@ -185,8 +185,8 @@ "accessList": [], "blobVersionedHashes": [], "v": "0x01", - "r": "0x8978475a00bf155bf5687dfda89c2df55ef6c341cdfd689aeaa6c519569a530a", - "s": "0x66fc34935cdd191441a12a2e7b1f224cb40b928afb9bc89c8ddb2b78c19342cc", + "r": "0x8978475a00bf155bf5687dfda89c2df55ef6c341cdfd689aeaa6c519569a530a", # noqa: E501 + "s": "0x66fc34935cdd191441a12a2e7b1f224cb40b928afb9bc89c8ddb2b78c19342cc", # noqa: E501 "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", }, id="fixture_transaction_type_3_default_values", @@ -224,15 +224,15 @@ "address": Address(2).hex(), "nonce": "0x03", "v": "0x00", - "r": "0xda29c3bd0304ae475b06d1a11344e0b6d75590f2c23138c9507f4b5bedde3c79", - "s": "0x3e1fb143ae0460373d567cf901645757b321e42c423a53b2d46ed13c9ef0a9ab", + "r": "0xda29c3bd0304ae475b06d1a11344e0b6d75590f2c23138c9507f4b5bedde3c79", # noqa: E501 + "s": "0x3e1fb143ae0460373d567cf901645757b321e42c423a53b2d46ed13c9ef0a9ab", # noqa: E501 "signer": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", "yParity": "0x00", } ], "v": "0x01", - "r": "0xe7da7f244c95cea73ac6316971139ac0eb8fad455d9a25e1c134d7a157c38ff9", - "s": "0x1939185d2e2a2b3375183e42b5755d695efbd72e186cf9a3e6958a3fb84cc709", + "r": "0xe7da7f244c95cea73ac6316971139ac0eb8fad455d9a25e1c134d7a157c38ff9", # noqa: E501 + "s": "0x1939185d2e2a2b3375183e42b5755d695efbd72e186cf9a3e6958a3fb84cc709", # noqa: E501 "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", }, id="fixture_transaction_type_4", @@ -262,7 +262,7 @@ "to": "0x0000000000000000000000000000000000001234", "accessList": [ { - "address": "0x0000000000000000000000000000000000001234", + "address": "0x0000000000000000000000000000000000001234", # noqa: E501 "storageKeys": [ 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", @@ -280,8 +280,8 @@ "0x0000000000000000000000000000000000000000000000000000000000000001", ], "v": "0x00", - "r": "0x418bb557c43262375f80556cb09dac5e67396acf0eaaf2c2540523d1ce54b280", - "s": "0x4fa36090ea68a1138043d943ced123c0b0807d82ff3342a6977cbc09230e927c", + "r": "0x418bb557c43262375f80556cb09dac5e67396acf0eaaf2c2540523d1ce54b280", # noqa: E501 + "s": "0x4fa36090ea68a1138043d943ced123c0b0807d82ff3342a6977cbc09230e927c", # noqa: E501 "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", }, id="fixture_transaction_3", @@ -321,7 +321,7 @@ "extraData": Bytes([12]).hex(), "mixHash": Hash(13).hex(), "nonce": HeaderNonce(14).hex(), - "hash": "0x1dc087517148c2d6a1dd1ea5de107bc5f728414f9d210ed18286d305abe6ba5e", + "hash": "0x1dc087517148c2d6a1dd1ea5de107bc5f728414f9d210ed18286d305abe6ba5e", # noqa: E501 }, id="fixture_header_1", ), @@ -368,7 +368,7 @@ "withdrawalsRoot": Hash(16).hex(), "blobGasUsed": ZeroPaddedHexNumber(17).hex(), "excessBlobGas": ZeroPaddedHexNumber(18).hex(), - "hash": "0xd90115b7fde329f64335763a446af150ab67e639281dccdb07a007d18bb80211", + "hash": "0xd90115b7fde329f64335763a446af150ab67e639281dccdb07a007d18bb80211", # noqa: E501 }, id="fixture_header_2", ), @@ -423,7 +423,7 @@ "withdrawalsRoot": Hash(16).hex(), "blobGasUsed": ZeroPaddedHexNumber(17).hex(), "excessBlobGas": ZeroPaddedHexNumber(18).hex(), - "hash": "0xd90115b7fde329f64335763a446af150ab67e639281dccdb07a007d18bb80211", + "hash": "0xd90115b7fde329f64335763a446af150ab67e639281dccdb07a007d18bb80211", # noqa: E501 }, "blocknumber": "8", "uncleHeaders": [], @@ -438,8 +438,8 @@ "gasLimit": "0x5208", "gasPrice": "0x0a", "v": "0x26", - "r": "0xcc61d852649c34cc0b71803115f38036ace257d2914f087bf885e6806a664fbd", - "s": "0x2020cb35f5d7731ab540d62614503a7f2344301a86342f67daf011c1341551ff", + "r": "0xcc61d852649c34cc0b71803115f38036ace257d2914f087bf885e6806a664fbd", # noqa: E501 + "s": "0x2020cb35f5d7731ab540d62614503a7f2344301a86342f67daf011c1341551ff", # noqa: E501 "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", } ], @@ -497,7 +497,7 @@ "withdrawalsRoot": Hash(16).hex(), "blobGasUsed": ZeroPaddedHexNumber(17).hex(), "excessBlobGas": ZeroPaddedHexNumber(18).hex(), - "hash": "0xd90115b7fde329f64335763a446af150ab67e639281dccdb07a007d18bb80211", + "hash": "0xd90115b7fde329f64335763a446af150ab67e639281dccdb07a007d18bb80211", # noqa: E501 }, "blocknumber": "8", "uncleHeaders": [], @@ -512,8 +512,8 @@ "gasLimit": "0x5208", "gasPrice": "0x0a", "v": "0x25", - "r": "0x1cfe2cbb0c3577f74d9ae192a7f1ee2d670fe806a040f427af9cb768be3d07ce", - "s": "0x0cbe2d029f52dbf93ade486625bed0603945d2c7358b31de99fe8786c00f13da", + "r": "0x1cfe2cbb0c3577f74d9ae192a7f1ee2d670fe806a040f427af9cb768be3d07ce", # noqa: E501 + "s": "0x0cbe2d029f52dbf93ade486625bed0603945d2c7358b31de99fe8786c00f13da", # noqa: E501 "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", } ], @@ -540,7 +540,7 @@ ), { "rlp": "0x00", - "expectException": "TransactionException.INTRINSIC_GAS_TOO_LOW", + "expectException": "TransactionException.INTRINSIC_GAS_TOO_LOW", # noqa: E501 }, id="invalid_fixture_block_2", ), @@ -554,7 +554,7 @@ ), { "rlp": "0x00", - "expectException": "TransactionException.INTRINSIC_GAS_TOO_LOW", + "expectException": "TransactionException.INTRINSIC_GAS_TOO_LOW", # noqa: E501 }, id="invalid_fixture_block_3", ), @@ -635,7 +635,7 @@ "baseFeePerGas": hex(15), "blobGasUsed": hex(17), "excessBlobGas": hex(18), - 
"blockHash": "0xd90115b7fde329f64335763a446af150ab67e639281dccdb07a007d18bb80211", + "blockHash": "0xd90115b7fde329f64335763a446af150ab67e639281dccdb07a007d18bb80211", # noqa: E501 "transactions": [ Transaction( to=0x1234, @@ -1008,9 +1008,7 @@ def test_json_deserialization( ) -> None: """Test that to_json returns the expected JSON for the given object.""" if not can_be_deserialized: - pytest.skip( - reason="The model instance in this case can not be deserialized" - ) + pytest.skip(reason="Model instance cannot be deserialized") model_type = type(model_instance) assert model_type(**json_repr) == model_instance @@ -1412,7 +1410,5 @@ def test_json_deserialization( ) -> None: """Test that to_json returns the expected JSON for the given object.""" if not can_be_deserialized: - pytest.skip( - reason="The model instance in this case can not be deserialized" - ) + pytest.skip(reason="Model instance cannot be deserialized") assert adapter.validate_python(json_repr) == type_instance diff --git a/packages/testing/src/execution_testing/fixtures/tests/test_state.py b/packages/testing/src/execution_testing/fixtures/tests/test_state.py index 1759c41d3f..962392100a 100644 --- a/packages/testing/src/execution_testing/fixtures/tests/test_state.py +++ b/packages/testing/src/execution_testing/fixtures/tests/test_state.py @@ -43,7 +43,7 @@ "hash": Hash(0).hex(), "logs": Hash(1).hex(), "txbytes": Bytes(b"\x02").hex(), - "expectException": "TransactionException.INITCODE_SIZE_EXCEEDED", + "expectException": "TransactionException.INITCODE_SIZE_EXCEEDED", # noqa: E501 "indexes": {"data": 0, "gas": 0, "value": 0}, "state": {}, }, @@ -64,7 +64,7 @@ "hash": Hash(0).hex(), "logs": Hash(1).hex(), "txbytes": Bytes(b"\x02").hex(), - "expectException": "TransactionException.INITCODE_SIZE_EXCEEDED", + "expectException": "TransactionException.INITCODE_SIZE_EXCEEDED", # noqa: E501 "indexes": {"data": 0, "gas": 0, "value": 0}, "state": {}, }, @@ -86,7 +86,7 @@ "hash": Hash(0).hex(), "logs": Hash(1).hex(), "txbytes": Bytes(b"\x02").hex(), - "expectException": "TransactionException.INITCODE_SIZE_EXCEEDED|" + "expectException": "TransactionException.INITCODE_SIZE_EXCEEDED|" # noqa: E501 "TransactionException.INSUFFICIENT_ACCOUNT_FUNDS", "indexes": {"data": 0, "gas": 0, "value": 0}, "state": {}, @@ -116,8 +116,6 @@ def test_json_deserialization( ) -> None: """Test that to_json returns the expected JSON for the given object.""" if not can_be_deserialized: - pytest.skip( - reason="The model instance in this case can not be deserialized" - ) + pytest.skip(reason="Model instance cannot be deserialized") model_type = type(model_instance) assert model_type(**json) == model_instance From 7b9fc7d5558f2f88dcd1d9a29f247cdb25e484db Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Wed, 21 Jan 2026 21:38:51 +0100 Subject: [PATCH 090/154] feat(ci): Create Devnet Workflows (#2053) * feat: Create devnet branch, rebase eip branch * fixes * fixes * fix * fixes * remove types * Update .github/actions/merge-eip-branches/action.yaml Co-authored-by: spencer * Update .github/actions/rebase-eip-branch/action.yaml Co-authored-by: spencer * Update .github/actions/merge-eip-branches/action.yaml Co-authored-by: spencer * PR review feedback * REMOTE env variable * Nit: Change names * fix --------- Co-authored-by: spencer --- .../actions/merge-eip-branches/action.yaml | 127 ++++++++++++++++++ .github/actions/rebase-eip-branch/action.yaml | 71 ++++++++++ .github/workflows/eip-rebase.yaml | 36 +++++ .github/workflows/update-devnet-branch.yaml | 44 ++++++ 4 files 
changed, 278 insertions(+) create mode 100644 .github/actions/merge-eip-branches/action.yaml create mode 100644 .github/actions/rebase-eip-branch/action.yaml create mode 100644 .github/workflows/eip-rebase.yaml create mode 100644 .github/workflows/update-devnet-branch.yaml diff --git a/.github/actions/merge-eip-branches/action.yaml b/.github/actions/merge-eip-branches/action.yaml new file mode 100644 index 0000000000..1a0b9afb64 --- /dev/null +++ b/.github/actions/merge-eip-branches/action.yaml @@ -0,0 +1,127 @@ +name: Merge EIP Branches +description: Merges multiple EIP branches into a devnet branch, by pushing using --force-with-lease + +inputs: + fork: + description: 'Fork name (e.g., amsterdam)' + required: true + devnet_name: + description: 'Devnet name (e.g., amsterdam/1, bal/2, eip-1234/3). Created branch will be `devnets/$devnet_name`' + required: true + eip_numbers: + description: 'Comma-separated list of EIP numbers (e.g., 1234,2345,3456)' + required: true + +runs: + using: "composite" + steps: + - name: Configure Git + shell: bash + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Merge EIP branches + shell: bash + env: + FORK: ${{ inputs.fork }} + DEVNET_NAME: ${{ inputs.devnet_name }} + EIP_NUMBERS: ${{ inputs.eip_numbers }} + REMOTE: origin + run: | + set -euo pipefail + + echo "FORK=$FORK" + echo "DEVNET_NAME=$DEVNET_NAME" + echo "EIP_NUMBERS=$EIP_NUMBERS" + + FORK_BRANCH="forks/${FORK}" + DEVNET_BRANCH="devnets/${DEVNET_NAME}" + + # Basic ref-format validation for user-provided pieces + # (git check-ref-format expects full refnames for some checks; --branch is easiest) + git check-ref-format --branch "${DEVNET_BRANCH}" >/dev/null + + # Convert comma-separated list to array + IFS=',' read -ra RAW_EIPS <<< "$EIP_NUMBERS" + + # Normalize: trim whitespace, drop empties + EIPS=() + for raw in "${RAW_EIPS[@]}"; do + eip="$(echo "$raw" | xargs)" # trims leading/trailing whitespace + if [[ -n "$eip" ]]; then + # Optional: ensure it looks numeric + if [[ ! "$eip" =~ ^[0-9]+$ ]]; then + echo "Error: Invalid EIP number ${eip}" + exit 1 + fi + EIPS+=("$eip") + fi + done + + if [[ ${#EIPS[@]} -eq 0 ]]; then + echo "Error: No EIP numbers provided after parsing '${EIP_NUMBERS}'" + exit 1 + fi + + git fetch "${REMOTE}" --prune + + # Build the list of expected remote branches and verify all exist, + # then checkout each branch and rebase it onto the fork branch automatically. + for eip in "${EIPS[@]}"; do + EIP_BRANCH="eips/${FORK}/eip-${eip}" + # Validate ref format for each constructed branch name + git check-ref-format --branch "${EIP_BRANCH}" >/dev/null || { + echo "Error: Computed branch name '${EIP_BRANCH}' is not a valid ref" + exit 1 + } + + if ! git show-ref --verify --quiet "refs/remotes/${REMOTE}/${EIP_BRANCH}"; then + echo "Error: Missing remote branch ${REMOTE}/${EIP_BRANCH}" + exit 1 + fi + + git checkout -B "${EIP_BRANCH}" "${REMOTE}/${EIP_BRANCH}" + + if git merge-base --is-ancestor "${REMOTE}/${FORK_BRANCH}" "${EIP_BRANCH}"; then + echo "${EIP_BRANCH} is already based on ${FORK_BRANCH}, skipping rebase" + else + if ! 
git rebase "${REMOTE}/${FORK_BRANCH}"; then + echo "Error: Automatic rebase of ${EIP_BRANCH} onto ${FORK_BRANCH} failed" + git rebase --abort || true + exit 1 + else + echo "Rebase of ${EIP_BRANCH} onto ${FORK_BRANCH} successful" + fi + fi + done + + # Create (or reset) devnet branch from first EIP + FIRST_EIP="eips/${FORK}/eip-${EIPS[0]}" + echo "Creating branch ${DEVNET_BRANCH} from ${FIRST_EIP}" + git checkout -B "${DEVNET_BRANCH}" "${FIRST_EIP}" + + # Merge remaining EIPs + for ((i=1; i<${#EIPS[@]}; i++)); do + EIP_BRANCH="eips/${FORK}/eip-${EIPS[$i]}" + echo "Merging ${EIP_BRANCH}..." + + if ! git merge "${EIP_BRANCH}" -m "Merge ${EIP_BRANCH} into ${DEVNET_BRANCH}"; then + echo "Error: Merge conflict occurred while merging ${EIP_BRANCH}" + git merge --abort || true + exit 1 + fi + done + + echo "All EIP branches merged successfully" + + - name: Push devnet branch + shell: bash + env: + DEVNET_NAME: ${{ inputs.devnet_name }} + REMOTE: origin + run: | + DEVNET_BRANCH="devnets/${DEVNET_NAME}" + + echo "Force pushing ${DEVNET_BRANCH} to ${REMOTE}" + git push --force-with-lease "${REMOTE}" "${DEVNET_BRANCH}" diff --git a/.github/actions/rebase-eip-branch/action.yaml b/.github/actions/rebase-eip-branch/action.yaml new file mode 100644 index 0000000000..37bbec6946 --- /dev/null +++ b/.github/actions/rebase-eip-branch/action.yaml @@ -0,0 +1,71 @@ +name: Rebase EIP Branch +description: Rebases an EIP branch onto a fork branch +inputs: + fork: + description: 'Fork name (e.g., amsterdam)' + required: true + eip_number: + description: 'EIP number' + required: true +runs: + using: "composite" + steps: + - name: Configure Git + shell: bash + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Rebase EIP branch (create if missing) + shell: bash + env: + FORK: ${{ inputs.fork }} + EIP_NUMBER: ${{ inputs.eip_number }} + REMOTE: origin + run: | + set -euo pipefail + + echo "FORK=$FORK" + echo "EIP_NUMBER=$EIP_NUMBER" + EIP_BRANCH="eips/${FORK}/eip-${EIP_NUMBER}" + FORK_BRANCH="forks/${FORK}" + + git fetch "${REMOTE}" --prune + + # Sanity: fork base must exist + if ! git show-ref --verify --quiet "refs/remotes/${REMOTE}/${FORK_BRANCH}"; then + echo "Error: Base fork branch ${REMOTE}/${FORK_BRANCH} does not exist" + exit 1 + fi + + # Create local branch from remote if it exists, else create from fork base + if git show-ref --verify --quiet "refs/remotes/${REMOTE}/${EIP_BRANCH}"; then + echo "Checking out existing ${EIP_BRANCH} (tracking ${REMOTE})" + git checkout -B "${EIP_BRANCH}" "${REMOTE}/${EIP_BRANCH}" + else + echo "Branch ${EIP_BRANCH} does not exist on ${REMOTE}; creating from ${REMOTE}/${FORK_BRANCH}" + git checkout -B "${EIP_BRANCH}" "${REMOTE}/${FORK_BRANCH}" + + # First push creates the remote branch (no force needed) + git push -u "${REMOTE}" "${EIP_BRANCH}" + fi + + echo "Rebasing ${EIP_BRANCH} onto ${REMOTE}/${FORK_BRANCH}" + if ! 
git rebase "${REMOTE}/${FORK_BRANCH}"; then + echo "Error: Rebase conflict occurred while rebasing ${EIP_BRANCH} onto ${FORK_BRANCH}" + git rebase --abort || true + exit 1 + fi + + echo "Rebase successful" + + - name: Push rebased branch + shell: bash + env: + FORK: ${{ inputs.fork }} + EIP_NUMBER: ${{ inputs.eip_number }} + REMOTE: origin + run: | + EIP_BRANCH="eips/${FORK}/eip-${EIP_NUMBER}" + echo "Force pushing ${EIP_BRANCH} to ${REMOTE}" + git push --force-with-lease "${REMOTE}" "${EIP_BRANCH}" diff --git a/.github/workflows/eip-rebase.yaml b/.github/workflows/eip-rebase.yaml new file mode 100644 index 0000000000..01c486a25c --- /dev/null +++ b/.github/workflows/eip-rebase.yaml @@ -0,0 +1,36 @@ +name: EIP Rebase Onto Fork Branch + +on: + workflow_dispatch: + inputs: + fork: + description: 'Fork name (e.g., amsterdam)' + required: true + type: string + eip_number: + description: 'EIP number' + required: true + type: string + +concurrency: + group: ${{ github.workflow }}-${{ github.event.inputs.fork }}-${{ github.event.inputs.eip_number }} + cancel-in-progress: false + +permissions: + contents: write + +jobs: + rebase-eip: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 + with: + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Rebase EIP branch onto fork + uses: ./.github/actions/rebase-eip-branch + with: + fork: ${{ github.event.inputs.fork }} + eip_number: ${{ github.event.inputs.eip_number }} diff --git a/.github/workflows/update-devnet-branch.yaml b/.github/workflows/update-devnet-branch.yaml new file mode 100644 index 0000000000..73f2229252 --- /dev/null +++ b/.github/workflows/update-devnet-branch.yaml @@ -0,0 +1,44 @@ +name: Update Devnet Branch + +on: + workflow_dispatch: + inputs: + fork: + description: 'Fork name (e.g., amsterdam)' + required: true + type: string + default: 'amsterdam' + devnet_name: + description: 'Devnet name (e.g., amsterdam/1, bal/2, eip-1234/3)' + required: true + type: string + default: 'bal/2' + eip_numbers: + description: 'Comma-separated list of EIP numbers (e.g., 1234,2345,3456)' + required: true + type: string + default: '8024,7843,7708,7778' + +concurrency: + group: ${{ github.workflow }}-${{ github.event.inputs.fork || 'amsterdam' }}-${{ github.event.inputs.devnet_name || github.ref }} + cancel-in-progress: false + +permissions: + contents: write + +jobs: + update-devnet-branch: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 + with: + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Merge EIP branches into devnet + uses: ./.github/actions/merge-eip-branches + with: + fork: ${{ github.event.inputs.fork }} + devnet_name: ${{ github.event.inputs.devnet_name }} + eip_numbers: ${{ github.event.inputs.eip_numbers }} From f1ba2b1d7f8fd88d1f608e95c5d940527f38c4bd Mon Sep 17 00:00:00 2001 From: felipe Date: Wed, 21 Jan 2026 14:40:11 -0700 Subject: [PATCH 091/154] feat(test): port static context static tests to python (#1960) * feat(test): port static context static tests to python Port STATICCALL tests with zero and non-zero value to precompiles * feat(test): split into legacy test and high-level API test + parametrize - Inspired by comment: https://github.com/ethereum/execution-specs/pull/1960#discussion_r2656834142 - Split the unreadable bytecode test from the comment where we can parametrize with all precompiles as well as with call_value = [0 and nonzero]. 
* chore: parametrize CREATE2 from Constantinople (where introduced) * refactor: address comments from PR #1960 * feat(tests): Add BAL expectations for static call tests >=Amsterdam * refactor(test): Update test name, add to BAL test_cases.md * chore: convert to state_test --------- Co-authored-by: Mario Vega --- docs/CHANGELOG.md | 1 + .../pytest_commands/plugins/forks/forks.py | 52 +- .../test_cases.md | 4 + tests/byzantium/eip214_staticcall/__init__.py | 7 + tests/byzantium/eip214_staticcall/spec.py | 17 + .../eip214_staticcall/test_staticcall.py | 672 ++++++++++++++++++ ...ueToPrecompileFromCalledContractFiller.yml | 197 ----- ...ompileFromContractInitializationFiller.yml | 207 ------ ...ValueToPrecompileFromTransactionFiller.yml | 176 ----- ...ueToPrecompileFromCalledContractFiller.yml | 234 ------ ...ompileFromContractInitializationFiller.yml | 226 ------ ...ValueToPrecompileFromTransactionFiller.yml | 216 ------ ...StaticcallForPrecompilesIssue683Filler.yml | 47 -- 13 files changed, 752 insertions(+), 1304 deletions(-) create mode 100644 tests/byzantium/eip214_staticcall/__init__.py create mode 100644 tests/byzantium/eip214_staticcall/spec.py create mode 100644 tests/byzantium/eip214_staticcall/test_staticcall.py delete mode 100644 tests/static/state_tests/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromCalledContractFiller.yml delete mode 100644 tests/static/state_tests/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromContractInitializationFiller.yml delete mode 100644 tests/static/state_tests/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromTransactionFiller.yml delete mode 100644 tests/static/state_tests/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromCalledContractFiller.yml delete mode 100644 tests/static/state_tests/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromContractInitializationFiller.yml delete mode 100644 tests/static/state_tests/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromTransactionFiller.yml delete mode 100644 tests/static/state_tests/stStaticFlagEnabled/StaticcallForPrecompilesIssue683Filler.yml diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index a703a9b485..3f1e21902e 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -38,6 +38,7 @@ Test fixtures for use by clients are available for each release on the [Github r - 🔀 Relabel `@pytest.mark.repricing` markers in benchmark tests to reflect configurations requested for gas repricing analysis ([#1971](https://github.com/ethereum/execution-specs/pull/1971)). - ✨ New EIP-7702 test cases added ([#1974](https://github.com/ethereum/execution-specs/pull/1974)). - ✨ Add missing benchmark configurations / opcode to benchmark tests for repricing analysis([#2006](https://github.com/ethereum/execution-specs/pull/2006)). +- ✨ Port STATICCALL to CALL tests with zero and non-zero value transfer from `tests/static`, extending coverage with `pytest.mark.with_all_precompiles` ([#1960](https://github.com/ethereum/execution-specs/pull/1960)). 
## [v5.4.0](https://github.com/ethereum/execution-spec-tests/releases/tag/v5.4.0) - 2025-12-07 diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py index faabc10500..197e9f595d 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py @@ -1123,6 +1123,39 @@ def pytest_generate_tests(metafunc: pytest.Metafunc) -> None: parametrize_fork(metafunc, pytest_params) +def get_param_level_min_valid_fork(metafunc: Metafunc) -> Fork | None: + """ + Extract the minimum valid fork from param-level valid_from markers. + + Returns the earliest fork from any valid_from marker inside pytest.param, + or None if no such markers exist. + """ + min_fork: Fork | None = None + + for marker in metafunc.definition.iter_markers("parametrize"): + if len(marker.args) < 2: + continue + + for value in marker.args[1]: + if not isinstance(value, ParameterSet) or not value.marks: + continue + + for mark in value.marks: + mark_obj = mark.mark if hasattr(mark, "mark") else mark + if mark_obj.name == "valid_from" and mark_obj.args: + fork_name = mark_obj.args[0] + try: + for fork in ALL_FORKS: + if fork.name() == fork_name: + if min_fork is None or fork < min_fork: + min_fork = fork + break + except (ValueError, InvalidForkError): + pass + + return min_fork + + def add_fork_covariant_parameters( metafunc: Metafunc, fork_parametrizers: List[ForkParametrizer] ) -> None: @@ -1130,7 +1163,24 @@ def add_fork_covariant_parameters( Iterate over the fork covariant descriptors and add their values to the test function. """ - # Process all covariant decorators uniformly + # Check if any covariant markers are present + has_covariant_markers = any( + list(metafunc.definition.iter_markers(cd.marker_name)) + for cd in fork_covariant_decorators + ) or any( + marker.name == "parametrize_by_fork" + for marker in metafunc.definition.iter_markers() + ) + + # Filter forks before any param-level valid_from to avoid covariant + # assertion errors + if has_covariant_markers: + param_min_fork = get_param_level_min_valid_fork(metafunc) + if param_min_fork: + fork_parametrizers[:] = [ + fp for fp in fork_parametrizers if fp.fork >= param_min_fork + ] + for covariant_descriptor in fork_covariant_decorators: if list( metafunc.definition.iter_markers(covariant_descriptor.marker_name) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 29d77ea46a..54d53064fe 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -115,6 +115,10 @@ | `test_selfdestruct_created_in_same_tx_with_revert` | Ensure BAL tracks selfdestruct with revert correctly (pre-Amsterdam test with BAL) | Contract created and selfdestructed in same tx with nested revert | BAL **MUST** track storage reads and balance changes for selfdestruct even with reverts | ✅ Completed | | `test_value_transfer_gas_calculation` | Ensure BAL correctly tracks OOG scenarios for CALL/CALLCODE/DELEGATECALL/STATICCALL (pre-Amsterdam test with BAL) | Nested calls with precise gas limits to test OOG behavior. For CALL with OOG: target account is read. 
For CALLCODE/DELEGATECALL/STATICCALL with OOG: target account **NOT** read (OOG before state access) | For CALL: target in BAL even with OOG. For CALLCODE/DELEGATECALL/STATICCALL: target **NOT** in BAL when OOG (state access deferred until after gas check) | ✅ Completed | | `test_bal_call_with_value_in_static_context` | Ensure BAL does NOT include target when CALL with value fails in static context | `static_caller` uses `STATICCALL` to call `caller`. `caller` attempts `CALL(target, value=1)` which must fail due to static context. Target is an empty account. | BAL **MUST NOT** include target because static context check (`is_static && value > 0`) must happen BEFORE any account access or BAL tracking. BAL **MUST** include `static_caller` with `storage_changes` (STATICCALL succeeded), `caller` with empty changes. | ✅ Completed | +| `test_staticcall_reentrant_call_to_precompile` | Ensure BAL captures STATICCALL reentry with CALL to precompile | Contract STATICCALLs itself. On reentry (CALLVALUE=0), attempts CALL to precompile with parametrized value. File: `tests/byzantium/eip214_staticcall/test_staticcall.py`. | call_value=0: target with `storage_changes` (slot 0=1), precompile with empty changes. call_value>0: target with `storage_reads` (slot 0), precompile **NOT** in BAL (reverted before accessed). | ✅ Completed | +| `test_staticcall_call_to_precompile` | Ensure BAL captures STATICCALL → CALL to precompile chain | Contract A STATICCALLs contract B. B attempts CALL to precompile. File: `tests/byzantium/eip214_staticcall/test_staticcall.py`. | call_value=0: contract_a with markers, contract_b empty (STATICCALLed), precompile empty. call_value>0: contract_a with `storage_reads` for slot 1, precompile **NOT** in BAL. | ✅ Completed | +| `test_staticcall_nested_call_to_precompile` | Ensure BAL captures nested CALL → STATICCALL → CALL to precompile | Contract B CALLs A. A STATICCALLs C. C attempts CALL to precompile. File: `tests/byzantium/eip214_staticcall/test_staticcall.py`. | call_value=0: all contracts with markers/empty, precompile empty. call_value>0: contract_a with `storage_reads` for slot 1, precompile **NOT** in BAL. | ✅ Completed | +| `test_staticcall_call_to_precompile_from_contract_init` | Ensure BAL captures STATICCALL to precompile during CREATE init | Contract A CREATEs contract. Init code STATICCALLs B which CALLs precompile. File: `tests/byzantium/eip214_staticcall/test_staticcall.py`. | call_value=0: contract_a with markers/nonce, created_contract with markers/nonce, contract_b empty, precompile empty. call_value>0: created_contract with `storage_reads` for slot 1, precompile **NOT** in BAL. | ✅ Completed | | `test_bal_4788_simple` | Ensure BAL captures beacon root storage writes during pre-execution system call | Block with 2 normal user transactions: Alice sends 10 wei to Charlie, Bob sends 10 wei to Charlie. At block start (pre-execution), `SYSTEM_ADDRESS` calls `BEACON_ROOTS_ADDRESS` to store parent beacon root | BAL **MUST** include at `block_access_index=0`: `BEACON_ROOTS_ADDRESS` with two `storage_changes` (timestamp slot and beacon root slot); `SYSTEM_ADDRESS` **MUST NOT** be included in BAL. At `block_access_index=1`: Alice with `nonce_changes`, Charlie with `balance_changes` (10 wei). At `block_access_index=2`: Bob with `nonce_changes`, Charlie with `balance_changes` (20 wei total). | ✅ Completed | | `test_bal_4788_empty_block` | Ensure BAL captures beacon root storage writes in empty block | Block with no transactions. 
At block start (pre-execution), `SYSTEM_ADDRESS` calls `BEACON_ROOTS_ADDRESS` to store parent beacon root | BAL **MUST** include at `block_access_index=0`: `BEACON_ROOTS_ADDRESS` with two `storage_changes` (timestamp slot and beacon root slot); `SYSTEM_ADDRESS` **MUST NOT** be included in BAL. No transaction-related BAL entries. | ✅ Completed | | `test_bal_4788_query` | Ensure BAL captures storage reads when querying beacon root (valid and invalid queries) with optional value transfer | Parameterized test: Block 1 stores beacon root at timestamp 12. Block 2 queries with three timestamp scenarios (valid=12, invalid non-zero=42, invalid zero=0) and value (0 or 100 wei). Valid query (timestamp=12): reads both timestamp and root slots, writes returned value. If value > 0, beacon root contract receives balance. Invalid query with non-zero timestamp (timestamp=42): reads only timestamp slot before reverting, query contract has implicit SLOAD recorded (SSTORE reverts), no value transferred. Invalid query with zero timestamp (timestamp=0): reverts immediately without any storage access, query contract has implicit SLOAD recorded, no value transferred. | Block 1 BAL: System call writes. Block 2 BAL **MUST** include at `block_access_index=0`: System call writes for block 2. Valid case (timestamp=12) at `block_access_index=1`: `BEACON_ROOTS_ADDRESS` with `storage_reads` [timestamp_slot, root_slot] and `balance_changes` if value > 0, query contract with `storage_changes`. Invalid non-zero case (timestamp=42) at `block_access_index=1`: `BEACON_ROOTS_ADDRESS` with `storage_reads` [timestamp_slot only] and NO `balance_changes` (reverted), query contract with `storage_reads` [0] and NO `storage_changes`. Invalid zero case (timestamp=0) at `block_access_index=1`: `BEACON_ROOTS_ADDRESS` with NO `storage_reads` (reverts before access) and NO `balance_changes`, query contract with `storage_reads` [0] and NO `storage_changes`. | ✅ Completed | diff --git a/tests/byzantium/eip214_staticcall/__init__.py b/tests/byzantium/eip214_staticcall/__init__.py new file mode 100644 index 0000000000..dd2092814a --- /dev/null +++ b/tests/byzantium/eip214_staticcall/__init__.py @@ -0,0 +1,7 @@ +""" +Test cases for EIP-214: STATICCALL opcode. + +EIP-214 introduced the STATICCALL opcode which creates a read-only call +context. Any state-modifying operations (including CALL with non-zero +value) within a STATICCALL context will cause the call to fail. +""" diff --git a/tests/byzantium/eip214_staticcall/spec.py b/tests/byzantium/eip214_staticcall/spec.py new file mode 100644 index 0000000000..497a6c9f4d --- /dev/null +++ b/tests/byzantium/eip214_staticcall/spec.py @@ -0,0 +1,17 @@ +"""Defines EIP-214 specification reference.""" + +from dataclasses import dataclass + + +@dataclass(frozen=True) +class ReferenceSpec: + """Defines the reference spec version and git path.""" + + git_path: str + version: str + + +ref_spec_214 = ReferenceSpec( + git_path="EIPS/eip-214.md", + version="009d0e1ce76b2c171c34bacdb2f13d606c9918b0", +) diff --git a/tests/byzantium/eip214_staticcall/test_staticcall.py b/tests/byzantium/eip214_staticcall/test_staticcall.py new file mode 100644 index 0000000000..d3926bcf71 --- /dev/null +++ b/tests/byzantium/eip214_staticcall/test_staticcall.py @@ -0,0 +1,672 @@ +""" +Tests for EIP-214 STATICCALL opcode behavior. + +STATICCALL creates a read-only call context where state-modifying operations +are forbidden. This includes CALL with non-zero value to any address. 
+""" + +import pytest +from execution_testing import ( + Account, + Address, + Alloc, + BalAccountExpectation, + BalBalanceChange, + BalNonceChange, + BalStorageChange, + BalStorageSlot, + BlockAccessListExpectation, + Conditional, + Fork, + Op, + StateTestFiller, + Transaction, + compute_create_address, +) + +from .spec import ref_spec_214 + +REFERENCE_SPEC_GIT_PATH = ref_spec_214.git_path +REFERENCE_SPEC_VERSION = ref_spec_214.version + + +def bal_marker_storage_changes( + marker: int, staticcall_result: int +) -> list[BalStorageSlot]: + """ + Build BAL storage changes for the common pattern of marker slots. + + Most tests write to slots 0, 1, 2 where: + - slot 0: marker value + - slot 1: STATICCALL result (0 or 1) + - slot 2: marker value + """ + return [ + BalStorageSlot( + slot=0, + slot_changes=[ + BalStorageChange(block_access_index=1, post_value=marker) + ], + ), + BalStorageSlot( + slot=1, + slot_changes=[ + BalStorageChange( + block_access_index=1, post_value=staticcall_result + ) + ], + ), + BalStorageSlot( + slot=2, + slot_changes=[ + BalStorageChange(block_access_index=1, post_value=marker) + ], + ), + ] + + +def bal_expectation_for_contract_with_markers( + marker: int, + staticcall_result: int, + balance_change: int | None = None, + initial_balance: int = 0, +) -> BalAccountExpectation: + """ + Build BAL expectation for a contract that writes marker storage slots. + + Args: + marker: The marker value written to slots 0 and 2 + staticcall_result: The value written to slot 1 (STATICCALL result) + balance_change: If provided, include a balance change + initial_balance: The initial balance (for computing post_balance) + + """ + return BalAccountExpectation( + storage_changes=bal_marker_storage_changes(marker, staticcall_result), + balance_changes=( + [ + BalBalanceChange( + block_access_index=1, + post_balance=initial_balance + balance_change, + ) + ] + if balance_change is not None + else [] + ), + ) + + +@pytest.mark.with_all_precompiles +@pytest.mark.parametrize( + "call_value", [0, 2], ids=["zero_value", "nonzero_value"] +) +@pytest.mark.ported_from( + "https://github.com/ethereum/tests/blob/v13.3/src/GeneralStateTestsFiller/" + "stStaticFlagEnabled/StaticcallForPrecompilesIssue683Filler.yml" +) +@pytest.mark.valid_from("Byzantium") +def test_staticcall_reentrant_call_to_precompile( + pre: Alloc, + state_test: StateTestFiller, + precompile: Address, + call_value: int, + fork: Fork, +) -> None: + """ + Test CALL to precompile inside STATICCALL with zero and non-zero value. + + Regression test for ethereum/tests#683. + Source: https://github.com/ethereum/execution-specs/pull/1960#discussion_r2656834142 + + A single contract STATICCALLs itself. On reentry (detected via CALLVALUE=0, + since STATICCALL doesn't forward value), it attempts CALL to a precompile. 
+ + - call_value=0: CALL succeeds in static context → STATICCALL returns 1 + - call_value>0: CALL violates static context, reverts frame → STATICCALL + returns 0 + """ + alice = pre.fund_eoa() + + # Contract that STATICCALLs itself on reentry (CALLVALUE=0), + # attempts CALL to precompile + target_code = Conditional( + # CALLVALUE=0 indicates we're inside the STATICCALL (reentry) + condition=Op.ISZERO(Op.CALLVALUE), + # try CALL with parametrized value (fails if value > 0) + if_true=Op.CALL(address=precompile, value=call_value), + # STATICCALL to self, store result (0=fail, 1=success) + if_false=Op.SSTORE(0, Op.STATICCALL(address=Op.ADDRESS)), + ) + + target_balance = 1000 + target = pre.deploy_contract(code=target_code, balance=target_balance) + + tx_value = 100 + tx = Transaction( + sender=alice, + to=target, + gas_limit=1_000_000, + value=tx_value, + protected=True, + ) + + bal_expectation = None + if fork.header_bal_hash_required(): + # Target contract always receives tx value + target_balance_changes = [ + BalBalanceChange( + block_access_index=1, post_balance=target_balance + tx_value + ) + ] + + # call_value > 0: SSTORE(0, 0) is a read; call_value == 0: real change + account_expectations: dict[Address, BalAccountExpectation | None] = { + target: ( + BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0, + slot_changes=[ + BalStorageChange( + block_access_index=1, post_value=1 + ) + ], + ), + ], + balance_changes=target_balance_changes, + ) + if call_value == 0 + else BalAccountExpectation( + storage_reads=[0], + balance_changes=target_balance_changes, + ) + ), + } + + if call_value == 0: + account_expectations[precompile] = BalAccountExpectation.empty() + else: + account_expectations[precompile] = None # reverted before accessed + + bal_expectation = BlockAccessListExpectation( + account_expectations=account_expectations + ) + + state_test( + pre=pre, + tx=tx, + expected_block_access_list=bal_expectation, + post={ + target: Account( + balance=target_balance + tx_value, + storage={0: 1 if call_value == 0 else 0}, + ), + }, + ) + + +@pytest.mark.with_all_precompiles +@pytest.mark.parametrize( + "call_value", [0, 2], ids=["zero_value", "nonzero_value"] +) +@pytest.mark.ported_from( + [ + "https://github.com/ethereum/tests/blob/v13.3/src/GeneralStateTestsFiller/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromTransactionFiller.yml", + "https://github.com/ethereum/tests/blob/v13.3/src/GeneralStateTestsFiller/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromTransactionFiller.yml", + ], +) +@pytest.mark.valid_from("Byzantium") +def test_staticcall_call_to_precompile( + pre: Alloc, + state_test: StateTestFiller, + precompile: Address, + call_value: int, + fork: Fork, +) -> None: + """ + Test CALL to precompile inside STATICCALL with zero and non-zero value. + + Contract A STATICCALLs contract B. Contract B attempts to CALL precompile. + With value = 0, this succeeds. With value > 0, this fails (static context). 
+ """ + alice = pre.fund_eoa() + + initial_contract_balance = 1000 + marker = 0xFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEED + + # Contract B: attempts CALL to precompile with the parametrized value + contract_b = pre.deploy_contract( + code=Op.CALL(gas=100_000, address=precompile, value=call_value), + balance=initial_contract_balance, + ) + + # Contract A: STATICCALLs contract B and stores the result + contract_a = pre.deploy_contract( + code=( + Op.SSTORE(0, marker) + + Op.SSTORE(1, Op.STATICCALL(gas=200_000, address=contract_b)) + + Op.SSTORE(2, marker) + ), + balance=initial_contract_balance, + ) + + tx_value = 100 + staticcall_result = 1 if call_value == 0 else 0 + + bal_expectation = None + if fork.header_bal_hash_required(): + contract_a_balance_changes = [ + BalBalanceChange( + block_access_index=1, + post_balance=initial_contract_balance + tx_value, + ) + ] + + # slot 1 read when call_value > 0 + account_expectations: dict[Address, BalAccountExpectation | None] = { + contract_a: ( + bal_expectation_for_contract_with_markers( + marker=marker, + staticcall_result=staticcall_result, + balance_change=tx_value, + initial_balance=initial_contract_balance, + ) + if call_value == 0 + else BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0, + slot_changes=[ + BalStorageChange( + block_access_index=1, post_value=marker + ) + ], + ), + BalStorageSlot( + slot=2, + slot_changes=[ + BalStorageChange( + block_access_index=1, post_value=marker + ) + ], + ), + ], + storage_reads=[1], + balance_changes=contract_a_balance_changes, + ) + ), + contract_b: BalAccountExpectation.empty(), # STATICCALLed + } + + if call_value == 0: + account_expectations[precompile] = BalAccountExpectation.empty() + else: + account_expectations[precompile] = None # reverted before accessed + + bal_expectation = BlockAccessListExpectation( + account_expectations=account_expectations + ) + + state_test( + pre=pre, + tx=Transaction( + sender=alice, + to=contract_a, + gas_limit=500_000, + value=tx_value, + protected=True, + ), + expected_block_access_list=bal_expectation, + post={ + contract_a: Account( + balance=initial_contract_balance + tx_value, + storage={ + 0: marker, + 1: staticcall_result, + 2: marker, + }, + ), + contract_b: Account(balance=initial_contract_balance), + }, + ) + + +@pytest.mark.with_all_precompiles +@pytest.mark.parametrize( + "call_value", [0, 2], ids=["zero_value", "nonzero_value"] +) +@pytest.mark.ported_from( + [ + "https://github.com/ethereum/tests/blob/v13.3/src/GeneralStateTestsFiller/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromCalledContractFiller.yml", + "https://github.com/ethereum/tests/blob/v13.3/src/GeneralStateTestsFiller/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromCalledContractFiller.yml", + ], +) +@pytest.mark.valid_from("Byzantium") +def test_staticcall_nested_call_to_precompile( + pre: Alloc, + state_test: StateTestFiller, + precompile: Address, + call_value: int, + fork: Fork, +) -> None: + """ + Test STATICCALL behavior with an extra call depth layer. + + Contract B (target) receives tx and CALLs contract A. + Contract A STATICCALLs contract C. + Contract C attempts to CALL the precompile. + With value = 0, this succeeds. With value > 0, this fails (static context). 
+ """ + alice = pre.fund_eoa() + + initial_contract_balance = 1000 + marker = 0xFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEED + + # Contract C: attempts CALL to precompile with the parametrized value + contract_c = pre.deploy_contract( + code=Op.CALL(gas=100_000, address=precompile, value=call_value), + balance=initial_contract_balance, + ) + + # Contract A: STATICCALLs contract C, stores markers and result + contract_a = pre.deploy_contract( + code=( + Op.SSTORE(0, marker) + + Op.SSTORE(1, Op.STATICCALL(gas=200_000, address=contract_c)) + + Op.SSTORE(2, marker) + ), + balance=initial_contract_balance, + ) + + # Contract B (target): CALLs contract A, stores markers and result + contract_b = pre.deploy_contract( + code=( + Op.SSTORE(0, marker) + + Op.SSTORE(1, Op.CALL(gas=300_000, address=contract_a)) + + Op.SSTORE(2, marker) + ), + balance=initial_contract_balance, + ) + + tx_value = 100 + staticcall_result = 1 if call_value == 0 else 0 + + bal_expectation = None + if fork.header_bal_hash_required(): + # slot 1 read when call_value > 0 + account_expectations: dict[Address, BalAccountExpectation | None] = { + contract_a: ( + bal_expectation_for_contract_with_markers( + marker=marker, + staticcall_result=staticcall_result, + ) + if call_value == 0 + else BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0, + slot_changes=[ + BalStorageChange( + block_access_index=1, post_value=marker + ) + ], + ), + BalStorageSlot( + slot=2, + slot_changes=[ + BalStorageChange( + block_access_index=1, post_value=marker + ) + ], + ), + ], + storage_reads=[1], + ) + ), + contract_b: bal_expectation_for_contract_with_markers( + marker=marker, + staticcall_result=1, # CALL to A always succeeds + balance_change=tx_value, + initial_balance=initial_contract_balance, + ), + contract_c: BalAccountExpectation.empty(), # STATICCALLed + } + + if call_value == 0: + account_expectations[precompile] = BalAccountExpectation.empty() + else: + account_expectations[precompile] = None # reverted before accessed + + bal_expectation = BlockAccessListExpectation( + account_expectations=account_expectations + ) + + state_test( + pre=pre, + tx=Transaction( + sender=alice, + to=contract_b, + gas_limit=500_000, + value=tx_value, + protected=True, + ), + expected_block_access_list=bal_expectation, + post={ + contract_a: Account( + balance=initial_contract_balance, + storage={ + 0: marker, + # only succeeds if call_value == 0 + 1: staticcall_result, + 2: marker, + }, + ), + contract_b: Account( + balance=initial_contract_balance + tx_value, + storage={ + 0: marker, + 1: 1, # CALL to A always succeeds + 2: marker, + }, + ), + contract_c: Account(balance=initial_contract_balance), + }, + ) + + +@pytest.mark.with_all_precompiles +@pytest.mark.parametrize( + "call_value", [0, 2], ids=["zero_value", "nonzero_value"] +) +@pytest.mark.ported_from( + [ + "https://github.com/ethereum/tests/blob/v13.3/src/GeneralStateTestsFiller/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromContractInitializationFiller.yml", + "https://github.com/ethereum/tests/blob/v13.3/src/GeneralStateTestsFiller/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromContractInitializationFiller.yml", + ], +) +@pytest.mark.parametrize( + "create_opcode", + [ + pytest.param(Op.CREATE, marks=pytest.mark.valid_from("Byzantium")), + pytest.param( + Op.CREATE2, marks=pytest.mark.valid_from("Constantinople") + ), + ], +) +def test_staticcall_call_to_precompile_from_contract_init( + pre: Alloc, + state_test: StateTestFiller, + precompile: 
Address, + call_value: int, + create_opcode: Op, + fork: Fork, +) -> None: + """ + Test STATICCALL behavior during contract initialization (CREATE). + + Contract A CREATEs a new contract whose init code STATICCALLs contract B. + Contract B attempts to CALL the precompile. + With value = 0, this succeeds. With value > 0, this fails in static + context. + """ + alice = pre.fund_eoa() + marker = 0xFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEEDFEED + + contract_initial_balance = 1000 + + # Contract B: attempts CALL to precompile with the parametrized value + contract_b = pre.deploy_contract( + code=Op.CALL(gas=100_000, address=precompile, value=call_value), + balance=contract_initial_balance, + ) + + # Init code: stores markers and STATICCALL result during initialization + # Note: storage written during init but no return means the created + # contract will have empty code. + initcode = ( + Op.SSTORE(0, marker) + + Op.SSTORE(1, Op.STATICCALL(gas=200_000, address=contract_b)) + + Op.SSTORE(2, marker) + ) + + # Contract A: CREATEs new contract using init_code from calldata + contract_a = pre.deploy_contract( + code=( + Op.SSTORE(0, marker) + + Op.CALLDATACOPY(0, 0, Op.CALLDATASIZE) + + Op.SSTORE( + 1, + create_opcode(value=0, offset=0, size=Op.CALLDATASIZE), + ) + + Op.SSTORE(2, marker) + ), + balance=contract_initial_balance, + ) + created_contract = compute_create_address( + nonce=1, + address=contract_a, + opcode=create_opcode, + initcode=initcode, + ) + + tx_value = 100 + staticcall_result = 1 if call_value == 0 else 0 + + bal_expectation = None + if fork.header_bal_hash_required(): + # stores created_contract in slot 1, receives tx value + account_expectations: dict[Address, BalAccountExpectation | None] = { + contract_a: BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0, + slot_changes=[ + BalStorageChange( + block_access_index=1, post_value=marker + ) + ], + ), + BalStorageSlot( + slot=1, + slot_changes=[ + BalStorageChange( + block_access_index=1, + post_value=created_contract, + ) + ], + ), + BalStorageSlot( + slot=2, + slot_changes=[ + BalStorageChange( + block_access_index=1, post_value=marker + ) + ], + ), + ], + balance_changes=[ + BalBalanceChange( + block_access_index=1, + post_balance=contract_initial_balance + tx_value, + ) + ], + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=2) + ], + ), + contract_b: BalAccountExpectation.empty(), # STATICCALLed + } + + # slot 1 read when call_value > 0 + created_nonce_changes = [ + BalNonceChange(block_access_index=1, post_nonce=1) + ] + account_expectations[created_contract] = ( + BalAccountExpectation( + storage_changes=bal_marker_storage_changes( + marker, staticcall_result + ), + nonce_changes=created_nonce_changes, + ) + if call_value == 0 + else BalAccountExpectation( + storage_changes=[ + BalStorageSlot( + slot=0, + slot_changes=[ + BalStorageChange( + block_access_index=1, post_value=marker + ) + ], + ), + BalStorageSlot( + slot=2, + slot_changes=[ + BalStorageChange( + block_access_index=1, post_value=marker + ) + ], + ), + ], + storage_reads=[1], + nonce_changes=created_nonce_changes, + ) + ) + + if call_value == 0: + account_expectations[precompile] = BalAccountExpectation.empty() + else: + account_expectations[precompile] = None # reverted before accessed + + bal_expectation = BlockAccessListExpectation( + account_expectations=account_expectations + ) + + state_test( + pre=pre, + tx=Transaction( + sender=alice, + to=contract_a, + gas_limit=4_000_000, + value=tx_value, + 
data=bytes(initcode), + protected=True, + ), + expected_block_access_list=bal_expectation, + post={ + contract_a: Account( + balance=contract_initial_balance + tx_value, + storage={0: marker, 1: created_contract, 2: marker}, + ), + created_contract: Account( + storage={ + 0: marker, + # only succeeds if call_value == 0 + 1: staticcall_result, + 2: marker, + }, + code=b"", + ), + contract_b: Account(balance=contract_initial_balance), + }, + ) diff --git a/tests/static/state_tests/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromCalledContractFiller.yml b/tests/static/state_tests/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromCalledContractFiller.yml deleted file mode 100644 index 5ae5aabbaf..0000000000 --- a/tests/static/state_tests/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromCalledContractFiller.yml +++ /dev/null @@ -1,197 +0,0 @@ ---- -CallWithNOTZeroValueToPrecompileFromCalledContract: - _info: - comment: | - Contract B calls contract A. - Contract A staticcalls contract CALL-00X. - Contract CALL-00X calls precompiled contracts with non zero value. - It should interrupt for each precompiled contract. - env: - currentCoinbase: '0xcafe000000000000000000000000000000000001' - currentDifficulty: '0x20000' - currentGasLimit: '10000000' - currentNumber: '1' - currentTimestamp: '1000' - expect: - - indexes: - data: - - !!int 0 - - !!int 1 - - !!int 2 - - !!int 3 - - !!int 4 - - !!int 5 - - !!int 6 - - !!int 7 - gas: !!int -1 - value: !!int -1 - network: - - ">=Cancun" - result: - : - balance: '1000' - storage: - '0x00': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - '0x02': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - : - balance: '1100' - storage: - '0x00': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - '0x01': '0x01' - '0x02': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - pre: - : - balance: '1000' - nonce: '0' - storage: - '0x00': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - '0x01': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - '0x02': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - code: | - { - [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - [ 0 ] (CALLDATALOAD 0) - [[ 0x01 ]] (CALL (GAS) 0 0 32 0 0) - [[ 0x02 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - } - : - balance: '1000' - nonce: '0' - storage: - '0x00': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - '0x01': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - '0x02': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - code: | - { - [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - [[ 0x01 ]] (STATICCALL (GAS) (CALLDATALOAD 0) 0 0 0 0) - [[ 0x02 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Recovery of ECDSA signature - [ 0x00 ] 0x18c547e4f7b0f325ad1e56f57e26c745b09a3e503d86e00e5255ff7f715d3d1c - [ 0x20 ] 28 - [ 0x40 ] 0x73b1693892219d736caba55bdb67216e485557ea6b6af75f37096c9aa6a5a75f - [ 0x60 ] 0xeeb940b1d03b21e36b0e47e79769f095fe2ab855bd91e3a38756b7d75a9c4549 - (CALL (GAS) 1 2 0 128 0x2000 32) - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Hash function SHA256 - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - (CALL (GAS) 2 2 0 32 0x2000 
32) - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Hash function RIPEMD160 - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - (CALL (GAS) 3 2 0 32 0x2000 32) - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Identity - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - (CALL (GAS) 4 2 0 32 0x2000 32) - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Modular exponentiation - [ 0x00 ] 1 - [ 0x20 ] 32 - [ 0x40 ] 32 - [ 0x60 ] 0x03fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc - [ 0x80 ] 0x2efffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc - [ 0xa0 ] 0x2f00000000000000000000000000000000000000000000000000000000000000 - (CALL (GAS) 5 2 0 0xa1 0x2000 32) - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Addition on elliptic curve alt_bn128 - [ 0x00 ] 0x0f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd2 - [ 0x20 ] 0x16da2f5cb6be7a0aa72c440c53c9bbdfec6c36c7d515536431b3a865468acbba - [ 0x40 ] 0x1de49a4b0233273bba8146af82042d004f2085ec982397db0d97da17204cc286 - [ 0x60 ] 0x0217327ffc463919bef80cc166d09c6172639d8589799928761bcd9f22c903d4 - (CALL (GAS) 6 2 0 128 0x2000 64) - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Scalar multiplication on elliptic curve alt_bn128 - [ 0x00 ] 0x0f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd2 - [ 0x20 ] 0x16da2f5cb6be7a0aa72c440c53c9bbdfec6c36c7d515536431b3a865468acbba - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000003 - (CALL (GAS) 7 2 0 96 0x2000 64) - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Checking a pairing equation on curve alt_bn128 - ;; proof.A + vk.A + negateG1(proof.Ap) + P2 - [ 0x0000 ] 0x1c76476f4def4bb94541d57ebba1193381ffa7aa76ada664dd31c16024c43f59 - [ 0x0020 ] 0x3034dd2920f673e204fee2811c678745fc819b55d3e9d294e45c9b03a76aef41 - [ 0x0040 ] 0x209dd15ebff5d46c4bd888e51a93cf99a7329636c63514396b4a452003a35bf7 - [ 0x0060 ] 0x04bf11ca01483bfa8b34b43561848d28905960114c8ac04049af4b6315a41678 - [ 0x0080 ] 0x2bb8324af6cfc93537a2ad1a445cfd0ca2a71acd7ac41fadbf933c2a51be344d - [ 0x00a0 ] 0x120a2a4cf30c1bf9845f20c6fe39e07ea2cce61f0c9bb048165fe5e4de877550 - [ 0x00c0 ] 0x111e129f1cf1097710d41c4ac70fcdfa5ba2023c6ff1cbeac322de49d1b6df7c - [ 0x00e0 ] 0x2032c61a830e3c17286de9462bf242fca2883585b93870a73853face6a6bf411 - [ 0x0100 ] 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2 - [ 0x0120 ] 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed - [ 0x0140 ] 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b - [ 0x0160 ] 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa - (CALL (GAS) 8 2 0 0x0180 0x2000 32) - } - : - balance: '1000000000000000000' - code: '' - nonce: '0' - storage: {} - transaction: - data: - - ':raw 0x000000000000000000000000' - - ':raw 0x000000000000000000000000' - - ':raw 0x000000000000000000000000' - - ':raw 0x000000000000000000000000' - - ':raw 0x000000000000000000000000' - - ':raw 0x000000000000000000000000' - - ':raw 0x000000000000000000000000' - - ':raw 0x000000000000000000000000' - gasLimit: - - '4000000' - gasPrice: '10' - nonce: '0' - secretKey: '' - to: '' - value: - - '100' diff --git a/tests/static/state_tests/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromContractInitializationFiller.yml 
b/tests/static/state_tests/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromContractInitializationFiller.yml deleted file mode 100644 index 8dc042d554..0000000000 --- a/tests/static/state_tests/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromContractInitializationFiller.yml +++ /dev/null @@ -1,207 +0,0 @@ ---- -CallWithNOTZeroValueToPrecompileFromContractInitialization: - _info: - comment: | - Contract A creates new contract. - New contract initialization code staticcalls contract CALL-00X. - Contract CALL-00X calls precompiled contracts with non zero value. - It should interrupt for each precompiled contract. - env: - currentCoinbase: '0xcafe000000000000000000000000000000000001' - currentDifficulty: '0x20000' - currentGasLimit: '10000000' - currentNumber: '1' - currentTimestamp: '1000' - expect: - - indexes: - data: - - !!int 0 - - !!int 1 - - !!int 2 - - !!int 3 - - !!int 4 - - !!int 5 - - !!int 6 - - !!int 7 - gas: !!int -1 - value: !!int -1 - network: - - ">=Cancun" - result: - a000000000000000000000000000000000000000: - balance: '1100' - storage: - '0x00': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - '0x01': '0xf7b0c3906cdfc0b8a638d274d3bcbbd6318e9ac1' - '0x02': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - f7b0c3906cdfc0b8a638d274d3bcbbd6318e9ac1: - storage: - '0x00': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - '0x02': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - pre: - a000000000000000000000000000000000000000: - balance: '1000' - nonce: '1' - storage: - '0x00': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - '0x01': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - '0x02': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - code: | - { - [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - (CALLDATACOPY 0 0 (CALLDATASIZE)) - [[ 0x01 ]] (CREATE 0 0 (CALLDATASIZE)) - [[ 0x02 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - } - ca11001000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Recovery of ECDSA signature - [ 0x00 ] 0x18c547e4f7b0f325ad1e56f57e26c745b09a3e503d86e00e5255ff7f715d3d1c - [ 0x20 ] 28 - [ 0x40 ] 0x73b1693892219d736caba55bdb67216e485557ea6b6af75f37096c9aa6a5a75f - [ 0x60 ] 0xeeb940b1d03b21e36b0e47e79769f095fe2ab855bd91e3a38756b7d75a9c4549 - (CALL (GAS) 1 2 0 128 0x2000 32) - } - ca11002000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Hash function SHA256 - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - (CALL (GAS) 2 2 0 32 0x2000 32) - } - ca11003000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Hash function RIPEMD160 - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - (CALL (GAS) 3 2 0 32 0x2000 32) - } - ca11004000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Identity - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - (CALL (GAS) 4 2 0 32 0x2000 32) - } - ca11005000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Modular exponentiation - [ 0x00 ] 1 - [ 0x20 ] 32 - [ 0x40 ] 32 - [ 0x60 ] 0x03fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc - [ 0x80 ] 
0x2efffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc - [ 0xa0 ] 0x2f00000000000000000000000000000000000000000000000000000000000000 - (CALL (GAS) 5 2 0 0xa1 0x2000 32) - } - ca11006000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Addition on elliptic curve alt_bn128 - [ 0x00 ] 0x0f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd2 - [ 0x20 ] 0x16da2f5cb6be7a0aa72c440c53c9bbdfec6c36c7d515536431b3a865468acbba - [ 0x40 ] 0x1de49a4b0233273bba8146af82042d004f2085ec982397db0d97da17204cc286 - [ 0x60 ] 0x0217327ffc463919bef80cc166d09c6172639d8589799928761bcd9f22c903d4 - (CALL (GAS) 6 2 0 128 0x2000 64) - } - ca11007000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Scalar multiplication on elliptic curve alt_bn128 - [ 0x00 ] 0x0f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd2 - [ 0x20 ] 0x16da2f5cb6be7a0aa72c440c53c9bbdfec6c36c7d515536431b3a865468acbba - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000003 - (CALL (GAS) 7 2 0 96 0x2000 64) - } - ca11008000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Checking a pairing equation on curve alt_bn128 - ;; proof.A + vk.A + negateG1(proof.Ap) + P2 - [ 0x0000 ] 0x1c76476f4def4bb94541d57ebba1193381ffa7aa76ada664dd31c16024c43f59 - [ 0x0020 ] 0x3034dd2920f673e204fee2811c678745fc819b55d3e9d294e45c9b03a76aef41 - [ 0x0040 ] 0x209dd15ebff5d46c4bd888e51a93cf99a7329636c63514396b4a452003a35bf7 - [ 0x0060 ] 0x04bf11ca01483bfa8b34b43561848d28905960114c8ac04049af4b6315a41678 - [ 0x0080 ] 0x2bb8324af6cfc93537a2ad1a445cfd0ca2a71acd7ac41fadbf933c2a51be344d - [ 0x00a0 ] 0x120a2a4cf30c1bf9845f20c6fe39e07ea2cce61f0c9bb048165fe5e4de877550 - [ 0x00c0 ] 0x111e129f1cf1097710d41c4ac70fcdfa5ba2023c6ff1cbeac322de49d1b6df7c - [ 0x00e0 ] 0x2032c61a830e3c17286de9462bf242fca2883585b93870a73853face6a6bf411 - [ 0x0100 ] 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2 - [ 0x0120 ] 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed - [ 0x0140 ] 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b - [ 0x0160 ] 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa - (CALL (GAS) 8 2 0 0x0180 0x2000 32) - } - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - balance: '1000000000000000000' - code: '' - nonce: '0' - storage: {} - transaction: - data: - - | - { [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - [[ 0x01 ]] (STATICCALL (GAS) 0xca11001000000000000000000000000000000000 0 0 0 0) - [[ 0x02 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed } - - | - { [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - [[ 0x01 ]] (STATICCALL (GAS) 0xca11002000000000000000000000000000000000 0 0 0 0) - [[ 0x02 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed } - - | - { [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - [[ 0x01 ]] (STATICCALL (GAS) 0xca11003000000000000000000000000000000000 0 0 0 0) - [[ 0x02 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed } - - | - { [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - [[ 0x01 ]] (STATICCALL (GAS) 0xca11004000000000000000000000000000000000 0 0 0 0) - [[ 0x02 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed } - - | - { [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - 
[[ 0x01 ]] (STATICCALL (GAS) 0xca11005000000000000000000000000000000000 0 0 0 0) - [[ 0x02 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed } - - | - { [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - [[ 0x01 ]] (STATICCALL (GAS) 0xca11006000000000000000000000000000000000 0 0 0 0) - [[ 0x02 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed } - - | - { [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - [[ 0x01 ]] (STATICCALL (GAS) 0xca11007000000000000000000000000000000000 0 0 0 0) - [[ 0x02 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed } - - | - { [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - [[ 0x01 ]] (STATICCALL (GAS) 0xca11008000000000000000000000000000000000 0 0 0 0) - [[ 0x02 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed } - gasLimit: - - '4000000' - gasPrice: '10' - nonce: '0' - secretKey: '0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8' - to: 'a000000000000000000000000000000000000000' - value: - - '100' diff --git a/tests/static/state_tests/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromTransactionFiller.yml b/tests/static/state_tests/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromTransactionFiller.yml deleted file mode 100644 index b9e571025f..0000000000 --- a/tests/static/state_tests/stStaticFlagEnabled/CallWithNOTZeroValueToPrecompileFromTransactionFiller.yml +++ /dev/null @@ -1,176 +0,0 @@ ---- -CallWithNOTZeroValueToPrecompileFromTransaction: - _info: - comment: | - Contract A staticcalls contract CALL-00X. - Contract CALL-00X calls precompiled contracts with non zero value. - It should interrupt for each precompiled contract. 
- env: - currentCoinbase: '0xcafe000000000000000000000000000000000001' - currentDifficulty: '0x20000' - currentGasLimit: '10000000' - currentNumber: '1' - currentTimestamp: '1000' - expect: - - indexes: - data: - - !!int 0 - - !!int 1 - - !!int 2 - - !!int 3 - - !!int 4 - - !!int 5 - - !!int 6 - - !!int 7 - gas: !!int -1 - value: !!int -1 - network: - - ">=Cancun" - result: - : - balance: '1100' - storage: - '0x00': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - '0x02': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - pre: - : - balance: '1000' - nonce: '0' - storage: - '0x00': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - '0x01': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - '0x02': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - code: | - { - [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - [[ 0x01 ]] (STATICCALL (GAS) (CALLDATALOAD 0) 0 0 0 0) - [[ 0x02 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Recovery of ECDSA signature - [ 0x00 ] 0x18c547e4f7b0f325ad1e56f57e26c745b09a3e503d86e00e5255ff7f715d3d1c - [ 0x20 ] 28 - [ 0x40 ] 0x73b1693892219d736caba55bdb67216e485557ea6b6af75f37096c9aa6a5a75f - [ 0x60 ] 0xeeb940b1d03b21e36b0e47e79769f095fe2ab855bd91e3a38756b7d75a9c4549 - (CALL (GAS) 1 2 0 128 0x2000 32) - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Hash function SHA256 - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - (CALL (GAS) 2 2 0 32 0x2000 32) - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Hash function RIPEMD160 - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - (CALL (GAS) 3 2 0 32 0x2000 32) - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Identity - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - (CALL (GAS) 4 2 0 32 0x2000 32) - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Modular exponentiation - [ 0x00 ] 1 - [ 0x20 ] 32 - [ 0x40 ] 32 - [ 0x60 ] 0x03fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc - [ 0x80 ] 0x2efffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc - [ 0xa0 ] 0x2f00000000000000000000000000000000000000000000000000000000000000 - (CALL (GAS) 5 2 0 0xa1 0x2000 32) - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Addition on elliptic curve alt_bn128 - [ 0x00 ] 0x0f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd2 - [ 0x20 ] 0x16da2f5cb6be7a0aa72c440c53c9bbdfec6c36c7d515536431b3a865468acbba - [ 0x40 ] 0x1de49a4b0233273bba8146af82042d004f2085ec982397db0d97da17204cc286 - [ 0x60 ] 0x0217327ffc463919bef80cc166d09c6172639d8589799928761bcd9f22c903d4 - (CALL (GAS) 6 2 0 128 0x2000 64) - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Scalar multiplication on elliptic curve alt_bn128 - [ 0x00 ] 0x0f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd2 - [ 0x20 ] 0x16da2f5cb6be7a0aa72c440c53c9bbdfec6c36c7d515536431b3a865468acbba - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000003 - (CALL (GAS) 7 2 0 96 0x2000 64) - } - : - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Checking a pairing equation on curve alt_bn128 - ;; proof.A + vk.A + negateG1(proof.Ap) + P2 - [ 0x0000 ] 
0x1c76476f4def4bb94541d57ebba1193381ffa7aa76ada664dd31c16024c43f59 - [ 0x0020 ] 0x3034dd2920f673e204fee2811c678745fc819b55d3e9d294e45c9b03a76aef41 - [ 0x0040 ] 0x209dd15ebff5d46c4bd888e51a93cf99a7329636c63514396b4a452003a35bf7 - [ 0x0060 ] 0x04bf11ca01483bfa8b34b43561848d28905960114c8ac04049af4b6315a41678 - [ 0x0080 ] 0x2bb8324af6cfc93537a2ad1a445cfd0ca2a71acd7ac41fadbf933c2a51be344d - [ 0x00a0 ] 0x120a2a4cf30c1bf9845f20c6fe39e07ea2cce61f0c9bb048165fe5e4de877550 - [ 0x00c0 ] 0x111e129f1cf1097710d41c4ac70fcdfa5ba2023c6ff1cbeac322de49d1b6df7c - [ 0x00e0 ] 0x2032c61a830e3c17286de9462bf242fca2883585b93870a73853face6a6bf411 - [ 0x0100 ] 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2 - [ 0x0120 ] 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed - [ 0x0140 ] 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b - [ 0x0160 ] 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa - (CALL (GAS) 8 2 0 0x0180 0x2000 32) - } - : - balance: '1000000000000000000' - code: '' - nonce: '0' - storage: {} - transaction: - data: - - ':raw 0x000000000000000000000000' - - ':raw 0x000000000000000000000000' - - ':raw 0x000000000000000000000000' - - ':raw 0x000000000000000000000000' - - ':raw 0x000000000000000000000000' - - ':raw 0x000000000000000000000000' - - ':raw 0x000000000000000000000000' - - ':raw 0x000000000000000000000000' - gasLimit: - - '4000000' - gasPrice: '10' - nonce: '0' - secretKey: '' - to: '' - value: - - '100' diff --git a/tests/static/state_tests/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromCalledContractFiller.yml b/tests/static/state_tests/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromCalledContractFiller.yml deleted file mode 100644 index 0f85d3338f..0000000000 --- a/tests/static/state_tests/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromCalledContractFiller.yml +++ /dev/null @@ -1,234 +0,0 @@ ---- -CallWithZeroValueToPrecompileFromCalledContract: - _info: - comment: | - Contract C calls contract B. - Contract B staticcalls contract A. - Contract A calls precompiled contracts with 0 value. - It should execute successfully for each precompiled contract. 
- env: - currentCoinbase: '0xcafe000000000000000000000000000000000001' - currentDifficulty: '0x20000' - currentGasLimit: '10000000' - currentNumber: '1' - currentTimestamp: '1000' - expect: - - indexes: - data: !!int -1 - gas: !!int -1 - value: !!int -1 - network: - - ">=Cancun" - result: - a000000000000000000000000000000000000000: - balance: '1000' - storage: {} - c000000000000000000000000000000000000000: - balance: '1100' - storage: - '0x00': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - '0x01': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - b000000000000000000000000000000000000000: - balance: '1000' - storage: - '0x00': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - '0x01': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - # precompiled contract #1 - Recovery of ECDSA signature - '0x0a00': '0x01' - '0x0a01': '0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b' - '0x0a02': '0x01' - # precompiled contract #2 - Hash function SHA256 - '0x0a03': '0x01' - '0x0a04': '0x0ccccccccccccccccccccccccccccccccccccccccccccccccccc000000' - '0x0a05': '0x73f5062fb68ed2a1ec82ff8c73f9251bb9cf53a623bc93527e16bc5ae29dad74' - # precompiled contract #3 - Hash function RIPEMD160 - '0x0a06': '0x01' - '0x0a07': '0x0ccccccccccccccccccccccccccccccccccccccccccccccccccc000000' - '0x0a08': '0x14ef238cfa4075e9ede92f18b1566c1dd0b99aaa' - # precompiled contract #4 - Identity - '0x0a09': '0x01' - '0x0a10': '0x0ccccccccccccccccccccccccccccccccccccccccccccccccccc000000' - # precompiled contract #5 - Modular exponentiation - '0x0a11': '0x01' - '0x0a12': '0x01' - # precompiled contract #6 - Addition on elliptic curve alt_bn128 - '0x0a13': '0x01' - '0x0a14': '0x1f4d1d80177b1377743d1901f70d7389be7f7a35a35bfd234a8aaee615b88c49' - '0x0a15': '0x018683193ae021a2f8920fed186cde5d9b1365116865281ccf884c1f28b1df8f' - # precompiled contract #7 - Scalar multiplication on elliptic curve alt_bn128 - '0x0a16': '0x01' - '0x0a17': '0x1f4d1d80177b1377743d1901f70d7389be7f7a35a35bfd234a8aaee615b88c49' - '0x0a18': '0x018683193ae021a2f8920fed186cde5d9b1365116865281ccf884c1f28b1df8f' - # precompiled contract #8 - Checking a pairing equation on curve alt_bn128 - '0x0a19': '0x01' - '0x0a20': '0x01' - pre: - c000000000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: - '0x00': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - '0x01': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - code: | - { - [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - (CALL (GAS) 0xb000000000000000000000000000000000000000 0 0 0 0 0) - [[ 0x01 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - } - b000000000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: - '0x00': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - '0x01': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - code: | - { - [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - (STATICCALL (GAS) 0xa000000000000000000000000000000000000000 0 0 0x0a0000 0x012020) - [[ 0x01 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - ;; save results to store - [[ 0x0a00 ]] @0x0a0000 [[ 0x0a11 ]] @0x0a1100 - [[ 0x0a01 ]] @0x0a0100 [[ 0x0a12 ]] @0x0a1200 - [[ 0x0a02 ]] @0x0a0200 [[ 0x0a13 ]] @0x0a1300 - [[ 0x0a03 ]] @0x0a0300 [[ 0x0a14 ]] @0x0a1400 - [[ 0x0a04 ]] @0x0a0400 [[ 0x0a15 ]] @0x0a1500 - [[ 0x0a05 ]] @0x0a0500 
[[ 0x0a16 ]] @0x0a1600 - [[ 0x0a06 ]] @0x0a0600 [[ 0x0a17 ]] @0x0a1700 - [[ 0x0a07 ]] @0x0a0700 [[ 0x0a18 ]] @0x0a1800 - [[ 0x0a08 ]] @0x0a0800 [[ 0x0a19 ]] @0x0a1900 - [[ 0x0a09 ]] @0x0a0900 [[ 0x0a20 ]] @0x0a2000 - [[ 0x0a10 ]] @0x0a1000 - } - a000000000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Recovery of ECDSA signature - [ 0x00 ] 0x18c547e4f7b0f325ad1e56f57e26c745b09a3e503d86e00e5255ff7f715d3d1c - [ 0x20 ] 28 - [ 0x40 ] 0x73b1693892219d736caba55bdb67216e485557ea6b6af75f37096c9aa6a5a75f - [ 0x60 ] 0xeeb940b1d03b21e36b0e47e79769f095fe2ab855bd91e3a38756b7d75a9c4549 - ;; zero value - [ 0x0a0000 ] (CALL (GAS) 1 0 0 128 0x2000 32) - [ 0x0a0100 ] (MOD @0x2000 (EXP 2 160)) - [ 0x0a0200 ] (EQ (ORIGIN) @0x0a0100) - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x20 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x60 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Hash function SHA256 - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - ;; zero value - [ 0x0a0300 ] (CALL (GAS) 2 0 0 32 0x2000 32) - [ 0x0a0400 ] @0 - [ 0x0a0500 ] @0x2000 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Hash function RIPEMD160 - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - ;; zero value - [ 0x0a0600 ] (CALL (GAS) 3 0 0 32 0x2000 32) - [ 0x0a0700 ] @0 - [ 0x0a0800 ] @0x2000 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Identity - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - ;; zero value - [ 0x0a0900 ] (CALL (GAS) 4 0 0 32 0x2000 32) - [ 0x0a1000 ] @0x2000 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Modular exponentiation - [ 0x00 ] 1 - [ 0x20 ] 32 - [ 0x40 ] 32 - [ 0x60 ] 0x03fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc - [ 0x80 ] 0x2efffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc - [ 0xa0 ] 0x2f00000000000000000000000000000000000000000000000000000000000000 - ;; zero value - [ 0x0a1100 ] (CALL (GAS) 5 0 0 0xa1 0x2000 32) - [ 0x0a1200 ] @0x2000 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x20 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x60 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x80 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0xa0 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Addition on elliptic curve alt_bn128 - [ 0x00 ] 0x0f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd2 - [ 0x20 ] 0x16da2f5cb6be7a0aa72c440c53c9bbdfec6c36c7d515536431b3a865468acbba - [ 0x40 ] 0x1de49a4b0233273bba8146af82042d004f2085ec982397db0d97da17204cc286 - [ 0x60 ] 0x0217327ffc463919bef80cc166d09c6172639d8589799928761bcd9f22c903d4 - ;; zero value 
- [ 0x0a1300 ] (CALL (GAS) 6 0 0 128 0x2000 64) - [ 0x0a1400 ] @0x2000 - [ 0x0a1500 ] @0x2020 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x20 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x60 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2020 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Scalar multiplication on elliptic curve alt_bn128 - [ 0x00 ] 0x0f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd2 - [ 0x20 ] 0x16da2f5cb6be7a0aa72c440c53c9bbdfec6c36c7d515536431b3a865468acbba - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000003 - ;; zero value - [ 0x0a1600 ] (CALL (GAS) 7 0 0 96 0x2000 64) - [ 0x0a1700 ] @0x2000 - [ 0x0a1800 ] @0x2020 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x20 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2020 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Checking a pairing equation on curve alt_bn128 - ;; proof.A + vk.A + negateG1(proof.Ap) + P2 - [ 0x0000 ] 0x1c76476f4def4bb94541d57ebba1193381ffa7aa76ada664dd31c16024c43f59 - [ 0x0020 ] 0x3034dd2920f673e204fee2811c678745fc819b55d3e9d294e45c9b03a76aef41 - [ 0x0040 ] 0x209dd15ebff5d46c4bd888e51a93cf99a7329636c63514396b4a452003a35bf7 - [ 0x0060 ] 0x04bf11ca01483bfa8b34b43561848d28905960114c8ac04049af4b6315a41678 - [ 0x0080 ] 0x2bb8324af6cfc93537a2ad1a445cfd0ca2a71acd7ac41fadbf933c2a51be344d - [ 0x00a0 ] 0x120a2a4cf30c1bf9845f20c6fe39e07ea2cce61f0c9bb048165fe5e4de877550 - [ 0x00c0 ] 0x111e129f1cf1097710d41c4ac70fcdfa5ba2023c6ff1cbeac322de49d1b6df7c - [ 0x00e0 ] 0x2032c61a830e3c17286de9462bf242fca2883585b93870a73853face6a6bf411 - [ 0x0100 ] 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2 - [ 0x0120 ] 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed - [ 0x0140 ] 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b - [ 0x0160 ] 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa - ;; zero value - [ 0x0a1900 ] (CALL (GAS) 8 0 0 0x0180 0x2000 32) - [ 0x0a2000 ] @0x2000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; return all computed values to caller for checking - (RETURN 0x0a0000 0x012020) - } - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - balance: '1000000000000000000' - code: '' - nonce: '0' - storage: {} - transaction: - data: - - '' - gasLimit: - - '4000000' - gasPrice: '10' - nonce: '0' - secretKey: '0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8' - to: 'c000000000000000000000000000000000000000' - value: - - '100' diff --git a/tests/static/state_tests/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromContractInitializationFiller.yml b/tests/static/state_tests/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromContractInitializationFiller.yml deleted file mode 100644 index 9417a6ed6d..0000000000 --- a/tests/static/state_tests/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromContractInitializationFiller.yml +++ /dev/null @@ -1,226 +0,0 @@ ---- -CallWithZeroValueToPrecompileFromContractInitialization: - _info: - comment: 
| - Contract B creates new contract. - New contract initialization code staticcalls contract A. - Contract A calls precompiled contracts with 0 value. - It should execute successfully for each precompiled contract. - env: - currentCoinbase: '0xcafe000000000000000000000000000000000001' - currentDifficulty: '0x20000' - currentGasLimit: '10000000' - currentNumber: '1' - currentTimestamp: '1000' - expect: - - indexes: - data: - - !!int 0 - gas: !!int -1 - value: !!int -1 - network: - - ">=Cancun" - result: - b000000000000000000000000000000000000000: - balance: '1100' - storage: - '0x00': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - '0x01': '0xfd7776b1a634b0dc19301b174ccf30d4d24070a8' - '0x02': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - fd7776b1a634b0dc19301b174ccf30d4d24070a8: - storage: - '0x00': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - '0x01': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - # precompiled contract #1 - Recovery of ECDSA signature - '0x0a00': '0x01' - '0x0a01': '0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b' - '0x0a02': '0x01' - # precompiled contract #2 - Hash function SHA256 - '0x0a03': '0x01' - '0x0a04': '0x0ccccccccccccccccccccccccccccccccccccccccccccccccccc000000' - '0x0a05': '0x73f5062fb68ed2a1ec82ff8c73f9251bb9cf53a623bc93527e16bc5ae29dad74' - # precompiled contract #3 - Hash function RIPEMD160 - '0x0a06': '0x01' - '0x0a07': '0x0ccccccccccccccccccccccccccccccccccccccccccccccccccc000000' - '0x0a08': '0x14ef238cfa4075e9ede92f18b1566c1dd0b99aaa' - # precompiled contract #4 - Identity - '0x0a09': '0x01' - '0x0a10': '0x0ccccccccccccccccccccccccccccccccccccccccccccccccccc000000' - # precompiled contract #5 - Modular exponentiation - '0x0a11': '0x01' - '0x0a12': '0x01' - # precompiled contract #6 - Addition on elliptic curve alt_bn128 - '0x0a13': '0x01' - '0x0a14': '0x1f4d1d80177b1377743d1901f70d7389be7f7a35a35bfd234a8aaee615b88c49' - '0x0a15': '0x018683193ae021a2f8920fed186cde5d9b1365116865281ccf884c1f28b1df8f' - # precompiled contract #7 - Scalar multiplication on elliptic curve alt_bn128 - '0x0a16': '0x01' - '0x0a17': '0x1f4d1d80177b1377743d1901f70d7389be7f7a35a35bfd234a8aaee615b88c49' - '0x0a18': '0x018683193ae021a2f8920fed186cde5d9b1365116865281ccf884c1f28b1df8f' - # precompiled contract #8 - Checking a pairing equation on curve alt_bn128 - '0x0a19': '0x01' - '0x0a20': '0x01' - pre: - b000000000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: - '0x00': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - '0x01': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - code: | - { - [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - (CALLDATACOPY 0 0 (CALLDATASIZE)) - [[ 0x01 ]] (CREATE2 0 0 (CALLDATASIZE) 0x5a175a175a17) - [[ 0x02 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - } - a000000000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Recovery of ECDSA signature - [ 0x00 ] 0x18c547e4f7b0f325ad1e56f57e26c745b09a3e503d86e00e5255ff7f715d3d1c - [ 0x20 ] 28 - [ 0x40 ] 0x73b1693892219d736caba55bdb67216e485557ea6b6af75f37096c9aa6a5a75f - [ 0x60 ] 0xeeb940b1d03b21e36b0e47e79769f095fe2ab855bd91e3a38756b7d75a9c4549 - ;; zero value - [ 0x0a0000 ] (CALL (GAS) 1 0 0 128 0x2000 32) - [ 0x0a0100 ] (MOD @0x2000 (EXP 2 160)) - [ 0x0a0200 ] (EQ (ORIGIN) @0x0a0100) - [ 0x00 ] 
0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x20 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x60 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Hash function SHA256 - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - ;; zero value - [ 0x0a0300 ] (CALL (GAS) 2 0 0 32 0x2000 32) - [ 0x0a0400 ] @0 - [ 0x0a0500 ] @0x2000 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Hash function RIPEMD160 - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - ;; zero value - [ 0x0a0600 ] (CALL (GAS) 3 0 0 32 0x2000 32) - [ 0x0a0700 ] @0 - [ 0x0a0800 ] @0x2000 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Identity - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - ;; zero value - [ 0x0a0900 ] (CALL (GAS) 4 0 0 32 0x2000 32) - [ 0x0a1000 ] @0x2000 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Modular exponentiation - [ 0x00 ] 1 - [ 0x20 ] 32 - [ 0x40 ] 32 - [ 0x60 ] 0x03fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc - [ 0x80 ] 0x2efffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc - [ 0xa0 ] 0x2f00000000000000000000000000000000000000000000000000000000000000 - ;; zero value - [ 0x0a1100 ] (CALL (GAS) 5 0 0 0xa1 0x2000 32) - [ 0x0a1200 ] @0x2000 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x20 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x60 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x80 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0xa0 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Addition on elliptic curve alt_bn128 - [ 0x00 ] 0x0f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd2 - [ 0x20 ] 0x16da2f5cb6be7a0aa72c440c53c9bbdfec6c36c7d515536431b3a865468acbba - [ 0x40 ] 0x1de49a4b0233273bba8146af82042d004f2085ec982397db0d97da17204cc286 - [ 0x60 ] 0x0217327ffc463919bef80cc166d09c6172639d8589799928761bcd9f22c903d4 - ;; zero value - [ 0x0a1300 ] (CALL (GAS) 6 0 0 128 0x2000 64) - [ 0x0a1400 ] @0x2000 - [ 0x0a1500 ] @0x2020 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x20 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x60 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2020 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Scalar multiplication on elliptic curve alt_bn128 - [ 0x00 ] 0x0f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd2 - [ 0x20 ] 0x16da2f5cb6be7a0aa72c440c53c9bbdfec6c36c7d515536431b3a865468acbba - [ 
0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000003 - ;; zero value - [ 0x0a1600 ] (CALL (GAS) 7 0 0 96 0x2000 64) - [ 0x0a1700 ] @0x2000 - [ 0x0a1800 ] @0x2020 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x20 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2020 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Checking a pairing equation on curve alt_bn128 - ;; proof.A + vk.A + negateG1(proof.Ap) + P2 - [ 0x0000 ] 0x1c76476f4def4bb94541d57ebba1193381ffa7aa76ada664dd31c16024c43f59 - [ 0x0020 ] 0x3034dd2920f673e204fee2811c678745fc819b55d3e9d294e45c9b03a76aef41 - [ 0x0040 ] 0x209dd15ebff5d46c4bd888e51a93cf99a7329636c63514396b4a452003a35bf7 - [ 0x0060 ] 0x04bf11ca01483bfa8b34b43561848d28905960114c8ac04049af4b6315a41678 - [ 0x0080 ] 0x2bb8324af6cfc93537a2ad1a445cfd0ca2a71acd7ac41fadbf933c2a51be344d - [ 0x00a0 ] 0x120a2a4cf30c1bf9845f20c6fe39e07ea2cce61f0c9bb048165fe5e4de877550 - [ 0x00c0 ] 0x111e129f1cf1097710d41c4ac70fcdfa5ba2023c6ff1cbeac322de49d1b6df7c - [ 0x00e0 ] 0x2032c61a830e3c17286de9462bf242fca2883585b93870a73853face6a6bf411 - [ 0x0100 ] 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2 - [ 0x0120 ] 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed - [ 0x0140 ] 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b - [ 0x0160 ] 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa - ;; zero value - [ 0x0a1900 ] (CALL (GAS) 8 0 0 0x0180 0x2000 32) - [ 0x0a2000 ] @0x2000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; return all computed values to caller for checking - (RETURN 0x0a0000 0x012020) - } - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - balance: '1000000000000000000' - code: '' - nonce: '0' - storage: {} - transaction: - data: - - | - { - [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - (STATICCALL (GAS) 0xa000000000000000000000000000000000000000 0 0 0x0a0000 0x012020) - [[ 0x01 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - ;; save results to store - [[ 0x0a00 ]] @0x0a0000 [[ 0x0a11 ]] @0x0a1100 - [[ 0x0a01 ]] @0x0a0100 [[ 0x0a12 ]] @0x0a1200 - [[ 0x0a02 ]] @0x0a0200 [[ 0x0a13 ]] @0x0a1300 - [[ 0x0a03 ]] @0x0a0300 [[ 0x0a14 ]] @0x0a1400 - [[ 0x0a04 ]] @0x0a0400 [[ 0x0a15 ]] @0x0a1500 - [[ 0x0a05 ]] @0x0a0500 [[ 0x0a16 ]] @0x0a1600 - [[ 0x0a06 ]] @0x0a0600 [[ 0x0a17 ]] @0x0a1700 - [[ 0x0a07 ]] @0x0a0700 [[ 0x0a18 ]] @0x0a1800 - [[ 0x0a08 ]] @0x0a0800 [[ 0x0a19 ]] @0x0a1900 - [[ 0x0a09 ]] @0x0a0900 [[ 0x0a20 ]] @0x0a2000 - [[ 0x0a10 ]] @0x0a1000 - } - gasLimit: - - '4000000' - gasPrice: '10' - nonce: '0' - secretKey: '0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8' - to: 'b000000000000000000000000000000000000000' - value: - - '100' diff --git a/tests/static/state_tests/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromTransactionFiller.yml b/tests/static/state_tests/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromTransactionFiller.yml deleted file mode 100644 index c34292cd2e..0000000000 --- a/tests/static/state_tests/stStaticFlagEnabled/CallWithZeroValueToPrecompileFromTransactionFiller.yml +++ /dev/null @@ -1,216 +0,0 @@ ---- -CallWithZeroValueToPrecompileFromTransaction: - _info: - comment: | - Contract B staticcalls contract A. 
- Contract A calls precompiled contracts with 0 value. - It should execute successfully for each precompiled contract. - env: - currentCoinbase: '0xcafe000000000000000000000000000000000001' - currentDifficulty: '0x20000' - currentGasLimit: '10000000' - currentNumber: '1' - currentTimestamp: '1000' - expect: - - indexes: - data: !!int -1 - gas: !!int -1 - value: !!int -1 - network: - - ">=Cancun" - result: - a000000000000000000000000000000000000000: - balance: '1000' - storage: {} - b000000000000000000000000000000000000000: - balance: '1100' - storage: - '0x00': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - '0x01': '0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed' - # precompiled contract #1 - Recovery of ECDSA signature - '0x0a00': '0x01' - '0x0a01': '0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b' - '0x0a02': '0x01' - # precompiled contract #2 - Hash function SHA256 - '0x0a03': '0x01' - '0x0a04': '0x0ccccccccccccccccccccccccccccccccccccccccccccccccccc000000' - '0x0a05': '0x73f5062fb68ed2a1ec82ff8c73f9251bb9cf53a623bc93527e16bc5ae29dad74' - # precompiled contract #3 - Hash function RIPEMD160 - '0x0a06': '0x01' - '0x0a07': '0x0ccccccccccccccccccccccccccccccccccccccccccccccccccc000000' - '0x0a08': '0x14ef238cfa4075e9ede92f18b1566c1dd0b99aaa' - # precompiled contract #4 - Identity - '0x0a09': '0x01' - '0x0a10': '0x0ccccccccccccccccccccccccccccccccccccccccccccccccccc000000' - # precompiled contract #5 - Modular exponentiation - '0x0a11': '0x01' - '0x0a12': '0x01' - # precompiled contract #6 - Addition on elliptic curve alt_bn128 - '0x0a13': '0x01' - '0x0a14': '0x1f4d1d80177b1377743d1901f70d7389be7f7a35a35bfd234a8aaee615b88c49' - '0x0a15': '0x018683193ae021a2f8920fed186cde5d9b1365116865281ccf884c1f28b1df8f' - # precompiled contract #7 - Scalar multiplication on elliptic curve alt_bn128 - '0x0a16': '0x01' - '0x0a17': '0x1f4d1d80177b1377743d1901f70d7389be7f7a35a35bfd234a8aaee615b88c49' - '0x0a18': '0x018683193ae021a2f8920fed186cde5d9b1365116865281ccf884c1f28b1df8f' - # precompiled contract #8 - Checking a pairing equation on curve alt_bn128 - '0x0a19': '0x01' - '0x0a20': '0x01' - pre: - b000000000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: - '0x00': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - '0x01': '0xdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeafdeadbeaf' - code: | - { - [[ 0x00 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - (STATICCALL (GAS) 0xa000000000000000000000000000000000000000 0 0 0x0a0000 0x012020) - [[ 0x01 ]] 0xfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeedfeed - ;; save results to store - [[ 0x0a00 ]] @0x0a0000 [[ 0x0a11 ]] @0x0a1100 - [[ 0x0a01 ]] @0x0a0100 [[ 0x0a12 ]] @0x0a1200 - [[ 0x0a02 ]] @0x0a0200 [[ 0x0a13 ]] @0x0a1300 - [[ 0x0a03 ]] @0x0a0300 [[ 0x0a14 ]] @0x0a1400 - [[ 0x0a04 ]] @0x0a0400 [[ 0x0a15 ]] @0x0a1500 - [[ 0x0a05 ]] @0x0a0500 [[ 0x0a16 ]] @0x0a1600 - [[ 0x0a06 ]] @0x0a0600 [[ 0x0a17 ]] @0x0a1700 - [[ 0x0a07 ]] @0x0a0700 [[ 0x0a18 ]] @0x0a1800 - [[ 0x0a08 ]] @0x0a0800 [[ 0x0a19 ]] @0x0a1900 - [[ 0x0a09 ]] @0x0a0900 [[ 0x0a20 ]] @0x0a2000 - [[ 0x0a10 ]] @0x0a1000 - } - a000000000000000000000000000000000000000: - balance: '1000' - nonce: '0' - storage: {} - code: | - { - ;; Recovery of ECDSA signature - [ 0x00 ] 0x18c547e4f7b0f325ad1e56f57e26c745b09a3e503d86e00e5255ff7f715d3d1c - [ 0x20 ] 28 - [ 0x40 ] 0x73b1693892219d736caba55bdb67216e485557ea6b6af75f37096c9aa6a5a75f - [ 0x60 ] 
0xeeb940b1d03b21e36b0e47e79769f095fe2ab855bd91e3a38756b7d75a9c4549 - ;; zero value - [ 0x0a0000 ] (CALL (GAS) 1 0 0 128 0x2000 32) - [ 0x0a0100 ] (MOD @0x2000 (EXP 2 160)) - [ 0x0a0200 ] (EQ (ORIGIN) @0x0a0100) - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x20 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x60 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Hash function SHA256 - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - ;; zero value - [ 0x0a0300 ] (CALL (GAS) 2 0 0 32 0x2000 32) - [ 0x0a0400 ] @0 - [ 0x0a0500 ] @0x2000 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Hash function RIPEMD160 - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - ;; zero value - [ 0x0a0600 ] (CALL (GAS) 3 0 0 32 0x2000 32) - [ 0x0a0700 ] @0 - [ 0x0a0800 ] @0x2000 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Identity - [ 0x00 ] 0x0000000ccccccccccccccccccccccccccccccccccccccccccccccccccc000000 - ;; zero value - [ 0x0a0900 ] (CALL (GAS) 4 0 0 32 0x2000 32) - [ 0x0a1000 ] @0x2000 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Modular exponentiation - [ 0x00 ] 1 - [ 0x20 ] 32 - [ 0x40 ] 32 - [ 0x60 ] 0x03fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc - [ 0x80 ] 0x2efffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc - [ 0xa0 ] 0x2f00000000000000000000000000000000000000000000000000000000000000 - ;; zero value - [ 0x0a1100 ] (CALL (GAS) 5 0 0 0xa1 0x2000 32) - [ 0x0a1200 ] @0x2000 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x20 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x60 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x80 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0xa0 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Addition on elliptic curve alt_bn128 - [ 0x00 ] 0x0f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd2 - [ 0x20 ] 0x16da2f5cb6be7a0aa72c440c53c9bbdfec6c36c7d515536431b3a865468acbba - [ 0x40 ] 0x1de49a4b0233273bba8146af82042d004f2085ec982397db0d97da17204cc286 - [ 0x60 ] 0x0217327ffc463919bef80cc166d09c6172639d8589799928761bcd9f22c903d4 - ;; zero value - [ 0x0a1300 ] (CALL (GAS) 6 0 0 128 0x2000 64) - [ 0x0a1400 ] @0x2000 - [ 0x0a1500 ] @0x2020 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x20 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x60 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2020 ] 
0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Scalar multiplication on elliptic curve alt_bn128 - [ 0x00 ] 0x0f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd2 - [ 0x20 ] 0x16da2f5cb6be7a0aa72c440c53c9bbdfec6c36c7d515536431b3a865468acbba - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000003 - ;; zero value - [ 0x0a1600 ] (CALL (GAS) 7 0 0 96 0x2000 64) - [ 0x0a1700 ] @0x2000 - [ 0x0a1800 ] @0x2020 - [ 0x00 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x20 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x40 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - [ 0x2020 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; Checking a pairing equation on curve alt_bn128 - ;; proof.A + vk.A + negateG1(proof.Ap) + P2 - [ 0x0000 ] 0x1c76476f4def4bb94541d57ebba1193381ffa7aa76ada664dd31c16024c43f59 - [ 0x0020 ] 0x3034dd2920f673e204fee2811c678745fc819b55d3e9d294e45c9b03a76aef41 - [ 0x0040 ] 0x209dd15ebff5d46c4bd888e51a93cf99a7329636c63514396b4a452003a35bf7 - [ 0x0060 ] 0x04bf11ca01483bfa8b34b43561848d28905960114c8ac04049af4b6315a41678 - [ 0x0080 ] 0x2bb8324af6cfc93537a2ad1a445cfd0ca2a71acd7ac41fadbf933c2a51be344d - [ 0x00a0 ] 0x120a2a4cf30c1bf9845f20c6fe39e07ea2cce61f0c9bb048165fe5e4de877550 - [ 0x00c0 ] 0x111e129f1cf1097710d41c4ac70fcdfa5ba2023c6ff1cbeac322de49d1b6df7c - [ 0x00e0 ] 0x2032c61a830e3c17286de9462bf242fca2883585b93870a73853face6a6bf411 - [ 0x0100 ] 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2 - [ 0x0120 ] 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed - [ 0x0140 ] 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b - [ 0x0160 ] 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa - ;; zero value - [ 0x0a1900 ] (CALL (GAS) 8 0 0 0x0180 0x2000 32) - [ 0x0a2000 ] @0x2000 - [ 0x2000 ] 0x0000000000000000000000000000000000000000000000000000000000000000 - - ;; return all computed values to caller for checking - (RETURN 0x0a0000 0x012020) - } - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - balance: '1000000000000000000' - code: '' - nonce: '0' - storage: {} - transaction: - data: - - '' - gasLimit: - - '4000000' - gasPrice: '10' - nonce: '0' - secretKey: '0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8' - to: 'b000000000000000000000000000000000000000' - value: - - '100' diff --git a/tests/static/state_tests/stStaticFlagEnabled/StaticcallForPrecompilesIssue683Filler.yml b/tests/static/state_tests/stStaticFlagEnabled/StaticcallForPrecompilesIssue683Filler.yml deleted file mode 100644 index 55c1471f12..0000000000 --- a/tests/static/state_tests/stStaticFlagEnabled/StaticcallForPrecompilesIssue683Filler.yml +++ /dev/null @@ -1,47 +0,0 @@ ---- -StaticcallForPrecompilesIssue683: - _info: - comment: | - Bytecode from issue 683, that initially show the problem with staticcall in hevm. 
- Issue link: https://github.com/ethereum/tests/issues/683 - Pull request link with original bytecode: https://github.com/dapphub/dapptools/pull/360 - env: - currentCoinbase: '0xcafe000000000000000000000000000000000001' - currentDifficulty: '0x20000' - currentGasLimit: '10000000' - currentNumber: '1' - currentTimestamp: '1000' - expect: - - indexes: - data: !!int -1 - gas: !!int -1 - value: !!int -1 - network: - - ">=Cancun" - result: - : - balance: '1100' - storage: - '0x00': '0x01' - pre: - : - balance: '1000' - nonce: '0' - storage: {} - code: ':raw 0x600080541515601d576001815580818283305afa15601b578081fd5b005b80818283600160025af15050' - : - balance: '1000000000000000000' - code: '' - nonce: '0' - storage: {} - transaction: - data: - - '' - gasLimit: - - '1000000' - gasPrice: '10' - nonce: '0' - secretKey: '' - to: '' - value: - - '100' From 3b9b018a95b40939dcb566a53fef7e0c2abb9194 Mon Sep 17 00:00:00 2001 From: felipe Date: Wed, 21 Jan 2026 17:09:19 -0700 Subject: [PATCH 092/154] feat(tests): turn on EIP-7934 tests with BALs (fill all for amsterdam) (#2058) --- .github/configs/feature.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/configs/feature.yaml b/.github/configs/feature.yaml index 0b07782d7b..b9404e5d59 100644 --- a/.github/configs/feature.yaml +++ b/.github/configs/feature.yaml @@ -23,6 +23,5 @@ benchmark_fast: bal: evm-type: develop - # TODO: Turn on block rlp limit tests after making filling them more flexible. - fill-params: --fork=Amsterdam -k "not eip7934" --fill-static-tests + fill-params: --fork=Amsterdam --fill-static-tests feature_only: true From 31defc8050177af803b6a5285f675612ca27d2f4 Mon Sep 17 00:00:00 2001 From: felipe Date: Thu, 22 Jan 2026 09:32:32 -0700 Subject: [PATCH 093/154] refactor(spec): refactor `execute_code` into `process_message` (#2061) * chore(spec-specs): backport changes * refactor: align changes after cherry-pick; add to missing forks - Align changes to process_message after cherry-pick - Add changes to missing forks --------- Co-authored-by: carsons-eels --- .../forks/amsterdam/vm/interpreter.py | 113 +++++++----------- .../forks/arrow_glacier/vm/interpreter.py | 75 +++++------- src/ethereum/forks/berlin/vm/interpreter.py | 75 +++++------- src/ethereum/forks/bpo1/vm/interpreter.py | 82 +++++-------- src/ethereum/forks/bpo2/vm/interpreter.py | 82 +++++-------- src/ethereum/forks/bpo3/vm/interpreter.py | 82 +++++-------- src/ethereum/forks/bpo4/vm/interpreter.py | 82 +++++-------- src/ethereum/forks/bpo5/vm/interpreter.py | 82 +++++-------- .../forks/byzantium/vm/interpreter.py | 75 +++++------- src/ethereum/forks/cancun/vm/interpreter.py | 71 ++++------- .../forks/constantinople/vm/interpreter.py | 75 +++++------- src/ethereum/forks/dao_fork/vm/interpreter.py | 75 +++++------- src/ethereum/forks/frontier/vm/interpreter.py | 75 +++++------- .../forks/gray_glacier/vm/interpreter.py | 77 +++++------- .../forks/homestead/vm/interpreter.py | 75 +++++------- src/ethereum/forks/istanbul/vm/interpreter.py | 75 +++++------- src/ethereum/forks/london/vm/interpreter.py | 74 +++++------- .../forks/muir_glacier/vm/interpreter.py | 75 +++++------- src/ethereum/forks/osaka/vm/interpreter.py | 82 +++++-------- src/ethereum/forks/paris/vm/interpreter.py | 71 ++++------- src/ethereum/forks/prague/vm/interpreter.py | 82 +++++-------- src/ethereum/forks/shanghai/vm/interpreter.py | 71 ++++------- .../forks/spurious_dragon/vm/interpreter.py | 75 +++++------- .../forks/tangerine_whistle/vm/interpreter.py | 75 +++++------- 24 files 
changed, 705 insertions(+), 1171 deletions(-) diff --git a/src/ethereum/forks/amsterdam/vm/interpreter.py b/src/ethereum/forks/amsterdam/vm/interpreter.py index d73ba88a72..21aecab4a9 100644 --- a/src/ethereum/forks/amsterdam/vm/interpreter.py +++ b/src/ethereum/forks/amsterdam/vm/interpreter.py @@ -45,7 +45,6 @@ set_code, ) from ..state_tracker import ( - StateChanges, capture_pre_balance, capture_pre_code, merge_on_failure, @@ -270,6 +269,28 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") + code = message.code + valid_jump_destinations = get_valid_jump_destinations(code) + evm = Evm( + pc=Uint(0), + stack=[], + memory=bytearray(), + code=code, + gas_left=message.gas, + valid_jump_destinations=valid_jump_destinations, + logs=(), + refund_counter=0, + running=True, + message=message, + output=b"", + accounts_to_delete=set(), + return_data=b"", + error=None, + accessed_addresses=message.accessed_addresses, + accessed_storage_keys=message.accessed_storage_keys, + state_changes=message.state_changes, + ) + # take snapshot of state before processing the message begin_transaction(state, transient_storage) @@ -310,77 +331,24 @@ def process_message(message: Message) -> Evm: U256(recipient_new_balance), ) - evm = execute_code(message, message.state_changes) - if evm.error: - rollback_transaction(state, transient_storage) - if not message.is_create: - merge_on_failure(evm.state_changes) - else: - commit_transaction(state, transient_storage) - if not message.is_create: - merge_on_success(evm.state_changes) - return evm - - -def execute_code(message: Message, state_changes: StateChanges) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. - state_changes : - The state changes frame to use for tracking. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ - code = message.code - valid_jump_destinations = get_valid_jump_destinations(code) - - evm = Evm( - pc=Uint(0), - stack=[], - memory=bytearray(), - code=code, - gas_left=message.gas, - valid_jump_destinations=valid_jump_destinations, - logs=(), - refund_counter=0, - running=True, - message=message, - output=b"", - accounts_to_delete=set(), - return_data=b"", - error=None, - accessed_addresses=message.accessed_addresses, - accessed_storage_keys=message.accessed_storage_keys, - state_changes=state_changes, - ) try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: - if message.disable_precompiles: - return evm - evm_trace(evm, PrecompileStart(evm.message.code_address)) - PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) - evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + if not message.disable_precompiles: + evm_trace(evm, PrecompileStart(evm.message.code_address)) + PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) + evm_trace(evm, PrecompileEnd()) + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -390,4 +358,13 @@ def execute_code(message: Message, state_changes: StateChanges) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + rollback_transaction(state, transient_storage) + if not message.is_create: + merge_on_failure(evm.state_changes) + else: + commit_transaction(state, transient_storage) + if not message.is_create: + merge_on_success(evm.state_changes) return evm diff --git a/src/ethereum/forks/arrow_glacier/vm/interpreter.py b/src/ethereum/forks/arrow_glacier/vm/interpreter.py index afc8e64275..267ff72cbb 100644 --- a/src/ethereum/forks/arrow_glacier/vm/interpreter.py +++ b/src/ethereum/forks/arrow_glacier/vm/interpreter.py @@ -225,44 +225,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - touch_account(state, message.current_target) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -282,24 +246,34 @@ def execute_code(message: Message) -> Evm: accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + touch_account(state, message.current_target) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -309,4 +283,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm diff --git a/src/ethereum/forks/berlin/vm/interpreter.py b/src/ethereum/forks/berlin/vm/interpreter.py index 0d3c2028e0..9f57d428f6 100644 --- a/src/ethereum/forks/berlin/vm/interpreter.py +++ b/src/ethereum/forks/berlin/vm/interpreter.py @@ -221,44 +221,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - touch_account(state, message.current_target) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -278,24 +242,34 @@ def execute_code(message: Message) -> Evm: accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + touch_account(state, message.current_target) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -305,4 +279,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm diff --git a/src/ethereum/forks/bpo1/vm/interpreter.py b/src/ethereum/forks/bpo1/vm/interpreter.py index 54f5172d6d..e5f724bd4f 100644 --- a/src/ethereum/forks/bpo1/vm/interpreter.py +++ b/src/ethereum/forks/bpo1/vm/interpreter.py @@ -234,46 +234,12 @@ def process_message(message: Message) -> Evm: """ state = message.block_env.state - transient_storage = message.tx_env.transient_storage if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state, transient_storage) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state, transient_storage) - else: - commit_transaction(state, transient_storage) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ + transient_storage = message.tx_env.transient_storage code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -292,26 +258,33 @@ def execute_code(message: Message) -> Evm: accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, ) + + # take snapshot of state before processing the message + begin_transaction(state, transient_storage) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: - if message.disable_precompiles: - return evm - evm_trace(evm, PrecompileStart(evm.message.code_address)) - PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) - evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + if not message.disable_precompiles: + evm_trace(evm, PrecompileStart(evm.message.code_address)) + PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) + evm_trace(evm, PrecompileEnd()) + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -321,4 +294,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state, transient_storage) + else: + commit_transaction(state, transient_storage) return evm diff --git a/src/ethereum/forks/bpo2/vm/interpreter.py b/src/ethereum/forks/bpo2/vm/interpreter.py index 876a44fff8..8f79e265d2 100644 --- a/src/ethereum/forks/bpo2/vm/interpreter.py +++ b/src/ethereum/forks/bpo2/vm/interpreter.py @@ -234,46 +234,12 @@ def process_message(message: Message) -> Evm: """ state = message.block_env.state - transient_storage = message.tx_env.transient_storage if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state, transient_storage) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state, transient_storage) - else: - commit_transaction(state, transient_storage) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ + transient_storage = message.tx_env.transient_storage code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -292,26 +258,33 @@ def execute_code(message: Message) -> Evm: accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, ) + + # take snapshot of state before processing the message + begin_transaction(state, transient_storage) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: - if message.disable_precompiles: - return evm - evm_trace(evm, PrecompileStart(evm.message.code_address)) - PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) - evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + if not message.disable_precompiles: + evm_trace(evm, PrecompileStart(evm.message.code_address)) + PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) + evm_trace(evm, PrecompileEnd()) + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -321,4 +294,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state, transient_storage) + else: + commit_transaction(state, transient_storage) return evm diff --git a/src/ethereum/forks/bpo3/vm/interpreter.py b/src/ethereum/forks/bpo3/vm/interpreter.py index d1084d0643..733bbec33c 100644 --- a/src/ethereum/forks/bpo3/vm/interpreter.py +++ b/src/ethereum/forks/bpo3/vm/interpreter.py @@ -234,46 +234,12 @@ def process_message(message: Message) -> Evm: """ state = message.block_env.state - transient_storage = message.tx_env.transient_storage if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state, transient_storage) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state, transient_storage) - else: - commit_transaction(state, transient_storage) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ + transient_storage = message.tx_env.transient_storage code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -292,26 +258,33 @@ def execute_code(message: Message) -> Evm: accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, ) + + # take snapshot of state before processing the message + begin_transaction(state, transient_storage) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: - if message.disable_precompiles: - return evm - evm_trace(evm, PrecompileStart(evm.message.code_address)) - PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) - evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + if not message.disable_precompiles: + evm_trace(evm, PrecompileStart(evm.message.code_address)) + PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) + evm_trace(evm, PrecompileEnd()) + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -321,4 +294,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state, transient_storage) + else: + commit_transaction(state, transient_storage) return evm diff --git a/src/ethereum/forks/bpo4/vm/interpreter.py b/src/ethereum/forks/bpo4/vm/interpreter.py index df9a10ed64..7e084a888e 100644 --- a/src/ethereum/forks/bpo4/vm/interpreter.py +++ b/src/ethereum/forks/bpo4/vm/interpreter.py @@ -234,46 +234,12 @@ def process_message(message: Message) -> Evm: """ state = message.block_env.state - transient_storage = message.tx_env.transient_storage if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state, transient_storage) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state, transient_storage) - else: - commit_transaction(state, transient_storage) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ + transient_storage = message.tx_env.transient_storage code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -292,26 +258,33 @@ def execute_code(message: Message) -> Evm: accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, ) + + # take snapshot of state before processing the message + begin_transaction(state, transient_storage) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: - if message.disable_precompiles: - return evm - evm_trace(evm, PrecompileStart(evm.message.code_address)) - PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) - evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + if not message.disable_precompiles: + evm_trace(evm, PrecompileStart(evm.message.code_address)) + PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) + evm_trace(evm, PrecompileEnd()) + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -321,4 +294,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state, transient_storage) + else: + commit_transaction(state, transient_storage) return evm diff --git a/src/ethereum/forks/bpo5/vm/interpreter.py b/src/ethereum/forks/bpo5/vm/interpreter.py index 40dc2a986d..a1de411be9 100644 --- a/src/ethereum/forks/bpo5/vm/interpreter.py +++ b/src/ethereum/forks/bpo5/vm/interpreter.py @@ -234,46 +234,12 @@ def process_message(message: Message) -> Evm: """ state = message.block_env.state - transient_storage = message.tx_env.transient_storage if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state, transient_storage) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state, transient_storage) - else: - commit_transaction(state, transient_storage) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ + transient_storage = message.tx_env.transient_storage code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -292,26 +258,33 @@ def execute_code(message: Message) -> Evm: accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, ) + + # take snapshot of state before processing the message + begin_transaction(state, transient_storage) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: - if message.disable_precompiles: - return evm - evm_trace(evm, PrecompileStart(evm.message.code_address)) - PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) - evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + if not message.disable_precompiles: + evm_trace(evm, PrecompileStart(evm.message.code_address)) + PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) + evm_trace(evm, PrecompileEnd()) + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -321,4 +294,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state, transient_storage) + else: + commit_transaction(state, transient_storage) return evm diff --git a/src/ethereum/forks/byzantium/vm/interpreter.py b/src/ethereum/forks/byzantium/vm/interpreter.py index 1594c36f15..7237f30fe4 100644 --- a/src/ethereum/forks/byzantium/vm/interpreter.py +++ b/src/ethereum/forks/byzantium/vm/interpreter.py @@ -214,44 +214,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - touch_account(state, message.current_target) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -269,24 +233,34 @@ def execute_code(message: Message) -> Evm: return_data=b"", error=None, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + touch_account(state, message.current_target) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -296,4 +270,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm diff --git a/src/ethereum/forks/cancun/vm/interpreter.py b/src/ethereum/forks/cancun/vm/interpreter.py index bf8319544a..2399a49da9 100644 --- a/src/ethereum/forks/cancun/vm/interpreter.py +++ b/src/ethereum/forks/cancun/vm/interpreter.py @@ -218,42 +218,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state, transient_storage) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state, transient_storage) - else: - commit_transaction(state, transient_storage) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -272,24 +238,32 @@ def execute_code(message: Message) -> Evm: accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, ) + + # take snapshot of state before processing the message + begin_transaction(state, transient_storage) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -299,4 +273,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state, transient_storage) + else: + commit_transaction(state, transient_storage) return evm diff --git a/src/ethereum/forks/constantinople/vm/interpreter.py b/src/ethereum/forks/constantinople/vm/interpreter.py index 201446f30b..e0e028871d 100644 --- a/src/ethereum/forks/constantinople/vm/interpreter.py +++ b/src/ethereum/forks/constantinople/vm/interpreter.py @@ -215,44 +215,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - touch_account(state, message.current_target) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -270,24 +234,34 @@ def execute_code(message: Message) -> Evm: return_data=b"", error=None, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + touch_account(state, message.current_target) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -297,4 +271,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm diff --git a/src/ethereum/forks/dao_fork/vm/interpreter.py b/src/ethereum/forks/dao_fork/vm/interpreter.py index 19a7bd5782..b7e1ac6c91 100644 --- a/src/ethereum/forks/dao_fork/vm/interpreter.py +++ b/src/ethereum/forks/dao_fork/vm/interpreter.py @@ -195,44 +195,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - touch_account(state, message.current_target) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -248,27 +212,44 @@ def execute_code(message: Message) -> Evm: accounts_to_delete=set(), error=None, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + touch_account(state, message.current_target) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) evm.gas_left = Uint(0) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm diff --git a/src/ethereum/forks/frontier/vm/interpreter.py b/src/ethereum/forks/frontier/vm/interpreter.py index 5a3c923f3c..335634b006 100644 --- a/src/ethereum/forks/frontier/vm/interpreter.py +++ b/src/ethereum/forks/frontier/vm/interpreter.py @@ -193,44 +193,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - touch_account(state, message.current_target) - - if message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -246,27 +210,44 @@ def execute_code(message: Message) -> Evm: accounts_to_delete=set(), error=None, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + touch_account(state, message.current_target) + + if message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) evm.gas_left = Uint(0) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm diff --git a/src/ethereum/forks/gray_glacier/vm/interpreter.py b/src/ethereum/forks/gray_glacier/vm/interpreter.py index 851d1826fa..afe469ce3f 100644 --- a/src/ethereum/forks/gray_glacier/vm/interpreter.py +++ b/src/ethereum/forks/gray_glacier/vm/interpreter.py @@ -217,7 +217,7 @@ def process_message(message: Message) -> Evm: Returns ------- - evm: :py:class:`~ethereum.forks.gray_glacier.vm.Evm` + evm: :py:class:`~ethereum.forks.arrow_glacier.vm.Evm` Items containing execution specific objects """ @@ -225,44 +225,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - touch_account(state, message.current_target) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -282,24 +246,34 @@ def execute_code(message: Message) -> Evm: accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + touch_account(state, message.current_target) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -309,4 +283,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm diff --git a/src/ethereum/forks/homestead/vm/interpreter.py b/src/ethereum/forks/homestead/vm/interpreter.py index bf064d39b0..ffdcd0f8fa 100644 --- a/src/ethereum/forks/homestead/vm/interpreter.py +++ b/src/ethereum/forks/homestead/vm/interpreter.py @@ -195,44 +195,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - touch_account(state, message.current_target) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -248,27 +212,44 @@ def execute_code(message: Message) -> Evm: accounts_to_delete=set(), error=None, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + touch_account(state, message.current_target) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) evm.gas_left = Uint(0) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm diff --git a/src/ethereum/forks/istanbul/vm/interpreter.py b/src/ethereum/forks/istanbul/vm/interpreter.py index ae8ca5a314..bec60e1da1 100644 --- a/src/ethereum/forks/istanbul/vm/interpreter.py +++ b/src/ethereum/forks/istanbul/vm/interpreter.py @@ -221,44 +221,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - touch_account(state, message.current_target) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -276,24 +240,34 @@ def execute_code(message: Message) -> Evm: return_data=b"", error=None, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + touch_account(state, message.current_target) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -303,4 +277,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm diff --git a/src/ethereum/forks/london/vm/interpreter.py b/src/ethereum/forks/london/vm/interpreter.py index fec0f45d22..ee3e616e3f 100644 --- a/src/ethereum/forks/london/vm/interpreter.py +++ b/src/ethereum/forks/london/vm/interpreter.py @@ -225,44 +225,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - touch_account(state, message.current_target) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -282,24 +246,35 @@ def execute_code(message: Message) -> Evm: accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + touch_account(state, message.current_target) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -309,4 +284,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm diff --git a/src/ethereum/forks/muir_glacier/vm/interpreter.py b/src/ethereum/forks/muir_glacier/vm/interpreter.py index a5841b998d..67b6e7ca64 100644 --- a/src/ethereum/forks/muir_glacier/vm/interpreter.py +++ b/src/ethereum/forks/muir_glacier/vm/interpreter.py @@ -221,44 +221,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - touch_account(state, message.current_target) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -276,24 +240,34 @@ def execute_code(message: Message) -> Evm: return_data=b"", error=None, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + touch_account(state, message.current_target) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -303,4 +277,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm diff --git a/src/ethereum/forks/osaka/vm/interpreter.py b/src/ethereum/forks/osaka/vm/interpreter.py index 2d5eb64aff..944b2b5223 100644 --- a/src/ethereum/forks/osaka/vm/interpreter.py +++ b/src/ethereum/forks/osaka/vm/interpreter.py @@ -234,46 +234,12 @@ def process_message(message: Message) -> Evm: """ state = message.block_env.state - transient_storage = message.tx_env.transient_storage if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state, transient_storage) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state, transient_storage) - else: - commit_transaction(state, transient_storage) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ + transient_storage = message.tx_env.transient_storage code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -292,26 +258,33 @@ def execute_code(message: Message) -> Evm: accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, ) + + # take snapshot of state before processing the message + begin_transaction(state, transient_storage) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: - if message.disable_precompiles: - return evm - evm_trace(evm, PrecompileStart(evm.message.code_address)) - PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) - evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + if not message.disable_precompiles: + evm_trace(evm, PrecompileStart(evm.message.code_address)) + PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) + evm_trace(evm, PrecompileEnd()) + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -321,4 +294,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state, transient_storage) + else: + commit_transaction(state, transient_storage) return evm diff --git a/src/ethereum/forks/paris/vm/interpreter.py b/src/ethereum/forks/paris/vm/interpreter.py index 957323deda..2837130881 100644 --- a/src/ethereum/forks/paris/vm/interpreter.py +++ b/src/ethereum/forks/paris/vm/interpreter.py @@ -214,42 +214,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -268,24 +234,32 @@ def execute_code(message: Message) -> Evm: accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -295,4 +269,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm diff --git a/src/ethereum/forks/prague/vm/interpreter.py b/src/ethereum/forks/prague/vm/interpreter.py index 17349b4e3b..d0002eae34 100644 --- a/src/ethereum/forks/prague/vm/interpreter.py +++ b/src/ethereum/forks/prague/vm/interpreter.py @@ -234,46 +234,12 @@ def process_message(message: Message) -> Evm: """ state = message.block_env.state - transient_storage = message.tx_env.transient_storage if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state, transient_storage) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state, transient_storage) - else: - commit_transaction(state, transient_storage) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ + transient_storage = message.tx_env.transient_storage code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -292,26 +258,33 @@ def execute_code(message: Message) -> Evm: accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, ) + + # take snapshot of state before processing the message + begin_transaction(state, transient_storage) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: - if message.disable_precompiles: - return evm - evm_trace(evm, PrecompileStart(evm.message.code_address)) - PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) - evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + if not message.disable_precompiles: + evm_trace(evm, PrecompileStart(evm.message.code_address)) + PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) + evm_trace(evm, PrecompileEnd()) + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -321,4 +294,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state, transient_storage) + else: + commit_transaction(state, transient_storage) return evm diff --git a/src/ethereum/forks/shanghai/vm/interpreter.py b/src/ethereum/forks/shanghai/vm/interpreter.py index 1b28c7e3e2..383f0d04cb 100644 --- a/src/ethereum/forks/shanghai/vm/interpreter.py +++ b/src/ethereum/forks/shanghai/vm/interpreter.py @@ -215,42 +215,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -269,24 +235,32 @@ def execute_code(message: Message) -> Evm: accessed_addresses=message.accessed_addresses, accessed_storage_keys=message.accessed_storage_keys, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) @@ -296,4 +270,11 @@ def execute_code(message: Message) -> Evm: except Revert as error: evm_trace(evm, OpException(error)) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm diff --git a/src/ethereum/forks/spurious_dragon/vm/interpreter.py b/src/ethereum/forks/spurious_dragon/vm/interpreter.py index a25a86fc21..5313e29789 100644 --- a/src/ethereum/forks/spurious_dragon/vm/interpreter.py +++ b/src/ethereum/forks/spurious_dragon/vm/interpreter.py @@ -212,44 +212,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - touch_account(state, message.current_target) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -266,27 +230,44 @@ def execute_code(message: Message) -> Evm: touched_accounts=set(), error=None, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + touch_account(state, message.current_target) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) evm.gas_left = Uint(0) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm diff --git a/src/ethereum/forks/tangerine_whistle/vm/interpreter.py b/src/ethereum/forks/tangerine_whistle/vm/interpreter.py index f17d93d2b2..5ae6e9fe4b 100644 --- a/src/ethereum/forks/tangerine_whistle/vm/interpreter.py +++ b/src/ethereum/forks/tangerine_whistle/vm/interpreter.py @@ -195,44 +195,8 @@ def process_message(message: Message) -> Evm: if message.depth > STACK_DEPTH_LIMIT: raise StackDepthLimitError("Stack depth limit reached") - # take snapshot of state before processing the message - begin_transaction(state) - - touch_account(state, message.current_target) - - if message.should_transfer_value and message.value != 0: - move_ether( - state, message.caller, message.current_target, message.value - ) - - evm = execute_code(message) - if evm.error: - # revert state to the last saved checkpoint - # since the message call resulted in an error - rollback_transaction(state) - else: - commit_transaction(state) - return evm - - -def execute_code(message: Message) -> Evm: - """ - Executes bytecode present in the `message`. - - Parameters - ---------- - message : - Transaction specific items. 
- - Returns - ------- - evm: `ethereum.vm.EVM` - Items containing execution specific objects - - """ code = message.code valid_jump_destinations = get_valid_jump_destinations(code) - evm = Evm( pc=Uint(0), stack=[], @@ -248,27 +212,44 @@ def execute_code(message: Message) -> Evm: accounts_to_delete=set(), error=None, ) + + # take snapshot of state before processing the message + begin_transaction(state) + + touch_account(state, message.current_target) + + if message.should_transfer_value and message.value != 0: + move_ether( + state, message.caller, message.current_target, message.value + ) + try: if evm.message.code_address in PRE_COMPILED_CONTRACTS: evm_trace(evm, PrecompileStart(evm.message.code_address)) PRE_COMPILED_CONTRACTS[evm.message.code_address](evm) evm_trace(evm, PrecompileEnd()) - return evm - - while evm.running and evm.pc < ulen(evm.code): - try: - op = Ops(evm.code[evm.pc]) - except ValueError as e: - raise InvalidOpcode(evm.code[evm.pc]) from e + else: + while evm.running and evm.pc < ulen(evm.code): + try: + op = Ops(evm.code[evm.pc]) + except ValueError as e: + raise InvalidOpcode(evm.code[evm.pc]) from e - evm_trace(evm, OpStart(op)) - op_implementation[op](evm) - evm_trace(evm, OpEnd()) + evm_trace(evm, OpStart(op)) + op_implementation[op](evm) + evm_trace(evm, OpEnd()) - evm_trace(evm, EvmStop(Ops.STOP)) + evm_trace(evm, EvmStop(Ops.STOP)) except ExceptionalHalt as error: evm_trace(evm, OpException(error)) evm.gas_left = Uint(0) evm.error = error + + if evm.error: + # revert state to the last saved checkpoint + # since the message call resulted in an error + rollback_transaction(state) + else: + commit_transaction(state) return evm From f9ae69d3430361ed0203679ab7054c1e9089f9e6 Mon Sep 17 00:00:00 2001 From: spencer Date: Thu, 22 Jan 2026 17:34:46 +0000 Subject: [PATCH 094/154] chore(docs): bump docs fork to amsterdam (#2064) --- packages/testing/src/execution_testing/config/docs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/testing/src/execution_testing/config/docs.py b/packages/testing/src/execution_testing/config/docs.py index ac6c7e93cd..d52046c957 100644 --- a/packages/testing/src/execution_testing/config/docs.py +++ b/packages/testing/src/execution_testing/config/docs.py @@ -11,10 +11,10 @@ class DocsConfig(BaseModel): """A class for accessing documentation-related configurations.""" - TARGET_FORK: str = "Osaka" + TARGET_FORK: str = "Amsterdam" """The target fork for the documentation.""" - GENERATE_UNTIL_FORK: str = "Osaka" + GENERATE_UNTIL_FORK: str = "Amsterdam" """The fork until which documentation should be generated.""" DOCS_BASE_URL: str = "https://eest.ethereum.org" From 6e5f1f84690c6540c1f97e5602099b188aebeebf Mon Sep 17 00:00:00 2001 From: felipe Date: Thu, 22 Jan 2026 18:00:34 -0700 Subject: [PATCH 095/154] bugfix(tests): Put BAL back into test fixtures for next bal release (#2066) - BAL was removed from block body but we agreed to keep it in the fixtures for help with debugging against client BALs to spot diffs. 
--- packages/testing/src/execution_testing/fixtures/blockchain.py | 3 +++ packages/testing/src/execution_testing/specs/blockchain.py | 3 +++ 2 files changed, 6 insertions(+) diff --git a/packages/testing/src/execution_testing/fixtures/blockchain.py b/packages/testing/src/execution_testing/fixtures/blockchain.py index 23145e6238..fa85d4b60b 100644 --- a/packages/testing/src/execution_testing/fixtures/blockchain.py +++ b/packages/testing/src/execution_testing/fixtures/blockchain.py @@ -639,6 +639,9 @@ def strip_block_number_computed_field(cls, data: Any) -> Any: ) withdrawals: List[FixtureWithdrawal] | None = None execution_witness: WitnessChunk | None = None + block_access_list: BlockAccessList | None = Field( + None, description="EIP-7928 Block Access List" + ) fork: Fork | None = Field(None, exclude=True) @computed_field(alias="blocknumber") # type: ignore[prop-decorator] diff --git a/packages/testing/src/execution_testing/specs/blockchain.py b/packages/testing/src/execution_testing/specs/blockchain.py index b5a6ddffe9..3acd66a03f 100644 --- a/packages/testing/src/execution_testing/specs/blockchain.py +++ b/packages/testing/src/execution_testing/specs/blockchain.py @@ -381,6 +381,9 @@ def get_fixture_block(self) -> FixtureBlock | InvalidFixtureBlock: if self.withdrawals is not None else None ), + block_access_list=self.block_access_list + if self.block_access_list + else None, fork=self.fork, ).with_rlp(txs=self.txs) From 16316e236e8a7ce7af27e309b5e8f1ddcba9e583 Mon Sep 17 00:00:00 2001 From: spencer Date: Fri, 23 Jan 2026 03:36:22 +0000 Subject: [PATCH 096/154] chore(test-specs): fix fork transition tests (#2065) * chore(test-specs): fix fork transition tests * fix(test-forks): don't collect BPO tests w/ unchanged params on transition - Use `fork.excess_blob_gas_calculator` to account for fork-specific blob gas calculations. 
* refactor(test-forks): clearer definitions for transition forks --------- Co-authored-by: fselmo --- .../pytest_commands/plugins/forks/forks.py | 60 +++++++++++++++++++ .../forks/forks/transition.py | 12 ++++ .../test_excess_blob_gas_fork_transition.py | 33 ++++++---- .../test_modexp_upper_bounds.py | 2 +- .../test_tx_gas_limit_transition_fork.py | 2 +- .../test_modexp_thresholds_transition.py | 2 +- .../test_count_leading_zeros.py | 2 +- 7 files changed, 99 insertions(+), 14 deletions(-) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py index 197e9f595d..5c089f1570 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/forks/forks.py @@ -1181,6 +1181,24 @@ def add_fork_covariant_parameters( fp for fp in fork_parametrizers if fp.fork >= param_min_fork ] + # Filter out forks where blob params don't change for valid_for_bpo_forks + if list(metafunc.definition.iter_markers(name="valid_for_bpo_forks")): + filtered_forks = [ + fp.fork + for fp in fork_parametrizers + if not blob_params_changed_at_transition(fp.fork) + ] + if filtered_forks: + logger.debug( + f"Skipping {metafunc.function.__name__} for forks with " + f"unchanged blob params: {[f.name() for f in filtered_forks]}" + ) + fork_parametrizers[:] = [ + fp + for fp in fork_parametrizers + if blob_params_changed_at_transition(fp.fork) + ] + for covariant_descriptor in fork_covariant_decorators: if list( metafunc.definition.iter_markers(covariant_descriptor.marker_name) @@ -1272,6 +1290,48 @@ def parametrize_fork( ) +def blob_params_changed_at_transition(fork: Fork) -> bool: + """ + Check if BPO-relevant blob parameters change at a fork transition. + + For transition forks, compares the 3 blob parameters that BPO forks modify + between the from_fork and to_fork: + + - target_blobs_per_block + - max_blobs_per_block + - blob_base_fee_update_fraction + + Returns True if any parameter changed, False otherwise. + + For non-transition forks, returns True (no filtering needed). 
+ """ + # Check if this is a transition fork + if not hasattr(fork, "transitions_from") or not hasattr( + fork, "transitions_to" + ): + return True + + from_fork = fork.transitions_from() + to_fork = fork.transitions_to() + + # Compare the 3 blob parameters that BPO forks modify + bpo_blob_params = [ + "target_blobs_per_block", + "max_blobs_per_block", + "blob_base_fee_update_fraction", + ] + + for param in bpo_blob_params: + from_method = getattr(from_fork, param, None) + to_method = getattr(to_fork, param, None) + if from_method is None or to_method is None: + continue + if from_method() != to_method(): + return True + + return False + + def pytest_collection_modifyitems( config: pytest.Config, items: List[pytest.Item] ) -> None: diff --git a/packages/testing/src/execution_testing/forks/forks/transition.py b/packages/testing/src/execution_testing/forks/forks/transition.py index 249314098e..85c43a2c4d 100644 --- a/packages/testing/src/execution_testing/forks/forks/transition.py +++ b/packages/testing/src/execution_testing/forks/forks/transition.py @@ -6,6 +6,7 @@ BPO2, BPO3, BPO4, + Amsterdam, Berlin, Cancun, London, @@ -66,6 +67,17 @@ class BPO1ToBPO2AtTime15k(BPO1): pass +@transition_fork(to_fork=Amsterdam, at_timestamp=15_000) +class BPO2ToAmsterdamAtTime15k(BPO2): + """BPO2 to Amsterdam transition at Timestamp 15k.""" + + # TODO: We may need to adjust which BPO Amsterdam inherits from as the + # related Amsterdam specs change over time, and before Amsterdam is + # live on mainnet. + + pass + + @transition_fork(to_fork=BPO3, at_timestamp=15_000) class BPO2ToBPO3AtTime15k(BPO2): """BPO2 to BPO3 transition at Timestamp 15k.""" diff --git a/tests/cancun/eip4844_blobs/test_excess_blob_gas_fork_transition.py b/tests/cancun/eip4844_blobs/test_excess_blob_gas_fork_transition.py index d88207cfce..1866a4e210 100644 --- a/tests/cancun/eip4844_blobs/test_excess_blob_gas_fork_transition.py +++ b/tests/cancun/eip4844_blobs/test_excess_blob_gas_fork_transition.py @@ -158,24 +158,36 @@ def pre_fork_blocks( @pytest.fixture -def pre_fork_excess_blobs( +def pre_fork_excess_blob_gas( fork: Fork, pre_fork_blobs_per_block: int, pre_fork_blocks: List[Block], + block_base_fee_per_gas: int, ) -> int: """ - Return the cumulative excess blobs up until the fork given the - pre_fork_blobs_per_block and the target blobs in the fork prior. + Return the cumulative excess blob gas up until the fork. + + Calculates the expected excess blob gas by iterating through pre-fork + blocks using the fork's calculator, which handles EIP-7918 reserve price + for >=Osaka. 
""" if not fork.supports_blobs(timestamp=0): return 0 - target_blobs = fork.target_blobs_per_block(timestamp=0) - if pre_fork_blobs_per_block > target_blobs: - return (pre_fork_blobs_per_block - target_blobs) * ( - len(pre_fork_blocks) - 1 + calc_excess_blob_gas = fork.excess_blob_gas_calculator(timestamp=0) + excess_blob_gas = 0 + + # Calculate excess accumulation for each pre-fork block + # First block is built on genesis which has 0 blobs + for i in range(len(pre_fork_blocks)): + parent_blob_count = 0 if i == 0 else pre_fork_blobs_per_block + excess_blob_gas = calc_excess_blob_gas( + parent_excess_blob_gas=excess_blob_gas, + parent_blob_count=parent_blob_count, + parent_base_fee_per_gas=block_base_fee_per_gas, ) - return 0 + + return excess_blob_gas @pytest.fixture @@ -204,7 +216,7 @@ def gas_spender_account(pre: Alloc) -> Address: # noqa: D103 @pytest.fixture def fork_block_excess_blob_gas( fork: Fork, - pre_fork_excess_blobs: int, + pre_fork_excess_blob_gas: int, pre_fork_blobs_per_block: int, block_base_fee_per_gas: int, ) -> int: @@ -215,7 +227,7 @@ def fork_block_excess_blob_gas( timestamp=FORK_TIMESTAMP ) return calc_excess_blob_gas_post_fork( - parent_excess_blobs=pre_fork_excess_blobs, + parent_excess_blob_gas=pre_fork_excess_blob_gas, parent_blob_count=pre_fork_blobs_per_block, parent_base_fee_per_gas=block_base_fee_per_gas, ) @@ -463,6 +475,7 @@ def test_fork_transition_excess_blob_gas_at_blob_genesis( ) +@pytest.mark.valid_for_bpo_forks @pytest.mark.valid_at_transition_to("Prague", subsequent_forks=True) @pytest.mark.parametrize_by_fork( "post_fork_block_count,pre_fork_blobs_per_block,post_fork_blobs_per_block", diff --git a/tests/osaka/eip7823_modexp_upper_bounds/test_modexp_upper_bounds.py b/tests/osaka/eip7823_modexp_upper_bounds/test_modexp_upper_bounds.py index 60ae982971..264219cd36 100644 --- a/tests/osaka/eip7823_modexp_upper_bounds/test_modexp_upper_bounds.py +++ b/tests/osaka/eip7823_modexp_upper_bounds/test_modexp_upper_bounds.py @@ -283,7 +283,7 @@ def test_modexp_upper_bounds( ), ], ) -@pytest.mark.valid_at_transition_to("Osaka", subsequent_forks=True) +@pytest.mark.valid_at_transition_to("Osaka") def test_modexp_upper_bounds_fork_transition( blockchain_test: BlockchainTestFiller, pre: Alloc, diff --git a/tests/osaka/eip7825_transaction_gas_limit_cap/test_tx_gas_limit_transition_fork.py b/tests/osaka/eip7825_transaction_gas_limit_cap/test_tx_gas_limit_transition_fork.py index fc952760b3..243ce4640e 100644 --- a/tests/osaka/eip7825_transaction_gas_limit_cap/test_tx_gas_limit_transition_fork.py +++ b/tests/osaka/eip7825_transaction_gas_limit_cap/test_tx_gas_limit_transition_fork.py @@ -28,7 +28,7 @@ @EIPChecklist.ModifiedTransactionValidityConstraint.Test.ForkTransition.RejectedBeforeFork() @EIPChecklist.ModifiedTransactionValidityConstraint.Test.ForkTransition.AcceptedAfterFork() @EIPChecklist.ModifiedTransactionValidityConstraint.Test.ForkTransition.RejectedAfterFork() -@pytest.mark.valid_at_transition_to("Osaka", subsequent_forks=True) +@pytest.mark.valid_at_transition_to("Osaka") @pytest.mark.parametrize( "transaction_at_cap", [ diff --git a/tests/osaka/eip7883_modexp_gas_increase/test_modexp_thresholds_transition.py b/tests/osaka/eip7883_modexp_gas_increase/test_modexp_thresholds_transition.py index be711225a2..128d1306a7 100644 --- a/tests/osaka/eip7883_modexp_gas_increase/test_modexp_thresholds_transition.py +++ b/tests/osaka/eip7883_modexp_gas_increase/test_modexp_thresholds_transition.py @@ -21,7 +21,7 @@ REFERENCE_SPEC_GIT_PATH = ref_spec_7883.git_path 
REFERENCE_SPEC_VERSION = ref_spec_7883.version -pytestmark = pytest.mark.valid_at_transition_to("Osaka", subsequent_forks=True) +pytestmark = pytest.mark.valid_at_transition_to("Osaka") @pytest.mark.parametrize( diff --git a/tests/osaka/eip7939_count_leading_zeros/test_count_leading_zeros.py b/tests/osaka/eip7939_count_leading_zeros/test_count_leading_zeros.py index ec4e7ac17f..746e1794c3 100644 --- a/tests/osaka/eip7939_count_leading_zeros/test_count_leading_zeros.py +++ b/tests/osaka/eip7939_count_leading_zeros/test_count_leading_zeros.py @@ -284,7 +284,7 @@ def test_clz_push_operation_same_value( @EIPChecklist.Opcode.Test.ForkTransition.Invalid() @EIPChecklist.Opcode.Test.ForkTransition.At() -@pytest.mark.valid_at_transition_to("Osaka", subsequent_forks=True) +@pytest.mark.valid_at_transition_to("Osaka") def test_clz_fork_transition( blockchain_test: BlockchainTestFiller, pre: Alloc ) -> None: From a5546349d93eff3042f5bcc1cff4d77470bafa38 Mon Sep 17 00:00:00 2001 From: felipe Date: Fri, 23 Jan 2026 09:36:18 -0700 Subject: [PATCH 097/154] fix(tests): Remove bad opcode test; opt for test_all_opcodes coverage (#2070) - Remove hard-coded static test for undefined opcode first byte filler. This was causing issues with introduced opcodes in later forks. opt for coverage via ``test_all_opcodes`` instead. --- .../undefinedOpcodeFirstByteFiller.yml | 1407 ----------------- 1 file changed, 1407 deletions(-) delete mode 100644 tests/static/state_tests/stBadOpcode/undefinedOpcodeFirstByteFiller.yml diff --git a/tests/static/state_tests/stBadOpcode/undefinedOpcodeFirstByteFiller.yml b/tests/static/state_tests/stBadOpcode/undefinedOpcodeFirstByteFiller.yml deleted file mode 100644 index d76c80f7d8..0000000000 --- a/tests/static/state_tests/stBadOpcode/undefinedOpcodeFirstByteFiller.yml +++ /dev/null @@ -1,1407 +0,0 @@ -# Checks that behaviour of already deployed code, having invalid opcodes as the first byte. -# Execution is expected to end with an exception. 
-undefinedOpcodeFirstByte: - env: - currentCoinbase: 2adc25665018aa1fe0e6bc666dac8fc2697ff9ba - currentDifficulty: '0x020000' - currentGasLimit: '89128960' - currentNumber: '1' - currentTimestamp: '1000' - - pre: - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - balance: 1000000000 - code: '' - nonce: 0 - storage: {} - b94f5374fce5edbc8e2a8697c15331677e6ebf0b: - balance: 0 - code: | - :yul berlin - { - for { let opcode := 0 } lt(opcode, 256) { opcode := add(opcode, 1) } - { - let addr := shl(152, opcode) - if call(10000, addr, 0, 0, 0, 0, 0) { sstore(opcode, 1) } - } - sstore(256, 1) - } - nonce: 0 - storage: {} - 0000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x00' - nonce: 0 - storage: {} - 0100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x01' - nonce: 0 - storage: {} - 0200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x02' - nonce: 0 - storage: {} - 0300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x03' - nonce: 0 - storage: {} - 0400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x04' - nonce: 0 - storage: {} - 0500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x05' - nonce: 0 - storage: {} - 0600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x06' - nonce: 0 - storage: {} - 0700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x07' - nonce: 0 - storage: {} - 0800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x08' - nonce: 0 - storage: {} - 0900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x09' - nonce: 0 - storage: {} - 0a00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x0a' - nonce: 0 - storage: {} - 0b00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x0b' - nonce: 0 - storage: {} - 0c00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x0c' - nonce: 0 - storage: {} - 0d00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x0d' - nonce: 0 - storage: {} - 0e00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x0e' - nonce: 0 - storage: {} - 0f00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x0f' - nonce: 0 - storage: {} - - 1000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x10' - nonce: 0 - storage: {} - 1100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x11' - nonce: 0 - storage: {} - 1200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x12' - nonce: 0 - storage: {} - 1300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x13' - nonce: 0 - storage: {} - 1400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x14' - nonce: 0 - storage: {} - 1500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x15' - nonce: 0 - storage: {} - 1600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x16' - nonce: 0 - storage: {} - 1700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x17' - nonce: 0 - storage: {} - 1800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x18' - nonce: 0 - storage: {} - 1900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x19' - nonce: 0 - storage: {} - 1a00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x1a' - nonce: 0 - storage: {} - 1b00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x1b' - nonce: 0 - storage: {} - 1c00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x1c' - 
nonce: 0 - storage: {} - 1d00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x1d' - nonce: 0 - storage: {} - 1e00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x1e' - nonce: 0 - storage: {} - 1f00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x1f' - nonce: 0 - storage: {} - - 2000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x20' - nonce: 0 - storage: {} - 2100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x21' - nonce: 0 - storage: {} - 2200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x22' - nonce: 0 - storage: {} - 2300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x23' - nonce: 0 - storage: {} - 2400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x24' - nonce: 0 - storage: {} - 2500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x25' - nonce: 0 - storage: {} - 2600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x26' - nonce: 0 - storage: {} - 2700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x27' - nonce: 0 - storage: {} - 2800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x28' - nonce: 0 - storage: {} - 2900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x29' - nonce: 0 - storage: {} - 2a00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x2a' - nonce: 0 - storage: {} - 2b00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x2b' - nonce: 0 - storage: {} - 2c00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x2c' - nonce: 0 - storage: {} - 2d00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x2d' - nonce: 0 - storage: {} - 2e00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x2e' - nonce: 0 - storage: {} - 2f00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x2f' - nonce: 0 - storage: {} - - 3000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x30' - nonce: 0 - storage: {} - 3100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x31' - nonce: 0 - storage: {} - 3200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x32' - nonce: 0 - storage: {} - 3300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x33' - nonce: 0 - storage: {} - 3400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x34' - nonce: 0 - storage: {} - 3500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x35' - nonce: 0 - storage: {} - 3600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x36' - nonce: 0 - storage: {} - 3700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x37' - nonce: 0 - storage: {} - 3800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x38' - nonce: 0 - storage: {} - 3900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x39' - nonce: 0 - storage: {} - 3a00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x3a' - nonce: 0 - storage: {} - 3b00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x3b' - nonce: 0 - storage: {} - 3c00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x3c' - nonce: 0 - storage: {} - 3d00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x3d' - nonce: 0 - storage: {} - 3e00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x3e' - nonce: 0 - storage: {} - 3f00000000000000000000000000000000000000: - balance: 0 - 
code: ':raw 0x3f' - nonce: 0 - storage: {} - - 4000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x40' - nonce: 0 - storage: {} - 4100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x41' - nonce: 0 - storage: {} - 4200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x42' - nonce: 0 - storage: {} - 4300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x43' - nonce: 0 - storage: {} - 4400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x44' - nonce: 0 - storage: {} - 4500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x45' - nonce: 0 - storage: {} - 4600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x46' - nonce: 0 - storage: {} - 4700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x47' - nonce: 0 - storage: {} - 4800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x48' - nonce: 0 - storage: {} - 4900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x49' - nonce: 0 - storage: {} - 4a00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x4a' - nonce: 0 - storage: {} - 4b00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x4b' - nonce: 0 - storage: {} - 4c00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x4c' - nonce: 0 - storage: {} - 4d00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x4d' - nonce: 0 - storage: {} - 4e00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x4e' - nonce: 0 - storage: {} - 4f00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x4f' - nonce: 0 - storage: {} - - 5000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x50' - nonce: 0 - storage: {} - 5100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x51' - nonce: 0 - storage: {} - 5200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x52' - nonce: 0 - storage: {} - 5300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x53' - nonce: 0 - storage: {} - 5400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x54' - nonce: 0 - storage: {} - 5500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x55' - nonce: 0 - storage: {} - 5600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x56' - nonce: 0 - storage: {} - 5700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x57' - nonce: 0 - storage: {} - 5800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x58' - nonce: 0 - storage: {} - 5900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x59' - nonce: 0 - storage: {} - 5a00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x5a' - nonce: 0 - storage: {} - 5b00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x5b' - nonce: 0 - storage: {} - 5c00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x5c' - nonce: 0 - storage: {} - 5d00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x5d' - nonce: 0 - storage: {} - 5e00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x5e' - nonce: 0 - storage: {} - 5f00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x5f' - nonce: 0 - storage: {} - - 6000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x60' - nonce: 0 - storage: {} - 6100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x61' - nonce: 0 - storage: {} - 
6200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x62' - nonce: 0 - storage: {} - 6300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x63' - nonce: 0 - storage: {} - 6400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x64' - nonce: 0 - storage: {} - 6500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x65' - nonce: 0 - storage: {} - 6600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x66' - nonce: 0 - storage: {} - 6700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x67' - nonce: 0 - storage: {} - 6800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x68' - nonce: 0 - storage: {} - 6900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x69' - nonce: 0 - storage: {} - 6a00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x6a' - nonce: 0 - storage: {} - 6b00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x6b' - nonce: 0 - storage: {} - 6c00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x6c' - nonce: 0 - storage: {} - 6d00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x6d' - nonce: 0 - storage: {} - 6e00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x6e' - nonce: 0 - storage: {} - 6f00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x6f' - nonce: 0 - storage: {} - - 7000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x70' - nonce: 0 - storage: {} - 7100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x71' - nonce: 0 - storage: {} - 7200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x72' - nonce: 0 - storage: {} - 7300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x73' - nonce: 0 - storage: {} - 7400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x74' - nonce: 0 - storage: {} - 7500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x75' - nonce: 0 - storage: {} - 7600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x76' - nonce: 0 - storage: {} - 7700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x77' - nonce: 0 - storage: {} - 7800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x78' - nonce: 0 - storage: {} - 7900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x79' - nonce: 0 - storage: {} - 7a00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x7a' - nonce: 0 - storage: {} - 7b00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x7b' - nonce: 0 - storage: {} - 7c00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x7c' - nonce: 0 - storage: {} - 7d00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x7d' - nonce: 0 - storage: {} - 7e00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x7e' - nonce: 0 - storage: {} - 7f00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x7f' - nonce: 0 - storage: {} - - 8000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x80' - nonce: 0 - storage: {} - 8100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x81' - nonce: 0 - storage: {} - 8200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x82' - nonce: 0 - storage: {} - 8300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x83' - nonce: 0 - storage: {} - 8400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x84' - 
nonce: 0 - storage: {} - 8500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x85' - nonce: 0 - storage: {} - 8600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x86' - nonce: 0 - storage: {} - 8700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x87' - nonce: 0 - storage: {} - 8800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x88' - nonce: 0 - storage: {} - 8900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x89' - nonce: 0 - storage: {} - 8a00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x8a' - nonce: 0 - storage: {} - 8b00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x8b' - nonce: 0 - storage: {} - 8c00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x8c' - nonce: 0 - storage: {} - 8d00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x8d' - nonce: 0 - storage: {} - 8e00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x8e' - nonce: 0 - storage: {} - 8f00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x8f' - nonce: 0 - storage: {} - - 9000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x90' - nonce: 0 - storage: {} - 9100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x91' - nonce: 0 - storage: {} - 9200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x92' - nonce: 0 - storage: {} - 9300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x93' - nonce: 0 - storage: {} - 9400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x94' - nonce: 0 - storage: {} - 9500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x95' - nonce: 0 - storage: {} - 9600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x96' - nonce: 0 - storage: {} - 9700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x97' - nonce: 0 - storage: {} - 9800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x98' - nonce: 0 - storage: {} - 9900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x99' - nonce: 0 - storage: {} - 9a00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x9a' - nonce: 0 - storage: {} - 9b00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x9b' - nonce: 0 - storage: {} - 9c00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x9c' - nonce: 0 - storage: {} - 9d00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x9d' - nonce: 0 - storage: {} - 9e00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x9e' - nonce: 0 - storage: {} - 9f00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0x9f' - nonce: 0 - storage: {} - - - a000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xa0' - nonce: 0 - storage: {} - a100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xa1' - nonce: 0 - storage: {} - a200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xa2' - nonce: 0 - storage: {} - a300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xa3' - nonce: 0 - storage: {} - a400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xa4' - nonce: 0 - storage: {} - a500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xa5' - nonce: 0 - storage: {} - a600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xa6' - nonce: 0 - storage: {} - a700000000000000000000000000000000000000: - balance: 0 
- code: ':raw 0xa7' - nonce: 0 - storage: {} - a800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xa8' - nonce: 0 - storage: {} - a900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xa9' - nonce: 0 - storage: {} - aa00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xaa' - nonce: 0 - storage: {} - ab00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xab' - nonce: 0 - storage: {} - ac00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xac' - nonce: 0 - storage: {} - ad00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xad' - nonce: 0 - storage: {} - ae00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xae' - nonce: 0 - storage: {} - af00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xaf' - nonce: 0 - storage: {} - - b000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xb0' - nonce: 0 - storage: {} - b100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xb1' - nonce: 0 - storage: {} - b200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xb2' - nonce: 0 - storage: {} - b300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xb3' - nonce: 0 - storage: {} - b400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xb4' - nonce: 0 - storage: {} - b500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xb5' - nonce: 0 - storage: {} - b600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xb6' - nonce: 0 - storage: {} - b700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xb7' - nonce: 0 - storage: {} - b800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xb8' - nonce: 0 - storage: {} - b900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xb9' - nonce: 0 - storage: {} - ba00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xba' - nonce: 0 - storage: {} - bb00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xbb' - nonce: 0 - storage: {} - bc00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xbc' - nonce: 0 - storage: {} - bd00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xbd' - nonce: 0 - storage: {} - be00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xbe' - nonce: 0 - storage: {} - bf00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xbf' - nonce: 0 - storage: {} - - c000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xc0' - nonce: 0 - storage: {} - c100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xc1' - nonce: 0 - storage: {} - c200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xc2' - nonce: 0 - storage: {} - c300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xc3' - nonce: 0 - storage: {} - c400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xc4' - nonce: 0 - storage: {} - c500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xc5' - nonce: 0 - storage: {} - c600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xc6' - nonce: 0 - storage: {} - c700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xc7' - nonce: 0 - storage: {} - c800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xc8' - nonce: 0 - storage: {} - c900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xc9' - nonce: 0 - storage: {} - 
ca00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xca' - nonce: 0 - storage: {} - cb00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xcb' - nonce: 0 - storage: {} - cc00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xcc' - nonce: 0 - storage: {} - cd00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xcd' - nonce: 0 - storage: {} - ce00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xce' - nonce: 0 - storage: {} - cf00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xcf' - nonce: 0 - storage: {} - - d000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xd0' - nonce: 0 - storage: {} - d100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xd1' - nonce: 0 - storage: {} - d200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xd2' - nonce: 0 - storage: {} - d300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xd3' - nonce: 0 - storage: {} - d400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xd4' - nonce: 0 - storage: {} - d500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xd5' - nonce: 0 - storage: {} - d600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xd6' - nonce: 0 - storage: {} - d700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xd7' - nonce: 0 - storage: {} - d800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xd8' - nonce: 0 - storage: {} - d900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xd9' - nonce: 0 - storage: {} - da00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xda' - nonce: 0 - storage: {} - db00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xdb' - nonce: 0 - storage: {} - dc00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xdc' - nonce: 0 - storage: {} - dd00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xdd' - nonce: 0 - storage: {} - de00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xde' - nonce: 0 - storage: {} - df00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xdf' - nonce: 0 - storage: {} - - e000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xe0' - nonce: 0 - storage: {} - e100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xe1' - nonce: 0 - storage: {} - e200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xe2' - nonce: 0 - storage: {} - e300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xe3' - nonce: 0 - storage: {} - e400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xe4' - nonce: 0 - storage: {} - e500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xe5' - nonce: 0 - storage: {} - e600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xe6' - nonce: 0 - storage: {} - e700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xe7' - nonce: 0 - storage: {} - e800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xe8' - nonce: 0 - storage: {} - e900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xe9' - nonce: 0 - storage: {} - ea00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xea' - nonce: 0 - storage: {} - eb00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xeb' - nonce: 0 - storage: {} - ec00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xec' - 
nonce: 0 - storage: {} - ed00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xed' - nonce: 0 - storage: {} - ee00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xee' - nonce: 0 - storage: {} - ef00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xef' - nonce: 0 - storage: {} - - f000000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xf0' - nonce: 0 - storage: {} - f100000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xf1' - nonce: 0 - storage: {} - f200000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xf2' - nonce: 0 - storage: {} - f300000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xf3' - nonce: 0 - storage: {} - f400000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xf4' - nonce: 0 - storage: {} - f500000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xf5' - nonce: 0 - storage: {} - f600000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xf6' - nonce: 0 - storage: {} - f700000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xf7' - nonce: 0 - storage: {} - f800000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xf8' - nonce: 0 - storage: {} - f900000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xf9' - nonce: 0 - storage: {} - fa00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xfa' - nonce: 0 - storage: {} - fb00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xfb' - nonce: 0 - storage: {} - fc00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xfc' - nonce: 0 - storage: {} - fd00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xfd' - nonce: 0 - storage: {} - fe00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xfe' - nonce: 0 - storage: {} - ff00000000000000000000000000000000000000: - balance: 0 - code: ':raw 0xff' - nonce: 0 - storage: {} - - - transaction: - data: - - '' - gasLimit: - - 16777216 - gasPrice: 10 - nonce: 0 - secretKey: 45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8 - to: 'b94f5374fce5edbc8e2a8697c15331677e6ebf0b' - value: - - 0 - - expect: - - - network: - - '>=Cancun' - result: - a94f5374fce5edbc8e2a8697c15331677e6ebf0b: - nonce: 1 - b94f5374fce5edbc8e2a8697c15331677e6ebf0b: - storage: - '0x00': 1 # STOP - '0x30': 1 # ADDRESS - '0x32': 1 # ORIGIN - '0x33': 1 # CALLER - '0x34': 1 # CALLVALUE - '0x36': 1 # CALLDATASIZE - '0x38': 1 # CODESIZE - '0x3a': 1 # GASPRICE - '0x3d': 1 # RETURNDATASIZE - '0x41': 1 # COINBASE - '0x42': 1 # TIMESTAMP - '0x43': 1 # NUMBER - '0x44': 1 # DIFFICULTY - '0x45': 1 # GASLIMIT - '0x46': 1 # CHAINID - '0x47': 1 # SELFBALANCE - '0x48': 1 # BASEFEE - '0x4a': 1 # BEACON_ROOT - '0x58': 1 # PC - '0x59': 1 # MSIZE - '0x5A': 1 # GAS - '0x5B': 1 # JUMPDEST - - '0x5F': 1 # PUSH0 - '0x60': 1 # PUSHx ... 
- '0x61': 1 - '0x62': 1 - '0x63': 1 - '0x64': 1 - '0x65': 1 - '0x66': 1 - '0x67': 1 - '0x68': 1 - '0x69': 1 - '0x6a': 1 - '0x6b': 1 - '0x6c': 1 - '0x6d': 1 - '0x6e': 1 - '0x6f': 1 - '0x70': 1 - '0x71': 1 - '0x72': 1 - '0x73': 1 - '0x74': 1 - '0x75': 1 - '0x76': 1 - '0x77': 1 - '0x78': 1 - '0x79': 1 - '0x7a': 1 - '0x7b': 1 - '0x7c': 1 - '0x7d': 1 - '0x7e': 1 - '0x7f': 1 - - '0x100': 1 From fd9789f9469563d8023fde6aa646fdc51cc9e540 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Fri, 23 Jan 2026 18:13:53 +0100 Subject: [PATCH 098/154] feat(ci): Run static checks on all generated branches (#2071) --- .github/actions/merge-eip-branches/action.yaml | 3 +++ .github/actions/rebase-eip-branch/action.yaml | 3 +++ .github/workflows/eip-rebase.yaml | 3 +++ .github/workflows/update-devnet-branch.yaml | 3 +++ 4 files changed, 12 insertions(+) diff --git a/.github/actions/merge-eip-branches/action.yaml b/.github/actions/merge-eip-branches/action.yaml index 1a0b9afb64..3de3ecb5f2 100644 --- a/.github/actions/merge-eip-branches/action.yaml +++ b/.github/actions/merge-eip-branches/action.yaml @@ -113,6 +113,9 @@ runs: fi done + echo "Running static checks on merged branch" + uvx --with=tox-uv tox -e static + echo "All EIP branches merged successfully" - name: Push devnet branch diff --git a/.github/actions/rebase-eip-branch/action.yaml b/.github/actions/rebase-eip-branch/action.yaml index 37bbec6946..82eeddbe7f 100644 --- a/.github/actions/rebase-eip-branch/action.yaml +++ b/.github/actions/rebase-eip-branch/action.yaml @@ -57,6 +57,9 @@ runs: exit 1 fi + echo "Running static checks on rebased branch" + uvx --with=tox-uv tox -e static + echo "Rebase successful" - name: Push rebased branch diff --git a/.github/workflows/eip-rebase.yaml b/.github/workflows/eip-rebase.yaml index 01c486a25c..fbfdd1ad47 100644 --- a/.github/workflows/eip-rebase.yaml +++ b/.github/workflows/eip-rebase.yaml @@ -29,6 +29,9 @@ jobs: fetch-depth: 0 token: ${{ secrets.GITHUB_TOKEN }} + - name: Setup uv + uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1 + - name: Rebase EIP branch onto fork uses: ./.github/actions/rebase-eip-branch with: diff --git a/.github/workflows/update-devnet-branch.yaml b/.github/workflows/update-devnet-branch.yaml index 73f2229252..1655e5e00f 100644 --- a/.github/workflows/update-devnet-branch.yaml +++ b/.github/workflows/update-devnet-branch.yaml @@ -36,6 +36,9 @@ jobs: fetch-depth: 0 token: ${{ secrets.GITHUB_TOKEN }} + - name: Setup uv + uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1 + - name: Merge EIP branches into devnet uses: ./.github/actions/merge-eip-branches with: From c3813a57a01cd28d3ca1401f833cc5b1bc82b0dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=94=A1=E4=BD=B3=E8=AA=A0=20Louis=20Tsai?= <72684086+LouisTsai-Csie@users.noreply.github.com> Date: Sat, 24 Jan 2026 07:31:10 +0800 Subject: [PATCH 099/154] feat(test-benchmark): Introduce create2 helper for address computation (#1996) --- .../testing/src/execution_testing/__init__.py | 2 + .../src/execution_testing/tools/__init__.py | 2 + .../tools/tools_code/__init__.py | 2 + .../tools/tools_code/generators.py | 77 ++++++++++++++++++ .../compute/instruction/test_system.py | 18 +++-- .../scenario/test_unchunkified_bytecode.py | 21 +++-- .../test_extcodesize_bytecode_sizes.py | 34 +++++--- .../stateful/bloatnet/test_multi_opcode.py | 80 ++++++++----------- 8 files changed, 160 insertions(+), 76 deletions(-) diff --git a/packages/testing/src/execution_testing/__init__.py 
b/packages/testing/src/execution_testing/__init__.py index b10c00e353..05f4914c83 100644 --- a/packages/testing/src/execution_testing/__init__.py +++ b/packages/testing/src/execution_testing/__init__.py @@ -87,6 +87,7 @@ Case, CodeGasMeasure, Conditional, + Create2PreimageLayout, DeploymentTestType, Initcode, ParameterSet, @@ -199,6 +200,7 @@ "compute_create_address", "compute_create2_address", "compute_deterministic_create2_address", + "Create2PreimageLayout", "extend_with_defaults", "gas_test", "generate_system_contract_deploy_test", diff --git a/packages/testing/src/execution_testing/tools/__init__.py b/packages/testing/src/execution_testing/tools/__init__.py index d47d964305..ce9268fcb7 100644 --- a/packages/testing/src/execution_testing/tools/__init__.py +++ b/packages/testing/src/execution_testing/tools/__init__.py @@ -8,6 +8,7 @@ Case, CodeGasMeasure, Conditional, + Create2PreimageLayout, Initcode, Switch, While, @@ -31,6 +32,7 @@ "ParameterSet", "Switch", "While", + "Create2PreimageLayout", "extend_with_defaults", "gas_test", "generate_system_contract_deploy_test", diff --git a/packages/testing/src/execution_testing/tools/tools_code/__init__.py b/packages/testing/src/execution_testing/tools/tools_code/__init__.py index 1ef17fd240..44092c59bd 100644 --- a/packages/testing/src/execution_testing/tools/tools_code/__init__.py +++ b/packages/testing/src/execution_testing/tools/tools_code/__init__.py @@ -5,6 +5,7 @@ Case, CodeGasMeasure, Conditional, + Create2PreimageLayout, Initcode, Switch, While, @@ -22,4 +23,5 @@ "While", "Yul", "YulCompiler", + "Create2PreimageLayout", ) diff --git a/packages/testing/src/execution_testing/tools/tools_code/generators.py b/packages/testing/src/execution_testing/tools/tools_code/generators.py index 6d7985bfd9..cf7e2c4d58 100644 --- a/packages/testing/src/execution_testing/tools/tools_code/generators.py +++ b/packages/testing/src/execution_testing/tools/tools_code/generators.py @@ -393,3 +393,80 @@ def __new__( instance.default_action = default_action instance.cases = cases return instance + + +class Create2PreimageLayout(Bytecode): + """ + Set up the preimage in memory for CREATE2 address computation. + + Creates the standard memory layout required to compute a CREATE2 address + using keccak256(0xFF ++ factory_address ++ salt ++ init_code_hash). + + Memory layout after execution: + - MEM[offset + 0: offset + 32] = zero padding + factory_address (20 bytes) + - MEM[offset + 11] = 0xFF prefix byte + - MEM[offset + 32: offset + 64] = salt (32 bytes) + - MEM[offset + 64: offset + 96] = init_code_hash (32 bytes) + + To compute the CREATE2 address, use: `.address_op` or + `Op.SHA3(offset + 11, 85)`. + The resulting hash's lower 20 bytes (bytes 12-31) form the address. + """ + + offset: int = 0 + + def __new__( + cls, + *, + factory_address: int | bytes | Bytecode, + salt: int | bytes | Bytecode, + init_code_hash: int | bytes | Bytecode, + offset: int = 0, + old_memory_size: int = 0, + ) -> Self: + """ + Assemble the bytecode that sets up the memory layout for CREATE2 + address computation. 
+ """ + required_size = offset + 96 + new_memory_size = max(old_memory_size, required_size) + bytecode = ( + Op.MSTORE(offset=offset, value=factory_address) + + Op.MSTORE8(offset=offset + 11, value=0xFF) + + Op.MSTORE(offset=offset + 32, value=salt) + + Op.MSTORE( + offset=offset + 64, + value=init_code_hash, + # Gas accounting + old_memory_size=old_memory_size, + new_memory_size=new_memory_size, + ) + ) + instance = super().__new__(cls, bytecode) + instance.offset = offset + return instance + + @property + def salt_offset(self) -> int: + """ + Return the salt memory offset of the preimage. + """ + return self.offset + 32 + + def address_op(self) -> Bytecode: + """ + Return the bytecode that computes the CREATE2 address. + """ + return Op.SHA3( + offset=self.offset + 11, + size=85, + # Gas accounting + data_size=85, + ) + + def increment_salt_op(self, increment: int = 1) -> Bytecode: + """Return the bytecode that increments the current salt.""" + return Op.MSTORE( + self.salt_offset, + Op.ADD(Op.MLOAD(self.salt_offset), increment), + ) diff --git a/tests/benchmark/compute/instruction/test_system.py b/tests/benchmark/compute/instruction/test_system.py index 6433f0eaa1..2c3667fdb5 100644 --- a/tests/benchmark/compute/instruction/test_system.py +++ b/tests/benchmark/compute/instruction/test_system.py @@ -22,6 +22,7 @@ BenchmarkTestFiller, Block, Bytecode, + Create2PreimageLayout, Environment, ExtCallGenerator, Fork, @@ -387,16 +388,17 @@ def test_selfdestruct_existing( ) code = ( - # Setup memory for later CREATE2 address generation loop. - # 0xFF+[Address(20bytes)]+[seed(32bytes)]+[initcode keccak(32bytes)] - Op.MSTORE(0, factory_address) - + Op.MSTORE8(32 - 20 - 1, 0xFF) - + Op.MSTORE(32, Op.CALLDATALOAD(0)) # Starting address from calldata - + Op.MSTORE(64, initcode.keccak256()) + ( + create2_preimage := Create2PreimageLayout( + factory_address=factory_address, + salt=Op.CALLDATALOAD(0), + init_code_hash=initcode.keccak256(), + ) + ) # Main loop + While( - body=Op.POP(Op.CALL(address=Op.SHA3(32 - 20 - 1, 85))) - + Op.MSTORE(32, Op.ADD(Op.MLOAD(32), 1)), + body=Op.POP(Op.CALL(address=create2_preimage.address_op())) + + create2_preimage.increment_salt_op(), # Loop while we have enough gas AND within target count condition=Op.GT(Op.GAS, final_storage_gas + loop_cost), ) diff --git a/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py b/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py index 5e60d85a2a..a0e9c9c413 100644 --- a/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py +++ b/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py @@ -13,6 +13,7 @@ Block, BlockchainTestFiller, Bytecode, + Create2PreimageLayout, Fork, Hash, Op, @@ -109,27 +110,25 @@ def test_unchunkified_bytecode( ) post[deployed_contract_address] = Account(nonce=1) + create2_preimage = Create2PreimageLayout( + factory_address=factory_address, + salt=Op.CALLDATALOAD(0), + init_code_hash=initcode.keccak256(), + ) attack_call = Bytecode() if opcode == Op.EXTCODECOPY: attack_call = Op.EXTCODECOPY( - address=Op.SHA3(32 - 20 - 1, 85), dest_offset=96, size=1000 + address=create2_preimage.address_op(), dest_offset=96, size=1000 ) else: # For the rest of the opcodes, we can use the same generic attack call # since all only minimally need the `address` of the target. - attack_call = Op.POP(opcode(address=Op.SHA3(32 - 20 - 1, 85))) + attack_call = Op.POP(opcode(address=create2_preimage.address_op())) attack_code = ( - # Setup memory for later CREATE2 address generation loop. 
- # 0xFF+[Address(20bytes)]+[seed(32bytes)]+[initcode keccak(32bytes)] - Op.MSTORE(0, factory_address) - + Op.MSTORE8(32 - 20 - 1, 0xFF) - + Op.MSTORE( - 32, Op.CALLDATALOAD(0) - ) # Calldata is the starting value of the CREATE2 salt - + Op.MSTORE(64, initcode.keccak256()) + create2_preimage # Main loop + While( - body=attack_call + Op.MSTORE(32, Op.ADD(Op.MLOAD(32), 1)), + body=attack_call + create2_preimage.increment_salt_op(), ) ) diff --git a/tests/benchmark/stateful/bloatnet/test_extcodesize_bytecode_sizes.py b/tests/benchmark/stateful/bloatnet/test_extcodesize_bytecode_sizes.py index 42da6314c2..83812420f7 100644 --- a/tests/benchmark/stateful/bloatnet/test_extcodesize_bytecode_sizes.py +++ b/tests/benchmark/stateful/bloatnet/test_extcodesize_bytecode_sizes.py @@ -71,6 +71,7 @@ BlockchainTestFiller, Bytecode, Conditional, + Create2PreimageLayout, Op, Storage, Transaction, @@ -114,7 +115,9 @@ def build_attack_contract(factory_address: Address) -> Bytecode: - MEM[64-95] = init_code_hash (32 bytes) """ gas_reserve = 50_000 # Reserve for 2x SSTORE + cleanup - + num_deployed_offset = 96 + init_code_hash_offset = num_deployed_offset + 32 + return_size = 64 return ( # Call factory.getConfig() -> (num_deployed, init_code_hash) Conditional( @@ -123,26 +126,35 @@ def build_attack_contract(factory_address: Address) -> Bytecode: address=factory_address, args_offset=0, args_size=0, - ret_offset=96, # MEM[96]=num_deployed, MEM[128]=init_code_hash - ret_size=64, + # MEM[num_deployed_offset]=num_deployed + # MEM[num_deployed_offset + 32]=init_code_hash + ret_offset=num_deployed_offset, + ret_size=return_size, ), if_false=Op.REVERT(0, 0), ) - # Setup CREATE2 memory: keccak256(0xFF ++ factory ++ salt ++ hash) - + Op.MSTORE(0, factory_address) - + Op.MSTORE8(11, 0xFF) - + Op.MSTORE(32, Op.SLOAD(0)) # Load salt directly to memory - + Op.MSTORE(64, Op.MLOAD(128)) # init_code_hash + + ( + create2_preimage := Create2PreimageLayout( + factory_address=factory_address, + salt=Op.SLOAD(0), + init_code_hash=Op.MLOAD(init_code_hash_offset), + old_memory_size=num_deployed_offset + return_size, + ) + ) + Op.MSTORE(160, 0) # Initialize last_size + While( body=( - Op.MSTORE(160, Op.EXTCODESIZE(Op.SHA3(11, 85))) - + Op.MSTORE(32, Op.ADD(Op.MLOAD(32), 1)) + Op.MSTORE(160, Op.EXTCODESIZE(create2_preimage.address_op())) + + create2_preimage.increment_salt_op() ), condition=( Op.AND( Op.GT(Op.GAS, gas_reserve), - Op.GT(Op.MLOAD(96), Op.MLOAD(32)), # num_deployed > salt + # num_deployed > salt + Op.GT( + Op.MLOAD(num_deployed_offset), + Op.MLOAD(create2_preimage.salt_offset), + ), ) ), ) diff --git a/tests/benchmark/stateful/bloatnet/test_multi_opcode.py b/tests/benchmark/stateful/bloatnet/test_multi_opcode.py index a19be48830..691d39b46c 100755 --- a/tests/benchmark/stateful/bloatnet/test_multi_opcode.py +++ b/tests/benchmark/stateful/bloatnet/test_multi_opcode.py @@ -13,6 +13,7 @@ Block, BlockchainTestFiller, Bytecode, + Create2PreimageLayout, Fork, Op, Transaction, @@ -175,31 +176,25 @@ def test_bloatnet_balance_extcodesize( # Load results from memory # Memory[96:128] = num_deployed_contracts # Memory[128:160] = init_code_hash - + Op.MLOAD(96) # Load num_deployed_contracts - + Op.MLOAD(128) # Load init_code_hash - # Setup memory for CREATE2 address generation - # Memory layout at 0: 0xFF + factory_addr(20) + salt(32) + hash(32) - + Op.MSTORE( - 0, factory_address - ) # Store factory address at memory position 0 - + Op.MSTORE8(11, 0xFF) # Store 0xFF prefix at position (32 - 20 - 1) - + Op.MSTORE(32, 0) # Store salt at 
position 32 - # Stack now has: [num_contracts, init_code_hash] - + Op.PUSH1(64) # Push memory position - + Op.MSTORE # Store init_code_hash at memory[64] - # Stack now has: [num_contracts] + + Op.MLOAD(96) # Load num_deployed_contracts to stack + + ( + create2_preimage := Create2PreimageLayout( + factory_address=factory_address, + salt=0, + init_code_hash=Op.MLOAD(128), + ) + ) # Main attack loop - iterate through all deployed contracts + While( body=( # Generate CREATE2 addr: keccak256(0xFF+factory+salt+hash) - Op.SHA3(11, 85) # Generate CREATE2 address from memory[11:96] + # Hash CREATE2 address from memory + create2_preimage.address_op() # The address is now on the stack + Op.DUP1 # Duplicate for second operation + benchmark_ops # Execute operations in specified order # Increment salt for next iteration - + Op.MSTORE( - 32, Op.ADD(Op.MLOAD(32), 1) - ) # Increment and store salt + + create2_preimage.increment_salt_op() ), # Continue while we haven't reached the limit condition=Op.DUP1 @@ -372,31 +367,24 @@ def test_bloatnet_balance_extcodecopy( # Load results from memory # Memory[96:128] = num_deployed_contracts # Memory[128:160] = init_code_hash - + Op.MLOAD(96) # Load num_deployed_contracts - + Op.MLOAD(128) # Load init_code_hash - # Setup memory for CREATE2 address generation - # Memory layout at 0: 0xFF + factory_addr(20) + salt(32) + hash(32) - + Op.MSTORE( - 0, factory_address - ) # Store factory address at memory position 0 - + Op.MSTORE8(11, 0xFF) # Store 0xFF prefix at position (32 - 20 - 1) - + Op.MSTORE(32, 0) # Store salt at position 32 - # Stack now has: [num_contracts, init_code_hash] - + Op.PUSH1(64) # Push memory position - + Op.MSTORE # Store init_code_hash at memory[64] - # Stack now has: [num_contracts] + + Op.MLOAD(96) # Load num_deployed_contracts to stack + + ( + create2_preimage := Create2PreimageLayout( + factory_address=factory_address, + salt=0, + init_code_hash=Op.MLOAD(128), + ) + ) # Main attack loop - iterate through all deployed contracts + While( body=( - # Generate CREATE2 address - Op.SHA3(11, 85) # Generate CREATE2 address from memory[11:96] + # Hash CREATE2 address + create2_preimage.address_op() # The address is now on the stack + Op.DUP1 # Duplicate for later operations + benchmark_ops # Execute operations in specified order # Increment salt for next iteration - + Op.MSTORE( - 32, Op.ADD(Op.MLOAD(32), 1) - ) # Increment and store salt + + create2_preimage.increment_salt_op() ), # Continue while counter > 0 condition=Op.DUP1 @@ -554,23 +542,23 @@ def test_bloatnet_balance_extcodehash( + Op.PUSH2(0x1000) # Jump to error handler if failed + Op.JUMPI # Load results from memory - + Op.MLOAD(96) # Load num_deployed_contracts - + Op.MLOAD(128) # Load init_code_hash - # Setup memory for CREATE2 address generation - + Op.MSTORE(0, factory_address) - + Op.MSTORE8(11, 0xFF) - + Op.MSTORE(32, 0) # Initial salt - + Op.PUSH1(64) - + Op.MSTORE # Store init_code_hash + + Op.MLOAD(96) # Load num_deployed_contracts to stack + + ( + create2_preimage := Create2PreimageLayout( + factory_address=factory_address, + salt=0, + init_code_hash=Op.MLOAD(128), + ) + ) # Main attack loop + While( body=( - # Generate CREATE2 address - Op.SHA3(11, 85) + # Hash CREATE2 address + create2_preimage.address_op() + Op.DUP1 # Duplicate for second operation + benchmark_ops # Execute operations in specified order # Increment salt - + Op.MSTORE(32, Op.ADD(Op.MLOAD(32), 1)) + + create2_preimage.increment_salt_op() ), condition=Op.DUP1 + Op.PUSH1(1) From 
58b4bb0674d50b31a805ea078afdc2f465cfcd59 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Bylica?= Date: Mon, 26 Jan 2026 15:04:03 +0100 Subject: [PATCH 100/154] refactor(test-tests): Port create collision tests (#2031) --- .../test_revert_in_create.py | 168 ++++++++++++++++++ ...evertInCreateInInitCreate2ParisFiller.json | 60 ------- .../create2collisionStorageParisFiller.json | 149 ---------------- 3 files changed, 168 insertions(+), 209 deletions(-) create mode 100644 tests/paris/eip7610_create_collision/test_revert_in_create.py delete mode 100644 tests/static/state_tests/stCreate2/RevertInCreateInInitCreate2ParisFiller.json delete mode 100644 tests/static/state_tests/stCreate2/create2collisionStorageParisFiller.json diff --git a/tests/paris/eip7610_create_collision/test_revert_in_create.py b/tests/paris/eip7610_create_collision/test_revert_in_create.py new file mode 100644 index 0000000000..f8834ea81e --- /dev/null +++ b/tests/paris/eip7610_create_collision/test_revert_in_create.py @@ -0,0 +1,168 @@ +""" +Test CREATE/CREATE2 collision scenarios with pre-existing storage per EIP-7610. +""" + +import pytest +from execution_testing import ( + Account, + Alloc, + Bytecode, + Initcode, + Op, + StateTestFiller, + Transaction, + compute_create2_address, +) + +REFERENCE_SPEC_GIT_PATH = "EIPS/eip-7610.md" +REFERENCE_SPEC_VERSION = "80ef48d0bbb5a4939ade51caaaac57b5df6acd4e" + +pytestmark = [ + pytest.mark.valid_from("Paris"), + # We need to modify the pre-alloc to include the collision + pytest.mark.pre_alloc_modify, +] + + +@pytest.mark.ported_from( + [ + "https://github.com/ethereum/tests/tree/v13.3/src/GeneralStateTestsFiller/stCreate2/RevertInCreateInInitCreate2ParisFiller.json", # noqa: E501 + ], + pr=["https://github.com/ethereum/execution-specs/pull/2031"], +) +def test_collision_with_create2_revert_in_initcode( + state_test: StateTestFiller, + pre: Alloc, +) -> None: + """ + Test that a CREATE transaction collision with pre-existing storage causes + the transaction to fail, even when the initcode would perform CREATE2 with + reverting inner initcode. + + The initcode (if it were to run) would: + 1. Execute CREATE2 with inner initcode that reverts with 32 bytes of data + 2. Store RETURNDATASIZE to storage slot 0 + 3. Copy returndata to memory and store to slot 1 + + Since there's a collision (pre-existing storage), the CREATE TX should fail + and the pre-existing account should remain unchanged. + """ + inner_initcode = Op.MSTORE(0, 0x112233) + Op.REVERT(0, 32) + + initcode = ( + Op.MSTORE(0, Op.PUSH32(bytes(inner_initcode).ljust(32, b"\0"))) + + Op.CREATE2(value=0, offset=0, size=len(inner_initcode), salt=0) + + Op.SSTORE(0, Op.RETURNDATASIZE) + + Op.RETURNDATACOPY(0, 0, 32) + + Op.SSTORE(1, Op.MLOAD(0)) + + Op.STOP + ) + + sender = pre.fund_eoa() + tx = Transaction( + sender=sender, + to=None, + data=initcode, + gas_limit=10_000_000, + ) + + # Pre-existing account with storage - this causes collision per EIP-7610. 
+ pre[tx.created_contract] = Account( + balance=10, + storage={0x00: 0x01}, + ) + + state_test( + pre=pre, + post={ + (tx.created_contract): Account( + balance=10, + nonce=0, + storage={0x00: 0x01}, + ), + }, + tx=tx, + ) + + +@pytest.mark.ported_from( + [ + "https://github.com/ethereum/tests/tree/v13.3/src/GeneralStateTestsFiller/stCreate2/create2collisionStorageParisFiller.json", # noqa: E501 + ], + pr=["https://github.com/ethereum/execution-specs/pull/2031"], +) +@pytest.mark.parametrize( + "create2_initcode", + [ + pytest.param(b"", id="empty-initcode"), + pytest.param(Op.SSTORE(1, 1), id="sstore-initcode"), + pytest.param( + Initcode(deploy_code=Op.SSTORE(1, 1)), + id="initcode-with-deploy", + ), + ], +) +def test_create2_collision_storage( + state_test: StateTestFiller, + pre: Alloc, + create2_initcode: Bytecode, +) -> None: + """ + Test that CREATE2 fails when targeting an address with pre-existing + storage. + + A CREATE transaction deploys a contract that executes CREATE2. The CREATE2 + target address has pre-existing storage, which should cause the CREATE2 to + fail per EIP-7610. The deployer stores the CREATE2 result to slot 0 (0 on + failure). + """ + deployer_code = ( + Op.MSTORE(0, Op.PUSH32(bytes(create2_initcode).ljust(32, b"\0"))) + + Op.SSTORE( + 0, + Op.CREATE2(value=0, offset=0, size=len(create2_initcode), salt=0), + ) + + Op.STOP + ) + + sender = pre.fund_eoa() + tx = Transaction( + sender=sender, + to=None, + data=deployer_code, + value=1, + gas_limit=400_000, + ) + + deployer_address = tx.created_contract + + create2_address = compute_create2_address( + address=deployer_address, + salt=0, + initcode=create2_initcode, + ) + + pre[create2_address] = Account( + balance=10, + storage={0x00: 0x01}, + ) + + state_test( + pre=pre, + post={ + # CREATE2 target unchanged due to collision + create2_address: Account( + balance=10, + nonce=0, + storage={0x00: 0x01}, + ), + # Deployer: nonce=2 (1 for creation + 1 for failed CREATE2 attempt) + # storage[0]=0 indicates CREATE2 returned 0 (failure) + deployer_address: Account( + balance=1, + nonce=2, + storage={0x00: 0x00}, + ), + }, + tx=tx, + ) diff --git a/tests/static/state_tests/stCreate2/RevertInCreateInInitCreate2ParisFiller.json b/tests/static/state_tests/stCreate2/RevertInCreateInInitCreate2ParisFiller.json deleted file mode 100644 index b83a17e9de..0000000000 --- a/tests/static/state_tests/stCreate2/RevertInCreateInInitCreate2ParisFiller.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "RevertInCreateInInitCreate2Paris" : { - "_info" : { - "comment" : "RevertInCreateInInit for CREATE2" - }, - "env" : { - "currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", - "currentDifficulty" : "0x20000", - "currentGasLimit" : "0x0b00000000", - "currentNumber" : "0x01", - "currentTimestamp" : "0x03e8" - }, - "expect" : [ - { - "network" : [">=Cancun=Cancun"], - "result" : { - "e2b35478fdd26477cc576dd906e6277761246a3c" : { - "balance" : "10", - "nonce" : "0", - "storage" : { - "0x00" : "0x01" - } - }, - "6295ee1b4f6dd65047762f924ecd367c17eabf8f" : { - "balance" : "1", - "nonce" : "2", - "storage" : { - "0x00" : "0x00" - } - }, - "a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : { - "nonce" : "1" - } - } - }, - { - "indexes" : { - "data" : 1, - "gas" : -1, - "value" : -1 - }, - "network" : [">=Cancun"], - "result" : { - "af3ecba2fe09a4f6c19f16a9d119e44e08c2da01" : { - "balance" : "10", - "nonce" : "0", - "code" : "0x", - "storage" : { - "0x00" : "0x01" - } - }, - "6295ee1b4f6dd65047762f924ecd367c17eabf8f" : { - "balance" : "1", - "nonce" : 
"2", - "storage" : { - "0x00" : "0x00" - } - }, - "a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : { - "nonce" : "1" - } - } - }, - { - "indexes" : { - "data" : 2, - "gas" : -1, - "value" : -1 - }, - "network" : [">=Cancun"], - "result" : { - "ec2c6832d00680ece8ff9254f81fdab0a5a2ac50" : { - "balance" : "10", - "nonce" : "0", - "code" : "0x", - "storage" : { - "0x00" : "0x01" - } - }, - "6295ee1b4f6dd65047762f924ecd367c17eabf8f" : { - "balance" : "1", - "nonce" : "2", - "storage" : { - "0x00" : "0x00" - } - }, - "a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : { - "nonce" : "1" - } - } - } - ], - "pre" : { - "a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : { - "balance" : "1000000000000000000", - "code" : "", - "nonce" : "0", - "storage" : { - } - }, - "e2b35478fdd26477cc576dd906e6277761246a3c" : { - "balance" : "10", - "code" : "0x", - "nonce" : "0", - "storage" : { - "0x00" : "0x01" - } - }, - "af3ecba2fe09a4f6c19f16a9d119e44e08c2da01" : { - "balance" : "10", - "code" : "0x", - "nonce" : "0", - "storage" : { - "0x00" : "0x01" - } - }, - "ec2c6832d00680ece8ff9254f81fdab0a5a2ac50" : { - "balance" : "10", - "code" : "0x", - "nonce" : "0", - "storage" : { - "0x00" : "0x01" - } - } - }, - "transaction" : { - "data" : [ - "{ (CREATE2 0 0 0 0) }", - "{ (MSTORE 0 0x6001600155) (CREATE2 0 27 5 0) }", - "{ (MSTORE 0 0x6460016001556000526005601bf3) (CREATE2 0 18 14 0) }" - ], - "gasLimit" : [ - "400000" - ], - "gasPrice" : "10", - "nonce" : "0", - "secretKey" : "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8", - "to" : "", - "value" : [ - "1" - ] - } - } -} From c6c380acffa586cfd683384fb2e09348e9a97c03 Mon Sep 17 00:00:00 2001 From: Guruprasad Kamath <48196632+gurukamath@users.noreply.github.com> Date: Mon, 26 Jan 2026 22:22:23 +0100 Subject: [PATCH 101/154] feat(tests): add BAL tests that dequeue consolidations (#2076) --- docs/CHANGELOG.md | 1 + .../test_block_access_lists_eip7251.py | 185 ++++++++++++++++++ .../test_cases.md | 2 +- 3 files changed, 187 insertions(+), 1 deletion(-) create mode 100644 tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7251.py diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 3f1e21902e..0f2bab30b2 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -39,6 +39,7 @@ Test fixtures for use by clients are available for each release on the [Github r - ✨ New EIP-7702 test cases added ([#1974](https://github.com/ethereum/execution-specs/pull/1974)). - ✨ Add missing benchmark configurations / opcode to benchmark tests for repricing analysis([#2006](https://github.com/ethereum/execution-specs/pull/2006)). - ✨ Port STATICCALL to CALL tests with zero and non-zero value transfer from `tests/static`, extending coverage with `pytest.mark.with_all_precompiles` ([#1960](https://github.com/ethereum/execution-specs/pull/1960)). +- ✨ Add BAL tests that dequeue EIP-7251 consolidation requests. ([#2076](https://github.com/ethereum/execution-specs/pull/2076)). 
## [v5.4.0](https://github.com/ethereum/execution-spec-tests/releases/tag/v5.4.0) - 2025-12-07 diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7251.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7251.py new file mode 100644 index 0000000000..b2cec1d28a --- /dev/null +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7251.py @@ -0,0 +1,185 @@ +"""Tests for the effects of EIP-7251 beacon roots on EIP-7928.""" + +from typing import List + +import pytest +from execution_testing import ( + Address, + Alloc, + BalAccountExpectation, + BalStorageChange, + BalStorageSlot, + Block, + BlockAccessListExpectation, + BlockchainTestFiller, + Environment, +) + +from tests.prague.eip7251_consolidations.helpers import ( + ConsolidationRequest, + ConsolidationRequestTransaction, +) +from tests.prague.eip7251_consolidations.spec import Spec, ref_spec_7251 + +REFERENCE_SPEC_GIT_PATH = ref_spec_7251.git_path +REFERENCE_SPEC_VERSION = ref_spec_7251.version + +pytestmark = pytest.mark.valid_from("Amsterdam") + +CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS = ( + Spec.CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS +) +CONSOLIDATION_REQUEST_COUNT_STORAGE_SLOT = ( + Spec.CONSOLIDATION_REQUEST_COUNT_STORAGE_SLOT +) +CONSOLIDATION_REQUEST_QUEUE_HEAD_STORAGE_SLOT = ( + Spec.CONSOLIDATION_REQUEST_QUEUE_HEAD_STORAGE_SLOT +) +CONSOLIDATION_REQUEST_QUEUE_TAIL_STORAGE_SLOT = ( + Spec.CONSOLIDATION_REQUEST_QUEUE_TAIL_STORAGE_SLOT +) +MAX_CONSOLIDATION_REQUESTS_PER_BLOCK = ( + Spec.MAX_CONSOLIDATION_REQUESTS_PER_BLOCK +) +SYSTEM_ADDRESS = Address(Spec.SYSTEM_ADDRESS) + + +@pytest.mark.parametrize( + "blocks_consolidation_requests", + [ + pytest.param( + [ + ConsolidationRequestTransaction( + requests=[ + ConsolidationRequest( + source_pubkey=0x01, + target_pubkey=0x02, + fee=Spec.get_fee(0), + ) + ], + ) + ], + id="single_block_single_consolidation_request_from_eoa", + ), + pytest.param( + [ + ConsolidationRequestTransaction( + requests=[ + ConsolidationRequest( + source_pubkey=i * 2 + 1, + target_pubkey=i * 2 + 2, + fee=Spec.get_fee(0), + ) + ], + ) + for i in range(MAX_CONSOLIDATION_REQUESTS_PER_BLOCK) + ], + id="single_block_max_consolidation_per_block", + ), + pytest.param( + [ + ConsolidationRequestTransaction( + requests=[ + ConsolidationRequest( + source_pubkey=i * 2 + 1, + target_pubkey=i * 2 + 2, + fee=Spec.get_fee(0), + ) + ], + ) + for i in range(MAX_CONSOLIDATION_REQUESTS_PER_BLOCK + 1) + ], + id="single_block_max_consolidation_per_block_plus1", + ), + ], +) +@pytest.mark.pre_alloc_group( + "consolidation_requests", + reason="Tests standard consolidation request functionality", +) +def test_bal_system_dequeue_consolidations_eip7251( + blockchain_test: BlockchainTestFiller, + pre: Alloc, + blocks_consolidation_requests: List[ConsolidationRequestTransaction], +) -> None: + """Test making a consolidation request to the beacon chain.""" + txs = [] + + for request in blocks_consolidation_requests: + request.update_pre(pre=pre) + txs += request.transactions() + + num = len(txs) + + count_slot_changes = [] + head_slot_changes = [] + tail_slot_changes = [] + + for idx, _ in enumerate(txs): + count_slot_changes.append( + BalStorageChange(block_access_index=idx + 1, post_value=idx + 1) + ) + + tail_slot_changes.append( + BalStorageChange(block_access_index=idx + 1, post_value=idx + 1) + ) + + # Count slot is always reset to zero after request processing + count_slot_changes.append( + BalStorageChange(block_access_index=num + 1, 
post_value=0) + ) + + if num > MAX_CONSOLIDATION_REQUESTS_PER_BLOCK: + head_slot_changes.append( + BalStorageChange( + block_access_index=num + 1, + post_value=MAX_CONSOLIDATION_REQUESTS_PER_BLOCK, + ) + ) + else: + tail_slot_changes.append( + BalStorageChange(block_access_index=num + 1, post_value=0) + ) + + storage_changes = [] + if any(count_slot_changes): + storage_changes.append( + BalStorageSlot( + slot=CONSOLIDATION_REQUEST_COUNT_STORAGE_SLOT, + slot_changes=count_slot_changes, + ) + ) + + if any(head_slot_changes): + storage_changes.append( + BalStorageSlot( + slot=CONSOLIDATION_REQUEST_QUEUE_HEAD_STORAGE_SLOT, + slot_changes=head_slot_changes, + ) + ) + + if any(tail_slot_changes): + storage_changes.append( + BalStorageSlot( + slot=CONSOLIDATION_REQUEST_QUEUE_TAIL_STORAGE_SLOT, + slot_changes=tail_slot_changes, + ) + ) + + block = Block( + txs=txs, + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS: ( + BalAccountExpectation(storage_changes=storage_changes) + ) + } + ), + ) + + blockchain_test( + genesis_environment=Environment(), + pre=pre, + post={}, + blocks=[block], + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 54d53064fe..3aa38e0eae 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -29,7 +29,7 @@ | `test_bal_noop_storage_write` | Ensure BAL includes storage read but not write for no-op writes where pre-state equals post-state | Contract with pre-existing storage value `0x42` in slot `0x01`; transaction executes `SSTORE(0x01, 0x42)` (writing same value) | BAL **MUST** include the contract address with `storage_reads` for slot `0x01` since it was accessed, but **MUST NOT** include it in `storage_changes` (no actual state change). | ✅ Completed | | `test_bal_fully_unmutated_account` | Ensure BAL captures account that has zero net mutations | Alice sends 0 wei to `Oracle` which writes same pre-existing value to storage | BAL MUST include Alice with `nonce_changes` and balance changes (gas), `Oracle` with `storage_reads` for accessed slot but empty `storage_changes`. | ✅ Completed | | `test_bal_net_zero_balance_transfer` | BAL includes accounts with net-zero balance change but excludes them from balance changes | Contract receives and sends same amount to recipient using CALL or SELFDESTRUCT | BAL **MUST** include contract in `account_changes` without `balance_changes` (net zero). BAL **MUST** record non-zero `balance_changes` for recipient. | ✅ Completed | -| `test_bal_system_dequeue_consolidations_eip7251` | BAL tracks post-exec system dequeues for consolidations | Pre-populate EIP-7251 consolidation requests; produce a block where dequeues occur | BAL MUST include the 7251 system contract with `storage_changes` (queue slots 0–3) using `block_access_index = len(txs)`. | 🟡 Planned | +| `test_bal_system_dequeue_consolidations_eip7251` | BAL tracks post-exec system dequeues for consolidations | Pre-populate EIP-7251 consolidation requests; produce a block where dequeues occur | BAL MUST include the 7251 system contract with `storage_changes` (queue slots 0–3) using `block_access_index = len(txs)`. 
| ✅ Completed | | `test_bal_aborted_storage_access` | Ensure BAL captures storage access in aborted transactions correctly | Alice calls contract that reads storage slot `0x01`, writes to slot `0x02`, then aborts with `REVERT`/`INVALID` | BAL MUST include storage_reads for slots `0x01` and `0x02` (aborted writes become reads), empty storage_changes. Only nonce changes for Alice. | ✅ Completed | | `test_bal_aborted_account_access` | Ensure BAL captures account access in aborted transactions for all account accessing opcodes | Alice calls `AbortContract` that performs account access operations (`BALANCE`, `EXTCODESIZE`, `EXTCODECOPY`, `EXTCODEHASH`, `CALL`, `CALLCODE`, `DELEGATECALL`, `STATICCALL`) on `TargetContract` and aborts via `REVERT`/`INVALID` | BAL MUST include Alice, `TargetContract`, and `AbortContract` in account_changes and nonce changes for Alice. | ✅ Completed | | `test_bal_pure_contract_call` | Ensure BAL captures contract access for pure computation calls | Alice calls `PureContract` that performs pure arithmetic (ADD operation) without storage or balance changes | BAL MUST include Alice and `PureContract` in `account_changes`, and `nonce_changes` for Alice. | ✅ Completed | From 9325a81daf4627e8c205796ee56b3a2935b9a85d Mon Sep 17 00:00:00 2001 From: felipe Date: Tue, 27 Jan 2026 11:00:03 -0700 Subject: [PATCH 102/154] fix(tests): fix EIP-7934 tests logic accuracy (#2078) * refactor(tests): refactor to less error prone logic for eip7934 tests * refactor(tests): performance gain linear search -> binary search * feat(tests): perf gain, find a tx that is within +/- 15 bytes not 1 byte - extradata field is 32 bytes so we can adjust using the block's extradata up to +/- 15 bytes or so (we will need to add one more for rlp at limit + 1 byte invalid tests). This makes it so we should selfomly even hit the binary search logic. * refactor(tests): Get rid of dead binary search code; new tolerance does the job * fix(tests): fix typo - use consolidation requests * refactor(tests): cleanup from comments on PR #2078 --- .../test_block_access_lists_eip7251.py | 2 +- .../test_max_block_rlp_size.py | 174 +++++------------- 2 files changed, 49 insertions(+), 127 deletions(-) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7251.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7251.py index b2cec1d28a..a3ed19d957 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7251.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7251.py @@ -1,4 +1,4 @@ -"""Tests for the effects of EIP-7251 beacon roots on EIP-7928.""" +"""Tests for the effects of EIP-7251 consolidation requests on EIP-7928.""" from typing import List diff --git a/tests/osaka/eip7934_block_rlp_limit/test_max_block_rlp_size.py b/tests/osaka/eip7934_block_rlp_limit/test_max_block_rlp_size.py index fb062f413f..a32ba9a57e 100644 --- a/tests/osaka/eip7934_block_rlp_limit/test_max_block_rlp_size.py +++ b/tests/osaka/eip7934_block_rlp_limit/test_max_block_rlp_size.py @@ -45,7 +45,10 @@ HEADER_TIMESTAMP = 123456789 -EXTRA_DATA_AT_LIMIT = b"\x00\x00\x00" +EXTRA_DATA_AT_LIMIT = b"\x00" * 15 +# Max size adjustment extra_data can absorb +# reserves 1 byte so delta=-1 tests stay valid +EXTRA_DATA_TOLERANCE = len(EXTRA_DATA_AT_LIMIT) - 1 BLOCK_GAS_LIMIT = 100_000_000 @@ -130,6 +133,10 @@ def exact_size_transactions( The calculation uses caching to avoid recalculating the same block rlp for each fork. 
Calculate the block and fill with real sender for testing. + Due to RLP encoding boundaries, certain exact block sizes may be + unachievable (±1 byte). The returned extra_data_len compensates for + any gap so the final block hits the exact target. + Args: sender: The sender account block_size_limit: The target block RLP size limit @@ -141,6 +148,11 @@ def exact_size_transactions( be included withdrawals: Optional list of withdrawals to include in the block + Returns: + Tuple of (transactions, extra_data_len) where extra_data_len is + the number of extra_data bytes needed to hit the exact target + block size. + """ log_contract = None if emit_logs: @@ -169,7 +181,7 @@ def exact_size_transactions( if not specific_transaction_to_include and not withdrawals: # use cached version when possible for performance - transactions, gas_used = _exact_size_transactions_cached( + transactions, extra_data_len = _exact_size_transactions_cached( block_size_limit, fork, gas_limit, @@ -179,7 +191,7 @@ def exact_size_transactions( else: # Direct calculation, no cache, since `Transaction` / `Withdrawal` # are not hashable - transactions, gas_used = _exact_size_transactions_impl( + transactions, extra_data_len = _exact_size_transactions_impl( block_size_limit, fork, gas_limit, @@ -189,7 +201,7 @@ def exact_size_transactions( withdrawals=withdrawals, ) - return transactions, gas_used + return transactions, extra_data_len @lru_cache(maxsize=128) @@ -203,6 +215,12 @@ def _exact_size_transactions_cached( """ Generate transactions that fill a block to exactly the RLP size limit. Abstracted with hashable arguments for caching block calculations. + + Returns: + Tuple of (transactions, extra_data_len) where extra_data_len is + the number of extra_data bytes needed to hit the exact target + block size. + """ return _exact_size_transactions_impl( block_size_limit, @@ -339,62 +357,7 @@ def _exact_size_transactions_impl( gas_limit=target_gas, data=target_calldata, ) - - test_size = get_block_rlp_size( - fork, - transactions + [test_tx], - withdrawals=withdrawals, - ) - - if test_size == block_size_limit: - # if exact match, use the transaction - transactions.append(test_tx) - else: - # search for the best adjustment - diff = block_size_limit - test_size - best_diff = abs(diff) - - search_range = min(abs(diff) + 50, 1000) - - for adjustment in range(-search_range, search_range + 1): - adjusted_size = estimated_calldata + adjustment - if adjusted_size < 0: - continue - - adjusted_calldata = b"\x00" * adjusted_size - adjusted_gas = calculator(calldata=adjusted_calldata) - - if adjusted_gas <= remaining_gas: - adjusted_tx = Transaction( - sender=sender, - nonce=nonce, - max_fee_per_gas=10**11, - max_priority_fee_per_gas=10**11, - gas_limit=adjusted_gas, - data=adjusted_calldata, - ) - - adjusted_test_size = get_block_rlp_size( - fork, - transactions + [adjusted_tx], - withdrawals=withdrawals, - ) - - if adjusted_test_size == block_size_limit: - # exact match - transactions.append(adjusted_tx) - break - - adjusted_diff = abs( - block_size_limit - adjusted_test_size - ) - if adjusted_diff < best_diff: - best_diff = adjusted_diff - else: - raise RuntimeError( - "Failed to find a transaction that matches " - "the target size." 
- ) + transactions.append(test_tx) else: transactions.append(empty_tx) @@ -403,14 +366,15 @@ def _exact_size_transactions_impl( transactions, withdrawals=withdrawals, ) - final_gas = sum(tx.gas_limit for tx in transactions) - - assert final_size == block_size_limit, ( + # Compute the extra_data length that compensates for any size gap. + size_diff = final_size - block_size_limit + assert abs(size_diff) <= EXTRA_DATA_TOLERANCE, ( f"Size mismatch: got {final_size}, " f"expected {block_size_limit} " - f"({final_size - block_size_limit} bytes diff)" + f"({size_diff} bytes diff, exceeds ±{EXTRA_DATA_TOLERANCE} tolerance)" ) - return transactions, final_gas + extra_data_len = len(EXTRA_DATA_AT_LIMIT) - size_diff + return transactions, extra_data_len @EIPChecklist.BlockLevelConstraint.Test.Boundary.Under() @@ -446,19 +410,13 @@ def test_block_at_rlp_size_limit_boundary( - At the limit, the block is valid - At the limit + 1 byte, the block is invalid """ - transactions, gas_used = exact_size_transactions( + transactions, extra_data_len = exact_size_transactions( sender, block_size_limit, fork, pre, env.gas_limit, ) - block_rlp_size = get_block_rlp_size(fork, transactions) - assert block_rlp_size == block_size_limit, ( - f"Block RLP size {block_rlp_size} does not exactly match " - f"limit {block_size_limit}, difference: " - f"{block_rlp_size - block_size_limit} bytes" - ) block = Block( txs=transactions, @@ -467,12 +425,8 @@ def test_block_at_rlp_size_limit_boundary( else None, ) - if delta < 0: - block.extra_data = Bytes(EXTRA_DATA_AT_LIMIT[: -abs(delta)]) - elif delta == 0: - block.extra_data = Bytes(EXTRA_DATA_AT_LIMIT) - else: # delta > 0 - block.extra_data = Bytes(EXTRA_DATA_AT_LIMIT + b"\x00" * delta) + target_extra_data_len = max(extra_data_len + delta, 0) + block.extra_data = Bytes(b"\x00" * target_extra_data_len) block.timestamp = ZeroPaddedHexNumber(HEADER_TIMESTAMP) blockchain_test( @@ -498,7 +452,7 @@ def test_block_rlp_size_at_limit_with_all_typed_transactions( typed_transaction: Transaction, ) -> None: """Test the block RLP size limit with all transaction types.""" - transactions, gas_used = exact_size_transactions( + transactions, extra_data_len = exact_size_transactions( sender, block_size_limit, fork, @@ -506,15 +460,9 @@ def test_block_rlp_size_at_limit_with_all_typed_transactions( env.gas_limit, specific_transaction_to_include=typed_transaction, ) - block_rlp_size = get_block_rlp_size(fork, transactions) - assert block_rlp_size == block_size_limit, ( - f"Block RLP size {block_rlp_size} does not exactly match limit " - f"{block_size_limit}, difference: {block_rlp_size - block_size_limit} " - "bytes" - ) block = Block(txs=transactions) - block.extra_data = Bytes(EXTRA_DATA_AT_LIMIT) + block.extra_data = Bytes(b"\x00" * extra_data_len) block.timestamp = ZeroPaddedHexNumber(HEADER_TIMESTAMP) blockchain_test( @@ -541,7 +489,7 @@ def test_block_at_rlp_limit_with_logs( Test that a block at the RLP size limit is valid even when transactions emit logs. 
""" - transactions, gas_used = exact_size_transactions( + transactions, extra_data_len = exact_size_transactions( sender, block_size_limit, fork, @@ -550,15 +498,8 @@ def test_block_at_rlp_limit_with_logs( emit_logs=True, ) - block_rlp_size = get_block_rlp_size(fork, transactions) - assert block_rlp_size == block_size_limit, ( - f"Block RLP size {block_rlp_size} does not exactly match limit " - f"{block_size_limit}, difference: {block_rlp_size - block_size_limit} " - "bytes" - ) - block = Block(txs=transactions) - block.extra_data = Bytes(EXTRA_DATA_AT_LIMIT) + block.extra_data = Bytes(b"\x00" * extra_data_len) block.timestamp = ZeroPaddedHexNumber(HEADER_TIMESTAMP) blockchain_test( @@ -600,7 +541,7 @@ def test_block_at_rlp_limit_with_withdrawals( ), ] - transactions, gas_used = exact_size_transactions( + transactions, extra_data_len = exact_size_transactions( sender, block_size_limit, fork, @@ -609,19 +550,10 @@ def test_block_at_rlp_limit_with_withdrawals( withdrawals=withdrawals, ) - block_rlp_size = get_block_rlp_size( - fork, transactions, withdrawals=withdrawals - ) - assert block_rlp_size == block_size_limit, ( - f"Block RLP size {block_rlp_size} does not exactly match limit " - f"{block_size_limit}, difference: {block_rlp_size - block_size_limit} " - "bytes" - ) - block = Block( txs=transactions, withdrawals=withdrawals, - extra_data=Bytes(EXTRA_DATA_AT_LIMIT), + extra_data=Bytes(b"\x00" * extra_data_len), timestamp=ZeroPaddedHexNumber(HEADER_TIMESTAMP), ) @@ -664,7 +596,7 @@ def test_fork_transition_block_rlp_limit( sender_before_fork = pre.fund_eoa() sender_at_fork = pre.fund_eoa() - transactions_before, gas_used_before = exact_size_transactions( + transactions_before, extra_data_len_before = exact_size_transactions( sender_before_fork, block_size_limit, fork, @@ -672,7 +604,7 @@ def test_fork_transition_block_rlp_limit( env.gas_limit, ) - transactions_at_fork, gas_used_at_fork = exact_size_transactions( + transactions_at_fork, extra_data_len_at_fork = exact_size_transactions( sender_at_fork, block_size_limit, fork, @@ -680,23 +612,13 @@ def test_fork_transition_block_rlp_limit( env.gas_limit, ) - for fork_block_rlp_size in [ - get_block_rlp_size(fork, transactions_before), - get_block_rlp_size(fork, transactions_at_fork), - ]: - assert fork_block_rlp_size == block_size_limit, ( - f"Block RLP size {fork_block_rlp_size} does not exactly match " - f"limit {block_size_limit}, difference: " - f"{fork_block_rlp_size - block_size_limit} bytes" - ) - # HEADER_TIMESTAMP (123456789) used in calculation takes 4 bytes in RLP - # encoding. Transition timestamps (14_999 and 15_000) take 2 bytes - # Re-define `_extradata_at_limit` accounting for this difference + # encoding. Transition timestamps (14_999 and 15_000) take 2 bytes. + # Add the difference to extra_data to keep block at the limit. 
timestamp_byte_savings = 2 - _extradata_at_limit = EXTRA_DATA_AT_LIMIT + ( - b"\x00" * timestamp_byte_savings - ) + + extra_data_before = extra_data_len_before + timestamp_byte_savings + extra_data_at_fork = extra_data_len_at_fork + timestamp_byte_savings blocks = [ # before fork, block at limit +1 should be accepted @@ -704,7 +626,7 @@ def test_fork_transition_block_rlp_limit( timestamp=14_999, txs=transactions_before, # +1 to exceed limit - extra_data=Bytes(_extradata_at_limit + b"\x00"), + extra_data=Bytes(b"\x00" * (extra_data_before + 1)), ) ] @@ -715,7 +637,7 @@ def test_fork_transition_block_rlp_limit( timestamp=15_000, txs=transactions_at_fork, # +1 to exceed limit, should be rejected - extra_data=Bytes(_extradata_at_limit + b"\x00"), + extra_data=Bytes(b"\x00" * (extra_data_at_fork + 1)), exception=BlockException.RLP_BLOCK_LIMIT_EXCEEDED, ) ) @@ -725,7 +647,7 @@ def test_fork_transition_block_rlp_limit( timestamp=15_000, txs=transactions_at_fork, # exact limit should be accepted - extra_data=Bytes(EXTRA_DATA_AT_LIMIT), + extra_data=Bytes(b"\x00" * extra_data_at_fork), ) ) From 7c8ec4ff4ac84ce6cb61863a2311d7c80140db5c Mon Sep 17 00:00:00 2001 From: danceratopz Date: Tue, 27 Jan 2026 19:58:20 +0100 Subject: [PATCH 103/154] feat(test-cli): add `hasher compare` subcommand (#2080) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(test-cli): add hasher compare subcommand * test(test-cli): add unit tests for hasher * refactor(test-cli): remove dead code and redundant comments - Remove unused HashableItem.print() method. - Remove obvious inline comments that duplicate code intent. - Simplify multi-line type annotation for differences list. * refactor(test-cli): simplify compare by diffing trees directly - Replace parse_hash_lines() with collect_hashes() that walks item tree. - Simplify display_diff() to work with dicts instead of parsing strings. - Eliminate round-trip: format → parse → diff is now just collect → diff. 
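A minimal sketch of the collect-then-diff flow described above, assuming the helpers introduced by this patch (`HashableItem.from_folder`, `collect_hashes`, `display_diff`) and two hypothetical fixture directories; the roughly equivalent CLI invocation would be the new `hasher compare fixtures_a fixtures_b` subcommand:

from pathlib import Path

from execution_testing.cli.hasher import (
    HashableItem,
    HashableItemType,
    collect_hashes,
    display_diff,
)

# Build the hash trees for both directories (paths are hypothetical).
left_item = HashableItem.from_folder(folder_path=Path("fixtures_a"))
right_item = HashableItem.from_folder(folder_path=Path("fixtures_b"))

# Walk each tree once, collecting {path: "0x<hash>"} down to file level.
left_hashes = collect_hashes(left_item, print_type=HashableItemType.FILE)
right_hashes = collect_hashes(right_item, print_type=HashableItemType.FILE)

if left_hashes != right_hashes:
    # Only paths whose hashes differ (or are missing on one side) are shown.
    display_diff(
        left_hashes,
        right_hashes,
        left_label="fixtures_a",
        right_label="fixtures_b",
    )

Diffing the two dicts directly is what removes the earlier format → parse → diff round-trip.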
--- .../src/execution_testing/cli/hasher.py | 279 ++++++++++++-- .../cli/tests/test_hasher.py | 342 ++++++++++++++++++ 2 files changed, 598 insertions(+), 23 deletions(-) create mode 100644 packages/testing/src/execution_testing/cli/tests/test_hasher.py diff --git a/packages/testing/src/execution_testing/cli/hasher.py b/packages/testing/src/execution_testing/cli/hasher.py index ecb49665ac..5b13e229b1 100644 --- a/packages/testing/src/execution_testing/cli/hasher.py +++ b/packages/testing/src/execution_testing/cli/hasher.py @@ -2,12 +2,15 @@ import hashlib import json +import sys from dataclasses import dataclass, field from enum import IntEnum, auto from pathlib import Path -from typing import Dict, List, Optional +from typing import Any, Callable, Dict, List, Optional, TypeVar import click +from rich.console import Console +from rich.markup import escape as rich_escape class HashableItemType(IntEnum): @@ -42,26 +45,43 @@ def hash(self) -> bytes: all_hash_bytes += item_hash_bytes return hashlib.sha256(all_hash_bytes).digest() - def print( + def format_lines( self, *, name: str, level: int = 0, print_type: Optional[HashableItemType] = None, - ) -> None: - """Print the hash of the item and sub-items.""" + max_depth: Optional[int] = None, + ) -> List[str]: + """Return the hash lines for the item and sub-items.""" + lines: List[str] = [] next_level = level print_name = name + if level == 0 and self.parents: separator = "::" if self.type == HashableItemType.TEST else "/" print_name = f"{'/'.join(self.parents)}{separator}{name}" + if print_type is None or self.type >= print_type: next_level += 1 - print(f"{' ' * level}{print_name}: 0x{self.hash().hex()}") + lines.append(f"{' ' * level}{print_name}: 0x{self.hash().hex()}") + + # Stop recursion if we've reached max_depth + if max_depth is not None and next_level > max_depth: + return lines if self.items is not None: for key, item in sorted(self.items.items()): - item.print(name=key, level=next_level, print_type=print_type) + lines.extend( + item.format_lines( + name=key, + level=next_level, + print_type=print_type, + max_depth=max_depth, + ) + ) + + return lines @classmethod def from_json_file( @@ -126,34 +146,247 @@ def from_folder( return cls(type=HashableItemType.FOLDER, items=items, parents=parents) -@click.command() +def render_hash_report( + folder: Path, + *, + files: bool, + tests: bool, + root: bool, + name_override: Optional[str] = None, + max_depth: Optional[int] = None, +) -> List[str]: + """Return canonical output lines for a folder.""" + item = HashableItem.from_folder(folder_path=folder) + if root: + return [f"0x{item.hash().hex()}"] + print_type: Optional[HashableItemType] = None + if files: + print_type = HashableItemType.FILE + elif tests: + print_type = HashableItemType.TEST + name = name_override if name_override is not None else folder.name + return item.format_lines( + name=name, print_type=print_type, max_depth=max_depth + ) + + +def collect_hashes( + item: HashableItem, + *, + path: str = "", + print_type: Optional[HashableItemType] = None, + max_depth: Optional[int] = None, + depth: int = 0, +) -> Dict[str, str]: + """Collect hashes from item tree as {path: hash_hex}.""" + result: Dict[str, str] = {} + + if print_type is None or item.type >= print_type: + if path: + result[path] = f"0x{item.hash().hex()}" + depth += 1 + if max_depth is not None and depth > max_depth: + return result + + if item.items: + for name, child in sorted(item.items.items()): + child_path = f"{path}/{name}" if path else name + result.update( + 
collect_hashes( + child, + path=child_path, + print_type=print_type, + max_depth=max_depth, + depth=depth, + ) + ) + + return result + + +def display_diff( + left: Dict[str, str], + right: Dict[str, str], + *, + left_label: str, + right_label: str, +) -> None: + """Render diff showing only changed hashes.""" + differences: List[tuple[str, str, str]] = [] + + for path in left: + right_hash = right.get(path, "") + if left[path] != right_hash: + differences.append((path, left[path], right_hash)) + + for path in right: + if path not in left: + differences.append((path, "", right[path])) + + if not differences: + return + + console = Console() + console.print("── Fixture Hash Differences ──", style="bold") + console.print(f"[dim]--- {left_label}[/dim]") + console.print(f"[dim]+++ {right_label}[/dim]") + console.print() + + for path, left_hash, right_hash in differences: + depth = path.count("/") + indent = " " * (depth + 1) + console.print(f"{indent}[bold]{rich_escape(path)}[/bold]") + console.print(f"{indent} [red]- {left_hash}[/red]") + console.print(f"{indent} [green]+ {right_hash}[/green]") + console.print() + + +class DefaultGroup(click.Group): + """Click group with a default command fallback.""" + + def __init__( + self, *args: Any, default_cmd_name: str = "hash", **kwargs: Any + ): + super().__init__(*args, **kwargs) + self.default_cmd_name = default_cmd_name + + def resolve_command( + self, ctx: click.Context, args: List[str] + ) -> tuple[Optional[str], Optional[click.Command], List[str]]: + """Resolve command, inserting default if no subcommand given.""" + first_arg_idx = next( + (i for i, a in enumerate(args) if not a.startswith("-")), None + ) + if ( + first_arg_idx is not None + and args[first_arg_idx] not in self.commands + ): + args = list(args) + args.insert(first_arg_idx, self.default_cmd_name) + return super().resolve_command(ctx, args) + + +F = TypeVar("F", bound=Callable[..., None]) + + +def hash_options(func: F) -> F: + """Decorator for common hash options.""" + func = click.option( + "--root", "-r", is_flag=True, help="Only print hash of root folder" + )(func) + func = click.option( + "--tests", "-t", is_flag=True, help="Print hash of tests" + )(func) + func = click.option( + "--files", "-f", is_flag=True, help="Print hash of files" + )(func) + return func + + +@click.group( + cls=DefaultGroup, + default_cmd_name="hash", + context_settings={"help_option_names": ["-h", "--help"]}, +) +def hasher() -> None: + """Hash folders of JSON fixtures and compare them.""" + pass + + +@hasher.command(name="hash") @click.argument( "folder_path_str", type=click.Path( exists=True, file_okay=False, dir_okay=True, readable=True ), ) -@click.option("--files", "-f", is_flag=True, help="Print hash of files") -@click.option("--tests", "-t", is_flag=True, help="Print hash of tests") +@hash_options +def hash_cmd( + folder_path_str: str, files: bool, tests: bool, root: bool +) -> None: + """Hash folders of JSON fixtures and print their hashes.""" + lines = render_hash_report( + Path(folder_path_str), files=files, tests=tests, root=root + ) + for line in lines: + print(line) + + +@hasher.command(name="compare") +@click.argument( + "left_folder", + type=click.Path( + exists=True, file_okay=False, dir_okay=True, readable=True + ), +) +@click.argument( + "right_folder", + type=click.Path( + exists=True, file_okay=False, dir_okay=True, readable=True + ), +) @click.option( - "--root", "-r", is_flag=True, help="Only print hash of root folder" + "--depth", + "-d", + type=int, + default=None, + help="Limit 
to N levels (0=root, 1=folders, 2=files, 3=tests).", ) -def main(folder_path_str: str, files: bool, tests: bool, root: bool) -> None: - """Hash folders of JSON fixtures and print their hashes.""" - folder_path: Path = Path(folder_path_str) - item = HashableItem.from_folder(folder_path=folder_path) +@hash_options +def compare_cmd( + left_folder: str, + right_folder: str, + files: bool, + tests: bool, + root: bool, + depth: Optional[int], +) -> None: + """Compare two fixture directories and show differences.""" + try: + left_item = HashableItem.from_folder(folder_path=Path(left_folder)) + right_item = HashableItem.from_folder(folder_path=Path(right_folder)) - if root: - print(f"0x{item.hash().hex()}") - return + if root: + if left_item.hash() == right_item.hash(): + sys.exit(0) + left_hashes = {"root": f"0x{left_item.hash().hex()}"} + right_hashes = {"root": f"0x{right_item.hash().hex()}"} + else: + print_type: Optional[HashableItemType] = None + if files: + print_type = HashableItemType.FILE + elif tests: + print_type = HashableItemType.TEST + + left_hashes = collect_hashes( + left_item, print_type=print_type, max_depth=depth + ) + right_hashes = collect_hashes( + right_item, print_type=print_type, max_depth=depth + ) + + if left_hashes == right_hashes: + sys.exit(0) + + display_diff( + left_hashes, + right_hashes, + left_label=left_folder, + right_label=right_folder, + ) + sys.exit(1) + except PermissionError as e: + click.echo(f"Error: Permission denied - {e}", err=True) + sys.exit(2) + except (json.JSONDecodeError, KeyError, TypeError) as e: + click.echo(f"Error: Invalid fixture format - {e}", err=True) + sys.exit(2) + except Exception as e: + click.echo(f"Error: {e}", err=True) + sys.exit(2) - print_type: Optional[HashableItemType] = None - if files: - print_type = HashableItemType.FILE - elif tests: - print_type = HashableItemType.TEST - item.print(name=folder_path.name, print_type=print_type) +main = hasher # Entry point alias if __name__ == "__main__": diff --git a/packages/testing/src/execution_testing/cli/tests/test_hasher.py b/packages/testing/src/execution_testing/cli/tests/test_hasher.py new file mode 100644 index 0000000000..b80bdc1e30 --- /dev/null +++ b/packages/testing/src/execution_testing/cli/tests/test_hasher.py @@ -0,0 +1,342 @@ +"""Tests for the hasher CLI tool.""" + +import json +from pathlib import Path + +from click.testing import CliRunner + +from execution_testing.cli.hasher import hasher + + +def create_fixture(path: Path, test_name: str, hash_value: str) -> None: + """Create a test fixture JSON file.""" + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(json.dumps({test_name: {"_info": {"hash": hash_value}}})) + + +class TestCompareIdenticalDirectories: + """Test comparing identical directories.""" + + def test_compare_identical_directories(self, tmp_path: Path) -> None: + """Same content in both dirs should exit 0 with no output.""" + dir_a = tmp_path / "dir_a" / "state_tests" + dir_b = tmp_path / "dir_b" / "state_tests" + create_fixture(dir_a / "test.json", "test1", "0xabc123") + create_fixture(dir_b / "test.json", "test1", "0xabc123") + + runner = CliRunner() + result = runner.invoke( + hasher, ["compare", str(dir_a.parent), str(dir_b.parent)] + ) + assert result.exit_code == 0 + assert result.output == "" + + +class TestCompareDifferentDirectories: + """Test comparing different directories.""" + + def test_compare_different_directories(self, tmp_path: Path) -> None: + """Different hashes should exit 1 with diff in stdout.""" + dir_a = tmp_path 
/ "dir_a" / "state_tests" + dir_b = tmp_path / "dir_b" / "state_tests" + create_fixture(dir_a / "test.json", "test1", "0xabc123") + create_fixture(dir_b / "test.json", "test1", "0xdef456") + + runner = CliRunner() + result = runner.invoke( + hasher, ["compare", str(dir_a.parent), str(dir_b.parent)] + ) + assert result.exit_code == 1 + assert "Fixture Hash Differences" in result.output + # Verify the new format shows the path and both hashes + assert "test1" in result.output + assert "0xabc123" in result.output + assert "0xdef456" in result.output + + +class TestCompareMissingDirectory: + """Test comparing when a directory doesn't exist.""" + + def test_compare_missing_directory(self, tmp_path: Path) -> None: + """One path doesn't exist should exit 2 with error in stderr.""" + dir_a = tmp_path / "dir_a" / "state_tests" + create_fixture(dir_a / "test.json", "test1", "0xabc123") + + runner = CliRunner() + result = runner.invoke( + hasher, + ["compare", str(dir_a.parent), str(tmp_path / "nonexistent")], + ) + assert result.exit_code == 2 + + +class TestCompareFlagParity: + """Test that flags work consistently between hash and compare commands.""" + + def test_compare_flag_parity_files(self, tmp_path: Path) -> None: + """Hasher -f X vs hasher compare -f X X should exit 0.""" + dir_a = tmp_path / "dir_a" / "state_tests" + create_fixture(dir_a / "test.json", "test1", "0xabc123") + + runner = CliRunner() + # Compare same directory with -f flag + result = runner.invoke( + hasher, ["compare", "-f", str(dir_a.parent), str(dir_a.parent)] + ) + assert result.exit_code == 0 + + def test_compare_flag_parity_tests(self, tmp_path: Path) -> None: + """Hasher -t X vs hasher compare -t X X should exit 0.""" + dir_a = tmp_path / "dir_a" / "state_tests" + create_fixture(dir_a / "test.json", "test1", "0xabc123") + + runner = CliRunner() + # Compare same directory with -t flag + result = runner.invoke( + hasher, ["compare", "-t", str(dir_a.parent), str(dir_a.parent)] + ) + assert result.exit_code == 0 + + def test_compare_flag_parity_root(self, tmp_path: Path) -> None: + """Hasher -r X vs hasher compare -r X X should exit 0.""" + dir_a = tmp_path / "dir_a" / "state_tests" + create_fixture(dir_a / "test.json", "test1", "0xabc123") + + runner = CliRunner() + # Compare same directory with -r flag + result = runner.invoke( + hasher, ["compare", "-r", str(dir_a.parent), str(dir_a.parent)] + ) + assert result.exit_code == 0 + + +class TestBackwardsCompatibility: + """Test backwards compatibility with existing hasher FOLDER syntax.""" + + def test_backwards_compat(self, tmp_path: Path) -> None: + """Hasher FOLDER without subcommand should work as before.""" + dir_a = tmp_path / "dir_a" / "state_tests" + create_fixture(dir_a / "test.json", "test1", "0xabc123") + + runner = CliRunner() + # Old syntax without subcommand + result = runner.invoke(hasher, [str(dir_a.parent)]) + assert result.exit_code == 0 + assert "0x" in result.output + + def test_explicit_hash_subcommand(self, tmp_path: Path) -> None: + """Hasher hash FOLDER should work.""" + dir_a = tmp_path / "dir_a" / "state_tests" + create_fixture(dir_a / "test.json", "test1", "0xabc123") + + runner = CliRunner() + # Explicit hash subcommand + result = runner.invoke(hasher, ["hash", str(dir_a.parent)]) + assert result.exit_code == 0 + assert "0x" in result.output + + def test_hash_output_matches_between_syntaxes( + self, tmp_path: Path + ) -> None: + """Both syntaxes should produce identical output.""" + dir_a = tmp_path / "dir_a" / "state_tests" + create_fixture(dir_a 
/ "test.json", "test1", "0xabc123") + + runner = CliRunner() + # Old syntax + result_old = runner.invoke(hasher, [str(dir_a.parent)]) + # New syntax + result_new = runner.invoke(hasher, ["hash", str(dir_a.parent)]) + + assert result_old.exit_code == result_new.exit_code + assert result_old.output == result_new.output + + +class TestCompareEmptyDirectories: + """Test comparing empty directories.""" + + def test_compare_empty_directories(self, tmp_path: Path) -> None: + """Both dirs empty should exit 0.""" + dir_a = tmp_path / "dir_a" + dir_b = tmp_path / "dir_b" + dir_a.mkdir(parents=True) + dir_b.mkdir(parents=True) + + runner = CliRunner() + result = runner.invoke(hasher, ["compare", str(dir_a), str(dir_b)]) + assert result.exit_code == 0 + + +class TestErrorToStderr: + """Test that errors go to stderr.""" + + def test_error_to_stderr(self, tmp_path: Path) -> None: + """Invalid fixture JSON should produce error message.""" + dir_a = tmp_path / "dir_a" + dir_a.mkdir(parents=True) + (dir_a / "invalid.json").write_text("not valid json") + + runner = CliRunner() + result = runner.invoke(hasher, ["compare", str(dir_a), str(dir_a)]) + assert result.exit_code == 2 + assert "Error" in result.output + + +class TestHashCommandFlags: + """Test hash command with various flags.""" + + def test_hash_with_files_flag(self, tmp_path: Path) -> None: + """Hasher hash -f FOLDER should work.""" + dir_a = tmp_path / "dir_a" / "state_tests" + create_fixture(dir_a / "test.json", "test1", "0xabc123") + + runner = CliRunner() + result = runner.invoke(hasher, ["hash", "-f", str(dir_a.parent)]) + assert result.exit_code == 0 + assert "test.json" in result.output + + def test_hash_with_tests_flag(self, tmp_path: Path) -> None: + """Hasher hash -t FOLDER should work.""" + dir_a = tmp_path / "dir_a" / "state_tests" + create_fixture(dir_a / "test.json", "test1", "0xabc123") + + runner = CliRunner() + result = runner.invoke(hasher, ["hash", "-t", str(dir_a.parent)]) + assert result.exit_code == 0 + assert "test1" in result.output + + def test_hash_with_root_flag(self, tmp_path: Path) -> None: + """Hasher hash -r FOLDER should only print root hash.""" + dir_a = tmp_path / "dir_a" / "state_tests" + create_fixture(dir_a / "test.json", "test1", "0xabc123") + + runner = CliRunner() + result = runner.invoke(hasher, ["hash", "-r", str(dir_a.parent)]) + assert result.exit_code == 0 + # Should only have one line with the hash + lines = [line for line in result.output.strip().split("\n") if line] + assert len(lines) == 1 + assert lines[0].startswith("0x") + + +class TestCompareDepthFlag: + """Test --depth flag for compare command.""" + + def test_depth_limits_output(self, tmp_path: Path) -> None: + """--depth should limit how deep the comparison goes.""" + dir_a = tmp_path / "dir_a" / "folder" / "subfolder" + dir_b = tmp_path / "dir_b" / "folder" / "subfolder" + create_fixture(dir_a / "test.json", "test1", "0xabc123") + create_fixture(dir_b / "test.json", "test1", "0xdef456") + + runner = CliRunner() + + # depth=1 should show folder but not subfolder + result = runner.invoke( + hasher, + [ + "compare", + "--depth", + "1", + str(dir_a.parent.parent), + str(dir_b.parent.parent), + ], + ) + assert result.exit_code == 1 + assert "folder" in result.output + assert "subfolder" not in result.output + + def test_depth_2_shows_subfolders(self, tmp_path: Path) -> None: + """--depth 2 should show subfolders.""" + dir_a = tmp_path / "dir_a" / "folder" / "subfolder" + dir_b = tmp_path / "dir_b" / "folder" / "subfolder" + create_fixture(dir_a / 
"test.json", "test1", "0xabc123") + create_fixture(dir_b / "test.json", "test1", "0xdef456") + + runner = CliRunner() + + result = runner.invoke( + hasher, + [ + "compare", + "-d", + "2", + str(dir_a.parent.parent), + str(dir_b.parent.parent), + ], + ) + assert result.exit_code == 1 + assert "folder" in result.output + assert "subfolder" in result.output + + +class TestCompareHierarchy: + """Test that diff output preserves hierarchy.""" + + def test_full_paths_in_output(self, tmp_path: Path) -> None: + """Diff should show full paths to disambiguate items with same name.""" + # Create two folders each with a "shanghai" subfolder + dir_a = tmp_path / "dir_a" + dir_b = tmp_path / "dir_b" + create_fixture( + dir_a / "blockchain_tests" / "shanghai" / "test.json", + "test1", + "0xaaa111", + ) + create_fixture( + dir_a / "state_tests" / "shanghai" / "test.json", + "test1", + "0xbbb222", + ) + create_fixture( + dir_b / "blockchain_tests" / "shanghai" / "test.json", + "test1", + "0xccc333", + ) + create_fixture( + dir_b / "state_tests" / "shanghai" / "test.json", + "test1", + "0xddd444", + ) + + runner = CliRunner() + result = runner.invoke( + hasher, ["compare", "--depth", "2", str(dir_a), str(dir_b)] + ) + + assert result.exit_code == 1 + # Should show full paths, not just "shanghai" twice + assert "blockchain_tests/shanghai" in result.output + assert "state_tests/shanghai" in result.output + + +class TestHelpOptions: + """Test help options.""" + + def test_help_short(self) -> None: + """-h should show help.""" + runner = CliRunner() + result = runner.invoke(hasher, ["-h"]) + assert result.exit_code == 0 + assert "Hash folders of JSON fixtures" in result.output + + def test_help_long(self) -> None: + """--help should show help.""" + runner = CliRunner() + result = runner.invoke(hasher, ["--help"]) + assert result.exit_code == 0 + assert "Hash folders of JSON fixtures" in result.output + + def test_compare_help(self) -> None: + """Compare --help should show compare help.""" + runner = CliRunner() + result = runner.invoke(hasher, ["compare", "--help"]) + assert result.exit_code == 0 + assert "Compare two fixture directories" in result.output + + def test_hash_help(self) -> None: + """Hash --help should show hash help.""" + runner = CliRunner() + result = runner.invoke(hasher, ["hash", "--help"]) + assert result.exit_code == 0 + assert "Hash folders of JSON fixtures" in result.output From 590bc9f045baacca112ef56e4fe0769eed8b76d3 Mon Sep 17 00:00:00 2001 From: Alexey Osipov Date: Thu, 29 Jan 2026 15:22:13 +0300 Subject: [PATCH 104/154] fix(tests): update nethermind exceptions (#2098) --- .../src/execution_testing/client_clis/clis/nethermind.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/testing/src/execution_testing/client_clis/clis/nethermind.py b/packages/testing/src/execution_testing/client_clis/clis/nethermind.py index bb23b04aa0..e687c77066 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/nethermind.py +++ b/packages/testing/src/execution_testing/client_clis/clis/nethermind.py @@ -320,10 +320,10 @@ class NethermindExceptionMapper(ExceptionMapper): "max initcode size exceeded" ), TransactionException.NONCE_MISMATCH_TOO_LOW: ( - "wrong transaction nonce" + "transaction nonce is too low" ), TransactionException.NONCE_MISMATCH_TOO_HIGH: ( - "wrong transaction nonce" + "transaction nonce is too high" ), TransactionException.INSUFFICIENT_MAX_FEE_PER_BLOB_GAS: ( "InsufficientMaxFeePerBlobGas: Not enough to cover blob gas fee" @@ -338,7 +338,7 @@ 
class NethermindExceptionMapper(ExceptionMapper): "InvalidTxType: Transaction type in Custom is not supported" ), TransactionException.TYPE_3_TX_ZERO_BLOBS: ( - "blob transaction missing blob hashes" + "blob transaction must have at least 1 blob" ), TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: ( "InvalidBlobVersionedHashVersion: Blob version not supported" From b68a53250a3c4c0b816fec12f38bb4f0c38b7518 Mon Sep 17 00:00:00 2001 From: felipe Date: Thu, 29 Jan 2026 08:56:10 -0700 Subject: [PATCH 105/154] feat(test-fill): speed up filling (#2079) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(test-fill): Build index incrementally, not at the end of fill * perf(test): defer index model validation * perf(tests): O(n²) to O(n) trie-building approach for perf * perf(test-fill): Use --no-html; releases don't publish these (needless overhead) * refactor: cleanup; add more hasher compat tests * refactor(perf): validate at IndexFile once, not at every single file * feat(perf): distribute slow-marked tests early to runners; avoid long tail - mark more slow tests * refactor: changes from comments on PR #2079 * chore(ci): show the slowest 50 tests on every py3 run * refactor(perf): pre-serialize fixture JSON while workers parallelize * refactor(perf): write partial JSON fixture files + merge at end - xdist workers were writing to the same fixture JSON file causing O(n²) work... each worker had to read, parse, and rewrite all previous entries. - now workers write to their own partial JSONL file (append-only, O(1)) - test_blob_txs.partial.gw0.jsonl - test_blob_txs.partial.gw1.jsonl - etc. .. and at session end, ``merge_partial_fixture_files()`` combines all partials into the final JSON file Test teardown on some tests dropped from ~80s to ~1s * fix: remove pre-serialization small win to help pypy runs --- .github/configs/feature.yaml | 14 +- .../src/execution_testing/cli/gen_index.py | 74 +++ .../src/execution_testing/cli/hasher.py | 105 ++++- .../pytest_commands/plugins/filler/filler.py | 37 +- .../cli/tests/test_hasher.py | 431 ++++++++++++++++- .../execution_testing/fixtures/__init__.py | 7 +- .../execution_testing/fixtures/collector.py | 159 ++++++- .../src/execution_testing/fixtures/consume.py | 4 +- .../src/execution_testing/fixtures/file.py | 8 +- .../fixtures/tests/test_collector.py | 435 ++++++++++++++++++ .../test_excess_blob_gas_fork_transition.py | 1 + tests/frontier/opcodes/test_blockhash.py | 1 + .../test_blob_reserve_price_with_bpo.py | 1 + ...blob_reserve_price_with_bpo_transitions.py | 1 + .../prague/eip6110_deposits/test_deposits.py | 1 + .../ContractCreationSpamFiller.json | 3 + .../Return50000_2Filler.json | 3 + .../static_Return50000_2Filler.json | 3 + tox.ini | 2 + 19 files changed, 1264 insertions(+), 26 deletions(-) create mode 100644 packages/testing/src/execution_testing/fixtures/tests/test_collector.py diff --git a/.github/configs/feature.yaml b/.github/configs/feature.yaml index b9404e5d59..17ac97dce5 100644 --- a/.github/configs/feature.yaml +++ b/.github/configs/feature.yaml @@ -1,27 +1,27 @@ # Unless filling for special features, all features should fill for previous forks (starting from Frontier) too stable: evm-type: stable - fill-params: --until=Prague --fill-static-tests --ignore=tests/static/state_tests/stQuadraticComplexityTest + fill-params: --no-html --until=Prague --fill-static-tests --ignore=tests/static/state_tests/stQuadraticComplexityTest develop: - evm-type: develop - fill-params: --until=BPO4 
--fill-static-tests --ignore=tests/static/state_tests/stQuadraticComplexityTest + evm-type: develop + fill-params: --no-html --until=BPO4 --fill-static-tests --ignore=tests/static/state_tests/stQuadraticComplexityTest benchmark: evm-type: benchmark - fill-params: --fork=Prague --gas-benchmark-values 1,5,10,30,60,100,150 -m benchmark ./tests/benchmark + fill-params: --no-html --fork=Prague --gas-benchmark-values 1,5,10,30,60,100,150 -m benchmark ./tests/benchmark benchmark_develop: evm-type: benchmark - fill-params: --fork=Osaka --gas-benchmark-values 1,5,10,30,60,100,150 -m "benchmark" ./tests/benchmark + fill-params: --no-html --fork=Osaka --gas-benchmark-values 1,5,10,30,60,100,150 -m "benchmark" ./tests/benchmark feature_only: true benchmark_fast: evm-type: benchmark - fill-params: --fork=Prague --gas-benchmark-values 100 -m "benchmark" ./tests/benchmark + fill-params: --no-html --fork=Prague --gas-benchmark-values 100 -m "benchmark" ./tests/benchmark feature_only: true bal: evm-type: develop - fill-params: --fork=Amsterdam --fill-static-tests + fill-params: --no-html --fork=Amsterdam --fill-static-tests feature_only: true diff --git a/packages/testing/src/execution_testing/cli/gen_index.py b/packages/testing/src/execution_testing/cli/gen_index.py index 1e4af37cf1..3a95688d5e 100644 --- a/packages/testing/src/execution_testing/cli/gen_index.py +++ b/packages/testing/src/execution_testing/cli/gen_index.py @@ -226,5 +226,79 @@ def generate_fixtures_index( f.write(index.model_dump_json(exclude_none=False, indent=2)) +def merge_partial_indexes(output_dir: Path, quiet_mode: bool = False) -> None: + """ + Merge partial index files from all workers into final index.json. + + This is called by pytest_sessionfinish on the master process after all + workers have finished and written their partial indexes. + + Partial indexes use JSONL format (one JSON object per line) for efficient + append-only writes during fill. Entries are validated with Pydantic here. + + Args: + output_dir: The fixture output directory. + quiet_mode: If True, don't print status messages. + + """ + meta_dir = output_dir / ".meta" + partial_files = list(meta_dir.glob("partial_index*.jsonl")) + + if not partial_files: + raise Exception("No partial indexes found.") + + # Merge all partial indexes (JSONL format: one entry per line) + # Read as raw dicts — the data was already validated when collected + # from live Pydantic fixture objects in add_fixture(). + all_raw_entries: list[dict] = [] + all_forks: set = set() + all_formats: set = set() + + for partial_file in partial_files: + with open(partial_file) as f: + for line in f: + line = line.strip() + if not line: + continue + entry_data = json.loads(line) + all_raw_entries.append(entry_data) + # Collect forks and formats from raw strings + if entry_data.get("fork"): + all_forks.add(entry_data["fork"]) + if entry_data.get("format"): + all_formats.add(entry_data["format"]) + + # Compute root hash from raw dicts (no Pydantic needed) + root_hash = HashableItem.from_raw_entries(all_raw_entries).hash() + + # Build final index — Pydantic validates the entire structure once + # via model_validate(), not 96k individual model_validate() calls. 
+ index = IndexFile.model_validate( + { + "test_cases": all_raw_entries, + "root_hash": HexNumber(root_hash), + "created_at": datetime.datetime.now(), + "test_count": len(all_raw_entries), + "forks": list(all_forks), + "fixture_formats": list(all_formats), + } + ) + + # Write final index + index_path = meta_dir / "index.json" + index_path.parent.mkdir(parents=True, exist_ok=True) + index_path.write_text(index.model_dump_json(exclude_none=True, indent=2)) + + if not quiet_mode: + rich.print( + f"[green]Merged {len(partial_files)} partial indexes " + f"({len(all_raw_entries)} test cases) into {index_path}[/]" + ) + + # Cleanup partial files + for partial_file in partial_files: + partial_file.unlink() + + if __name__ == "__main__": generate_fixtures_index_cli() diff --git a/packages/testing/src/execution_testing/cli/hasher.py b/packages/testing/src/execution_testing/cli/hasher.py index 5b13e229b1..5bd6a9b8e9 100644 --- a/packages/testing/src/execution_testing/cli/hasher.py +++ b/packages/testing/src/execution_testing/cli/hasher.py @@ -1,17 +1,22 @@ """Simple CLI tool to hash a directory of JSON fixtures.""" +from __future__ import annotations + import hashlib import json import sys from dataclasses import dataclass, field from enum import IntEnum, auto from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, TypeVar +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, TypeVar import click from rich.console import Console from rich.markup import escape as rich_escape +if TYPE_CHECKING: + from execution_testing.fixtures.consume import TestCaseIndexFile + class HashableItemType(IntEnum): """Represents the type of a hashable item.""" @@ -145,6 +150,104 @@ def from_folder( items[file_path.name] = item return cls(type=HashableItemType.FOLDER, items=items, parents=parents) + @classmethod + def from_index_entries( + cls, entries: List["TestCaseIndexFile"] + ) -> "HashableItem": + """ + Create a hashable item tree from index entries (no file I/O). + + This produces the same hash as from_folder() but uses pre-collected + fixture hashes instead of reading files from disk. + + Optimized to O(n) using a trie-like structure built in a single pass, + avoiding repeated path operations and iterations. + """ + raw = [ + { + "id": e.id, + "json_path": str(e.json_path), + "fixture_hash": str(e.fixture_hash) + if e.fixture_hash + else None, + } + for e in entries + ] + return cls.from_raw_entries(raw) + + @classmethod + def from_raw_entries(cls, entries: List[Dict]) -> "HashableItem": + """ + Create a hashable item tree from raw entry dicts (no file I/O). + + Accepts dicts with "id", "json_path", and "fixture_hash" keys. + This avoids Pydantic overhead entirely — only plain string/int + operations are used to build the hash tree. + + Produces the same hash as from_folder() and from_index_entries(). 
+ """ + # Build a trie where each node is either: + # - A dict (folder node) containing child nodes + # - A list of (test_id, hash_bytes) tuples (file node marker) + # + # Structure: {folder: {folder: {file.json: [(id, hash), ...]}}} + root_trie: dict = {} + + # Single pass: insert all entries into trie + for entry in entries: + fixture_hash = entry.get("fixture_hash") + if not fixture_hash: + continue + + # Navigate/create path to file node + path_parts = Path(entry["json_path"]).parts + current = root_trie + + # Navigate to parent folder, creating nodes as needed + for part in path_parts[:-1]: + if part not in current: + current[part] = {} + current = current[part] + + # Add test entry to file node + file_name = path_parts[-1] + if file_name not in current: + current[file_name] = [] + + # Convert hex string to 32-byte hash + hash_bytes = int(fixture_hash, 16).to_bytes(32, "big") + current[file_name].append((entry["id"], hash_bytes)) + + # Convert trie to HashableItem tree (single recursive pass) + def trie_to_hashable(node: dict) -> Dict[str, "HashableItem"]: + """Convert a trie node to HashableItem dict.""" + items: Dict[str, HashableItem] = {} + + for name, child in node.items(): + if isinstance(child, list): + # File node: child is list of (test_id, hash_bytes) + test_items = { + test_id: cls( + type=HashableItemType.TEST, root=hash_bytes + ) + for test_id, hash_bytes in child + } + items[name] = cls( + type=HashableItemType.FILE, items=test_items + ) + else: + # Folder node: recurse + items[name] = cls( + type=HashableItemType.FOLDER, + items=trie_to_hashable(child), + ) + + return items + + return cls( + type=HashableItemType.FOLDER, items=trie_to_hashable(root_trie) + ) + def render_hash_report( folder: Path, diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py index 55a8f37c1e..bd2490f820 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py @@ -29,7 +29,7 @@ ReferenceSpec, ) from execution_testing.cli.gen_index import ( - generate_fixtures_index, + merge_partial_indexes, ) from execution_testing.client_clis import TransitionTool from execution_testing.client_clis.clis.geth import FixtureConsumerTool @@ -44,6 +44,7 @@ PreAllocGroupBuilders, PreAllocGroups, TestInfo, + merge_partial_fixture_files, ) from execution_testing.forks import ( Fork, @@ -1237,11 +1238,16 @@ def fixture_collector( single_fixture_per_file=fixture_output.single_fixture_per_file, filler_path=filler_path, base_dump_dir=base_dump_dir, + generate_index=request.config.getoption("generate_index"), ) yield fixture_collector - fixture_collector.dump_fixtures() + worker_id = os.environ.get("PYTEST_XDIST_WORKER", None) + fixture_collector.dump_fixtures(worker_id) if do_fixture_verification: fixture_collector.verify_fixture_files(evm_fixture_verification) + # Write partial index for this worker/scope + if fixture_collector.generate_index: + fixture_collector.write_partial_index(worker_id) @pytest.fixture(autouse=True, scope="session") @@ -1589,6 +1595,19 @@ def pytest_collection_modifyitems( for i in reversed(items_for_removal): items.pop(i) + # Schedule slow-marked tests first (Longest Processing Time First). + # Workers each grab the next test from the queue, so slow tests get + # distributed across workers and finish before the fast-test tail. 
+ slow_items = [] + normal_items = [] + for item in items: + if item.get_closest_marker("slow") is not None: + slow_items.append(item) + else: + normal_items.append(item) + if slow_items: + items[:] = slow_items + normal_items + def pytest_sessionfinish(session: pytest.Session, exitstatus: int) -> None: """ @@ -1630,18 +1649,24 @@ def pytest_sessionfinish(session: pytest.Session, exitstatus: int) -> None: if fixture_output.is_stdout or is_help_or_collectonly_mode(session.config): return + # Merge partial fixture files from all workers into final JSON files + merge_partial_fixture_files(fixture_output.directory) + # Remove any lock files that may have been created. for file in fixture_output.directory.rglob("*.lock"): file.unlink() - # Generate index file for all produced fixtures. + # Generate index file for all produced fixtures by merging partial indexes. + # Only merge if partial indexes were actually written (i.e., tests produced + # fixtures). When no tests are filled (e.g., all skipped), no partial + # indexes exist and merge_partial_indexes should not be called. if ( session.config.getoption("generate_index") and not session_instance.phase_manager.is_pre_alloc_generation ): - generate_fixtures_index( - fixture_output.directory, quiet_mode=True, force_flag=False - ) + meta_dir = fixture_output.directory / ".meta" + if meta_dir.exists() and any(meta_dir.glob("partial_index*.jsonl")): + merge_partial_indexes(fixture_output.directory, quiet_mode=True) # Create tarball of the output directory if the output is a tarball. fixture_output.create_tarball() diff --git a/packages/testing/src/execution_testing/cli/tests/test_hasher.py b/packages/testing/src/execution_testing/cli/tests/test_hasher.py index b80bdc1e30..bd5c935780 100644 --- a/packages/testing/src/execution_testing/cli/tests/test_hasher.py +++ b/packages/testing/src/execution_testing/cli/tests/test_hasher.py @@ -1,11 +1,57 @@ -"""Tests for the hasher CLI tool.""" +"""Tests for the hasher CLI tool, module, and merge_partial_indexes.""" import json +import tempfile from pathlib import Path +from typing import Generator, List +import pytest from click.testing import CliRunner -from execution_testing.cli.hasher import hasher +from execution_testing.base_types import HexNumber +from execution_testing.cli.gen_index import merge_partial_indexes +from execution_testing.cli.hasher import HashableItem, hasher +from execution_testing.fixtures.consume import IndexFile, TestCaseIndexFile + +HASH_1 = 0x1111111111111111111111111111111111111111111111111111111111111111 +HASH_2 = 0x2222222222222222222222222222222222222222222222222222222222222222 +HASH_3 = 0x3333333333333333333333333333333333333333333333333333333333333333 +HASH_4 = 0x4444444444444444444444444444444444444444444444444444444444444444 +HASH_9 = 0x9999999999999999999999999999999999999999999999999999999999999999 + + +def _hex_str(h: int) -> str: + """Convert an integer hash to its 0x-prefixed hex string.""" + return f"0x{h:064x}" + + +def _make_entry( + test_id: str, + json_path: str, + fixture_hash: int, + fork: str | None = None, + fmt: str | None = None, +) -> TestCaseIndexFile: + """Create a TestCaseIndexFile for testing.""" + return TestCaseIndexFile( + id=test_id, + json_path=Path(json_path), + fixture_hash=HexNumber(fixture_hash), + fork=fork, + format=fmt, + ) + + +def _make_json_fixture(test_names_and_hashes: dict[str, int]) -> str: + """Create a JSON fixture file matching from_folder expectations.""" + data = {} + for name, h in test_names_and_hashes.items(): + data[name] = { + 
"_info": {"hash": _hex_str(h)}, + "pre": {}, + "post": {}, + } + return json.dumps(data) def create_fixture(path: Path, test_name: str, hash_value: str) -> None: @@ -340,3 +386,384 @@ def test_hash_help(self) -> None: result = runner.invoke(hasher, ["hash", "--help"]) assert result.exit_code == 0 assert "Hash folders of JSON fixtures" in result.output + + +class TestHashableItemFromIndexEntries: + """Test that from_index_entries produces same hash as from_folder.""" + + @pytest.fixture + def fixture_dir(self) -> Generator[Path, None, None]: + """Create a temporary directory with test fixtures.""" + with tempfile.TemporaryDirectory() as tmpdir: + base = Path(tmpdir) + + # state_tests/cancun/test.json (two tests) + state_tests = base / "state_tests" / "cancun" + state_tests.mkdir(parents=True) + (state_tests / "test.json").write_text( + _make_json_fixture({"test_one": HASH_1, "test_two": HASH_2}) + ) + + # blockchain_tests/cancun/test.json (one test) + blockchain_tests = base / "blockchain_tests" / "cancun" + blockchain_tests.mkdir(parents=True) + (blockchain_tests / "test.json").write_text( + _make_json_fixture({"test_three": HASH_3}) + ) + + yield base + + @pytest.fixture + def index_entries(self) -> List[TestCaseIndexFile]: + """Create index entries matching the fixture_dir structure.""" + return [ + _make_entry("test_one", "state_tests/cancun/test.json", HASH_1), + _make_entry("test_two", "state_tests/cancun/test.json", HASH_2), + _make_entry( + "test_three", "blockchain_tests/cancun/test.json", HASH_3 + ), + ] + + def test_hash_matches_from_folder( + self, + fixture_dir: Path, + index_entries: List[TestCaseIndexFile], + ) -> None: + """Verify from_index_entries produces same hash as from_folder.""" + hash_from_folder = HashableItem.from_folder( + folder_path=fixture_dir + ).hash() + hash_from_entries = HashableItem.from_index_entries( + index_entries + ).hash() + assert hash_from_folder == hash_from_entries + + def test_hash_changes_with_different_entries( + self, index_entries: List[TestCaseIndexFile] + ) -> None: + """Verify hash changes when entries change.""" + hash1 = HashableItem.from_index_entries(index_entries).hash() + + modified = index_entries.copy() + modified[0] = _make_entry( + "test_one", "state_tests/cancun/test.json", HASH_9 + ) + hash2 = HashableItem.from_index_entries(modified).hash() + + assert hash1 != hash2 + + def test_empty_entries(self) -> None: + """Verify empty entries produces a valid hash.""" + result = HashableItem.from_index_entries([]).hash() + assert result is not None + assert len(result) == 32 + + def test_multiple_files_in_same_folder(self) -> None: + """Verify hash with multiple JSON files in the same folder.""" + with tempfile.TemporaryDirectory() as tmpdir: + base = Path(tmpdir) + folder = base / "tests" / "cancun" + folder.mkdir(parents=True) + + (folder / "test_a.json").write_text( + _make_json_fixture({"a1": HASH_1}) + ) + (folder / "test_b.json").write_text( + _make_json_fixture({"b1": HASH_2}) + ) + + entries = [ + _make_entry("a1", "tests/cancun/test_a.json", HASH_1), + _make_entry("b1", "tests/cancun/test_b.json", HASH_2), + ] + + hash_from_folder = HashableItem.from_folder( + folder_path=base + ).hash() + hash_from_entries = HashableItem.from_index_entries(entries).hash() + assert hash_from_folder == hash_from_entries + + def test_deeply_nested_paths(self) -> None: + """Verify hash with deeply nested folder structures (3+ levels).""" + with tempfile.TemporaryDirectory() as tmpdir: + base = Path(tmpdir) + deep = base / "a" / "b" / "c" / "d" 
+ deep.mkdir(parents=True) + + (deep / "test.json").write_text( + _make_json_fixture({"t1": HASH_1, "t2": HASH_2}) + ) + + entries = [ + _make_entry("t1", "a/b/c/d/test.json", HASH_1), + _make_entry("t2", "a/b/c/d/test.json", HASH_2), + ] + + hash_from_folder = HashableItem.from_folder( + folder_path=base + ).hash() + hash_from_entries = HashableItem.from_index_entries(entries).hash() + assert hash_from_folder == hash_from_entries + + def test_single_file_single_test(self) -> None: + """Verify degenerate case: one folder, one file, one test.""" + with tempfile.TemporaryDirectory() as tmpdir: + base = Path(tmpdir) + folder = base / "tests" + folder.mkdir() + + (folder / "only.json").write_text( + _make_json_fixture({"solo": HASH_4}) + ) + + entries = [_make_entry("solo", "tests/only.json", HASH_4)] + + hash_from_folder = HashableItem.from_folder( + folder_path=base + ).hash() + hash_from_entries = HashableItem.from_index_entries(entries).hash() + assert hash_from_folder == hash_from_entries + + def test_entries_with_none_fixture_hash_skipped(self) -> None: + """Verify entries with fixture_hash=None are skipped.""" + entries_with_none = [ + _make_entry("t1", "tests/a.json", HASH_1), + TestCaseIndexFile( + id="t_null", + json_path=Path("tests/a.json"), + fixture_hash=None, + fork=None, + format=None, + ), + ] + entries_without_none = [ + _make_entry("t1", "tests/a.json", HASH_1), + ] + + hash_with = HashableItem.from_index_entries(entries_with_none).hash() + hash_without = HashableItem.from_index_entries( + entries_without_none + ).hash() + assert hash_with == hash_without + + +class TestMergePartialIndexes: + """Test the JSONL partial index merge pipeline end-to-end.""" + + def _write_jsonl(self, path: Path, entries: list[dict]) -> None: + """Write a list of dicts as JSONL lines.""" + path.parent.mkdir(parents=True, exist_ok=True) + with open(path, "w") as f: + for entry in entries: + f.write(json.dumps(entry) + "\n") + + def _make_entry_dict( + self, + test_id: str, + json_path: str, + fixture_hash: int, + fork: str | None = None, + fmt: str | None = None, + ) -> dict: + """Create a dict matching what collector.py writes to JSONL.""" + return { + "id": test_id, + "json_path": json_path, + "fixture_hash": _hex_str(fixture_hash), + "fork": fork, + "format": fmt, + "pre_hash": None, + } + + def test_merge_produces_valid_index(self) -> None: + """Verify merging JSONL partials produces a valid index.json.""" + with tempfile.TemporaryDirectory() as tmpdir: + output_dir = Path(tmpdir) + meta_dir = output_dir / ".meta" + meta_dir.mkdir(parents=True) + + entries = [ + self._make_entry_dict( + "test_a", + "state_tests/cancun/test.json", + HASH_1, + fork="Cancun", + fmt="state_test", + ), + self._make_entry_dict( + "test_b", + "blockchain_tests/cancun/test.json", + HASH_2, + fork="Cancun", + fmt="blockchain_test", + ), + ] + + self._write_jsonl( + meta_dir / "partial_index.gw0.jsonl", entries[:1] + ) + self._write_jsonl( + meta_dir / "partial_index.gw1.jsonl", entries[1:] + ) + + merge_partial_indexes(output_dir, quiet_mode=True) + + index_path = meta_dir / "index.json" + assert index_path.exists() + + index = IndexFile.model_validate_json(index_path.read_text()) + assert index.test_count == 2 + assert index.root_hash is not None + assert index.root_hash != 0 + + def test_merge_fixture_formats_uses_format_name(self) -> None: + """ + Verify fixture_formats contains format_name values (e.g. + 'state_test') not class names (e.g. 'StateFixture'). 
+ + This is the exact bug that format.__name__ would have caused. + """ + with tempfile.TemporaryDirectory() as tmpdir: + output_dir = Path(tmpdir) + meta_dir = output_dir / ".meta" + + entries = [ + self._make_entry_dict( + "t1", + "state_tests/test.json", + HASH_1, + fork="Cancun", + fmt="state_test", + ), + self._make_entry_dict( + "t2", + "blockchain_tests/test.json", + HASH_2, + fork="Cancun", + fmt="blockchain_test", + ), + ] + self._write_jsonl(meta_dir / "partial_index.gw0.jsonl", entries) + + merge_partial_indexes(output_dir, quiet_mode=True) + + index = IndexFile.model_validate_json( + (meta_dir / "index.json").read_text() + ) + assert index.fixture_formats is not None + assert sorted(index.fixture_formats) == [ + "blockchain_test", + "state_test", + ] + + def test_merge_forks_collected_correctly(self) -> None: + """Verify forks are collected from validated entries.""" + with tempfile.TemporaryDirectory() as tmpdir: + output_dir = Path(tmpdir) + meta_dir = output_dir / ".meta" + + entries = [ + self._make_entry_dict( + "t1", + "state_tests/test.json", + HASH_1, + fork="Cancun", + fmt="state_test", + ), + self._make_entry_dict( + "t2", + "state_tests/test2.json", + HASH_2, + fork="Shanghai", + fmt="state_test", + ), + ] + self._write_jsonl(meta_dir / "partial_index.gw0.jsonl", entries) + + merge_partial_indexes(output_dir, quiet_mode=True) + + index = IndexFile.model_validate_json( + (meta_dir / "index.json").read_text() + ) + assert index.forks is not None + assert sorted(str(f) for f in index.forks) == [ + "Cancun", + "Shanghai", + ] + + def test_merge_cleans_up_partial_files(self) -> None: + """Verify partial JSONL files are deleted after merge.""" + with tempfile.TemporaryDirectory() as tmpdir: + output_dir = Path(tmpdir) + meta_dir = output_dir / ".meta" + + entries = [ + self._make_entry_dict( + "t1", + "state_tests/test.json", + HASH_1, + fmt="state_test", + ), + ] + self._write_jsonl(meta_dir / "partial_index.gw0.jsonl", entries) + self._write_jsonl(meta_dir / "partial_index.gw1.jsonl", entries) + + merge_partial_indexes(output_dir, quiet_mode=True) + + remaining = list(meta_dir.glob("partial_index*.jsonl")) + assert remaining == [] + + def test_merge_multiple_workers_same_hash_as_single(self) -> None: + """Verify hash is the same regardless of how entries are split.""" + entry_dicts = [ + self._make_entry_dict( + "t1", "state_tests/a.json", HASH_1, fmt="state_test" + ), + self._make_entry_dict( + "t2", "state_tests/a.json", HASH_2, fmt="state_test" + ), + self._make_entry_dict( + "t3", "blockchain_tests/b.json", HASH_3, fmt="blockchain_test" + ), + ] + + # Single worker: all entries in one file + with tempfile.TemporaryDirectory() as tmpdir1: + output1 = Path(tmpdir1) + meta1 = output1 / ".meta" + self._write_jsonl(meta1 / "partial_index.gw0.jsonl", entry_dicts) + merge_partial_indexes(output1, quiet_mode=True) + index1 = IndexFile.model_validate_json( + (meta1 / "index.json").read_text() + ) + + # Multiple workers: entries split across files + with tempfile.TemporaryDirectory() as tmpdir2: + output2 = Path(tmpdir2) + meta2 = output2 / ".meta" + self._write_jsonl( + meta2 / "partial_index.gw0.jsonl", entry_dicts[:1] + ) + self._write_jsonl( + meta2 / "partial_index.gw1.jsonl", entry_dicts[1:2] + ) + self._write_jsonl( + meta2 / "partial_index.gw2.jsonl", entry_dicts[2:] + ) + merge_partial_indexes(output2, quiet_mode=True) + index2 = IndexFile.model_validate_json( + (meta2 / "index.json").read_text() + ) + + assert index1.root_hash == index2.root_hash + assert 
index1.test_count == index2.test_count + + def test_merge_raises_when_no_partial_files(self) -> None: + """Verify merge_partial_indexes raises when no partials exist.""" + with tempfile.TemporaryDirectory() as tmpdir: + output_dir = Path(tmpdir) + meta_dir = output_dir / ".meta" + meta_dir.mkdir(parents=True) + + with pytest.raises(Exception, match="No partial indexes found"): + merge_partial_indexes(output_dir, quiet_mode=True) diff --git a/packages/testing/src/execution_testing/fixtures/__init__.py b/packages/testing/src/execution_testing/fixtures/__init__.py index 9d882707e7..18d4b3a118 100644 --- a/packages/testing/src/execution_testing/fixtures/__init__.py +++ b/packages/testing/src/execution_testing/fixtures/__init__.py @@ -14,7 +14,11 @@ BlockchainFixture, BlockchainFixtureCommon, ) -from .collector import FixtureCollector, TestInfo +from .collector import ( + FixtureCollector, + TestInfo, + merge_partial_fixture_files, +) from .consume import FixtureConsumer from .pre_alloc_groups import ( PreAllocGroup, @@ -45,4 +49,5 @@ "StateFixture", "TestInfo", "TransactionFixture", + "merge_partial_fixture_files", ] diff --git a/packages/testing/src/execution_testing/fixtures/collector.py b/packages/testing/src/execution_testing/fixtures/collector.py index b389609d5d..74bdb6b471 100644 --- a/packages/testing/src/execution_testing/fixtures/collector.py +++ b/packages/testing/src/execution_testing/fixtures/collector.py @@ -9,7 +9,16 @@ import sys from dataclasses import dataclass, field from pathlib import Path -from typing import ClassVar, Dict, Literal, Optional, Tuple +from typing import ( + ClassVar, + Dict, + List, + Literal, + Optional, + Tuple, +) + +from filelock import FileLock from execution_testing.base_types import to_json @@ -18,6 +27,70 @@ from .file import Fixtures +def merge_partial_fixture_files(output_dir: Path) -> None: + """ + Merge all partial fixture JSONL files into final JSON fixture files. + + Called at session end after all workers have written their partials. + Each partial file contains JSONL lines: {"k": fixture_id, "v": json_str} + """ + # Find all partial files + partial_files = list(output_dir.rglob("*.partial.*.jsonl")) + if not partial_files: + return + + # Group partial files by their target fixture file + # e.g., "test.partial.gw0.jsonl" -> "test.json" + partials_by_target: Dict[Path, List[Path]] = {} + for partial in partial_files: + # Remove .partial.{worker_id}.jsonl suffix to get target + name = partial.name + # Find ".partial." 
and remove everything after + idx = name.find(".partial.") + if idx == -1: + continue + target_name = name[:idx] + ".json" + target_path = partial.parent / target_name + if target_path not in partials_by_target: + partials_by_target[target_path] = [] + partials_by_target[target_path].append(partial) + + # Merge each group into its target file + for target_path, partials in partials_by_target.items(): + entries: Dict[str, str] = {} + + # Read all partial files + for partial in partials: + with open(partial) as f: + for line in f: + line = line.strip() + if not line: + continue + entry = json.loads(line) + entries[entry["k"]] = entry["v"] + + # Write final JSON file + sorted_keys = sorted(entries.keys()) + parts = ["{\n"] + last_idx = len(sorted_keys) - 1 + for i, key in enumerate(sorted_keys): + key_json = json.dumps(key) + # Add indentation for nesting inside outer JSON object + value_indented = entries[key].replace("\n", "\n ") + parts.append(f" {key_json}: {value_indented}") + parts.append(",\n" if i < last_idx else "\n") + parts.append("}") + target_path.write_text("".join(parts)) + + # Clean up partial files + for partial in partials: + partial.unlink() + # Also remove lock files + lock_file = partial.with_suffix(".lock") + if lock_file.exists(): + lock_file.unlink() + + @dataclass(kw_only=True, slots=True) class TestInfo: """Contains test information from the current node.""" @@ -125,10 +198,14 @@ class FixtureCollector: filler_path: Path base_dump_dir: Optional[Path] = None flush_interval: int = 1000 + generate_index: bool = True # Internal state all_fixtures: Dict[Path, Fixtures] = field(default_factory=dict) json_path_to_test_item: Dict[Path, TestInfo] = field(default_factory=dict) + # Store index entries as simple dicts + # (avoid Pydantic overhead during collection) + index_entries: List[Dict] = field(default_factory=list) def get_fixture_basename(self, info: TestInfo) -> Path: """Return basename of the fixture file for a given test case.""" @@ -166,6 +243,22 @@ def add_fixture(self, info: TestInfo, fixture: BaseFixture) -> Path: self.all_fixtures[fixture_path][info.get_id()] = fixture + # Collect index entry while data is in memory (if indexing enabled) + # Store as simple dict to avoid Pydantic overhead during collection + if self.generate_index: + relative_path = fixture_path.relative_to(self.output_dir) + fixture_fork = fixture.get_fork() + index_entry = { + "id": info.get_id(), + "json_path": str(relative_path), + "fixture_hash": str(fixture.hash) if fixture.hash else None, + "fork": fixture_fork.name() if fixture_fork else None, + "format": fixture.format_name, + } + if (pre_hash := getattr(fixture, "pre_hash", None)) is not None: + index_entry["pre_hash"] = pre_hash + self.index_entries.append(index_entry) + if ( self.flush_interval > 0 and len(self.all_fixtures) >= self.flush_interval @@ -174,7 +267,7 @@ def add_fixture(self, info: TestInfo, fixture: BaseFixture) -> Path: return fixture_path - def dump_fixtures(self) -> None: + def dump_fixtures(self, worker_id: str | None = None) -> None: """Dump all collected fixtures to their respective files.""" if self.output_dir.name == "stdout": combined_fixtures = { @@ -191,10 +284,35 @@ def dump_fixtures(self) -> None: raise TypeError( "All fixtures in a single file must have the same format." 
                )
-            fixtures.collect_into_file(fixture_path)
+            self._write_partial_fixtures(fixture_path, fixtures, worker_id)
 
         self.all_fixtures.clear()
 
+    def _write_partial_fixtures(
+        self, file_path: Path, fixtures: Fixtures, worker_id: str | None
+    ) -> None:
+        """
+        Write fixtures to a partial JSONL file (append-only).
+
+        Each line is a JSON object: {"k": fixture_id, "v": json_str}
+        This avoids O(n) merge work per worker - just O(1) append.
+        Final merge to JSON happens at session end.
+        """
+        suffix = f".{worker_id}" if worker_id else ".main"
+        partial_path = file_path.with_suffix(f".partial{suffix}.jsonl")
+        partial_path.parent.mkdir(parents=True, exist_ok=True)
+        lock_file_path = partial_path.with_suffix(".lock")
+
+        lines = []
+        for name in fixtures:
+            value = json.dumps(fixtures[name].json_dict_with_info(), indent=4)
+            # Store as JSONL: {"k": key, "v": serialized value string}
+            lines.append(json.dumps({"k": name, "v": value}) + "\n")
+
+        with FileLock(lock_file_path):
+            with open(partial_path, "a") as f:
+                f.writelines(lines)
+
     def verify_fixture_files(
         self, evm_fixture_verification: FixtureConsumer
     ) -> None:
@@ -231,3 +349,38 @@ def _get_consume_direct_dump_dir(
         return info.get_dump_dir_path(
             self.base_dump_dir, self.filler_path, level="test_function"
         )
+
+    def write_partial_index(self, worker_id: str | None = None) -> Path | None:
+        """
+        Append collected index entries to a partial index file using JSONL
+        format.
+
+        Uses append-only JSONL (JSON Lines) format for efficient writes without
+        read-modify-write cycles. Each line is a complete JSON object
+        representing one index entry.
+
+        Args:
+            worker_id: The xdist worker ID (e.g., "gw0"), or None for master.
+
+        Returns:
+            Path to the partial index file, or None if indexing is disabled.
+ + """ + if not self.generate_index or not self.index_entries: + return None + + suffix = f".{worker_id}" if worker_id else ".master" + partial_index_path = ( + self.output_dir / ".meta" / f"partial_index{suffix}.jsonl" + ) + partial_index_path.parent.mkdir(parents=True, exist_ok=True) + lock_file_path = partial_index_path.with_suffix(".lock") + + # Append entries as JSONL (one JSON object per line) + # This avoids read-modify-write cycles + with FileLock(lock_file_path): + with open(partial_index_path, "a") as f: + for entry in self.index_entries: + f.write(json.dumps(entry) + "\n") + + return partial_index_path diff --git a/packages/testing/src/execution_testing/fixtures/consume.py b/packages/testing/src/execution_testing/fixtures/consume.py index d43ad6deac..3c03a85cd5 100644 --- a/packages/testing/src/execution_testing/fixtures/consume.py +++ b/packages/testing/src/execution_testing/fixtures/consume.py @@ -47,8 +47,8 @@ class TestCaseBase(BaseModel): """Base model for a test case used in EEST consume commands.""" id: str - fixture_hash: HexNumber | None - fork: Fork | None + fixture_hash: HexNumber | None = None + fork: Fork | None = None format: FixtureFormat pre_hash: str | None = None __test__ = False # stop pytest from collecting this class as a test diff --git a/packages/testing/src/execution_testing/fixtures/file.py b/packages/testing/src/execution_testing/fixtures/file.py index 4cf09cb1b1..5656359136 100644 --- a/packages/testing/src/execution_testing/fixtures/file.py +++ b/packages/testing/src/execution_testing/fixtures/file.py @@ -62,10 +62,10 @@ def collect_into_file(self, file_path: Path) -> None: lock_file_path = file_path.with_suffix(".lock") with FileLock(lock_file_path): if file_path.exists(): - with open(file_path, "r") as f: - json_fixtures = json.load(f) + json_fixtures = json.loads(file_path.read_bytes()) for name, fixture in self.items(): json_fixtures[name] = fixture.json_dict_with_info() - with open(file_path, "w") as f: - json.dump(dict(sorted(json_fixtures.items())), f, indent=4) + file_path.write_text( + json.dumps(dict(sorted(json_fixtures.items())), indent=4) + ) diff --git a/packages/testing/src/execution_testing/fixtures/tests/test_collector.py b/packages/testing/src/execution_testing/fixtures/tests/test_collector.py new file mode 100644 index 0000000000..87e55e6f89 --- /dev/null +++ b/packages/testing/src/execution_testing/fixtures/tests/test_collector.py @@ -0,0 +1,435 @@ +"""Test cases for the execution_testing.fixtures.collector module.""" + +import json +from pathlib import Path + +import pytest + +from ..base import BaseFixture +from ..collector import FixtureCollector, TestInfo, merge_partial_fixture_files +from ..file import Fixtures +from ..transaction import FixtureResult, TransactionFixture + + +def _make_fixture(nonce: int = 0) -> TransactionFixture: + """Create a minimal TransactionFixture for testing.""" + fixture = TransactionFixture( + transaction=f"0x{nonce:04x}", + result={"Paris": FixtureResult(intrinsic_gas=nonce)}, + ) + fixture.fill_info( + "t8n-test", + f"test description {nonce}", + fixture_source_url="http://example.com", + ref_spec=None, + _info_metadata={}, + ) + return fixture + + +def _make_info(test_id: str, module_path: Path) -> TestInfo: + """Create a TestInfo for testing.""" + return TestInfo( + name=f"test_func[fork_Paris-{test_id}]", + id=f"{module_path}::test_func[fork_Paris-{test_id}]", + original_name="test_func", + module_path=module_path, + ) + + +@pytest.fixture +def output_dir(tmp_path: Path) -> Path: + """Create 
output directory for test fixtures.""" + out = tmp_path / "output" + out.mkdir() + return out + + +@pytest.fixture +def filler_path(tmp_path: Path) -> Path: + """Create a filler path (tests directory root).""" + p = tmp_path / "tests" + p.mkdir() + return p + + +@pytest.fixture +def module_path(filler_path: Path) -> Path: + """Create a dummy test module path.""" + mod = filler_path / "cancun" / "test_example.py" + mod.parent.mkdir(parents=True, exist_ok=True) + mod.touch() + return mod + + +class TestPartialFixtureFiles: + """Tests for partial fixture file writing and merging.""" + + def test_single_fixture_matches_json_dumps( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """Output for a single fixture must match json.dumps(..., indent=4).""" + collector = FixtureCollector( + output_dir=output_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + fixture = _make_fixture(1) + info = _make_info("tx_test", module_path) + collector.add_fixture(info, fixture) + collector.dump_fixtures(worker_id="gw0") + merge_partial_fixture_files(output_dir) + + # Find the written file + json_files = list(output_dir.rglob("*.json")) + assert len(json_files) == 1 + written = json_files[0].read_text() + + # Build expected output using the original json.dumps approach + fixture_id = info.get_id() + expected_dict = {fixture_id: fixture.json_dict_with_info()} + expected = json.dumps(dict(sorted(expected_dict.items())), indent=4) + assert written == expected + + def test_multiple_fixtures_match_json_dumps( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """ + Output for multiple fixtures must match json.dumps(..., indent=4). + """ + collector = FixtureCollector( + output_dir=output_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + fixtures_and_infos = [] + for i in range(5): + fixture = _make_fixture(i) + info = _make_info(f"tx_test_{i}", module_path) + collector.add_fixture(info, fixture) + fixtures_and_infos.append((info, fixture)) + + collector.dump_fixtures(worker_id="gw0") + merge_partial_fixture_files(output_dir) + + json_files = list(output_dir.rglob("*.json")) + assert len(json_files) == 1 + written = json_files[0].read_text() + + expected_dict = { + info.get_id(): fixture.json_dict_with_info() + for info, fixture in fixtures_and_infos + } + expected = json.dumps(dict(sorted(expected_dict.items())), indent=4) + assert written == expected + + def test_multiple_workers_merge_correctly( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """ + Simulates xdist: worker A and B write partial files, merge at end. + Final output should match json.dumps of all fixtures. 
+ """ + collector1 = FixtureCollector( + output_dir=output_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + # Worker A writes fixtures 0-2 + pairs_a = [] + for i in range(3): + fixture = _make_fixture(i) + info = _make_info(f"tx_test_{i}", module_path) + collector1.add_fixture(info, fixture) + pairs_a.append((info, fixture)) + collector1.dump_fixtures(worker_id="gw0") + + # Worker B writes fixtures 3-5 (separate partial file) + collector2 = FixtureCollector( + output_dir=output_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + pairs_b = [] + for i in range(3, 6): + fixture = _make_fixture(i) + info = _make_info(f"tx_test_{i}", module_path) + collector2.add_fixture(info, fixture) + pairs_b.append((info, fixture)) + collector2.dump_fixtures(worker_id="gw1") + + # Merge at session end + merge_partial_fixture_files(output_dir) + + # Verify final output matches json.dumps of all 6 fixtures + json_files = list(output_dir.rglob("*.json")) + assert len(json_files) == 1 + written = json_files[0].read_text() + + expected_dict = { + info.get_id(): fixture.json_dict_with_info() + for info, fixture in pairs_a + pairs_b + } + expected = json.dumps(dict(sorted(expected_dict.items())), indent=4) + assert written == expected + + def test_output_is_valid_json( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """The written file must be parseable as valid JSON.""" + collector = FixtureCollector( + output_dir=output_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + for i in range(3): + fixture = _make_fixture(i) + info = _make_info(f"tx_test_{i}", module_path) + collector.add_fixture(info, fixture) + + collector.dump_fixtures(worker_id="gw0") + merge_partial_fixture_files(output_dir) + + json_files = list(output_dir.rglob("*.json")) + assert len(json_files) == 1 + parsed = json.loads(json_files[0].read_text()) + assert isinstance(parsed, dict) + assert len(parsed) == 3 + + def test_fixtures_sorted_by_key( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """Fixture entries in the output file must be sorted by key.""" + collector = FixtureCollector( + output_dir=output_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + # Add in reverse order + for i in reversed(range(3)): + fixture = _make_fixture(i) + info = _make_info(f"tx_test_{i}", module_path) + collector.add_fixture(info, fixture) + + collector.dump_fixtures(worker_id="gw0") + merge_partial_fixture_files(output_dir) + + json_files = list(output_dir.rglob("*.json")) + assert len(json_files) == 1 + content = json_files[0].read_text() + parsed = json.loads(content) + keys = list(parsed.keys()) + assert keys == sorted(keys) + + def test_partial_files_cleaned_up_after_merge( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """Partial JSONL files are deleted after merging.""" + collector = FixtureCollector( + output_dir=output_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + fixture = _make_fixture(1) + info = _make_info("tx_test", module_path) + collector.add_fixture(info, fixture) + collector.dump_fixtures(worker_id="gw0") + + # Verify partial file exists before merge + partial_files = 
list(output_dir.rglob("*.partial.*.jsonl")) + assert len(partial_files) == 1 + + merge_partial_fixture_files(output_dir) + + # Verify partial file is deleted after merge + partial_files = list(output_dir.rglob("*.partial.*.jsonl")) + assert len(partial_files) == 0 + + +class TestLegacyCompatibility: + """ + Tests verifying the new partial file approach produces byte-identical + output to the legacy Fixtures.collect_into_file() method. + """ + + def test_single_fixture_matches_legacy( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """Single fixture output matches legacy collect_into_file().""" + fixture: BaseFixture = _make_fixture(1) + info = _make_info("tx_test", module_path) + fixture_id = info.get_id() + + # Legacy approach: use Fixtures.collect_into_file() + legacy_dir = output_dir / "legacy" + legacy_dir.mkdir() + legacy_file = legacy_dir / "test.json" + legacy_fixtures = Fixtures(root={fixture_id: fixture}) + legacy_fixtures.collect_into_file(legacy_file) + legacy_output = legacy_file.read_text() + + # New approach: use partial files + merge + new_dir = output_dir / "new" + new_dir.mkdir() + collector = FixtureCollector( + output_dir=new_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + collector.add_fixture(info, fixture) + collector.dump_fixtures(worker_id="gw0") + merge_partial_fixture_files(new_dir) + new_files = list(new_dir.rglob("*.json")) + assert len(new_files) == 1 + new_output = new_files[0].read_text() + + assert new_output == legacy_output + + def test_multiple_fixtures_match_legacy( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """Multiple fixtures output matches legacy collect_into_file().""" + fixtures_dict: dict[str, BaseFixture] = {} + infos = [] + for i in range(5): + fixture = _make_fixture(i) + info = _make_info(f"tx_test_{i}", module_path) + fixtures_dict[info.get_id()] = fixture + infos.append(info) + + # Legacy approach + legacy_dir = output_dir / "legacy" + legacy_dir.mkdir() + legacy_file = legacy_dir / "test.json" + legacy_fixtures = Fixtures(root=fixtures_dict) + legacy_fixtures.collect_into_file(legacy_file) + legacy_output = legacy_file.read_text() + + # New approach + new_dir = output_dir / "new" + new_dir.mkdir() + collector = FixtureCollector( + output_dir=new_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + for i, info in enumerate(infos): + collector.add_fixture(info, list(fixtures_dict.values())[i]) + collector.dump_fixtures(worker_id="gw0") + merge_partial_fixture_files(new_dir) + new_files = list(new_dir.rglob("*.json")) + assert len(new_files) == 1 + new_output = new_files[0].read_text() + + assert new_output == legacy_output + + def test_multiple_workers_match_legacy( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """ + Multiple workers writing to same logical file matches legacy output. 
+ """ + fixtures_dict: dict[str, BaseFixture] = {} + infos = [] + for i in range(6): + fixture = _make_fixture(i) + info = _make_info(f"tx_test_{i}", module_path) + fixtures_dict[info.get_id()] = fixture + infos.append(info) + + # Legacy approach: all fixtures in one call + legacy_dir = output_dir / "legacy" + legacy_dir.mkdir() + legacy_file = legacy_dir / "test.json" + legacy_fixtures = Fixtures(root=fixtures_dict) + legacy_fixtures.collect_into_file(legacy_file) + legacy_output = legacy_file.read_text() + + # New approach: simulate 3 workers, each with 2 fixtures + new_dir = output_dir / "new" + new_dir.mkdir() + fixture_values = list(fixtures_dict.values()) + for worker_idx in range(3): + collector = FixtureCollector( + output_dir=new_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + start = worker_idx * 2 + for i in range(start, start + 2): + collector.add_fixture(infos[i], fixture_values[i]) + collector.dump_fixtures(worker_id=f"gw{worker_idx}") + + merge_partial_fixture_files(new_dir) + new_files = list(new_dir.rglob("*.json")) + assert len(new_files) == 1 + new_output = new_files[0].read_text() + + assert new_output == legacy_output + + def test_special_characters_in_keys_match_legacy( + self, output_dir: Path, filler_path: Path, module_path: Path + ) -> None: + """Fixture IDs with special characters produce identical output.""" + # Create fixtures with complex IDs (typical pytest node IDs) + fixtures_dict: dict[str, BaseFixture] = {} + infos = [] + complex_ids = [ + "param[fork_Paris-state_test]", + "param[fork_Shanghai-blockchain_test]", + 'param[value="quoted"]', + "param[path/with/slashes]", + ] + for i, test_id in enumerate(complex_ids): + fixture = _make_fixture(i) + info = _make_info(test_id, module_path) + fixtures_dict[info.get_id()] = fixture + infos.append(info) + + # Legacy approach + legacy_dir = output_dir / "legacy" + legacy_dir.mkdir() + legacy_file = legacy_dir / "test.json" + legacy_fixtures = Fixtures(root=fixtures_dict) + legacy_fixtures.collect_into_file(legacy_file) + legacy_output = legacy_file.read_text() + + # New approach + new_dir = output_dir / "new" + new_dir.mkdir() + collector = FixtureCollector( + output_dir=new_dir, + fill_static_tests=False, + single_fixture_per_file=False, + filler_path=filler_path, + generate_index=False, + ) + for i, info in enumerate(infos): + collector.add_fixture(info, list(fixtures_dict.values())[i]) + collector.dump_fixtures(worker_id="gw0") + merge_partial_fixture_files(new_dir) + new_files = list(new_dir.rglob("*.json")) + assert len(new_files) == 1 + new_output = new_files[0].read_text() + + assert new_output == legacy_output diff --git a/tests/cancun/eip4844_blobs/test_excess_blob_gas_fork_transition.py b/tests/cancun/eip4844_blobs/test_excess_blob_gas_fork_transition.py index 1866a4e210..d5f6dbff75 100644 --- a/tests/cancun/eip4844_blobs/test_excess_blob_gas_fork_transition.py +++ b/tests/cancun/eip4844_blobs/test_excess_blob_gas_fork_transition.py @@ -526,6 +526,7 @@ def test_fork_transition_excess_blob_gas_at_blob_genesis( ], ) @pytest.mark.parametrize("block_base_fee_per_gas", [7, 16, 23]) +@pytest.mark.slow def test_fork_transition_excess_blob_gas_post_blob_genesis( blockchain_test: BlockchainTestFiller, genesis_environment: Environment, diff --git a/tests/frontier/opcodes/test_blockhash.py b/tests/frontier/opcodes/test_blockhash.py index de0b7a3034..ca7c07459f 100644 --- a/tests/frontier/opcodes/test_blockhash.py +++ 
b/tests/frontier/opcodes/test_blockhash.py @@ -22,6 +22,7 @@ pytest.param(256, True, id="256_empty_blocks"), ], ) +@pytest.mark.slow() def test_genesis_hash_available( blockchain_test: BlockchainTestFiller, pre: Alloc, diff --git a/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo.py b/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo.py index 79c4bd14d8..c0eb3af65c 100644 --- a/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo.py +++ b/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo.py @@ -21,6 +21,7 @@ @pytest.mark.valid_for_bpo_forks() @pytest.mark.parametrize("parent_excess_blobs", [27]) @pytest.mark.parametrize("block_base_fee_per_gas", [17]) +@pytest.mark.slow def test_blob_base_fee_with_bpo_transition( blockchain_test: BlockchainTestFiller, pre: Alloc, diff --git a/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo_transitions.py b/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo_transitions.py index 08f98b96df..f4e3af6282 100644 --- a/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo_transitions.py +++ b/tests/osaka/eip7918_blob_reserve_price/test_blob_reserve_price_with_bpo_transitions.py @@ -560,6 +560,7 @@ def get_fork_scenarios(fork: Fork) -> Iterator[ParameterSet]: ) @pytest.mark.valid_at_transition_to("Osaka", subsequent_forks=True) @pytest.mark.valid_for_bpo_forks() +@pytest.mark.slow() def test_reserve_price_at_transition( blockchain_test: BlockchainTestFiller, pre: Alloc, diff --git a/tests/prague/eip6110_deposits/test_deposits.py b/tests/prague/eip6110_deposits/test_deposits.py index be345f02a1..7f69c23ee9 100644 --- a/tests/prague/eip6110_deposits/test_deposits.py +++ b/tests/prague/eip6110_deposits/test_deposits.py @@ -914,6 +914,7 @@ ), ], ) +@pytest.mark.slow() @pytest.mark.pre_alloc_group( "deposit_requests", reason="Tests standard deposit request functionality with system contract", diff --git a/tests/static/state_tests/stAttackTest/ContractCreationSpamFiller.json b/tests/static/state_tests/stAttackTest/ContractCreationSpamFiller.json index 543bb0e046..07acf98ea3 100644 --- a/tests/static/state_tests/stAttackTest/ContractCreationSpamFiller.json +++ b/tests/static/state_tests/stAttackTest/ContractCreationSpamFiller.json @@ -1,5 +1,8 @@ { "ContractCreationSpam" : { + "_info" : { + "pytest_marks": ["slow"] + }, "env" : { "currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", "currentDifficulty" : "0x20000", diff --git a/tests/static/state_tests/stQuadraticComplexityTest/Return50000_2Filler.json b/tests/static/state_tests/stQuadraticComplexityTest/Return50000_2Filler.json index 486b49cbe7..c5646174aa 100644 --- a/tests/static/state_tests/stQuadraticComplexityTest/Return50000_2Filler.json +++ b/tests/static/state_tests/stQuadraticComplexityTest/Return50000_2Filler.json @@ -1,5 +1,8 @@ { "Return50000_2" : { + "_info" : { + "pytest_marks": ["slow"] + }, "env" : { "currentCoinbase" : "b94f5374fce5edbc8e2a8697c15331677e6ebf0b", "currentDifficulty" : "0x020000", diff --git a/tests/static/state_tests/stStaticCall/static_Return50000_2Filler.json b/tests/static/state_tests/stStaticCall/static_Return50000_2Filler.json index f2c178c800..7b7ce761bf 100644 --- a/tests/static/state_tests/stStaticCall/static_Return50000_2Filler.json +++ b/tests/static/state_tests/stStaticCall/static_Return50000_2Filler.json @@ -1,5 +1,8 @@ { "static_Return50000_2" : { + "_info" : { + "pytest_marks": ["slow"] + }, "env" : { 
"currentCoinbase" : "b94f5374fce5edbc8e2a8697c15331677e6ebf0b", "currentDifficulty" : "0x020000", diff --git a/tox.ini b/tox.ini index 22b12f6121..36f8fc652d 100644 --- a/tox.ini +++ b/tox.ini @@ -93,6 +93,7 @@ commands = fill \ -m "not slow and not zkevm and not benchmark" \ -n auto --maxprocesses 10 --dist=loadgroup \ + --skip-index \ --cov-config=pyproject.toml \ --cov=ethereum \ --cov-report=term \ @@ -103,6 +104,7 @@ commands = --log-to "{toxworkdir}/logs" \ --clean \ --until Amsterdam \ + --durations=50 \ {posargs} \ tests From 1b266ab5cfc4147a0e5a3a866b42805b2ae14ac8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=94=A1=E4=BD=B3=E8=AA=A0=20Louis=20Tsai?= <72684086+LouisTsai-Csie@users.noreply.github.com> Date: Fri, 30 Jan 2026 00:52:59 +0800 Subject: [PATCH 106/154] fix(test-benchmark): support `blobhash` benchmark (#2067) * fix: failing blobhash test * refactor: update blobhash benchmark --- .../compute/instruction/test_tx_context.py | 47 ++++++++++++------- 1 file changed, 29 insertions(+), 18 deletions(-) diff --git a/tests/benchmark/compute/instruction/test_tx_context.py b/tests/benchmark/compute/instruction/test_tx_context.py index cded6cab60..3c4975fc93 100644 --- a/tests/benchmark/compute/instruction/test_tx_context.py +++ b/tests/benchmark/compute/instruction/test_tx_context.py @@ -7,7 +7,7 @@ - BLOBHASH """ -from typing import Any, Dict +import math import pytest from execution_testing import ( @@ -41,36 +41,47 @@ def test_call_frame_context_ops( ) -@pytest.mark.repricing(blob_index=0, blobs_present=1) +@pytest.mark.repricing @pytest.mark.parametrize( - "blob_index, blobs_present", + "blob_present", [ - pytest.param(0, 0, id="no blobs"), - pytest.param(0, 1, id="one blob and accessed"), - pytest.param(1, 1, id="one blob but access non-existent index"), - pytest.param(5, 6, id="six blobs, access latest"), + pytest.param(0, id="no_blobs"), + pytest.param(1, id="one_blob"), ], ) def test_blobhash( fork: Fork, benchmark_test: BenchmarkTestFiller, - blob_index: int, - blobs_present: bool, + blob_present: int, + fixed_opcode_count: int | None, + gas_benchmark_value: int, ) -> None: """Benchmark BLOBHASH instruction.""" - tx_kwargs: Dict[str, Any] = {} - if blobs_present > 0: - tx_kwargs["ty"] = TransactionType.BLOB_TRANSACTION - tx_kwargs["max_fee_per_blob_gas"] = fork.min_base_fee_per_blob_gas() - tx_kwargs["blob_versioned_hashes"] = add_kzg_version( - [i.to_bytes() * 32 for i in range(blobs_present)], - BlobsSpec.BLOB_COMMITMENT_VERSION_KZG, - ) + tx_kwargs: dict = {} + if blob_present: + cap = fork.transaction_gas_limit_cap() + if fixed_opcode_count is None and cap is not None: + # Check if blob tx splits would exceed block blob limit + required_splits = math.ceil(gas_benchmark_value / cap) + max_blobs = fork.max_blobs_per_block() + if required_splits > max_blobs: + pytest.skip( + f"Blob tx needs {required_splits} splits but fork allows " + f"{max_blobs} blobs/block" + ) + tx_kwargs = { + "ty": TransactionType.BLOB_TRANSACTION, + "max_fee_per_blob_gas": fork.min_base_fee_per_blob_gas(), + "blob_versioned_hashes": add_kzg_version( + [i.to_bytes(32, "big") for i in range(blob_present)], + BlobsSpec.BLOB_COMMITMENT_VERSION_KZG, + ), + } benchmark_test( target_opcode=Op.BLOBHASH, code_generator=ExtCallGenerator( - attack_block=Op.BLOBHASH(blob_index), + attack_block=Op.BLOBHASH(Op.PUSH0), tx_kwargs=tx_kwargs, ), ) From 2c549ce0b655dab328e917ed251acbae5a655cbb Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 29 Jan 2026 20:01:38 +0100 Subject: [PATCH 107/154] 
feat(testing/fixtures): Add logs to state fixtures (#2091) * feat(testing/specs): Add receipt, including log list, to state fixture * fix * feat(test-specs): add transaction log verification support * fix(testing): unit test expectations * fix: tox -e static * feat(testing/tests): Add log unit test * feat(test): Add receipts to blockchain_tests to help debug root mismatches (#5) * refactor(specs/testing): gasUsed -> cumulativeGasUsed * refactor: Add RLP to tx receipts and use proper fields --------- Co-authored-by: spencer-tb Co-authored-by: felipe --- packages/testing/src/conftest.py | 16 +- .../testing/src/execution_testing/__init__.py | 2 + .../base_types/serialization.py | 22 +- .../client_clis/clis/execution_specs.py | 1 + .../client_clis/tests/fixtures/1/exp.json | 4 +- .../client_clis/tests/fixtures/3/exp.json | 4 +- .../client_clis/tests/test_execution_specs.py | 2 - .../exceptions/exceptions/transaction.py | 2 + .../execution_testing/fixtures/blockchain.py | 7 +- .../src/execution_testing/fixtures/common.py | 90 +++- .../src/execution_testing/fixtures/state.py | 7 +- .../src/execution_testing/specs/blockchain.py | 15 +- .../src/execution_testing/specs/helpers.py | 86 +++- .../src/execution_testing/specs/state.py | 17 + .../blockchain_london_invalid_filled.json | 422 +++++++++++------- .../blockchain_london_valid_filled.json | 392 ++++++++++------ ...ncun_blockchain_test_engine_tx_type_0.json | 30 +- ...inid_cancun_blockchain_test_tx_type_0.json | 155 ++++--- .../chainid_cancun_state_test_tx_type_0.json | 33 +- .../chainid_cancun_state_test_tx_type_1.json | 39 +- ...id_istanbul_blockchain_test_tx_type_0.json | 133 +++--- ...inid_london_blockchain_test_tx_type_0.json | 133 +++--- ...aris_blockchain_test_engine_tx_type_0.json | 30 +- .../chainid_paris_state_test_tx_type_0.json | 33 +- ...ghai_blockchain_test_engine_tx_type_0.json | 30 +- ...chainid_shanghai_state_test_tx_type_0.json | 33 +- .../specs/tests/test_expect.py | 170 ++++++- .../specs/tests/test_fixtures.py | 23 +- .../execution_testing/test_types/__init__.py | 3 +- .../test_types/receipt_types.py | 34 +- .../test_types/transaction_types.py | 10 - .../evm_tools/t8n/t8n_types.py | 15 +- .../test_point_evaluation_precompile.py | 2 +- .../test_mcopy_memory_expansion.py | 2 +- .../test_execution_gas.py | 8 +- .../test_refunds.py | 2 +- tests/prague/eip7702_set_code_tx/test_gas.py | 2 +- .../eip7702_set_code_tx/test_set_code_txs.py | 4 +- .../eip3860_initcode/test_initcode.py | 2 +- 39 files changed, 1384 insertions(+), 631 deletions(-) diff --git a/packages/testing/src/conftest.py b/packages/testing/src/conftest.py index 835ef90170..9f7efe2dd8 100644 --- a/packages/testing/src/conftest.py +++ b/packages/testing/src/conftest.py @@ -8,6 +8,7 @@ from execution_testing.client_clis import ( BesuTransitionTool, ExecutionSpecsTransitionTool, + GethTransitionTool, TransitionTool, ) @@ -49,11 +50,16 @@ def installed_transition_tool_instances() -> Generator[ @pytest.fixture( - params=INSTALLED_TRANSITION_TOOLS, - ids=[ - transition_tool_class.__name__ - for transition_tool_class in INSTALLED_TRANSITION_TOOLS - ], + params=[ + pytest.param( + transition_tool, + marks=[pytest.mark.xfail(reason="Geth t8n needs update")] + if transition_tool == GethTransitionTool + else [], + id=transition_tool.__name__, + ) + for transition_tool in INSTALLED_TRANSITION_TOOLS + ] ) def installed_t8n( request: pytest.FixtureRequest, diff --git a/packages/testing/src/execution_testing/__init__.py b/packages/testing/src/execution_testing/__init__.py index 
05f4914c83..fbc3d6c5eb 100644 --- a/packages/testing/src/execution_testing/__init__.py +++ b/packages/testing/src/execution_testing/__init__.py @@ -71,6 +71,7 @@ TestParameterGroup, TestPhaseManager, Transaction, + TransactionLog, TransactionReceipt, TransactionType, Withdrawal, @@ -186,6 +187,7 @@ "TestPrivateKey2", "Transaction", "TransactionException", + "TransactionLog", "TransactionReceipt", "TransactionTest", "TransactionTestFiller", diff --git a/packages/testing/src/execution_testing/base_types/serialization.py b/packages/testing/src/execution_testing/base_types/serialization.py index 251fd4de26..5d5f75e7af 100644 --- a/packages/testing/src/execution_testing/base_types/serialization.py +++ b/packages/testing/src/execution_testing/base_types/serialization.py @@ -1,9 +1,10 @@ """Ethereum test types for serialization and encoding.""" -from typing import Any, ClassVar, List +from typing import Any, ClassVar, List, Self, Sequence import ethereum_rlp as eth_rlp from ethereum_types.numeric import Uint +from trie import HexaryTrie from execution_testing.base_types import Bytes @@ -33,6 +34,7 @@ class RLPSerializable: signable: ClassVar[bool] = False rlp_fields: ClassVar[List[str]] rlp_signing_fields: ClassVar[List[str]] + rlp_exclude_none: ClassVar[bool] = False def get_rlp_fields(self) -> List[str]: """ @@ -102,9 +104,10 @@ def to_list_from_fields(self, fields: List[str]) -> List[Any]: f'in object type "{self.__class__.__name__}"' ) try: - values_list.append( - to_serializable_element(getattr(self, field)) - ) + value = getattr(self, field) + if self.rlp_exclude_none and value is None: + continue + values_list.append(to_serializable_element(value)) except Exception as e: raise Exception( f'Unable to rlp serialize field "{field}" ' @@ -151,6 +154,17 @@ def rlp(self) -> Bytes: self.get_rlp_prefix() + eth_rlp.encode(self.to_list(signing=False)) ) + @classmethod + def list_root(cls, element_list: Sequence[Self]) -> bytes: + """Return the root of a list of the given type.""" + t = HexaryTrie(db={}) + for i, e in enumerate(element_list): + t.set( + eth_rlp.encode(Uint(i)), + e.rlp(), + ) + return t.root_hash + class SignableRLPSerializable(RLPSerializable): """ diff --git a/packages/testing/src/execution_testing/client_clis/clis/execution_specs.py b/packages/testing/src/execution_testing/client_clis/clis/execution_specs.py index 7f181df9cc..4066124e4e 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/execution_specs.py +++ b/packages/testing/src/execution_testing/client_clis/clis/execution_specs.py @@ -227,6 +227,7 @@ class ExecutionSpecsExceptionMapper(ExceptionMapper): BlockException.SYSTEM_CONTRACT_EMPTY: "System contract address", BlockException.SYSTEM_CONTRACT_CALL_FAILED: "call failed:", BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: "deposit", + TransactionException.LOG_MISMATCH: "LogMismatchError", } mapping_regex: ClassVar[Dict[ExceptionBase, str]] = { # Temporary solution for issue #1981. 
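The `list_root` classmethod added to `RLPSerializable` above is what later lets the state-test filler cross-check the t8n `receipts_root` against `FixtureTransactionReceipt.list_root([receipt])`. A minimal standalone sketch of the computation it performs follows; the function name and variables are illustrative only (not part of this patch) and assume a list of objects exposing `.rlp()`, as the new fixture receipt type does.

```python
# Sketch: index-keyed trie root over RLP payloads, mirroring the new
# RLPSerializable.list_root classmethod (illustrative, not part of the patch).
import ethereum_rlp as eth_rlp
from ethereum_types.numeric import Uint
from trie import HexaryTrie


def receipts_trie_root(elements) -> bytes:
    """Trie root keyed by rlp(index), with each element's RLP as the value."""
    t = HexaryTrie(db={})
    for i, element in enumerate(elements):
        # element.rlp() already carries the EIP-2718 type-byte prefix for
        # non-legacy receipts via get_rlp_prefix(), so no extra wrapping here.
        t.set(eth_rlp.encode(Uint(i)), element.rlp())
    return t.root_hash


# Expected to agree with the classmethod for the same input, e.g.:
# assert receipts_trie_root(receipts) == FixtureTransactionReceipt.list_root(receipts)
```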
diff --git a/packages/testing/src/execution_testing/client_clis/tests/fixtures/1/exp.json b/packages/testing/src/execution_testing/client_clis/tests/fixtures/1/exp.json index 03da7dcdb2..92f768586d 100644 --- a/packages/testing/src/execution_testing/client_clis/tests/fixtures/1/exp.json +++ b/packages/testing/src/execution_testing/client_clis/tests/fixtures/1/exp.json @@ -30,10 +30,10 @@ "root": "0x", "status": "0x1", "cumulativeGasUsed": "0x5208", - "logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", "transactionHash": "0x0557bacce3375c98d806609b8d5043072f0b6a8bae45ae5a67a00d3a1a18d673", "contractAddress": "0x0000000000000000000000000000000000000000", - "gasUsed": "0x5208", "blockHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "transactionIndex": "0x0" } diff --git a/packages/testing/src/execution_testing/client_clis/tests/fixtures/3/exp.json b/packages/testing/src/execution_testing/client_clis/tests/fixtures/3/exp.json index 6238b76ecb..753bd8b44c 100644 --- a/packages/testing/src/execution_testing/client_clis/tests/fixtures/3/exp.json +++ b/packages/testing/src/execution_testing/client_clis/tests/fixtures/3/exp.json @@ -31,10 +31,10 @@ "root": "0x", "status": "0x1", "cumulativeGasUsed": "0x521f", - "logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", "transactionHash": "0x72fadbef39cd251a437eea619cfeda752271a5faaaa2147df012e112159ffb81", "contractAddress": "0x0000000000000000000000000000000000000000", - "gasUsed": "0x521f", "blockHash": 
"0x0000000000000000000000000000000000000000000000000000000000000000", "transactionIndex": "0x0" } diff --git a/packages/testing/src/execution_testing/client_clis/tests/test_execution_specs.py b/packages/testing/src/execution_testing/client_clis/tests/test_execution_specs.py index 498e08deb0..4f8964a67f 100644 --- a/packages/testing/src/execution_testing/client_clis/tests/test_execution_specs.py +++ b/packages/testing/src/execution_testing/client_clis/tests/test_execution_specs.py @@ -190,8 +190,6 @@ def test_evm_t8n( # eels are handled here. missing_receipt_fields = [ "root", - "status", - "cumulativeGasUsed", "contractAddress", "blockHash", "transactionIndex", diff --git a/packages/testing/src/execution_testing/exceptions/exceptions/transaction.py b/packages/testing/src/execution_testing/exceptions/exceptions/transaction.py index 64d79e0478..a2b90227fd 100644 --- a/packages/testing/src/execution_testing/exceptions/exceptions/transaction.py +++ b/packages/testing/src/execution_testing/exceptions/exceptions/transaction.py @@ -194,3 +194,5 @@ class TransactionException(ExceptionBase): """ TYPE_4_TX_PRE_FORK = auto() """Transaction type 4 included before activation fork.""" + LOG_MISMATCH = auto() + """Transaction receipt logs do not match expected logs.""" diff --git a/packages/testing/src/execution_testing/fixtures/blockchain.py b/packages/testing/src/execution_testing/fixtures/blockchain.py index fa85d4b60b..6b15039515 100644 --- a/packages/testing/src/execution_testing/fixtures/blockchain.py +++ b/packages/testing/src/execution_testing/fixtures/blockchain.py @@ -64,7 +64,11 @@ ) from .base import BaseFixture, FixtureFillingPhase -from .common import FixtureAuthorizationTuple, FixtureBlobSchedule +from .common import ( + FixtureAuthorizationTuple, + FixtureBlobSchedule, + FixtureTransactionReceipt, +) def post_state_validator( @@ -638,6 +642,7 @@ def strip_block_number_computed_field(cls, data: Any) -> Any: default_factory=list, alias="uncleHeaders" ) withdrawals: List[FixtureWithdrawal] | None = None + receipts: List[FixtureTransactionReceipt] | None = None execution_witness: WitnessChunk | None = None block_access_list: BlockAccessList | None = Field( None, description="EIP-7928 Block Access List" diff --git a/packages/testing/src/execution_testing/fixtures/common.py b/packages/testing/src/execution_testing/fixtures/common.py index eb381019f9..2dadf0127b 100644 --- a/packages/testing/src/execution_testing/fixtures/common.py +++ b/packages/testing/src/execution_testing/fixtures/common.py @@ -1,19 +1,28 @@ """Common types used to define multiple fixture types.""" -from typing import Any, Dict +from typing import Any, ClassVar, Dict, List -from pydantic import AliasChoices, Field, model_validator +from pydantic import AliasChoices, Field, computed_field, model_validator from execution_testing.base_types import ( BlobSchedule, + Bloom, + Bytes, CamelModel, EthereumTestRootModel, + Hash, + RLPSerializable, SignableRLPSerializable, ZeroPaddedHexNumber, ) from execution_testing.test_types.account_types import Address +from execution_testing.test_types.receipt_types import ( + ReceiptDelegation, + TransactionReceipt, +) from execution_testing.test_types.transaction_types import ( AuthorizationTupleGeneric, + Transaction, ) @@ -88,3 +97,80 @@ def sign(self) -> None: """Sign the current object for further serialization.""" # No-op, as the object is always already signed return + + +class FixtureTransactionLog(CamelModel, RLPSerializable): + """Fixture variant of the TransactionLog type.""" + + 
address: Address | None = None + topics: List[Hash] | None = None + data: Bytes | None = None + + rlp_fields: ClassVar[List[str]] = [ + "address", + "topics", + "data", + ] + + +class FixtureReceiptDelegation(ReceiptDelegation): + """Fixture variant of the ReceiptDelegation type.""" + + nonce: ZeroPaddedHexNumber + + +class FixtureTransactionReceipt(CamelModel, RLPSerializable): + """Fixture variant of the TransactionReceipt type.""" + + transaction_hash: Hash + ty: ZeroPaddedHexNumber = Field(..., alias="type") + cumulative_gas_used: ZeroPaddedHexNumber + bloom: Bloom + logs: List[FixtureTransactionLog] + post_state: Hash | None = None + status: bool | None = None + + rlp_fields: ClassVar[List[str]] = [ + "post_state", + "status", + "cumulative_gas_used", + "bloom", + "logs", + ] + rlp_exclude_none: ClassVar[bool] = True + + @model_validator(mode="before") + @classmethod + def _drop_computed_fields(cls, data: Any) -> Any: + if isinstance(data, dict): + data = dict(data) + data.pop("rlp", None) + data.pop("rlp_field", None) + return data + + @computed_field(alias="rlp") + def rlp_field(self) -> Bytes: + """Return the RLP.""" + return self.rlp() + + def get_rlp_prefix(self) -> bytes: + """ + Return a prefix that has to be appended to the serialized object. + + By default, an empty string is returned. + """ + if self.ty > 0: + return bytes([self.ty]) + return b"" + + @classmethod + def from_transaction_receipt( + cls, + receipt: TransactionReceipt, + tx: Transaction, + ) -> "FixtureTransactionReceipt": + """Return FixtureTransactionReceipt from a TransactionReceipt.""" + model_as_dict = receipt.model_dump( + exclude_none=True, include=set(cls.model_fields.keys()) + ) | {"ty": tx.ty, "transaction_hash": tx.hash} + return cls(**model_as_dict) diff --git a/packages/testing/src/execution_testing/fixtures/state.py b/packages/testing/src/execution_testing/fixtures/state.py index a69eb35899..32891b5dc0 100644 --- a/packages/testing/src/execution_testing/fixtures/state.py +++ b/packages/testing/src/execution_testing/fixtures/state.py @@ -22,7 +22,11 @@ ) from .base import BaseFixture -from .common import FixtureAuthorizationTuple, FixtureBlobSchedule +from .common import ( + FixtureAuthorizationTuple, + FixtureBlobSchedule, + FixtureTransactionReceipt, +) class FixtureEnvironment(EnvironmentGeneric[ZeroPaddedHexNumber]): @@ -89,6 +93,7 @@ class FixtureForkPost(CamelModel): state_root: Hash = Field(..., alias="hash") logs_hash: Hash = Field(..., alias="logs") + receipt: FixtureTransactionReceipt | None = None tx_bytes: Bytes = Field(..., alias="txbytes") indexes: FixtureForkPostIndexes = Field( default_factory=FixtureForkPostIndexes diff --git a/packages/testing/src/execution_testing/specs/blockchain.py b/packages/testing/src/execution_testing/specs/blockchain.py index 3acd66a03f..41f42f597d 100644 --- a/packages/testing/src/execution_testing/specs/blockchain.py +++ b/packages/testing/src/execution_testing/specs/blockchain.py @@ -64,7 +64,10 @@ FixtureWithdrawal, InvalidFixtureBlock, ) -from execution_testing.fixtures.common import FixtureBlobSchedule +from execution_testing.fixtures.common import ( + FixtureBlobSchedule, + FixtureTransactionReceipt, +) from execution_testing.forks import Fork from execution_testing.test_types import ( Alloc, @@ -381,6 +384,16 @@ def get_fixture_block(self) -> FixtureBlock | InvalidFixtureBlock: if self.withdrawals is not None else None ), + receipts=( + [ + FixtureTransactionReceipt.from_transaction_receipt( + r, self.txs[i] + ) + for i, r in 
enumerate(self.result.receipts) + ] + if self.result.receipts + else None + ), block_access_list=self.block_access_list if self.block_access_list else None, diff --git a/packages/testing/src/execution_testing/specs/helpers.py b/packages/testing/src/execution_testing/specs/helpers.py index 42f1a911a5..a176cd1c59 100644 --- a/packages/testing/src/execution_testing/specs/helpers.py +++ b/packages/testing/src/execution_testing/specs/helpers.py @@ -12,7 +12,11 @@ TransactionException, UndefinedException, ) -from execution_testing.test_types import Transaction, TransactionReceipt +from execution_testing.test_types import ( + Transaction, + TransactionLog, + TransactionReceipt, +) class ExecutionContext(StrEnum): @@ -131,6 +135,29 @@ def __init__( super().__init__(message) +class LogMismatchError(Exception): + """ + Exception used when an actual log field differs from the expected one. + """ + + def __init__( + self, + index: int, + log_index: int, + field_name: str, + expected_value: Any, + actual_value: Any, + ): + """Initialize the exception.""" + message = ( + f"\nLogMismatch (pos={index}, log={log_index}):" + f"\n What: {field_name} mismatch!" + f"\n Want: {expected_value}" + f"\n Got: {actual_value}" + ) + super().__init__(message) + + @dataclass class ExceptionInfo: """Info to print transaction exception error messages.""" @@ -236,6 +263,39 @@ def __init__( ) +def verify_log( + tx_index: int, + log_index: int, + expected: TransactionLog, + actual: TransactionLog, +) -> None: + """Verify a single log matches expected values (only specified fields).""" + if expected.address is not None and expected.address != actual.address: + raise LogMismatchError( + index=tx_index, + log_index=log_index, + field_name="address", + expected_value=expected.address, + actual_value=actual.address, + ) + if expected.topics is not None and expected.topics != actual.topics: + raise LogMismatchError( + index=tx_index, + log_index=log_index, + field_name="topics", + expected_value=expected.topics, + actual_value=actual.topics, + ) + if expected.data is not None and expected.data != actual.data: + raise LogMismatchError( + index=tx_index, + log_index=log_index, + field_name="data", + expected_value=expected.data, + actual_value=actual.data, + ) + + def verify_transaction_receipt( transaction_index: int, expected_receipt: TransactionReceipt | None, @@ -252,15 +312,31 @@ def verify_transaction_receipt( return assert actual_receipt is not None if ( - expected_receipt.gas_used is not None - and actual_receipt.gas_used != expected_receipt.gas_used + expected_receipt.cumulative_gas_used is not None + and actual_receipt.cumulative_gas_used + != expected_receipt.cumulative_gas_used ): raise TransactionReceiptMismatchError( index=transaction_index, field_name="gas_used", - expected_value=expected_receipt.gas_used, - actual_value=actual_receipt.gas_used, + expected_value=expected_receipt.cumulative_gas_used, + actual_value=actual_receipt.cumulative_gas_used, ) + if expected_receipt.logs is not None and actual_receipt.logs is not None: + actual_logs = actual_receipt.logs + expected_logs = expected_receipt.logs + if len(expected_logs) != len(actual_logs): + raise LogMismatchError( + index=transaction_index, + log_index=0, + field_name="log_count", + expected_value=len(expected_logs), + actual_value=len(actual_logs), + ) + for log_idx, (expected, actual) in enumerate( + zip(expected_logs, actual_logs, strict=True) + ): + verify_log(transaction_index, log_idx, expected, actual) # TODO: Add more fields as needed diff --git 
a/packages/testing/src/execution_testing/specs/state.py b/packages/testing/src/execution_testing/specs/state.py index e4d5f9c61c..16af5fe4e1 100644 --- a/packages/testing/src/execution_testing/specs/state.py +++ b/packages/testing/src/execution_testing/specs/state.py @@ -44,6 +44,7 @@ FixtureEnvironment, FixtureForkPost, FixtureTransaction, + FixtureTransactionReceipt, ) from execution_testing.forks import Fork from execution_testing.logging import ( @@ -472,6 +473,21 @@ def make_state_test_fixture( f"expected_benchmark_gas_used " f"({expected_benchmark_gas_used}), difference: {diff}" ) + if len(transition_tool_output.result.receipts) == 1: + receipt = FixtureTransactionReceipt.from_transaction_receipt( + transition_tool_output.result.receipts[0], tx + ) + receipt_root = FixtureTransactionReceipt.list_root([receipt]) + assert ( + transition_tool_output.result.receipts_root == receipt_root + ), ( + f"Receipts root mismatch: " + f"{transition_tool_output.result.receipts_root} != " + f"{receipt_root.hex()}" + f"Receipt: {receipt.rlp()}" + ) + else: + receipt = None return StateFixture( env=FixtureEnvironment(**env.model_dump(exclude_none=True)), @@ -481,6 +497,7 @@ def make_state_test_fixture( FixtureForkPost( state_root=transition_tool_output.result.state_root, logs_hash=transition_tool_output.result.logs_hash, + receipt=receipt, tx_bytes=tx.rlp(), expect_exception=tx.error, state=output_alloc, diff --git a/packages/testing/src/execution_testing/specs/tests/fixtures/blockchain_london_invalid_filled.json b/packages/testing/src/execution_testing/specs/tests/fixtures/blockchain_london_invalid_filled.json index b70a5b4027..9f3aa32522 100644 --- a/packages/testing/src/execution_testing/specs/tests/fixtures/blockchain_london_invalid_filled.json +++ b/packages/testing/src/execution_testing/specs/tests/fixtures/blockchain_london_invalid_filled.json @@ -1,11 +1,6 @@ { "000/my_blockchain_test/London": { - "_info": { - "hash": "0x94854ed3844fcf06f8b74349e63aa0e6dcf43a307f5888b93be310ff39de55ff", - "fixture_format": "blockchain_test" - }, "network": "London", - "genesisRLP": "0xf90200f901fba00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a089a5be1d3306f6f05b42678ef13ac3dbc37bef9a2a80862c21eb22eee29194c2a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200008088016345785d8a0000808000a000000000000000000000000000000000000000000000000000000000000000008800000000000000008203e8c0c0", "genesisBlockHeader": { "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", @@ -25,9 +20,139 @@ "baseFeePerGas": "0x03e8", "hash": "0x6241b4534da26b654ec5bb30d29b1d5202454af544b05828433354da7471957c" }, + "pre": { + "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { + "nonce": "0x00", + "balance": 
"0x01000000000000000000", + "code": "0x", + "storage": {} + }, + "0xd02d72e067e77158444ef2020ff2d325f929b363": { + "nonce": "0x01", + "balance": "0x01000000000000000000", + "code": "0x", + "storage": {} + }, + "0xcccccccccccccccccccccccccccccccccccccccc": { + "nonce": "0x01", + "balance": "0x010000000000", + "code": "0x484355483a036110004301554761200043015500", + "storage": {} + }, + "0xcccccccccccccccccccccccccccccccccccccccd": { + "nonce": "0x01", + "balance": "0x020000000000", + "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", + "storage": {} + }, + "0x000000000000000000000000000000000000c0de": { + "nonce": "0x01", + "balance": "0x00", + "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", + "storage": {} + }, + "0xccccccccccccccccccccccccccccccccccccccce": { + "nonce": "0x01", + "balance": "0x020000000000", + "code": "0x60008060008061100061c0de5af160008060008073cccccccccccccccccccccccccccccccccccccccc5af4905050", + "storage": {} + } + }, + "postState": { + "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { + "nonce": "0x0a", + "balance": "0xfffffffffba0afe5e7", + "code": "0x", + "storage": {} + }, + "0xd02d72e067e77158444ef2020ff2d325f929b363": { + "nonce": "0x01", + "balance": "0x01000000000000000000", + "code": "0x", + "storage": {} + }, + "0xcccccccccccccccccccccccccccccccccccccccc": { + "nonce": "0x01", + "balance": "0x010000000000", + "code": "0x484355483a036110004301554761200043015500", + "storage": { + "0x01": "0x036b", + "0x1001": "0x01", + "0x2001": "0x010000000000", + "0x02": "0x02fe", + "0x1002": "0x0a", + "0x2002": "0x010000000000", + "0x03": "0x029f", + "0x1003": "0x0149", + "0x2003": "0x010000000000", + "0x04": "0x024c", + "0x1004": "0x019c", + "0x2004": "0x010000000000" + } + }, + "0xcccccccccccccccccccccccccccccccccccccccd": { + "nonce": "0x01", + "balance": "0x020000000000", + "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", + "storage": { + "0x02": "0x02fe", + "0x1002": "0x64", + "0x2002": "0x020000000000", + "0x03": "0x029f", + "0x1003": "0x018401", + "0x2003": "0x020000000000", + "0x04": "0x024c", + "0x1004": "0x018454", + "0x2004": "0x020000000000" + } + }, + "0x000000000000000000000000000000000000c0de": { + "nonce": "0x01", + "balance": "0x3000", + "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", + "storage": { + "0x02": "0x02fe", + "0x1002": "0x64", + "0x2002": "0x1000", + "0x03": "0x029f", + "0x1003": "0x64", + "0x2003": "0x2000", + "0x04": "0x024c", + "0x1004": "0x64", + "0x2004": "0x3000" + } + }, + "0xccccccccccccccccccccccccccccccccccccccce": { + "nonce": "0x01", + "balance": "0x01ffffffd000", + "code": "0x60008060008061100061c0de5af160008060008073cccccccccccccccccccccccccccccccccccccccc5af4905050", + "storage": { + "0x02": "0x02fe", + "0x1002": "0x64", + "0x2002": "0x01fffffff000", + "0x03": "0x029f", + "0x1003": "0x64", + "0x2003": "0x01ffffffe000", + "0x04": "0x024c", + "0x1004": "0x64", + "0x2004": "0x01ffffffd000" + } + }, + "0xba5e000000000000000000000000000000000000": { + "nonce": "0x00", + "balance": "0x6f05b5a16c783b4b", + "code": "0x", + "storage": {} + } + }, + "lastblockhash": "0xf5e2f23d9a212edbb35a07bc9f582f4a632b694bd4ef8742de8ad6c6acacf72c", + "config": { + "network": "London", + "chainid": "0x01" + }, + "genesisRLP": 
"0xf90200f901fba00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a089a5be1d3306f6f05b42678ef13ac3dbc37bef9a2a80862c21eb22eee29194c2a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200008088016345785d8a0000808000a000000000000000000000000000000000000000000000000000000000000000008800000000000000008203e8c0c0", "blocks": [ { - "rlp": "0xf9026ef901fea06241b4534da26b654ec5bb30d29b1d5202454af544b05828433354da7471957ca01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a03eb2e72e8ed9a59768bb9ac05915b781a764f2582edcf111053fe6531e466613a0586f963eea0fb4726f0f91f895f2aa5d67bffb5207a529b40d781244a0c7017ba029b0562f7140574dd0d50dee8a271b22e1a0a7b78fca58f7c60370d8317ba2a9b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000188016345785d8a0000830155340c80a0000000000000000000000000000000000000000000000000000000000000000088000000000000000082036bf86ab86802f8650180018203e8830f424094cccccccccccccccccccccccccccccccccccccccc8001c080a03351b6993208fc7b03fd770c8c06440cfb0d75b29aafee0a4c64c8ba20a80e58a067817fdb3058e75c5d26e51a33d1e338346bc7d406e115447a4bb5f7ab01625bc0", "blockHeader": { "parentHash": "0x6241b4534da26b654ec5bb30d29b1d5202454af544b05828433354da7471957c", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", @@ -47,7 +172,6 @@ "baseFeePerGas": "0x036b", "hash": "0x12bba91a7e1f277f1549e832e06820f8849308f70f8659acf846bdc15f5d586e" }, - "blocknumber": "1", "transactions": [ { "type": "0x02", @@ -66,10 +190,22 @@ "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" } ], - "uncleHeaders": [] + "uncleHeaders": [], + "receipts": [ + { + "transactionHash": "0xbca913628955677525aef79728df903b090ce75a14d47dfaf9200753ee28f8a9", + "type": "0x02", + "cumulativeGasUsed": "0x015534", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": 
"0x02f901090183015534b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + } + ], + "rlp": "0xf9026ef901fea06241b4534da26b654ec5bb30d29b1d5202454af544b05828433354da7471957ca01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a03eb2e72e8ed9a59768bb9ac05915b781a764f2582edcf111053fe6531e466613a0586f963eea0fb4726f0f91f895f2aa5d67bffb5207a529b40d781244a0c7017ba029b0562f7140574dd0d50dee8a271b22e1a0a7b78fca58f7c60370d8317ba2a9b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000188016345785d8a0000830155340c80a0000000000000000000000000000000000000000000000000000000000000000088000000000000000082036bf86ab86802f8650180018203e8830f424094cccccccccccccccccccccccccccccccccccccccc8001c080a03351b6993208fc7b03fd770c8c06440cfb0d75b29aafee0a4c64c8ba20a80e58a067817fdb3058e75c5d26e51a33d1e338346bc7d406e115447a4bb5f7ab01625bc0", + "blocknumber": "1" }, { - "rlp": "0xf90349f901fea012bba91a7e1f277f1549e832e06820f8849308f70f8659acf846bdc15f5d586ea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a0d9d9cc8ae73834ba9dc75fe8c68d36e980c82fcaf887dc220a05f152a327ae55a05521d9ad5adef72f021e4270a1f6851ca772dd56acaf4ff03362151bfb715298a0e225d44649351c3dccc61c1d904451d6f0f5a407c072099fe1085cfad88447d6b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000288016345785d8a000083053c421880a000000000000000000000000000000000000000000000000000000000000000008800000000000000008202fef90144b86a02f86701010a8203e8830f424094cccccccccccccccccccccccccccccccccccccccc80820201c080a06ea285a870a051df2b8c80c462b7d3517f984815e09c4748efc8548a40434050a052f635268c1b9e1538ac76b37cb69c7b897595744d6de2dda9507b6624d352d0b86a02f8670102648203e8830f424094cccccccccccccccccccccccccccccccccccccccd80820202c080a0218549e818b36b3823c3f11a65ab5c1e16f6886469c385503cc2f1af1f53825da058b082850f55fd61290a99add11b7af6356ac8d55fbe4d513f06bf648824a64db86a02f8670103648203e8830f424094ccccccccccccccccccccccccccccccccccccccce80820203c001a0339e9ed3f6342f2644e4cd33a775b7e62a8208a137dcf2e354c7473caa77782aa074004c85b651c8ca9828aac28414997f3eff46edbba2bb606a545d95fd4c9b3ac0", 
"blockHeader": { "parentHash": "0x12bba91a7e1f277f1549e832e06820f8849308f70f8659acf846bdc15f5d586e", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", @@ -89,7 +225,6 @@ "baseFeePerGas": "0x02fe", "hash": "0x0e043cb2eb0339900f6199c0ab517e5be3a81d898fa58078ed8b866ddc60b010" }, - "blocknumber": "2", "transactions": [ { "type": "0x02", @@ -140,7 +275,38 @@ "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" } ], - "uncleHeaders": [] + "uncleHeaders": [], + "receipts": [ + { + "transactionHash": "0x58c03df6d8bcf23e82becf748be6804b152fe2b00c569b33c6d3d85b58aafc17", + "type": "0x02", + "cumulativeGasUsed": "0x015544", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f901090183015544b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + }, + { + "transactionHash": "0x2a74b75600019e641a3106069cd02330ff969cd0538d087b6a81c46e01d8d3fd", + "type": "0x02", + "cumulativeGasUsed": "0x02b4c3", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f90109018302b4c3b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + }, + { + "transactionHash": "0x7809ac11ae8a0b4c05b635dd3e3dbca1c99757bd790157d4a8d15f1d2a765768", + "type": "0x02", + "cumulativeGasUsed": "0x053c42", + "bloom": 
"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f901090183053c42b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + } + ], + "rlp": "0xf90349f901fea012bba91a7e1f277f1549e832e06820f8849308f70f8659acf846bdc15f5d586ea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a0d9d9cc8ae73834ba9dc75fe8c68d36e980c82fcaf887dc220a05f152a327ae55a05521d9ad5adef72f021e4270a1f6851ca772dd56acaf4ff03362151bfb715298a0e225d44649351c3dccc61c1d904451d6f0f5a407c072099fe1085cfad88447d6b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000288016345785d8a000083053c421880a000000000000000000000000000000000000000000000000000000000000000008800000000000000008202fef90144b86a02f86701010a8203e8830f424094cccccccccccccccccccccccccccccccccccccccc80820201c080a06ea285a870a051df2b8c80c462b7d3517f984815e09c4748efc8548a40434050a052f635268c1b9e1538ac76b37cb69c7b897595744d6de2dda9507b6624d352d0b86a02f8670102648203e8830f424094cccccccccccccccccccccccccccccccccccccccd80820202c080a0218549e818b36b3823c3f11a65ab5c1e16f6886469c385503cc2f1af1f53825da058b082850f55fd61290a99add11b7af6356ac8d55fbe4d513f06bf648824a64db86a02f8670103648203e8830f424094ccccccccccccccccccccccccccccccccccccccce80820203c001a0339e9ed3f6342f2644e4cd33a775b7e62a8208a137dcf2e354c7473caa77782aa074004c85b651c8ca9828aac28414997f3eff46edbba2bb606a545d95fd4c9b3ac0", + "blocknumber": "2" }, { "rlp": 
"0xf902e1f901fea00e043cb2eb0339900f6199c0ab517e5be3a81d898fa58078ed8b866ddc60b010a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a069f3a735c7a7e1ea24a03a7107eba6a880d2d0251aaf24eaa7f109ece7969bf9a07c6d7fe1d1734fca072880e563f763405dc362222d37487cb098a006f7db3b2ca0976beb67b634171d419ef326220dfdda98074e3495940240a105e17643f0a4efb9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000388016345785d8a0000830155442480a0000000000000000000000000000000000000000000000000000000000000000088000000000000000082029ff8ddb86c02f86901048203e88203e8830f424094cccccccccccccccccccccccccccccccccccccccc80820301c001a0720e2870881f8b0e285b7ec02c169f1165847bcb5f36ea5f33f3db6079854f63a04448266b715d7d99acd1e31dcab50d7119faa620d44c69b3f64f97d636634169b86d02f86a0105830186a08203e8830f424094cccccccccccccccccccccccccccccccccccccccd80820302c080a06c7fb2be7e001a210d72480522b9ebecade52d721360ce5242e34a6c05a02715a01220e3cb7418cd6294443b38d05f5ed9f2967b182d25c784e11e7863454b8f9bc0", @@ -165,7 +331,6 @@ "baseFeePerGas": "0x029f", "hash": "0x0cb9b60de1bb3893d7b7b806562a78aca5e9fbff47bf62893a5f6c0afcc73b48" }, - "blocknumber": "3", "transactions": [ { "type": "0x02", @@ -200,11 +365,22 @@ "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" } ], - "uncleHeaders": [] + "uncleHeaders": [], + "receipts": [ + { + "transactionHash": "0x968ad99d155b27adf6347a19648ee67c40f091a1bae1f373ff3de625fcc3e2ed", + "type": "0x02", + "cumulativeGasUsed": "0x015544", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f901090183015544b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + } + ], + "blocknumber": "3" } }, { - "rlp": 
"0xf9034ff901fea00e043cb2eb0339900f6199c0ab517e5be3a81d898fa58078ed8b866ddc60b010a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a0bb08d7ca9c904f3d01b78041a9d70f69e83b0a6ec7af471cbd00933a47fdacaea027f7b224df1d270bfa03ba564cd4962071b89f91c965dbbfacff55e7ec66c652a0f42d43454db7c51eadf004bd9e43522c4894f02c602b709cd45e67597c622f2eb9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000388016345785d8a000083053c422480a0000000000000000000000000000000000000000000000000000000000000000088000000000000000082029ff9014ab86c02f86901048203e88203e8830f424094cccccccccccccccccccccccccccccccccccccccc80820301c001a0720e2870881f8b0e285b7ec02c169f1165847bcb5f36ea5f33f3db6079854f63a04448266b715d7d99acd1e31dcab50d7119faa620d44c69b3f64f97d636634169b86a02f8670105648203e8830f424094ccccccccccccccccccccccccccccccccccccccce80820303c080a09c8531a41f9281633470c5e12b6c72c8930409a6433f26bf7b394a703d18512ea07a0c6151fde75f10a7e4efdd17a21f1f25206559bd4b8cf7880e5bc30e1cfe33b86e02f86b0106830186a0830186a0830f424094cccccccccccccccccccccccccccccccccccccccd80820304c001a0c8b85e158b532a0e3b3b5848fad0f4d5c6807805a4ce65e8591de13a62f3ac6aa03e923eb1be030c3ca69623f31ad3a357368b1ccb7ee48ac8deec5cb5dc49cb0cc0", "blockHeader": { "parentHash": "0x0e043cb2eb0339900f6199c0ab517e5be3a81d898fa58078ed8b866ddc60b010", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", @@ -224,7 +400,6 @@ "baseFeePerGas": "0x029f", "hash": "0x5c66e5b6d6513ec98e9d8ee88137f1a2418542550977ea02015439acd2bf8f8e" }, - "blocknumber": "3", "transactions": [ { "type": "0x02", @@ -275,7 +450,38 @@ "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" } ], - "uncleHeaders": [] + "uncleHeaders": [], + "receipts": [ + { + "transactionHash": "0x968ad99d155b27adf6347a19648ee67c40f091a1bae1f373ff3de625fcc3e2ed", + "type": "0x02", + "cumulativeGasUsed": "0x015544", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f901090183015544b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + }, + { + "transactionHash": 
"0x371a182905cef209b524b49169cf7706803c08f78cd5a3511a040dc33f964b9d", + "type": "0x02", + "cumulativeGasUsed": "0x03dcc3", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f90109018303dcc3b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + }, + { + "transactionHash": "0x9ef9c65647e57c7a93d6e88e47e03ae4e4c28625d301bd74d633d95291ce5ec1", + "type": "0x02", + "cumulativeGasUsed": "0x053c42", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f901090183053c42b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + } + ], + "rlp": 
"0xf9034ff901fea00e043cb2eb0339900f6199c0ab517e5be3a81d898fa58078ed8b866ddc60b010a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a0bb08d7ca9c904f3d01b78041a9d70f69e83b0a6ec7af471cbd00933a47fdacaea027f7b224df1d270bfa03ba564cd4962071b89f91c965dbbfacff55e7ec66c652a0f42d43454db7c51eadf004bd9e43522c4894f02c602b709cd45e67597c622f2eb9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000388016345785d8a000083053c422480a0000000000000000000000000000000000000000000000000000000000000000088000000000000000082029ff9014ab86c02f86901048203e88203e8830f424094cccccccccccccccccccccccccccccccccccccccc80820301c001a0720e2870881f8b0e285b7ec02c169f1165847bcb5f36ea5f33f3db6079854f63a04448266b715d7d99acd1e31dcab50d7119faa620d44c69b3f64f97d636634169b86a02f8670105648203e8830f424094ccccccccccccccccccccccccccccccccccccccce80820303c080a09c8531a41f9281633470c5e12b6c72c8930409a6433f26bf7b394a703d18512ea07a0c6151fde75f10a7e4efdd17a21f1f25206559bd4b8cf7880e5bc30e1cfe33b86e02f86b0106830186a0830186a0830f424094cccccccccccccccccccccccccccccccccccccccd80820304c001a0c8b85e158b532a0e3b3b5848fad0f4d5c6807805a4ce65e8591de13a62f3ac6aa03e923eb1be030c3ca69623f31ad3a357368b1ccb7ee48ac8deec5cb5dc49cb0cc0", + "blocknumber": "3" }, { "rlp": "0xf902e1f901fea05c66e5b6d6513ec98e9d8ee88137f1a2418542550977ea02015439acd2bf8f8ea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a0e834ba6cd27f2702b0adf2ef6a85e2fbc340fb948c96e75b674e9a73a5dbc3d1a04722f7b17f27aee5dfa0d92ba40e16de960374a98ec63e728acaa1564d8a54f3a0976beb67b634171d419ef326220dfdda98074e3495940240a105e17643f0a4efb9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000488016345785d8a0000830155443080a0000000000000000000000000000000000000000000000000000000000000000088000000000000000082024cf8ddb86c02f86901078203e88203e8830f424094cccccccccccccccccccccccccccccccccccccccc80820401c001a0113c54f83e1b1e5c689ba86d288ec0ce2877f350b71821c4c7a3f7073b46602ca0548848e711b86ceeb657fd0a0bf44b792f6665ed18ec8a04f498471e811f8f97b86d02f86a0108830186a08203e8830f424094cccccccccccccccccccccccccccccccccccccccd80820402c001a0ebc8ad530ec3d510998aa2485763fcd1c6958c900c8d8ae6eaf86e1eddde8b23a0341e4a021f7b77da28d853c07d11253b92331ab640ad3f28f5d7b2cdbc7ceca7c0", @@ -300,7 +506,6 @@ "baseFeePerGas": "0x024c", "hash": "0x1f01f6d8ff3a461486c4c4334c94a05f114d161b1ac082c7374ad7ac51eea7f2" }, - "blocknumber": "4", "transactions": [ { "type": "0x02", @@ -335,11 +540,22 @@ "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" } ], - "uncleHeaders": [] + "uncleHeaders": [], + "receipts": [ + { 
+ "transactionHash": "0x2e93d5b1a0c54422cc7603f0d3f18e14283f9a0e45a6dbd97e5dc9380a32777e", + "type": "0x02", + "cumulativeGasUsed": "0x015544", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f901090183015544b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + } + ], + "blocknumber": "4" } }, { - "rlp": "0xf9034ff901fea05c66e5b6d6513ec98e9d8ee88137f1a2418542550977ea02015439acd2bf8f8ea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a0e2b2b992b108bcd0e036067ef693f2d1b94c2f48d074a4f6b9d98537bbf15e9aa07617400c1efcb3e64b8cf55ccaaae8e335621bd6897b5e439d93b8dc011a4331a0f42d43454db7c51eadf004bd9e43522c4894f02c602b709cd45e67597c622f2eb9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000488016345785d8a000083053c423080a0000000000000000000000000000000000000000000000000000000000000000088000000000000000082024cf9014ab86c02f86901078203e88203e8830f424094cccccccccccccccccccccccccccccccccccccccc80820401c001a0113c54f83e1b1e5c689ba86d288ec0ce2877f350b71821c4c7a3f7073b46602ca0548848e711b86ceeb657fd0a0bf44b792f6665ed18ec8a04f498471e811f8f97b86a02f8670108648203e8830f424094ccccccccccccccccccccccccccccccccccccccce80820403c080a08d7ec1116399aab6e1297b09302b291d73c5898a0338fb62a46c74b037d15a15a03cacc1a12eb47c261394443d490b8436f53a99d2109dac9ca5018cf531e6b29db86e02f86b0109830186a0830186a0830f424094cccccccccccccccccccccccccccccccccccccccd80820404c001a054bd3a30ee3c2182d92f30223adb53feb0f51d76970a2628d9479536ff3edfe9a06f681aa0ad9362eeeafb981394526ca6425f3a24e1c7f44c413b68dd2e56e5d0c0", "blockHeader": { "parentHash": "0x5c66e5b6d6513ec98e9d8ee88137f1a2418542550977ea02015439acd2bf8f8e", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", @@ -359,7 +575,6 @@ "baseFeePerGas": "0x024c", "hash": "0xf5e2f23d9a212edbb35a07bc9f582f4a632b694bd4ef8742de8ad6c6acacf72c" }, - "blocknumber": "4", "transactions": [ { "type": "0x02", @@ -410,139 +625,44 @@ "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" } ], - "uncleHeaders": [] + "uncleHeaders": [], + "receipts": [ + { + "transactionHash": 
"0x2e93d5b1a0c54422cc7603f0d3f18e14283f9a0e45a6dbd97e5dc9380a32777e", + "type": "0x02", + "cumulativeGasUsed": "0x015544", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f901090183015544b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + }, + { + "transactionHash": "0x00f633ca59767a100b4b0cf66e92b60eccb1c806a84facd4b4c838d52d877302", + "type": "0x02", + "cumulativeGasUsed": "0x03dcc3", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f90109018303dcc3b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + }, + { + "transactionHash": "0x26f6a986bb0f85d8f7abbce82c0b77ba59a8ea57486775076e37590a9c30e47d", + "type": "0x02", + "cumulativeGasUsed": "0x053c42", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": 
"0x02f901090183053c42b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + } + ], + "rlp": "0xf9034ff901fea05c66e5b6d6513ec98e9d8ee88137f1a2418542550977ea02015439acd2bf8f8ea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a0e2b2b992b108bcd0e036067ef693f2d1b94c2f48d074a4f6b9d98537bbf15e9aa07617400c1efcb3e64b8cf55ccaaae8e335621bd6897b5e439d93b8dc011a4331a0f42d43454db7c51eadf004bd9e43522c4894f02c602b709cd45e67597c622f2eb9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000488016345785d8a000083053c423080a0000000000000000000000000000000000000000000000000000000000000000088000000000000000082024cf9014ab86c02f86901078203e88203e8830f424094cccccccccccccccccccccccccccccccccccccccc80820401c001a0113c54f83e1b1e5c689ba86d288ec0ce2877f350b71821c4c7a3f7073b46602ca0548848e711b86ceeb657fd0a0bf44b792f6665ed18ec8a04f498471e811f8f97b86a02f8670108648203e8830f424094ccccccccccccccccccccccccccccccccccccccce80820403c080a08d7ec1116399aab6e1297b09302b291d73c5898a0338fb62a46c74b037d15a15a03cacc1a12eb47c261394443d490b8436f53a99d2109dac9ca5018cf531e6b29db86e02f86b0109830186a0830186a0830f424094cccccccccccccccccccccccccccccccccccccccd80820404c001a054bd3a30ee3c2182d92f30223adb53feb0f51d76970a2628d9479536ff3edfe9a06f681aa0ad9362eeeafb981394526ca6425f3a24e1c7f44c413b68dd2e56e5d0c0", + "blocknumber": "4" } ], - "lastblockhash": "0xf5e2f23d9a212edbb35a07bc9f582f4a632b694bd4ef8742de8ad6c6acacf72c", - "pre": { - "0x000000000000000000000000000000000000c0de": { - "nonce": "0x01", - "balance": "0x00", - "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", - "storage": {} - }, - "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { - "nonce": "0x00", - "balance": "0x01000000000000000000", - "code": "0x", - "storage": {} - }, - "0xcccccccccccccccccccccccccccccccccccccccc": { - "nonce": "0x01", - "balance": "0x010000000000", - "code": "0x484355483a036110004301554761200043015500", - "storage": {} - }, - "0xcccccccccccccccccccccccccccccccccccccccd": { - "nonce": "0x01", - "balance": "0x020000000000", - "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", - "storage": {} - }, - "0xccccccccccccccccccccccccccccccccccccccce": { - "nonce": "0x01", - "balance": "0x020000000000", - "code": "0x60008060008061100061c0de5af160008060008073cccccccccccccccccccccccccccccccccccccccc5af4905050", - "storage": {} - }, - "0xd02d72e067e77158444ef2020ff2d325f929b363": { - "nonce": "0x01", - "balance": "0x01000000000000000000", - "code": "0x", - "storage": {} - } - }, - "postState": { - "0x000000000000000000000000000000000000c0de": { - "nonce": 
"0x01", - "balance": "0x3000", - "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", - "storage": { - "0x02": "0x02fe", - "0x03": "0x029f", - "0x04": "0x024c", - "0x1002": "0x64", - "0x1003": "0x64", - "0x1004": "0x64", - "0x2002": "0x1000", - "0x2003": "0x2000", - "0x2004": "0x3000" - } - }, - "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { - "nonce": "0x0a", - "balance": "0xfffffffffba0afe5e7", - "code": "0x", - "storage": {} - }, - "0xba5e000000000000000000000000000000000000": { - "nonce": "0x00", - "balance": "0x6f05b5a16c783b4b", - "code": "0x", - "storage": {} - }, - "0xcccccccccccccccccccccccccccccccccccccccc": { - "nonce": "0x01", - "balance": "0x010000000000", - "code": "0x484355483a036110004301554761200043015500", - "storage": { - "0x01": "0x036b", - "0x02": "0x02fe", - "0x03": "0x029f", - "0x04": "0x024c", - "0x1001": "0x01", - "0x1002": "0x0a", - "0x1003": "0x0149", - "0x1004": "0x019c", - "0x2001": "0x010000000000", - "0x2002": "0x010000000000", - "0x2003": "0x010000000000", - "0x2004": "0x010000000000" - } - }, - "0xcccccccccccccccccccccccccccccccccccccccd": { - "nonce": "0x01", - "balance": "0x020000000000", - "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", - "storage": { - "0x02": "0x02fe", - "0x03": "0x029f", - "0x04": "0x024c", - "0x1002": "0x64", - "0x1003": "0x018401", - "0x1004": "0x018454", - "0x2002": "0x020000000000", - "0x2003": "0x020000000000", - "0x2004": "0x020000000000" - } - }, - "0xccccccccccccccccccccccccccccccccccccccce": { - "nonce": "0x01", - "balance": "0x01ffffffd000", - "code": "0x60008060008061100061c0de5af160008060008073cccccccccccccccccccccccccccccccccccccccc5af4905050", - "storage": { - "0x02": "0x02fe", - "0x03": "0x029f", - "0x04": "0x024c", - "0x1002": "0x64", - "0x1003": "0x64", - "0x1004": "0x64", - "0x2002": "0x01fffffff000", - "0x2003": "0x01ffffffe000", - "0x2004": "0x01ffffffd000" - } - }, - "0xd02d72e067e77158444ef2020ff2d325f929b363": { - "nonce": "0x01", - "balance": "0x01000000000000000000", - "code": "0x", - "storage": {} - } - }, "sealEngine": "NoProof", - "config": { - "network": "London", - "chainid": "0x01" + "_info": { + "hash": "0x6e42f77408e60c3e36fa2e84a95fe0d885b013138c10dd90ca340e49343bafdd", + "fixture_format": "blockchain_test" } } -} +} \ No newline at end of file diff --git a/packages/testing/src/execution_testing/specs/tests/fixtures/blockchain_london_valid_filled.json b/packages/testing/src/execution_testing/specs/tests/fixtures/blockchain_london_valid_filled.json index 0968fb5608..3dd67a53a2 100644 --- a/packages/testing/src/execution_testing/specs/tests/fixtures/blockchain_london_valid_filled.json +++ b/packages/testing/src/execution_testing/specs/tests/fixtures/blockchain_london_valid_filled.json @@ -1,11 +1,6 @@ { "000/my_blockchain_test/London": { - "_info": { - "hash": "0x4ba67dfce5957e3339bf3e6e7ad78f4345b34a087c4656c92fddbef3726b1ec2", - "fixture_format": "blockchain_test" - }, "network": "London", - "genesisRLP": 
"0xf90200f901fba00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a089a5be1d3306f6f05b42678ef13ac3dbc37bef9a2a80862c21eb22eee29194c2a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200008088016345785d8a0000808000a000000000000000000000000000000000000000000000000000000000000000008800000000000000008203e8c0c0", "genesisBlockHeader": { "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", @@ -25,9 +20,139 @@ "baseFeePerGas": "0x03e8", "hash": "0x6241b4534da26b654ec5bb30d29b1d5202454af544b05828433354da7471957c" }, + "pre": { + "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { + "nonce": "0x00", + "balance": "0x01000000000000000000", + "code": "0x", + "storage": {} + }, + "0xd02d72e067e77158444ef2020ff2d325f929b363": { + "nonce": "0x01", + "balance": "0x01000000000000000000", + "code": "0x", + "storage": {} + }, + "0xcccccccccccccccccccccccccccccccccccccccc": { + "nonce": "0x01", + "balance": "0x010000000000", + "code": "0x484355483a036110004301554761200043015500", + "storage": {} + }, + "0xcccccccccccccccccccccccccccccccccccccccd": { + "nonce": "0x01", + "balance": "0x020000000000", + "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", + "storage": {} + }, + "0x000000000000000000000000000000000000c0de": { + "nonce": "0x01", + "balance": "0x00", + "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", + "storage": {} + }, + "0xccccccccccccccccccccccccccccccccccccccce": { + "nonce": "0x01", + "balance": "0x020000000000", + "code": "0x60008060008061100061c0de5af160008060008073cccccccccccccccccccccccccccccccccccccccc5af4905050", + "storage": {} + } + }, + "postState": { + "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { + "nonce": "0x0a", + "balance": "0xfffffffffba0afe5e7", + "code": "0x", + "storage": {} + }, + "0xd02d72e067e77158444ef2020ff2d325f929b363": { + "nonce": "0x01", + "balance": "0x01000000000000000000", + "code": "0x", + "storage": {} + }, + "0xcccccccccccccccccccccccccccccccccccccccc": { + "nonce": "0x01", + "balance": "0x010000000000", + "code": "0x484355483a036110004301554761200043015500", + "storage": { + "0x01": "0x036b", + "0x1001": "0x01", + "0x2001": "0x010000000000", + "0x02": "0x02fe", + "0x1002": "0x0a", + "0x2002": "0x010000000000", + "0x03": "0x029f", + "0x1003": "0x0149", + "0x2003": "0x010000000000", + "0x04": "0x024c", + "0x1004": "0x019c", + "0x2004": "0x010000000000" + } + }, + "0xcccccccccccccccccccccccccccccccccccccccd": { + "nonce": "0x01", + "balance": "0x020000000000", + "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", + "storage": { + "0x02": "0x02fe", + "0x1002": "0x64", + "0x2002": "0x020000000000", + "0x03": "0x029f", + "0x1003": "0x018401", + "0x2003": "0x020000000000", + 
"0x04": "0x024c", + "0x1004": "0x018454", + "0x2004": "0x020000000000" + } + }, + "0x000000000000000000000000000000000000c0de": { + "nonce": "0x01", + "balance": "0x3000", + "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", + "storage": { + "0x02": "0x02fe", + "0x1002": "0x64", + "0x2002": "0x1000", + "0x03": "0x029f", + "0x1003": "0x64", + "0x2003": "0x2000", + "0x04": "0x024c", + "0x1004": "0x64", + "0x2004": "0x3000" + } + }, + "0xccccccccccccccccccccccccccccccccccccccce": { + "nonce": "0x01", + "balance": "0x01ffffffd000", + "code": "0x60008060008061100061c0de5af160008060008073cccccccccccccccccccccccccccccccccccccccc5af4905050", + "storage": { + "0x02": "0x02fe", + "0x1002": "0x64", + "0x2002": "0x01fffffff000", + "0x03": "0x029f", + "0x1003": "0x64", + "0x2003": "0x01ffffffe000", + "0x04": "0x024c", + "0x1004": "0x64", + "0x2004": "0x01ffffffd000" + } + }, + "0xba5e000000000000000000000000000000000000": { + "nonce": "0x00", + "balance": "0x6f05b5a16c783b4b", + "code": "0x", + "storage": {} + } + }, + "lastblockhash": "0xf5e2f23d9a212edbb35a07bc9f582f4a632b694bd4ef8742de8ad6c6acacf72c", + "config": { + "network": "London", + "chainid": "0x01" + }, + "genesisRLP": "0xf90200f901fba00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a089a5be1d3306f6f05b42678ef13ac3dbc37bef9a2a80862c21eb22eee29194c2a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200008088016345785d8a0000808000a000000000000000000000000000000000000000000000000000000000000000008800000000000000008203e8c0c0", "blocks": [ { - "rlp": "0xf9026ef901fea06241b4534da26b654ec5bb30d29b1d5202454af544b05828433354da7471957ca01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a03eb2e72e8ed9a59768bb9ac05915b781a764f2582edcf111053fe6531e466613a0586f963eea0fb4726f0f91f895f2aa5d67bffb5207a529b40d781244a0c7017ba029b0562f7140574dd0d50dee8a271b22e1a0a7b78fca58f7c60370d8317ba2a9b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000188016345785d8a0000830155340c80a0000000000000000000000000000000000000000000000000000000000000000088000000000000000082036bf86ab86802f8650180018203e8830f424094cccccccccccccccccccccccccccccccccccccccc8001c080a03351b6993208fc7b03fd770c8c06440cfb0d75b29aafee0a4c64c8ba20a80e58a067817fdb3058e75c5d26e51a33d1e338346bc7d406e115447a4bb5f7ab01625bc0", "blockHeader": { 
"parentHash": "0x6241b4534da26b654ec5bb30d29b1d5202454af544b05828433354da7471957c", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", @@ -47,7 +172,6 @@ "baseFeePerGas": "0x036b", "hash": "0x12bba91a7e1f277f1549e832e06820f8849308f70f8659acf846bdc15f5d586e" }, - "blocknumber": "1", "transactions": [ { "type": "0x02", @@ -66,10 +190,22 @@ "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" } ], - "uncleHeaders": [] + "uncleHeaders": [], + "receipts": [ + { + "transactionHash": "0xbca913628955677525aef79728df903b090ce75a14d47dfaf9200753ee28f8a9", + "type": "0x02", + "cumulativeGasUsed": "0x015534", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f901090183015534b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + } + ], + "rlp": "0xf9026ef901fea06241b4534da26b654ec5bb30d29b1d5202454af544b05828433354da7471957ca01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a03eb2e72e8ed9a59768bb9ac05915b781a764f2582edcf111053fe6531e466613a0586f963eea0fb4726f0f91f895f2aa5d67bffb5207a529b40d781244a0c7017ba029b0562f7140574dd0d50dee8a271b22e1a0a7b78fca58f7c60370d8317ba2a9b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000188016345785d8a0000830155340c80a0000000000000000000000000000000000000000000000000000000000000000088000000000000000082036bf86ab86802f8650180018203e8830f424094cccccccccccccccccccccccccccccccccccccccc8001c080a03351b6993208fc7b03fd770c8c06440cfb0d75b29aafee0a4c64c8ba20a80e58a067817fdb3058e75c5d26e51a33d1e338346bc7d406e115447a4bb5f7ab01625bc0", + "blocknumber": "1" }, { - "rlp": 
"0xf90349f901fea012bba91a7e1f277f1549e832e06820f8849308f70f8659acf846bdc15f5d586ea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a0d9d9cc8ae73834ba9dc75fe8c68d36e980c82fcaf887dc220a05f152a327ae55a05521d9ad5adef72f021e4270a1f6851ca772dd56acaf4ff03362151bfb715298a0e225d44649351c3dccc61c1d904451d6f0f5a407c072099fe1085cfad88447d6b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000288016345785d8a000083053c421880a000000000000000000000000000000000000000000000000000000000000000008800000000000000008202fef90144b86a02f86701010a8203e8830f424094cccccccccccccccccccccccccccccccccccccccc80820201c080a06ea285a870a051df2b8c80c462b7d3517f984815e09c4748efc8548a40434050a052f635268c1b9e1538ac76b37cb69c7b897595744d6de2dda9507b6624d352d0b86a02f8670102648203e8830f424094cccccccccccccccccccccccccccccccccccccccd80820202c080a0218549e818b36b3823c3f11a65ab5c1e16f6886469c385503cc2f1af1f53825da058b082850f55fd61290a99add11b7af6356ac8d55fbe4d513f06bf648824a64db86a02f8670103648203e8830f424094ccccccccccccccccccccccccccccccccccccccce80820203c001a0339e9ed3f6342f2644e4cd33a775b7e62a8208a137dcf2e354c7473caa77782aa074004c85b651c8ca9828aac28414997f3eff46edbba2bb606a545d95fd4c9b3ac0", "blockHeader": { "parentHash": "0x12bba91a7e1f277f1549e832e06820f8849308f70f8659acf846bdc15f5d586e", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", @@ -89,7 +225,6 @@ "baseFeePerGas": "0x02fe", "hash": "0x0e043cb2eb0339900f6199c0ab517e5be3a81d898fa58078ed8b866ddc60b010" }, - "blocknumber": "2", "transactions": [ { "type": "0x02", @@ -140,10 +275,40 @@ "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" } ], - "uncleHeaders": [] + "uncleHeaders": [], + "receipts": [ + { + "transactionHash": "0x58c03df6d8bcf23e82becf748be6804b152fe2b00c569b33c6d3d85b58aafc17", + "type": "0x02", + "cumulativeGasUsed": "0x015544", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f901090183015544b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + }, + { + "transactionHash": 
"0x2a74b75600019e641a3106069cd02330ff969cd0538d087b6a81c46e01d8d3fd", + "type": "0x02", + "cumulativeGasUsed": "0x02b4c3", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f90109018302b4c3b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + }, + { + "transactionHash": "0x7809ac11ae8a0b4c05b635dd3e3dbca1c99757bd790157d4a8d15f1d2a765768", + "type": "0x02", + "cumulativeGasUsed": "0x053c42", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f901090183053c42b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + } + ], + "rlp": 
"0xf90349f901fea012bba91a7e1f277f1549e832e06820f8849308f70f8659acf846bdc15f5d586ea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a0d9d9cc8ae73834ba9dc75fe8c68d36e980c82fcaf887dc220a05f152a327ae55a05521d9ad5adef72f021e4270a1f6851ca772dd56acaf4ff03362151bfb715298a0e225d44649351c3dccc61c1d904451d6f0f5a407c072099fe1085cfad88447d6b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000288016345785d8a000083053c421880a000000000000000000000000000000000000000000000000000000000000000008800000000000000008202fef90144b86a02f86701010a8203e8830f424094cccccccccccccccccccccccccccccccccccccccc80820201c080a06ea285a870a051df2b8c80c462b7d3517f984815e09c4748efc8548a40434050a052f635268c1b9e1538ac76b37cb69c7b897595744d6de2dda9507b6624d352d0b86a02f8670102648203e8830f424094cccccccccccccccccccccccccccccccccccccccd80820202c080a0218549e818b36b3823c3f11a65ab5c1e16f6886469c385503cc2f1af1f53825da058b082850f55fd61290a99add11b7af6356ac8d55fbe4d513f06bf648824a64db86a02f8670103648203e8830f424094ccccccccccccccccccccccccccccccccccccccce80820203c001a0339e9ed3f6342f2644e4cd33a775b7e62a8208a137dcf2e354c7473caa77782aa074004c85b651c8ca9828aac28414997f3eff46edbba2bb606a545d95fd4c9b3ac0", + "blocknumber": "2" }, { - "rlp": "0xf9034ff901fea00e043cb2eb0339900f6199c0ab517e5be3a81d898fa58078ed8b866ddc60b010a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a0bb08d7ca9c904f3d01b78041a9d70f69e83b0a6ec7af471cbd00933a47fdacaea027f7b224df1d270bfa03ba564cd4962071b89f91c965dbbfacff55e7ec66c652a0f42d43454db7c51eadf004bd9e43522c4894f02c602b709cd45e67597c622f2eb9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000388016345785d8a000083053c422480a0000000000000000000000000000000000000000000000000000000000000000088000000000000000082029ff9014ab86c02f86901048203e88203e8830f424094cccccccccccccccccccccccccccccccccccccccc80820301c001a0720e2870881f8b0e285b7ec02c169f1165847bcb5f36ea5f33f3db6079854f63a04448266b715d7d99acd1e31dcab50d7119faa620d44c69b3f64f97d636634169b86a02f8670105648203e8830f424094ccccccccccccccccccccccccccccccccccccccce80820303c080a09c8531a41f9281633470c5e12b6c72c8930409a6433f26bf7b394a703d18512ea07a0c6151fde75f10a7e4efdd17a21f1f25206559bd4b8cf7880e5bc30e1cfe33b86e02f86b0106830186a0830186a0830f424094cccccccccccccccccccccccccccccccccccccccd80820304c001a0c8b85e158b532a0e3b3b5848fad0f4d5c6807805a4ce65e8591de13a62f3ac6aa03e923eb1be030c3ca69623f31ad3a357368b1ccb7ee48ac8deec5cb5dc49cb0cc0", "blockHeader": { "parentHash": "0x0e043cb2eb0339900f6199c0ab517e5be3a81d898fa58078ed8b866ddc60b010", "uncleHash": 
"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", @@ -163,7 +328,6 @@ "baseFeePerGas": "0x029f", "hash": "0x5c66e5b6d6513ec98e9d8ee88137f1a2418542550977ea02015439acd2bf8f8e" }, - "blocknumber": "3", "transactions": [ { "type": "0x02", @@ -214,10 +378,40 @@ "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" } ], - "uncleHeaders": [] + "uncleHeaders": [], + "receipts": [ + { + "transactionHash": "0x968ad99d155b27adf6347a19648ee67c40f091a1bae1f373ff3de625fcc3e2ed", + "type": "0x02", + "cumulativeGasUsed": "0x015544", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f901090183015544b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + }, + { + "transactionHash": "0x371a182905cef209b524b49169cf7706803c08f78cd5a3511a040dc33f964b9d", + "type": "0x02", + "cumulativeGasUsed": "0x03dcc3", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f90109018303dcc3b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + }, + { + "transactionHash": "0x9ef9c65647e57c7a93d6e88e47e03ae4e4c28625d301bd74d633d95291ce5ec1", + "type": "0x02", + "cumulativeGasUsed": "0x053c42", + "bloom": 
"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f901090183053c42b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + } + ], + "rlp": "0xf9034ff901fea00e043cb2eb0339900f6199c0ab517e5be3a81d898fa58078ed8b866ddc60b010a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a0bb08d7ca9c904f3d01b78041a9d70f69e83b0a6ec7af471cbd00933a47fdacaea027f7b224df1d270bfa03ba564cd4962071b89f91c965dbbfacff55e7ec66c652a0f42d43454db7c51eadf004bd9e43522c4894f02c602b709cd45e67597c622f2eb9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000388016345785d8a000083053c422480a0000000000000000000000000000000000000000000000000000000000000000088000000000000000082029ff9014ab86c02f86901048203e88203e8830f424094cccccccccccccccccccccccccccccccccccccccc80820301c001a0720e2870881f8b0e285b7ec02c169f1165847bcb5f36ea5f33f3db6079854f63a04448266b715d7d99acd1e31dcab50d7119faa620d44c69b3f64f97d636634169b86a02f8670105648203e8830f424094ccccccccccccccccccccccccccccccccccccccce80820303c080a09c8531a41f9281633470c5e12b6c72c8930409a6433f26bf7b394a703d18512ea07a0c6151fde75f10a7e4efdd17a21f1f25206559bd4b8cf7880e5bc30e1cfe33b86e02f86b0106830186a0830186a0830f424094cccccccccccccccccccccccccccccccccccccccd80820304c001a0c8b85e158b532a0e3b3b5848fad0f4d5c6807805a4ce65e8591de13a62f3ac6aa03e923eb1be030c3ca69623f31ad3a357368b1ccb7ee48ac8deec5cb5dc49cb0cc0", + "blocknumber": "3" }, { - "rlp": 
"0xf9034ff901fea05c66e5b6d6513ec98e9d8ee88137f1a2418542550977ea02015439acd2bf8f8ea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a0e2b2b992b108bcd0e036067ef693f2d1b94c2f48d074a4f6b9d98537bbf15e9aa07617400c1efcb3e64b8cf55ccaaae8e335621bd6897b5e439d93b8dc011a4331a0f42d43454db7c51eadf004bd9e43522c4894f02c602b709cd45e67597c622f2eb9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000488016345785d8a000083053c423080a0000000000000000000000000000000000000000000000000000000000000000088000000000000000082024cf9014ab86c02f86901078203e88203e8830f424094cccccccccccccccccccccccccccccccccccccccc80820401c001a0113c54f83e1b1e5c689ba86d288ec0ce2877f350b71821c4c7a3f7073b46602ca0548848e711b86ceeb657fd0a0bf44b792f6665ed18ec8a04f498471e811f8f97b86a02f8670108648203e8830f424094ccccccccccccccccccccccccccccccccccccccce80820403c080a08d7ec1116399aab6e1297b09302b291d73c5898a0338fb62a46c74b037d15a15a03cacc1a12eb47c261394443d490b8436f53a99d2109dac9ca5018cf531e6b29db86e02f86b0109830186a0830186a0830f424094cccccccccccccccccccccccccccccccccccccccd80820404c001a054bd3a30ee3c2182d92f30223adb53feb0f51d76970a2628d9479536ff3edfe9a06f681aa0ad9362eeeafb981394526ca6425f3a24e1c7f44c413b68dd2e56e5d0c0", "blockHeader": { "parentHash": "0x5c66e5b6d6513ec98e9d8ee88137f1a2418542550977ea02015439acd2bf8f8e", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", @@ -237,7 +431,6 @@ "baseFeePerGas": "0x024c", "hash": "0xf5e2f23d9a212edbb35a07bc9f582f4a632b694bd4ef8742de8ad6c6acacf72c" }, - "blocknumber": "4", "transactions": [ { "type": "0x02", @@ -288,139 +481,44 @@ "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" } ], - "uncleHeaders": [] + "uncleHeaders": [], + "receipts": [ + { + "transactionHash": "0x2e93d5b1a0c54422cc7603f0d3f18e14283f9a0e45a6dbd97e5dc9380a32777e", + "type": "0x02", + "cumulativeGasUsed": "0x015544", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f901090183015544b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + }, + { + "transactionHash": 
"0x00f633ca59767a100b4b0cf66e92b60eccb1c806a84facd4b4c838d52d877302", + "type": "0x02", + "cumulativeGasUsed": "0x03dcc3", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f90109018303dcc3b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + }, + { + "transactionHash": "0x26f6a986bb0f85d8f7abbce82c0b77ba59a8ea57486775076e37590a9c30e47d", + "type": "0x02", + "cumulativeGasUsed": "0x053c42", + "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": [], + "status": true, + "rlp": "0x02f901090183053c42b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0" + } + ], + "rlp": 
"0xf9034ff901fea05c66e5b6d6513ec98e9d8ee88137f1a2418542550977ea02015439acd2bf8f8ea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ba5e000000000000000000000000000000000000a0e2b2b992b108bcd0e036067ef693f2d1b94c2f48d074a4f6b9d98537bbf15e9aa07617400c1efcb3e64b8cf55ccaaae8e335621bd6897b5e439d93b8dc011a4331a0f42d43454db7c51eadf004bd9e43522c4894f02c602b709cd45e67597c622f2eb9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000488016345785d8a000083053c423080a0000000000000000000000000000000000000000000000000000000000000000088000000000000000082024cf9014ab86c02f86901078203e88203e8830f424094cccccccccccccccccccccccccccccccccccccccc80820401c001a0113c54f83e1b1e5c689ba86d288ec0ce2877f350b71821c4c7a3f7073b46602ca0548848e711b86ceeb657fd0a0bf44b792f6665ed18ec8a04f498471e811f8f97b86a02f8670108648203e8830f424094ccccccccccccccccccccccccccccccccccccccce80820403c080a08d7ec1116399aab6e1297b09302b291d73c5898a0338fb62a46c74b037d15a15a03cacc1a12eb47c261394443d490b8436f53a99d2109dac9ca5018cf531e6b29db86e02f86b0109830186a0830186a0830f424094cccccccccccccccccccccccccccccccccccccccd80820404c001a054bd3a30ee3c2182d92f30223adb53feb0f51d76970a2628d9479536ff3edfe9a06f681aa0ad9362eeeafb981394526ca6425f3a24e1c7f44c413b68dd2e56e5d0c0", + "blocknumber": "4" } ], - "lastblockhash": "0xf5e2f23d9a212edbb35a07bc9f582f4a632b694bd4ef8742de8ad6c6acacf72c", - "pre": { - "0x000000000000000000000000000000000000c0de": { - "nonce": "0x01", - "balance": "0x00", - "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", - "storage": {} - }, - "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { - "nonce": "0x00", - "balance": "0x01000000000000000000", - "code": "0x", - "storage": {} - }, - "0xcccccccccccccccccccccccccccccccccccccccc": { - "nonce": "0x01", - "balance": "0x010000000000", - "code": "0x484355483a036110004301554761200043015500", - "storage": {} - }, - "0xcccccccccccccccccccccccccccccccccccccccd": { - "nonce": "0x01", - "balance": "0x020000000000", - "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", - "storage": {} - }, - "0xccccccccccccccccccccccccccccccccccccccce": { - "nonce": "0x01", - "balance": "0x020000000000", - "code": "0x60008060008061100061c0de5af160008060008073cccccccccccccccccccccccccccccccccccccccc5af4905050", - "storage": {} - }, - "0xd02d72e067e77158444ef2020ff2d325f929b363": { - "nonce": "0x01", - "balance": "0x01000000000000000000", - "code": "0x", - "storage": {} - } - }, - "postState": { - "0x000000000000000000000000000000000000c0de": { - "nonce": "0x01", - "balance": "0x3000", - "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", - "storage": { - "0x02": "0x02fe", - "0x03": "0x029f", - "0x04": "0x024c", - "0x1002": "0x64", - "0x1003": "0x64", - "0x1004": "0x64", - "0x2002": "0x1000", - "0x2003": "0x2000", - "0x2004": "0x3000" - } - }, - "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { - "nonce": "0x0a", - "balance": "0xfffffffffba0afe5e7", - "code": "0x", - "storage": {} - }, - "0xba5e000000000000000000000000000000000000": { - "nonce": "0x00", - "balance": 
"0x6f05b5a16c783b4b", - "code": "0x", - "storage": {} - }, - "0xcccccccccccccccccccccccccccccccccccccccc": { - "nonce": "0x01", - "balance": "0x010000000000", - "code": "0x484355483a036110004301554761200043015500", - "storage": { - "0x01": "0x036b", - "0x02": "0x02fe", - "0x03": "0x029f", - "0x04": "0x024c", - "0x1001": "0x01", - "0x1002": "0x0a", - "0x1003": "0x0149", - "0x1004": "0x019c", - "0x2001": "0x010000000000", - "0x2002": "0x010000000000", - "0x2003": "0x010000000000", - "0x2004": "0x010000000000" - } - }, - "0xcccccccccccccccccccccccccccccccccccccccd": { - "nonce": "0x01", - "balance": "0x020000000000", - "code": "0x60008060008073cccccccccccccccccccccccccccccccccccccccc5af450", - "storage": { - "0x02": "0x02fe", - "0x03": "0x029f", - "0x04": "0x024c", - "0x1002": "0x64", - "0x1003": "0x018401", - "0x1004": "0x018454", - "0x2002": "0x020000000000", - "0x2003": "0x020000000000", - "0x2004": "0x020000000000" - } - }, - "0xccccccccccccccccccccccccccccccccccccccce": { - "nonce": "0x01", - "balance": "0x01ffffffd000", - "code": "0x60008060008061100061c0de5af160008060008073cccccccccccccccccccccccccccccccccccccccc5af4905050", - "storage": { - "0x02": "0x02fe", - "0x03": "0x029f", - "0x04": "0x024c", - "0x1002": "0x64", - "0x1003": "0x64", - "0x1004": "0x64", - "0x2002": "0x01fffffff000", - "0x2003": "0x01ffffffe000", - "0x2004": "0x01ffffffd000" - } - }, - "0xd02d72e067e77158444ef2020ff2d325f929b363": { - "nonce": "0x01", - "balance": "0x01000000000000000000", - "code": "0x", - "storage": {} - } - }, "sealEngine": "NoProof", - "config": { - "network": "London", - "chainid": "0x01" + "_info": { + "hash": "0x620a13e96b3eb731834fad810d11e7c1ce83353cf2fdd5a2548e730d960a3951", + "fixture_format": "blockchain_test" } } -} +} \ No newline at end of file diff --git a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_blockchain_test_engine_tx_type_0.json b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_blockchain_test_engine_tx_type_0.json index f455f4ed49..759d758cb4 100644 --- a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_blockchain_test_engine_tx_type_0.json +++ b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_blockchain_test_engine_tx_type_0.json @@ -1,7 +1,7 @@ { "000/my_chain_id_test/Cancun/tx_type_0": { "_info": { - "hash": "0x82d853aef147345cedd8eb383c3ea00c1d1c4b64222911fd2580806de1f51c15", + "hash": "0xceef046d56927cb3ba54a0dddae52780b253edceb40696e62683dff192a68956", "fixture_format": "blockchain_test_engine" }, "network": "Cancun", @@ -9,7 +9,7 @@ "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", "coinbase": "0x0000000000000000000000000000000000000000", - "stateRoot": "0x789d559bf5d313e15da4139b57627160d23146cf6cdf9995e0394d165b1527ef", + "stateRoot": "0xd2600b08e8060646941f031c57f82638332bb61fbf50d4dd7ee79e044a10a7be", "transactionsTrie": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "receiptTrie": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "bloom": 
"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", @@ -26,27 +26,27 @@ "blobGasUsed": "0x00", "excessBlobGas": "0x00", "parentBeaconBlockRoot": "0x0000000000000000000000000000000000000000000000000000000000000000", - "hash": "0x28c341ffc393152bd02e8689d8172dd66939ac3be2b91c5841721d1755d13c2b" + "hash": "0x7f67195b5e60c9989632e87927cbbeadeed869fda749e45f9bdc7cf6e2941a1b" }, "engineNewPayloads": [ { "params": [ { - "parentHash": "0x28c341ffc393152bd02e8689d8172dd66939ac3be2b91c5841721d1755d13c2b", + "parentHash": "0x7f67195b5e60c9989632e87927cbbeadeed869fda749e45f9bdc7cf6e2941a1b", "feeRecipient": "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", - "stateRoot": "0xdb94ed1be4d52fb6360c88b363cb71af08df2493e57a66ec4751f90097f2fcbb", - "receiptsRoot": "0xc598f69a5674cae9337261b669970e24abc0b46e6d284372a239ec8ccbf20b0a", - "logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "stateRoot": "0xdcdfda5b82f001e215063fbe542aa92ef1790e84b0a4d3a65845f4a5c644aa5d", + "receiptsRoot": "0x4b93b3c0006d672c5dfd4094132d3e8acd463e7cb018f86df29136c9a399d0b6", + "logsBloom": "0x04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000", "blockNumber": "0x1", "gasLimit": "0x2540be400", - "gasUsed": "0xa861", + "gasUsed": "0xab63", "timestamp": "0x3e8", "extraData": "0x00", "prevRandao": "0x0000000000000000000000000000000000000000000000000000000000000000", "baseFeePerGas": "0x7", "blobGasUsed": "0x0", "excessBlobGas": "0x0", - "blockHash": "0x3cce15f0f21f4ea23499856be01a090f0ea1be6bea4a11777b4d0cc70d105a4b", + "blockHash": "0x08333a79bf9a2f9c8807c9ea88ade4609d082582e80f84934bd98819002451ba", "transactions": [ "0xf861800a8405f5e10094100000000000000000000000000000000000000080801ba07e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37a05f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509b" ], @@ -59,7 +59,7 @@ "forkchoiceUpdatedVersion": "3" } ], - "lastblockhash": "0x3cce15f0f21f4ea23499856be01a090f0ea1be6bea4a11777b4d0cc70d105a4b", + "lastblockhash": "0x08333a79bf9a2f9c8807c9ea88ade4609d082582e80f84934bd98819002451ba", "pre": { "0x000f3df6d732807ef1319fb7b8bb8522d0beac02": { "balance": "0x00", @@ -70,7 +70,7 @@ 
"0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { @@ -92,20 +92,20 @@ "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": { "0x01": "0x01" } }, "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": { "nonce": "0x00", - "balance": "0x01f923", + "balance": "0x020229", "code": "0x", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { "nonce": "0x01", - "balance": "0x3635c9adc5de996c36", + "balance": "0x3635c9adc5de994e22", "code": "0x", "storage": {} } @@ -122,4 +122,4 @@ } } } -} +} \ No newline at end of file diff --git a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_blockchain_test_tx_type_0.json b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_blockchain_test_tx_type_0.json index 766dd9d9f5..d7b989563d 100644 --- a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_blockchain_test_tx_type_0.json +++ b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_blockchain_test_tx_type_0.json @@ -1,16 +1,11 @@ { "000/my_chain_id_test/Cancun/tx_type_0": { - "_info": { - "hash": "0x6e77c3ba39a0874917e9dcd7f911de1e950e82e028603a0fc39630f973579fd9", - "fixture_format": "blockchain_test" - }, "network": "Cancun", - "genesisRLP": "0xf9023df90237a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a0789d559bf5d313e15da4139b57627160d23146cf6cdf9995e0394d165b1527efa056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080808502540be400808000a0000000000000000000000000000000000000000000000000000000000000000088000000000000000007a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b4218080a00000000000000000000000000000000000000000000000000000000000000000c0c0c0", "genesisBlockHeader": { "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", "coinbase": "0x0000000000000000000000000000000000000000", - "stateRoot": "0x789d559bf5d313e15da4139b57627160d23146cf6cdf9995e0394d165b1527ef", + "stateRoot": "0xd2600b08e8060646941f031c57f82638332bb61fbf50d4dd7ee79e044a10a7be", "transactionsTrie": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "receiptTrie": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "bloom": 
"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", @@ -27,67 +22,19 @@ "blobGasUsed": "0x00", "excessBlobGas": "0x00", "parentBeaconBlockRoot": "0x0000000000000000000000000000000000000000000000000000000000000000", - "hash": "0x28c341ffc393152bd02e8689d8172dd66939ac3be2b91c5841721d1755d13c2b" + "hash": "0x7f67195b5e60c9989632e87927cbbeadeed869fda749e45f9bdc7cf6e2941a1b" }, - "blocks": [ - { - "rlp": "0xf902a5f9023ba028c341ffc393152bd02e8689d8172dd66939ac3be2b91c5841721d1755d13c2ba01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347942adc25665018aa1fe0e6bc666dac8fc2697ff9baa0db94ed1be4d52fb6360c88b363cb71af08df2493e57a66ec4751f90097f2fcbba08151d548273f6683169524b66ca9fe338b9ce42bc3540046c828fd939ae23bcba0c598f69a5674cae9337261b669970e24abc0b46e6d284372a239ec8ccbf20b0ab901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080018502540be40082a8618203e800a0000000000000000000000000000000000000000000000000000000000000000088000000000000000007a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b4218080a00000000000000000000000000000000000000000000000000000000000000000f863f861800a8405f5e10094100000000000000000000000000000000000000080801ba07e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37a05f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509bc0c0", - "blockHeader": { - "parentHash": "0x28c341ffc393152bd02e8689d8172dd66939ac3be2b91c5841721d1755d13c2b", - "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", - "coinbase": "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", - "stateRoot": "0xdb94ed1be4d52fb6360c88b363cb71af08df2493e57a66ec4751f90097f2fcbb", - "transactionsTrie": "0x8151d548273f6683169524b66ca9fe338b9ce42bc3540046c828fd939ae23bcb", - "receiptTrie": "0xc598f69a5674cae9337261b669970e24abc0b46e6d284372a239ec8ccbf20b0a", - "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "difficulty": "0x00", - "number": "0x01", - "gasLimit": "0x02540be400", - "gasUsed": "0xa861", - "timestamp": "0x03e8", - "extraData": "0x00", - "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", - "nonce": "0x0000000000000000", - 
"baseFeePerGas": "0x07", - "withdrawalsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", - "blobGasUsed": "0x00", - "excessBlobGas": "0x00", - "parentBeaconBlockRoot": "0x0000000000000000000000000000000000000000000000000000000000000000", - "hash": "0x3cce15f0f21f4ea23499856be01a090f0ea1be6bea4a11777b4d0cc70d105a4b" - }, - "blocknumber": "1", - "transactions": [ - { - "type": "0x00", - "chainId": "0x00", - "nonce": "0x00", - "gasPrice": "0x0a", - "gasLimit": "0x05f5e100", - "to": "0x1000000000000000000000000000000000000000", - "value": "0x00", - "data": "0x", - "v": "0x1b", - "r": "0x7e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37", - "s": "0x5f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509b", - "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" - } - ], - "uncleHeaders": [], - "withdrawals": [] - } - ], - "lastblockhash": "0x3cce15f0f21f4ea23499856be01a090f0ea1be6bea4a11777b4d0cc70d105a4b", "pre": { "0x000f3df6d732807ef1319fb7b8bb8522d0beac02": { + "nonce": "0x01", "balance": "0x00", "code": "0x3373fffffffffffffffffffffffffffffffffffffffe14604d57602036146024575f5ffd5b5f35801560495762001fff810690815414603c575f5ffd5b62001fff01545f5260205ff35b5f5ffd5b62001fff42064281555f359062001fff015500", - "nonce": "0x01", "storage": {} }, "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { @@ -99,9 +46,9 @@ }, "postState": { "0x000f3df6d732807ef1319fb7b8bb8522d0beac02": { + "nonce": "0x01", "balance": "0x00", "code": "0x3373fffffffffffffffffffffffffffffffffffffffe14604d57602036146024575f5ffd5b5f35801560495762001fff810690815414603c575f5ffd5b62001fff01545f5260205ff35b5f5ffd5b62001fff42064281555f359062001fff015500", - "nonce": "0x01", "storage": { "0x03e8": "0x03e8" } @@ -109,35 +56,107 @@ "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": { "0x01": "0x01" } }, - "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": { - "nonce": "0x00", - "balance": "0x01f923", + "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { + "nonce": "0x01", + "balance": "0x3635c9adc5de994e22", "code": "0x", "storage": {} }, - "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { - "nonce": "0x01", - "balance": "0x3635c9adc5de996c36", + "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": { + "nonce": "0x00", + "balance": "0x020229", "code": "0x", "storage": {} } }, - "sealEngine": "NoProof", + "lastblockhash": "0x08333a79bf9a2f9c8807c9ea88ade4609d082582e80f84934bd98819002451ba", "config": { "network": "Cancun", "chainid": "0x01", "blobSchedule": { "Cancun": { - "max": "0x06", "target": "0x03", + "max": "0x06", "baseFeeUpdateFraction": "0x32f0ed" } } + }, + "genesisRLP": 
"0xf9023df90237a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a0d2600b08e8060646941f031c57f82638332bb61fbf50d4dd7ee79e044a10a7bea056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080808502540be400808000a0000000000000000000000000000000000000000000000000000000000000000088000000000000000007a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b4218080a00000000000000000000000000000000000000000000000000000000000000000c0c0c0", + "blocks": [ + { + "blockHeader": { + "parentHash": "0x7f67195b5e60c9989632e87927cbbeadeed869fda749e45f9bdc7cf6e2941a1b", + "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", + "coinbase": "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", + "stateRoot": "0xdcdfda5b82f001e215063fbe542aa92ef1790e84b0a4d3a65845f4a5c644aa5d", + "transactionsTrie": "0x8151d548273f6683169524b66ca9fe338b9ce42bc3540046c828fd939ae23bcb", + "receiptTrie": "0x4b93b3c0006d672c5dfd4094132d3e8acd463e7cb018f86df29136c9a399d0b6", + "bloom": "0x04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000", + "difficulty": "0x00", + "number": "0x01", + "gasLimit": "0x02540be400", + "gasUsed": "0xab63", + "timestamp": "0x03e8", + "extraData": "0x00", + "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "nonce": "0x0000000000000000", + "baseFeePerGas": "0x07", + "withdrawalsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", + "blobGasUsed": "0x00", + "excessBlobGas": "0x00", + "parentBeaconBlockRoot": "0x0000000000000000000000000000000000000000000000000000000000000000", + "hash": "0x08333a79bf9a2f9c8807c9ea88ade4609d082582e80f84934bd98819002451ba" + }, + "transactions": [ + { + "type": "0x00", + "chainId": "0x00", + "nonce": "0x00", + "gasPrice": "0x0a", + "gasLimit": "0x05f5e100", + "to": "0x1000000000000000000000000000000000000000", + "value": "0x00", + "data": "0x", + "v": "0x1b", + "r": "0x7e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37", + "s": "0x5f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509b", + "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" + } + ], + "uncleHeaders": [], + "withdrawals": [], + "receipts": [ + { + "transactionHash": "0xdb1e409d11d92e6e8b3825ec82dff14f3661f1247c0d306ed4ff6aa22b0987f4", + "cumulativeGasUsed": "0xab63", + "bloom": 
"0x04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000", + "logs": [ + { + "address": "0x1000000000000000000000000000000000000000", + "topics": [ + "0x0000000000000000000000000000000000000000000000000000000000000002" + ], + "data": "0x00" + } + ], + "rlp": "0xf901430182ab63b9010004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000f83af838941000000000000000000000000000000000000000e1a0000000000000000000000000000000000000000000000000000000000000000200", + "status": true, + "type": "0x00" + } + ], + "rlp": "0xf902a5f9023ba07f67195b5e60c9989632e87927cbbeadeed869fda749e45f9bdc7cf6e2941a1ba01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347942adc25665018aa1fe0e6bc666dac8fc2697ff9baa0dcdfda5b82f001e215063fbe542aa92ef1790e84b0a4d3a65845f4a5c644aa5da08151d548273f6683169524b66ca9fe338b9ce42bc3540046c828fd939ae23bcba04b93b3c0006d672c5dfd4094132d3e8acd463e7cb018f86df29136c9a399d0b6b901000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000080018502540be40082ab638203e800a0000000000000000000000000000000000000000000000000000000000000000088000000000000000007a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b4218080a00000000000000000000000000000000000000000000000000000000000000000f863f861800a8405f5e10094100000000000000000000000000000000000000080801ba07e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37a05f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509bc0c0", + "blocknumber": "1" + } + ], + "sealEngine": "NoProof", + "_info": { + "hash": "0x5e72c19553fb0f49fe42ab04412d476c3106c4a949a43545451a4bf0688f7a96", + "fixture_format": "blockchain_test" } } -} +} \ No newline at end of file diff --git a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_state_test_tx_type_0.json b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_state_test_tx_type_0.json index 6cfaf9eff9..0b68cc8628 100644 --- a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_state_test_tx_type_0.json +++ b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_state_test_tx_type_0.json @@ -1,7 +1,7 @@ { "000/my_chain_id_test/Cancun/tx_type_0": { 
"_info": { - "hash": "0x9e980ae5cce8c6222831e99c0a33facfd7b8e50be053b84e3ed357c2c1818491", + "hash": "0x68c19b4fa2c34898a20f27615b4f84dc46fc84bf704972212c72602dae27d6eb", "fixture_format": "state_test" }, "env": { @@ -28,7 +28,7 @@ "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { @@ -57,8 +57,25 @@ "post": { "Cancun": [ { - "hash": "0x19919608275963e6e20a1191996f5b19db8208dd8df54097cfd2b9cb14f682b6", - "logs": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", + "hash": "0xaf03c39e7a64dc7072c098673612e0e0aa75419aec9bf8a454da0332fed3ae09", + "logs": "0x6f322afda7b9376eb43961bc85e0a097c0118bb3545c42c888830702a95b18a5", + "receipt": { + "cumulativeGasUsed": "0xab63", + "logs": [ + { + "address": "0x1000000000000000000000000000000000000000", + "data": "0x00", + "topics": [ + "0x0000000000000000000000000000000000000000000000000000000000000002" + ] + } + ], + "bloom": "0x04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000", + "transactionHash": "0xdb1e409d11d92e6e8b3825ec82dff14f3661f1247c0d306ed4ff6aa22b0987f4", + "rlp": "0xf901430182ab63b9010004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000f83af838941000000000000000000000000000000000000000e1a0000000000000000000000000000000000000000000000000000000000000000200", + "status": true, + "type": "0x00" + }, "txbytes": "0xf861800a8405f5e10094100000000000000000000000000000000000000080801ba07e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37a05f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509b", "indexes": { "data": 0, @@ -69,20 +86,20 @@ "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": { "0x01": "0x01" } }, "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": { - "balance": "0x01f923", + "balance": "0x020229", "nonce": "0x00", "code": "0x", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { "nonce": "0x01", - "balance": "0x3635c9adc5de996c36", + "balance": "0x3635c9adc5de994e22", "code": "0x", "storage": {} } @@ -91,4 +108,4 @@ ] } } -} +} \ No newline at end of file diff --git a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_state_test_tx_type_1.json b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_state_test_tx_type_1.json index 6ff672ed22..387201566c 100644 --- a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_state_test_tx_type_1.json 
+++ b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_cancun_state_test_tx_type_1.json @@ -1,7 +1,7 @@ { "000/my_chain_id_test/Cancun/tx_type_1": { "_info": { - "hash": "0x5ca0b01465e00811b102c33dadc2366055d6c73684bfb67b8414fa6146b38bca", + "hash": "0x49130f37343fa73f364ed83e2a2e7146c4effa64cdb6b371f5f2bf3f2004fade", "fixture_format": "state_test" }, "env": { @@ -28,7 +28,7 @@ "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { @@ -39,11 +39,11 @@ } }, "transaction": { - "accessLists" : [ + "accessLists": [ [ { - "address" : "0x0000000000000000000000000000000000001234", - "storageKeys" : [ + "address": "0x0000000000000000000000000000000000001234", + "storageKeys": [ "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001" ] @@ -68,8 +68,25 @@ "post": { "Cancun": [ { - "hash": "0xac58d5721514e1a25b3d952de498e835c25ad680fc6883c7cccfdb89582734dd", - "logs": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", + "hash": "0x0ee52e9f67e3c0327408ba3a0b530d6832345fa94420053457e3ae147c4f2363", + "logs": "0x6f322afda7b9376eb43961bc85e0a097c0118bb3545c42c888830702a95b18a5", + "receipt": { + "cumulativeGasUsed": "0xc39b", + "logs": [ + { + "address": "0x1000000000000000000000000000000000000000", + "data": "0x00", + "topics": [ + "0x0000000000000000000000000000000000000000000000000000000000000002" + ] + } + ], + "bloom": "0x04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000", + "transactionHash": "0x74db1f529be708c28edd23279649482c2174aff44ee7190929673c868d7b6bcf", + "rlp": "0x01f901430182c39bb9010004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000f83af838941000000000000000000000000000000000000000e1a0000000000000000000000000000000000000000000000000000000000000000200", + "status": true, + "type": "0x01" + }, "txbytes": "0x01f8bf01800a8405f5e1009410000000000000000000000000000000000000008080f85bf859940000000000000000000000000000000000001234f842a00000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000000180a07d812c52979e6d8616538f2146e9499ca59f01b8c540d7a950ec7e695020a123a02cec47a8982651456ad416578b8792e197e4f1861bcbcfb33e3f2ef75109c322", "indexes": { "data": 0, @@ -80,20 +97,20 @@ "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": { "0x01": 
"0x01" } }, "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": { - "balance": "0x0241cb", + "balance": "0x024ad1", "nonce": "0x00", "code": "0x", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { "nonce": "0x01", - "balance": "0x3635c9adc5de987a06", + "balance": "0x3635c9adc5de985bf2", "code": "0x", "storage": {} } @@ -102,4 +119,4 @@ ] } } -} +} \ No newline at end of file diff --git a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_istanbul_blockchain_test_tx_type_0.json b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_istanbul_blockchain_test_tx_type_0.json index 801b5428c4..024b092b1c 100644 --- a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_istanbul_blockchain_test_tx_type_0.json +++ b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_istanbul_blockchain_test_tx_type_0.json @@ -1,16 +1,11 @@ { "000/my_chain_id_test/Istanbul/tx_type_0": { - "_info": { - "hash": "0x8b79023dc05fa9a4e374938ea0c6e04248cc6f30a3e2754737885901693e0c6e", - "fixture_format": "blockchain_test" - }, "network": "Istanbul", - "genesisRLP": "0xf901faf901f5a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a0aff9f63320a482f8c4e4f15f659e6a7ac382138fbbb6919243b0cba4c5988a5aa056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000083020000808502540be400808000a00000000000000000000000000000000000000000000000000000000000000000880000000000000000c0c0", "genesisBlockHeader": { "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", "coinbase": "0x0000000000000000000000000000000000000000", - "stateRoot": "0xaff9f63320a482f8c4e4f15f659e6a7ac382138fbbb6919243b0cba4c5988a5a", + "stateRoot": "0x03727f261910c4f666532f9374d9dadaa72a69c2d4171955caa71e7a98adc447", "transactionsTrie": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "receiptTrie": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", @@ -22,30 +17,70 @@ "extraData": "0x00", "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "nonce": "0x0000000000000000", - "hash": "0xfcf37297d9e49a1c75e6d18f0e490d1c0ecb3c49cb464f4fd95bb224a8262bda" + "hash": 
"0xec3ce185f68c896dd44ffa0411678f59027ea09caf66de21213257b5c1241218" + }, + "pre": { + "0x1000000000000000000000000000000000000000": { + "nonce": "0x00", + "balance": "0x00", + "code": "0x46600155600260016000a100", + "storage": {} + }, + "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { + "nonce": "0x00", + "balance": "0x3635c9adc5dea00000", + "code": "0x", + "storage": {} + } + }, + "postState": { + "0x1000000000000000000000000000000000000000": { + "nonce": "0x00", + "balance": "0x00", + "code": "0x46600155600260016000a100", + "storage": { + "0x01": "0x01" + } + }, + "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { + "nonce": "0x01", + "balance": "0x3635c9adc5de99a02a", + "code": "0x", + "storage": {} + }, + "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": { + "nonce": "0x00", + "balance": "0x1bc16d674ece5fd6", + "code": "0x", + "storage": {} + } }, + "lastblockhash": "0xe8c05589c5de362c930acbe987e2791fbf96537f7bf6977875cbe6ed1479d3fa", + "config": { + "network": "Istanbul", + "chainid": "0x01" + }, + "genesisRLP": "0xf901faf901f5a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a003727f261910c4f666532f9374d9dadaa72a69c2d4171955caa71e7a98adc447a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000083020000808502540be400808000a00000000000000000000000000000000000000000000000000000000000000000880000000000000000c0c0", "blocks": [ { - "rlp": "0xf90262f901f9a0fcf37297d9e49a1c75e6d18f0e490d1c0ecb3c49cb464f4fd95bb224a8262bdaa01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347942adc25665018aa1fe0e6bc666dac8fc2697ff9baa0330a7882a8fadd60d0b6bf3d8ce7a8ae024800ae31ad8fae24d654a6a83fcad6a08151d548273f6683169524b66ca9fe338b9ce42bc3540046c828fd939ae23bcba0fa9e942c7bab1017c29ab8b7f9484e311f3a2ba680c2ec8abbaea2365cecc93eb901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000083020000018502540be40082a02d8203e800a00000000000000000000000000000000000000000000000000000000000000000880000000000000000f863f861800a8405f5e10094100000000000000000000000000000000000000080801ba07e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37a05f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509bc0", "blockHeader": { - "parentHash": "0xfcf37297d9e49a1c75e6d18f0e490d1c0ecb3c49cb464f4fd95bb224a8262bda", + "parentHash": "0xec3ce185f68c896dd44ffa0411678f59027ea09caf66de21213257b5c1241218", "uncleHash": 
"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", "coinbase": "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", - "stateRoot": "0x330a7882a8fadd60d0b6bf3d8ce7a8ae024800ae31ad8fae24d654a6a83fcad6", + "stateRoot": "0xecfbb77cf4742ee1ea84df09516302b4e59829582d6ad68c48fd605cb3ccdab4", "transactionsTrie": "0x8151d548273f6683169524b66ca9fe338b9ce42bc3540046c828fd939ae23bcb", - "receiptTrie": "0xfa9e942c7bab1017c29ab8b7f9484e311f3a2ba680c2ec8abbaea2365cecc93e", - "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "receiptTrie": "0xea2896eb820f2178aaddde2a641e11a37cf9c7c36e8ef302f0eb924c0318b671", + "bloom": "0x04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000", "difficulty": "0x020000", "number": "0x01", "gasLimit": "0x02540be400", - "gasUsed": "0xa02d", + "gasUsed": "0xa32f", "timestamp": "0x03e8", "extraData": "0x00", "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "nonce": "0x0000000000000000", - "hash": "0xc413245fffae8b7c6392bcd3dfbbdee24118e94d9a58722a7abd91a4e1d048b7" + "hash": "0xe8c05589c5de362c930acbe987e2791fbf96537f7bf6977875cbe6ed1479d3fa" }, - "blocknumber": "1", "transactions": [ { "type": "0x00", @@ -62,50 +97,34 @@ "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" } ], - "uncleHeaders": [] + "uncleHeaders": [], + "receipts": [ + { + "transactionHash": "0xdb1e409d11d92e6e8b3825ec82dff14f3661f1247c0d306ed4ff6aa22b0987f4", + "cumulativeGasUsed": "0xa32f", + "bloom": "0x04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000", + "logs": [ + { + "address": "0x1000000000000000000000000000000000000000", + "topics": [ + "0x0000000000000000000000000000000000000000000000000000000000000002" + ], + "data": "0x00" + } + ], + "rlp": 
"0xf901430182a32fb9010004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000f83af838941000000000000000000000000000000000000000e1a0000000000000000000000000000000000000000000000000000000000000000200", + "status": true, + "type": "0x00" + } + ], + "rlp": "0xf90262f901f9a0ec3ce185f68c896dd44ffa0411678f59027ea09caf66de21213257b5c1241218a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347942adc25665018aa1fe0e6bc666dac8fc2697ff9baa0ecfbb77cf4742ee1ea84df09516302b4e59829582d6ad68c48fd605cb3ccdab4a08151d548273f6683169524b66ca9fe338b9ce42bc3540046c828fd939ae23bcba0ea2896eb820f2178aaddde2a641e11a37cf9c7c36e8ef302f0eb924c0318b671b901000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000083020000018502540be40082a32f8203e800a00000000000000000000000000000000000000000000000000000000000000000880000000000000000f863f861800a8405f5e10094100000000000000000000000000000000000000080801ba07e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37a05f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509bc0", + "blocknumber": "1" } ], - "lastblockhash": "0xc413245fffae8b7c6392bcd3dfbbdee24118e94d9a58722a7abd91a4e1d048b7", - "pre": { - "0x1000000000000000000000000000000000000000": { - "nonce": "0x00", - "balance": "0x00", - "code": "0x4660015500", - "storage": {} - }, - "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { - "nonce": "0x00", - "balance": "0x3635c9adc5dea00000", - "code": "0x", - "storage": {} - } - }, - "postState": { - "0x1000000000000000000000000000000000000000": { - "nonce": "0x00", - "balance": "0x00", - "code": "0x4660015500", - "storage": { - "0x01": "0x01" - } - }, - "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": { - "nonce": "0x00", - "balance": "0x1bc16d674ece41c2", - "code": "0x", - "storage": {} - }, - "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { - "nonce": "0x01", - "balance": "0x3635c9adc5de99be3e", - "code": "0x", - "storage": {} - } - }, "sealEngine": "NoProof", - "config": { - "network": "Istanbul", - "chainid": "0x01" + "_info": { + "hash": "0x57a6f950bef3a44741ac3617e53f37bb55b124ce2e26d02508b217e6d351eba8", + "fixture_format": "blockchain_test" } } -} +} \ No newline at end of file diff --git a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_london_blockchain_test_tx_type_0.json b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_london_blockchain_test_tx_type_0.json index e35630dc8a..6ea499bbc5 100644 --- a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_london_blockchain_test_tx_type_0.json +++ b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_london_blockchain_test_tx_type_0.json 
@@ -1,16 +1,11 @@ { "000/my_chain_id_test/London/tx_type_0": { - "_info": { - "hash": "0x8f3a32616f93086c33339f1d020a97103df4963edcef3a108babcc95d4cd3951", - "fixture_format": "blockchain_test" - }, "network": "London", - "genesisRLP": "0xf901fbf901f6a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a0aff9f63320a482f8c4e4f15f659e6a7ac382138fbbb6919243b0cba4c5988a5aa056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000083020000808502540be400808000a0000000000000000000000000000000000000000000000000000000000000000088000000000000000007c0c0", "genesisBlockHeader": { "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", "coinbase": "0x0000000000000000000000000000000000000000", - "stateRoot": "0xaff9f63320a482f8c4e4f15f659e6a7ac382138fbbb6919243b0cba4c5988a5a", + "stateRoot": "0x03727f261910c4f666532f9374d9dadaa72a69c2d4171955caa71e7a98adc447", "transactionsTrie": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "receiptTrie": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", @@ -23,31 +18,71 @@ "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "nonce": "0x0000000000000000", "baseFeePerGas": "0x07", - "hash": "0x107e91b6c929ab8d50e2de4f3952d602f5531b3f5348430b4005fbbf9d195375" + "hash": "0xd8357264cc6251a5396f5fbae1e3b41fef2217b931b6da35e07cffc95838bf0b" + }, + "pre": { + "0x1000000000000000000000000000000000000000": { + "nonce": "0x00", + "balance": "0x00", + "code": "0x46600155600260016000a100", + "storage": {} + }, + "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { + "nonce": "0x00", + "balance": "0x3635c9adc5dea00000", + "code": "0x", + "storage": {} + } + }, + "postState": { + "0x1000000000000000000000000000000000000000": { + "nonce": "0x00", + "balance": "0x00", + "code": "0x46600155600260016000a100", + "storage": { + "0x01": "0x01" + } + }, + "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { + "nonce": "0x01", + "balance": "0x3635c9adc5de994e22", + "code": "0x", + "storage": {} + }, + "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": { + "nonce": "0x00", + "balance": "0x1bc16d674eca0229", + "code": "0x", + "storage": {} + } }, + "lastblockhash": 
"0x771beea22a98357c1fce6a4955c7023c95d515f5f3f761f8b5abd23d0c1805b5", + "config": { + "network": "London", + "chainid": "0x01" + }, + "genesisRLP": "0xf901fbf901f6a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a003727f261910c4f666532f9374d9dadaa72a69c2d4171955caa71e7a98adc447a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000083020000808502540be400808000a0000000000000000000000000000000000000000000000000000000000000000088000000000000000007c0c0", "blocks": [ { - "rlp": "0xf90263f901faa0107e91b6c929ab8d50e2de4f3952d602f5531b3f5348430b4005fbbf9d195375a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347942adc25665018aa1fe0e6bc666dac8fc2697ff9baa0a48abc194fdd8e58a32a90874e9144e19eb68306ec5e51bca9389d1043eeb20fa08151d548273f6683169524b66ca9fe338b9ce42bc3540046c828fd939ae23bcba0c598f69a5674cae9337261b669970e24abc0b46e6d284372a239ec8ccbf20b0ab901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000083020000018502540be40082a8618203e800a0000000000000000000000000000000000000000000000000000000000000000088000000000000000007f863f861800a8405f5e10094100000000000000000000000000000000000000080801ba07e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37a05f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509bc0", "blockHeader": { - "parentHash": "0x107e91b6c929ab8d50e2de4f3952d602f5531b3f5348430b4005fbbf9d195375", + "parentHash": "0xd8357264cc6251a5396f5fbae1e3b41fef2217b931b6da35e07cffc95838bf0b", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", "coinbase": "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", - "stateRoot": "0xa48abc194fdd8e58a32a90874e9144e19eb68306ec5e51bca9389d1043eeb20f", + "stateRoot": "0x802b7a48210549d8f4b5662c44aede70132d55a42f09364a049bf14dd776ab9c", "transactionsTrie": "0x8151d548273f6683169524b66ca9fe338b9ce42bc3540046c828fd939ae23bcb", - "receiptTrie": "0xc598f69a5674cae9337261b669970e24abc0b46e6d284372a239ec8ccbf20b0a", - "bloom": 
"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "receiptTrie": "0x4b93b3c0006d672c5dfd4094132d3e8acd463e7cb018f86df29136c9a399d0b6", + "bloom": "0x04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000", "difficulty": "0x020000", "number": "0x01", "gasLimit": "0x02540be400", - "gasUsed": "0xa861", + "gasUsed": "0xab63", "timestamp": "0x03e8", "extraData": "0x00", "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "nonce": "0x0000000000000000", "baseFeePerGas": "0x07", - "hash": "0xe05293fe6050385e463d93c310bc52f87715f509aeb036455bbe4597cf36706a" + "hash": "0x771beea22a98357c1fce6a4955c7023c95d515f5f3f761f8b5abd23d0c1805b5" }, - "blocknumber": "1", "transactions": [ { "type": "0x00", @@ -64,50 +99,34 @@ "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" } ], - "uncleHeaders": [] + "uncleHeaders": [], + "receipts": [ + { + "transactionHash": "0xdb1e409d11d92e6e8b3825ec82dff14f3661f1247c0d306ed4ff6aa22b0987f4", + "cumulativeGasUsed": "0xab63", + "bloom": "0x04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000", + "logs": [ + { + "address": "0x1000000000000000000000000000000000000000", + "topics": [ + "0x0000000000000000000000000000000000000000000000000000000000000002" + ], + "data": "0x00" + } + ], + "rlp": "0xf901430182ab63b9010004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000f83af838941000000000000000000000000000000000000000e1a0000000000000000000000000000000000000000000000000000000000000000200", + "status": true, + "type": "0x00" + } + ], + "rlp": 
"0xf90263f901faa0d8357264cc6251a5396f5fbae1e3b41fef2217b931b6da35e07cffc95838bf0ba01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347942adc25665018aa1fe0e6bc666dac8fc2697ff9baa0802b7a48210549d8f4b5662c44aede70132d55a42f09364a049bf14dd776ab9ca08151d548273f6683169524b66ca9fe338b9ce42bc3540046c828fd939ae23bcba04b93b3c0006d672c5dfd4094132d3e8acd463e7cb018f86df29136c9a399d0b6b901000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000083020000018502540be40082ab638203e800a0000000000000000000000000000000000000000000000000000000000000000088000000000000000007f863f861800a8405f5e10094100000000000000000000000000000000000000080801ba07e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37a05f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509bc0", + "blocknumber": "1" } ], - "lastblockhash": "0xe05293fe6050385e463d93c310bc52f87715f509aeb036455bbe4597cf36706a", - "pre": { - "0x1000000000000000000000000000000000000000": { - "nonce": "0x00", - "balance": "0x00", - "code": "0x4660015500", - "storage": {} - }, - "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { - "nonce": "0x00", - "balance": "0x3635c9adc5dea00000", - "code": "0x", - "storage": {} - } - }, - "postState": { - "0x1000000000000000000000000000000000000000": { - "nonce": "0x00", - "balance": "0x00", - "code": "0x4660015500", - "storage": { - "0x01": "0x01" - } - }, - "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": { - "nonce": "0x00", - "balance": "0x1bc16d674ec9f923", - "code": "0x", - "storage": {} - }, - "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { - "nonce": "0x01", - "balance": "0x3635c9adc5de996c36", - "code": "0x", - "storage": {} - } - }, "sealEngine": "NoProof", - "config": { - "network": "London", - "chainid": "0x01" + "_info": { + "hash": "0xc2da7ab682d573a2ff88b987526c1ae2d5abac2c738b575294b7976c5358078b", + "fixture_format": "blockchain_test" } } -} +} \ No newline at end of file diff --git a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_paris_blockchain_test_engine_tx_type_0.json b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_paris_blockchain_test_engine_tx_type_0.json index 4d4711e47a..a2f767b425 100644 --- a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_paris_blockchain_test_engine_tx_type_0.json +++ b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_paris_blockchain_test_engine_tx_type_0.json @@ -1,16 +1,16 @@ { "000/my_chain_id_test/Paris/tx_type_0": { "_info": { - "hash": "0xf07acbb0efdbea6064fa443f0c8a74de6bb3e9895a7717759deedb99310eec4e", + "hash": "0xfb3bfb73acf7ccc1592d06a570cf2e95eec18c6335bd095e425e814c8bb3433d", "fixture_format": "blockchain_test_engine" }, "network": "Paris", - "lastblockhash": "0xe92eedff2a0489bd861f528e248994b6791b0f5b845d90b34c68bc8cbc51c369", + "lastblockhash": "0xc24d78b58b7ccc57effaee66f54e3c6c2c79de399568432ed1aa34c66475c2ee", "genesisBlockHeader": { "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", "coinbase": 
"0x0000000000000000000000000000000000000000", - "stateRoot": "0xaff9f63320a482f8c4e4f15f659e6a7ac382138fbbb6919243b0cba4c5988a5a", + "stateRoot": "0x03727f261910c4f666532f9374d9dadaa72a69c2d4171955caa71e7a98adc447", "transactionsTrie": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "receiptTrie": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", @@ -23,25 +23,25 @@ "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "nonce": "0x0000000000000000", "baseFeePerGas": "0x07", - "hash": "0xb2b30c502e6c7cafc6324b17a6aedebf7bd14a0eec632d5a1b50eede93965a86" + "hash": "0xd1a7a221b753e7e951359b0785c57d498709224b299b0a925cefd5b4076c56dd" }, "engineNewPayloads": [ { "params": [ { - "parentHash": "0xb2b30c502e6c7cafc6324b17a6aedebf7bd14a0eec632d5a1b50eede93965a86", + "parentHash": "0xd1a7a221b753e7e951359b0785c57d498709224b299b0a925cefd5b4076c56dd", "feeRecipient": "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", - "stateRoot": "0x19919608275963e6e20a1191996f5b19db8208dd8df54097cfd2b9cb14f682b6", - "receiptsRoot": "0xc598f69a5674cae9337261b669970e24abc0b46e6d284372a239ec8ccbf20b0a", - "logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "stateRoot": "0xaf03c39e7a64dc7072c098673612e0e0aa75419aec9bf8a454da0332fed3ae09", + "receiptsRoot": "0x4b93b3c0006d672c5dfd4094132d3e8acd463e7cb018f86df29136c9a399d0b6", + "logsBloom": "0x04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000", "blockNumber": "0x1", "gasLimit": "0x2540be400", - "gasUsed": "0xa861", + "gasUsed": "0xab63", "timestamp": "0x3e8", "extraData": "0x00", "prevRandao": "0x0000000000000000000000000000000000000000000000000000000000000000", "baseFeePerGas": "0x7", - "blockHash": "0xe92eedff2a0489bd861f528e248994b6791b0f5b845d90b34c68bc8cbc51c369", + "blockHash": "0xc24d78b58b7ccc57effaee66f54e3c6c2c79de399568432ed1aa34c66475c2ee", "transactions": [ 
"0xf861800a8405f5e10094100000000000000000000000000000000000000080801ba07e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37a05f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509b" ] @@ -55,7 +55,7 @@ "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { @@ -69,20 +69,20 @@ "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": { "0x01": "0x01" } }, "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": { "nonce": "0x00", - "balance": "0x01f923", + "balance": "0x020229", "code": "0x", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { "nonce": "0x01", - "balance": "0x3635c9adc5de996c36", + "balance": "0x3635c9adc5de994e22", "code": "0x", "storage": {} } @@ -92,4 +92,4 @@ "chainid": "0x01" } } -} +} \ No newline at end of file diff --git a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_paris_state_test_tx_type_0.json b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_paris_state_test_tx_type_0.json index 245d253106..c017f1c564 100644 --- a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_paris_state_test_tx_type_0.json +++ b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_paris_state_test_tx_type_0.json @@ -1,7 +1,7 @@ { "000/my_chain_id_test/Paris/tx_type_0": { "_info": { - "hash": "0x9de75fac42e382815fa12e0252e64b901ed1b0225446209223c8a3569e1c2857", + "hash": "0xdf866e0391fe827cdcde77f06d63ca9fc3a8692a1048618c2cf12a5e6e6649ae", "fixture_format": "state_test" }, "env": { @@ -17,7 +17,7 @@ "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { @@ -46,8 +46,25 @@ "post": { "Paris": [ { - "hash": "0x19919608275963e6e20a1191996f5b19db8208dd8df54097cfd2b9cb14f682b6", - "logs": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", + "hash": "0xaf03c39e7a64dc7072c098673612e0e0aa75419aec9bf8a454da0332fed3ae09", + "logs": "0x6f322afda7b9376eb43961bc85e0a097c0118bb3545c42c888830702a95b18a5", + "receipt": { + "transactionHash": "0xdb1e409d11d92e6e8b3825ec82dff14f3661f1247c0d306ed4ff6aa22b0987f4", + "cumulativeGasUsed": "0xab63", + "bloom": "0x04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000", + "logs": [ + { + "address": "0x1000000000000000000000000000000000000000", + "data": "0x00", + "topics": [ + "0x0000000000000000000000000000000000000000000000000000000000000002" + ] + } + ], + "rlp": 
"0xf901430182ab63b9010004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000f83af838941000000000000000000000000000000000000000e1a0000000000000000000000000000000000000000000000000000000000000000200", + "status": true, + "type": "0x00" + }, "txbytes": "0xf861800a8405f5e10094100000000000000000000000000000000000000080801ba07e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37a05f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509b", "indexes": { "data": 0, @@ -58,20 +75,20 @@ "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": { "0x01": "0x01" } }, "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": { - "balance": "0x01f923", + "balance": "0x020229", "nonce": "0x00", "code": "0x", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { "nonce": "0x01", - "balance": "0x3635c9adc5de996c36", + "balance": "0x3635c9adc5de994e22", "code": "0x", "storage": {} } @@ -83,4 +100,4 @@ "chainid": "0x01" } } -} +} \ No newline at end of file diff --git a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_shanghai_blockchain_test_engine_tx_type_0.json b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_shanghai_blockchain_test_engine_tx_type_0.json index bdf89fb02e..a0f1290ba1 100644 --- a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_shanghai_blockchain_test_engine_tx_type_0.json +++ b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_shanghai_blockchain_test_engine_tx_type_0.json @@ -1,16 +1,16 @@ { "000/my_chain_id_test/Shanghai/tx_type_0": { "_info": { - "hash": "0x361818ee5736a45c10c34ec445bf872d280b2f2ad61c297cceb14772b1393006", + "hash": "0x0f064e5636b7eff1f95283ffd0ff3fc866f4e52232b6fb2e9d13959844a32068", "fixture_format": "blockchain_test_engine" }, - "lastblockhash": "0x9c10141361e180632f7973f4f3a0aed2baa5ebb776bae84caafdcc07a24933e8", + "lastblockhash": "0x31657672062664f60cb3b80949cdbb2e8df92337b1d526386c5bccf5ff518709", "network": "Shanghai", "genesisBlockHeader": { "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "uncleHash": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", "coinbase": "0x0000000000000000000000000000000000000000", - "stateRoot": "0xaff9f63320a482f8c4e4f15f659e6a7ac382138fbbb6919243b0cba4c5988a5a", + "stateRoot": "0x03727f261910c4f666532f9374d9dadaa72a69c2d4171955caa71e7a98adc447", "transactionsTrie": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "receiptTrie": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "bloom": 
"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", @@ -24,25 +24,25 @@ "nonce": "0x0000000000000000", "baseFeePerGas": "0x07", "withdrawalsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", - "hash": "0x62f038416dbe4a03ea264e084edd9024b3589b49e66f7d5528b72a138a34570f" + "hash": "0xbc604017bb703b299b2d56725813fe58bf755640c2e784335ca1c74ace774762" }, "engineNewPayloads": [ { "params": [ { - "parentHash": "0x62f038416dbe4a03ea264e084edd9024b3589b49e66f7d5528b72a138a34570f", + "parentHash": "0xbc604017bb703b299b2d56725813fe58bf755640c2e784335ca1c74ace774762", "feeRecipient": "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", - "stateRoot": "0x19919608275963e6e20a1191996f5b19db8208dd8df54097cfd2b9cb14f682b6", - "receiptsRoot": "0xc598f69a5674cae9337261b669970e24abc0b46e6d284372a239ec8ccbf20b0a", - "logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "stateRoot": "0xaf03c39e7a64dc7072c098673612e0e0aa75419aec9bf8a454da0332fed3ae09", + "receiptsRoot": "0x4b93b3c0006d672c5dfd4094132d3e8acd463e7cb018f86df29136c9a399d0b6", + "logsBloom": "0x04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000", "blockNumber": "0x1", "gasLimit": "0x2540be400", - "gasUsed": "0xa861", + "gasUsed": "0xab63", "timestamp": "0x3e8", "extraData": "0x00", "prevRandao": "0x0000000000000000000000000000000000000000000000000000000000000000", "baseFeePerGas": "0x7", - "blockHash": "0x9c10141361e180632f7973f4f3a0aed2baa5ebb776bae84caafdcc07a24933e8", + "blockHash": "0x31657672062664f60cb3b80949cdbb2e8df92337b1d526386c5bccf5ff518709", "transactions": [ "0xf861800a8405f5e10094100000000000000000000000000000000000000080801ba07e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37a05f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509b" ], @@ -57,7 +57,7 @@ "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { @@ -71,20 +71,20 @@ "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": 
"0x46600155600260016000a100", "storage": { "0x01": "0x01" } }, "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": { "nonce": "0x00", - "balance": "0x01f923", + "balance": "0x020229", "code": "0x", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { "nonce": "0x01", - "balance": "0x3635c9adc5de996c36", + "balance": "0x3635c9adc5de994e22", "code": "0x", "storage": {} } @@ -94,4 +94,4 @@ "chainid": "0x01" } } -} +} \ No newline at end of file diff --git a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_shanghai_state_test_tx_type_0.json b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_shanghai_state_test_tx_type_0.json index d26ffeaa8b..c34b7bc8f1 100644 --- a/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_shanghai_state_test_tx_type_0.json +++ b/packages/testing/src/execution_testing/specs/tests/fixtures/chainid_shanghai_state_test_tx_type_0.json @@ -1,7 +1,7 @@ { "000/my_chain_id_test/Shanghai/tx_type_0": { "_info": { - "hash": "0xdd6900920530f2ba834d659b952c023a5962c352512529da8ed9614d99a0c1b8", + "hash": "0x7cddf0155fe83996e0a3b835dffa494c9612e84ea5b5ac018eb8591c67cd5079", "fixture_format": "state_test" }, "env": { @@ -17,7 +17,7 @@ "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { @@ -46,8 +46,25 @@ "post": { "Shanghai": [ { - "hash": "0x19919608275963e6e20a1191996f5b19db8208dd8df54097cfd2b9cb14f682b6", - "logs": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", + "hash": "0xaf03c39e7a64dc7072c098673612e0e0aa75419aec9bf8a454da0332fed3ae09", + "logs": "0x6f322afda7b9376eb43961bc85e0a097c0118bb3545c42c888830702a95b18a5", + "receipt": { + "cumulativeGasUsed": "0xab63", + "logs": [ + { + "address": "0x1000000000000000000000000000000000000000", + "data": "0x00", + "topics": [ + "0x0000000000000000000000000000000000000000000000000000000000000002" + ] + } + ], + "bloom": "0x04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000", + "transactionHash": "0xdb1e409d11d92e6e8b3825ec82dff14f3661f1247c0d306ed4ff6aa22b0987f4", + "rlp": "0xf901430182ab63b9010004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000f83af838941000000000000000000000000000000000000000e1a0000000000000000000000000000000000000000000000000000000000000000200", + "status": true, + "type": "0x00" + }, "txbytes": 
"0xf861800a8405f5e10094100000000000000000000000000000000000000080801ba07e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37a05f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509b", "indexes": { "data": 0, @@ -58,20 +75,20 @@ "0x1000000000000000000000000000000000000000": { "nonce": "0x00", "balance": "0x00", - "code": "0x4660015500", + "code": "0x46600155600260016000a100", "storage": { "0x01": "0x01" } }, "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": { - "balance": "0x01f923", + "balance": "0x020229", "nonce": "0x00", "code": "0x", "storage": {} }, "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { "nonce": "0x01", - "balance": "0x3635c9adc5de996c36", + "balance": "0x3635c9adc5de994e22", "code": "0x", "storage": {} } @@ -83,4 +100,4 @@ "chainid": "0x01" } } -} +} \ No newline at end of file diff --git a/packages/testing/src/execution_testing/specs/tests/test_expect.py b/packages/testing/src/execution_testing/specs/tests/test_expect.py index 5cb5d0519c..6a5e69a10e 100644 --- a/packages/testing/src/execution_testing/specs/tests/test_expect.py +++ b/packages/testing/src/execution_testing/specs/tests/test_expect.py @@ -7,6 +7,8 @@ from execution_testing.base_types import ( Account, Address, + Bytes, + Hash, Storage, TestAddress, TestPrivateKey, @@ -24,15 +26,18 @@ Alloc, Environment, Transaction, + TransactionLog, TransactionReceipt, ) from ..blockchain import BlockchainEngineFixture, BlockchainTest from ..helpers import ( ExecutionExceptionMismatchError, + LogMismatchError, TransactionReceiptMismatchError, UnexpectedExecutionFailError, UnexpectedExecutionSuccessError, + verify_log, ) from ..state import StateTest @@ -378,7 +383,9 @@ def test_post_account_mismatch( Transaction( secret_key=TestPrivateKey, error=TransactionException.INTRINSIC_GAS_TOO_LOW, - expected_receipt=TransactionReceipt(gas_used=21_000), + expected_receipt=TransactionReceipt( + cumulative_gas_used=21_000 + ), ), UnexpectedExecutionSuccessError, id="TransactionUnexpectedExecutionSuccessError", @@ -387,7 +394,9 @@ def test_post_account_mismatch( Transaction( secret_key=TestPrivateKey, gas_limit=20_999, - expected_receipt=TransactionReceipt(gas_used=21_000), + expected_receipt=TransactionReceipt( + cumulative_gas_used=21_000 + ), ), UnexpectedExecutionFailError, id="TransactionUnexpectedExecutionFailError", @@ -395,7 +404,9 @@ def test_post_account_mismatch( pytest.param( Transaction( secret_key=TestPrivateKey, - expected_receipt=TransactionReceipt(gas_used=21_001), + expected_receipt=TransactionReceipt( + cumulative_gas_used=21_001 + ), ), TransactionReceiptMismatchError, id="TransactionReceiptMismatchError", @@ -404,7 +415,9 @@ def test_post_account_mismatch( Transaction( secret_key=TestPrivateKey, gas_limit=20_999, - expected_receipt=TransactionReceipt(gas_used=21_001), + expected_receipt=TransactionReceipt( + cumulative_gas_used=21_001 + ), ), UnexpectedExecutionFailError, id="TransactionUnexpectedExecutionFailError+TransactionReceiptMismatchError", @@ -413,7 +426,9 @@ def test_post_account_mismatch( Transaction( secret_key=TestPrivateKey, error=TransactionException.INTRINSIC_GAS_TOO_LOW, - expected_receipt=TransactionReceipt(gas_used=21_001), + expected_receipt=TransactionReceipt( + cumulative_gas_used=21_001 + ), ), UnexpectedExecutionSuccessError, id="TransactionUnexpectedExecutionSuccessError+TransactionReceiptMismatchError", @@ -543,3 +558,148 @@ def test_block_intermediate_state( post=block_3.expected_post_state, blocks=[block_1, block_2, block_3], ).generate(t8n=default_t8n, 
fixture_format=fixture_format) + + +# Log verification tests +@pytest.mark.parametrize( + "expected_log,actual_log,should_raise", + [ + pytest.param( + TransactionLog( + address=Address(0x100), + topics=[Hash(b"\x01" * 32)], + data=Bytes(b"\x02" * 32), + ), + TransactionLog( + address=Address(0x100), + topics=[Hash(b"\x01" * 32)], + data=Bytes(b"\x02" * 32), + ), + False, + id="matching_logs", + ), + pytest.param( + TransactionLog( + address=Address(0x100), + ), + TransactionLog( + address=Address(0x200), + topics=[Hash(b"\x01" * 32)], + data=Bytes(b"\x02" * 32), + ), + True, + id="address_mismatch", + ), + pytest.param( + TransactionLog( + topics=[Hash(b"\x01" * 32)], + ), + TransactionLog( + address=Address(0x100), + topics=[Hash(b"\x02" * 32)], + data=Bytes(b"\x02" * 32), + ), + True, + id="topics_mismatch", + ), + pytest.param( + TransactionLog( + data=Bytes(b"\x01" * 32), + ), + TransactionLog( + address=Address(0x100), + topics=[Hash(b"\x01" * 32)], + data=Bytes(b"\x02" * 32), + ), + True, + id="data_mismatch", + ), + pytest.param( + TransactionLog( + address=None, + topics=None, + data=None, + ), + TransactionLog( + address=Address(0x100), + topics=[Hash(b"\x01" * 32)], + data=Bytes(b"\x02" * 32), + ), + False, + id="no_fields_specified", + ), + ], +) +def test_verify_log( + expected_log: TransactionLog, + actual_log: TransactionLog, + should_raise: bool, +) -> None: + """Test verify_log function for log field mismatches.""" + if should_raise: + with pytest.raises(LogMismatchError): + verify_log(0, 0, expected_log, actual_log) + else: + verify_log(0, 0, expected_log, actual_log) + + +# Log mismatch integration tests using Amsterdam fork (EIP-7708) +@pytest.mark.parametrize( + "mismatch_type", + [ + pytest.param("address", id="log_address_mismatch"), + pytest.param("topics", id="log_topics_mismatch"), + pytest.param("data", id="log_data_mismatch"), + ], +) +def test_log_mismatch_during_generation( + default_t8n: TransitionTool, + mismatch_type: str, +) -> None: + """ + Test that log mismatches raise LogMismatchError during test generation. 
+ """ + from execution_testing.forks import Amsterdam + + # EIP-7708 transfer log constants + system_address = Address(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE) + + # Create a simple transfer transaction + recipient = Address(0x100) + transfer_value = 1000 + + # Create intentionally wrong expected logs based on mismatch type + if mismatch_type == "address": + wrong_log = TransactionLog( + address=Address(0x1234), # Wrong address, should be system_address + ) + elif mismatch_type == "topics": + wrong_log = TransactionLog( + address=system_address, + topics=[Hash(b"\x00" * 32)], # Wrong topic + ) + else: # data + wrong_log = TransactionLog( + address=system_address, + data=Bytes((9999).to_bytes(32, "big")), # Wrong data + ) + + tx = Transaction( + secret_key=TestPrivateKey, + to=recipient, + value=transfer_value, + expected_receipt=TransactionReceipt(logs=[wrong_log]), + ) + + pre = Alloc({TestAddress: Account(balance=10**18)}) + + state_test = StateTest( + env=Environment(), + pre=pre, + post={}, # Empty post to skip post-state verification + tx=tx, + fork=Amsterdam, + ) + + with pytest.raises(LogMismatchError): + state_test.generate(t8n=default_t8n, fixture_format=StateFixture) diff --git a/packages/testing/src/execution_testing/specs/tests/test_fixtures.py b/packages/testing/src/execution_testing/specs/tests/test_fixtures.py index 90680eb05d..21d5267857 100644 --- a/packages/testing/src/execution_testing/specs/tests/test_fixtures.py +++ b/packages/testing/src/execution_testing/specs/tests/test_fixtures.py @@ -152,10 +152,10 @@ def test_fill_state_test( number=1, timestamp=1000, ) - + contract_code = Op.SSTORE(1, Op.CHAINID) + Op.LOG1(0, 1, 2) + Op.STOP pre = { 0x1000000000000000000000000000000000000000: Account( - code="0x4660015500" + code=contract_code ), "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": Account( balance=1000000000000000000000 @@ -189,7 +189,7 @@ def test_fill_state_test( post = { "0x1000000000000000000000000000000000000000": Account( - code="0x4660015500", storage={"0x01": "0x01"} + code=contract_code, storage={"0x01": "0x01"} ), } @@ -545,6 +545,12 @@ def test_fill_blockchain_valid_txs( # noqa: D102 BlockchainEngineFixtureCommon, ) + with open("/tmp/actual.json", "w") as f: + f.write( + json.dumps( + blockchain_test_fixture.json_dict_with_info(), indent=4 + ) + ) assert isinstance( blockchain_test_fixture, (BlockchainFixtureCommon, BlockchainEngineFixtureCommon), @@ -566,6 +572,7 @@ def test_fill_blockchain_valid_txs( # noqa: D102 remove_info_metadata(fixture) assert fixture_name in fixture assert fixture_name in expected + assert fixture[fixture_name] == expected[fixture_name] @pytest.mark.parametrize("fork", [London], indirect=True) @@ -946,4 +953,12 @@ def test_fill_blockchain_invalid_txs( remove_info_metadata(fixture) assert fixture_name in fixture assert fixture_name in expected - assert fixture[fixture_name] == expected[fixture_name] + with open("/tmp/actual.json", "w") as f: + f.write( + json.dumps( + generated_fixture.json_dict_with_info(hash_only=True), indent=4 + ) + ) + assert fixture[fixture_name] == expected[fixture_name], ( + f"EXPECTED: {json.dumps(expected[fixture_name])}" + ) diff --git a/packages/testing/src/execution_testing/test_types/__init__.py b/packages/testing/src/execution_testing/test_types/__init__.py index ef30805950..c87148f92a 100644 --- a/packages/testing/src/execution_testing/test_types/__init__.py +++ b/packages/testing/src/execution_testing/test_types/__init__.py @@ -30,7 +30,7 @@ compute_deterministic_create2_address, ) from 
.phase_manager import TestPhase, TestPhaseManager -from .receipt_types import TransactionReceipt +from .receipt_types import TransactionLog, TransactionReceipt from .request_types import ( ConsolidationRequest, DepositRequest, @@ -77,6 +77,7 @@ "TestPhaseManager", "Transaction", "TransactionDefaults", + "TransactionLog", "TransactionReceipt", "TransactionTestMetadata", "TransactionType", diff --git a/packages/testing/src/execution_testing/test_types/receipt_types.py b/packages/testing/src/execution_testing/test_types/receipt_types.py index 66b1216daf..b4bbf9b95a 100644 --- a/packages/testing/src/execution_testing/test_types/receipt_types.py +++ b/packages/testing/src/execution_testing/test_types/receipt_types.py @@ -17,15 +17,15 @@ class TransactionLog(CamelModel): """Transaction log.""" - address: Address - topics: List[Hash] - data: Bytes - block_number: HexNumber - transaction_hash: Hash - transaction_index: HexNumber - block_hash: Hash - log_index: HexNumber - removed: bool + address: Address | None = None + topics: List[Hash] | None = None + data: Bytes | None = None + block_number: HexNumber | None = None + transaction_hash: Hash | None = None + transaction_index: HexNumber | None = None + block_hash: Hash | None = None + log_index: HexNumber | None = None + removed: bool | None = None class ReceiptDelegation(CamelModel): @@ -44,25 +44,25 @@ class TransactionReceipt(CamelModel): def strip_extra_fields(cls, data: Any) -> Any: """Strip extra fields from t8n tool output not part of model.""" if isinstance(data, dict): - # t8n tool returns 'succeeded' which is redundant with 'status' - data.pop("succeeded", None) - # t8n tool may return 'post_state' which is not part of this model - data.pop("post_state", None) - data.pop("postState", None) # geth (1.16+) returns extra fields in receipts data.pop("type", None) data.pop("blockNumber", None) return data transaction_hash: Hash | None = None - gas_used: HexNumber | None = None + post_state: Hash | None = Field( + None, validation_alias=AliasChoices("post_state", "postState") + ) root: Bytes | None = None - status: HexNumber | None = None + status: HexNumber | None = Field( + None, validation_alias=AliasChoices("status", "succeeded") + ) cumulative_gas_used: HexNumber | None = None - logs_bloom: Bloom | None = Field( + bloom: Bloom | None = Field( None, validation_alias=AliasChoices("logs_bloom", "logsBloom", "bloom") ) logs: List[TransactionLog] | None = None + gas_used: HexNumber | None = None contract_address: Address | None = None effective_gas_price: HexNumber | None = None block_hash: Hash | None = None diff --git a/packages/testing/src/execution_testing/test_types/transaction_types.py b/packages/testing/src/execution_testing/test_types/transaction_types.py index cb1267b94d..2c26953666 100644 --- a/packages/testing/src/execution_testing/test_types/transaction_types.py +++ b/packages/testing/src/execution_testing/test_types/transaction_types.py @@ -8,7 +8,6 @@ import ethereum_rlp as eth_rlp from coincurve.keys import PrivateKey, PublicKey -from ethereum_types.numeric import Uint from pydantic import ( AliasChoices, BaseModel, @@ -18,7 +17,6 @@ model_serializer, model_validator, ) -from trie import HexaryTrie from execution_testing.base_types import ( AccessList, @@ -759,14 +757,6 @@ def serializable_list(self) -> Any: """ return self.rlp() if self.ty > 0 else self.to_list(signing=False) - @staticmethod - def list_root(input_txs: List["Transaction"]) -> Hash: - """Return transactions root of a list of transactions.""" - t = 
HexaryTrie(db={}) - for i, tx in enumerate(input_txs): - t.set(eth_rlp.encode(Uint(i)), tx.rlp()) - return Hash(t.root_hash) - @staticmethod def list_blob_versioned_hashes( input_txs: List["Transaction"], diff --git a/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py b/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py index 544838a5db..5595c38960 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/t8n_types.py @@ -413,9 +413,22 @@ def json_encode_receipts(self) -> Any: assert hasattr(receipt, "post_state") receipt_dict["post_state"] = "0x" + receipt.post_state.hex() - receipt_dict["gasUsed"] = hex(receipt.cumulative_gas_used) + receipt_dict["cumulativeGasUsed"] = hex( + receipt.cumulative_gas_used + ) receipt_dict["bloom"] = "0x" + receipt.bloom.hex() + # Add logs to receipts + logs_json = [] + for log in receipt.logs: + log_dict = { + "address": "0x" + log.address.hex(), + "topics": ["0x" + topic.hex() for topic in log.topics], + "data": "0x" + log.data.hex(), + } + logs_json.append(log_dict) + receipt_dict["logs"] = logs_json + receipts_json.append(receipt_dict) return receipts_json diff --git a/tests/cancun/eip4844_blobs/test_point_evaluation_precompile.py b/tests/cancun/eip4844_blobs/test_point_evaluation_precompile.py index 8a0e26aebf..a46cc06314 100644 --- a/tests/cancun/eip4844_blobs/test_point_evaluation_precompile.py +++ b/tests/cancun/eip4844_blobs/test_point_evaluation_precompile.py @@ -599,7 +599,7 @@ def test_tx_entry_point( access_list=access_list, to=Address(Spec.POINT_EVALUATION_PRECOMPILE_ADDRESS), gas_limit=call_gas + intrinsic_gas_cost, - expected_receipt=TransactionReceipt(gas_used=consumed_gas), + expected_receipt=TransactionReceipt(cumulative_gas_used=consumed_gas), ) post = { diff --git a/tests/cancun/eip5656_mcopy/test_mcopy_memory_expansion.py b/tests/cancun/eip5656_mcopy/test_mcopy_memory_expansion.py index 7dcc5fecc5..23f32896ed 100644 --- a/tests/cancun/eip5656_mcopy/test_mcopy_memory_expansion.py +++ b/tests/cancun/eip5656_mcopy/test_mcopy_memory_expansion.py @@ -135,7 +135,7 @@ def tx( # noqa: D103 access_list=tx_access_list, data=initial_memory, gas_limit=tx_gas_limit, - expected_receipt=TransactionReceipt(gas_used=tx_gas_limit), + expected_receipt=TransactionReceipt(cumulative_gas_used=tx_gas_limit), ) diff --git a/tests/prague/eip7623_increase_calldata_cost/test_execution_gas.py b/tests/prague/eip7623_increase_calldata_cost/test_execution_gas.py index cef1bb2a6f..7e8bc2c80f 100644 --- a/tests/prague/eip7623_increase_calldata_cost/test_execution_gas.py +++ b/tests/prague/eip7623_increase_calldata_cost/test_execution_gas.py @@ -87,7 +87,9 @@ def test_full_gas_consumption( Test executing a transaction that fully consumes its execution gas allocation. """ - tx.expected_receipt = TransactionReceipt(gas_used=tx.gas_limit) + tx.expected_receipt = TransactionReceipt( + cumulative_gas_used=tx.gas_limit + ) state_test( pre=pre, post={}, @@ -163,7 +165,9 @@ def test_gas_consumption_below_data_floor( """ Test executing a transaction that almost consumes the floor data cost. 
""" - tx.expected_receipt = TransactionReceipt(gas_used=tx_floor_data_cost) + tx.expected_receipt = TransactionReceipt( + cumulative_gas_used=tx_floor_data_cost + ) state_test( pre=pre, post={}, diff --git a/tests/prague/eip7623_increase_calldata_cost/test_refunds.py b/tests/prague/eip7623_increase_calldata_cost/test_refunds.py index 7e61fa058b..5d11c57cf4 100644 --- a/tests/prague/eip7623_increase_calldata_cost/test_refunds.py +++ b/tests/prague/eip7623_increase_calldata_cost/test_refunds.py @@ -330,7 +330,7 @@ def test_gas_refunds_from_data_floor( # (t8n) is verified against the expected receipt. # - During test consumption, this is reflected in the balance difference # and the state root. - tx.expected_receipt = TransactionReceipt(gas_used=gas_used) + tx.expected_receipt = TransactionReceipt(cumulative_gas_used=gas_used) state_test( pre=pre, post={ diff --git a/tests/prague/eip7702_set_code_tx/test_gas.py b/tests/prague/eip7702_set_code_tx/test_gas.py index 93c2747019..27cd2725ef 100644 --- a/tests/prague/eip7702_set_code_tx/test_gas.py +++ b/tests/prague/eip7702_set_code_tx/test_gas.py @@ -940,7 +940,7 @@ def test_gas_cost( authorization_list=authorization_list, access_list=access_list, sender=sender, - expected_receipt=TransactionReceipt(gas_used=gas_used), + expected_receipt=TransactionReceipt(cumulative_gas_used=gas_used), ) state_test( diff --git a/tests/prague/eip7702_set_code_tx/test_set_code_txs.py b/tests/prague/eip7702_set_code_tx/test_set_code_txs.py index ac5685a759..9cda00b380 100644 --- a/tests/prague/eip7702_set_code_tx/test_set_code_txs.py +++ b/tests/prague/eip7702_set_code_tx/test_set_code_txs.py @@ -2930,7 +2930,9 @@ def test_set_code_to_precompile_not_enough_gas_for_precompile_execution( value=1, authorization_list=[auth], # explicitly check expected gas, no precompile code executed - expected_receipt=TransactionReceipt(gas_used=intrinsic_gas - discount), + expected_receipt=TransactionReceipt( + cumulative_gas_used=intrinsic_gas - discount + ), ) state_test( diff --git a/tests/shanghai/eip3860_initcode/test_initcode.py b/tests/shanghai/eip3860_initcode/test_initcode.py index bc645c494d..cb7b3ae91e 100644 --- a/tests/shanghai/eip3860_initcode/test_initcode.py +++ b/tests/shanghai/eip3860_initcode/test_initcode.py @@ -336,7 +336,7 @@ def tx( error=tx_error, sender=sender, # The entire gas limit is expected to be consumed. 
- expected_receipt=TransactionReceipt(gas_used=gas_limit), + expected_receipt=TransactionReceipt(cumulative_gas_used=gas_limit), ) @pytest.fixture From 438bac1af095031e3ec15dcee73510cdaf7f8b65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=94=A1=E4=BD=B3=E8=AA=A0=20Louis=20Tsai?= <72684086+LouisTsai-Csie@users.noreply.github.com> Date: Mon, 2 Feb 2026 22:24:59 +0800 Subject: [PATCH 108/154] refactor(test-benchmark): update benchmark fork to Osaka (#2104) * refactor: cold access storage operations * chore: bump forks to osaka * fix(test,benchmark): cap data tests to rlp block limit * chore: apply suggested changes --------- Co-authored-by: fselmo --- .github/configs/feature.yaml | 9 +- .../compute/instruction/test_storage.py | 487 ++++++++++++------ .../scenario/test_transaction_types.py | 40 +- tox.ini | 2 +- 4 files changed, 354 insertions(+), 184 deletions(-) diff --git a/.github/configs/feature.yaml b/.github/configs/feature.yaml index 17ac97dce5..275888ff0b 100644 --- a/.github/configs/feature.yaml +++ b/.github/configs/feature.yaml @@ -9,16 +9,11 @@ develop: benchmark: evm-type: benchmark - fill-params: --no-html --fork=Prague --gas-benchmark-values 1,5,10,30,60,100,150 -m benchmark ./tests/benchmark - -benchmark_develop: - evm-type: benchmark - fill-params: --no-html --fork=Osaka --gas-benchmark-values 1,5,10,30,60,100,150 -m "benchmark" ./tests/benchmark - feature_only: true + fill-params: --no-html --fork=Osaka --gas-benchmark-values 1,5,10,30,60,100,150 -m benchmark ./tests/benchmark benchmark_fast: evm-type: benchmark - fill-params: --no-html --fork=Prague --gas-benchmark-values 100 -m "benchmark" ./tests/benchmark + fill-params: --no-html --fork=Osaka --gas-benchmark-values 100 -m "benchmark" ./tests/benchmark feature_only: true bal: diff --git a/tests/benchmark/compute/instruction/test_storage.py b/tests/benchmark/compute/instruction/test_storage.py index aa1c08725e..ef5bb02f6f 100644 --- a/tests/benchmark/compute/instruction/test_storage.py +++ b/tests/benchmark/compute/instruction/test_storage.py @@ -13,12 +13,13 @@ import pytest from execution_testing import ( Alloc, + AuthorizationTuple, BenchmarkTestFiller, Block, Bytecode, - Environment, ExtCallGenerator, Fork, + Hash, JumpLoopGenerator, Op, TestPhaseManager, @@ -75,12 +76,7 @@ def test_tstore( init_key = 42 setup = Op.PUSH1(init_key) - # If fixed_value is False, we use GAS as a cheap way of always - # storing a different value than the previous one. attack_block = Op.TSTORE(Op.DUP2, Op.GAS if not fixed_value else Op.DUP1) - - # If fixed_key is False, we mutate the key on every iteration of the - # big loop. cleanup = Op.POP + Op.GAS if not fixed_key else Bytecode() benchmark_test( @@ -91,13 +87,134 @@ def test_tstore( ) +def create_storage_initializer(fork: Fork) -> tuple[Bytecode, int, int]: + """ + Create a contract that initializes storage slots from calldata parameters. + + - CALLDATA[0..32] start slot (index) + - CALLDATA[32..64] slot count (num) + + storage[i] = i for i in [index, index + num). 
+ + Returns: (bytecode, loop_cost, overhead) + """ + prefix = ( + Op.CALLDATALOAD(0) # [index] + + Op.DUP1 # [index, index] + + Op.CALLDATALOAD(32) # [index, index, num] + + Op.ADD # [index, index + num] + ) + + loop = ( + Op.JUMPDEST + + Op.PUSH1(1) # [index, index + num, 1] + + Op.SWAP1 # [index, 1, index + num] + + Op.SUB # [index, index + num - 1] + + Op.SSTORE( + Op.DUP1, + Op.DUP1, + key_warm=False, + original_value=0, + current_value=0, + new_value=1, + ) + + Op.JUMPI(len(prefix), Op.GT(Op.DUP2, Op.DUP2)) + ) + + return prefix + loop, loop.gas_cost(fork), prefix.gas_cost(fork) + + +def create_benchmark_executor( + storage_action: StorageAction, + absent_slots: bool, + tx_result: TransactionResult, + fork: Fork, +) -> tuple[Bytecode, int, int]: + """ + Create a contract that executes benchmark operations. + + - CALLDATA[0..32] start slot (index) + - CALLDATA[32..64] slot count (num) + + Returns: (bytecode, loop_cost, overhead) + """ + prefix = ( + Op.CALLDATALOAD(0) # [index] + + Op.CALLDATALOAD(32) # [index, num] + ) + + slot_calculation = ( + Op.DUP2 # [index, num, index] + + Op.DUP2 # [index, num, index, num] + + Op.ADD # [index, num, index + num] + + Op.PUSH1(1) # [index, num, index + num, 1] + + Op.SWAP1 # [index, num, 1, index + num] + + Op.SUB # [index, num, index + num - 1] + ) + + original = 0 if absent_slots else 1 + + # [index, num, index + num - 1] + match storage_action: + case StorageAction.READ: + operation = Op.POP(Op.SLOAD.with_metadata(key_warm=False)) + case StorageAction.WRITE_SAME_VALUE: + new_value = 1 if absent_slots else original + operation = ( + Op.SSTORE( + Op.DUP1, + Op.DUP1, + key_warm=False, + original_value=original, + current_value=original, + new_value=new_value, + ) + + Op.POP + ) + case StorageAction.WRITE_NEW_VALUE: + operation = Op.SSTORE( + Op.SWAP1, + Op.NOT(0), + key_warm=False, + original_value=original, + current_value=original, + new_value=2**256 - 1, + ) + + # [index, num] + loop_condition = ( + Op.PUSH1(1) # [index, num, 1] + + Op.SWAP1 # [index, 1, num] + + Op.SUB # [index, num - 1] + + Op.DUP1 # [index, num - 1, num - 1] + + Op.ISZERO # [index, num - 1 == 0] + + Op.ISZERO # [index, num - 1 != 0] + ) + + match tx_result: + case TransactionResult.REVERT: + suffix = Op.REVERT(0, 0) + case TransactionResult.OUT_OF_GAS: + suffix = Bytecode() + case _: + suffix = Op.STOP + + loop = ( + Op.JUMPDEST + + slot_calculation + + operation + + Op.JUMPI(len(prefix), loop_condition) + ) + code = prefix + loop + suffix + + return code, loop.gas_cost(fork), (prefix + suffix).gas_cost(fork) + + @pytest.mark.parametrize( "storage_action,tx_result", [ pytest.param( - StorageAction.READ, - TransactionResult.SUCCESS, - id="SSLOAD", + StorageAction.READ, TransactionResult.SUCCESS, id="SSLOAD" ), pytest.param( StorageAction.WRITE_SAME_VALUE, @@ -131,161 +248,201 @@ def test_tstore( ), ], ) -@pytest.mark.parametrize( - "absent_slots", - [ - True, - False, - ], -) +@pytest.mark.parametrize("absent_slots", [True, False]) def test_storage_access_cold( benchmark_test: BenchmarkTestFiller, pre: Alloc, fork: Fork, storage_action: StorageAction, absent_slots: bool, - env: Environment, gas_benchmark_value: int, + tx_gas_limit: int, tx_result: TransactionResult, ) -> None: """ - Benchmark cold storage slot accesses. + Benchmark cold storage slot accesses using EIP-7702 delegation. 
+ + The authority EOA delegates to: + - StorageInitializer: storage[i] = i for each slot (absent_slots=False) + - BenchmarkExecutor: performs the benchmark operation (SLOAD/SSTORE) """ + intrinsic_calc = fork.transaction_intrinsic_cost_calculator() gas_costs = fork.gas_costs() - intrinsic_gas_cost_calc = fork.transaction_intrinsic_cost_calculator() - - loop_cost = gas_costs.G_COLD_SLOAD # All accesses are always cold - if storage_action == StorageAction.WRITE_NEW_VALUE: - if not absent_slots: - loop_cost += gas_costs.G_STORAGE_RESET - else: - loop_cost += gas_costs.G_STORAGE_SET - elif storage_action == StorageAction.WRITE_SAME_VALUE: - if absent_slots: - loop_cost += gas_costs.G_STORAGE_SET - else: - loop_cost += gas_costs.G_WARM_SLOAD - elif storage_action == StorageAction.READ: - loop_cost += 0 # Only G_COLD_SLOAD is charged - - # Contract code - execution_code_body = Bytecode() - if storage_action == StorageAction.WRITE_SAME_VALUE: - # All the storage slots in the contract are initialized to their index. - # That is, storage slot `i` is initialized to `i`. - execution_code_body = Op.SSTORE(Op.DUP1, Op.DUP1) - loop_cost += gas_costs.G_VERY_LOW * 2 - elif storage_action == StorageAction.WRITE_NEW_VALUE: - # The new value 2^256-1 is guaranteed to be different from the initial - # value. - execution_code_body = Op.SSTORE(Op.DUP2, Op.NOT(0)) - loop_cost += gas_costs.G_VERY_LOW * 3 - elif storage_action == StorageAction.READ: - execution_code_body = Op.POP(Op.SLOAD(Op.DUP1)) - loop_cost += gas_costs.G_VERY_LOW + gas_costs.G_BASE - - # Add costs jump-logic costs - loop_cost += ( - gas_costs.G_JUMPDEST # Prefix Jumpdest - + gas_costs.G_VERY_LOW * 7 # ISZEROs, PUSHs, SWAPs, SUB, DUP - + gas_costs.G_HIGH # JUMPI - ) - prefix_cost = ( - gas_costs.G_VERY_LOW # Target slots push + executor_code, exec_loop_cost, exec_overhead = create_benchmark_executor( + storage_action, absent_slots, tx_result, fork + ) + initializer_code, init_loop_cost, init_overhead = ( + create_storage_initializer(fork) ) - suffix_cost = 0 - if tx_result == TransactionResult.REVERT: - suffix_cost = ( - gas_costs.G_VERY_LOW * 2 # Revert PUSHs - ) + authority = pre.fund_eoa(amount=0) + initializer_addr = pre.deploy_contract(code=initializer_code) + executor_addr = pre.deploy_contract(code=executor_code) - num_target_slots = ( - gas_benchmark_value - - intrinsic_gas_cost_calc() - - prefix_cost - - suffix_cost - ) // loop_cost - if tx_result == TransactionResult.OUT_OF_GAS: - # Add an extra slot to make it run out-of-gas - num_target_slots += 1 - - code_prefix = Op.PUSH4(num_target_slots) + Op.JUMPDEST - code_loop = execution_code_body + Op.JUMPI( - len(code_prefix) - 1, - Op.PUSH1(1) + Op.SWAP1 + Op.SUB + Op.DUP1 + Op.ISZERO + Op.ISZERO, - ) - execution_code = code_prefix + code_loop + delegation_intrinsic = intrinsic_calc(authorization_list_or_count=1) + max_intrinsic = intrinsic_calc(calldata=bytes([0xFF] * 64)) - if tx_result == TransactionResult.REVERT: - execution_code += Op.REVERT(0, 0) - else: - execution_code += Op.STOP + # Number of slots that can be processed in the execution phase + num_target_slots = 0 + current_slot = 1 + gas_remaining = gas_benchmark_value - delegation_intrinsic + while gas_remaining > 0: + tx_gas = min(tx_gas_limit, gas_remaining) + if tx_gas < max_intrinsic + exec_overhead + exec_loop_cost: + break - execution_code_address = pre.deploy_contract(code=execution_code) + slots = (tx_gas - max_intrinsic - exec_overhead) // exec_loop_cost - total_gas_used = ( - num_target_slots * loop_cost - + 
intrinsic_gas_cost_calc() - + prefix_cost - + suffix_cost - ) + calldata = bytes(Hash(current_slot)) + bytes(Hash(slots)) + execution_intrinsic = intrinsic_calc(calldata=calldata) + + slots = ( + tx_gas - execution_intrinsic - exec_overhead + ) // exec_loop_cost + + num_target_slots += slots + current_slot += slots + gas_remaining -= tx_gas + + blocks = [] + authority_nonce = 0 - # Contract creation - slots_init = Bytecode() + # Setup phase: initialize storage slots (only if absent_slots=False) if not absent_slots: - slots_init = Op.PUSH4(num_target_slots) + While( - body=Op.SSTORE(Op.DUP1, Op.DUP1), - condition=Op.PUSH1(1) - + Op.SWAP1 - + Op.SUB - + Op.DUP1 - + Op.ISZERO - + Op.ISZERO, - ) + setup_txs = [] + + with TestPhaseManager.setup(): + delegation_sender = pre.fund_eoa() + delegation_tx = Transaction( + to=delegation_sender, + gas_limit=tx_gas_limit, + sender=delegation_sender, + authorization_list=[ + AuthorizationTuple( + address=initializer_addr, + nonce=authority_nonce, + signer=authority, + ), + ], + ) + authority_nonce += 1 + + setup_txs.append(delegation_tx) + + current_slot = 1 + remaining_slots = num_target_slots + + while remaining_slots > 0: + if ( + tx_gas_limit + < max_intrinsic + init_overhead + init_loop_cost + ): + break + + slots = ( + tx_gas_limit - max_intrinsic - init_overhead + ) // init_loop_cost + slots = min(slots, remaining_slots) + + calldata = bytes(Hash(current_slot)) + bytes(Hash(slots)) + execution_intrinsic = intrinsic_calc(calldata=calldata) + + slots = ( + tx_gas_limit - execution_intrinsic - init_overhead + ) // init_loop_cost + slots = min(slots, remaining_slots) + + setup_txs.append( + Transaction( + to=authority, + gas_limit=tx_gas_limit, + data=Hash(current_slot) + Hash(slots), + sender=pre.fund_eoa(), + ) + ) + current_slot += slots + remaining_slots -= slots + + blocks.append(Block(txs=setup_txs)) + + # Execution phase: run benchmark + # For absent_slots=False, authority has storage, triggering refund + expected_gas_used = delegation_intrinsic + exec_txs = [] - # To create the contract, we apply the slots_init code to initialize the - # storage slots (int the case of absent_slots=False) and then copy the - # execution code to the contract. 
- creation_code = ( - slots_init - + Op.EXTCODECOPY( - address=execution_code_address, - dest_offset=0, - offset=0, - size=Op.EXTCODESIZE(execution_code_address), + if not absent_slots: + expected_gas_used -= min( + gas_costs.R_AUTHORIZATION_EXISTING_AUTHORITY, + delegation_intrinsic // 5, ) - + Op.RETURN(0, Op.MSIZE) - ) - sender_addr = pre.fund_eoa() + with TestPhaseManager.setup(): - setup_tx = Transaction( - to=None, - gas_limit=env.gas_limit, - data=creation_code, - sender=sender_addr, + delegation_sender = pre.fund_eoa() + delegation_tx = Transaction( + to=delegation_sender, + gas_limit=tx_gas_limit, + sender=delegation_sender, + authorization_list=[ + AuthorizationTuple( + address=executor_addr, + nonce=authority_nonce, + signer=authority, + ), + ], ) - blocks = [Block(txs=[setup_tx])] - - contract_address = compute_create_address(address=sender_addr, nonce=0) + exec_txs.append(delegation_tx) + current_slot = 1 + gas_remaining = gas_benchmark_value - delegation_intrinsic with TestPhaseManager.execution(): - op_tx = Transaction( - to=contract_address, - gas_limit=gas_benchmark_value, - sender=pre.fund_eoa(), - ) - blocks.append(Block(txs=[op_tx])) + while gas_remaining > 0: + tx_gas = min(tx_gas_limit, gas_remaining) + + if tx_gas < max_intrinsic + exec_overhead + exec_loop_cost: + break + + slots = (tx_gas - max_intrinsic - exec_overhead) // exec_loop_cost + + calldata = bytes(Hash(current_slot)) + bytes(Hash(slots)) + execution_intrinsic = intrinsic_calc(calldata=calldata) + slots = ( + tx_gas - execution_intrinsic - exec_overhead + ) // exec_loop_cost + + if tx_result == TransactionResult.OUT_OF_GAS: + slots = slots * 2 + + exec_txs.append( + Transaction( + to=authority, + gas_limit=tx_gas, + data=Hash(current_slot) + Hash(slots), + sender=pre.fund_eoa(), + ) + ) + + if tx_result == TransactionResult.OUT_OF_GAS: + expected_gas_used += tx_gas + else: + expected_gas_used += ( + intrinsic_calc( + calldata=calldata, + return_cost_deducted_prior_execution=True, + ) + + slots * exec_loop_cost + + exec_overhead + ) + current_slot += slots + + gas_remaining -= tx_gas + + blocks.append(Block(txs=exec_txs)) benchmark_test( blocks=blocks, - expected_benchmark_gas_used=( - total_gas_used - if tx_result != TransactionResult.OUT_OF_GAS - else gas_benchmark_value - ), + expected_benchmark_gas_used=expected_gas_used, ) @@ -319,9 +476,7 @@ def test_storage_access_cold_benchmark( target_opcode=Op.SLOAD if storage_action == StorageAction.READ else Op.SSTORE, - code_generator=ExtCallGenerator( - attack_block=attack_block, - ), + code_generator=ExtCallGenerator(attack_block=attack_block), ) @@ -340,25 +495,20 @@ def test_storage_access_warm( gas_benchmark_value: int, tx_gas_limit: int, ) -> None: - """ - Benchmark warm storage slot accesses. - """ + """Benchmark warm storage slot accesses.""" blocks = [] - # The warm access is done in storage slot 0. 
- - # Contract code - execution_code_body = Bytecode() - if storage_action == StorageAction.WRITE_SAME_VALUE: - execution_code_body = Op.SSTORE(0, Op.DUP1) - elif storage_action == StorageAction.WRITE_NEW_VALUE: - execution_code_body = Op.SSTORE(0, Op.GAS) - elif storage_action == StorageAction.READ: - execution_code_body = Op.POP(Op.SLOAD(0)) - - execution_code = Op.SLOAD(0) + While( - body=execution_code_body, - ) + match storage_action: + case StorageAction.WRITE_SAME_VALUE: + execution_code_body = Op.SSTORE(0, Op.DUP1) + case StorageAction.WRITE_NEW_VALUE: + execution_code_body = Op.SSTORE(0, Op.GAS) + case StorageAction.READ: + execution_code_body = Op.POP(Op.SLOAD(0)) + case _: + raise ValueError("Unspecified storage action") + + execution_code = Op.SLOAD(0) + While(body=execution_code_body) execution_code_address = pre.deploy_contract(code=execution_code) creation_code = ( @@ -391,12 +541,13 @@ def test_storage_access_warm( gas_limit = min( tx_gas_limit, gas_benchmark_value - i * tx_gas_limit ) - op_tx = Transaction( - to=contract_address, - gas_limit=gas_limit, - sender=pre.fund_eoa(), + txs.append( + Transaction( + to=contract_address, + gas_limit=gas_limit, + sender=pre.fund_eoa(), + ) ) - txs.append(op_tx) blocks.append(Block(txs=txs)) benchmark_test(blocks=blocks) @@ -421,19 +572,19 @@ def test_storage_access_warm_benchmark( Each iteration accesses a different storage slot (incrementing key) to ensure warm access costs are measured. """ - attack_block = Bytecode() - if storage_action == StorageAction.WRITE_SAME_VALUE: - attack_block = Op.SSTORE(Op.PUSH0, Op.PUSH0) - elif storage_action == StorageAction.WRITE_NEW_VALUE: - attack_block = Op.SSTORE(Op.PUSH0, Op.GAS) - elif storage_action == StorageAction.READ: - attack_block = Op.SLOAD(Op.PUSH0) + match storage_action: + case StorageAction.WRITE_SAME_VALUE: + attack_block = Op.SSTORE(Op.PUSH0, Op.PUSH0) + case StorageAction.WRITE_NEW_VALUE: + attack_block = Op.SSTORE(Op.PUSH0, Op.GAS) + case StorageAction.READ: + attack_block = Op.SLOAD(Op.PUSH0) + case _: + raise ValueError("Unspecified storage action") benchmark_test( target_opcode=Op.SLOAD if storage_action == StorageAction.READ else Op.SSTORE, - code_generator=ExtCallGenerator( - attack_block=attack_block, - ), + code_generator=ExtCallGenerator(attack_block=attack_block), ) diff --git a/tests/benchmark/compute/scenario/test_transaction_types.py b/tests/benchmark/compute/scenario/test_transaction_types.py index 23c00f0ce7..e9c047d9bd 100644 --- a/tests/benchmark/compute/scenario/test_transaction_types.py +++ b/tests/benchmark/compute/scenario/test_transaction_types.py @@ -190,15 +190,15 @@ def test_block_full_of_ether_transfers( @pytest.fixture -def total_cost_floor_per_token() -> int: - """Total cost floor per token.""" - return 10 +def total_cost_floor_per_token(fork: Fork) -> int: + """Total cost floor per token (EIP-7623).""" + return fork.gas_costs().G_TX_DATA_FLOOR_TOKEN_COST @pytest.fixture -def total_cost_standard_per_token() -> int: - """Total cost floor per token.""" - return 4 +def total_cost_standard_per_token(fork: Fork) -> int: + """Standard cost per token (EIP-7623).""" + return fork.gas_costs().G_TX_DATA_STANDARD_TOKEN_COST def calldata_generator( @@ -251,13 +251,37 @@ def test_block_full_data( tx_gas_limit: int, fork: Fork, ) -> None: - """Test a block with empty payload.""" + """Test a block full of calldata, respecting RLP size limits.""" iteration_count = math.ceil(gas_benchmark_value / tx_gas_limit) - gas_remaining = gas_benchmark_value + # check for 
EIP-7934 block RLP size limit and cap gas to stay under it + block_rlp_limit = fork.block_rlp_size_limit() + effective_gas = gas_benchmark_value + + if block_rlp_limit: + # Max calldata bytes at 99% of limit (Osaka: 8,388,608 * 0.99 ≈ 8.3 MB) + safe_calldata_bytes = int(block_rlp_limit * 0.99) + + # convert to gas: zero bytes = 10 gas/byte, non-zero = 40 gas/byte + gas_per_byte = ( + total_cost_floor_per_token + if zero_byte + else total_cost_floor_per_token * 4 + ) + # For zero bytes: 8.3MB * 10 = 83M gas just for calldata + max_calldata_gas = safe_calldata_bytes * gas_per_byte + # Add intrinsic cost per tx (Osaka): 83M + 6 txs * 21k ≈ 83.1M total + rlp_limited_gas = max_calldata_gas + iteration_count * intrinsic_cost + + # use the min between benchmark target and the RLP limit + effective_gas = min(gas_benchmark_value, rlp_limited_gas) + + gas_remaining = effective_gas total_gas_used = 0 txs = [] for _ in range(iteration_count): + if gas_remaining <= intrinsic_cost: + break gas_available = min(tx_gas_limit, gas_remaining) - intrinsic_cost data = calldata_generator( gas_available, diff --git a/tox.ini b/tox.ini index 36f8fc652d..1eb80d0d5e 100644 --- a/tox.ini +++ b/tox.ini @@ -156,7 +156,7 @@ commands = --evm-bin={env:EVM_BIN:evmone-t8n} \ --gas-benchmark-values 1 \ --generate-pre-alloc-groups \ - --fork Prague \ + --fork Osaka \ -m "benchmark and not slow" \ -n auto --maxprocesses 10 --dist=loadgroup \ --basetemp="{temp_dir}/pytest" \ From 5cb915f35aa813488fa6ac5594b2030abd13559a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=94=A1=E4=BD=B3=E8=AA=A0=20Louis=20Tsai?= <72684086+LouisTsai-Csie@users.noreply.github.com> Date: Mon, 2 Feb 2026 23:15:05 +0800 Subject: [PATCH 109/154] doc(test-benchmark): framework description and developer guideline (#2100) * doc: benchmark description * fix: doc linting issue --- docs/writing_tests/benchmarks.md | 313 +++++++++++++++++++++++++++++-- 1 file changed, 301 insertions(+), 12 deletions(-) diff --git a/docs/writing_tests/benchmarks.md b/docs/writing_tests/benchmarks.md index 818802b18f..ae8476c766 100644 --- a/docs/writing_tests/benchmarks.md +++ b/docs/writing_tests/benchmarks.md @@ -1,16 +1,85 @@ -# Benchmark Test Cases +# Benchmark Tests -Benchmark tests aim to maximize the usage of a specific opcode, precompile, or operation within a transaction or block. They are located in the `./tests/benchmarks` folder and the available test cases are documented in [test case reference](../tests/benchmark/index.md). +The EELS benchmark serves as a centralized hub for benchmarking test cases, evaluating execution layer performance across a wide range of scenarios, including gas limit testing, zkEVM, Bloatnet, gas repricing and EIPs that introduce new opcodes, precompiles, transaction types, or more use cases. -To fill a benchmark test, in addition to the usual test flags, you must include the `-m benchmark` flag. This is necessary because benchmark tests are ignored by default; they must be manually selected via the `benchmark` pytest marker (="tag"). This marker is applied to all tests under `./tests/benchmark/` automatically by the framework. +All benchmark tests are maintained under the `./tests/benchmark` directory. The benchmark suite is further organized based on whether tests require a pre-configured, stateful environment. 
+
+## Directory Structure
+
+The benchmark suite is organized as follows:
+
+```text
+tests/benchmark/
+├── compute/
+│   ├── instruction/  # Individual EVM opcodes
+│   ├── precompile/   # EVM precompiles
+│   └── scenario/     # Mix of operations, transaction types, etc.
+└── stateful/         # Pre-configured state environments required
+```
+
+There are multiple files under `instruction/`; check each file's docstring to see which opcodes it covers.
+
+### Stateful Benchmarks
+
+A subset of benchmark test cases runs on top of stateful environments (such as bloatnet or mainnet-like setups) in order to analyze how state size, structure, and access patterns influence performance. These tests may (1) pre-deploy contracts, (2) construct initial storage state, or (3) interact with pre-deployed contracts via stub addresses.
+
+Such tests are located under `./tests/benchmark/stateful`. When running them, users must select the `stateful` marker with `-m stateful`; otherwise the tests are ignored, even if the path is specified correctly.
+
+### Compute Benchmarks
+
+Other benchmark tests do not require any pre-state configuration. These benchmarks can run without pre-deployed contracts or initialized storage.
+
+These tests are located under `./tests/benchmark/compute`. When running them, users must select the `benchmark` marker with `-m benchmark`; otherwise the tests are ignored, even if the path is specified correctly.
+
+**Note:** Using `-m benchmark` under `tests/benchmark/stateful`, or `-m stateful` under `tests/benchmark/compute`, will cause the tests to be ignored. Make sure the marker matches the directory of the tests being executed.
 
 **Note:** Benchmark tests are now only available starting from the `Prague` fork. Tests targeting earlier forks (`Cancun` or prior) are not supported in benchmark mode.
 
-## Setting the Gas Limit for Benchmarking
+## Benchmark Modes
+
+### Fixed Opcode Count Mode
+
+In this mode, users either:
+
+- First generate an opcode-count configuration mapping file via `uv run benchmark_parser`, then run the benchmark test with the `--fixed-opcode-count` flag **without parameters**, or
+- Specify the opcode count directly via a CLI flag (e.g., `--fixed-opcode-count N`)
+
+The benchmark test wrapper then constructs a test that executes approximately `N × 1000` opcode invocations during execution, allowing for up to ±5% deviation in the final opcode count.
+
+This mode is primarily used for gas repricing analysis, where it enables:
+
+- Controlled opcode/precompile execution counts.
+- Measurement of execution time as a function of opcode count.
+- Derivation of regression models between opcode frequency and execution time.
+
+**Note:** Flag ordering matters: if `--fixed-opcode-count` is followed immediately by another flag, that flag may be incorrectly interpreted as its parameter.
+
+### Worst-Case Mode
+
+In worst-case mode, users specify a target block gas limit instead of an opcode count.
+By providing `--gas-benchmark-values N` (where `N` denotes the gas limit in millions), the benchmark construction process packs each block with as many instances as possible of the selected operation.
+
+This mode is designed for gas limit testing and gas repricing, where it enables:
+
+- Evaluating execution-layer performance under extreme, worst-case conditions for a given operation.
+- Identifying bottlenecks that only surface at high gas utilization levels.
+
+**Note:** For both benchmark modes, users may supply multiple values in a single invocation. For example:
+
+- `--gas-benchmark-values 1,2,3` runs the test with 1M, 2M, and 3M block gas limits
+- `--fixed-opcode-count 4,5` runs the test with approximately 4K and 5K opcode executions
+
+## Developing Benchmarks
+
+Before writing benchmark-specific tests, please refer to the [general documentation](./writing_a_new_test.md) for the fundamentals of writing tests in the EELS framework.
+
+### Environment Variables
+
+#### Accessing the Block Gas Limit
 
-To consume the full benchmark gas limit, use the `gas_benchmark_value` fixture as the gas limit:
+When using `--gas-benchmark-values`, do not read the block gas limit from `env.gas_limit`. Instead, tests consume the injected `gas_benchmark_value` parameter, which reflects the block gas limit of the current benchmark iteration.
 
-```py
+```python
 def test_benchmark(
     blockchain_test: BlockchainTestFiller,
     pre: Alloc,
@@ -19,11 +88,11 @@ def test_benchmark(
     ...
 ```
 
-You can specify the block gas limit used in benchmark tests by setting the `--gas-benchmark-values` flag. This flag accepts a comma-separated list of values (in millions of gas), e.g. `--gas-benchmark-values 1,10,45,60`. This example would run the test 4 times, using a `gas_benchmark_value` of 1M, 10M, 45M, and 60M respectively.
+For example, running the test with `--gas-benchmark-values 1,10,45,60` will execute the test 4 times, passing `gas_benchmark_value` as 1M, 10M, 45M, and 60M respectively.
 
-Do not configure the transaction/block gas limit to `env.gas_limit`. When running in benchmark mode, the test framework sets this value to a very large number (e.g., `1_000_000_000_000`), this setup allows the framework to reuse a single genesis file for all specified gas limits. I.e., the example below is invalid:
+Never configure the transaction or block gas limit to `env.gas_limit`. When running in benchmark mode, the test framework sets this value to a very large number (e.g., `1_000_000_000_000`); this setup allows the framework to reuse a single genesis file for all specified gas limits. The example below should therefore be avoided:
 
-```py
+```python
 def test_benchmark(
     blockchain_test: BlockchainTestFiller,
     pre: Alloc,
@@ -38,11 +107,214 @@ def test_benchmark(
     ...
 ```
 
-## Expected Gas Usage
+#### Referencing Transaction Gas Limit
+
+Since the Osaka fork, EIP-7825 introduces a transaction gas limit cap (approximately 16M). Instead of hardcoding this value in the test, use `fork.transaction_gas_limit_cap()` for a cleaner, fork-aware approach.
+
+The following helper fixture simplifies the logic of determining the transaction gas limit cap: it returns the cap if available and otherwise falls back to the block gas limit:
+
+```python
+@pytest.fixture
+def tx_gas_limit(fork: Fork, gas_benchmark_value: int) -> int:
+    """Return the transaction gas limit cap, or block gas limit if not available."""
+    return fork.transaction_gas_limit_cap() or gas_benchmark_value
+```
+
+Example usage: use the `tx_gas_limit` fixture to calculate how many transactions fit in the block:
+
+```python
+def test_benchmark(
+    blockchain_test: BlockchainTestFiller,
+    pre: Alloc,
+    gas_benchmark_value: int,
+    tx_gas_limit: int,
+):
+    ...
+    num_full_txs = gas_benchmark_value // tx_gas_limit
+    gas_for_last_tx = gas_benchmark_value % tx_gas_limit
+    ...
+```
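+
+A minimal sketch of the splitting logic itself (assuming, as in the other examples in this guide, that `contract_address` points at a contract deployed via `pre.deploy_contract`) could look like this:
+
+```python
+txs = []
+gas_remaining = gas_benchmark_value
+while gas_remaining > 0:
+    # Cap each transaction at the EIP-7825 limit; the last one spends the remainder.
+    gas_limit = min(tx_gas_limit, gas_remaining)
+    txs.append(
+        Transaction(
+            to=contract_address,
+            gas_limit=gas_limit,
+            sender=pre.fund_eoa(),
+        )
+    )
+    gas_remaining -= gas_limit
+```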
+
+#### Specifying Execution Semantics
+
+When constructing benchmark tests with multiple transactions or blocks, identifying which transaction is the actual benchmark transaction becomes difficult. `TestPhaseManager` is used to label transactions as belonging to either the setup or the execution phase.
+
+```python
+def test_complex_benchmark(
+    benchmark_test: BenchmarkTestFiller,
+    pre: Alloc,
+) -> None:
+    # Setup phase
+    with TestPhaseManager.setup():
+        setup_tx = Transaction(...)
+
+    # Execution phase
+    with TestPhaseManager.execution():
+        exec_tx = Transaction(...)
+
+    benchmark_test(
+        blocks=[Block(txs=[setup_tx]), Block(txs=[exec_tx])],
+        expected_benchmark_gas_used=...,
+    )
+```
+
+Import `TestPhaseManager` and use it to annotate each transaction or block with its corresponding phase. During analysis, transactions are filtered by this metadata, excluding setup transactions and measuring only the actual benchmark transactions.
+
+### BenchmarkTest Wrapper
+
+Within the EELS framework, tests can be written using existing fixtures such as `BlockchainTest` and `StateTest`. However, for benchmark scenarios, we strongly recommend using the `BenchmarkTest` wrapper, which encapsulates repetitive logic commonly required in benchmark test construction.
+
+Note that `BenchmarkTest` is a wrapper, not a new fixture type. It does not introduce a new fixture format, and therefore clients do not need to add special support for it. Internally, `BenchmarkTest` accepts user-provided parameters and converts them into the corresponding `BlockchainTest` representation.
+
+#### Mode 1: Using Custom Blocks
+
+This mode is suitable for complex scenarios that require multiple transactions, where each transaction's logic is completely different.
+
+```python
+def test_complex_benchmark(
+    benchmark_test: BenchmarkTestFiller,
+    pre: Alloc,
+) -> None:
+    ...
+    exec_tx_1 = Transaction(...)
+    exec_tx_2 = Transaction(...)
+    attack_block = Block(txs=[exec_tx_1, exec_tx_2])
+    ...
+    benchmark_test(
+        blocks=[attack_block],
+    )
+```
+
+#### Mode 2: Using a Single Transaction
+
+Users may also provide a single transaction directly. In this mode, the wrapper automatically generates multiple transactions to fully utilize the target block gas limit.
+
+For example, assuming a 60M block gas limit and a 16M transaction gas limit cap, the wrapper will construct three transactions with a 16M gas limit and a final transaction with a 12M gas limit.
+
+```python
+def test_simple_benchmark(
+    benchmark_test: BenchmarkTestFiller,
+    pre: Alloc,
+    gas_benchmark_value: int,
+) -> None:
+    contract_address = pre.deploy_contract(code=Op.PUSH1(1) + Op.STOP)
+    benchmark_test(
+        tx=Transaction(
+            to=contract_address,
+            gas_limit=gas_benchmark_value,
+            sender=pre.fund_eoa(),
+        ),
+    )
+```
+
+#### Mode 3: Using a Code Generator (Recommended)
+
+This mode allows users to provide a code generator that emits execution payloads dynamically. It is the recommended approach for most benchmark use cases, as it offers the greatest flexibility and reuse.
+
+Currently, EELS provides two built-in code generators, `JumpLoopGenerator` and `ExtCallGenerator`. Both generators accept the following components to construct the benchmark contracts:
-In benchmark mode, the developer should set the expected gas consumption using the `expected_benchmark_gas_used` field. Benchmark tests do not need to consume the full gas limit, instead, you could calculate and specify the expected usage. If `expected_benchmark_gas_used` is not set, the test will fall back to using `gas_benchmark_value` as the expected value.
+- `setup`: Code executed once before the attack loop
+- `attack_block`: The core operation to be benchmarked
+- `cleanup`: Optional cleanup logic executed after benchmarking
+
+In addition, users may customize transaction and contract construction via:
+
+- `tx_kwargs`: Transaction-level parameters (e.g., calldata, blob fields)
+- `code_padding_opcode`: If specified, the contract bytecode will be padded with the given opcode up to the maximum contract size
+
+##### JumpLoopGenerator
+
+`JumpLoopGenerator` maximizes the number of `attack_block` repetitions within a single contract by looping via `JUMP`. The benchmark construction repeats the `attack_block` as many times as possible.
+
+```python
+target_contract = (
+    setup
+    + JUMPDEST
+    + attack_block
+    + ...
+    + attack_block
+    + cleanup
+    + JUMP(len(setup))
+)
+```
+
+This generator is suitable when the benchmarked operation does **not** grow the EVM stack unboundedly, or when stack growth is explicitly managed (e.g., by pairing stack-producing opcodes with `POP`).
+
+##### ExtCallGenerator
+
+`ExtCallGenerator` constructs two contracts: (1) a target contract, which contains the benchmarked logic, and (2) a loop contract, which repeatedly calls into the target contract.
+
+In this design, the `attack_block` inside the target contract is repeated 1024 times, corresponding to the maximum EVM stack size, and the loop contract repeatedly invokes the target contract via `STATICCALL` to amplify execution.
+
+The contract structures are as follows:
+
+Target contract:
+
+```python
+target_contract = (
+    setup
+    + attack_block
+    + (repeat another 1022 times)
+    + attack_block
+    + cleanup  # usually empty
+)
+```
+
+Loop contract:
+
+```python
+attack_block = pop(staticcall(addr=target_contract, argsize=CALLDATASIZE))
+
+loop_contract = (
+    CALLDATACOPY(size=CALLDATASIZE)
+    + JUMPDEST
+    + attack_block
+    + ...
+    + attack_block
+    + cleanup
+    + JUMP(len(setup))
+)
+```
+
+`CALLDATACOPY` is required in the loop contract because some target operations need access to calldata, which is supplied in the transaction object. As a result, the loop contract must explicitly forward the calldata to the target contract: it is first copied from the transaction into memory via `CALLDATACOPY` and then passed to the target contract via `STATICCALL`.
+
+##### Choosing Between Generators
+
+`ExtCallGenerator` is particularly useful when benchmarking stack-growing opcodes (i.e., opcodes that push values onto the stack).
+
+For example:
+
+- When benchmarking `CALLDATASIZE` using `JumpLoopGenerator`, the `attack_block` must be written as `POP(CALLDATASIZE)` to avoid a stack overflow
+- With `ExtCallGenerator`, this restriction does not apply, as target contract execution naturally stops at the maximum stack size
+
+Based on experimental results, `ExtCallGenerator` is often more efficient than `JumpLoopGenerator`, as it requires fewer glue opcodes in the benchmarked execution path; the sketch below contrasts the two approaches for `CALLDATASIZE`.
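+
+For instance, a minimal sketch contrasting the two generators for `CALLDATASIZE` (assuming the `benchmark_test` filler and `Op` opcodes used throughout this guide; a real test would contain only one `benchmark_test` call) might look like:
+
+```python
+# JumpLoopGenerator: pair the stack-growing opcode with POP to keep the stack flat.
+benchmark_test(
+    target_opcode=Op.CALLDATASIZE,
+    code_generator=JumpLoopGenerator(attack_block=Op.POP(Op.CALLDATASIZE)),
+)
+
+# ExtCallGenerator: the bare opcode is fine; the target contract stops at the stack limit.
+benchmark_test(
+    target_opcode=Op.CALLDATASIZE,
+    code_generator=ExtCallGenerator(attack_block=Op.CALLDATASIZE),
+)
+```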
+
+**Note:** Users must provide exactly one of `tx`, `blocks`, or `code_generator` to `BenchmarkTest`; supplying more than one of these inputs at the same time is not allowed.
+
+##### Fixed Opcode Count Test Construction
+
+The fixed-opcode-count feature is currently limited to benchmark tests that:
+
+1. Use the `BenchmarkTest` wrapper, and
+2. Use a code generator (`JumpLoopGenerator` or `ExtCallGenerator`)
+
+If either condition is not met, the benchmark does not support fixed-opcode-count mode and the test will be ignored during the test-selection phase.
+
+As a result, the test construction logic for fixed-opcode-count mode **differs** from the general code-generation behavior described above.
+
+In fixed-opcode-count mode, both `ExtCallGenerator` and `JumpLoopGenerator` always construct a target contract and a loop contract. The target contract executes the `attack_block` exactly 1000 times, while the loop contract calls into the target contract N times.
+
+As a result, the total opcode execution count is `1000 * N`, which matches the semantics of the `--fixed-opcode-count N` flag.
+
+## Validating Benchmarks
+
+### Setting Expected Gas Usage
+
+In benchmark mode, set the expected gas consumption using the `expected_benchmark_gas_used` field if the test does not need to consume the full gas limit: calculate and specify the expected usage. If `expected_benchmark_gas_used` is not provided, the check falls back to using `gas_benchmark_value` as the expected value.
+
+This feature is primarily used in `worst-case` benchmark mode.
+
+```python
 @pytest.mark.valid_from("Prague")
 def test_empty_block(
     blockchain_test: BlockchainTestFiller,
@@ -62,3 +334,20 @@ This is a safety check to make sure the benchmark works as expected. For example
 
 This check helps catch such issues. As a result, the post-storage comparison method via `SSTORE` is no longer needed, thereby reducing the additional storage cost.
 
 However, in cases where it is difficult to determine the total gas usage, or if an alternative verification method is used, developers may set `skip_gas_used_validation` to `True` to disable the gas usage check.
+
+### Setting Target Operation
+
+For `fixed-opcode-count` mode, specify which opcode to target using the `target_opcode` parameter. The benchmark compares the executed count of `target_opcode` against the expected count for verification.
+
+```python
+def test_jumpdests(
+    benchmark_test: BenchmarkTestFiller,
+) -> None:
+    """Benchmark JUMPDEST instruction."""
+    benchmark_test(
+        target_opcode=Op.JUMPDEST,
+        code_generator=JumpLoopGenerator(attack_block=Op.JUMPDEST),
+    )
+```
+
+**Note:** This verification currently only works in `fill` mode, not in `execute-remote` mode.

From 3821cb41b03a9df1b5b389a9d0c0ce4b219487f0 Mon Sep 17 00:00:00 2001
From: CPerezz <37264926+CPerezz@users.noreply.github.com>
Date: Mon, 2 Feb 2026 16:44:04 +0100
Subject: [PATCH 110/154] feat(test-benchmark): add EIP-7825 tx splitting and
 fix & include stubs.json (#2112)

* enhance(bloatnet): add EIP-7825 transaction splitting support

Add support for EIP-7825 transaction gas limit cap (16M gas) to
bloatnet benchmark tests. When gas_benchmark_value exceeds tx_gas_limit,
tests now automatically split into multiple transactions.

Changes:
- Add tx_gas_limit fixture parameter to all bloatnet test functions
- Calculate num_txs based on gas_benchmark_value / tx_gas_limit
- Split attack contracts across multiple transactions with proper
  iteration count distribution
- Track salt_offset for factory-based tests to ensure unique contract
  addresses across transactions

This ensures bloatnet tests work correctly with gas values > 16M
while maintaining EIP-7825 compliance.
* enhance(bloatnet): add stubs.json with corrected token addresses Add stubs.json mapping test names to mainnet ERC-20 contract addresses: - 297 entries for 99 unique tokens (excluding 30GB_ERC20) - Fixed 22 incorrect addresses (ALT, AOA, cETH, CHZ, CRO, ENA, ETHFI, GTC, IMT, IMX, KOK, LEND, LOOKS, LYM, MITx, Monfter/Monavale, ONDO, POLY, WEPE, ZETA, ZSC) - Removed WELL token (doesn't exist on Ethereum mainnet) - All addresses verified to have deployed code via eth_getCode * fix(bloatnet): fix line length violations for ruff linter * fix(bloatnet): apply ruff format to ternary expressions Collapse ternary expressions to single lines as required by ruff format. --- tests/benchmark/stateful/bloatnet/stubs.json | 299 ++++++ .../stateful/bloatnet/test_multi_opcode.py | 862 ++++++++---------- .../stateful/bloatnet/test_single_opcode.py | 380 +++----- 3 files changed, 843 insertions(+), 698 deletions(-) create mode 100644 tests/benchmark/stateful/bloatnet/stubs.json diff --git a/tests/benchmark/stateful/bloatnet/stubs.json b/tests/benchmark/stateful/bloatnet/stubs.json new file mode 100644 index 0000000000..f10cc6c5e2 --- /dev/null +++ b/tests/benchmark/stateful/bloatnet/stubs.json @@ -0,0 +1,299 @@ +{ + "test_sload_empty_erc20_balanceof_30GB_ERC20": "0x19fc17d87D946BBA47ca276f7b06Ee5737c4679C", + "test_sload_empty_erc20_balanceof_XEN": "0x06450dEe7FD2Fb8E39061434BAbCFC05599a6Fb8", + "test_sload_empty_erc20_balanceof_USDT": "0xdAC17F958D2ee523a2206206994597C13D831ec7", + "test_sload_empty_erc20_balanceof_USDC": "0xA0b86991C6218B36c1d19D4a2E9Eb0CE3606EB48", + "test_sload_empty_erc20_balanceof_LPT": "0x58b6A8a3302369DAEc383334672404Ee733AB239", + "test_sload_empty_erc20_balanceof_SHIB": "0x95aD61B0a150d79219dCF64E1E6Cc01f0B64C4cE", + "test_sload_empty_erc20_balanceof_WETH": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", + "test_sload_empty_erc20_balanceof_G-CRE": "0xa3Ee21c306A700E682AbcDfE9bAA6A08F3820419", + "test_sload_empty_erc20_balanceof_MEME": "0xB131F4A55907B10d1F0A50d8Ab8FA09EC342CD74", + "test_sload_empty_erc20_balanceof_OMG": "0xd26114cD6EE289AccF82350c8d8487fedB8A0C07", + "test_sload_empty_erc20_balanceof_MATIC": "0x7d1Afa7B718fb893DB30A3abc0Cfc608AaCfEbB0", + "test_sload_empty_erc20_balanceof_stETH": "0xae7ab96520DE3A18E5e111B5EaAb095312D7fE84", + "test_sload_empty_erc20_balanceof_DAI": "0x6B175474E89094C44Da98b954EedeAC495271d0F", + "test_sload_empty_erc20_balanceof_PEPE": "0x6982508145454Ce325dDbE47a25d4eC3d2311933", + "test_sload_empty_erc20_balanceof_old": "0x0cf0ee63788A0849FE5297F3407f701E122CC023", + "test_sload_empty_erc20_balanceof_BAT": "0x0D8775F648430679A709E98d2b0Cb6250d2887EF", + "test_sload_empty_erc20_balanceof_UNI": "0x1F9840a85d5aF5bf1D1762F925BdADdC4201F984", + "test_sload_empty_erc20_balanceof_AMB": "0x4dc3643Dbc642b72C158E7F3d2FF232df61cB6CE", + "test_sload_empty_erc20_balanceof_HEX": "0x2b591e99afE9f32eAA6214f7B7629768c40eEb39", + "test_sload_empty_erc20_balanceof_CRO": "0xa0b73e1ff0b80914ab6fe0444e65848c4c34450b", + "test_sload_empty_erc20_balanceof_UCASH": "0x92e52a1A235d9A103D970901066CE910AAceFD37", + "test_sload_empty_erc20_balanceof_BNB": "0xB8c77482e45F1F44dE1745F52C74426C631bDd52", + "test_sload_empty_erc20_balanceof_GSE": "0xe530441f4f73bdb6dc2fa5af7c3fc5fd551ec838", + "test_sload_empty_erc20_balanceof_MANA": "0x0F5D2FB29fb7d3cFeE444A200298f468908cC942", + "test_sload_empty_erc20_balanceof_OCN": "0x4092678e4E78230F46A1534C0fBC8Fa39780892B", + "test_sload_empty_erc20_balanceof_EIGEN": "0xEC53BF9167F50cDEb3aE105F56099AaAb9061F83", + 
"test_sload_empty_erc20_balanceof_COMP": "0xc00e94Cb662C3520282E6f5717214004A7f26888", + "test_sload_empty_erc20_balanceof_cUSDC": "0x39AA39c021dfbaE8faC545936693aC917d5E7563", + "test_sload_empty_erc20_balanceof_sMEME": "0xc059A531B4234d05e9EF4aC51028f7E6156E2CcE", + "test_sload_empty_erc20_balanceof_SAND": "0x3845badade8e6dff049820680d1f14bd3903a5d0", + "test_sload_empty_erc20_balanceof_AAVE": "0x7Fc66500c84A76Ad7e9c93437bFc5AC33E2DDAe9", + "test_sload_empty_erc20_balanceof_ZRX": "0xE41d2489571d322189246DaFA5ebDe1F4699F498", + "test_sload_empty_erc20_balanceof_KOK": "0x9B9647431632AF44be02ddd22477Ed94d14AacAa", + "test_sload_empty_erc20_balanceof_APE": "0x4d224452801ACEd8B2F0aebe155379bb5D594381", + "test_sload_empty_erc20_balanceof_SAI": "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359", + "test_sload_empty_erc20_balanceof_GRT": "0xc944E90C64B2c07662A292be6244BDf05Cda44a7", + "test_sload_empty_erc20_balanceof_LRC": "0xBBbbCA6A901c926F240b89EacB641d8Aec7AEafD", + "test_sload_empty_erc20_balanceof_ELON": "0x761D38e5DDf6ccf6Cf7C55759d5210750B5D60F3", + "test_sload_empty_erc20_balanceof_QNT": "0x4a220E6096B25EADb88358cb44068A3248254675", + "test_sload_empty_erc20_balanceof_ONDO": "0xfAbA6f8e4a5E8Ab82F62fe7C39859FA577269BE3", + "test_sload_empty_erc20_balanceof_ENJ": "0xF629cBd94d3791c9250152BD8dfBDF380E2a3B9c", + "test_sload_empty_erc20_balanceof_FET": "0x1D287CC25dAD7cCaF76a26bc660c5F7C8E2a05BD", + "test_sload_empty_erc20_balanceof_eETH": "0x6c5024Cd4F8A59110119C56f8933403A539555EB", + "test_sload_empty_erc20_balanceof_XMX": "0x0F8c45B896784A1E408526B9300519ef8660209c", + "test_sload_empty_erc20_balanceof_FTI": "0x943ed852Dadb5C3938ECdC6883718df8142de4C8", + "test_sload_empty_erc20_balanceof_WBTC": "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", + "test_sload_empty_erc20_balanceof_LEND": "0x80fB784B7eD66730e8b1DBd9820aFD29931aab03", + "test_sload_empty_erc20_balanceof_ELEC": "0xd49ff13661451313ca1553fd6954bd1d9b6e02b9", + "test_sload_empty_erc20_balanceof_SUSHI": "0x6B3595068778DD592e39A122f4f5a5CF09C90fE2", + "test_sload_empty_erc20_balanceof_HOT": "0x6c6EE5e31d828De241282B9606C8e98Ea48526E2", + "test_sload_empty_erc20_balanceof_MITx": "0x4a527d8fc13c5203ab24ba0944f4cb14658d1db6", + "test_sload_empty_erc20_balanceof_1INCH": "0x111111111117dC0aa78b770fA6A738034120C302", + "test_sload_empty_erc20_balanceof_USDP": "0x1456688345527bE1f37E9e627DA0837D6f08C925", + "test_sload_empty_erc20_balanceof_ETHFI": "0xfe0c30065b384f05761f15d0cc899d4f9f9cc0eb", + "test_sload_empty_erc20_balanceof_POLY": "0x9992ec3cf6a55b00978cddf2b27bc6882d88d1ec", + "test_sload_empty_erc20_balanceof_AOA": "0x9ab165d795019b6d8b3e971dda91071421305e5a", + "test_sload_empty_erc20_balanceof_STORJ": "0xB64ef51C888972c908CFacf59B47C1AfBC0Ab8aC", + "test_sload_empty_erc20_balanceof_MKR": "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2", + "test_sload_empty_erc20_balanceof_AMP": "0xfF20817765cB7F73d4Bde2e66e067e58d11095c2", + "test_sload_empty_erc20_balanceof_VRA": "0xF411903cbc70a74d22900a5DE66A2dda66507255", + "test_sload_empty_erc20_balanceof_GTC": "0xde30da39c46104798bb5aa3fe8b9e0e1f348163f", + "test_sload_empty_erc20_balanceof_FLOKI": "0x43F11c02439E2736800433B4594994Bd43Cd066D", + "test_sload_empty_erc20_balanceof_ALT": "0x8457CA5040ad67fdebbCC8EdCE889A335Bc0fbFB", + "test_sload_empty_erc20_balanceof_IMX": "0xf57e7e7c23978c3caec3c3548e3d615c346e79ff", + "test_sload_empty_erc20_balanceof_XYO": "0x55296f69f40ea6d20e478533c15A6b08B654E758", + "test_sload_empty_erc20_balanceof_REV": "0x2ef27bf41236bd859a95209e17a43fbd26851f92", + 
"test_sload_empty_erc20_balanceof_FUN": "0x419d0d8bdd9af5e606ae2232ed285aff190e711b", + "test_sload_empty_erc20_balanceof_CRV": "0xD533a949740bb3306d119CC777fa900bA034cd52", + "test_sload_empty_erc20_balanceof_CHZ": "0x3506424f91fd33084466f402d5d97f05f8e3b4af", + "test_sload_empty_erc20_balanceof_SMT": "0x78Eb8DC641077F049f910659b6d580E80dC4d237", + "test_sload_empty_erc20_balanceof_SNX": "0xC011A72400E58ecD99Ee497CF89E3775d4bd732F", + "test_sload_empty_erc20_balanceof_DENT": "0x3597bfD533a99c9aa083587B074434E61Eb0A258", + "test_sload_empty_erc20_balanceof_RNDR": "0x6De037ef9aD2725EB40118Bb1702EBb27e4Aeb24", + "test_sload_empty_erc20_balanceof_SNT": "0x744d70FDBe2Ba4CF95131626614a1763DF805B9E", + "test_sload_empty_erc20_balanceof_AXS": "0xBB0E17EF65F82Ab018d8EDd776e8DD940327B28b", + "test_sload_empty_erc20_balanceof_KNC": "0xdd974D5C2e2928deA5F71b9825b8b646686BD200", + "test_sload_empty_erc20_balanceof_WEPE": "0xccB365D2e11aE4D6d74715c680f56cf58bF4bF10", + "test_sload_empty_erc20_balanceof_ZETA": "0xf091867ec603a6628ed83d274e835539d82e9cc8", + "test_sload_empty_erc20_balanceof_LYM": "0xc690f7c7fcffa6a82b79fab7508c466fefdfc8c5", + "test_sload_empty_erc20_balanceof_nCASH": "0x809826cceAb68c387726af962713b64Cb5Cb3CCA", + "test_sload_empty_erc20_balanceof_LOOKS": "0xf4d2888d29D722226FafA5d9B24F9164c092421E", + "test_sload_empty_erc20_balanceof_Monfter/Monavale": "0x275f5ad03be0fa221b4c6649b8aee09a42d9412a", + "test_sload_empty_erc20_balanceof_cETH": "0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5", + "test_sload_empty_erc20_balanceof_SALT": "0x4156D3342D5c385a87D264F90653733592000581", + "test_sload_empty_erc20_balanceof_HOGE": "0xfAd45E47083e4607302aa43c65fB3106F1cd7607", + "test_sload_empty_erc20_balanceof_REN": "0x408e41876cCCDC0F92210600ef50372656052a38", + "test_sload_empty_erc20_balanceof_ENS": "0xC56b13EBBCffa67cfB7979b900B736b3fb480D78", + "test_sload_empty_erc20_balanceof_NEXO": "0xB62132e35a6c13ee1EE0f84dC5d40bad8d815206", + "test_sload_empty_erc20_balanceof_RFR": "0xD0929d411954c47438Dc1D871dd6081F5C5e149c", + "test_sload_empty_erc20_balanceof_COFI": "0x3137619705b5fc22a3048989F983905e456B59Ab", + "test_sload_empty_erc20_balanceof_SLP": "0xcc8fa225d80b9c7d42f96e9570156c65d6cAAa25", + "test_sload_empty_erc20_balanceof_FUEL": "0xea38eaa3c86c8f9b751533ba2e562deb9acded40", + "test_sload_empty_erc20_balanceof_ENA": "0x57e114B691Db790C35207b2e685D4A43181e6061", + "test_sload_empty_erc20_balanceof_AKITA": "0x3301Ee63Fb29F863f2333Bd4466acb46CD8323E6", + "test_sload_empty_erc20_balanceof_CVC": "0x41e5560054824ea6B0732E656e3Ad64E20e94e45", + "test_sload_empty_erc20_balanceof_IHT": "0xEda8B016efa8b1161208Cf041cD86972EEE0F31E", + "test_sload_empty_erc20_balanceof_ZSC": "0x7A41e0517a5ecA4FdbC7FbebA4D4c47B9fF6DC63", + "test_sload_empty_erc20_balanceof_cbETH": "0xBe9895146f7AF43049ca1c1AE358B0541Ea49704", + "test_sload_empty_erc20_balanceof_IMT": "0x13119e34e140097a507b07a5564bde1bc375d9e6", + "test_sstore_erc20_approve_30GB_ERC20": "0x19fc17d87D946BBA47ca276f7b06Ee5737c4679C", + "test_sstore_erc20_approve_XEN": "0x06450dEe7FD2Fb8E39061434BAbCFC05599a6Fb8", + "test_sstore_erc20_approve_USDT": "0xdAC17F958D2ee523a2206206994597C13D831ec7", + "test_sstore_erc20_approve_USDC": "0xA0b86991C6218B36c1d19D4a2E9Eb0CE3606EB48", + "test_sstore_erc20_approve_LPT": "0x58b6A8a3302369DAEc383334672404Ee733AB239", + "test_sstore_erc20_approve_SHIB": "0x95aD61B0a150d79219dCF64E1E6Cc01f0B64C4cE", + "test_sstore_erc20_approve_WETH": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", + "test_sstore_erc20_approve_G-CRE": 
"0xa3Ee21c306A700E682AbcDfE9bAA6A08F3820419", + "test_sstore_erc20_approve_MEME": "0xB131F4A55907B10d1F0A50d8Ab8FA09EC342CD74", + "test_sstore_erc20_approve_OMG": "0xd26114cD6EE289AccF82350c8d8487fedB8A0C07", + "test_sstore_erc20_approve_MATIC": "0x7d1Afa7B718fb893DB30A3abc0Cfc608AaCfEbB0", + "test_sstore_erc20_approve_stETH": "0xae7ab96520DE3A18E5e111B5EaAb095312D7fE84", + "test_sstore_erc20_approve_DAI": "0x6B175474E89094C44Da98b954EedeAC495271d0F", + "test_sstore_erc20_approve_PEPE": "0x6982508145454Ce325dDbE47a25d4eC3d2311933", + "test_sstore_erc20_approve_old": "0x0cf0ee63788A0849FE5297F3407f701E122CC023", + "test_sstore_erc20_approve_BAT": "0x0D8775F648430679A709E98d2b0Cb6250d2887EF", + "test_sstore_erc20_approve_UNI": "0x1F9840a85d5aF5bf1D1762F925BdADdC4201F984", + "test_sstore_erc20_approve_AMB": "0x4dc3643Dbc642b72C158E7F3d2FF232df61cB6CE", + "test_sstore_erc20_approve_HEX": "0x2b591e99afE9f32eAA6214f7B7629768c40eEb39", + "test_sstore_erc20_approve_CRO": "0xa0b73e1ff0b80914ab6fe0444e65848c4c34450b", + "test_sstore_erc20_approve_UCASH": "0x92e52a1A235d9A103D970901066CE910AAceFD37", + "test_sstore_erc20_approve_BNB": "0xB8c77482e45F1F44dE1745F52C74426C631bDd52", + "test_sstore_erc20_approve_GSE": "0xe530441f4f73bdb6dc2fa5af7c3fc5fd551ec838", + "test_sstore_erc20_approve_MANA": "0x0F5D2FB29fb7d3cFeE444A200298f468908cC942", + "test_sstore_erc20_approve_OCN": "0x4092678e4E78230F46A1534C0fBC8Fa39780892B", + "test_sstore_erc20_approve_EIGEN": "0xEC53BF9167F50cDEb3aE105F56099AaAb9061F83", + "test_sstore_erc20_approve_COMP": "0xc00e94Cb662C3520282E6f5717214004A7f26888", + "test_sstore_erc20_approve_cUSDC": "0x39AA39c021dfbaE8faC545936693aC917d5E7563", + "test_sstore_erc20_approve_sMEME": "0xc059A531B4234d05e9EF4aC51028f7E6156E2CcE", + "test_sstore_erc20_approve_SAND": "0x3845badade8e6dff049820680d1f14bd3903a5d0", + "test_sstore_erc20_approve_AAVE": "0x7Fc66500c84A76Ad7e9c93437bFc5AC33E2DDAe9", + "test_sstore_erc20_approve_ZRX": "0xE41d2489571d322189246DaFA5ebDe1F4699F498", + "test_sstore_erc20_approve_KOK": "0x9B9647431632AF44be02ddd22477Ed94d14AacAa", + "test_sstore_erc20_approve_APE": "0x4d224452801ACEd8B2F0aebe155379bb5D594381", + "test_sstore_erc20_approve_SAI": "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359", + "test_sstore_erc20_approve_GRT": "0xc944E90C64B2c07662A292be6244BDf05Cda44a7", + "test_sstore_erc20_approve_LRC": "0xBBbbCA6A901c926F240b89EacB641d8Aec7AEafD", + "test_sstore_erc20_approve_ELON": "0x761D38e5DDf6ccf6Cf7C55759d5210750B5D60F3", + "test_sstore_erc20_approve_QNT": "0x4a220E6096B25EADb88358cb44068A3248254675", + "test_sstore_erc20_approve_ONDO": "0xfAbA6f8e4a5E8Ab82F62fe7C39859FA577269BE3", + "test_sstore_erc20_approve_ENJ": "0xF629cBd94d3791c9250152BD8dfBDF380E2a3B9c", + "test_sstore_erc20_approve_FET": "0x1D287CC25dAD7cCaF76a26bc660c5F7C8E2a05BD", + "test_sstore_erc20_approve_eETH": "0x6c5024Cd4F8A59110119C56f8933403A539555EB", + "test_sstore_erc20_approve_XMX": "0x0F8c45B896784A1E408526B9300519ef8660209c", + "test_sstore_erc20_approve_FTI": "0x943ed852Dadb5C3938ECdC6883718df8142de4C8", + "test_sstore_erc20_approve_WBTC": "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", + "test_sstore_erc20_approve_LEND": "0x80fB784B7eD66730e8b1DBd9820aFD29931aab03", + "test_sstore_erc20_approve_ELEC": "0xd49ff13661451313ca1553fd6954bd1d9b6e02b9", + "test_sstore_erc20_approve_SUSHI": "0x6B3595068778DD592e39A122f4f5a5CF09C90fE2", + "test_sstore_erc20_approve_HOT": "0x6c6EE5e31d828De241282B9606C8e98Ea48526E2", + "test_sstore_erc20_approve_MITx": 
"0x4a527d8fc13c5203ab24ba0944f4cb14658d1db6", + "test_sstore_erc20_approve_1INCH": "0x111111111117dC0aa78b770fA6A738034120C302", + "test_sstore_erc20_approve_USDP": "0x1456688345527bE1f37E9e627DA0837D6f08C925", + "test_sstore_erc20_approve_ETHFI": "0xfe0c30065b384f05761f15d0cc899d4f9f9cc0eb", + "test_sstore_erc20_approve_POLY": "0x9992ec3cf6a55b00978cddf2b27bc6882d88d1ec", + "test_sstore_erc20_approve_AOA": "0x9ab165d795019b6d8b3e971dda91071421305e5a", + "test_sstore_erc20_approve_STORJ": "0xB64ef51C888972c908CFacf59B47C1AfBC0Ab8aC", + "test_sstore_erc20_approve_MKR": "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2", + "test_sstore_erc20_approve_AMP": "0xfF20817765cB7F73d4Bde2e66e067e58d11095c2", + "test_sstore_erc20_approve_VRA": "0xF411903cbc70a74d22900a5DE66A2dda66507255", + "test_sstore_erc20_approve_GTC": "0xde30da39c46104798bb5aa3fe8b9e0e1f348163f", + "test_sstore_erc20_approve_FLOKI": "0x43F11c02439E2736800433B4594994Bd43Cd066D", + "test_sstore_erc20_approve_ALT": "0x8457CA5040ad67fdebbCC8EdCE889A335Bc0fbFB", + "test_sstore_erc20_approve_IMX": "0xf57e7e7c23978c3caec3c3548e3d615c346e79ff", + "test_sstore_erc20_approve_XYO": "0x55296f69f40ea6d20e478533c15A6b08B654E758", + "test_sstore_erc20_approve_REV": "0x2ef27bf41236bd859a95209e17a43fbd26851f92", + "test_sstore_erc20_approve_FUN": "0x419d0d8bdd9af5e606ae2232ed285aff190e711b", + "test_sstore_erc20_approve_CRV": "0xD533a949740bb3306d119CC777fa900bA034cd52", + "test_sstore_erc20_approve_CHZ": "0x3506424f91fd33084466f402d5d97f05f8e3b4af", + "test_sstore_erc20_approve_SMT": "0x78Eb8DC641077F049f910659b6d580E80dC4d237", + "test_sstore_erc20_approve_SNX": "0xC011A72400E58ecD99Ee497CF89E3775d4bd732F", + "test_sstore_erc20_approve_DENT": "0x3597bfD533a99c9aa083587B074434E61Eb0A258", + "test_sstore_erc20_approve_RNDR": "0x6De037ef9aD2725EB40118Bb1702EBb27e4Aeb24", + "test_sstore_erc20_approve_SNT": "0x744d70FDBe2Ba4CF95131626614a1763DF805B9E", + "test_sstore_erc20_approve_AXS": "0xBB0E17EF65F82Ab018d8EDd776e8DD940327B28b", + "test_sstore_erc20_approve_KNC": "0xdd974D5C2e2928deA5F71b9825b8b646686BD200", + "test_sstore_erc20_approve_WEPE": "0xccB365D2e11aE4D6d74715c680f56cf58bF4bF10", + "test_sstore_erc20_approve_ZETA": "0xf091867ec603a6628ed83d274e835539d82e9cc8", + "test_sstore_erc20_approve_LYM": "0xc690f7c7fcffa6a82b79fab7508c466fefdfc8c5", + "test_sstore_erc20_approve_nCASH": "0x809826cceAb68c387726af962713b64Cb5Cb3CCA", + "test_sstore_erc20_approve_LOOKS": "0xf4d2888d29D722226FafA5d9B24F9164c092421E", + "test_sstore_erc20_approve_Monfter/Monavale": "0x275f5ad03be0fa221b4c6649b8aee09a42d9412a", + "test_sstore_erc20_approve_cETH": "0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5", + "test_sstore_erc20_approve_SALT": "0x4156D3342D5c385a87D264F90653733592000581", + "test_sstore_erc20_approve_HOGE": "0xfAd45E47083e4607302aa43c65fB3106F1cd7607", + "test_sstore_erc20_approve_REN": "0x408e41876cCCDC0F92210600ef50372656052a38", + "test_sstore_erc20_approve_ENS": "0xC56b13EBBCffa67cfB7979b900B736b3fb480D78", + "test_sstore_erc20_approve_NEXO": "0xB62132e35a6c13ee1EE0f84dC5d40bad8d815206", + "test_sstore_erc20_approve_RFR": "0xD0929d411954c47438Dc1D871dd6081F5C5e149c", + "test_sstore_erc20_approve_COFI": "0x3137619705b5fc22a3048989F983905e456B59Ab", + "test_sstore_erc20_approve_SLP": "0xcc8fa225d80b9c7d42f96e9570156c65d6cAAa25", + "test_sstore_erc20_approve_FUEL": "0xea38eaa3c86c8f9b751533ba2e562deb9acded40", + "test_sstore_erc20_approve_ENA": "0x57e114B691Db790C35207b2e685D4A43181e6061", + "test_sstore_erc20_approve_AKITA": 
"0x3301Ee63Fb29F863f2333Bd4466acb46CD8323E6", + "test_sstore_erc20_approve_CVC": "0x41e5560054824ea6B0732E656e3Ad64E20e94e45", + "test_sstore_erc20_approve_IHT": "0xEda8B016efa8b1161208Cf041cD86972EEE0F31E", + "test_sstore_erc20_approve_ZSC": "0x7A41e0517a5ecA4FdbC7FbebA4D4c47B9fF6DC63", + "test_sstore_erc20_approve_cbETH": "0xBe9895146f7AF43049ca1c1AE358B0541Ea49704", + "test_sstore_erc20_approve_IMT": "0x13119e34e140097a507b07a5564bde1bc375d9e6", + "test_mixed_sload_sstore_30GB_ERC20": "0x19fc17d87D946BBA47ca276f7b06Ee5737c4679C", + "test_mixed_sload_sstore_XEN": "0x06450dEe7FD2Fb8E39061434BAbCFC05599a6Fb8", + "test_mixed_sload_sstore_USDT": "0xdAC17F958D2ee523a2206206994597C13D831ec7", + "test_mixed_sload_sstore_USDC": "0xA0b86991C6218B36c1d19D4a2E9Eb0CE3606EB48", + "test_mixed_sload_sstore_LPT": "0x58b6A8a3302369DAEc383334672404Ee733AB239", + "test_mixed_sload_sstore_SHIB": "0x95aD61B0a150d79219dCF64E1E6Cc01f0B64C4cE", + "test_mixed_sload_sstore_WETH": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", + "test_mixed_sload_sstore_G-CRE": "0xa3Ee21c306A700E682AbcDfE9bAA6A08F3820419", + "test_mixed_sload_sstore_MEME": "0xB131F4A55907B10d1F0A50d8Ab8FA09EC342CD74", + "test_mixed_sload_sstore_OMG": "0xd26114cD6EE289AccF82350c8d8487fedB8A0C07", + "test_mixed_sload_sstore_MATIC": "0x7d1Afa7B718fb893DB30A3abc0Cfc608AaCfEbB0", + "test_mixed_sload_sstore_stETH": "0xae7ab96520DE3A18E5e111B5EaAb095312D7fE84", + "test_mixed_sload_sstore_DAI": "0x6B175474E89094C44Da98b954EedeAC495271d0F", + "test_mixed_sload_sstore_PEPE": "0x6982508145454Ce325dDbE47a25d4eC3d2311933", + "test_mixed_sload_sstore_old": "0x0cf0ee63788A0849FE5297F3407f701E122CC023", + "test_mixed_sload_sstore_BAT": "0x0D8775F648430679A709E98d2b0Cb6250d2887EF", + "test_mixed_sload_sstore_UNI": "0x1F9840a85d5aF5bf1D1762F925BdADdC4201F984", + "test_mixed_sload_sstore_AMB": "0x4dc3643Dbc642b72C158E7F3d2FF232df61cB6CE", + "test_mixed_sload_sstore_HEX": "0x2b591e99afE9f32eAA6214f7B7629768c40eEb39", + "test_mixed_sload_sstore_CRO": "0xa0b73e1ff0b80914ab6fe0444e65848c4c34450b", + "test_mixed_sload_sstore_UCASH": "0x92e52a1A235d9A103D970901066CE910AAceFD37", + "test_mixed_sload_sstore_BNB": "0xB8c77482e45F1F44dE1745F52C74426C631bDd52", + "test_mixed_sload_sstore_GSE": "0xe530441f4f73bdb6dc2fa5af7c3fc5fd551ec838", + "test_mixed_sload_sstore_MANA": "0x0F5D2FB29fb7d3cFeE444A200298f468908cC942", + "test_mixed_sload_sstore_OCN": "0x4092678e4E78230F46A1534C0fBC8Fa39780892B", + "test_mixed_sload_sstore_EIGEN": "0xEC53BF9167F50cDEb3aE105F56099AaAb9061F83", + "test_mixed_sload_sstore_COMP": "0xc00e94Cb662C3520282E6f5717214004A7f26888", + "test_mixed_sload_sstore_cUSDC": "0x39AA39c021dfbaE8faC545936693aC917d5E7563", + "test_mixed_sload_sstore_sMEME": "0xc059A531B4234d05e9EF4aC51028f7E6156E2CcE", + "test_mixed_sload_sstore_SAND": "0x3845badade8e6dff049820680d1f14bd3903a5d0", + "test_mixed_sload_sstore_AAVE": "0x7Fc66500c84A76Ad7e9c93437bFc5AC33E2DDAe9", + "test_mixed_sload_sstore_ZRX": "0xE41d2489571d322189246DaFA5ebDe1F4699F498", + "test_mixed_sload_sstore_KOK": "0x9B9647431632AF44be02ddd22477Ed94d14AacAa", + "test_mixed_sload_sstore_APE": "0x4d224452801ACEd8B2F0aebe155379bb5D594381", + "test_mixed_sload_sstore_SAI": "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359", + "test_mixed_sload_sstore_GRT": "0xc944E90C64B2c07662A292be6244BDf05Cda44a7", + "test_mixed_sload_sstore_LRC": "0xBBbbCA6A901c926F240b89EacB641d8Aec7AEafD", + "test_mixed_sload_sstore_ELON": "0x761D38e5DDf6ccf6Cf7C55759d5210750B5D60F3", + "test_mixed_sload_sstore_QNT": 
"0x4a220E6096B25EADb88358cb44068A3248254675", + "test_mixed_sload_sstore_ONDO": "0xfAbA6f8e4a5E8Ab82F62fe7C39859FA577269BE3", + "test_mixed_sload_sstore_ENJ": "0xF629cBd94d3791c9250152BD8dfBDF380E2a3B9c", + "test_mixed_sload_sstore_FET": "0x1D287CC25dAD7cCaF76a26bc660c5F7C8E2a05BD", + "test_mixed_sload_sstore_eETH": "0x6c5024Cd4F8A59110119C56f8933403A539555EB", + "test_mixed_sload_sstore_XMX": "0x0F8c45B896784A1E408526B9300519ef8660209c", + "test_mixed_sload_sstore_FTI": "0x943ed852Dadb5C3938ECdC6883718df8142de4C8", + "test_mixed_sload_sstore_WBTC": "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", + "test_mixed_sload_sstore_LEND": "0x80fB784B7eD66730e8b1DBd9820aFD29931aab03", + "test_mixed_sload_sstore_ELEC": "0xd49ff13661451313ca1553fd6954bd1d9b6e02b9", + "test_mixed_sload_sstore_SUSHI": "0x6B3595068778DD592e39A122f4f5a5CF09C90fE2", + "test_mixed_sload_sstore_HOT": "0x6c6EE5e31d828De241282B9606C8e98Ea48526E2", + "test_mixed_sload_sstore_MITx": "0x4a527d8fc13c5203ab24ba0944f4cb14658d1db6", + "test_mixed_sload_sstore_1INCH": "0x111111111117dC0aa78b770fA6A738034120C302", + "test_mixed_sload_sstore_USDP": "0x1456688345527bE1f37E9e627DA0837D6f08C925", + "test_mixed_sload_sstore_ETHFI": "0xfe0c30065b384f05761f15d0cc899d4f9f9cc0eb", + "test_mixed_sload_sstore_POLY": "0x9992ec3cf6a55b00978cddf2b27bc6882d88d1ec", + "test_mixed_sload_sstore_AOA": "0x9ab165d795019b6d8b3e971dda91071421305e5a", + "test_mixed_sload_sstore_STORJ": "0xB64ef51C888972c908CFacf59B47C1AfBC0Ab8aC", + "test_mixed_sload_sstore_MKR": "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2", + "test_mixed_sload_sstore_AMP": "0xfF20817765cB7F73d4Bde2e66e067e58d11095c2", + "test_mixed_sload_sstore_VRA": "0xF411903cbc70a74d22900a5DE66A2dda66507255", + "test_mixed_sload_sstore_GTC": "0xde30da39c46104798bb5aa3fe8b9e0e1f348163f", + "test_mixed_sload_sstore_FLOKI": "0x43F11c02439E2736800433B4594994Bd43Cd066D", + "test_mixed_sload_sstore_ALT": "0x8457CA5040ad67fdebbCC8EdCE889A335Bc0fbFB", + "test_mixed_sload_sstore_IMX": "0xf57e7e7c23978c3caec3c3548e3d615c346e79ff", + "test_mixed_sload_sstore_XYO": "0x55296f69f40ea6d20e478533c15A6b08B654E758", + "test_mixed_sload_sstore_REV": "0x2ef27bf41236bd859a95209e17a43fbd26851f92", + "test_mixed_sload_sstore_FUN": "0x419d0d8bdd9af5e606ae2232ed285aff190e711b", + "test_mixed_sload_sstore_CRV": "0xD533a949740bb3306d119CC777fa900bA034cd52", + "test_mixed_sload_sstore_CHZ": "0x3506424f91fd33084466f402d5d97f05f8e3b4af", + "test_mixed_sload_sstore_SMT": "0x78Eb8DC641077F049f910659b6d580E80dC4d237", + "test_mixed_sload_sstore_SNX": "0xC011A72400E58ecD99Ee497CF89E3775d4bd732F", + "test_mixed_sload_sstore_DENT": "0x3597bfD533a99c9aa083587B074434E61Eb0A258", + "test_mixed_sload_sstore_RNDR": "0x6De037ef9aD2725EB40118Bb1702EBb27e4Aeb24", + "test_mixed_sload_sstore_SNT": "0x744d70FDBe2Ba4CF95131626614a1763DF805B9E", + "test_mixed_sload_sstore_AXS": "0xBB0E17EF65F82Ab018d8EDd776e8DD940327B28b", + "test_mixed_sload_sstore_KNC": "0xdd974D5C2e2928deA5F71b9825b8b646686BD200", + "test_mixed_sload_sstore_WEPE": "0xccB365D2e11aE4D6d74715c680f56cf58bF4bF10", + "test_mixed_sload_sstore_ZETA": "0xf091867ec603a6628ed83d274e835539d82e9cc8", + "test_mixed_sload_sstore_LYM": "0xc690f7c7fcffa6a82b79fab7508c466fefdfc8c5", + "test_mixed_sload_sstore_nCASH": "0x809826cceAb68c387726af962713b64Cb5Cb3CCA", + "test_mixed_sload_sstore_LOOKS": "0xf4d2888d29D722226FafA5d9B24F9164c092421E", + "test_mixed_sload_sstore_Monfter/Monavale": "0x275f5ad03be0fa221b4c6649b8aee09a42d9412a", + "test_mixed_sload_sstore_cETH": 
"0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5", + "test_mixed_sload_sstore_SALT": "0x4156D3342D5c385a87D264F90653733592000581", + "test_mixed_sload_sstore_HOGE": "0xfAd45E47083e4607302aa43c65fB3106F1cd7607", + "test_mixed_sload_sstore_REN": "0x408e41876cCCDC0F92210600ef50372656052a38", + "test_mixed_sload_sstore_ENS": "0xC56b13EBBCffa67cfB7979b900B736b3fb480D78", + "test_mixed_sload_sstore_NEXO": "0xB62132e35a6c13ee1EE0f84dC5d40bad8d815206", + "test_mixed_sload_sstore_RFR": "0xD0929d411954c47438Dc1D871dd6081F5C5e149c", + "test_mixed_sload_sstore_COFI": "0x3137619705b5fc22a3048989F983905e456B59Ab", + "test_mixed_sload_sstore_SLP": "0xcc8fa225d80b9c7d42f96e9570156c65d6cAAa25", + "test_mixed_sload_sstore_FUEL": "0xea38eaa3c86c8f9b751533ba2e562deb9acded40", + "test_mixed_sload_sstore_ENA": "0x57e114B691Db790C35207b2e685D4A43181e6061", + "test_mixed_sload_sstore_AKITA": "0x3301Ee63Fb29F863f2333Bd4466acb46CD8323E6", + "test_mixed_sload_sstore_CVC": "0x41e5560054824ea6B0732E656e3Ad64E20e94e45", + "test_mixed_sload_sstore_IHT": "0xEda8B016efa8b1161208Cf041cD86972EEE0F31E", + "test_mixed_sload_sstore_ZSC": "0x7A41e0517a5ecA4FdbC7FbebA4D4c47B9fF6DC63", + "test_mixed_sload_sstore_cbETH": "0xBe9895146f7AF43049ca1c1AE358B0541Ea49704", + "test_mixed_sload_sstore_IMT": "0x13119e34e140097a507b07a5564bde1bc375d9e6" +} diff --git a/tests/benchmark/stateful/bloatnet/test_multi_opcode.py b/tests/benchmark/stateful/bloatnet/test_multi_opcode.py index 691d39b46c..0521462066 100755 --- a/tests/benchmark/stateful/bloatnet/test_multi_opcode.py +++ b/tests/benchmark/stateful/bloatnet/test_multi_opcode.py @@ -6,6 +6,10 @@ operations. """ +import json +import math +from pathlib import Path + import pytest from execution_testing import ( Account, @@ -13,15 +17,11 @@ Block, BlockchainTestFiller, Bytecode, - Create2PreimageLayout, Fork, Op, Transaction, While, ) -from execution_testing.cli.pytest_commands.plugins.execute.pre_alloc import ( - AddressStubs, -) REFERENCE_SPEC_GIT_PATH = "DUMMY/bloatnet.md" REFERENCE_SPEC_VERSION = "1.0" @@ -81,56 +81,23 @@ def test_bloatnet_balance_extcodesize( # Calculate gas costs intrinsic_gas = fork.transaction_intrinsic_cost_calculator()(calldata=b"") - # Setup overhead (before loop): STATICCALL + result handling + memory setup - setup_overhead = ( - gas_costs.G_COLD_ACCOUNT_ACCESS # STATICCALL to factory (2600) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_VERY_LOW # PUSH2 (3) - + gas_costs.G_HIGH # JUMPI (10) - + gas_costs.G_VERY_LOW * 2 # MLOAD × 2 for factory results (3 * 2) - + gas_costs.G_VERY_LOW * 3 # MSTORE × 3 for memory setup (3 * 3) - + gas_costs.G_VERY_LOW # MSTORE8 for 0xFF prefix (3) - + gas_costs.G_VERY_LOW # PUSH1 for memory position (3) - ) - - # Cleanup overhead (after loop) - cleanup_overhead = gas_costs.G_BASE # POP counter (2) - - # While loop condition overhead per iteration - loop_condition_overhead = ( - gas_costs.G_VERY_LOW # DUP1 (3) - + gas_costs.G_VERY_LOW # PUSH1 (3) - + gas_costs.G_VERY_LOW # SWAP1 (3) - + gas_costs.G_VERY_LOW # SUB (3) - + gas_costs.G_VERY_LOW # DUP1 (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_HIGH # JUMPI (10) - ) - # Cost per contract access with CREATE2 address generation cost_per_contract = ( gas_costs.G_KECCAK_256 # SHA3 static cost for address generation (30) - + gas_costs.G_KECCAK_256_WORD * 3 # SHA3 dynamic (85 bytes = 3 words) + + gas_costs.G_KECCAK_256_WORD + * 3 # SHA3 dynamic cost (85 bytes = 3 words * 6) + gas_costs.G_COLD_ACCOUNT_ACCESS # Cold access (2600) + 
gas_costs.G_BASE # POP first result (2) + gas_costs.G_WARM_ACCOUNT_ACCESS # Warm access (100) + gas_costs.G_BASE # POP second result (2) - + gas_costs.G_VERY_LOW # DUP1 before first op (3) - + gas_costs.G_VERY_LOW # MLOAD for salt (3) + + gas_costs.G_BASE # DUP1 before first op (3) + + gas_costs.G_VERY_LOW * 4 # PUSH1 operations (4 * 3) + + gas_costs.G_LOW # MLOAD for salt (3) + gas_costs.G_VERY_LOW # ADD for increment (3) - + gas_costs.G_VERY_LOW # MSTORE salt back (3) - + loop_condition_overhead # While loop condition + + gas_costs.G_LOW # MSTORE salt back (3) + + 10 # While loop overhead ) - # Calculate how many transactions we need to fill the block - num_txs = max(1, gas_benchmark_value // tx_gas_limit) - - # Calculate how many contracts to access per transaction - total_overhead = setup_overhead + cleanup_overhead - available_gas_per_tx = tx_gas_limit - intrinsic_gas - total_overhead - contracts_per_tx = int(available_gas_per_tx // cost_per_contract) - # Deploy factory using stub contract - NO HARDCODED VALUES # The stub "bloatnet_factory" must be provided via --address-stubs flag # The factory at that address MUST have: @@ -141,11 +108,21 @@ def test_bloatnet_balance_extcodesize( stub="bloatnet_factory", ) + # Calculate number of transactions needed (EIP-7825 compliance) + num_txs = max(1, math.ceil(gas_benchmark_value / tx_gas_limit)) + + # Calculate how many contracts to access based on available gas + total_available_gas = ( + gas_benchmark_value - (intrinsic_gas * num_txs) - 1000 + ) + total_contracts = int(total_available_gas // cost_per_contract) + contracts_per_tx = total_contracts // num_txs + # Log test requirements - deployed count read from factory storage print( - f"Tx gas limit: {tx_gas_limit / 1_000_000:.1f}M gas. " - f"Number of txs: {num_txs}. " - f"Contracts per tx: {contracts_per_tx}. " + f"Test needs {total_contracts} contracts for " + f"{gas_benchmark_value / 1_000_000:.1f}M gas " + f"across {num_txs} transaction(s). " f"Factory storage will be checked during execution." 
) @@ -158,78 +135,100 @@ def test_bloatnet_balance_extcodesize( else (extcodesize_op + balance_op) ) - # Build attack contract that reads config from factory and performs attack - attack_code = ( - # Call getConfig() on factory to get num_deployed and init_code_hash - Op.STATICCALL( - gas=Op.GAS, - address=factory_address, - args_offset=0, - args_size=0, - ret_offset=96, - ret_size=64, + # Build transactions + txs = [] + post = {} + contracts_remaining = total_contracts + salt_offset = 0 + + for i in range(num_txs): + # Last tx gets remaining contracts + tx_contracts = ( + contracts_per_tx if i < num_txs - 1 else contracts_remaining ) - # Check if call succeeded - + Op.ISZERO - + Op.PUSH2(0x1000) # Jump to error handler if failed (far jump) - + Op.JUMPI - # Load results from memory - # Memory[96:128] = num_deployed_contracts - # Memory[128:160] = init_code_hash - + Op.MLOAD(96) # Load num_deployed_contracts to stack - + ( - create2_preimage := Create2PreimageLayout( - factory_address=factory_address, - salt=0, - init_code_hash=Op.MLOAD(128), + contracts_remaining -= tx_contracts + + # Build attack contract that reads config from factory + attack_code = ( + # Call getConfig() on factory to get config + Op.STATICCALL( + gas=Op.GAS, + address=factory_address, + args_offset=0, + args_size=0, + ret_offset=96, + ret_size=64, ) - ) - # Main attack loop - iterate through all deployed contracts - + While( - body=( - # Generate CREATE2 addr: keccak256(0xFF+factory+salt+hash) - # Hash CREATE2 address from memory - create2_preimage.address_op() - # The address is now on the stack - + Op.DUP1 # Duplicate for second operation - + benchmark_ops # Execute operations in specified order - # Increment salt for next iteration - + create2_preimage.increment_salt_op() - ), - # Continue while we haven't reached the limit - condition=Op.DUP1 - + Op.PUSH1(1) - + Op.SWAP1 - + Op.SUB - + Op.DUP1 + # Check if call succeeded + Op.ISZERO - + Op.ISZERO, + + Op.PUSH2(0x1000) # Jump to error handler if failed (far jump) + + Op.JUMPI + # Load results from memory + # Memory[96:128] = num_deployed_contracts + # Memory[128:160] = init_code_hash + + Op.MLOAD(128) # Load init_code_hash + # Setup memory for CREATE2 address generation + # Memory layout at 0: 0xFF + factory_addr(20) + salt(32) + hash(32) + + Op.MSTORE( + 0, factory_address + ) # Store factory address at memory position 0 + + Op.MSTORE8(11, 0xFF) # Store 0xFF prefix at byte 11 + + Op.MSTORE(32, salt_offset) # Store starting salt at position 32 + # Stack now has: [init_code_hash] + + Op.PUSH1(64) # Push memory position + + Op.MSTORE # Store init_code_hash at memory[64] + # Push our iteration count onto stack + + Op.PUSH4(tx_contracts) + # Main attack loop - iterate through contracts for this tx + + While( + body=( + # Generate CREATE2 addr: keccak256(0xFF+factory+salt+hash) + Op.SHA3(11, 85) # CREATE2 addr from memory[11:96] + # The address is now on the stack + + Op.DUP1 # Duplicate for second operation + + benchmark_ops # Execute operations in specified order + # Increment salt for next iteration + + Op.MSTORE( + 32, Op.ADD(Op.MLOAD(32), 1) + ) # Increment and store salt + ), + # Continue while we haven't reached the limit + condition=Op.DUP1 + + Op.PUSH1(1) + + Op.SWAP1 + + Op.SUB + + Op.DUP1 + + Op.ISZERO + + Op.ISZERO, + ) + + Op.POP # Clean up counter ) - + Op.POP # Clean up counter - ) - # Deploy attack contract - attack_address = pre.deploy_contract(code=attack_code) + # Deploy attack contract for this tx + attack_address = 
pre.deploy_contract(code=attack_code) + + # Calculate gas for this transaction + this_tx_gas = min( + tx_gas_limit, gas_benchmark_value - (i * tx_gas_limit) + ) - # Create multiple attack transactions to fill the block - sender = pre.fund_eoa() - attack_txs = [ - Transaction( - to=attack_address, - gas_limit=tx_gas_limit, - sender=sender, + txs.append( + Transaction( + to=attack_address, + gas_limit=this_tx_gas, + sender=pre.fund_eoa(), + ) ) - for _ in range(num_txs) - ] - # Post-state: just verify attack contract exists - post = { - attack_address: Account(storage={}), - } + # Add to post-state + post[attack_address] = Account(storage={}) + + # Update salt offset for next transaction + salt_offset += tx_contracts blockchain_test( pre=pre, - blocks=[Block(txs=attack_txs)], + blocks=[Block(txs=txs)], post=post, ) @@ -264,57 +263,24 @@ def test_bloatnet_balance_extcodecopy( # Calculate costs intrinsic_gas = fork.transaction_intrinsic_cost_calculator()(calldata=b"") - # Setup overhead (before loop): STATICCALL + result handling + memory setup - setup_overhead = ( - gas_costs.G_COLD_ACCOUNT_ACCESS # STATICCALL to factory (2600) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_VERY_LOW # PUSH2 (3) - + gas_costs.G_HIGH # JUMPI (10) - + gas_costs.G_VERY_LOW * 2 # MLOAD × 2 for factory results (3 * 2) - + gas_costs.G_VERY_LOW * 3 # MSTORE × 3 for memory setup (3 * 3) - + gas_costs.G_VERY_LOW # MSTORE8 for 0xFF prefix (3) - + gas_costs.G_VERY_LOW # PUSH1 for memory position (3) - ) - - # Cleanup overhead (after loop) - cleanup_overhead = gas_costs.G_BASE # POP counter (2) - - # While loop condition overhead per iteration - loop_condition_overhead = ( - gas_costs.G_VERY_LOW # DUP1 (3) - + gas_costs.G_VERY_LOW # PUSH1 (3) - + gas_costs.G_VERY_LOW # SWAP1 (3) - + gas_costs.G_VERY_LOW # SUB (3) - + gas_costs.G_VERY_LOW # DUP1 (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_HIGH # JUMPI (10) - ) - # Cost per contract with EXTCODECOPY and CREATE2 address generation cost_per_contract = ( gas_costs.G_KECCAK_256 # SHA3 static cost for address generation (30) - + gas_costs.G_KECCAK_256_WORD * 3 # SHA3 dynamic (85 bytes = 3 words) + + gas_costs.G_KECCAK_256_WORD + * 3 # SHA3 dynamic cost (85 bytes = 3 words * 6) + gas_costs.G_COLD_ACCOUNT_ACCESS # Cold access (2600) + gas_costs.G_BASE # POP first result (2) + gas_costs.G_WARM_ACCOUNT_ACCESS # Warm access base (100) + gas_costs.G_COPY * 1 # Copy cost for 1 byte (3) - + gas_costs.G_VERY_LOW * 2 # DUP1 + DUP4 for address (6) - + gas_costs.G_VERY_LOW * 2 # MLOAD for salt twice (6) + + gas_costs.G_BASE * 2 # DUP1 before first op, DUP4 for address (6) + + gas_costs.G_VERY_LOW * 8 # PUSH operations (8 * 3 = 24) + + gas_costs.G_LOW * 2 # MLOAD for salt twice (6) + gas_costs.G_VERY_LOW * 2 # ADD operations (6) - + gas_costs.G_VERY_LOW # MSTORE salt back (3) + + gas_costs.G_LOW # MSTORE salt back (3) + gas_costs.G_BASE # POP after second op (2) - + loop_condition_overhead # While loop condition + + 10 # While loop overhead ) - # Calculate how many transactions we need to fill the block - num_txs = max(1, gas_benchmark_value // tx_gas_limit) - - # Calculate how many contracts to access per transaction - total_overhead = setup_overhead + cleanup_overhead - available_gas_per_tx = tx_gas_limit - intrinsic_gas - total_overhead - contracts_per_tx = int(available_gas_per_tx // cost_per_contract) - # Deploy factory using stub contract - NO HARDCODED VALUES # The stub "bloatnet_factory" must be provided via --address-stubs 
flag # The factory at that address MUST have: @@ -325,11 +291,21 @@ def test_bloatnet_balance_extcodecopy( stub="bloatnet_factory", ) + # Calculate number of transactions needed (EIP-7825 compliance) + num_txs = max(1, math.ceil(gas_benchmark_value / tx_gas_limit)) + + # Calculate how many contracts to access + total_available_gas = ( + gas_benchmark_value - (intrinsic_gas * num_txs) - 1000 + ) + total_contracts = int(total_available_gas // cost_per_contract) + contracts_per_tx = total_contracts // num_txs + # Log test requirements - deployed count read from factory storage print( - f"Tx gas limit: {tx_gas_limit / 1_000_000:.1f}M gas. " - f"Number of txs: {num_txs}. " - f"Contracts per tx: {contracts_per_tx}. " + f"Test needs {total_contracts} contracts for " + f"{gas_benchmark_value / 1_000_000:.1f}M gas " + f"across {num_txs} transaction(s). " f"Factory storage will be checked during execution." ) @@ -349,77 +325,99 @@ def test_bloatnet_balance_extcodecopy( else (extcodecopy_op + balance_op) ) - # Build attack contract that reads config from factory and performs attack - attack_code = ( - # Call getConfig() on factory to get num_deployed and init_code_hash - Op.STATICCALL( - gas=Op.GAS, - address=factory_address, - args_offset=0, - args_size=0, - ret_offset=96, - ret_size=64, + # Build transactions + txs = [] + post = {} + contracts_remaining = total_contracts + salt_offset = 0 + + for i in range(num_txs): + # Last tx gets remaining contracts + tx_contracts = ( + contracts_per_tx if i < num_txs - 1 else contracts_remaining ) - # Check if call succeeded - + Op.ISZERO - + Op.PUSH2(0x1000) # Jump to error handler if failed (far jump) - + Op.JUMPI - # Load results from memory - # Memory[96:128] = num_deployed_contracts - # Memory[128:160] = init_code_hash - + Op.MLOAD(96) # Load num_deployed_contracts to stack - + ( - create2_preimage := Create2PreimageLayout( - factory_address=factory_address, - salt=0, - init_code_hash=Op.MLOAD(128), + contracts_remaining -= tx_contracts + + # Build attack contract that reads config from factory + attack_code = ( + # Call getConfig() on factory to get config + Op.STATICCALL( + gas=Op.GAS, + address=factory_address, + args_offset=0, + args_size=0, + ret_offset=96, + ret_size=64, ) - ) - # Main attack loop - iterate through all deployed contracts - + While( - body=( - # Hash CREATE2 address - create2_preimage.address_op() - # The address is now on the stack - + Op.DUP1 # Duplicate for later operations - + benchmark_ops # Execute operations in specified order - # Increment salt for next iteration - + create2_preimage.increment_salt_op() - ), - # Continue while counter > 0 - condition=Op.DUP1 - + Op.PUSH1(1) - + Op.SWAP1 - + Op.SUB - + Op.DUP1 + # Check if call succeeded + Op.ISZERO - + Op.ISZERO, + + Op.PUSH2(0x1000) # Jump to error handler if failed (far jump) + + Op.JUMPI + # Load results from memory + # Memory[128:160] = init_code_hash + + Op.MLOAD(128) # Load init_code_hash + # Setup memory for CREATE2 address generation + # Memory layout at 0: 0xFF + factory_addr(20) + salt(32) + hash(32) + + Op.MSTORE( + 0, factory_address + ) # Store factory address at memory position 0 + + Op.MSTORE8(11, 0xFF) # Store 0xFF prefix at byte 11 + + Op.MSTORE(32, salt_offset) # Store starting salt at position 32 + # Stack now has: [init_code_hash] + + Op.PUSH1(64) # Push memory position + + Op.MSTORE # Store init_code_hash at memory[64] + # Push our iteration count onto stack + + Op.PUSH4(tx_contracts) + # Main attack loop - iterate through contracts for this tx + + While( 
+ body=( + # Generate CREATE2 address + Op.SHA3(11, 85) # CREATE2 addr from memory[11:96] + # The address is now on the stack + + Op.DUP1 # Duplicate for later operations + + benchmark_ops # Execute operations in specified order + # Increment salt for next iteration + + Op.MSTORE( + 32, Op.ADD(Op.MLOAD(32), 1) + ) # Increment and store salt + ), + # Continue while counter > 0 + condition=Op.DUP1 + + Op.PUSH1(1) + + Op.SWAP1 + + Op.SUB + + Op.DUP1 + + Op.ISZERO + + Op.ISZERO, + ) + + Op.POP # Clean up counter ) - + Op.POP # Clean up counter - ) - # Deploy attack contract - attack_address = pre.deploy_contract(code=attack_code) + # Deploy attack contract for this tx + attack_address = pre.deploy_contract(code=attack_code) - # Create multiple attack transactions to fill the block - sender = pre.fund_eoa() - attack_txs = [ - Transaction( - to=attack_address, - gas_limit=tx_gas_limit, - sender=sender, + # Calculate gas for this transaction + this_tx_gas = min( + tx_gas_limit, gas_benchmark_value - (i * tx_gas_limit) ) - for _ in range(num_txs) - ] - # Post-state - post = { - attack_address: Account(storage={}), - } + txs.append( + Transaction( + to=attack_address, + gas_limit=this_tx_gas, + sender=pre.fund_eoa(), + ) + ) + + # Add to post-state + post[attack_address] = Account(storage={}) + + # Update salt offset for next transaction + salt_offset += tx_contracts blockchain_test( pre=pre, - blocks=[Block(txs=attack_txs)], + blocks=[Block(txs=txs)], post=post, ) @@ -453,67 +451,44 @@ def test_bloatnet_balance_extcodehash( # Calculate gas costs intrinsic_gas = fork.transaction_intrinsic_cost_calculator()(calldata=b"") - # Setup overhead (before loop): STATICCALL + result handling + memory setup - setup_overhead = ( - gas_costs.G_COLD_ACCOUNT_ACCESS # STATICCALL to factory (2600) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_VERY_LOW # PUSH2 (3) - + gas_costs.G_HIGH # JUMPI (10) - + gas_costs.G_VERY_LOW * 2 # MLOAD × 2 for factory results (3 * 2) - + gas_costs.G_VERY_LOW * 3 # MSTORE × 3 for memory setup (3 * 3) - + gas_costs.G_VERY_LOW # MSTORE8 for 0xFF prefix (3) - + gas_costs.G_VERY_LOW # PUSH1 for memory position (3) - ) - - # Cleanup overhead (after loop) - cleanup_overhead = gas_costs.G_BASE # POP counter (2) - - # While loop condition overhead per iteration - loop_condition_overhead = ( - gas_costs.G_VERY_LOW # DUP1 (3) - + gas_costs.G_VERY_LOW # PUSH1 (3) - + gas_costs.G_VERY_LOW # SWAP1 (3) - + gas_costs.G_VERY_LOW # SUB (3) - + gas_costs.G_VERY_LOW # DUP1 (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_HIGH # JUMPI (10) - ) - # Cost per contract access with CREATE2 address generation cost_per_contract = ( gas_costs.G_KECCAK_256 # SHA3 static cost for address generation (30) - + gas_costs.G_KECCAK_256_WORD * 3 # SHA3 dynamic (85 bytes = 3 words) + + gas_costs.G_KECCAK_256_WORD + * 3 # SHA3 dynamic cost (85 bytes = 3 words * 6) + gas_costs.G_COLD_ACCOUNT_ACCESS # Cold access (2600) + gas_costs.G_BASE # POP first result (2) + gas_costs.G_WARM_ACCOUNT_ACCESS # Warm access (100) + gas_costs.G_BASE # POP second result (2) - + gas_costs.G_VERY_LOW # DUP1 before first op (3) - + gas_costs.G_VERY_LOW # MLOAD for salt (3) + + gas_costs.G_BASE # DUP1 before first op (3) + + gas_costs.G_VERY_LOW * 4 # PUSH1 operations (4 * 3) + + gas_costs.G_LOW # MLOAD for salt (3) + gas_costs.G_VERY_LOW # ADD for increment (3) - + gas_costs.G_VERY_LOW # MSTORE salt back (3) - + loop_condition_overhead # While loop condition + + gas_costs.G_LOW # MSTORE salt 
back (3) + + 10 # While loop overhead ) - # Calculate how many transactions we need to fill the block - num_txs = max(1, gas_benchmark_value // tx_gas_limit) - - # Calculate how many contracts to access per transaction - total_overhead = setup_overhead + cleanup_overhead - available_gas_per_tx = tx_gas_limit - intrinsic_gas - total_overhead - contracts_per_tx = int(available_gas_per_tx // cost_per_contract) - # Deploy factory using stub contract factory_address = pre.deploy_contract( code=Bytecode(), stub="bloatnet_factory", ) + # Calculate number of transactions needed (EIP-7825 compliance) + num_txs = max(1, math.ceil(gas_benchmark_value / tx_gas_limit)) + + # Calculate how many contracts to access based on available gas + total_available_gas = ( + gas_benchmark_value - (intrinsic_gas * num_txs) - 1000 + ) + total_contracts = int(total_available_gas // cost_per_contract) + contracts_per_tx = total_contracts // num_txs + # Log test requirements print( - f"Tx gas limit: {tx_gas_limit / 1_000_000:.1f}M gas. " - f"Number of txs: {num_txs}. " - f"Contracts per tx: {contracts_per_tx}. " + f"Test needs {total_contracts} contracts for " + f"{gas_benchmark_value / 1_000_000:.1f}M gas " + f"across {num_txs} transaction(s). " f"Factory storage will be checked during execution." ) @@ -526,73 +501,90 @@ def test_bloatnet_balance_extcodehash( else (extcodehash_op + balance_op) ) - # Build attack contract that reads config from factory and performs attack - attack_code = ( - # Call getConfig() on factory to get num_deployed and init_code_hash - Op.STATICCALL( - gas=Op.GAS, - address=factory_address, - args_offset=0, - args_size=0, - ret_offset=96, - ret_size=64, + # Build transactions + txs = [] + post = {} + contracts_remaining = total_contracts + salt_offset = 0 + + for i in range(num_txs): + # Last tx gets remaining contracts + tx_contracts = ( + contracts_per_tx if i < num_txs - 1 else contracts_remaining ) - # Check if call succeeded - + Op.ISZERO - + Op.PUSH2(0x1000) # Jump to error handler if failed - + Op.JUMPI - # Load results from memory - + Op.MLOAD(96) # Load num_deployed_contracts to stack - + ( - create2_preimage := Create2PreimageLayout( - factory_address=factory_address, - salt=0, - init_code_hash=Op.MLOAD(128), + contracts_remaining -= tx_contracts + + # Build attack contract that reads config from factory + attack_code = ( + # Call getConfig() on factory to get config + Op.STATICCALL( + gas=Op.GAS, + address=factory_address, + args_offset=0, + args_size=0, + ret_offset=96, + ret_size=64, ) - ) - # Main attack loop - + While( - body=( - # Hash CREATE2 address - create2_preimage.address_op() - + Op.DUP1 # Duplicate for second operation - + benchmark_ops # Execute operations in specified order - # Increment salt - + create2_preimage.increment_salt_op() - ), - condition=Op.DUP1 - + Op.PUSH1(1) - + Op.SWAP1 - + Op.SUB - + Op.DUP1 + # Check if call succeeded + Op.ISZERO - + Op.ISZERO, + + Op.PUSH2(0x1000) # Jump to error handler if failed + + Op.JUMPI + # Load results from memory + + Op.MLOAD(128) # Load init_code_hash + # Setup memory for CREATE2 address generation + + Op.MSTORE(0, factory_address) + + Op.MSTORE8(11, 0xFF) + + Op.MSTORE(32, salt_offset) # Starting salt for this tx + + Op.PUSH1(64) + + Op.MSTORE # Store init_code_hash + # Push our iteration count onto stack + + Op.PUSH4(tx_contracts) + # Main attack loop + + While( + body=( + # Generate CREATE2 address + Op.SHA3(11, 85) + + Op.DUP1 # Duplicate for second operation + + benchmark_ops # Execute operations in specified order + 
# Increment salt + + Op.MSTORE(32, Op.ADD(Op.MLOAD(32), 1)) + ), + condition=Op.DUP1 + + Op.PUSH1(1) + + Op.SWAP1 + + Op.SUB + + Op.DUP1 + + Op.ISZERO + + Op.ISZERO, + ) + + Op.POP # Clean up counter ) - + Op.POP # Clean up counter - ) - # Deploy attack contract - attack_address = pre.deploy_contract(code=attack_code) + # Deploy attack contract for this tx + attack_address = pre.deploy_contract(code=attack_code) - # Create multiple attack transactions to fill the block - sender = pre.fund_eoa() - attack_txs = [ - Transaction( - to=attack_address, - gas_limit=tx_gas_limit, - sender=sender, + # Calculate gas for this transaction + this_tx_gas = min( + tx_gas_limit, gas_benchmark_value - (i * tx_gas_limit) ) - for _ in range(num_txs) - ] - # Post-state - post = { - attack_address: Account(storage={}), - } + txs.append( + Transaction( + to=attack_address, + gas_limit=this_tx_gas, + sender=pre.fund_eoa(), + ) + ) + + # Add to post-state + post[attack_address] = Account(storage={}) + + # Update salt offset for next transaction + salt_offset += tx_contracts blockchain_test( pre=pre, - blocks=[Block(txs=attack_txs)], + blocks=[Block(txs=txs)], post=post, ) @@ -601,9 +593,21 @@ def test_bloatnet_balance_extcodehash( BALANCEOF_SELECTOR = 0x70A08231 # balanceOf(address) APPROVE_SELECTOR = 0x095EA7B3 # approve(address,uint256) +# Load token names from stubs.json for test parametrization +_STUBS_FILE = Path(__file__).parent / "stubs.json" +with open(_STUBS_FILE) as f: + _STUBS = json.load(f) + +# Extract unique token names for mixed sload/sstore tests +MIXED_TOKENS = [ + k.replace("test_mixed_sload_sstore_", "") + for k in _STUBS.keys() + if k.startswith("test_mixed_sload_sstore_") +] + @pytest.mark.valid_from("Prague") -@pytest.mark.parametrize("num_contracts", [1, 5, 10, 20, 100]) +@pytest.mark.parametrize("token_name", MIXED_TOKENS) @pytest.mark.parametrize( "sload_percent,sstore_percent", [ @@ -620,74 +624,26 @@ def test_mixed_sload_sstore( fork: Fork, gas_benchmark_value: int, tx_gas_limit: int, - address_stubs: AddressStubs | None, - num_contracts: int, + token_name: str, sload_percent: int, sstore_percent: int, - request: pytest.FixtureRequest, ) -> None: """ BloatNet mixed SLOAD/SSTORE benchmark with configurable operation ratios. This test: - 1. Filters stubs matching test name prefix - (e.g., test_mixed_sload_sstore_*) - 2. Uses first N contracts based on num_contracts parameter - 3. Divides gas budget evenly across all selected contracts - 4. For each contract, divides gas into SLOAD and SSTORE portions by - percentage - 5. Executes balanceOf (SLOAD) and approve (SSTORE) calls per the ratio - 6. Stresses clients with combined read/write operations on large - contracts + 1. Uses a single ERC20 contract specified by token_name parameter + 2. Allocates full gas budget to that contract + 3. Divides gas into SLOAD and SSTORE portions by percentage + 4. Executes balanceOf (SLOAD) and approve (SSTORE) calls per the ratio + 5. Stresses clients with combined read/write operations on large contracts """ - # Extract test function name for stub filtering - # Remove parametrization suffix - test_name = request.node.name.split("[")[0] - - # Filter stubs that match the test name prefix - matching_stubs = [] - if address_stubs is not None: - matching_stubs = [ - stub_name - for stub_name in address_stubs.root.keys() - if stub_name.startswith(test_name) - ] - - # Validate we have enough stubs - if len(matching_stubs) < num_contracts: - pytest.fail( - f"Not enough matching stubs for test '{test_name}'. 
" - f"Required: {num_contracts}, Found: {len(matching_stubs)}. " - f"Matching stubs: {matching_stubs}" - ) - - # Select first N stubs - selected_stubs = matching_stubs[:num_contracts] + stub_name = f"test_mixed_sload_sstore_{token_name}" gas_costs = fork.gas_costs() # Calculate gas costs intrinsic_gas = fork.transaction_intrinsic_cost_calculator()(calldata=b"") - # Per-contract fixed overhead (setup + teardown for each contract's loops) - # Each contract has two loops: SLOAD (balanceOf) and SSTORE (approve) - overhead_per_contract = ( - # SLOAD loop setup/teardown - gas_costs.G_VERY_LOW # MSTORE to initialize counter (3) - + gas_costs.G_JUMPDEST # JUMPDEST at loop start (1) - + gas_costs.G_VERY_LOW # MLOAD for While condition (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_HIGH # JUMPI (10) - # SSTORE loop setup/teardown - + gas_costs.G_VERY_LOW # MSTORE selector (3) - + gas_costs.G_VERY_LOW # MSTORE to initialize counter (3) - + gas_costs.G_JUMPDEST # JUMPDEST at loop start (1) - + gas_costs.G_VERY_LOW # MLOAD for While condition (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_HIGH # JUMPI (10) - ) - # Fixed overhead for SLOAD loop sload_loop_overhead = ( # Attack contract loop overhead @@ -695,16 +651,16 @@ def test_mixed_sload_sstore( + gas_costs.G_VERY_LOW * 2 # MSTORE selector (3*2) + gas_costs.G_VERY_LOW * 3 # MLOAD + MSTORE address (3*3) + gas_costs.G_BASE # POP (2) - + gas_costs.G_VERY_LOW * 3 # SUB + MLOAD + MSTORE decrement (3*3) - + gas_costs.G_VERY_LOW * 2 # ISZERO * 2 for loop condition (3*2) - + gas_costs.G_HIGH # JUMPI (10) + + gas_costs.G_BASE * 3 # SUB + MLOAD + MSTORE counter decrement + + gas_costs.G_BASE * 2 # ISZERO * 2 for loop condition (2*2) + + gas_costs.G_MID # JUMPI (8) ) # ERC20 balanceOf internal gas sload_erc20_internal = ( gas_costs.G_VERY_LOW # PUSH4 selector (3) + gas_costs.G_BASE # EQ selector match (2) - + gas_costs.G_HIGH # JUMPI to function (10) + + gas_costs.G_MID # JUMPI to function (8) + gas_costs.G_JUMPDEST # JUMPDEST at function start (1) + gas_costs.G_VERY_LOW * 2 # CALLDATALOAD arg (3*2) + gas_costs.G_KECCAK_256 # keccak256 static (30) @@ -717,19 +673,19 @@ def test_mixed_sload_sstore( sstore_loop_overhead = ( # Attack contract loop body operations gas_costs.G_VERY_LOW # MSTORE selector at memory[32] (3) - + gas_costs.G_VERY_LOW # MLOAD counter (3) + + gas_costs.G_LOW # MLOAD counter (5) + gas_costs.G_VERY_LOW # MSTORE spender at memory[64] (3) + gas_costs.G_BASE # POP call result (2) # Counter decrement - + gas_costs.G_VERY_LOW # MLOAD counter (3) + + gas_costs.G_LOW # MLOAD counter (5) + gas_costs.G_VERY_LOW # PUSH1 1 (3) + gas_costs.G_VERY_LOW # SUB (3) + gas_costs.G_VERY_LOW # MSTORE counter back (3) # While loop condition check - + gas_costs.G_VERY_LOW # MLOAD counter (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_HIGH # JUMPI back to loop start (10) + + gas_costs.G_LOW # MLOAD counter (5) + + gas_costs.G_BASE # ISZERO (2) + + gas_costs.G_BASE # ISZERO (2) + + gas_costs.G_MID # JUMPI back to loop start (8) ) # ERC20 approve internal gas @@ -737,7 +693,7 @@ def test_mixed_sload_sstore( sstore_erc20_internal = ( gas_costs.G_VERY_LOW # PUSH4 selector (3) + gas_costs.G_BASE # EQ selector match (2) - + gas_costs.G_HIGH # JUMPI to function (10) + + gas_costs.G_MID # JUMPI to function (8) + gas_costs.G_JUMPDEST # JUMPDEST at function start (1) + gas_costs.G_VERY_LOW # CALLDATALOAD spender (3) + 
gas_costs.G_VERY_LOW # CALLDATALOAD amount (3) @@ -752,20 +708,6 @@ def test_mixed_sload_sstore( + gas_costs.G_VERY_LOW # PUSH1 0 for return offset (3) ) - # Calculate how many transactions we need to fill the block - num_txs = max(1, gas_benchmark_value // tx_gas_limit) - - # Calculate gas budget per contract per transaction - total_overhead_per_tx = intrinsic_gas + ( - overhead_per_contract * num_contracts - ) - available_gas_per_tx = tx_gas_limit - total_overhead_per_tx - gas_per_contract_per_tx = available_gas_per_tx // num_contracts - - # For each contract, split gas by percentage - sload_gas_per_contract = (gas_per_contract_per_tx * sload_percent) // 100 - sstore_gas_per_contract = (gas_per_contract_per_tx * sstore_percent) // 100 - # Account for cold/warm transitions in CALL costs # First SLOAD call is COLD (2600), rest are WARM (100) sload_warm_cost = ( @@ -776,9 +718,6 @@ def test_mixed_sload_sstore( cold_warm_diff = ( gas_costs.G_COLD_ACCOUNT_ACCESS - gas_costs.G_WARM_ACCOUNT_ACCESS ) - sload_calls_per_contract = int( - (sload_gas_per_contract - cold_warm_diff) // sload_warm_cost - ) # First SSTORE call is COLD (2600), rest are WARM (100) sstore_warm_cost = ( @@ -786,49 +725,64 @@ def test_mixed_sload_sstore( + gas_costs.G_WARM_ACCOUNT_ACCESS + sstore_erc20_internal ) - sstore_calls_per_contract = int( - (sstore_gas_per_contract - cold_warm_diff) // sstore_warm_cost + + # Deploy ERC20 contract using stub + erc20_address = pre.deploy_contract( + code=Bytecode(), + stub=stub_name, ) - # Deploy selected ERC20 contracts using stubs - erc20_addresses = [] - for stub_name in selected_stubs: - addr = pre.deploy_contract( - code=Bytecode(), - stub=stub_name, - ) - erc20_addresses.append(addr) + # Calculate number of transactions needed (EIP-7825 compliance) + num_txs = max(1, math.ceil(gas_benchmark_value / tx_gas_limit)) + + # Calculate total available gas and split by percentage + total_available_gas = gas_benchmark_value - (intrinsic_gas * num_txs) + sload_gas = (total_available_gas * sload_percent) // 100 + sstore_gas = (total_available_gas * sstore_percent) // 100 + + # Calculate total calls for each operation type + total_sload_calls = int((sload_gas - cold_warm_diff) // sload_warm_cost) + total_sstore_calls = int((sstore_gas - cold_warm_diff) // sstore_warm_cost) + + # Distribute calls across transactions + sload_calls_per_tx = total_sload_calls // num_txs + sstore_calls_per_tx = total_sstore_calls // num_txs # Log test requirements print( - f"Total gas budget: {gas_benchmark_value / 1_000_000:.1f}M gas. " - f"Tx gas limit: {tx_gas_limit / 1_000_000:.1f}M gas. " - f"Number of txs: {num_txs}. " - f"~{gas_per_contract_per_tx / 1_000_000:.2f}M gas per contract per tx " + f"Token: {token_name}, " + f"Total gas budget: {gas_benchmark_value / 1_000_000:.1f}M gas " f"({sload_percent}% SLOAD, {sstore_percent}% SSTORE). " - f"Per contract per tx: {sload_calls_per_contract} balanceOf calls, " - f"{sstore_calls_per_contract} approve calls." + f"{total_sload_calls} balanceOf, {total_sstore_calls} approve " + f"across {num_txs} tx(s)." 
) - # Build attack code that loops through each contract - attack_code: Bytecode = ( - Op.JUMPDEST # Entry point - # Store selector once for all contracts - + Op.MSTORE(offset=0, value=BALANCEOF_SELECTOR) - ) + # Build transactions + txs = [] + post = {} + sload_remaining = total_sload_calls + sstore_remaining = total_sstore_calls - for erc20_address in erc20_addresses: - # For each contract, execute SLOAD operations (balanceOf) - attack_code += ( - # Initialize counter in memory[32] = number of balanceOf calls - Op.MSTORE(offset=32, value=sload_calls_per_contract) - # Loop for balanceOf calls + for i in range(num_txs): + # Last tx gets remaining calls + tx_sload_calls = ( + sload_calls_per_tx if i < num_txs - 1 else sload_remaining + ) + tx_sstore_calls = ( + sstore_calls_per_tx if i < num_txs - 1 else sstore_remaining + ) + sload_remaining -= tx_sload_calls + sstore_remaining -= tx_sstore_calls + + # Build attack code for this transaction + attack_code: Bytecode = ( + Op.JUMPDEST # Entry point + + Op.MSTORE(offset=0, value=BALANCEOF_SELECTOR) + # SLOAD operations (balanceOf) + + Op.MSTORE(offset=32, value=tx_sload_calls) + While( condition=Op.MLOAD(32) + Op.ISZERO + Op.ISZERO, body=( - # Call balanceOf(address) on ERC20 contract - # args_offset=28 reads: selector from MEM[28:32] + address - # from MEM[32:64] Op.CALL( address=erc20_address, value=0, @@ -837,32 +791,17 @@ def test_mixed_sload_sstore( ret_offset=0, ret_size=0, ) - + Op.POP # Discard CALL success status - # Decrement counter + + Op.POP + Op.MSTORE(offset=32, value=Op.SUB(Op.MLOAD(32), 1)) ), ) - ) - - # For each contract, execute SSTORE operations (approve) - # Reuse the same memory layout as balanceOf - attack_code += ( - # Store approve selector at memory[0] (reusing same slot) - Op.MSTORE(offset=0, value=APPROVE_SELECTOR) - # Initialize counter in memory[32] = number of approve calls - # (reusing same slot) - + Op.MSTORE(offset=32, value=sstore_calls_per_contract) - # Loop for approve calls + # SSTORE operations (approve) + + Op.MSTORE(offset=0, value=APPROVE_SELECTOR) + + Op.MSTORE(offset=32, value=tx_sstore_calls) + While( condition=Op.MLOAD(32) + Op.ISZERO + Op.ISZERO, body=( - # Store spender at memory[64] (counter as spender/amount) Op.MSTORE(offset=64, value=Op.MLOAD(32)) - # Call approve(spender, amount) on ERC20 contract - # args_offset=28 reads: selector from MEM[28:32] + - # spender from MEM[32:64] + amount from MEM[64:96] - # Note: counter at MEM[32:64] is reused as spender, - # and value at MEM[64:96] serves as the amount + Op.CALL( address=erc20_address, value=0, @@ -871,34 +810,33 @@ def test_mixed_sload_sstore( ret_offset=0, ret_size=0, ) - + Op.POP # Discard CALL success status - # Decrement counter + + Op.POP + Op.MSTORE(offset=32, value=Op.SUB(Op.MLOAD(32), 1)) ), ) ) - # Deploy attack contract - attack_address = pre.deploy_contract(code=attack_code) + # Deploy attack contract for this tx + attack_address = pre.deploy_contract(code=attack_code) - # Create multiple attack transactions to fill the block - sender = pre.fund_eoa() - attack_txs = [ - Transaction( - to=attack_address, - gas_limit=tx_gas_limit, - sender=sender, + # Calculate gas for this transaction + this_tx_gas = min( + tx_gas_limit, gas_benchmark_value - (i * tx_gas_limit) + ) + + txs.append( + Transaction( + to=attack_address, + gas_limit=this_tx_gas, + sender=pre.fund_eoa(), + ) ) - for _ in range(num_txs) - ] - # Post-state - post = { - attack_address: Account(storage={}), - } + # Add to post-state + post[attack_address] = 
Account(storage={}) blockchain_test( pre=pre, - blocks=[Block(txs=attack_txs)], + blocks=[Block(txs=txs)], post=post, ) diff --git a/tests/benchmark/stateful/bloatnet/test_single_opcode.py b/tests/benchmark/stateful/bloatnet/test_single_opcode.py index 04dc629a80..664ba48db0 100644 --- a/tests/benchmark/stateful/bloatnet/test_single_opcode.py +++ b/tests/benchmark/stateful/bloatnet/test_single_opcode.py @@ -7,6 +7,10 @@ to benchmark specific state-handling bottlenecks. """ +import json +import math +from pathlib import Path + import pytest from execution_testing import ( Account, @@ -19,9 +23,6 @@ Transaction, While, ) -from execution_testing.cli.pytest_commands.plugins.execute.pre_alloc import ( - AddressStubs, -) REFERENCE_SPEC_GIT_PATH = "DUMMY/bloatnet.md" REFERENCE_SPEC_VERSION = "1.0" @@ -31,6 +32,23 @@ APPROVE_SELECTOR = 0x095EA7B3 # approve(address,uint256) ALLOWANCE_SELECTOR = 0xDD62ED3E # allowance(address,address) +# Load token names from stubs.json for test parametrization +_STUBS_FILE = Path(__file__).parent / "stubs.json" +with open(_STUBS_FILE) as f: + _STUBS = json.load(f) + +# Extract unique token names for each test type +SLOAD_TOKENS = [ + k.replace("test_sload_empty_erc20_balanceof_", "") + for k in _STUBS.keys() + if k.startswith("test_sload_empty_erc20_balanceof_") +] +SSTORE_TOKENS = [ + k.replace("test_sstore_erc20_approve_", "") + for k in _STUBS.keys() + if k.startswith("test_sstore_erc20_approve_") +] + # SLOAD BENCHMARK ARCHITECTURE: # @@ -78,68 +96,33 @@ @pytest.mark.valid_from("Prague") -@pytest.mark.parametrize("num_contracts", [1, 5, 10, 20, 100]) +@pytest.mark.parametrize("token_name", SLOAD_TOKENS) def test_sload_empty_erc20_balanceof( blockchain_test: BlockchainTestFiller, pre: Alloc, fork: Fork, gas_benchmark_value: int, tx_gas_limit: int, - address_stubs: AddressStubs | None, - num_contracts: int, - request: pytest.FixtureRequest, + token_name: str, ) -> None: """ BloatNet SLOAD benchmark using ERC20 balanceOf queries on random addresses. This test: - 1. Filters stubs matching test name prefix - (e.g., test_sload_empty_erc20_balanceof_*) - 2. Uses first N contracts based on num_contracts parameter - 3. Splits gas budget evenly across the selected contracts - 4. Queries balanceOf() incrementally starting by 0 and increasing by 1 + 1. Uses a single ERC20 contract specified by token_name parameter + 2. Allocates full gas budget to that contract + 3. Queries balanceOf() incrementally starting by 0 and increasing by 1 (thus forcing SLOADs to non-existing addresses) + 4. Splits into multiple transactions if gas_benchmark_value > tx_gas_limit + (EIP-7825 compliance) """ - # Extract test function name for stub filtering - # Remove parametrization suffix - test_name = request.node.name.split("[")[0] - - # Filter stubs that match the test name prefix - matching_stubs = [] - if address_stubs is not None: - matching_stubs = [ - stub_name - for stub_name in address_stubs.root.keys() - if stub_name.startswith(test_name) - ] - - # Validate we have enough stubs - if len(matching_stubs) < num_contracts: - pytest.fail( - f"Not enough matching stubs for test '{test_name}'. " - f"Required: {num_contracts}, Found: {len(matching_stubs)}. 
" - f"Matching stubs: {matching_stubs}" - ) - - # Select first N stubs - selected_stubs = matching_stubs[:num_contracts] + stub_name = f"test_sload_empty_erc20_balanceof_{token_name}" gas_costs = fork.gas_costs() # Calculate gas costs intrinsic_gas = fork.transaction_intrinsic_cost_calculator()(calldata=b"") - # Per-contract fixed overhead (setup + teardown for each contract's loop) - overhead_per_contract = ( - gas_costs.G_VERY_LOW # MSTORE to initialize counter (3) - + gas_costs.G_JUMPDEST # JUMPDEST at loop start (1) - + gas_costs.G_VERY_LOW # MLOAD for While condition check (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_HIGH # JUMPI (10) - + gas_costs.G_BASE # POP to clean up counter at end (2) - ) - # Fixed overhead per iteration (loop mechanics, independent of warm/cold) loop_overhead = ( # Attack contract loop overhead @@ -147,16 +130,16 @@ def test_sload_empty_erc20_balanceof( + gas_costs.G_VERY_LOW * 2 # MSTORE selector (3*2) + gas_costs.G_VERY_LOW * 3 # MLOAD + MSTORE address (3*3) + gas_costs.G_BASE # POP (2) - + gas_costs.G_VERY_LOW * 3 # SUB + MLOAD + MSTORE decrement (3*3) - + gas_costs.G_VERY_LOW * 2 # ISZERO * 2 for loop condition (3*2) - + gas_costs.G_HIGH # JUMPI (10) + + gas_costs.G_BASE * 3 # SUB + MLOAD + MSTORE counter decrement + + gas_costs.G_BASE * 2 # ISZERO * 2 for loop condition (2*2) + + gas_costs.G_MID # JUMPI (8) ) # ERC20 internal gas (same for all calls) erc20_internal_gas = ( gas_costs.G_VERY_LOW # PUSH4 selector (3) + gas_costs.G_BASE # EQ selector match (2) - + gas_costs.G_HIGH # JUMPI to function (10) + + gas_costs.G_MID # JUMPI to function (8) + gas_costs.G_JUMPDEST # JUMPDEST at function start (1) + gas_costs.G_VERY_LOW * 2 # CALLDATALOAD arg (3*2) + gas_costs.G_KECCAK_256 # keccak256 static (30) @@ -166,7 +149,7 @@ def test_sload_empty_erc20_balanceof( # RETURN costs 0 gas ) - # For each contract: first call is COLD (2600), subsequent are WARM (100) + # First call is COLD (2600), subsequent are WARM (100) warm_call_cost = ( loop_overhead + gas_costs.G_WARM_ACCOUNT_ACCESS + erc20_internal_gas ) @@ -174,65 +157,47 @@ def test_sload_empty_erc20_balanceof( gas_costs.G_COLD_ACCOUNT_ACCESS - gas_costs.G_WARM_ACCOUNT_ACCESS ) - # Calculate how many transactions we need to fill the block - num_txs = max(1, gas_benchmark_value // tx_gas_limit) - - # Calculate gas budget per contract per transaction - total_overhead_per_tx = intrinsic_gas + ( - overhead_per_contract * num_contracts - ) - available_gas_per_tx = tx_gas_limit - total_overhead_per_tx - gas_per_contract_per_tx = available_gas_per_tx // num_contracts - - # Solve for calls_per_contract per tx: - # gas_per_contract_per_tx = cold_call + (calls-1) * warm_call - # Simplifies to: gas = cold_warm_diff + calls * warm_call_cost - calls_per_contract = int( - (gas_per_contract_per_tx - cold_warm_diff) // warm_call_cost + # Deploy ERC20 contract using stub + # In execute mode: stub points to already-deployed contract on chain + # In fill mode: empty bytecode is deployed as placeholder + erc20_address = pre.deploy_contract( + code=Bytecode(), + stub=stub_name, ) - # Deploy selected ERC20 contracts using stubs - # In execute mode: stubs point to already-deployed contracts on chain - # In fill mode: empty bytecode is deployed as placeholder - erc20_addresses = [] - for stub_name in selected_stubs: - addr = pre.deploy_contract( - # Required parameter, ignored for stubs in execute mode - code=Bytecode(), - stub=stub_name, - ) - erc20_addresses.append(addr) + # 
Calculate number of transactions needed (EIP-7825 compliance) + num_txs = max(1, math.ceil(gas_benchmark_value / tx_gas_limit)) + + # Calculate total calls based on full gas budget + total_available_gas = gas_benchmark_value - (intrinsic_gas * num_txs) + total_calls = int((total_available_gas - cold_warm_diff) // warm_call_cost) + calls_per_tx = total_calls // num_txs # Log test requirements print( - f"Total gas budget: {gas_benchmark_value / 1_000_000:.1f}M gas. " - f"Tx gas limit: {tx_gas_limit / 1_000_000:.1f}M gas. " - f"Number of txs: {num_txs}. " - f"Overhead per contract: {overhead_per_contract}. " - f"~{gas_per_contract_per_tx / 1_000_000:.2f}M gas/contract/tx, " - f"{calls_per_contract} balanceOf calls/contract/tx." - ) - - # Build attack code that loops through each contract - attack_code: Bytecode = ( - Op.JUMPDEST # Entry point - # Store selector once for all contracts - + Op.MSTORE(offset=0, value=BALANCEOF_SELECTOR) + f"Token: {token_name}, " + f"Total gas budget: {gas_benchmark_value / 1_000_000:.1f}M gas, " + f"{total_calls} balanceOf calls across {num_txs} transaction(s)." ) - for erc20_address in erc20_addresses: - # For each contract, initialize counter and loop - attack_code += ( - # Initialize counter in memory[32] = number of calls - Op.MSTORE(offset=32, value=calls_per_contract) - # Loop for this specific contract + # Build transactions + txs = [] + post = {} + calls_remaining = total_calls + + for i in range(num_txs): + # Last tx gets remaining calls + tx_calls = calls_per_tx if i < num_txs - 1 else calls_remaining + calls_remaining -= tx_calls + + # Build attack code for this transaction + attack_code: Bytecode = ( + Op.JUMPDEST # Entry point + + Op.MSTORE(offset=0, value=BALANCEOF_SELECTOR) + + Op.MSTORE(offset=32, value=tx_calls) + While( - # Continue while counter > 0 condition=Op.MLOAD(32) + Op.ISZERO + Op.ISZERO, body=( - # Call balanceOf(address) on ERC20 contract - # args_offset=28 reads: selector from MEM[28:32] + address - # from MEM[32:64] Op.CALL( address=erc20_address, value=0, @@ -241,120 +206,82 @@ def test_sload_empty_erc20_balanceof( ret_offset=0, ret_size=0, ) - + Op.POP # Discard CALL success status - # Decrement counter: counter - 1 + + Op.POP + Op.MSTORE(offset=32, value=Op.SUB(Op.MLOAD(32), 1)) ), ) ) - # Deploy attack contract - attack_address = pre.deploy_contract(code=attack_code) + # Deploy attack contract for this tx + attack_address = pre.deploy_contract(code=attack_code) + + # Calculate gas for this transaction + this_tx_gas = min( + tx_gas_limit, gas_benchmark_value - (i * tx_gas_limit) + ) - # Create multiple attack transactions to fill the block - sender = pre.fund_eoa() - attack_txs = [ - Transaction( - to=attack_address, - gas_limit=tx_gas_limit, - sender=sender, + txs.append( + Transaction( + to=attack_address, + gas_limit=this_tx_gas, + sender=pre.fund_eoa(), + ) ) - for _ in range(num_txs) - ] - # Post-state - post = { - attack_address: Account(storage={}), - } + # Add to post-state + post[attack_address] = Account(storage={}) blockchain_test( pre=pre, - blocks=[Block(txs=attack_txs)], + blocks=[Block(txs=txs)], post=post, ) @pytest.mark.valid_from("Prague") -@pytest.mark.parametrize("num_contracts", [1, 5, 10, 20, 100]) +@pytest.mark.parametrize("token_name", SSTORE_TOKENS) def test_sstore_erc20_approve( blockchain_test: BlockchainTestFiller, pre: Alloc, fork: Fork, gas_benchmark_value: int, tx_gas_limit: int, - address_stubs: AddressStubs | None, - num_contracts: int, - request: pytest.FixtureRequest, + token_name: str, ) -> 
None: """ BloatNet SSTORE benchmark using ERC20 approve to write to storage. This test: - 1. Filters stubs matching test name prefix - (e.g., test_sstore_erc20_approve_*) - 2. Uses first N contracts based on num_contracts parameter - 3. Splits gas budget evenly across the selected contracts - 4. Calls approve(spender, amount) incrementally (counter as spender) - 5. Forces SSTOREs to allowance mapping storage slots + 1. Uses a single ERC20 contract specified by token_name parameter + 2. Allocates full gas budget to that contract + 3. Calls approve(spender, amount) incrementally (counter as spender) + 4. Forces SSTOREs to allowance mapping storage slots + 5. Splits into multiple transactions if gas_benchmark_value > tx_gas_limit + (EIP-7825 compliance) """ - # Extract test function name for stub filtering - # Remove parametrization suffix - test_name = request.node.name.split("[")[0] - - # Filter stubs that match the test name prefix - matching_stubs = [] - if address_stubs is not None: - matching_stubs = [ - stub_name - for stub_name in address_stubs.root.keys() - if stub_name.startswith(test_name) - ] - - # Validate we have enough stubs - if len(matching_stubs) < num_contracts: - pytest.fail( - f"Not enough matching stubs for test '{test_name}'. " - f"Required: {num_contracts}, Found: {len(matching_stubs)}. " - f"Matching stubs: {matching_stubs}" - ) - - # Select first N stubs - selected_stubs = matching_stubs[:num_contracts] + stub_name = f"test_sstore_erc20_approve_{token_name}" gas_costs = fork.gas_costs() # Calculate gas costs intrinsic_gas = fork.transaction_intrinsic_cost_calculator()(calldata=b"") - # Per-contract fixed overhead (setup + teardown) - memory_expansion_cost = 15 # Memory expansion to 160 bytes (5 words) - overhead_per_contract = ( - gas_costs.G_VERY_LOW # MSTORE to initialize counter (3) - + memory_expansion_cost # Memory expansion (15) - + gas_costs.G_JUMPDEST # JUMPDEST at loop start (1) - + gas_costs.G_VERY_LOW # MLOAD for While condition check (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_HIGH # JUMPI (10) - + gas_costs.G_BASE # POP to clean up counter at end (2) - ) # = 40 - # Fixed overhead per iteration (loop mechanics, independent of warm/cold) loop_overhead = ( # Attack contract loop body operations gas_costs.G_VERY_LOW # MSTORE selector at memory[32] (3) - + gas_costs.G_VERY_LOW # MLOAD counter (3) + + gas_costs.G_LOW # MLOAD counter (5) + gas_costs.G_VERY_LOW # MSTORE spender at memory[64] (3) + gas_costs.G_BASE # POP call result (2) # Counter decrement: MSTORE(0, SUB(MLOAD(0), 1)) - + gas_costs.G_VERY_LOW # MLOAD counter (3) + + gas_costs.G_LOW # MLOAD counter (5) + gas_costs.G_VERY_LOW # PUSH1 1 (3) + gas_costs.G_VERY_LOW # SUB (3) + gas_costs.G_VERY_LOW # MSTORE counter back (3) # While loop condition check - + gas_costs.G_VERY_LOW # MLOAD counter (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_VERY_LOW # ISZERO (3) - + gas_costs.G_HIGH # JUMPI back to loop start (10) + + gas_costs.G_LOW # MLOAD counter (5) + + gas_costs.G_BASE # ISZERO (2) + + gas_costs.G_BASE # ISZERO (2) + + gas_costs.G_MID # JUMPI back to loop start (8) ) # ERC20 internal gas (same for all calls) @@ -363,7 +290,7 @@ def test_sstore_erc20_approve( erc20_internal_gas = ( gas_costs.G_VERY_LOW # PUSH4 selector (3) + gas_costs.G_BASE # EQ selector match (2) - + gas_costs.G_HIGH # JUMPI to function (10) + + gas_costs.G_MID # JUMPI to function (8) + gas_costs.G_JUMPDEST # JUMPDEST at function start (1) + gas_costs.G_VERY_LOW # 
CALLDATALOAD spender (3) + gas_costs.G_VERY_LOW # CALLDATALOAD amount (3) @@ -379,8 +306,7 @@ def test_sstore_erc20_approve( # RETURN costs 0 gas ) - # For each contract: first call is COLD (2600), subsequent are WARM (100) - # Solve for calls per contract accounting for cold/warm transition + # First call is COLD (2600), subsequent are WARM (100) warm_call_cost = ( loop_overhead + gas_costs.G_WARM_ACCOUNT_ACCESS + erc20_internal_gas ) @@ -388,65 +314,48 @@ def test_sstore_erc20_approve( gas_costs.G_COLD_ACCOUNT_ACCESS - gas_costs.G_WARM_ACCOUNT_ACCESS ) - # Calculate how many transactions we need to fill the block - num_txs = max(1, gas_benchmark_value // tx_gas_limit) - - # Calculate gas budget per contract per transaction - total_overhead_per_tx = intrinsic_gas + ( - overhead_per_contract * num_contracts + # Deploy ERC20 contract using stub + erc20_address = pre.deploy_contract( + code=Bytecode(), + stub=stub_name, ) - available_gas_per_tx = tx_gas_limit - total_overhead_per_tx - gas_per_contract_per_tx = available_gas_per_tx // num_contracts - # Per contract per tx: gas = cold_warm_diff + calls * warm_call_cost - calls_per_contract = int( - (gas_per_contract_per_tx - cold_warm_diff) // warm_call_cost - ) + # Calculate number of transactions needed (EIP-7825 compliance) + num_txs = max(1, math.ceil(gas_benchmark_value / tx_gas_limit)) - # Deploy selected ERC20 contracts using stubs - erc20_addresses = [] - for stub_name in selected_stubs: - addr = pre.deploy_contract( - code=Bytecode(), - stub=stub_name, - ) - erc20_addresses.append(addr) + # Calculate total calls based on full gas budget + total_available_gas = gas_benchmark_value - (intrinsic_gas * num_txs) + total_calls = int((total_available_gas - cold_warm_diff) // warm_call_cost) + calls_per_tx = total_calls // num_txs # Log test requirements print( - f"Total gas budget: {gas_benchmark_value / 1_000_000:.1f}M gas. " - f"Tx gas limit: {tx_gas_limit / 1_000_000:.1f}M gas. " - f"Number of txs: {num_txs}. " - f"Overhead per contract: {overhead_per_contract}, " - f"Warm call cost: {warm_call_cost}. " - f"{calls_per_contract} approve calls per contract per tx " - f"({num_contracts} contracts)." - ) - - # Build attack code that loops through each contract - attack_code: Bytecode = ( - Op.JUMPDEST # Entry point - # Store selector once for all contracts - + Op.MSTORE(offset=0, value=APPROVE_SELECTOR) + f"Token: {token_name}, " + f"Total gas budget: {gas_benchmark_value / 1_000_000:.1f}M gas, " + f"{total_calls} approve calls across {num_txs} transaction(s)." 
) - for erc20_address in erc20_addresses: - # For each contract, initialize counter and loop - attack_code += ( - # Initialize counter in memory[32] = number of calls - Op.MSTORE(offset=32, value=calls_per_contract) - # Loop for this specific contract + # Build transactions + txs = [] + post = {} + calls_remaining = total_calls + + for i in range(num_txs): + # Last tx gets remaining calls + tx_calls = calls_per_tx if i < num_txs - 1 else calls_remaining + calls_remaining -= tx_calls + + # Build attack code for this transaction + attack_code: Bytecode = ( + Op.JUMPDEST # Entry point + + Op.MSTORE(offset=0, value=APPROVE_SELECTOR) + + Op.MSTORE(offset=32, value=tx_calls) + While( - # Continue while counter > 0 condition=Op.MLOAD(32) + Op.ISZERO + Op.ISZERO, body=( # Store spender at memory[64] (counter as spender/amount) Op.MSTORE(offset=64, value=Op.MLOAD(32)) # Call approve(spender, amount) on ERC20 contract - # args_offset=28 reads: selector from MEM[28:32] + - # spender from MEM[32:64] + amount from MEM[64:96] - # Note: counter at MEM[32:64] is reused as spender, - # and value at MEM[64:96] serves as the amount + Op.CALL( address=erc20_address, value=0, @@ -455,34 +364,33 @@ def test_sstore_erc20_approve( ret_offset=0, ret_size=0, ) - + Op.POP # Discard CALL success status - # Decrement counter + + Op.POP + Op.MSTORE(offset=32, value=Op.SUB(Op.MLOAD(32), 1)) ), ) ) - # Deploy attack contract - attack_address = pre.deploy_contract(code=attack_code) + # Deploy attack contract for this tx + attack_address = pre.deploy_contract(code=attack_code) - # Create multiple attack transactions to fill the block - sender = pre.fund_eoa() - attack_txs = [ - Transaction( - to=attack_address, - gas_limit=tx_gas_limit, - sender=sender, + # Calculate gas for this transaction + this_tx_gas = min( + tx_gas_limit, gas_benchmark_value - (i * tx_gas_limit) + ) + + txs.append( + Transaction( + to=attack_address, + gas_limit=this_tx_gas, + sender=pre.fund_eoa(), + ) ) - for _ in range(num_txs) - ] - # Post-state - post = { - attack_address: Account(storage={}), - } + # Add to post-state + post[attack_address] = Account(storage={}) blockchain_test( pre=pre, - blocks=[Block(txs=attack_txs)], + blocks=[Block(txs=txs)], post=post, ) From 187223461dc2b741c9626c84ff87d4a820b29a25 Mon Sep 17 00:00:00 2001 From: felipe Date: Mon, 2 Feb 2026 11:19:47 -0700 Subject: [PATCH 111/154] fix(test,hive): Add hive ruleset BPO2ToAmsterdamAtTime15k (#2111) --- .../consume/simulators/helpers/ruleset.py | 23 ++++++++++++++++++- .../src/execution_testing/forks/__init__.py | 2 ++ 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/ruleset.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/ruleset.py index 87d3783ce9..b14c9427c5 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/ruleset.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/consume/simulators/helpers/ruleset.py @@ -16,6 +16,7 @@ Berlin, BerlinToLondonAt5, BPO1ToBPO2AtTime15k, + BPO2ToAmsterdamAtTime15k, BPO2ToBPO3AtTime15k, BPO3ToBPO4AtTime15k, Byzantium, @@ -480,6 +481,26 @@ def get_blob_schedule_entries(fork: Fork) -> Dict[str, int]: "HIVE_BPO4_TIMESTAMP": 15000, **get_blob_schedule_entries(BPO4), }, + BPO2ToAmsterdamAtTime15k: { + "HIVE_FORK_HOMESTEAD": 0, + "HIVE_FORK_TANGERINE": 0, + "HIVE_FORK_SPURIOUS": 0, + "HIVE_FORK_BYZANTIUM": 0, + 
"HIVE_FORK_CONSTANTINOPLE": 0, + "HIVE_FORK_PETERSBURG": 0, + "HIVE_FORK_ISTANBUL": 0, + "HIVE_FORK_BERLIN": 0, + "HIVE_FORK_MERGE": 0, + "HIVE_TERMINAL_TOTAL_DIFFICULTY": 0, + "HIVE_SHANGHAI_TIMESTAMP": 0, + "HIVE_CANCUN_TIMESTAMP": 0, + "HIVE_PRAGUE_TIMESTAMP": 0, + "HIVE_OSAKA_TIMESTAMP": 0, + "HIVE_BPO1_TIMESTAMP": 0, + "HIVE_BPO2_TIMESTAMP": 0, + "HIVE_AMSTERDAM_TIMESTAMP": 15000, + **get_blob_schedule_entries(BPO2), + }, Amsterdam: { "HIVE_FORK_HOMESTEAD": 0, "HIVE_FORK_TANGERINE": 0, @@ -504,6 +525,6 @@ def get_blob_schedule_entries(fork: Fork) -> Dict[str, int]: # "HIVE_BPO3_TIMESTAMP": 0, # "HIVE_BPO4_TIMESTAMP": 0, "HIVE_AMSTERDAM_TIMESTAMP": 0, - **get_blob_schedule_entries(Amsterdam), + **get_blob_schedule_entries(BPO2), }, } diff --git a/packages/testing/src/execution_testing/forks/__init__.py b/packages/testing/src/execution_testing/forks/__init__.py index 760dbd0677..efd39010c4 100644 --- a/packages/testing/src/execution_testing/forks/__init__.py +++ b/packages/testing/src/execution_testing/forks/__init__.py @@ -30,6 +30,7 @@ from .forks.transition import ( BerlinToLondonAt5, BPO1ToBPO2AtTime15k, + BPO2ToAmsterdamAtTime15k, BPO2ToBPO3AtTime15k, BPO3ToBPO4AtTime15k, CancunToPragueAtTime15k, @@ -117,6 +118,7 @@ "BPO1ToBPO2AtTime15k", "BPO2", "BPO2ToBPO3AtTime15k", + "BPO2ToAmsterdamAtTime15k", "BPO3", "BPO3ToBPO4AtTime15k", "BPO4", From 2e0c8c63ffbc5cad4282981b90437628bfc217d7 Mon Sep 17 00:00:00 2001 From: talhaaktss Date: Mon, 2 Feb 2026 23:01:48 +0100 Subject: [PATCH 112/154] chore(pyproject): declare Python 3.12 support in package metadata (#1888) Add the Python 3.12 classifier to project metadata to reflect support for Python >=3.11 on modern interpreters. --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index dc05b9dad3..76ffc24f81 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,6 +13,7 @@ urls = { "Homepage" = "https://github.com/ethereum/execution-specs" } classifiers = [ "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: PyPy", "Programming Language :: Python :: Implementation :: CPython", "Intended Audience :: Developers", From 0c65695a03c592e73dec3521f971564d5ff6caf5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=94=A1=E4=BD=B3=E8=AA=A0=20Louis=20Tsai?= <72684086+LouisTsai-Csie@users.noreply.github.com> Date: Tue, 3 Feb 2026 15:44:02 +0800 Subject: [PATCH 113/154] chore(test-benchmark): skip rlp size limit check (#2118) --- tests/benchmark/compute/scenario/test_transaction_types.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/benchmark/compute/scenario/test_transaction_types.py b/tests/benchmark/compute/scenario/test_transaction_types.py index e9c047d9bd..7c72af0b56 100644 --- a/tests/benchmark/compute/scenario/test_transaction_types.py +++ b/tests/benchmark/compute/scenario/test_transaction_types.py @@ -322,6 +322,13 @@ def test_block_full_access_list_and_data( Test a block with access lists (60% gas) and calldata (40% gas) using random mixed bytes. 
""" + # Skip if EIP-7934 block RLP size limit would be exceeded + block_rlp_limit = fork.block_rlp_size_limit() + if block_rlp_limit: + pytest.skip( + "Test skipped: EIP-7934 block RLP size limit might be exceeded" + ) + iteration_count = math.ceil(gas_benchmark_value / tx_gas_limit) gas_remaining = gas_benchmark_value From d0fd33945cd2272c926b898e122fdaa57ac81581 Mon Sep 17 00:00:00 2001 From: danceratopz Date: Tue, 3 Feb 2026 21:19:17 +0100 Subject: [PATCH 114/154] fix(fixtures): stream fixture JSON writes to reduce peak memory. (#2124) - Write merged fixture JSON entries directly to file instead of building the full output string in memory via `"".join(parts)`. - Avoids OOM during `merge_partial_fixture_files()` for large fixtures. --- .../src/execution_testing/fixtures/collector.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/packages/testing/src/execution_testing/fixtures/collector.py b/packages/testing/src/execution_testing/fixtures/collector.py index 74bdb6b471..925fe0be77 100644 --- a/packages/testing/src/execution_testing/fixtures/collector.py +++ b/packages/testing/src/execution_testing/fixtures/collector.py @@ -71,16 +71,15 @@ def merge_partial_fixture_files(output_dir: Path) -> None: # Write final JSON file sorted_keys = sorted(entries.keys()) - parts = ["{\n"] last_idx = len(sorted_keys) - 1 - for i, key in enumerate(sorted_keys): - key_json = json.dumps(key) - # Add indentation for nesting inside outer JSON object - value_indented = entries[key].replace("\n", "\n ") - parts.append(f" {key_json}: {value_indented}") - parts.append(",\n" if i < last_idx else "\n") - parts.append("}") - target_path.write_text("".join(parts)) + with open(target_path, "w") as f: + f.write("{\n") + for i, key in enumerate(sorted_keys): + key_json = json.dumps(key) + value_indented = entries[key].replace("\n", "\n ") + f.write(f" {key_json}: {value_indented}") + f.write(",\n" if i < last_idx else "\n") + f.write("}") # Clean up partial files for partial in partials: From 2c1a10d9939a0a1a6e7cb69db43a15c6c5c9bd8d Mon Sep 17 00:00:00 2001 From: felipe Date: Tue, 3 Feb 2026 13:32:31 -0700 Subject: [PATCH 115/154] fix(test-fill): improve memory buildup for fill; merge on `SIGINT` / `SIGTERM` (#2117) * fix(test-fill): Merge partial fixtures on ``KeyboardInterrupt`` * feat(test-fill): Merge partial fixtures on ``SIGTERM`` * performance: stream fixture and index writes to disk; prevent memory bloat * fix(test-fill): merge indexes on `SIGINT` / `SIGTERM` * fix: add hotfix for `--verify-fixtures`, has been broken for some time. --- .../pytest_commands/plugins/filler/filler.py | 147 +++++++++- .../execution_testing/fixtures/collector.py | 251 ++++++++++-------- .../fixtures/tests/test_collector.py | 25 +- 3 files changed, 289 insertions(+), 134 deletions(-) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py index bd2490f820..7dfef105de 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py @@ -6,10 +6,12 @@ and writes the generated fixtures to file. 
""" +import atexit import configparser import datetime import json import os +import signal import warnings from dataclasses import dataclass, field from pathlib import Path @@ -35,6 +37,8 @@ from execution_testing.client_clis.clis.geth import FixtureConsumerTool from execution_testing.fixtures import ( BaseFixture, + BlockchainEngineFixture, + BlockchainFixture, FixtureCollector, FixtureConsumer, FixtureFillingPhase, @@ -43,6 +47,7 @@ PreAllocGroupBuilder, PreAllocGroupBuilders, PreAllocGroups, + StateFixture, TestInfo, merge_partial_fixture_files, ) @@ -70,6 +75,47 @@ ) from .fixture_output import FixtureOutput +# Fixture output dir for keyboard interrupt cleanup (set in pytest_configure). +# Used by _merge_on_exit to merge partial JSONL files on Ctrl+C or SIGTERM. +_fixture_output_dir: Path | None = None +_atexit_registered: bool = False +_interrupt_count: int = 0 +_original_sigint_handler: Any = None +_original_sigterm_handler: Any = None + + +def _termination_handler(signum: int, frame: Any) -> None: + """Handle SIGINT/SIGTERM gracefully during test filling.""" + del frame + global _interrupt_count + global _original_sigint_handler, _original_sigterm_handler + _interrupt_count += 1 + + if _interrupt_count == 1: + # First interrupt: restore original handlers and re-raise + if _original_sigint_handler is not None: + signal.signal(signal.SIGINT, _original_sigint_handler) + if _original_sigterm_handler is not None: + signal.signal(signal.SIGTERM, _original_sigterm_handler) + if signum == signal.SIGTERM: + raise SystemExit(128 + signum) + raise KeyboardInterrupt + # Subsequent interrupts: ignore and print message + print("\nMerging fixtures, please wait...", flush=True) + + +def _merge_on_exit() -> None: + """Atexit handler to merge partial JSONL files. Ignores signals.""" + global _fixture_output_dir + if _fixture_output_dir is not None: + signal.signal(signal.SIGINT, signal.SIG_IGN) + signal.signal(signal.SIGTERM, signal.SIG_IGN) + merge_partial_fixture_files(_fixture_output_dir) + # Also merge index if partial indexes exist + meta_dir = _fixture_output_dir / ".meta" + if meta_dir.exists() and any(meta_dir.glob("partial_index*.jsonl")): + merge_partial_indexes(_fixture_output_dir, quiet_mode=True) + @dataclass(kw_only=True) class PhaseManager: @@ -706,6 +752,22 @@ def pytest_configure(config: pytest.Config) -> None: except ValueError as e: pytest.exit(str(e), returncode=pytest.ExitCode.USAGE_ERROR) + # Register atexit/signal handlers for cleanup (master only, not workers). 
+ global _fixture_output_dir, _atexit_registered + global _original_sigint_handler, _original_sigterm_handler + is_xdist_worker = hasattr(config, "workerinput") + if not config.fixture_output.is_stdout: # type: ignore[attr-defined] + _fixture_output_dir = config.fixture_output.directory # type: ignore[attr-defined] + if not _atexit_registered and not is_xdist_worker: + atexit.register(_merge_on_exit) + _original_sigint_handler = signal.signal( + signal.SIGINT, _termination_handler + ) + _original_sigterm_handler = signal.signal( + signal.SIGTERM, _termination_handler + ) + _atexit_registered = True + if ( not config.getoption("disable_html") and config.getoption("htmlpath") is None @@ -1047,7 +1109,11 @@ def evm_fixture_verification( verify_fixtures_bin = evm_bin reused_evm_bin = True if not verify_fixtures_bin: - return + pytest.exit( + "--verify-fixtures requires --evm-bin or --verify-fixtures-bin " + "to be specified.", + returncode=pytest.ExitCode.USAGE_ERROR, + ) try: evm_fixture_verification = FixtureConsumerTool.from_binary_path( binary_path=Path(verify_fixtures_bin), @@ -1241,13 +1307,16 @@ def fixture_collector( generate_index=request.config.getoption("generate_index"), ) yield fixture_collector - worker_id = os.environ.get("PYTEST_XDIST_WORKER", None) - fixture_collector.dump_fixtures(worker_id) - if do_fixture_verification: - fixture_collector.verify_fixture_files(evm_fixture_verification) - # Write partial index for this worker/scope - if fixture_collector.generate_index: - fixture_collector.write_partial_index(worker_id) + try: + # dump_fixtures() only needed for stdout mode + fixture_collector.dump_fixtures() + # Verify fixtures for stdout mode only (files are in memory). + # For file mode, verification happens at session finish after merge. + if do_fixture_verification and fixture_output.is_stdout: + fixture_collector.verify_fixture_files(evm_fixture_verification) + finally: + # Always close streaming file handles, even on error + fixture_collector.close_streaming_files() @pytest.fixture(autouse=True, scope="session") @@ -1609,6 +1678,65 @@ def pytest_collection_modifyitems( items[:] = slow_items + normal_items +def _verify_fixtures_post_merge( + config: pytest.Config, output_dir: Path +) -> None: + """ + Verify fixtures after merge if verification is enabled. + + Called from pytest_sessionfinish after partial files are merged into + final JSON fixtures. Runs evm statetest/blocktest on each fixture. 
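The interrupt handling introduced above follows a two-stage pattern: the first SIGINT/SIGTERM restores the original handlers and re-raises so pytest can unwind, while any further interrupts are swallowed so the atexit merge can run to completion. A minimal standalone sketch of that pattern follows; the cleanup body and messages are illustrative, not the repository code.

import atexit
import signal
import time

_interrupts = 0
_orig_sigint = None


def _handler(signum, frame):
    global _interrupts
    _interrupts += 1
    if _interrupts == 1:
        # First interrupt: restore the default behaviour and unwind.
        signal.signal(signal.SIGINT, _orig_sigint)
        raise KeyboardInterrupt
    # Later interrupts: ignore them, cleanup is already in progress.
    print("merging fixtures, please wait...", flush=True)


def _cleanup():
    # Block further signals while flushing/merging partial output.
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    print("partial files merged")


if __name__ == "__main__":
    atexit.register(_cleanup)
    _orig_sigint = signal.signal(signal.SIGINT, _handler)
    try:
        time.sleep(30)  # stand-in for the running test session
    except KeyboardInterrupt:
        pass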
+ """ + if not config.getoption("verify_fixtures"): + return + + # Get the verification binary (same logic as evm_fixture_verification) + verify_fixtures_bin = config.getoption("verify_fixtures_bin") + if not verify_fixtures_bin: + verify_fixtures_bin = config.getoption("evm_bin") + if not verify_fixtures_bin: + return + + try: + evm_verification = FixtureConsumerTool.from_binary_path( + binary_path=Path(verify_fixtures_bin), + trace=getattr(config, "collect_traces", False), + ) + except Exception: + # Binary not recognized, skip verification (error already shown + # during fixture setup if --verify-fixtures was used) + return + + # Map directory names to fixture format classes + dir_to_format: dict[str, type[BaseFixture]] = { + StateFixture.output_base_dir_name(): StateFixture, + BlockchainFixture.output_base_dir_name(): BlockchainFixture, + BlockchainEngineFixture.output_base_dir_name(): ( + BlockchainEngineFixture + ), + } + + # Find all JSON fixture files and verify them + for json_file in output_dir.rglob("*.json"): + # Determine fixture format from top-level directory + relative_path = json_file.relative_to(output_dir) + if not relative_path.parts: + continue + + top_dir = relative_path.parts[0] + fixture_format = dir_to_format.get(top_dir) + if fixture_format is None: + continue + + if evm_verification.can_consume(fixture_format): + evm_verification.consume_fixture( + fixture_format, + json_file, + fixture_name=None, + debug_output_path=None, + ) + + def pytest_sessionfinish(session: pytest.Session, exitstatus: int) -> None: """ Perform session finish tasks. @@ -1656,6 +1784,9 @@ def pytest_sessionfinish(session: pytest.Session, exitstatus: int) -> None: for file in fixture_output.directory.rglob("*.lock"): file.unlink() + # Verify fixtures after merge if verification is enabled + _verify_fixtures_post_merge(session.config, fixture_output.directory) + # Generate index file for all produced fixtures by merging partial indexes. # Only merge if partial indexes were actually written (i.e., tests produced # fixtures). When no tests are filled (e.g., all skipped), no partial diff --git a/packages/testing/src/execution_testing/fixtures/collector.py b/packages/testing/src/execution_testing/fixtures/collector.py index 925fe0be77..0627df1c5f 100644 --- a/packages/testing/src/execution_testing/fixtures/collector.py +++ b/packages/testing/src/execution_testing/fixtures/collector.py @@ -10,6 +10,7 @@ from dataclasses import dataclass, field from pathlib import Path from typing import ( + IO, ClassVar, Dict, List, @@ -18,8 +19,6 @@ Tuple, ) -from filelock import FileLock - from execution_testing.base_types import to_json from .base import BaseFixture @@ -196,15 +195,23 @@ class FixtureCollector: single_fixture_per_file: bool filler_path: Path base_dump_dir: Optional[Path] = None - flush_interval: int = 1000 generate_index: bool = True + # Worker ID for partial files. None = read from env var. 
+ worker_id: Optional[str] = None - # Internal state + # Internal state (only used for stdout mode) all_fixtures: Dict[Path, Fixtures] = field(default_factory=dict) - json_path_to_test_item: Dict[Path, TestInfo] = field(default_factory=dict) - # Store index entries as simple dicts - # (avoid Pydantic overhead during collection) - index_entries: List[Dict] = field(default_factory=list) + + # Streaming file handles - kept open for module duration + _partial_fixture_files: Dict[Path, IO[str]] = field(default_factory=dict) + _partial_index_file: Optional[IO[str]] = field(default=None) + _worker_id_cached: bool = field(default=False, init=False) + + # Lightweight tracking for verification (path, format class, debug_path) + # Only stores metadata, not fixture data - memory efficient + _fixtures_to_verify: List[Tuple[Path, type, Optional[Path]]] = field( + default_factory=list + ) def get_fixture_basename(self, info: TestInfo) -> Path: """Return basename of the fixture file for a given test case.""" @@ -226,8 +233,20 @@ def get_fixture_basename(self, info: TestInfo) -> Path: mode="module" ) + def _get_worker_id(self) -> str | None: + """Get the worker ID (from constructor or environment).""" + if self.worker_id is not None: + return self.worker_id + if not self._worker_id_cached: + # Cache the env var lookup + env_worker_id = os.environ.get("PYTEST_XDIST_WORKER") + if env_worker_id: + self.worker_id = env_worker_id + self._worker_id_cached = True + return self.worker_id + def add_fixture(self, info: TestInfo, fixture: BaseFixture) -> Path: - """Add fixture to the list of fixtures of a given test case.""" + """Add fixture and immediately stream to partial JSONL file.""" fixture_basename = self.get_fixture_basename(info) fixture_path = ( @@ -235,16 +254,25 @@ def add_fixture(self, info: TestInfo, fixture: BaseFixture) -> Path: / fixture.output_base_dir_name() / fixture_basename.with_suffix(fixture.output_file_extension) ) - # relevant when we group by test function - if fixture_path not in self.all_fixtures.keys(): - self.all_fixtures[fixture_path] = Fixtures(root={}) - self.json_path_to_test_item[fixture_path] = info - self.all_fixtures[fixture_path][info.get_id()] = fixture + # Stream fixture directly to partial JSONL (no memory accumulation) + if self.output_dir.name != "stdout": + self._stream_fixture_to_partial( + fixture_path, info.get_id(), fixture + ) + # Track for verification (lightweight - only path and format class) + debug_path = self._get_consume_direct_dump_dir(info) + self._fixtures_to_verify.append( + (fixture_path, fixture.__class__, debug_path) + ) + else: + # stdout mode: accumulate for final JSON dump + if fixture_path not in self.all_fixtures: + self.all_fixtures[fixture_path] = Fixtures(root={}) + self.all_fixtures[fixture_path][info.get_id()] = fixture - # Collect index entry while data is in memory (if indexing enabled) - # Store as simple dict to avoid Pydantic overhead during collection - if self.generate_index: + # Stream index entry directly to partial JSONL + if self.generate_index and self.output_dir.name != "stdout": relative_path = fixture_path.relative_to(self.output_dir) fixture_fork = fixture.get_fork() index_entry = { @@ -256,18 +284,67 @@ def add_fixture(self, info: TestInfo, fixture: BaseFixture) -> Path: } if (pre_hash := getattr(fixture, "pre_hash", None)) is not None: index_entry["pre_hash"] = pre_hash - self.index_entries.append(index_entry) - - if ( - self.flush_interval > 0 - and len(self.all_fixtures) >= self.flush_interval - ): - self.dump_fixtures() + 
self._stream_index_entry_to_partial(index_entry) return fixture_path - def dump_fixtures(self, worker_id: str | None = None) -> None: - """Dump all collected fixtures to their respective files.""" + def _get_partial_fixture_file(self, fixture_path: Path) -> "IO[str]": + """Get or create a file handle for streaming fixtures.""" + worker_id = self._get_worker_id() + suffix = f".{worker_id}" if worker_id else ".main" + partial_path = fixture_path.with_suffix(f".partial{suffix}.jsonl") + + if partial_path not in self._partial_fixture_files: + partial_path.parent.mkdir(parents=True, exist_ok=True) + self._partial_fixture_files[partial_path] = open(partial_path, "a") + + return self._partial_fixture_files[partial_path] + + def _stream_fixture_to_partial( + self, + fixture_path: Path, + fixture_id: str, + fixture: BaseFixture, + ) -> None: + """Stream a single fixture to its partial JSONL file.""" + value = json.dumps(fixture.json_dict_with_info(), indent=4) + line = json.dumps({"k": fixture_id, "v": value}) + "\n" + + f = self._get_partial_fixture_file(fixture_path) + f.write(line) + f.flush() # Ensure data is written immediately + + def _get_partial_index_file(self) -> "IO[str]": + """Get or create the file handle for streaming index entries.""" + if self._partial_index_file is None: + worker_id = self._get_worker_id() + suffix = f".{worker_id}" if worker_id else ".main" + partial_index_path = ( + self.output_dir / ".meta" / f"partial_index{suffix}.jsonl" + ) + partial_index_path.parent.mkdir(parents=True, exist_ok=True) + self._partial_index_file = open(partial_index_path, "a") + + return self._partial_index_file + + def _stream_index_entry_to_partial(self, entry: Dict) -> None: + """Stream a single index entry to partial JSONL file.""" + f = self._get_partial_index_file() + f.write(json.dumps(entry) + "\n") + f.flush() # Ensure data is written immediately + + def close_streaming_files(self) -> None: + """Close all open streaming file handles.""" + for f in self._partial_fixture_files.values(): + f.close() + self._partial_fixture_files.clear() + + if self._partial_index_file is not None: + self._partial_index_file.close() + self._partial_index_file = None + + def dump_fixtures(self) -> None: + """Dump collected fixtures (only used for stdout mode).""" if self.output_dir.name == "stdout": combined_fixtures = { k: to_json(v) @@ -275,65 +352,10 @@ def dump_fixtures(self, worker_id: str | None = None) -> None: for k, v in fixture.items() } json.dump(combined_fixtures, sys.stdout, indent=4) - return - os.makedirs(self.output_dir, exist_ok=True) - for fixture_path, fixtures in self.all_fixtures.items(): - os.makedirs(fixture_path.parent, exist_ok=True) - if len({fixture.__class__ for fixture in fixtures.values()}) != 1: - raise TypeError( - "All fixtures in a single file must have the same format." - ) - self._write_partial_fixtures(fixture_path, fixtures, worker_id) - - self.all_fixtures.clear() - - def _write_partial_fixtures( - self, file_path: Path, fixtures: Fixtures, worker_id: str | None - ) -> None: - """ - Write fixtures to a partial JSONL file (append-only). - - Each line is a JSON object: {"key": "fixture_id", "value": "json_str"} - This avoids O(n) merge work per worker - just O(1) append. - Final merge to JSON happens at session end. 
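The partial-file format used by the streaming collector above is append-only JSONL: each line is a self-contained {"k": fixture_id, "v": serialized_fixture} object, so workers only ever append and flush, and the merge work is deferred to session end. A small self-contained sketch of both sides; the file name and payloads are invented for illustration.

import json
from pathlib import Path

partial = Path("fixtures.partial.gw0.jsonl")

# Writer side: append one JSON object per fixture and flush immediately.
with open(partial, "a") as f:
    for fixture_id, payload in [
        ("test_a[fork_Osaka]", {"gasUsed": "0x5208"}),
        ("test_b[fork_Osaka]", {"gasUsed": "0xcf08"}),
    ]:
        line = json.dumps({"k": fixture_id, "v": json.dumps(payload, indent=4)})
        f.write(line + "\n")
        f.flush()

# Reader side (merge phase): one json.loads per line, keyed by fixture id.
entries = {}
with open(partial) as f:
    for line in f:
        if line.strip():
            obj = json.loads(line)
            entries[obj["k"]] = obj["v"]
print(sorted(entries))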
- """ - suffix = f".{worker_id}" if worker_id else ".main" - partial_path = file_path.with_suffix(f".partial{suffix}.jsonl") - partial_path.parent.mkdir(parents=True, exist_ok=True) - lock_file_path = partial_path.with_suffix(".lock") - - lines = [] - for name in fixtures: - value = json.dumps(fixtures[name].json_dict_with_info(), indent=4) - # Store as JSONL: {"k": key, "v": serialized value string} - lines.append(json.dumps({"k": name, "v": value}) + "\n") - - with FileLock(lock_file_path): - with open(partial_path, "a") as f: - f.writelines(lines) + self.all_fixtures.clear() + # For file output, fixtures are already streamed in add_fixture() - def verify_fixture_files( - self, evm_fixture_verification: FixtureConsumer - ) -> None: - """Run `evm [state|block]test` on each fixture.""" - for fixture_path, name_fixture_dict in self.all_fixtures.items(): - for _fixture_name, fixture in name_fixture_dict.items(): - if evm_fixture_verification.can_consume(fixture.__class__): - info = self.json_path_to_test_item[fixture_path] - consume_direct_dump_dir = ( - self._get_consume_direct_dump_dir(info) - ) - evm_fixture_verification.consume_fixture( - fixture.__class__, - fixture_path, - fixture_name=None, - debug_output_path=consume_direct_dump_dir, - ) - - def _get_consume_direct_dump_dir( - self, - info: TestInfo, - ) -> Path | None: + def _get_consume_direct_dump_dir(self, info: TestInfo) -> Path | None: """ Directory to dump the current test function's fixture.json and fixture verification debug output. @@ -349,37 +371,36 @@ def _get_consume_direct_dump_dir( self.base_dump_dir, self.filler_path, level="test_function" ) - def write_partial_index(self, worker_id: str | None = None) -> Path | None: + def verify_fixture_files( + self, evm_fixture_verification: FixtureConsumer + ) -> None: """ - Append collected index entries to a partial index file using JSONL - format. - - Uses append-only JSONL (JSON Lines) format for efficient writes without - read-modify-write cycles. Each line is a complete JSON object - representing one index entry. - - Args: - worker_id: The xdist worker ID (e.g., "gw0"), or None for master. - - Returns: - Path to the partial index file, or None if indexing is disabled. + Run `evm [state|block]test` on each fixture. + For streaming mode, uses lightweight tracking of fixture paths/formats + rather than keeping full fixtures in memory. 
""" - if not self.generate_index or not self.index_entries: - return None - - suffix = f".{worker_id}" if worker_id else ".master" - partial_index_path = ( - self.output_dir / ".meta" / f"partial_index{suffix}.jsonl" - ) - partial_index_path.parent.mkdir(parents=True, exist_ok=True) - lock_file_path = partial_index_path.with_suffix(".lock") - - # Append entries as JSONL (one JSON object per line) - # This avoids read-modify-write cycles - with FileLock(lock_file_path): - with open(partial_index_path, "a") as f: - for entry in self.index_entries: - f.write(json.dumps(entry) + "\n") - - return partial_index_path + if self.output_dir.name == "stdout": + # stdout mode: fixtures are in memory + for fixture_path, name_fixture_dict in self.all_fixtures.items(): + for _fixture_name, fixture in name_fixture_dict.items(): + if evm_fixture_verification.can_consume(fixture.__class__): + evm_fixture_verification.consume_fixture( + fixture.__class__, + fixture_path, + fixture_name=None, + debug_output_path=None, + ) + else: + # Streaming mode: use tracked fixture metadata + for entry in self._fixtures_to_verify: + fixture_path, fixture_format, debug_path = entry + if evm_fixture_verification.can_consume(fixture_format): + evm_fixture_verification.consume_fixture( + fixture_format, + fixture_path, + fixture_name=None, + debug_output_path=debug_path, + ) + # Clear tracking after verification + self._fixtures_to_verify.clear() diff --git a/packages/testing/src/execution_testing/fixtures/tests/test_collector.py b/packages/testing/src/execution_testing/fixtures/tests/test_collector.py index 87e55e6f89..4b41f1c6d4 100644 --- a/packages/testing/src/execution_testing/fixtures/tests/test_collector.py +++ b/packages/testing/src/execution_testing/fixtures/tests/test_collector.py @@ -79,7 +79,7 @@ def test_single_fixture_matches_json_dumps( fixture = _make_fixture(1) info = _make_info("tx_test", module_path) collector.add_fixture(info, fixture) - collector.dump_fixtures(worker_id="gw0") + collector.dump_fixtures() merge_partial_fixture_files(output_dir) # Find the written file @@ -113,7 +113,7 @@ def test_multiple_fixtures_match_json_dumps( collector.add_fixture(info, fixture) fixtures_and_infos.append((info, fixture)) - collector.dump_fixtures(worker_id="gw0") + collector.dump_fixtures() merge_partial_fixture_files(output_dir) json_files = list(output_dir.rglob("*.json")) @@ -140,6 +140,7 @@ def test_multiple_workers_merge_correctly( single_fixture_per_file=False, filler_path=filler_path, generate_index=False, + worker_id="gw0", ) # Worker A writes fixtures 0-2 pairs_a = [] @@ -148,7 +149,7 @@ def test_multiple_workers_merge_correctly( info = _make_info(f"tx_test_{i}", module_path) collector1.add_fixture(info, fixture) pairs_a.append((info, fixture)) - collector1.dump_fixtures(worker_id="gw0") + collector1.close_streaming_files() # Worker B writes fixtures 3-5 (separate partial file) collector2 = FixtureCollector( @@ -157,6 +158,7 @@ def test_multiple_workers_merge_correctly( single_fixture_per_file=False, filler_path=filler_path, generate_index=False, + worker_id="gw1", ) pairs_b = [] for i in range(3, 6): @@ -164,7 +166,7 @@ def test_multiple_workers_merge_correctly( info = _make_info(f"tx_test_{i}", module_path) collector2.add_fixture(info, fixture) pairs_b.append((info, fixture)) - collector2.dump_fixtures(worker_id="gw1") + collector2.close_streaming_files() # Merge at session end merge_partial_fixture_files(output_dir) @@ -197,7 +199,7 @@ def test_output_is_valid_json( info = _make_info(f"tx_test_{i}", 
module_path) collector.add_fixture(info, fixture) - collector.dump_fixtures(worker_id="gw0") + collector.dump_fixtures() merge_partial_fixture_files(output_dir) json_files = list(output_dir.rglob("*.json")) @@ -223,7 +225,7 @@ def test_fixtures_sorted_by_key( info = _make_info(f"tx_test_{i}", module_path) collector.add_fixture(info, fixture) - collector.dump_fixtures(worker_id="gw0") + collector.dump_fixtures() merge_partial_fixture_files(output_dir) json_files = list(output_dir.rglob("*.json")) @@ -247,7 +249,7 @@ def test_partial_files_cleaned_up_after_merge( fixture = _make_fixture(1) info = _make_info("tx_test", module_path) collector.add_fixture(info, fixture) - collector.dump_fixtures(worker_id="gw0") + collector.dump_fixtures() # Verify partial file exists before merge partial_files = list(output_dir.rglob("*.partial.*.jsonl")) @@ -293,7 +295,7 @@ def test_single_fixture_matches_legacy( generate_index=False, ) collector.add_fixture(info, fixture) - collector.dump_fixtures(worker_id="gw0") + collector.dump_fixtures() merge_partial_fixture_files(new_dir) new_files = list(new_dir.rglob("*.json")) assert len(new_files) == 1 @@ -333,7 +335,7 @@ def test_multiple_fixtures_match_legacy( ) for i, info in enumerate(infos): collector.add_fixture(info, list(fixtures_dict.values())[i]) - collector.dump_fixtures(worker_id="gw0") + collector.dump_fixtures() merge_partial_fixture_files(new_dir) new_files = list(new_dir.rglob("*.json")) assert len(new_files) == 1 @@ -374,11 +376,12 @@ def test_multiple_workers_match_legacy( single_fixture_per_file=False, filler_path=filler_path, generate_index=False, + worker_id=f"gw{worker_idx}", ) start = worker_idx * 2 for i in range(start, start + 2): collector.add_fixture(infos[i], fixture_values[i]) - collector.dump_fixtures(worker_id=f"gw{worker_idx}") + collector.close_streaming_files() merge_partial_fixture_files(new_dir) new_files = list(new_dir.rglob("*.json")) @@ -426,7 +429,7 @@ def test_special_characters_in_keys_match_legacy( ) for i, info in enumerate(infos): collector.add_fixture(info, list(fixtures_dict.values())[i]) - collector.dump_fixtures(worker_id="gw0") + collector.dump_fixtures() merge_partial_fixture_files(new_dir) new_files = list(new_dir.rglob("*.json")) assert len(new_files) == 1 From 0d5a00be8a6082d50130f7b41a5805f1ee02ae35 Mon Sep 17 00:00:00 2001 From: daniellehrner Date: Tue, 3 Feb 2026 21:21:23 +0000 Subject: [PATCH 116/154] Add BlockException.GAS_USED_OVERFLOW to BesuExceptionMapper (#2126) Signed-off-by: daniellehrner --- .../testing/src/execution_testing/client_clis/clis/besu.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/testing/src/execution_testing/client_clis/clis/besu.py b/packages/testing/src/execution_testing/client_clis/clis/besu.py index 3a75c202d4..c2062a58b8 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/besu.py +++ b/packages/testing/src/execution_testing/client_clis/clis/besu.py @@ -236,6 +236,9 @@ class BesuExceptionMapper(ExceptionMapper): TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: ( "transaction invalid gasPrice is less than the current BaseFee" ), + BlockException.GAS_USED_OVERFLOW: ( + "provided gas insufficient" + ), TransactionException.GAS_ALLOWANCE_EXCEEDED: ( "provided gas insufficient" ), From 72af492f01dfa5b09644b20fe5ec68ffc6a546f5 Mon Sep 17 00:00:00 2001 From: felipe Date: Tue, 3 Feb 2026 16:30:27 -0700 Subject: [PATCH 117/154] fix(test): lint (#2128) --- .../testing/src/execution_testing/client_clis/clis/besu.py | 4 +--- 1 file changed, 1 insertion(+), 
3 deletions(-) diff --git a/packages/testing/src/execution_testing/client_clis/clis/besu.py b/packages/testing/src/execution_testing/client_clis/clis/besu.py index c2062a58b8..b5698ef396 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/besu.py +++ b/packages/testing/src/execution_testing/client_clis/clis/besu.py @@ -236,9 +236,7 @@ class BesuExceptionMapper(ExceptionMapper): TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: ( "transaction invalid gasPrice is less than the current BaseFee" ), - BlockException.GAS_USED_OVERFLOW: ( - "provided gas insufficient" - ), + BlockException.GAS_USED_OVERFLOW: "provided gas insufficient", TransactionException.GAS_ALLOWANCE_EXCEEDED: ( "provided gas insufficient" ), From 72addb2c1865340572a89b282f1a32643ce3ff60 Mon Sep 17 00:00:00 2001 From: danceratopz Date: Wed, 4 Feb 2026 02:48:40 +0100 Subject: [PATCH 118/154] chore(tests): use representative blob combos in high blob count forks (#2127) * chore(tests): use representative blob combinations for EIP-4844 high-blob forks * fix: tox * fix: test IDs --------- Co-authored-by: Mario Vega --- tests/cancun/eip4844_blobs/spec.py | 181 +++++++++++++++++++++++++++-- 1 file changed, 169 insertions(+), 12 deletions(-) diff --git a/tests/cancun/eip4844_blobs/spec.py b/tests/cancun/eip4844_blobs/spec.py index 4c669f7b0e..430137bf47 100644 --- a/tests/cancun/eip4844_blobs/spec.py +++ b/tests/cancun/eip4844_blobs/spec.py @@ -1,11 +1,13 @@ """Defines EIP-4844 specification constants and functions.""" import itertools +import math from dataclasses import dataclass from hashlib import sha256 -from typing import List, Optional, Tuple +from typing import List, Optional, Set, Tuple -from execution_testing import Fork, Transaction +import pytest +from execution_testing import Fork, ParameterSet, Transaction @dataclass(frozen=True) @@ -87,6 +89,136 @@ class SpecHelpers: """ BYTES_PER_FIELD_ELEMENT = 32 + _EXHAUSTIVE_MAX_BLOBS_PER_BLOCK = ( + 9 # Osaka max; exhaustive is tractable up to here + ) + + @classmethod + def get_representative_blob_combinations( + cls, + blob_count: int, + max_blobs_per_tx: int, + ) -> List[Tuple[int, ...]]: + """ + Get a bounded set of representative blob-per-tx partitions for a given + blob count, instead of exhaustively enumerating all valid partitions. + """ + n = blob_count + if n < 1: + return [] + m = max_blobs_per_tx + seen: Set[Tuple[int, ...]] = set() + result: List[Tuple[int, ...]] = [] + + def add(combo: Tuple[int, ...]) -> None: + if combo not in seen: + seen.add(combo) + result.append(combo) + + # 1. Single tx (if it fits) + # e.g. n=5, m=6 → (5,) + if n <= m: + add((n,)) + + # 2. All singles + # e.g. n=10 → (1,1,1,1,1,1,1,1,1,1) + if n > 1: + add((1,) * n) + + # 3. Greedy pack: fill max-sized txs first + # e.g. n=10, m=6 → (6,4) + if n > m: + q, r = divmod(n, m) + greedy = (m,) * q + ((r,) if r else ()) + add(greedy) + + # 4. Reversed greedy + # e.g. n=10, m=6 → (4,6) + rev = tuple(reversed(greedy)) + add(rev) + + # 5. One big tx + singles for the rest (and reversed) + # e.g. n=10, m=6 → (6,1,1,1,1) and (1,1,1,1,6) + if n > 1: + big = min(n - 1, m) + rest = n - big + combo = (big,) + (1,) * rest + add(combo) + add(tuple(reversed(combo))) + + # 6. Balanced split into two txs (and reversed) + # e.g. n=10, m=6 → (5,5); n=9, m=6 → (5,4) and (4,5) + if n > 1: + half_hi = math.ceil(n / 2) + half_lo = n - half_hi + if half_hi <= m and half_lo >= 1: + add((half_hi, half_lo)) + if half_hi != half_lo: + add((half_lo, half_hi)) + + # 7. 
Uniform non-max: all txs same size, 1 < k < m + # e.g. n=12, m=6 → (4,4,4); n=15, m=6 → (5,5,5) + if n > 1: + for k in range(m - 1, 1, -1): + if n % k == 0 and n // k > 1: + add((k,) * (n // k)) + break + + return result + + @classmethod + def get_representative_invalid_blob_combinations( + cls, + fork: Fork, + ) -> List[Tuple[int, ...]]: + """ + Get a bounded set of representative invalid blob-per-tx partitions + that exceed the block blob limit by exactly one. + """ + max_blobs_per_block = fork.max_blobs_per_block() + max_blobs_per_tx = fork.max_blobs_per_tx() + total = max_blobs_per_block + 1 + m = max_blobs_per_tx + seen: Set[Tuple[int, ...]] = set() + result: List[Tuple[int, ...]] = [] + + def add(combo: Tuple[int, ...]) -> None: + if combo not in seen: + seen.add(combo) + result.append(combo) + + # 1. Single oversized tx — e.g. (16,) + add((total,)) + + # 2. Greedy pack of total — e.g. total=16, m=6 → (6,6,4) + q, r = divmod(total, m) + greedy = (m,) * q + ((r,) if r else ()) + add(greedy) + + # 3. All singles — e.g. (1,)*16 + add((1,) * total) + + # 4. One full tx + overflow — e.g. total=16, m=6 → (6,10) + overflow = total - m + if overflow >= 1: + add((m, overflow)) + + # 5. One blob + full block — e.g. (1,21) + # Per-tx-oversized elements must be last: the test sends all txs from + # one sender with sequential nonces, so a rejected non-last tx creates + # a nonce gap that causes subsequent txs to fail with NONCE_MISMATCH, + # not the expected blob error. + add((1, max_blobs_per_block)) + + # 6. Balanced all-valid: near-equal tx sizes, all within per-tx limit + # e.g. total=16, m=6 → (6,5,5) + num_txs = math.ceil(total / m) + base, extra = divmod(total, num_txs) + balanced = (base + 1,) * extra + (base,) * (num_txs - extra) + if all(b <= m for b in balanced): + add(balanced) + + return result @classmethod def get_min_excess_blob_gas_for_blob_gas_price( @@ -166,30 +298,55 @@ def get_blob_combinations( return combinations @classmethod - def all_valid_blob_combinations(cls, fork: Fork) -> List[Tuple[int, ...]]: + def all_valid_blob_combinations(cls, fork: Fork) -> List[ParameterSet]: """ Return all valid blob tx combinations for a given block, assuming the given MAX_BLOBS_PER_BLOCK, whilst respecting MAX_BLOBS_PER_TX. """ max_blobs_per_block = fork.max_blobs_per_block() max_blobs_per_tx = fork.max_blobs_per_tx() + exhaustive = max_blobs_per_block <= cls._EXHAUSTIVE_MAX_BLOBS_PER_BLOCK combinations: List[Tuple[int, ...]] = [] for i in range(1, max_blobs_per_block + 1): - combinations += cls.get_blob_combinations(i, max_blobs_per_tx) - return combinations + if exhaustive: + combinations += cls.get_blob_combinations(i, max_blobs_per_tx) + else: + combinations += cls.get_representative_blob_combinations( + i, max_blobs_per_tx + ) + return [ + pytest.param( + combination, + id=f"blobs_per_tx_{repr(combination).replace(' ', '')}", + ) + for combination in combinations + ] @classmethod - def invalid_blob_combinations(cls, fork: Fork) -> List[Tuple[int, ...]]: + def invalid_blob_combinations(cls, fork: Fork) -> List[ParameterSet]: """ Return invalid blob tx combinations for a given block that use up to MAX_BLOBS_PER_BLOCK+1 blobs. 
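The selection strategy above can be pictured with a stripped-down standalone helper that keeps only a few partition shapes (single tx, all singles, greedy pack, balanced split) instead of enumerating every partition of n blobs into txs of at most m blobs. This is a simplified sketch of the idea, not the full set of shapes the patch generates.

import math


def representative_partitions(n: int, m: int) -> list[tuple[int, ...]]:
    combos: list[tuple[int, ...]] = []
    seen: set[tuple[int, ...]] = set()

    def add(c: tuple[int, ...]) -> None:
        if c and c not in seen:
            seen.add(c)
            combos.append(c)

    if n <= m:
        add((n,))  # everything in a single tx
    add((1,) * n)  # one blob per tx
    q, r = divmod(n, m)
    add((m,) * q + ((r,) if r else ()))  # greedy: fill max-size txs first
    half_hi = math.ceil(n / 2)
    if n > 1 and half_hi <= m:
        add((half_hi, n - half_hi))  # balanced two-tx split
    return combos


# Ten blobs with a per-tx cap of six: ten singles, (6, 4) and (5, 5).
print(representative_partitions(10, 6))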
""" max_blobs_per_block = fork.max_blobs_per_block() max_blobs_per_tx = fork.max_blobs_per_tx() - invalid_combinations = cls.get_blob_combinations( - max_blobs_per_block + 1, - max_blobs_per_tx, - ) - invalid_combinations.append((max_blobs_per_block + 1,)) - return invalid_combinations + + invalid_combinations: List[Tuple[int, ...]] = [] + if max_blobs_per_block <= cls._EXHAUSTIVE_MAX_BLOBS_PER_BLOCK: + invalid_combinations += cls.get_blob_combinations( + max_blobs_per_block + 1, + max_blobs_per_tx, + ) + invalid_combinations.append((max_blobs_per_block + 1,)) + else: + invalid_combinations = ( + cls.get_representative_invalid_blob_combinations(fork) + ) + return [ + pytest.param( + combination, + id=f"blobs_per_tx_{repr(combination).replace(' ', '')}", + ) + for combination in invalid_combinations + ] From 554c353b357e7bb340a5bb0aa16772f17a8368ea Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Wed, 4 Feb 2026 10:50:09 +0100 Subject: [PATCH 119/154] feat(benchmark/tests): Extra sstore benchmark tests (#2130) --- .../benchmark/benchmark_code_generator.py | 9 +- .../stateful/bloatnet/test_single_opcode.py | 496 ++++++++++++++++++ 2 files changed, 502 insertions(+), 3 deletions(-) diff --git a/packages/testing/src/execution_testing/benchmark/benchmark_code_generator.py b/packages/testing/src/execution_testing/benchmark/benchmark_code_generator.py index dd8deb42f4..8a77c4eb96 100644 --- a/packages/testing/src/execution_testing/benchmark/benchmark_code_generator.py +++ b/packages/testing/src/execution_testing/benchmark/benchmark_code_generator.py @@ -3,9 +3,9 @@ optimized bytecode patterns. """ -from dataclasses import dataclass +from dataclasses import dataclass, field -from execution_testing.base_types import Address +from execution_testing.base_types import Address, Storage from execution_testing.forks import Fork from execution_testing.specs.benchmark import BenchmarkCodeGenerator from execution_testing.test_types import Alloc @@ -17,6 +17,7 @@ class JumpLoopGenerator(BenchmarkCodeGenerator): """Generates bytecode that loops execution using JUMP operations.""" contract_balance: int = 0 + contract_storage: Storage = field(default_factory=Storage) def deploy_contracts(self, *, pre: Alloc, fork: Fork) -> Address: """Deploy the looping contract.""" @@ -31,7 +32,9 @@ def deploy_contracts(self, *, pre: Alloc, fork: Fork) -> Address: fork=fork, ) self._contract_address = pre.deploy_contract( - code=code, balance=self.contract_balance + code=code, + balance=self.contract_balance, + storage=self.contract_storage, ) return self._contract_address diff --git a/tests/benchmark/stateful/bloatnet/test_single_opcode.py b/tests/benchmark/stateful/bloatnet/test_single_opcode.py index 664ba48db0..51d53705e6 100644 --- a/tests/benchmark/stateful/bloatnet/test_single_opcode.py +++ b/tests/benchmark/stateful/bloatnet/test_single_opcode.py @@ -10,16 +10,23 @@ import json import math from pathlib import Path +from typing import Tuple import pytest from execution_testing import ( + AccessList, Account, + Address, Alloc, + BenchmarkTestFiller, Block, BlockchainTestFiller, Bytecode, Fork, + Hash, + JumpLoopGenerator, Op, + Storage, Transaction, While, ) @@ -394,3 +401,492 @@ def test_sstore_erc20_approve( blocks=[Block(txs=txs)], post=post, ) + + +def sstore_helper_contract( + *, + sloads_before_sstore: bool, + key_warm: bool, + original_value: int, + new_value: int, +) -> Tuple[Bytecode, Bytecode, Bytecode]: + """ + Storage contract for benchmark slot access. 
+ + # Calldata Layout: + # - CALLDATA[0..31]: Starting slot + # - CALLDATA[32..63]: Ending slot + # - CALLDATA[64..95]: Value to write + + Returns: + - setup: Bytecode of the setup of the contract + - loop: Bytecode of the loop of the contract + - cleanup: Bytecode of the cleanup of the contract + + """ + setup = Bytecode() + loop = Bytecode() + cleanup = Bytecode() + + setup += ( + Op.CALLDATALOAD(32) # end_slot + + Op.CALLDATALOAD(64) # value + + Op.CALLDATALOAD(0) # start_slot = counter + ) + # [counter, value, end_slot] + + loop += Op.JUMPDEST + # Loop Body: Store Value at Start Slot + Counter + if sloads_before_sstore: + loop += Op.DUP1 # [counter, counter, value, end_slot] + loop += Op.SLOAD(key_warm=key_warm) + loop += Op.POP + loop += Op.DUP2 # [value, counter, value, end_slot] + loop += Op.DUP2 # [counter, value, counter, value, end_slot] + loop += Op.SSTORE( + key_warm=True, + original_value=original_value, + new_value=new_value, + ) + else: + loop += Op.DUP2 # [value, counter, value, end_slot] + loop += Op.DUP2 # [counter, value, counter, value, end_slot] + loop += Op.SSTORE( # STORAGE[counter, value] = value + key_warm=key_warm, + original_value=original_value, + new_value=new_value, + ) + + # Loop Post: Increment Counter + loop += Op.PUSH1(1) + loop += Op.ADD + # [counter + 1, value, end_slot] + + # Loop Condition: Counter < Num Slots + loop += Op.DUP3 # [end_slot, counter + 1, value, end_slot] + loop += Op.DUP2 # [counter + 1, end_slot, counter + 1, value, end_slot] + loop += Op.LT # [counter + 1 < end_slot, counter + 1, value, end_slot] + loop += Op.ISZERO + loop += Op.ISZERO + loop += Op.PUSH1(len(setup)) + loop += Op.JUMPI + # [counter + 1, value, end_slot] + + # Cleanup: Stop + cleanup += Op.STOP + + return setup, loop, cleanup + + +@pytest.mark.parametrize("use_access_list", [True, False]) +@pytest.mark.parametrize("sloads_before_sstore", [True, False]) +@pytest.mark.parametrize("num_contracts", [1, 5, 10]) +@pytest.mark.parametrize( + "initial_value,write_value", + [ + pytest.param(0, 0, id="zero_to_zero"), + pytest.param(0, 0xDEADBEEF, id="zero_to_nonzero"), + pytest.param(0xDEADBEEF, 0, id="nonzero_to_zero"), + pytest.param(0xDEADBEEF, 0xBEEFBEEF, id="nonzero_to_diff"), + pytest.param(0xDEADBEEF, 0xDEADBEEF, id="nonzero_to_same"), + ], +) +def test_sstore_variants( + benchmark_test: BenchmarkTestFiller, + fork: Fork, + pre: Alloc, + tx_gas_limit: int, + gas_benchmark_value: int, + use_access_list: bool, + sloads_before_sstore: bool, + num_contracts: int, + initial_value: int, + write_value: int, +) -> None: + """ + Benchmark SSTORE instruction with various configurations. 
+ + Variants: + - use_access_list: Warm storage slots via access list + - sloads_before_sstore: Number of SLOADs per slot before SSTORE + - num_contracts: Number of contract instances (cold storage writes) + - initial_value/write_value: Storage transitions + (zero_to_zero, zero_to_nonzero, nonzero_to_zero, nonzero_to_nonzero) + """ + ( + contract_setup, + contract_loop, + contract_cleanup, + ) = sstore_helper_contract( + sloads_before_sstore=sloads_before_sstore, + key_warm=use_access_list, + original_value=initial_value, + new_value=write_value, + ) + contract = contract_setup + contract_loop + contract_cleanup + + gas_per_contract = gas_benchmark_value // num_contracts + gas_limit_cap = fork.transaction_gas_limit_cap() + intrinsic_gas_cost_calc = fork.transaction_intrinsic_cost_calculator() + + def get_calldata(iteration_count: int, start_slot: int) -> bytes: + return ( + Hash(start_slot) + + Hash(start_slot + iteration_count) + + Hash(write_value) + ) + + def get_access_list( + iteration_count: int, start_slot: int, contract_addr: Address + ) -> list[AccessList] | None: + if use_access_list: + storage_keys = [ + Hash(i) + for i in range(start_slot, start_slot + iteration_count) + ] + return [ + AccessList( + address=contract_addr, + storage_keys=storage_keys, + ) + ] + return None + + def calc_gas_consumed( + iteration_count: int, start_slot: int, contract_addr: Address + ) -> int: + intrinsic_gas_cost = intrinsic_gas_cost_calc( + calldata=get_calldata(iteration_count, start_slot), + access_list=get_access_list( + iteration_count, start_slot, contract_addr + ), + return_cost_deducted_prior_execution=True, + ) + overhead_gas = ( + contract_setup.gas_cost(fork) + + contract_cleanup.gas_cost(fork) + + intrinsic_gas_cost + ) + iteration_cost = contract_loop.gas_cost(fork) * iteration_count + return overhead_gas + iteration_cost + + def calc_gas_required( + iteration_count: int, start_slot: int, contract_addr: Address + ) -> int: + gsc = fork.gas_costs() + # SSTORE requires a minimum gas of G_CALL_STIPEND to operate. + # TODO: Correct fix is to introduce bytecode.gas_required. 
+ return ( + calc_gas_consumed(iteration_count, start_slot, contract_addr) + + gsc.G_CALL_STIPEND + ) + + # Calculate how many slots per contract per transaction are required + iteration_counts: list[int] = [] + remaining_gas = gas_per_contract + start_slot = 0 + while remaining_gas > 0: + gas_limit = ( + min(remaining_gas, gas_limit_cap) + if gas_limit_cap is not None + else remaining_gas + ) + if calc_gas_required(0, start_slot, Address(0)) > gas_limit: + break + + # Binary search the optimal number of iterations given the gas limit + low, high = 1, 2 + while calc_gas_required(high, start_slot, Address(0)) <= gas_limit: + high *= 2 + + while low < high: + mid = (low + high) // 2 + if calc_gas_required(mid, start_slot, Address(0)) > gas_limit: + high = mid + else: + low = mid + 1 + + iteration_count = low - 1 + iteration_counts.append(iteration_count) + start_slot += iteration_count + remaining_gas -= calc_gas_required( + iteration_count, start_slot, Address(0) + ) + + assert len(iteration_counts) > 0, ( + f"No iteration counts found for {num_contracts} contracts" + ) + + slots_per_contract = sum(iteration_counts) + + txs: list[Transaction] = [] + post = {} + + gas_used = 0 + for _ in range(num_contracts): + initial_storage = Storage() + + if initial_value != 0: + for i in range(slots_per_contract): + initial_storage[i] = initial_value + + contract_addr = pre.deploy_contract( + code=contract, + storage=initial_storage, + ) + + start_slot = 0 + for iteration_count in iteration_counts: + calldata = get_calldata(iteration_count, start_slot) + access_list = get_access_list( + iteration_count, start_slot, contract_addr + ) + tx_gas_limit = calc_gas_required( + iteration_count, start_slot, contract_addr + ) + tx_gas_consumed = calc_gas_consumed( + iteration_count, start_slot, contract_addr + ) + max_refund = tx_gas_consumed // 5 + refund = min( + contract_loop.refund(fork) * iteration_count, max_refund + ) + gas_used += tx_gas_consumed - refund + + tx = Transaction( + to=contract_addr, + data=calldata, + gas_limit=tx_gas_limit, + sender=pre.fund_eoa(), + access_list=access_list, + ) + txs.append(tx) + + start_slot += iteration_count + + expected_storage = Storage() + for i in range(slots_per_contract): + expected_storage[i] = write_value + + post[contract_addr] = Account( + code=contract, + storage=expected_storage, + ) + + benchmark_test( + blocks=[Block(txs=txs)], + post=post, + expected_benchmark_gas_used=gas_used, + ) + + +def sload_helper_contract( + *, key_warm: bool +) -> Tuple[Bytecode, Bytecode, Bytecode]: + """ + Storage contract for benchmark slot access. 
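The per-transaction sizing in the benchmarks above boils down to finding the largest iteration count whose estimated gas still fits under a limit, assuming cost grows monotonically with iterations: double an upper bound until it overshoots, then bisect. A standalone sketch with a toy cost model standing in for the real calculator:

def max_iterations(gas_required, gas_limit: int) -> int:
    """Largest n with gas_required(n) <= gas_limit, or 0 if none fits."""
    if gas_required(1) > gas_limit:
        return 0
    low, high = 1, 2
    # Grow the upper bound exponentially until it no longer fits.
    while gas_required(high) <= gas_limit:
        high *= 2
    # Bisect for the first count that does not fit; the answer is one less.
    while low < high:
        mid = (low + high) // 2
        if gas_required(mid) > gas_limit:
            high = mid
        else:
            low = mid + 1
    return low - 1


# Toy model: 21_000 fixed overhead plus 5_000 gas per iteration.
print(max_iterations(lambda n: 21_000 + 5_000 * n, 1_000_000))  # -> 195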
+ + # Calldata Layout: + # - CALLDATA[0..31]: Starting slot + # - CALLDATA[32..63]: Ending slot + """ + setup = Bytecode() + loop = Bytecode() + cleanup = Bytecode() + + setup += Op.CALLDATALOAD(32) # end_slot + setup += Op.CALLDATALOAD(0) # start slot = counter + # [counter, end_slot] + + loop += Op.JUMPDEST + + # Loop Body: Load key from storage + loop += Op.DUP1 + loop += Op.SLOAD(key_warm=key_warm) + loop += Op.POP + # [counter, end_slot] + + # Loop Post: Increment Counter + loop += Op.PUSH1(1) + loop += Op.ADD + # [counter + 1, end_slot] + + # Loop Condition: Counter < Num Slots + loop += Op.DUP2 # [end_slot, counter + 1, end_slot] + loop += Op.DUP2 # [counter + 1, end_slot, counter + 1, end_slot] + loop += Op.LT # [counter + 1 < end_slot, counter + 1, end_slot] + loop += Op.ISZERO + loop += Op.ISZERO + loop += Op.PUSH1(len(setup)) + loop += Op.JUMPI + # [counter + 1, value, end_slot] + + # Cleanup: Stop + cleanup += Op.STOP + + return setup, loop, cleanup + + +@pytest.mark.parametrize("warm_slots", [False, True]) +@pytest.mark.parametrize("storage_keys_pre_set", [False, True]) +def test_storage_sload_benchmark( + benchmark_test: BenchmarkTestFiller, + pre: Alloc, + fork: Fork, + gas_benchmark_value: int, + warm_slots: bool, + storage_keys_pre_set: bool, + tx_gas_limit: int, +) -> None: + """ + Benchmark SLOAD instruction with various configurations. + + Variants: + - warm_slots: Warm storage slots via access list + - storage_keys_pre_set: Whether the storage keys are pre-set + """ + contract_setup, contract_loop, contract_cleanup = sload_helper_contract( + key_warm=warm_slots + ) + contract = contract_setup + contract_loop + contract_cleanup + + gas_limit_cap = fork.transaction_gas_limit_cap() + intrinsic_gas_cost_calc = fork.transaction_intrinsic_cost_calculator() + + def get_calldata(iteration_count: int, start_slot: int) -> bytes: + return Hash(start_slot) + Hash(start_slot + iteration_count) + + def get_access_list( + iteration_count: int, start_slot: int, contract_addr: Address + ) -> list[AccessList] | None: + if warm_slots: + storage_keys = [ + Hash(i) + for i in range(start_slot, start_slot + iteration_count) + ] + return [ + AccessList( + address=contract_addr, + storage_keys=storage_keys, + ) + ] + return None + + def calc_gas_required( + iteration_count: int, start_slot: int, contract_addr: Address + ) -> int: + intrinsic_gas_cost = intrinsic_gas_cost_calc( + calldata=get_calldata(iteration_count, start_slot), + access_list=get_access_list( + iteration_count, start_slot, contract_addr + ), + return_cost_deducted_prior_execution=True, + ) + overhead_gas = ( + contract_setup.gas_cost(fork) + + contract_cleanup.gas_cost(fork) + + intrinsic_gas_cost + ) + iteration_cost = contract_loop.gas_cost(fork) * iteration_count + return overhead_gas + iteration_cost + + # Calculate how many slots per transaction are required + iteration_counts: list[int] = [] + remaining_gas = gas_benchmark_value + start_slot = 0 + while remaining_gas > 0: + gas_limit = ( + min(remaining_gas, gas_limit_cap) + if gas_limit_cap is not None + else remaining_gas + ) + if calc_gas_required(0, start_slot, Address(0)) > gas_limit: + break + + # Binary search the optimal number of iterations given the gas limit + low, high = 1, 2 + while calc_gas_required(high, start_slot, Address(0)) <= gas_limit: + high *= 2 + + while low < high: + mid = (low + high) // 2 + if calc_gas_required(mid, start_slot, Address(0)) > gas_limit: + high = mid + else: + low = mid + 1 + + iteration_count = low - 1 + 
iteration_counts.append(iteration_count) + start_slot += iteration_count + remaining_gas -= calc_gas_required( + iteration_count, start_slot, Address(0) + ) + + assert len(iteration_counts) > 0, "No iteration counts found" + + slot_count = sum(iteration_counts) + + initial_storage = Storage() + if storage_keys_pre_set: + for i in range(slot_count): + initial_storage[i] = 1 + + contract_addr = pre.deploy_contract( + code=contract, + storage=initial_storage, + ) + + start_slot = 0 + txs: list[Transaction] = [] + gas_used = 0 + for iteration_count in iteration_counts: + calldata = get_calldata(iteration_count, start_slot) + access_list = get_access_list( + iteration_count, start_slot, contract_addr + ) + tx_gas_limit = calc_gas_required( + iteration_count, start_slot, contract_addr + ) + gas_used += tx_gas_limit + + tx = Transaction( + to=contract_addr, + data=calldata, + gas_limit=tx_gas_limit, + sender=pre.fund_eoa(), + access_list=access_list, + ) + txs.append(tx) + + start_slot += iteration_count + + benchmark_test( + pre=pre, + blocks=[Block(txs=txs)], + expected_benchmark_gas_used=gas_used, + ) + + +@pytest.mark.parametrize("storage_keys_pre_set", [False, True]) +def test_storage_sload_same_key_benchmark( + benchmark_test: BenchmarkTestFiller, + storage_keys_pre_set: bool, +) -> None: + """ + Benchmark SLOAD instruction when loading the same key over and over. + + Variants: + - storage_keys_pre_set: The key is pre-set to a non-zero value. + """ + contract_storage = Storage() + if storage_keys_pre_set: + contract_storage[1] = 1 + + benchmark_test( + target_opcode=Op.SLOAD, + code_generator=JumpLoopGenerator( + setup=Op.PUSH1(1) if storage_keys_pre_set else Op.PUSH0, + attack_block=Op.SLOAD, + contract_storage=contract_storage, + ), + ) From b808aa267ac2f74cf16e7ebcef71e5d680dedb8f Mon Sep 17 00:00:00 2001 From: felipe Date: Wed, 4 Feb 2026 10:46:34 -0700 Subject: [PATCH 120/154] fix(test-fill): use k-way merge for fixture json to reduce memory footprint (#2131) * fix(test-fill): don't load all partials into memory at once when merging * feat(test): add durations and reduce workers for benchmark feature runs --- .github/configs/feature.yaml | 2 +- .../execution_testing/fixtures/collector.py | 66 ++++++++++++------- 2 files changed, 45 insertions(+), 23 deletions(-) diff --git a/.github/configs/feature.yaml b/.github/configs/feature.yaml index 275888ff0b..2c70dfddb7 100644 --- a/.github/configs/feature.yaml +++ b/.github/configs/feature.yaml @@ -9,7 +9,7 @@ develop: benchmark: evm-type: benchmark - fill-params: --no-html --fork=Osaka --gas-benchmark-values 1,5,10,30,60,100,150 -m benchmark ./tests/benchmark + fill-params: --no-html --fork=Osaka --gas-benchmark-values 1,5,10,30,60,100,150 -m benchmark ./tests/benchmark --maxprocesses=30 --durations=50 benchmark_fast: evm-type: benchmark diff --git a/packages/testing/src/execution_testing/fixtures/collector.py b/packages/testing/src/execution_testing/fixtures/collector.py index 0627df1c5f..0e1c05da71 100644 --- a/packages/testing/src/execution_testing/fixtures/collector.py +++ b/packages/testing/src/execution_testing/fixtures/collector.py @@ -3,6 +3,7 @@ of generated fixtures. """ +import heapq import json import os import re @@ -13,6 +14,7 @@ IO, ClassVar, Dict, + Generator, List, Literal, Optional, @@ -26,12 +28,36 @@ from .file import Fixtures +def _sorted_entries_from_partial( + partial_path: Path, +) -> Generator[Tuple[str, str], None, None]: + """ + Generator yielding (key, value) pairs from a partial file, sorted by key. 
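The k-way merge described in this patch (and implemented in the rest of this diff with per-file generators fed into heapq.merge) can be illustrated with a tiny standalone example: each per-worker stream is sorted on its own, and heapq.merge then yields one globally sorted stream while holding only a single entry per stream in memory. The worker data below is invented for the example.

import heapq

# One already-sorted (key, value) stream per worker partial file.
worker_a = [("test_a", "{...}"), ("test_c", "{...}")]
worker_b = [("test_b", "{...}"), ("test_d", "{...}")]

merged = heapq.merge(worker_a, worker_b, key=lambda kv: kv[0])
for key, value in merged:
    # Keys arrive as test_a, test_b, test_c, test_d without ever
    # materializing the combined list.
    print(key, value)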
+ + Loads one partial file into memory at a time (not all partials together). + Each worker's partial file is typically small relative to the total. + """ + entries = [] + with open(partial_path) as f: + for line in f: + line = line.strip() + if line: + entry = json.loads(line) + entries.append((entry["k"], entry["v"])) + entries.sort(key=lambda x: x[0]) + yield from entries + + def merge_partial_fixture_files(output_dir: Path) -> None: """ Merge all partial fixture JSONL files into final JSON fixture files. Called at session end after all workers have written their partials. Each partial file contains JSONL lines: {"k": fixture_id, "v": json_str} + + Uses k-way merge: each partial file is sorted individually, then merged + using heapq.merge. This keeps memory usage proportional to the largest + single partial file, not the total of all partials. """ # Find all partial files partial_files = list(output_dir.rglob("*.partial.*.jsonl")) @@ -56,29 +82,25 @@ def merge_partial_fixture_files(output_dir: Path) -> None: # Merge each group into its target file for target_path, partials in partials_by_target.items(): - entries: Dict[str, str] = {} - - # Read all partial files - for partial in partials: - with open(partial) as f: - for line in f: - line = line.strip() - if not line: - continue - entry = json.loads(line) - entries[entry["k"]] = entry["v"] - - # Write final JSON file - sorted_keys = sorted(entries.keys()) - last_idx = len(sorted_keys) - 1 - with open(target_path, "w") as f: - f.write("{\n") - for i, key in enumerate(sorted_keys): + # K-way merge: sort each partial individually, then merge streams + # Memory = O(largest single partial), not O(sum of all partials) + sorted_iterators = [_sorted_entries_from_partial(p) for p in partials] + merged = heapq.merge(*sorted_iterators, key=lambda x: x[0]) + + # Stream merged entries to output file + with open(target_path, "w") as out_f: + out_f.write("{\n") + first = True + for key, value in merged: + if not first: + out_f.write(",\n") + first = False key_json = json.dumps(key) - value_indented = entries[key].replace("\n", "\n ") - f.write(f" {key_json}: {value_indented}") - f.write(",\n" if i < last_idx else "\n") - f.write("}") + value_indented = value.replace("\n", "\n ") + out_f.write(f" {key_json}: {value_indented}") + if not first: + out_f.write("\n") + out_f.write("}") # Clean up partial files for partial in partials: From 28eb3d5907bc51e0a03da8087ca511fd2d4390b2 Mon Sep 17 00:00:00 2001 From: Sam Wilson <57262657+SamWilsn@users.noreply.github.com> Date: Wed, 4 Feb 2026 17:09:55 -0500 Subject: [PATCH 121/154] feat(tools): retry rpc calls on server-side error (#2056) --- src/ethereum_spec_tools/sync.py | 241 +++++++++++++++++--------------- 1 file changed, 130 insertions(+), 111 deletions(-) diff --git a/src/ethereum_spec_tools/sync.py b/src/ethereum_spec_tools/sync.py index 416de50f62..83bf0a0c71 100644 --- a/src/ethereum_spec_tools/sync.py +++ b/src/ethereum_spec_tools/sync.py @@ -188,6 +188,34 @@ def download(self) -> None: except Full: pass + def _make_request(self, req: request.Request) -> Any: + backoff = 1.75 + start = time.monotonic() + last_error = None + delay = 1.0 + while True: + now = time.monotonic() + elapsed = now - start + remaining = (60.0 * 60.0) - elapsed + + if 0.0 >= remaining: + assert last_error is not None + raise last_error + + try: + with request.urlopen(req, timeout=60.0) as response: + return json.load(response) + except request.HTTPError as e: + if e.code < 500 or e.code > 599: + raise + logging.warning( + 
"server-side error during RPC request", exc_info=True + ) + last_error = e + + time.sleep(min(delay, remaining)) + delay *= backoff + def fetch_blocks( self, first: Uint, @@ -243,70 +271,65 @@ def fetch_blocks_debug( headers=headers, ) - with request.urlopen(post) as response: - replies = json.load(response) - if not isinstance(replies, list): - self.log.error( - "got non-list JSON-RPC response. replies=%r", replies + replies = self._make_request(post) + if not isinstance(replies, list): + self.log.error( + "got non-list JSON-RPC response. replies=%r", replies + ) + raise ValueError + + block_rlps: Dict[Uint, Union[RpcError, bytes]] = {} + + for reply in replies: + try: + reply_id = Uint(int(reply["id"], 0)) + except Exception: + self.log.exception("unable to parse RPC id. reply=%r", reply) + raise + + if reply_id < first or reply_id >= first + count: + raise Exception("mismatched request id") + + if "error" in reply: + block_rlps[reply_id] = RpcError( + reply["error"]["code"], + reply["error"]["message"], ) - raise ValueError + else: + block_rlps[reply_id] = bytes.fromhex(reply["result"][2:]) - block_rlps: Dict[Uint, Union[RpcError, bytes]] = {} + if len(block_rlps) != count: + raise Exception( + f"expected {count} blocks but only got {len(block_rlps)}" + ) - for reply in replies: + self.log.info("blocks [%d, %d) fetched", first, first + count) + + blocks: List[Union[RpcError, Any]] = [] + for _, block_rlp in sorted(block_rlps.items()): + if isinstance(block_rlp, RpcError): + blocks.append(block_rlp) + else: + # Unfortunately we have to decode the RLP twice. + decoded_block = rlp.decode(block_rlp) + assert not isinstance(decoded_block, bytes) + assert not isinstance(decoded_block[0], bytes) + assert isinstance(decoded_block[0][11], bytes) + timestamp = U256.from_be_bytes(decoded_block[0][11]) + self.advance_block(timestamp) try: - reply_id = Uint(int(reply["id"], 0)) + blocks.append( + rlp.decode_to(self.module("blocks").Block, block_rlp) + ) except Exception: self.log.exception( - "unable to parse RPC id. reply=%r", reply + "failed to decode block %d with timestamp %d", + self.block_number, + timestamp, ) raise - if reply_id < first or reply_id >= first + count: - raise Exception("mismatched request id") - - if "error" in reply: - block_rlps[reply_id] = RpcError( - reply["error"]["code"], - reply["error"]["message"], - ) - else: - block_rlps[reply_id] = bytes.fromhex(reply["result"][2:]) - - if len(block_rlps) != count: - raise Exception( - f"expected {count} blocks but only got {len(block_rlps)}" - ) - - self.log.info("blocks [%d, %d) fetched", first, first + count) - - blocks: List[Union[RpcError, Any]] = [] - for _, block_rlp in sorted(block_rlps.items()): - if isinstance(block_rlp, RpcError): - blocks.append(block_rlp) - else: - # Unfortunately we have to decode the RLP twice. 
- decoded_block = rlp.decode(block_rlp) - assert not isinstance(decoded_block, bytes) - assert not isinstance(decoded_block[0], bytes) - assert isinstance(decoded_block[0][11], bytes) - timestamp = U256.from_be_bytes(decoded_block[0][11]) - self.advance_block(timestamp) - try: - blocks.append( - rlp.decode_to( - self.module("blocks").Block, block_rlp - ) - ) - except Exception: - self.log.exception( - "failed to decode block %d with timestamp %d", - self.block_number, - timestamp, - ) - raise - - return blocks + return blocks def load_transaction(self, t: Any) -> Any: """ @@ -438,44 +461,41 @@ def fetch_blocks_eth( headers=headers, ) - with request.urlopen(post) as response: - replies = json.load(response) - block_jsons: Dict[Uint, Any] = {} - ommers_needed: Dict[Uint, int] = {} - blocks: Dict[Uint, Union[Any, RpcError]] = {} + replies = self._make_request(post) + block_jsons: Dict[Uint, Any] = {} + ommers_needed: Dict[Uint, int] = {} + blocks: Dict[Uint, Union[Any, RpcError]] = {} - for reply in replies: - reply_id = Uint(int(reply["id"], 0)) + for reply in replies: + reply_id = Uint(int(reply["id"], 0)) - if reply_id < first or reply_id >= first + count: - raise Exception("mismatched request id") + if reply_id < first or reply_id >= first + count: + raise Exception("mismatched request id") - if "error" in reply: - blocks[reply_id] = RpcError( - reply["error"]["code"], - reply["error"]["message"], - ) - else: - res = reply["result"] - if res is None: - from time import sleep + if "error" in reply: + blocks[reply_id] = RpcError( + reply["error"]["code"], + reply["error"]["message"], + ) + else: + res = reply["result"] + if res is None: + from time import sleep - sleep(12) - break + sleep(12) + break - block_jsons[reply_id] = res - ommers_needed[reply_id] = len(res["uncles"]) + block_jsons[reply_id] = res + ommers_needed[reply_id] = len(res["uncles"]) - ommers = self.fetch_ommers(ommers_needed) - for id in block_jsons: # noqa A001 - self.advance_block(hex_to_u256(block_jsons[id]["timestamp"])) - blocks[id] = self.make_block( - block_jsons[id], ommers.get(id, ()) - ) + ommers = self.fetch_ommers(ommers_needed) + for id in block_jsons: # noqa A001 + self.advance_block(hex_to_u256(block_jsons[id]["timestamp"])) + blocks[id] = self.make_block(block_jsons[id], ommers.get(id, ())) - self.log.info("blocks [%d, %d) fetched", first, first + count) + self.log.info("blocks [%d, %d) fetched", first, first + count) - return [v for (_, v) in sorted(blocks.items())] + return [v for (_, v) in sorted(blocks.items())] def fetch_ommers(self, ommers_needed: Dict[Uint, int]) -> Dict[Uint, Any]: """ @@ -519,37 +539,36 @@ def fetch_ommers(self, ommers_needed: Dict[Uint, int]) -> Dict[Uint, Any]: headers=headers, ) - with request.urlopen(post) as response: - replies = json.load(response) - ommers: Dict[Uint, Dict[Uint, Any]] = {} + replies = self._make_request(post) + ommers: Dict[Uint, Dict[Uint, Any]] = {} - twenty = Uint(20) - for reply in replies: - reply_id = Uint(int(reply["id"], 0)) + twenty = Uint(20) + for reply in replies: + reply_id = Uint(int(reply["id"], 0)) - if reply_id // twenty not in ommers: - ommers[reply_id // twenty] = {} + if reply_id // twenty not in ommers: + ommers[reply_id // twenty] = {} - if "error" in reply: - raise RpcError( - reply["error"]["code"], - reply["error"]["message"], - ) - else: - ommers[reply_id // twenty][reply_id % twenty] = ( - self.make_header(reply["result"]) - ) + if "error" in reply: + raise RpcError( + reply["error"]["code"], + reply["error"]["message"], + ) + 
else: + ommers[reply_id // twenty][reply_id % twenty] = ( + self.make_header(reply["result"]) + ) - self.log.info( - "ommers [%d, %d] fetched", - min(ommers_needed), - max(ommers_needed), - ) + self.log.info( + "ommers [%d, %d] fetched", + min(ommers_needed), + max(ommers_needed), + ) - return { - k: tuple(x for (_, x) in sorted(v.items())) - for (k, v) in ommers.items() - } + return { + k: tuple(x for (_, x) in sorted(v.items())) + for (k, v) in ommers.items() + } def make_header(self, json: Any) -> Any: """ From 26918e5e44782ede5ea760e5ee03e11ea48860fd Mon Sep 17 00:00:00 2001 From: felipe Date: Wed, 4 Feb 2026 18:30:42 -0700 Subject: [PATCH 122/154] feat(test-fill): performance improvements for release processes and fill (#2140) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(test-fil): attempt dist=worksteal for benchmark release * fix(test-fill): fix exponential hanging in phase 1 due to multiple state root calcs * feat(test-fill): more performance updates for filling - hasher.py: O(n²) → O(n) byte concatenation fix (bytes are immutable + recreated instead of mutated) - gen_index.py: 2-pass streaming (less memory, 2x I/O) - collector.py: Simplified dict-based merge - filler.py: Timing instrumentation to debug where time is spent * feat(tool): Speed up tarball creation for releases using pigz if available - Use pigz if available, otherwise use gzip as before * feat(test,fill): add --durations to release runs; some tests are holding workers * fix(test-fill): garbage collect at each worker session finish to prevent buildup * fix: Minor fixes from claude PR review --- .github/actions/build-fixtures/action.yaml | 3 + .github/configs/feature.yaml | 10 +- .../src/execution_testing/cli/gen_index.py | 145 ++++++++++++---- .../src/execution_testing/cli/hasher.py | 8 +- .../cli/pytest_commands/base.py | 3 + .../pytest_commands/plugins/filler/filler.py | 115 +++++++++++-- .../plugins/filler/fixture_output.py | 58 ++++++- .../execution_testing/fixtures/collector.py | 60 +++---- .../fixtures/pre_alloc_groups.py | 157 ++++++++++++++---- 9 files changed, 427 insertions(+), 132 deletions(-) diff --git a/.github/actions/build-fixtures/action.yaml b/.github/actions/build-fixtures/action.yaml index a9fec3371c..f80b057519 100644 --- a/.github/actions/build-fixtures/action.yaml +++ b/.github/actions/build-fixtures/action.yaml @@ -32,6 +32,9 @@ runs: id: evm-builder with: type: ${{ steps.properties.outputs.evm-type }} + - name: Install pigz for parallel tarball compression + shell: bash + run: sudo apt-get install -y pigz - name: Generate fixtures using fill shell: bash run: | diff --git a/.github/configs/feature.yaml b/.github/configs/feature.yaml index 2c70dfddb7..efd6442ab5 100644 --- a/.github/configs/feature.yaml +++ b/.github/configs/feature.yaml @@ -1,22 +1,22 @@ # Unless filling for special features, all features should fill for previous forks (starting from Frontier) too stable: evm-type: stable - fill-params: --no-html --until=Prague --fill-static-tests --ignore=tests/static/state_tests/stQuadraticComplexityTest + fill-params: --until=Prague --fill-static-tests --ignore=tests/static/state_tests/stQuadraticComplexityTest --no-html --durations=50 develop: evm-type: develop - fill-params: --no-html --until=BPO4 --fill-static-tests --ignore=tests/static/state_tests/stQuadraticComplexityTest + fill-params: --until=BPO4 --fill-static-tests --ignore=tests/static/state_tests/stQuadraticComplexityTest --no-html --durations=50 benchmark: evm-type: benchmark 
- fill-params: --no-html --fork=Osaka --gas-benchmark-values 1,5,10,30,60,100,150 -m benchmark ./tests/benchmark --maxprocesses=30 --durations=50 + fill-params: --fork=Osaka --gas-benchmark-values 1,5,10,30,60,100,150 -m benchmark ./tests/benchmark --no-html --durations=50 --maxprocesses=30 --dist=worksteal benchmark_fast: evm-type: benchmark - fill-params: --no-html --fork=Osaka --gas-benchmark-values 100 -m "benchmark" ./tests/benchmark + fill-params: --fork=Osaka --gas-benchmark-values 100 -m "benchmark" ./tests/benchmark --no-html --durations=50 feature_only: true bal: evm-type: develop - fill-params: --no-html --fork=Amsterdam --fill-static-tests + fill-params: --fork=Amsterdam --fill-static-tests --no-html --durations=50 feature_only: true diff --git a/packages/testing/src/execution_testing/cli/gen_index.py b/packages/testing/src/execution_testing/cli/gen_index.py index 3a95688d5e..c8551fc067 100644 --- a/packages/testing/src/execution_testing/cli/gen_index.py +++ b/packages/testing/src/execution_testing/cli/gen_index.py @@ -234,7 +234,11 @@ def merge_partial_indexes(output_dir: Path, quiet_mode: bool = False) -> None: workers have finished and written their partial indexes. Partial indexes use JSONL format (one JSON object per line) for efficient - append-only writes during fill. Entries are validated with Pydantic here. + append-only writes during fill. + + Memory-optimized: Builds hash trie directly while streaming entries, + avoiding accumulation of all entries in a single list. Writes final + JSON by re-reading partials (2x I/O but ~50% less peak memory). Args: output_dir: The fixture output directory. @@ -247,12 +251,12 @@ def merge_partial_indexes(output_dir: Path, quiet_mode: bool = False) -> None: if not partial_files: raise Exception("No partial indexes found.") - # Merge all partial indexes (JSONL format: one entry per line) - # Read as raw dicts — the data was already validated when collected - # from live Pydantic fixture objects in add_fixture(). - all_raw_entries: list[dict] = [] + # Pass 1: Build hash trie directly while streaming (no intermediate list) + # Only keep what's needed for hash computation: path parts and fixture_hash + root_trie: dict = {} all_forks: set = set() all_formats: set = set() + test_count = 0 for partial_file in partial_files: with open(partial_file) as f: @@ -260,39 +264,89 @@ def merge_partial_indexes(output_dir: Path, quiet_mode: bool = False) -> None: line = line.strip() if not line: continue - entry_data = json.loads(line) - all_raw_entries.append(entry_data) - # Collect forks and formats from raw strings - if entry_data.get("fork"): - all_forks.add(entry_data["fork"]) - if entry_data.get("format"): - all_formats.add(entry_data["format"]) - - # Compute root hash from raw dicts (no Pydantic needed) - root_hash = HashableItem.from_raw_entries(all_raw_entries).hash() - - # Build final index — Pydantic validates the entire structure once - # via model_validate(), not 96k individual model_validate() calls. 
- index = IndexFile.model_validate( - { - "test_cases": all_raw_entries, - "root_hash": HexNumber(root_hash), - "created_at": datetime.datetime.now(), - "test_count": len(all_raw_entries), - "forks": list(all_forks), - "fixture_formats": list(all_formats), - } - ) + entry = json.loads(line) + test_count += 1 + + # Collect metadata + if entry.get("fork"): + all_forks.add(entry["fork"]) + if entry.get("format"): + all_formats.add(entry["format"]) + + # Insert directly into trie for hash computation + fixture_hash = entry.get("fixture_hash") + if not fixture_hash: + continue + + path_parts = Path(entry["json_path"]).parts + current = root_trie + + # Navigate to parent folder, creating nodes as needed + for part in path_parts[:-1]: + if part not in current: + current[part] = {} + current = current[part] + + # Add test entry to file node + file_name = path_parts[-1] + if file_name not in current: + current[file_name] = [] - # Write final index + hash_bytes = int(fixture_hash, 16).to_bytes(32, "big") + current[file_name].append((entry["id"], hash_bytes)) + + # Compute root hash from trie (reusing hasher's trie_to_hashable logic) + root_hash = _trie_to_hash(root_trie) + + # Free trie memory before pass 2 + del root_trie + + # Pass 2: Stream entries to final JSON file (re-read partials) + # This avoids keeping all entries in memory simultaneously index_path = meta_dir / "index.json" index_path.parent.mkdir(parents=True, exist_ok=True) - index_path.write_text(index.model_dump_json(exclude_none=True, indent=2)) + + with open(index_path, "w") as out_f: + # Write header + out_f.write("{\n") + out_f.write(f' "root_hash": "0x{root_hash.hex()}",\n') + out_f.write( + f' "created_at": "{datetime.datetime.now().isoformat()}",\n' + ) + out_f.write(f' "test_count": {test_count},\n') + out_f.write(f' "forks": {json.dumps(sorted(all_forks))},\n') + out_f.write( + f' "fixture_formats": {json.dumps(sorted(all_formats))},\n' + ) + out_f.write(' "test_cases": [\n') + + # Stream test cases from partials (second read) + first_entry = True + for partial_file in partial_files: + with open(partial_file) as f: + for line in f: + line = line.strip() + if not line: + continue + if not first_entry: + out_f.write(",\n") + first_entry = False + # Write entry with indentation + entry = json.loads(line) + entry_json = json.dumps(entry, indent=2) + # Indent each line of the entry + indented = "\n".join( + " " + ln for ln in entry_json.split("\n") + ) + out_f.write(indented) + + out_f.write("\n ]\n") + out_f.write("}") if not quiet_mode: rich.print( f"[green]Merged {len(partial_files)} partial indexes " - f"({len(all_raw_entries)} test cases) into {index_path}[/]" + f"({test_count} test cases) into {index_path}[/]" ) # Cleanup partial files @@ -300,5 +354,34 @@ def merge_partial_indexes(output_dir: Path, quiet_mode: bool = False) -> None: partial_file.unlink() +def _trie_to_hash(root_trie: dict) -> bytes: + """ + Compute hash from trie structure built during streaming. + + Mirrors HashableItem.from_raw_entries logic but works on pre-built trie. 
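+
+    For intuition, a toy trie (hypothetical paths and hashes) looks like:
+
+        {
+            "blockchain_tests": {
+                "example.json": [
+                    ("test_a", b"\x01" * 32),
+                    ("test_b", b"\x02" * 32),
+                ],
+            },
+        }
+
+    File nodes hash their per-test hashes sorted by test id; folder nodes
+    hash their children's hashes sorted by name.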
+ """ + import hashlib + + def hash_node(node: dict) -> bytes: + """Recursively hash a trie node.""" + hash_parts: list[bytes] = [] + + for name in sorted(node.keys()): + child = node[name] + if isinstance(child, list): + # File node: child is list of (test_id, hash_bytes) + # Hash = sha256(sorted test hashes concatenated) + test_hashes = [h for _, h in sorted(child, key=lambda x: x[0])] + file_hash = hashlib.sha256(b"".join(test_hashes)).digest() + hash_parts.append(file_hash) + else: + # Folder node: recurse + hash_parts.append(hash_node(child)) + + return hashlib.sha256(b"".join(hash_parts)).digest() + + return hash_node(root_trie) + + if __name__ == "__main__": generate_fixtures_index_cli() diff --git a/packages/testing/src/execution_testing/cli/hasher.py b/packages/testing/src/execution_testing/cli/hasher.py index 5bd6a9b8e9..4e3b57da14 100644 --- a/packages/testing/src/execution_testing/cli/hasher.py +++ b/packages/testing/src/execution_testing/cli/hasher.py @@ -44,11 +44,9 @@ def hash(self) -> bytes: return self.root if self.items is None: raise ValueError("No items to hash") - all_hash_bytes = b"" - for _, item in sorted(self.items.items()): - item_hash_bytes = item.hash() - all_hash_bytes += item_hash_bytes - return hashlib.sha256(all_hash_bytes).digest() + # Use list + join instead of += to avoid O(n²) byte concatenation + hash_parts = [item.hash() for _, item in sorted(self.items.items())] + return hashlib.sha256(b"".join(hash_parts)).digest() def format_lines( self, diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/base.py b/packages/testing/src/execution_testing/cli/pytest_commands/base.py index 5a6835fd6b..47891d5652 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/base.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/base.py @@ -94,6 +94,9 @@ def run_multiple(self, executions: List[PytestExecution]) -> int: f"{execution.description}[/bold blue]" ) self.console.rule(phase_text, style="bold blue") + # Flush for CI visibility (GitHub Actions buffers output) + sys.stdout.flush() + sys.stderr.flush() result = self.run_single(execution) if result != 0: diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py index 7dfef105de..3559776bcd 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py @@ -9,9 +9,12 @@ import atexit import configparser import datetime +import gc import json import os import signal +import sys +import time import warnings from dataclasses import dataclass, field from pathlib import Path @@ -51,6 +54,10 @@ TestInfo, merge_partial_fixture_files, ) +from execution_testing.fixtures.pre_alloc_groups import ( + _get_worker_id, + merge_partial_group_files, +) from execution_testing.forks import ( Fork, get_transition_fork_predecessor, @@ -402,13 +409,17 @@ def update_pre_alloc_group_builder( self.pre_alloc_group_builders.root[hash_key] = group_builder def save_pre_alloc_groups(self) -> None: - """Save pre-allocation groups to disk.""" + """Save pre-allocation groups to disk as partial files.""" if self.pre_alloc_group_builders is None: return pre_alloc_folder = self.fixture_output.pre_alloc_groups_folder_path pre_alloc_folder.mkdir(parents=True, exist_ok=True) - self.pre_alloc_group_builders.to_folder(pre_alloc_folder) + # Pass worker_id so each 
worker writes its own partial files + # (no lock contention). Master merges them after all workers finish. + self.pre_alloc_group_builders.to_folder( + pre_alloc_folder, worker_id=_get_worker_id() + ) def calculate_post_state_diff( @@ -901,21 +912,30 @@ def pytest_terminal_summary( session_instance: FillingSession = config.filling_session # type: ignore[attr-defined] if session_instance.phase_manager.is_pre_alloc_generation: # Generate summary stats - pre_alloc_groups: PreAllocGroups + # For xdist, count files and accounts without fully loading groups + # (avoids expensive state_root computation just for summary stats) if config.pluginmanager.hasplugin("xdist"): - # Load pre-allocation groups from disk - pre_alloc_groups = PreAllocGroups.from_folder( - config.fixture_output.pre_alloc_groups_folder_path, # type: ignore[attr-defined] - lazy_load=False, + pre_alloc_folder = ( + config.fixture_output.pre_alloc_groups_folder_path # type: ignore[attr-defined] ) + group_files = list(pre_alloc_folder.glob("*.json")) + total_groups = len(group_files) + # Count accounts by loading as builder (no genesis computation) + total_accounts = 0 + for group_file in group_files: + builder = PreAllocGroupBuilder.model_validate_json( + group_file.read_text() + ) + total_accounts += builder.get_pre_account_count() else: - assert session_instance.pre_alloc_groups is not None - pre_alloc_groups = session_instance.pre_alloc_groups - - total_groups = len(pre_alloc_groups.root) - total_accounts = sum( - group.pre_account_count for group in pre_alloc_groups.values() - ) + assert session_instance.pre_alloc_group_builders is not None + total_groups = len( + session_instance.pre_alloc_group_builders.root + ) + total_accounts = sum( + builder.get_pre_account_count() + for builder in session_instance.pre_alloc_group_builders.root.values() # noqa: E501 + ) terminalreporter.write_sep( "=", @@ -1746,13 +1766,45 @@ def pytest_sessionfinish(session: pytest.Session, exitstatus: int) -> None: - Generate index file for all produced fixtures. - Create tarball of the output directory if the output is a tarball. 
""" + + def _log_timing(msg: str) -> None: + """Log with timestamp and flush immediately for CI visibility.""" + log_line = f"[sessionfinish] {time.strftime('%H:%M:%S')} {msg}" + # Print to stderr (unbuffered) for immediate CI visibility + print(log_line, file=sys.stderr, flush=True) + + # Log immediately when hook is entered (before any early returns) + is_worker = xdist.is_xdist_worker(session) + _log_timing(f"pytest_sessionfinish ENTERED (worker={is_worker})") + del exitstatus # Save pre-allocation groups after phase 1 fixture_output: FixtureOutput = session.config.fixture_output # type: ignore[attr-defined] session_instance: FillingSession = session.config.filling_session # type: ignore[attr-defined] if session_instance.phase_manager.is_pre_alloc_generation: + _log_timing("Phase 1: saving pre-alloc groups (partial)...") + t0 = time.time() session_instance.save_pre_alloc_groups() + _log_timing( + f"Phase 1: save_pre_alloc_groups done in {time.time() - t0:.1f}s" + ) + + # Master merges all worker partial files after all workers finish + if not is_worker: + _log_timing("Phase 1 (master): merging partial group files...") + t0 = time.time() + pre_alloc_folder = fixture_output.pre_alloc_groups_folder_path + merge_partial_group_files(pre_alloc_folder) + _log_timing( + f"Phase 1 (master): merge done in {time.time() - t0:.1f}s" + ) + else: + # Workers: clear in-memory state to reduce memory pressure while + # waiting for other workers to finish + session_instance.pre_alloc_group_builders = None + gc.collect() + return if session.config.getoption("optimize_gas", False): @@ -1771,21 +1823,44 @@ def pytest_sessionfinish(session: pytest.Session, exitstatus: int) -> None: json.dumps(gas_optimized_tests, indent=2, sort_keys=True) ) - if xdist.is_xdist_worker(session): + if is_worker: + # Workers: clear in-memory state to reduce memory pressure while + # waiting for other workers to finish + session_instance.pre_alloc_groups = None + if hasattr(session.config, "fixture_collector"): + fc = session.config.fixture_collector + fc.all_fixtures.clear() + fc._fixtures_to_verify.clear() + gc.collect() return if fixture_output.is_stdout or is_help_or_collectonly_mode(session.config): return + _log_timing("Finalization (master): starting...") + # Merge partial fixture files from all workers into final JSON files + _log_timing("merge_partial_fixture_files: starting...") + t0 = time.time() merge_partial_fixture_files(fixture_output.directory) + _log_timing( + f"merge_partial_fixture_files: done in {time.time() - t0:.1f}s" + ) # Remove any lock files that may have been created. + _log_timing("Removing lock files...") + t0 = time.time() for file in fixture_output.directory.rglob("*.lock"): file.unlink() + _log_timing(f"Lock files removed in {time.time() - t0:.1f}s") # Verify fixtures after merge if verification is enabled + _log_timing("_verify_fixtures_post_merge: starting...") + t0 = time.time() _verify_fixtures_post_merge(session.config, fixture_output.directory) + _log_timing( + f"_verify_fixtures_post_merge: done in {time.time() - t0:.1f}s" + ) # Generate index file for all produced fixtures by merging partial indexes. 
# Only merge if partial indexes were actually written (i.e., tests produced @@ -1797,7 +1872,17 @@ def pytest_sessionfinish(session: pytest.Session, exitstatus: int) -> None: ): meta_dir = fixture_output.directory / ".meta" if meta_dir.exists() and any(meta_dir.glob("partial_index*.jsonl")): + _log_timing("merge_partial_indexes: starting...") + t0 = time.time() merge_partial_indexes(fixture_output.directory, quiet_mode=True) + _log_timing( + f"merge_partial_indexes: done in {time.time() - t0:.1f}s" + ) # Create tarball of the output directory if the output is a tarball. + _log_timing("create_tarball: starting...") + t0 = time.time() fixture_output.create_tarball() + _log_timing(f"create_tarball: done in {time.time() - t0:.1f}s") + + _log_timing("Finalization (master): COMPLETE") diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/fixture_output.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/fixture_output.py index 110f093434..287aa324f1 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/fixture_output.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/fixture_output.py @@ -1,7 +1,9 @@ """Fixture output configuration for generated test fixtures.""" import shutil +import subprocess import tarfile +import warnings from pathlib import Path import pytest @@ -219,11 +221,28 @@ def create_directories(self, is_master: bool) -> None: parents=True, exist_ok=True ) + @staticmethod + def _pigz_available() -> bool: + """Check if pigz (parallel gzip) is available on the system.""" + return shutil.which("pigz") is not None + def create_tarball(self) -> None: - """Create tarball of the output directory if configured to do so.""" + """ + Create tarball of the output directory if configured to do so. + + Automatically uses pigz for parallel compression if available, + otherwise falls back to standard single-threaded gzip. + """ if not self.is_tarball: return + if self._pigz_available(): + self._create_tarball_with_pigz() + else: + self._create_tarball_standard() + + def _create_tarball_standard(self) -> None: + """Create tarball using Python's tarfile module (single-threaded).""" with tarfile.open(self.output_path, "w:gz") as tar: for file in self.directory.rglob("*"): if file.suffix in {".json", ".ini"}: @@ -232,6 +251,43 @@ def create_tarball(self) -> None: ) tar.add(file, arcname=arcname) + def _create_tarball_with_pigz(self) -> None: + """ + Create tarball using Python tarfile + pigz for parallel compression. + + This approach uses Python's tarfile to create the uncompressed .tar + (which correctly handles arcnames across all platforms), then uses + pigz for parallel gzip compression with auto-detected core count. 
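+
+        Roughly equivalent to the shell pipeline (assuming pigz is on PATH):
+
+            tar -cf fixtures.tar <json/ini files> && pigz -f fixtures.tar
+
+        which replaces fixtures.tar with fixtures.tar.gz at the output path.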
+ """ + # Create uncompressed tar first (output_path minus .gz suffix) + temp_tar = self.output_path.with_suffix("") # Remove .gz suffix + + try: + # Use Python tarfile for cross-platform tar creation with arcnames + with tarfile.open(temp_tar, "w") as tar: + for file in self.directory.rglob("*"): + if file.suffix in {".json", ".ini"}: + arcname = Path("fixtures") / file.relative_to( + self.directory + ) + tar.add(file, arcname=arcname) + + # Compress with pigz (parallel gzip, auto-detects available cores) + subprocess.run( + ["pigz", "-f", str(temp_tar)], check=True, capture_output=True + ) + except (subprocess.CalledProcessError, OSError) as e: + # Clean up temp file if it exists + if temp_tar.exists(): + temp_tar.unlink() + # Fall back to standard tarball creation with warning + warnings.warn( + f"pigz compression failed ({type(e).__name__}: {e}), " + "falling back to standard gzip", + stacklevel=2, + ) + self._create_tarball_standard() + @classmethod def from_config(cls, config: pytest.Config) -> "FixtureOutput": """Create a FixtureOutput instance from pytest configuration.""" diff --git a/packages/testing/src/execution_testing/fixtures/collector.py b/packages/testing/src/execution_testing/fixtures/collector.py index 0e1c05da71..3b10c823ee 100644 --- a/packages/testing/src/execution_testing/fixtures/collector.py +++ b/packages/testing/src/execution_testing/fixtures/collector.py @@ -3,7 +3,6 @@ of generated fixtures. """ -import heapq import json import os import re @@ -14,7 +13,6 @@ IO, ClassVar, Dict, - Generator, List, Literal, Optional, @@ -28,26 +26,6 @@ from .file import Fixtures -def _sorted_entries_from_partial( - partial_path: Path, -) -> Generator[Tuple[str, str], None, None]: - """ - Generator yielding (key, value) pairs from a partial file, sorted by key. - - Loads one partial file into memory at a time (not all partials together). - Each worker's partial file is typically small relative to the total. - """ - entries = [] - with open(partial_path) as f: - for line in f: - line = line.strip() - if line: - entry = json.loads(line) - entries.append((entry["k"], entry["v"])) - entries.sort(key=lambda x: x[0]) - yield from entries - - def merge_partial_fixture_files(output_dir: Path) -> None: """ Merge all partial fixture JSONL files into final JSON fixture files. @@ -55,9 +33,8 @@ def merge_partial_fixture_files(output_dir: Path) -> None: Called at session end after all workers have written their partials. Each partial file contains JSONL lines: {"k": fixture_id, "v": json_str} - Uses k-way merge: each partial file is sorted individually, then merged - using heapq.merge. This keeps memory usage proportional to the largest - single partial file, not the total of all partials. + Processes one target file at a time, reading its partials sequentially + into a dict. Memory = O(entries per target), freed before next target. 
""" # Find all partial files partial_files = list(output_dir.rglob("*.partial.*.jsonl")) @@ -82,26 +59,31 @@ def merge_partial_fixture_files(output_dir: Path) -> None: # Merge each group into its target file for target_path, partials in partials_by_target.items(): - # K-way merge: sort each partial individually, then merge streams - # Memory = O(largest single partial), not O(sum of all partials) - sorted_iterators = [_sorted_entries_from_partial(p) for p in partials] - merged = heapq.merge(*sorted_iterators, key=lambda x: x[0]) - - # Stream merged entries to output file + # Read partials sequentially into dict (one at a time) + entries: Dict[str, str] = {} + for partial in partials: + with open(partial) as f: + for line in f: + line = line.strip() + if line: + entry = json.loads(line) + entries[entry["k"]] = entry["v"] + + # Write sorted entries to output file with open(target_path, "w") as out_f: out_f.write("{\n") - first = True - for key, value in merged: - if not first: - out_f.write(",\n") - first = False + sorted_keys = sorted(entries.keys()) + last_idx = len(sorted_keys) - 1 + for i, key in enumerate(sorted_keys): key_json = json.dumps(key) - value_indented = value.replace("\n", "\n ") + value_indented = entries[key].replace("\n", "\n ") out_f.write(f" {key_json}: {value_indented}") - if not first: - out_f.write("\n") + out_f.write(",\n" if i < last_idx else "\n") out_f.write("}") + # Free memory before processing next target + entries.clear() + # Clean up partial files for partial in partials: partial.unlink() diff --git a/packages/testing/src/execution_testing/fixtures/pre_alloc_groups.py b/packages/testing/src/execution_testing/fixtures/pre_alloc_groups.py index 41aa1d1150..6ade0d7666 100644 --- a/packages/testing/src/execution_testing/fixtures/pre_alloc_groups.py +++ b/packages/testing/src/execution_testing/fixtures/pre_alloc_groups.py @@ -1,6 +1,7 @@ """Pre-allocation group models for test fixture generation.""" import json +import os from dataclasses import dataclass from pathlib import Path from typing import ( @@ -11,12 +12,12 @@ KeysView, List, Literal, + Optional, Self, Tuple, ) -from filelock import FileLock -from pydantic import Field, PrivateAttr +from pydantic import Field, PrivateAttr, ValidationError from execution_testing.base_types import ( CamelModel, @@ -76,44 +77,104 @@ def build(self) -> "PreAllocGroup": genesis=self.calculate_genesis(), ) - def to_file(self, file: Path) -> None: - """Save PreAllocGroup to a file.""" - lock_file_path = file.with_suffix(".lock") - with FileLock(lock_file_path): - if file.exists(): - with open(file, "r") as f: - previous_pre_alloc_group = ( - PreAllocGroup.model_validate_json(f.read()) - ) - for account in previous_pre_alloc_group.pre: - existing_account = previous_pre_alloc_group.pre[account] - if account not in self.pre: - self.pre[account] = existing_account + def to_partial_file( + self, file: Path, worker_id: Optional[str] = None + ) -> None: + """ + Save PreAllocGroupBuilder to a partial file (no locking). + + Each worker writes its own partial file, which are merged at session + end by merge_partial_group_files(). This eliminates lock contention + that caused workers to take 30-180+ seconds each. + + Saves the builder format (without genesis/state_root) to avoid + expensive state root computation during Phase 1. State root is + computed once when loading in Phase 2 via PreAllocGroup.from_file(). 
+ """ + suffix = f".{worker_id}" if worker_id else ".main" + partial_path = file.with_suffix(f".partial{suffix}.json") + partial_path.write_text( + self.model_dump_json(by_alias=True, exclude_none=True, indent=2) + ) + + +def _get_worker_id() -> Optional[str]: + """Get the xdist worker ID from environment, or None if not in xdist.""" + return os.environ.get("PYTEST_XDIST_WORKER") + + +def merge_partial_group_files(folder: Path) -> None: + """ + Merge all partial group files into final group files. + + Called by master process after all workers have finished Phase 1. + Each worker writes {group_hash}.partial.{worker_id}.json files, + which are merged here into {group_hash}.json files. + """ + partial_files = list(folder.glob("*.partial.*.json")) + if not partial_files: + return + + # Group partials by target: {hash}.partial.{worker}.json -> {hash}.json + partials_by_target: Dict[Path, List[Path]] = {} + for partial in partial_files: + name = partial.name + idx = name.find(".partial.") + if idx == -1: + continue + target_name = name[:idx] + ".json" + target_path = partial.parent / target_name + if target_path not in partials_by_target: + partials_by_target[target_path] = [] + partials_by_target[target_path].append(partial) + + # Merge each group's partials + for target_path, partials in partials_by_target.items(): + merged_builder: Optional[PreAllocGroupBuilder] = None + + for partial in partials: + builder = PreAllocGroupBuilder.model_validate_json( + partial.read_text() + ) + + if merged_builder is None: + merged_builder = builder + else: + # Merge pre-allocations (check for collisions) + for account in builder.pre: + new_account = builder.pre[account] + if account not in merged_builder.pre: + merged_builder.pre[account] = new_account else: - new_account = self.pre[account] + existing_account = merged_builder.pre[account] if new_account != existing_account: - # This procedure fails during xdist worker's - # pytest_sessionfinish and is not reported to the - # master thread. We signal here that the groups - # created contain a collision. - collision_file_path = file.with_suffix(".fail") + # Write collision file for error reporting + collision_file_path = target_path.with_suffix( + ".fail" + ) collision_exception = Alloc.CollisionError( address=account, account_1=existing_account, account_2=new_account, ) - with open(collision_file_path, "w") as f: - f.write( - json.dumps(collision_exception.to_json()) - ) + collision_file_path.write_text( + json.dumps(collision_exception.to_json()) + ) raise collision_exception - self.test_ids.extend(previous_pre_alloc_group.test_ids) - with open(file, "w") as f: - f.write( - self.build().model_dump_json( - by_alias=True, exclude_none=True, indent=2 - ) + + # Merge test_ids + merged_builder.test_ids.extend(builder.test_ids) + + # Clean up partial file after processing + partial.unlink() + + # Write final merged file + if merged_builder is not None: + target_path.write_text( + merged_builder.model_dump_json( + by_alias=True, exclude_none=True, indent=2 ) + ) class PreAllocGroupBuilders(EthereumTestRootModel): @@ -128,11 +189,16 @@ class PreAllocGroupBuilders(EthereumTestRootModel): root: Dict[str, PreAllocGroupBuilder] - def to_folder(self, folder: Path) -> None: - """Save PreAllocGroups to a folder of pre-allocation files.""" + def to_folder(self, folder: Path, worker_id: Optional[str] = None) -> None: + """ + Save PreAllocGroups to a folder as partial files. + + Each worker writes its own partial files (no lock contention). 
+ Call merge_partial_group_files() on master after all workers finish. + """ for key, value in self.root.items(): assert value is not None, f"Value for key {key} is None" - value.to_file(folder / f"{key}.json") + value.to_partial_file(folder / f"{key}.json", worker_id=worker_id) def add_test_pre( self, @@ -271,9 +337,28 @@ def model_post_init(self, __context: Any) -> None: @classmethod def from_file(cls, file: Path) -> Self: - """Load a pre-allocation group from a JSON file.""" + """ + Load a pre-allocation group from a JSON file. + + Handles both builder format (without genesis) and full format (with + genesis). If genesis is missing, computes it from the pre-allocation + state. This ensures state root computation happens exactly once when + loading in Phase 2, not during Phase 1 merging. + """ with open(file) as f: - return cls.model_validate_json(f.read()) + data = f.read() + + # Try loading as full PreAllocGroup first (backwards compatibility) + try: + return cls.model_validate_json(data) + except ValidationError: + pass + + # Load as builder format and compute genesis + builder = PreAllocGroupBuilder.model_validate_json(data) + built = builder.build() + # Use cls.model_validate to ensure proper Self return type + return cls.model_validate(built.model_dump()) class PreAllocGroups(EthereumTestRootModel): From a9df61aa447373532b2939cc1d28f1abd2435b2b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=94=A1=E4=BD=B3=E8=AA=A0=20Louis=20Tsai?= <72684086+LouisTsai-Csie@users.noreply.github.com> Date: Thu, 5 Feb 2026 16:37:50 +0800 Subject: [PATCH 123/154] feat(test-benchmark): add gas limit check for BLS12_G2_MSM benchmark (#2143) --- tests/benchmark/compute/precompile/test_bls12_381.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/benchmark/compute/precompile/test_bls12_381.py b/tests/benchmark/compute/precompile/test_bls12_381.py index 0d2ea91310..653473dac6 100644 --- a/tests/benchmark/compute/precompile/test_bls12_381.py +++ b/tests/benchmark/compute/precompile/test_bls12_381.py @@ -177,6 +177,7 @@ def test_bls12_g1_msm( def test_bls12_g2_msm( benchmark_test: BenchmarkTestFiller, fork: Fork, + gas_benchmark_value: int, k: int, ) -> None: """Benchmark BLS12_G2_MSM precompile with varying number of points.""" @@ -190,6 +191,13 @@ def test_bls12_g2_msm( * k ) + intrinsic_gas_cost = fork.transaction_intrinsic_cost_calculator()( + calldata=calldata + ) + + if intrinsic_gas_cost > gas_benchmark_value: + pytest.skip("k configuration exceeds the gas limit") + attack_block = Op.POP( Op.STATICCALL( gas=Op.GAS, address=precompile_address, args_size=Op.CALLDATASIZE From 0a07c004079ac48790e08e958d9d9e311dd52ce0 Mon Sep 17 00:00:00 2001 From: felipe Date: Thu, 5 Feb 2026 01:40:08 -0700 Subject: [PATCH 124/154] fix(test-ci): Fix benchmark artifact expectation for new builder prealloc (#2142) --- .../src/execution_testing/cli/extract_config.py | 7 ++++--- .../fixtures/pre_alloc_groups.py | 16 ++++------------ 2 files changed, 8 insertions(+), 15 deletions(-) diff --git a/packages/testing/src/execution_testing/cli/extract_config.py b/packages/testing/src/execution_testing/cli/extract_config.py index 10cf25c965..af98b15a9d 100755 --- a/packages/testing/src/execution_testing/cli/extract_config.py +++ b/packages/testing/src/execution_testing/cli/extract_config.py @@ -36,7 +36,7 @@ ) from execution_testing.fixtures.blockchain import FixtureHeader from execution_testing.fixtures.file import Fixtures -from execution_testing.fixtures.pre_alloc_groups import PreAllocGroup +from 
execution_testing.fixtures.pre_alloc_groups import PreAllocGroupBuilder from execution_testing.forks import Fork @@ -176,8 +176,9 @@ def from_fixture(cls, fixture_path: Path) -> Self: pass try: - # Try to load pre-allocation group - pre_alloc_group = PreAllocGroup.model_validate_json(fixture_bytes) + # Load as builder format and compute genesis on-demand + builder = PreAllocGroupBuilder.model_validate_json(fixture_bytes) + pre_alloc_group = builder.build() return cls( header=pre_alloc_group.genesis, alloc=pre_alloc_group.pre, diff --git a/packages/testing/src/execution_testing/fixtures/pre_alloc_groups.py b/packages/testing/src/execution_testing/fixtures/pre_alloc_groups.py index 6ade0d7666..69e69fcf21 100644 --- a/packages/testing/src/execution_testing/fixtures/pre_alloc_groups.py +++ b/packages/testing/src/execution_testing/fixtures/pre_alloc_groups.py @@ -17,7 +17,7 @@ Tuple, ) -from pydantic import Field, PrivateAttr, ValidationError +from pydantic import Field, PrivateAttr from execution_testing.base_types import ( CamelModel, @@ -340,21 +340,13 @@ def from_file(cls, file: Path) -> Self: """ Load a pre-allocation group from a JSON file. - Handles both builder format (without genesis) and full format (with - genesis). If genesis is missing, computes it from the pre-allocation - state. This ensures state root computation happens exactly once when - loading in Phase 2, not during Phase 1 merging. + Files are stored in builder format (without genesis). Genesis is + computed on-demand when loading, ensuring state root computation + happens exactly once in Phase 2, not during Phase 1 merging. """ with open(file) as f: data = f.read() - # Try loading as full PreAllocGroup first (backwards compatibility) - try: - return cls.model_validate_json(data) - except ValidationError: - pass - - # Load as builder format and compute genesis builder = PreAllocGroupBuilder.model_validate_json(data) built = builder.build() # Use cls.model_validate to ensure proper Self return type From 1140c0edf9802f56e185faa76e1bf2286bc4e7bc Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 5 Feb 2026 10:17:47 +0100 Subject: [PATCH 125/154] refactor(testing): Implement IteratingBytecode, FixedIterationsBytecode (#2030) --- .../testing/src/execution_testing/__init__.py | 6 + .../src/execution_testing/tools/__init__.py | 8 +- .../tools/tests/test_iterating_bytecode.py | 338 ++++++++++ .../tools/tools_code/__init__.py | 8 +- .../tools/tools_code/generators.py | 595 +++++++++++++++++- tests/benchmark/compute/helpers.py | 232 ++++++- .../compute/instruction/test_keccak.py | 15 +- .../compute/instruction/test_storage.py | 217 +++---- .../scenario/test_unchunkified_bytecode.py | 264 +++----- 9 files changed, 1343 insertions(+), 340 deletions(-) create mode 100644 packages/testing/src/execution_testing/tools/tests/test_iterating_bytecode.py diff --git a/packages/testing/src/execution_testing/__init__.py b/packages/testing/src/execution_testing/__init__.py index fbc3d6c5eb..66139cea57 100644 --- a/packages/testing/src/execution_testing/__init__.py +++ b/packages/testing/src/execution_testing/__init__.py @@ -90,9 +90,12 @@ Conditional, Create2PreimageLayout, DeploymentTestType, + FixedIterationsBytecode, Initcode, + IteratingBytecode, ParameterSet, Switch, + TransactionWithCost, While, extend_with_defaults, gas_test, @@ -155,12 +158,14 @@ "EngineAPIError", "Environment", "EOA", + "FixedIterationsBytecode", "FixtureCollector", "Fork", "GasCosts", "Hash", "Header", "Initcode", + "IteratingBytecode", "JumpLoopGenerator", "Macro", 
"Macros", @@ -192,6 +197,7 @@ "TransactionTest", "TransactionTestFiller", "TransactionType", + "TransactionWithCost", "While", "CoerceBytes", "Withdrawal", diff --git a/packages/testing/src/execution_testing/tools/__init__.py b/packages/testing/src/execution_testing/tools/__init__.py index ce9268fcb7..684a946bf4 100644 --- a/packages/testing/src/execution_testing/tools/__init__.py +++ b/packages/testing/src/execution_testing/tools/__init__.py @@ -9,8 +9,11 @@ CodeGasMeasure, Conditional, Create2PreimageLayout, + FixedIterationsBytecode, Initcode, + IteratingBytecode, Switch, + TransactionWithCost, While, ) from .utility.generators import ( @@ -27,12 +30,15 @@ "Case", "CodeGasMeasure", "Conditional", + "Create2PreimageLayout", "DeploymentTestType", + "FixedIterationsBytecode", "Initcode", + "IteratingBytecode", "ParameterSet", "Switch", + "TransactionWithCost", "While", - "Create2PreimageLayout", "extend_with_defaults", "gas_test", "generate_system_contract_deploy_test", diff --git a/packages/testing/src/execution_testing/tools/tests/test_iterating_bytecode.py b/packages/testing/src/execution_testing/tools/tests/test_iterating_bytecode.py new file mode 100644 index 0000000000..7f509a51c9 --- /dev/null +++ b/packages/testing/src/execution_testing/tools/tests/test_iterating_bytecode.py @@ -0,0 +1,338 @@ +"""Test suite for `IteratingBytecode` class.""" + +from typing import Self, Type + +import pytest + +from execution_testing.forks import Osaka +from execution_testing.vm import Op + +from ..tools_code import FixedIterationsBytecode, IteratingBytecode + +OSAKA_GAS_COSTS = Osaka.gas_costs() + + +class CustomOsaka(Osaka): + """Custom Osaka fork with a custom transaction gas limit cap.""" + + tx_gas_limit_cap: int | None = 1_000_000 + + @classmethod + def with_tx_gas_limit_cap(cls, tx_gas_limit_cap: int | None) -> Type[Self]: + """ + Return a new CustomOsaka fork with the given transaction gas limit cap. 
+ """ + return type( + cls.__name__, (cls,), {"tx_gas_limit_cap": tx_gas_limit_cap} + ) + + @classmethod + def transaction_gas_limit_cap( + cls, *, block_number: int = 0, timestamp: int = 0 + ) -> int | None: + """Return the transaction gas limit cap.""" + del block_number, timestamp + return cls.tx_gas_limit_cap + + +@pytest.mark.parametrize( + "iterating_bytecode,iterations,expected_cost", + [ + pytest.param( + IteratingBytecode(iterating=Op.ADD(1, 2)), + 10, + 10 * (Op.ADD(1, 2).gas_cost(Osaka)), + id="simple_code", + ), + pytest.param( + IteratingBytecode( + iterating=Op.CALL(address=1), + warm_iterating=Op.CALL(address=1, address_warm=True), + ), + 10, + 1 * (Op.CALL(address=1).gas_cost(Osaka)) + + 9 * (Op.CALL(address=1, address_warm=True).gas_cost(Osaka)), + id="simple_code_with_warm_variation", + ), + pytest.param( + IteratingBytecode(iterating=Op.ADD(1, 2)), + 0, + 0, + id="zero_iterations", + ), + pytest.param( + IteratingBytecode( + setup=Op.PUSH1(0), + iterating=Op.ADD(1, 2), + cleanup=Op.STOP, + ), + 5, + Op.PUSH1(0).gas_cost(Osaka) + + 5 * Op.ADD(1, 2).gas_cost(Osaka) + + Op.STOP.gas_cost(Osaka), + id="with_setup_and_cleanup", + ), + pytest.param( + IteratingBytecode( + iterating=Op.CALL(address=1), + iterating_subcall=Op.RETURN(0, 0), + ), + 3, + 3 * Op.CALL(address=1).gas_cost(Osaka) + + 3 * Op.RETURN(0, 0).gas_cost(Osaka), + id="with_subcall_bytecode", + ), + pytest.param( + IteratingBytecode( + iterating=Op.SSTORE(0, 1), + iterating_subcall=10000, + ), + 3, + 3 * Op.SSTORE(0, 1).gas_cost(Osaka) + 3 * 10000, + id="with_subcall_int", + ), + ], +) +def test_iterating_bytecode_gas_cost( + iterating_bytecode: IteratingBytecode, iterations: int, expected_cost: int +) -> None: + """Test the gas cost calculating function of an iterating bytecode.""" + calculated_cost = iterating_bytecode.gas_cost_by_iteration_count( + fork=Osaka, iteration_count=iterations + ) + assert calculated_cost == expected_cost, ( + f"Gas cost for {iterations} iterations is {expected_cost}, " + f"but got {calculated_cost}" + ) + + +def test_iterating_subcall_gas_cost() -> None: + """Test iterating_subcall_gas_cost with both bytecode and int.""" + # Test with Bytecode + bytecode = IteratingBytecode( + iterating=Op.STOP, + iterating_subcall=Op.CALL(address=1), + ) + assert bytecode.iterating_subcall_gas_cost(fork=Osaka) == Op.CALL( + address=1 + ).gas_cost(Osaka) + + # Test with int + bytecode_int = IteratingBytecode( + iterating=Op.STOP, + iterating_subcall=5000, + ) + assert bytecode_int.iterating_subcall_gas_cost(fork=Osaka) == 5000 + + +def test_iterating_subcall_reserve() -> None: + """Test the 63/64 rule gas reserve calculation.""" + bytecode = IteratingBytecode( + iterating=Op.STOP, + iterating_subcall=6300, + ) + reserve = bytecode.iterating_subcall_reserve(fork=Osaka) + # Reserve should be: (6300 * 64 / 63) - 6300 = 100 + assert reserve == 100 + + +def test_with_fixed_iteration_count() -> None: + """Test conversion to FixedIterationsBytecode.""" + iterating_bytecode = IteratingBytecode( + setup=Op.PUSH1(0), + iterating=Op.ADD(1, 2), + cleanup=Op.STOP, + ) + fixed = iterating_bytecode.with_fixed_iteration_count(iteration_count=10) + + assert isinstance(fixed, FixedIterationsBytecode) + assert fixed.iteration_count == 10 + assert fixed.gas_cost( + Osaka + ) == iterating_bytecode.gas_cost_by_iteration_count( + fork=Osaka, iteration_count=10 + ) + + +def test_tx_gas_cost_by_iteration_count() -> None: + """Test transaction gas cost calculation.""" + bytecode = IteratingBytecode( + iterating=Op.ADD(1, 2), 
+ ) + intrinsic_gas_cost_calc = Osaka.transaction_intrinsic_cost_calculator() + + tx_gas = bytecode.tx_gas_cost_by_iteration_count( + fork=Osaka, + iteration_count=5, + ) + + expected = ( + bytecode.gas_cost_by_iteration_count(fork=Osaka, iteration_count=5) + + intrinsic_gas_cost_calc() + ) + assert tx_gas == expected + + # With calldata + tx_gas = bytecode.tx_gas_cost_by_iteration_count( + fork=Osaka, + iteration_count=5, + calldata=b"hello", + ) + expected = bytecode.gas_cost_by_iteration_count( + fork=Osaka, iteration_count=5 + ) + intrinsic_gas_cost_calc( + calldata=b"hello", return_cost_deducted_prior_execution=True + ) + assert tx_gas == expected + + +def test_tx_gas_limit_by_iteration_count() -> None: + """Test transaction gas limit calculation includes 63/64 rule reserve.""" + bytecode = IteratingBytecode( + iterating=Op.ADD(1, 2), + iterating_subcall=6300, + ) + + tx_gas_limit = bytecode.tx_gas_limit_by_iteration_count( + fork=Osaka, + iteration_count=5, + ) + tx_gas_cost = bytecode.tx_gas_cost_by_iteration_count( + fork=Osaka, + iteration_count=5, + ) + reserve = bytecode.iterating_subcall_reserve(fork=Osaka) + + assert tx_gas_limit == tx_gas_cost + reserve + + +@pytest.mark.parametrize( + "gas_limit,gas_limit_cap,expected_transactions", + [ + pytest.param( + 500_000, + None, + 1, + id="single_tx_no_cap", + ), + pytest.param( + 500_000, + 100_000, + 6, + id="split_across_multiple_txs", + ), + pytest.param( + 1_000_000, + 60_000, + 23, + id="split_across_many_txs", + ), + ], +) +def test_tx_iterations_by_gas_limit( + gas_limit: int, gas_limit_cap: int | None, expected_transactions: int +) -> None: + """Test splitting iterations by target gas usage.""" + fork = CustomOsaka.with_tx_gas_limit_cap(gas_limit_cap) + bytecode = IteratingBytecode( + iterating=Op.ADD(1, 2) + Op.SSTORE(0, 1), + ) + + result = list( + bytecode.tx_iterations_by_gas_limit( + fork=fork, + gas_limit=gas_limit, + ) + ) + + # Check we got the expected number of transactions + assert len(result) == expected_transactions + + # Check total gas used is close to target + total_gas = sum( + bytecode.tx_gas_limit_by_iteration_count( + fork=fork, iteration_count=iters + ) + for iters in result + ) + assert total_gas <= gas_limit + + # Check each transaction respects the gas limit cap + if gas_limit_cap is not None: + for iters in result: + tx_gas = bytecode.tx_gas_limit_by_iteration_count( + fork=fork, iteration_count=iters + ) + assert tx_gas <= gas_limit_cap + + +@pytest.mark.parametrize( + "total_iterations,gas_limit_cap,min_expected_txs", + [ + pytest.param( + 100, + None, + 1, + id="single_tx_no_cap", + ), + pytest.param( + 10, + 100000, + 1, + id="split_with_reasonable_cap", + ), + pytest.param( + 50, + 60000, + 40, + id="many_txs_needed", + ), + ], +) +def test_tx_iterations_by_total_iteration_count( + total_iterations: int, gas_limit_cap: int | None, min_expected_txs: int +) -> None: + """Test splitting a fixed number of iterations across transactions.""" + bytecode = IteratingBytecode( + iterating=Op.ADD(1, 2) + Op.SSTORE(0, 1), + ) + + result = list( + bytecode.tx_iterations_by_total_iteration_count( + fork=CustomOsaka.with_tx_gas_limit_cap(gas_limit_cap), + total_iterations=total_iterations, + ) + ) + + # Check we got at least the expected number of transactions + assert len(result) >= min_expected_txs + + # Check total iterations matches exactly + assert sum(result) == total_iterations + + # Check each transaction respects the gas limit cap + if gas_limit_cap is not None: + for iters in result: + tx_gas = 
bytecode.tx_gas_limit_by_iteration_count( + fork=Osaka, iteration_count=iters + ) + assert tx_gas <= gas_limit_cap + + +def test_tx_iterations_by_total_iteration_count_raises_on_impossible() -> None: + """Test that ValueError is raised when gas limit is too low.""" + bytecode = IteratingBytecode( + setup=Op.PUSH1(0) * 1000, # Large setup to exceed small gas limit + iterating=Op.ADD(1, 2), + ) + + with pytest.raises( + ValueError, + match="Single iteration gas cost is greater than gas limit.", + ): + list( + bytecode.tx_iterations_by_total_iteration_count( + fork=CustomOsaka.with_tx_gas_limit_cap(1000), + total_iterations=10, + ) + ) diff --git a/packages/testing/src/execution_testing/tools/tools_code/__init__.py b/packages/testing/src/execution_testing/tools/tools_code/__init__.py index 44092c59bd..b3f118cad9 100644 --- a/packages/testing/src/execution_testing/tools/tools_code/__init__.py +++ b/packages/testing/src/execution_testing/tools/tools_code/__init__.py @@ -6,8 +6,11 @@ CodeGasMeasure, Conditional, Create2PreimageLayout, + FixedIterationsBytecode, Initcode, + IteratingBytecode, Switch, + TransactionWithCost, While, ) from .yul import Solc, Yul, YulCompiler @@ -17,11 +20,14 @@ "Case", "CodeGasMeasure", "Conditional", + "Create2PreimageLayout", + "FixedIterationsBytecode", "Initcode", + "IteratingBytecode", "Solc", "Switch", + "TransactionWithCost", "While", "Yul", "YulCompiler", - "Create2PreimageLayout", ) diff --git a/packages/testing/src/execution_testing/tools/tools_code/generators.py b/packages/testing/src/execution_testing/tools/tools_code/generators.py index cf7e2c4d58..78dea8ed00 100644 --- a/packages/testing/src/execution_testing/tools/tools_code/generators.py +++ b/packages/testing/src/execution_testing/tools/tools_code/generators.py @@ -1,13 +1,14 @@ """Code generating classes and functions.""" from dataclasses import dataclass -from typing import Any, List, SupportsBytes +from typing import Any, Generator, List, Self, SupportsBytes, Tuple, Type -from typing_extensions import Self +from pydantic import Field -from execution_testing.base_types import Bytes -from execution_testing.test_types import ceiling_division -from execution_testing.vm import Bytecode, Op +from execution_testing.base_types import Address, Bytes +from execution_testing.forks import Fork +from execution_testing.test_types import EOA, Transaction, ceiling_division +from execution_testing.vm import Bytecode, ForkOpcodeInterface, Op GAS_PER_DEPLOYED_CODE_BYTE = 0xC8 @@ -470,3 +471,587 @@ def increment_salt_op(self, increment: int = 1) -> Bytecode: self.salt_offset, Op.ADD(Op.MLOAD(self.salt_offset), increment), ) + + +class TransactionWithCost(Transaction): + """Transaction object that can include the expected gas to be consumed.""" + + gas_cost: int = Field(..., exclude=True) + + +class IteratingBytecode(Bytecode): + """ + Bytecode composed of distinct execution phases: setup, iteration, and + cleanup. + + Some phases (warm_iterating and iterating_subcall) are analytical only and + exist solely to model gas costs; they are not emitted in the final + bytecode. + """ + + setup: Bytecode + """Bytecode executed once at the beginning before iterations start.""" + iterating: Bytecode + """Bytecode executed in the first iteration.""" + warm_iterating: Bytecode + """ + Analytical bytecode representing subsequent iterations after the first + (warm state). + This bytecode is _not_ included in the final bytecode, and it's only + used for the gas accounting properties of its opcodes and therefore gas + calculation. 
+ """ + iterating_subcall: Bytecode | int + """ + Analytical bytecode representing a subcall performed during each iteration. + This bytecode is _not_ included in the final bytecode, and it's only + used for gas calculation. + + The value can also be an integer, in which case it represents the gas cost + of the subcall (e.g. the subcall is a precompiled contract) + """ + cleanup: Bytecode + """Bytecode executed once at the end after all iterations complete.""" + + def __new__( + cls, + *, + setup: Bytecode | None = None, + iterating: Bytecode, + cleanup: Bytecode | None = None, + warm_iterating: Bytecode | None = None, + iterating_subcall: Bytecode | int | None = None, + ) -> Self: + """ + Create a new iterating bytecode. + + Args: + setup: Bytecode executed once at the beginning before + iterations start. + iterating: Bytecode executed in the first iteration. + cleanup: Bytecode executed once at the end after all + iterations complete. + warm_iterating: Analytical bytecode representing subsequent + iterations after the first (warm state). + iterating_subcall: Analytical bytecode representing a subcall + performed during each iteration. This bytecode is _not_ + included in the final bytecode, and it's only used for gas + calculation. The value can also be an integer, in which case it + represents the gas cost of the subcall (e.g. the subcall is a + precompiled contract). + + Returns: + A new IteratingBytecode instance. + + """ + instance = super(IteratingBytecode, cls).__new__( + cls, + setup + iterating + cleanup, + ) + if setup is None: + setup = Bytecode() + instance.setup = setup + instance.iterating = iterating + if warm_iterating is None: + instance.warm_iterating = iterating + else: + assert bytes(iterating) == bytes(warm_iterating), ( + "iterating and warm_iterating must have the same bytecode" + ) + instance.warm_iterating = warm_iterating + if iterating_subcall is None: + instance.iterating_subcall = Bytecode() + else: + instance.iterating_subcall = iterating_subcall + if cleanup is None: + cleanup = Bytecode() + instance.cleanup = cleanup + return instance + + def iterating_subcall_gas_cost( + self, *, fork: Type[ForkOpcodeInterface] + ) -> int: + """Return the gas cost of the iterating subcall.""" + if isinstance(self.iterating_subcall, int): + return self.iterating_subcall + return self.iterating_subcall.gas_cost(fork=fork) + + def iterating_subcall_reserve( + self, *, fork: Type[ForkOpcodeInterface] + ) -> int: + """ + Return the gas reserve needed so that the last iterating subcall does + not fail due to the 63/64 rule. + """ + iterating_subcall_gas_cost = self.iterating_subcall_gas_cost(fork=fork) + return ( + iterating_subcall_gas_cost * 64 // 63 + ) - iterating_subcall_gas_cost + + def gas_cost_by_iteration_count( + self, *, fork: Type[ForkOpcodeInterface], iteration_count: int + ) -> int: + """Return the cost of iterating through the bytecode N times.""" + loop_gas_cost = 0 + if iteration_count > 0: + # Cold cost is just charged for the first iteration + loop_gas_cost = self.iterating.gas_cost(fork=fork) + # Warm cost is charged for all iterations except the first + loop_gas_cost += self.warm_iterating.gas_cost(fork=fork) * ( + iteration_count - 1 + ) + # Subcall cost is charged for all iterations. 
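+            # (Putting the three charges together with hypothetical numbers:
+            # a 2600-gas cold iteration, 100-gas warm iterations and a
+            # 700-gas subcall give 2600 + 9 * 100 + 10 * 700 = 10_500 gas
+            # for 10 iterations, before setup and cleanup are added.)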
+ loop_gas_cost += ( + self.iterating_subcall_gas_cost(fork=fork) * iteration_count + ) + return ( + self.setup.gas_cost(fork=fork) + + loop_gas_cost + + self.cleanup.gas_cost(fork=fork) + ) + + def with_fixed_iteration_count( + self, *, iteration_count: int + ) -> "FixedIterationsBytecode": + """ + Return a new FixedIterationsBytecode with the iteration count fixed. + """ + return FixedIterationsBytecode( + setup=self.setup, + iterating=self.iterating, + cleanup=self.cleanup, + warm_iterating=self.warm_iterating, + iterating_subcall=self.iterating_subcall, + iteration_count=iteration_count, + ) + + # Methods to calculate transactions that call a contract containing the + # iterating bytecode. + + def tx_gas_cost_by_iteration_count( + self, + *, + fork: Fork, + iteration_count: int, + start_iteration: int = 0, + **intrinsic_cost_kwargs: Any, + ) -> int: + """ + Calculate the exact gas cost of a transaction calling the bytecode + for a given number of iterations. + + The method accepts intrinsic gas cost kwargs to allow for the + calculation of the intrinsic gas cost of the transaction. + + If any of the intrinsic gas cost kwarg is callable, it will be called + with iteration_count and start_iteration as keyword arguments. + """ + intrinsic_gas_cost_calc = fork.transaction_intrinsic_cost_calculator() + if "data" in intrinsic_cost_kwargs: + intrinsic_cost_kwargs["calldata"] = intrinsic_cost_kwargs.pop( + "data" + ) + if "authorization_list" in intrinsic_cost_kwargs: + intrinsic_cost_kwargs["authorization_list_or_count"] = len( + intrinsic_cost_kwargs.pop("authorization_list") + ) + if "return_cost_deducted_prior_execution" not in intrinsic_cost_kwargs: + intrinsic_cost_kwargs["return_cost_deducted_prior_execution"] = ( + True + ) + for key, value in intrinsic_cost_kwargs.items(): + if callable(value): + intrinsic_cost_kwargs[key] = value( + iteration_count=iteration_count, + start_iteration=start_iteration, + ) + return self.gas_cost_by_iteration_count( + fork=fork, iteration_count=iteration_count + ) + intrinsic_gas_cost_calc(**intrinsic_cost_kwargs) + + def tx_gas_limit_by_iteration_count( + self, + *, + fork: Fork, + iteration_count: int, + start_iteration: int = 0, + **intrinsic_cost_kwargs: Any, + ) -> int: + """ + Calculate the minimum gas limit of a transaction calling the bytecode + for a given number of iterations. + + The gas limit is calculated by adding the required extra gas for the + last iteration due to the 63/64 rule. + """ + return self.tx_gas_cost_by_iteration_count( + fork=fork, + iteration_count=iteration_count, + start_iteration=start_iteration, + **intrinsic_cost_kwargs, + ) + self.iterating_subcall_reserve(fork=fork) + + def _binary_search_iterations( + self, + *, + fork: Fork, + gas_limit: int, + start_iteration: int, + **intrinsic_cost_kwargs: Any, + ) -> Tuple[int, int]: + """ + Binary search for the maximum iterations that fit within a gas limit. + """ + single_iteration_gas = self.tx_gas_limit_by_iteration_count( + fork=fork, + iteration_count=1, + start_iteration=start_iteration, + **intrinsic_cost_kwargs, + ) + if single_iteration_gas > gas_limit: + raise ValueError( + "Single iteration gas cost is greater than gas limit." 
+            )
+        low = 1
+        high = 2
+
+        # Exponential search to find upper bound
+        high_gas_cost = self.tx_gas_limit_by_iteration_count(
+            fork=fork,
+            iteration_count=high,
+            start_iteration=start_iteration,
+            **intrinsic_cost_kwargs,
+        )
+        while high_gas_cost < gas_limit:
+            low = high
+            high *= 2
+            high_gas_cost = self.tx_gas_limit_by_iteration_count(
+                fork=fork,
+                iteration_count=high,
+                start_iteration=start_iteration,
+                **intrinsic_cost_kwargs,
+            )
+
+        # Binary search for exact fit
+        best_iterations = 0
+        while low < high:
+            mid = (low + high) // 2
+
+            if (
+                self.tx_gas_limit_by_iteration_count(
+                    fork=fork,
+                    iteration_count=mid,
+                    start_iteration=start_iteration,
+                    **intrinsic_cost_kwargs,
+                )
+                > gas_limit
+            ):
+                high = mid
+            else:
+                low = mid + 1
+
+        best_iterations = low - 1
+        best_iterations_gas = self.tx_gas_limit_by_iteration_count(
+            fork=fork,
+            iteration_count=best_iterations,
+            start_iteration=start_iteration,
+            **intrinsic_cost_kwargs,
+        )
+        return best_iterations, best_iterations_gas
+
+    def tx_iterations_by_gas_limit(
+        self,
+        *,
+        fork: Fork,
+        gas_limit: int,
+        start_iteration: int = 0,
+        **intrinsic_cost_kwargs: Any,
+    ) -> Generator[int, None, None]:
+        """
+        Calculate the number of iterations needed to reach a given
+        gas-to-be-used value.
+
+        Each yielded value is the number of iterations for a single
+        transaction.
+
+        If the fork's transaction gas limit cap is not `None`, the generator
+        yields one value per transaction, and no transaction will exceed the
+        gas limit cap.
+        """
+        gas_limit_cap = fork.transaction_gas_limit_cap()
+        remaining_gas = gas_limit
+
+        while remaining_gas >= self.tx_gas_limit_by_iteration_count(
+            fork=fork,
+            iteration_count=1,
+            start_iteration=start_iteration,
+            **intrinsic_cost_kwargs,
+        ):
+            # Binary search for the maximum number of iterations that fits
+            # within remaining_gas
+            max_gas_limit = (
+                min(remaining_gas, gas_limit_cap)
+                if gas_limit_cap is not None
+                else remaining_gas
+            )
+            best_iterations, best_iterations_gas = (
+                self._binary_search_iterations(
+                    fork=fork,
+                    gas_limit=max_gas_limit,
+                    start_iteration=start_iteration,
+                    **intrinsic_cost_kwargs,
+                )
+            )
+            yield best_iterations
+            remaining_gas -= best_iterations_gas
+            start_iteration += best_iterations
+
+    def tx_iterations_by_total_iteration_count(
+        self,
+        *,
+        fork: Fork,
+        total_iterations: int,
+        start_iteration: int = 0,
+        **intrinsic_cost_kwargs: Any,
+    ) -> Generator[int, None, None]:
+        """
+        Calculate how to split a total number of iterations across multiple
+        transactions so that each transaction fits within the gas limit cap.
+
+        Yields the number of iterations for each transaction; the yielded
+        values sum to total_iterations.
+        """
+        gas_limit_cap = fork.transaction_gas_limit_cap()
+        if gas_limit_cap is None:
+            # No limit, all iterations fit in a single transaction.
+            yield total_iterations
+            return
+        remaining_iterations = total_iterations
+
+        while remaining_iterations > 0:
+            best_iterations, _ = self._binary_search_iterations(
+                fork=fork,
+                gas_limit=gas_limit_cap,
+                start_iteration=start_iteration,
+                **intrinsic_cost_kwargs,
+            )
+            if best_iterations >= remaining_iterations:
+                yield remaining_iterations
+                return
+            else:
+                yield best_iterations
+                remaining_iterations -= best_iterations
+                start_iteration += best_iterations
+
+    # Transaction generators that call the iterating bytecode with given
+    # limits.
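+    #
+    # A minimal usage sketch (all names below, `code`, `fork`, `sender`,
+    # `contract` and `gas_benchmark_value`, are placeholders):
+    #
+    #     txs = list(
+    #         code.transactions_by_gas_limit(
+    #             fork=fork,
+    #             gas_limit=gas_benchmark_value,
+    #             sender=sender,
+    #             to=contract,
+    #         )
+    #     )
+    #     expected_gas_used = sum(tx.gas_cost for tx in txs)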
+ + def transactions_by_gas_limit( + self, + *, + fork: Fork, + gas_limit: int, + start_iteration: int = 0, + sender: EOA, + to: Address | None, + tx_gas_limit_delta: int = 0, + **tx_kwargs: Any, + ) -> Generator[TransactionWithCost, None, None]: + """ + Generate a list of transactions calling the bytecode with a given gas + limit. + + The method accepts all keyword arguments that can be passed to the + `Transaction` constructor. + + If any of the keyword arguments is callable, it will be called with + iteration_count and start_iteration as keyword arguments. + E.g. when the calldata that needs to be passed to the iterating + bytecode changes with each iteration, the calldata can be generated + dynamically by passing a callable to the calldata keyword argument. + + The returned object also contains an extra field with the expected + gas cost of the transaction by the end of execution. + """ + intrinsic_cost_kwargs = tx_kwargs.copy() + + if "calldata" in tx_kwargs: + tx_kwargs["data"] = tx_kwargs.pop("calldata") + if "return_cost_deducted_prior_execution" in tx_kwargs: + tx_kwargs.pop("return_cost_deducted_prior_execution") + for iteration_count in self.tx_iterations_by_gas_limit( + fork=fork, + gas_limit=gas_limit, + start_iteration=start_iteration, + **intrinsic_cost_kwargs, + ): + tx_gas_limit = self.tx_gas_limit_by_iteration_count( + fork=fork, + iteration_count=iteration_count, + start_iteration=start_iteration, + **intrinsic_cost_kwargs, + ) + tx_gas_cost = self.tx_gas_cost_by_iteration_count( + fork=fork, + iteration_count=iteration_count, + start_iteration=start_iteration, + **intrinsic_cost_kwargs, + ) + current_tx_kwargs = tx_kwargs.copy() + + for key, value in current_tx_kwargs.items(): + if callable(value): + current_tx_kwargs[key] = value( + iteration_count=iteration_count, + start_iteration=start_iteration, + ) + yield TransactionWithCost( + to=to, + gas_limit=tx_gas_limit + tx_gas_limit_delta, + sender=sender, + gas_cost=tx_gas_cost, + **current_tx_kwargs, + ) + start_iteration += iteration_count + + def transactions_by_total_iteration_count( + self, + *, + fork: Fork, + total_iterations: int, + start_iteration: int = 0, + sender: EOA, + to: Address | None, + tx_gas_limit_delta: int = 0, + **tx_kwargs: Any, + ) -> Generator[TransactionWithCost, None, None]: + """ + Generate a list of transactions calling the bytecode with a given + total iteration count. + + The method accepts all keyword arguments that can be passed to the + `Transaction` constructor. + + If any of the keyword arguments is callable, it will be called with + iteration_count and start_iteration as keyword arguments. + E.g. when the calldata that needs to be passed to the iterating + bytecode changes with each iteration, the calldata can be generated + dynamically by passing a callable to the calldata keyword argument. + + The returned object also contains an extra field with the expected + gas cost of the transaction by the end of execution. 
+ """ + intrinsic_cost_kwargs = tx_kwargs.copy() + + if "calldata" in tx_kwargs: + tx_kwargs["data"] = tx_kwargs.pop("calldata") + if "return_cost_deducted_prior_execution" in tx_kwargs: + tx_kwargs.pop("return_cost_deducted_prior_execution") + for iteration_count in self.tx_iterations_by_total_iteration_count( + fork=fork, + total_iterations=total_iterations, + start_iteration=start_iteration, + **intrinsic_cost_kwargs, + ): + tx_gas_limit = self.tx_gas_limit_by_iteration_count( + fork=fork, + iteration_count=iteration_count, + start_iteration=start_iteration, + **intrinsic_cost_kwargs, + ) + tx_gas_cost = self.tx_gas_cost_by_iteration_count( + fork=fork, + iteration_count=iteration_count, + start_iteration=start_iteration, + **intrinsic_cost_kwargs, + ) + current_tx_kwargs = tx_kwargs.copy() + + for key, value in current_tx_kwargs.items(): + if callable(value): + current_tx_kwargs[key] = value( + iteration_count=iteration_count, + start_iteration=start_iteration, + ) + yield TransactionWithCost( + to=to, + gas_limit=tx_gas_limit + tx_gas_limit_delta, + sender=sender, + gas_cost=tx_gas_cost, + **current_tx_kwargs, + ) + start_iteration += iteration_count + + +class FixedIterationsBytecode(IteratingBytecode): + """ + Bytecode that contains a setup phase, an iterating phase, and a cleanup + phase, with a fixed number of iterations. + + This type can be used in place of a normal Bytecode and will return the + appropriate gas cost for the given number of iterations. + """ + + iteration_count: int + """The fixed number of times the iterating bytecode will be executed.""" + + def __new__( + cls, + *, + setup: Bytecode, + iterating: Bytecode, + cleanup: Bytecode, + iteration_count: int, + warm_iterating: Bytecode | None = None, + iterating_subcall: Bytecode | int | None = None, + ) -> Self: + """ + Create a new FixedIterationsBytecode instance. + + Args: + setup: Bytecode executed once at the beginning before + iterations start. + iterating: Bytecode executed in the first iteration. + cleanup: Bytecode executed once at the end after all + iterations complete. + iteration_count: The fixed number of times the iterating + bytecode will be executed. + warm_iterating: Bytecode executed in subsequent iterations + after the first. If None, uses the same bytecode as + iterating. + iterating_subcall: Analytical bytecode representing a subcall + performed during each iteration. This bytecode is _not_ + included in the final bytecode, and it's only used for gas + calculation. The value can also be an integer, in which case it + represents the gas cost of the subcall (e.g. the subcall is a + precompiled contract). + + Returns: + A new FixedIterationsBytecode instance. 
+ + """ + instance = super(FixedIterationsBytecode, cls).__new__( + cls, + setup=setup, + iterating=iterating, + cleanup=cleanup, + warm_iterating=warm_iterating, + iterating_subcall=iterating_subcall, + ) + instance.iteration_count = iteration_count + return instance + + def gas_cost( + self, + fork: Type[ForkOpcodeInterface], + *, + block_number: int = 0, + timestamp: int = 0, + ) -> int: + """Return the cost of iterating through the bytecode N times.""" + del block_number, timestamp + return self.gas_cost_by_iteration_count( + fork=fork, + iteration_count=self.iteration_count, + ) diff --git a/tests/benchmark/compute/helpers.py b/tests/benchmark/compute/helpers.py index 981c7fbbfd..f0a32cedd0 100644 --- a/tests/benchmark/compute/helpers.py +++ b/tests/benchmark/compute/helpers.py @@ -2,9 +2,24 @@ import math from enum import Enum, auto -from typing import Sequence, cast - -from execution_testing import BytesConcatenation, Fork, Hash, Op +from typing import Generator, Self, Sequence, cast + +from execution_testing import ( + EOA, + Address, + Alloc, + BytesConcatenation, + FixedIterationsBytecode, + Fork, + Hash, + Initcode, + IteratingBytecode, + Op, + TransactionWithCost, + While, + compute_create2_address, + compute_deterministic_create2_address, +) from tests.osaka.eip7951_p256verify_precompiles.spec import ( FieldElement, @@ -204,3 +219,214 @@ def calculate_optimal_input_length( optimal_input_length = input_length return optimal_input_length + + +class MaxSizedContractInitcode(FixedIterationsBytecode): + """ + Initcode that deploys a random and maximum-sized contract for the given + fork's limits. + """ + + def __new__(cls, *, pre: Alloc, fork: Fork) -> Self: + """ + Create a new MaxSizedContractInitcode instance. + + Args: + pre: The pre-allocation state where the contract will be + deployed. + fork: The fork to use for determining maximum contract size + limits. + + Returns: + A new MaxSizedContractInitcode instance. + + """ + max_contract_size = fork.max_code_size() + xor_table_byte_size = XOR_TABLE_SIZE * 32 + iteration_count = ((max_contract_size - 32) // xor_table_byte_size) + 1 + setup = Op.MSTORE( + 0, + Op.ADDRESS, + # Gas accounting + old_memory_size=0, + new_memory_size=32, + ) + iterating = While( + body=( + Op.SHA3(Op.SUB(Op.MSIZE, 32), 32, data_size=32) + # Use a xor table to avoid having to call the "expensive" sha3 + # opcode as much + + sum( + ( + Op.PUSH32[xor_value] + + Op.XOR + + Op.DUP1 + + Op.MSIZE + + Op.MSTORE + ) + for xor_value in XOR_TABLE + ) + + Op.POP + ), + condition=Op.LT(Op.MSIZE, max_contract_size), + ) + cleanup = ( + # Despite the whole contract has random bytecode, we need the first + # opcode be a STOP so CALL-like attacks return as soon as possible. + # However, since the memory starts with address, the first 12 bytes + # are always zero, so no need to do anything but return. + Op.RETURN( + 0, + max_contract_size, + # Gas accounting + code_deposit_size=max_contract_size, + # Memory is not expanded here, but it is expanded in the loop. 
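+                # (Each loop iteration appends one full XOR table, i.e.
+                # XOR_TABLE_SIZE * 32 bytes, on top of the initial 32-byte
+                # ADDRESS word, which is where new_memory_size below comes
+                # from.)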
+ old_memory_size=32, + new_memory_size=(xor_table_byte_size * iteration_count) + 32, + ) + ) + instance = super(MaxSizedContractInitcode, cls).__new__( + cls, + setup=setup, + iterating=iterating, + cleanup=cleanup, + iteration_count=iteration_count, + ) + deployed_address = pre.deterministic_deploy_contract( + deploy_code=instance + ) + assert deployed_address == instance.address(fork=fork) + return instance + + def address(self, *, fork: Fork) -> Address: + """Get the deterministic address of the initcode.""" + return compute_deterministic_create2_address( + salt=0, + initcode=Initcode(deploy_code=self), + fork=fork, + ) + + +class MaxSizedContractFactory(IteratingBytecode): + """ + Factory contract that creates maximum-sized contracts. + + The contract takes two 32-byte arguments in the calldata: + - start_index: the starting index of the contract to deploy + - end_index: the ending index of the contract to deploy + + The contract will deploy a maximum-sized contract for each index in the + range, inclusive. + """ + + initcode: MaxSizedContractInitcode + """The initcode used to deploy maximum-sized contracts via CREATE2.""" + + def __new__(cls, *, pre: Alloc, fork: Fork) -> Self: + """ + Create a new MaxSizedContractFactory instance. + + Args: + pre: The pre-allocation state where the factory will be + deployed. + fork: The fork to use for gas calculations and contract + size limits. + + Returns: + A new MaxSizedContractFactory instance. + + """ + initcode = MaxSizedContractInitcode(pre=pre, fork=fork) + initcode_address = initcode.address(fork=fork) + setup = ( + Op.EXTCODECOPY( + address=initcode_address, + dest_offset=0, + offset=0, + size=len(initcode), + # Gas accounting + address_warm=False, + data_size=len(initcode), + new_memory_size=len(initcode), + ) + # CALLDATA[0:32] = start_index + # CALLDATA[32:64] = end_index + + Op.ADD(1, Op.CALLDATALOAD(32)) + + Op.CALLDATALOAD(0) + ) + iterating = While( + body=Op.POP( + Op.CREATE2( + value=0, + offset=0, + size=len(initcode), + salt=Op.DUP1, + # Gas accounting + init_code_size=len(initcode), + ) + ), + condition=Op.PUSH1(1) + + Op.ADD + + Op.DUP1 + + Op.DUP3 + + Op.LT + + Op.ISZERO, + ) + cleanup = Op.STOP + instance = super(MaxSizedContractFactory, cls).__new__( + cls, + setup=setup, + iterating=iterating, + iterating_subcall=initcode, + cleanup=cleanup, + ) + instance.initcode = initcode + deployed_address = pre.deterministic_deploy_contract( + deploy_code=instance + ) + assert deployed_address == instance.address(fork=fork) + return instance + + def transactions_by_total_contract_count( + self, + *, + fork: Fork, + sender: EOA, + contract_count: int, + contract_start_index: int = 0, + ) -> Generator[TransactionWithCost, None, None]: + """ + Create a list of transactions calling the factory to create the + given number of contracts, each capped tx properly capped by the + gas limit cap of the fork. 
+ """ + to = self.address(fork=fork) + + def calldata(iteration_count: int, start_iteration: int) -> bytes: + index_end = iteration_count + start_iteration - 1 + return Hash(start_iteration) + Hash(index_end) + + yield from self.transactions_by_total_iteration_count( + fork=fork, + total_iterations=contract_count, + start_iteration=contract_start_index, + sender=sender, + to=to, + calldata=calldata, + ) + + def address(self, *, fork: Fork) -> Address: + """Get the deterministic address of the initcode.""" + return compute_deterministic_create2_address( + salt=0, + initcode=Initcode(deploy_code=self), + fork=fork, + ) + + def created_contract_address(self, *, fork: Fork, salt: int) -> Address: + """Get the deterministic address of the created contract.""" + return compute_create2_address( + address=self.address(fork=fork), + salt=salt, + initcode=self.initcode, + ) diff --git a/tests/benchmark/compute/instruction/test_keccak.py b/tests/benchmark/compute/instruction/test_keccak.py index 647bd3173d..f117a17323 100644 --- a/tests/benchmark/compute/instruction/test_keccak.py +++ b/tests/benchmark/compute/instruction/test_keccak.py @@ -29,7 +29,6 @@ def test_keccak_max_permutations( intrinsic_gas_calculator = fork.transaction_intrinsic_cost_calculator() available_gas = tx_gas_limit - intrinsic_gas_calculator() - gsc = fork.gas_costs() mem_exp_gas_calculator = fork.memory_expansion_gas_calculator() # Discover the optimal input size to maximize keccak-permutations, @@ -39,13 +38,9 @@ def test_keccak_max_permutations( max_keccak_perm_per_block = 0 optimal_input_length = 0 for i in range(1, 1_000_000, 32): - iteration_gas_cost = ( - 2 * gsc.G_VERY_LOW # PUSHN + PUSH1 - + gsc.G_KECCAK_256 # KECCAK256 static cost - + math.ceil(i / 32) * gsc.G_KECCAK_256_WORD # KECCAK256 dynamic - # cost - + gsc.G_BASE # POP - ) + # Iteration cost disregarding memory expansion + iteration_bytecode = Op.POP(Op.SHA3(Op.PUSH0, Op.DUP1, data_size=i)) + iteration_gas_cost = iteration_bytecode.gas_cost(fork) # From the available gas, we subtract the mem expansion costs # considering we know the current input size length i. available_gas_after_expansion = max( @@ -66,7 +61,9 @@ def test_keccak_max_permutations( target_opcode=Op.SHA3, code_generator=JumpLoopGenerator( setup=Op.PUSH20[optimal_input_length], - attack_block=Op.POP(Op.SHA3(Op.PUSH0, Op.DUP1)), + attack_block=Op.POP( + Op.SHA3(Op.PUSH0, Op.DUP1, data_size=optimal_input_length) + ), ), ) diff --git a/tests/benchmark/compute/instruction/test_storage.py b/tests/benchmark/compute/instruction/test_storage.py index ef5bb02f6f..68420c284e 100644 --- a/tests/benchmark/compute/instruction/test_storage.py +++ b/tests/benchmark/compute/instruction/test_storage.py @@ -20,6 +20,7 @@ ExtCallGenerator, Fork, Hash, + IteratingBytecode, JumpLoopGenerator, Op, TestPhaseManager, @@ -87,7 +88,7 @@ def test_tstore( ) -def create_storage_initializer(fork: Fork) -> tuple[Bytecode, int, int]: +def create_storage_initializer() -> IteratingBytecode: """ Create a contract that initializes storage slots from calldata parameters. 
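# A minimal sketch of how the numbers carried by the old tuple return map onto
# the refactored IteratingBytecode; `code` and `fork` are placeholders and the
# bytecode is assumed to have no iterating subcall.
def split_costs(code, fork):
    # Setup + cleanup cost only: the old "overhead" value.
    overhead = code.gas_cost_by_iteration_count(fork=fork, iteration_count=0)
    # Incremental cost of the first iteration: the old "loop_cost" value.
    loop_cost = (
        code.gas_cost_by_iteration_count(fork=fork, iteration_count=1)
        - overhead
    )
    return loop_cost, overhead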
@@ -121,15 +122,14 @@ def create_storage_initializer(fork: Fork) -> tuple[Bytecode, int, int]: + Op.JUMPI(len(prefix), Op.GT(Op.DUP2, Op.DUP2)) ) - return prefix + loop, loop.gas_cost(fork), prefix.gas_cost(fork) + return IteratingBytecode(setup=prefix, iterating=loop) def create_benchmark_executor( storage_action: StorageAction, absent_slots: bool, tx_result: TransactionResult, - fork: Fork, -) -> tuple[Bytecode, int, int]: +) -> IteratingBytecode: """ Create a contract that executes benchmark operations. @@ -205,9 +205,8 @@ def create_benchmark_executor( + operation + Op.JUMPI(len(prefix), loop_condition) ) - code = prefix + loop + suffix - return code, loop.gas_cost(fork), (prefix + suffix).gas_cost(fork) + return IteratingBytecode(setup=prefix, iterating=loop, cleanup=suffix) @pytest.mark.parametrize( @@ -266,177 +265,105 @@ def test_storage_access_cold( - StorageInitializer: storage[i] = i for each slot (absent_slots=False) - BenchmarkExecutor: performs the benchmark operation (SLOAD/SSTORE) """ - intrinsic_calc = fork.transaction_intrinsic_cost_calculator() - gas_costs = fork.gas_costs() - - executor_code, exec_loop_cost, exec_overhead = create_benchmark_executor( - storage_action, absent_slots, tx_result, fork - ) - initializer_code, init_loop_cost, init_overhead = ( - create_storage_initializer(fork) + executor_code = create_benchmark_executor( + storage_action, absent_slots, tx_result ) + initializer_code = create_storage_initializer() authority = pre.fund_eoa(amount=0) initializer_addr = pre.deploy_contract(code=initializer_code) executor_addr = pre.deploy_contract(code=executor_code) - delegation_intrinsic = intrinsic_calc(authorization_list_or_count=1) - max_intrinsic = intrinsic_calc(calldata=bytes([0xFF] * 64)) + # Calldata generator for both the executor and initializer. 
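+    # (Callable keyword arguments such as this one are invoked by the
+    # transaction generators with `iteration_count` and `start_iteration`
+    # keyword arguments, so each transaction's calldata encodes the first
+    # slot to touch and the number of slots to process.)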
+ def calldata_generator( + iteration_count: int, start_iteration: int + ) -> bytes: + return Hash(start_iteration) + Hash(iteration_count) # Number of slots that can be processed in the execution phase - num_target_slots = 0 - current_slot = 1 - gas_remaining = gas_benchmark_value - delegation_intrinsic - while gas_remaining > 0: - tx_gas = min(tx_gas_limit, gas_remaining) - if tx_gas < max_intrinsic + exec_overhead + exec_loop_cost: - break - - slots = (tx_gas - max_intrinsic - exec_overhead) // exec_loop_cost - - calldata = bytes(Hash(current_slot)) + bytes(Hash(slots)) - execution_intrinsic = intrinsic_calc(calldata=calldata) - - slots = ( - tx_gas - execution_intrinsic - exec_overhead - ) // exec_loop_cost - - num_target_slots += slots - current_slot += slots - gas_remaining -= tx_gas + num_target_slots = sum( + executor_code.tx_iterations_by_gas_limit( + fork=fork, + gas_limit=gas_benchmark_value, + calldata=calldata_generator, + ) + ) blocks = [] - authority_nonce = 0 + delegation_sender = pre.fund_eoa() # Setup phase: initialize storage slots (only if absent_slots=False) - if not absent_slots: + with TestPhaseManager.setup(): setup_txs = [] + authority_nonce = 0 + if not absent_slots: + setup_txs.append( + Transaction( + to=delegation_sender, + gas_limit=tx_gas_limit, + sender=delegation_sender, + authorization_list=[ + AuthorizationTuple( + address=initializer_addr, + nonce=authority_nonce, + signer=authority, + ), + ], + ) + ) + authority_nonce += 1 - with TestPhaseManager.setup(): - delegation_sender = pre.fund_eoa() - delegation_tx = Transaction( + setup_txs += list( + initializer_code.transactions_by_total_iteration_count( + fork=fork, + total_iterations=num_target_slots, + sender=pre.fund_eoa(), + to=authority, + start_iteration=1, + calldata=calldata_generator, + ) + ) + + setup_txs.append( + Transaction( to=delegation_sender, gas_limit=tx_gas_limit, sender=delegation_sender, authorization_list=[ AuthorizationTuple( - address=initializer_addr, + address=executor_addr, nonce=authority_nonce, signer=authority, ), ], ) - authority_nonce += 1 - - setup_txs.append(delegation_tx) - - current_slot = 1 - remaining_slots = num_target_slots - - while remaining_slots > 0: - if ( - tx_gas_limit - < max_intrinsic + init_overhead + init_loop_cost - ): - break - - slots = ( - tx_gas_limit - max_intrinsic - init_overhead - ) // init_loop_cost - slots = min(slots, remaining_slots) - - calldata = bytes(Hash(current_slot)) + bytes(Hash(slots)) - execution_intrinsic = intrinsic_calc(calldata=calldata) - - slots = ( - tx_gas_limit - execution_intrinsic - init_overhead - ) // init_loop_cost - slots = min(slots, remaining_slots) - - setup_txs.append( - Transaction( - to=authority, - gas_limit=tx_gas_limit, - data=Hash(current_slot) + Hash(slots), - sender=pre.fund_eoa(), - ) - ) - current_slot += slots - remaining_slots -= slots - - blocks.append(Block(txs=setup_txs)) + ) + blocks.append(Block(txs=setup_txs)) # Execution phase: run benchmark # For absent_slots=False, authority has storage, triggering refund - expected_gas_used = delegation_intrinsic - exec_txs = [] - - if not absent_slots: - expected_gas_used -= min( - gas_costs.R_AUTHORIZATION_EXISTING_AUTHORITY, - delegation_intrinsic // 5, - ) - - with TestPhaseManager.setup(): - delegation_sender = pre.fund_eoa() - delegation_tx = Transaction( - to=delegation_sender, - gas_limit=tx_gas_limit, - sender=delegation_sender, - authorization_list=[ - AuthorizationTuple( - address=executor_addr, - nonce=authority_nonce, - signer=authority, - ), - ], 
- ) - - exec_txs.append(delegation_tx) - current_slot = 1 - gas_remaining = gas_benchmark_value - delegation_intrinsic + expected_gas_used = 0 with TestPhaseManager.execution(): - while gas_remaining > 0: - tx_gas = min(tx_gas_limit, gas_remaining) - - if tx_gas < max_intrinsic + exec_overhead + exec_loop_cost: - break - - slots = (tx_gas - max_intrinsic - exec_overhead) // exec_loop_cost - - calldata = bytes(Hash(current_slot)) + bytes(Hash(slots)) - execution_intrinsic = intrinsic_calc(calldata=calldata) - slots = ( - tx_gas - execution_intrinsic - exec_overhead - ) // exec_loop_cost - - if tx_result == TransactionResult.OUT_OF_GAS: - slots = slots * 2 - - exec_txs.append( - Transaction( - to=authority, - gas_limit=tx_gas, - data=Hash(current_slot) + Hash(slots), - sender=pre.fund_eoa(), - ) + tx_gas_limit_delta = ( + -1 if tx_result == TransactionResult.OUT_OF_GAS else 0 + ) + exec_txs = list( + executor_code.transactions_by_gas_limit( + fork=fork, + gas_limit=gas_benchmark_value, + sender=pre.fund_eoa(), + to=authority, + calldata=calldata_generator, + start_iteration=1, + tx_gas_limit_delta=tx_gas_limit_delta, ) - + ) + for exec_tx in exec_txs: if tx_result == TransactionResult.OUT_OF_GAS: - expected_gas_used += tx_gas + expected_gas_used += exec_tx.gas_limit else: - expected_gas_used += ( - intrinsic_calc( - calldata=calldata, - return_cost_deducted_prior_execution=True, - ) - + slots * exec_loop_cost - + exec_overhead - ) - current_slot += slots - - gas_remaining -= tx_gas + expected_gas_used += exec_tx.gas_cost blocks.append(Block(txs=exec_txs)) diff --git a/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py b/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py index a0e9c9c413..178af76dfc 100644 --- a/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py +++ b/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py @@ -3,12 +3,9 @@ This scenario is relevant in forks that have unchunkified bytecode. """ -import math - import pytest from execution_testing import ( Account, - Address, Alloc, Block, BlockchainTestFiller, @@ -16,14 +13,13 @@ Create2PreimageLayout, Fork, Hash, + IteratingBytecode, Op, TestPhaseManager, - Transaction, While, - compute_create2_address, ) -from tests.benchmark.compute.helpers import XOR_TABLE +from tests.benchmark.compute.helpers import MaxSizedContractFactory @pytest.mark.parametrize( @@ -44,7 +40,6 @@ def test_unchunkified_bytecode( fork: Fork, opcode: Op, gas_benchmark_value: int, - tx_gas_limit: int, ) -> None: """Benchmark scenario of accessing max-code size bytecode.""" # The attack gas limit represents the transaction gas limit cap or @@ -53,202 +48,119 @@ def test_unchunkified_bytecode( # for the 200 gas per byte cost and the quadratic memory-expansion # costs, which must be paid each time memory is initialized. 
attack_gas_limit = gas_benchmark_value - max_contract_size = fork.max_code_size() - - gas_costs = fork.gas_costs() - - intrinsic_gas_cost_calc = fork.transaction_intrinsic_cost_calculator() - # Calculate the loop cost of the attacker to query one address - loop_cost = ( - gas_costs.G_KECCAK_256 # KECCAK static cost - + math.ceil(85 / 32) * gas_costs.G_KECCAK_256_WORD # KECCAK dynamic - # cost for CREATE2 - + gas_costs.G_VERY_LOW * 3 # ~MSTOREs+ADDs - + gas_costs.G_COLD_ACCOUNT_ACCESS # Opcode cost - + 30 # ~Gluing opcodes - ) - # Calculate an upper bound of the number of contracts to be targeted - num_contracts = ( - # Base available gas = GAS_LIMIT - intrinsic - (out of loop MSTOREs) - attack_gas_limit - intrinsic_gas_cost_calc() - gas_costs.G_VERY_LOW * 4 - ) // loop_cost - - initcode, factory_address, factory_caller_address = ( - _deploy_max_contract_factory(pre, fork) - ) - - # Deploy num_contracts via multiple txs (each capped by tx gas limit). - with TestPhaseManager.setup(): - # Rough estimate (rounded down) of contracts per tx based on dominant - # cost factor only, and up to 90% of the block gas limit. - # The goal is to involve the minimum amount of gas pricing to avoid - # complexity and potential brittleness. - num_contracts_per_tx = int(tx_gas_limit * 0.9) // ( - gas_costs.G_CODE_DEPOSIT_BYTE * max_contract_size - ) - if num_contracts_per_tx == 0: - pytest.skip("tx_gas_limit too low to deploy max-size contract") - setup_txs = math.ceil(num_contracts / num_contracts_per_tx) - - contracts_deployment_txs = [] - for _ in range(setup_txs): - contracts_deployment_txs.append( - Transaction( - to=factory_caller_address, - gas_limit=tx_gas_limit, - data=Hash(num_contracts_per_tx), - sender=pre.fund_eoa(), - ) - ) - post = {} - for i in range(num_contracts): - deployed_contract_address = compute_create2_address( - address=factory_address, - salt=i, - initcode=initcode, - ) - post[deployed_contract_address] = Account(nonce=1) + # Create the max-sized fork-dependent contract factory. + max_sized_contract_factory = MaxSizedContractFactory(pre=pre, fork=fork) + factory_address = max_sized_contract_factory.address(fork=fork) + initcode = max_sized_contract_factory.initcode + # Prepare the attack iterating bytecode. + # Setup is just placing the CREATE2 Preimage in memory. create2_preimage = Create2PreimageLayout( factory_address=factory_address, salt=Op.CALLDATALOAD(0), init_code_hash=initcode.keccak256(), ) - attack_call = Bytecode() + setup_code: Bytecode = create2_preimage + if opcode == Op.EXTCODECOPY: + copy_size = 1000 attack_call = Op.EXTCODECOPY( - address=create2_preimage.address_op(), dest_offset=96, size=1000 + address=create2_preimage.address_op(), + dest_offset=96, + size=copy_size, + # Gas accounting + data_size=copy_size, + address_warm=False, + ) + # Also, expand memory during setup so the loop cost is constant. + setup_code += Op.MSTORE8( + 96 + copy_size - 1, + 0, + # Gas accounting + old_memory_size=96, + new_memory_size=96 + copy_size, ) else: # For the rest of the opcodes, we can use the same generic attack call # since all only minimally need the `address` of the target. 
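+        # (The address itself is recomputed from the CREATE2 preimage laid
+        # out during setup: keccak256(0xff ++ factory ++ salt ++
+        # keccak256(initcode))[12:] per EIP-1014, so only the salt has to
+        # change between iterations.)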
- attack_call = Op.POP(opcode(address=create2_preimage.address_op())) - attack_code = ( - create2_preimage - # Main loop - + While( - body=attack_call + create2_preimage.increment_salt_op(), + attack_call = Op.POP( + opcode( + address=create2_preimage.address_op(), + # Gas accounting + address_warm=False, + ) ) + + loop_code = While( + body=attack_call + create2_preimage.increment_salt_op(), ) + attack_code = IteratingBytecode( + setup=setup_code, + iterating=loop_code, + # Since the target contract is guaranteed to have a STOP as the first + # instruction, we can use a STOP as the iterating subcall code. + iterating_subcall=Op.STOP, + ) + + # Calldata generator for each transaction of the iterating bytecode. + def calldata(iteration_count: int, start_iteration: int) -> bytes: + del iteration_count + # We only pass the start iteration index as calldata for this bytecode + return Hash(start_iteration) + attack_address = pre.deploy_contract(code=attack_code) + # Calculate the number of contracts to be targeted. + num_contracts = sum( + attack_code.tx_iterations_by_gas_limit( + fork=fork, + gas_limit=attack_gas_limit, + calldata=calldata, + ) + ) + + # Deploy num_contracts via multiple txs (each capped by tx gas limit). + with TestPhaseManager.setup(): + setup_sender = pre.fund_eoa() + contracts_deployment_txs = list( + max_sized_contract_factory.transactions_by_total_contract_count( + fork=fork, + sender=setup_sender, + contract_count=num_contracts, + ) + ) + with TestPhaseManager.execution(): - full_txs = attack_gas_limit // tx_gas_limit - remainder = attack_gas_limit % tx_gas_limit - - num_targeted_contracts_per_full_tx = ( - # Base available gas: - # TX_GAS_LIMIT - intrinsic - (out of loop MSTOREs) - tx_gas_limit - intrinsic_gas_cost_calc() - gas_costs.G_VERY_LOW * 4 - ) // loop_cost - contract_start_index = 0 - opcode_txs = [] - for _ in range(full_txs): - opcode_txs.append( - Transaction( - to=attack_address, - gas_limit=tx_gas_limit, - data=Hash(contract_start_index), - sender=pre.fund_eoa(), - ) + attack_sender = pre.fund_eoa() + attack_txs = list( + attack_code.transactions_by_gas_limit( + fork=fork, + gas_limit=attack_gas_limit, + sender=attack_sender, + to=attack_address, + calldata=calldata, ) - contract_start_index += num_targeted_contracts_per_full_tx - if remainder > intrinsic_gas_cost_calc(calldata=bytes(32)): - opcode_txs.append( - Transaction( - to=attack_address, - gas_limit=remainder, - data=Hash(contract_start_index), - sender=pre.fund_eoa(), - ) + ) + total_gas_cost = sum(tx.gas_cost for tx in attack_txs) + + post = {} + for i in range(num_contracts): + deployed_contract_address = ( + max_sized_contract_factory.created_contract_address( + fork=fork, salt=i ) + ) + post[deployed_contract_address] = Account(nonce=1) blockchain_test( pre=pre, post=post, blocks=[ Block(txs=contracts_deployment_txs), - Block(txs=opcode_txs), + Block(txs=attack_txs), ], exclude_full_post_state_in_output=True, + expected_benchmark_gas_used=total_gas_cost, ) - - -def _deploy_max_contract_factory( - pre: Alloc, - fork: Fork, -) -> tuple[Bytecode, Address, Address]: - max_contract_size = fork.max_code_size() - - # The initcode will take its address as a starting point to the input to - # the keccak hash function. It will reuse the output of the hash function - # in a loop to create a large amount of seemingly random code, until it - # reaches the maximum contract size. 
- initcode = ( - Op.MSTORE(0, Op.ADDRESS) - + While( - body=( - Op.SHA3(Op.SUB(Op.MSIZE, 32), 32) - # Use a xor table to avoid having to call the "expensive" sha3 - # opcode as much - + sum( - ( - Op.PUSH32[xor_value] - + Op.XOR - + Op.DUP1 - + Op.MSIZE - + Op.MSTORE - ) - for xor_value in XOR_TABLE - ) - + Op.POP - ), - condition=Op.LT(Op.MSIZE, max_contract_size), - ) - # Despite the whole contract has random bytecode, we make the first - # opcode be a STOP so CALL-like attacks return as soon as possible, - # while EXTCODE(HASH|SIZE) work as intended. - + Op.MSTORE8(0, 0x00) - + Op.RETURN(0, max_contract_size) - ) - initcode_address = pre.deploy_contract(code=initcode) - - # The factory contract will simply use the initcode that is already - # deployed, and create a new contract and return its address if successful. - factory_code = ( - Op.EXTCODECOPY( - address=initcode_address, - dest_offset=0, - offset=0, - size=Op.EXTCODESIZE(initcode_address), - ) - + Op.MSTORE( - 0, - Op.CREATE2( - value=0, - offset=0, - size=Op.EXTCODESIZE(initcode_address), - salt=Op.SLOAD(0), - ), - ) - + Op.SSTORE(0, Op.ADD(Op.SLOAD(0), 1)) - + Op.RETURN(0, 32) - ) - factory_address = pre.deploy_contract(code=factory_code) - - # The factory caller will call the factory contract N times, creating N new - # contracts. Calldata should contain the N value. - factory_caller_code = Op.CALLDATALOAD(0) + While( - body=Op.POP(Op.CALL(address=factory_address)), - condition=Op.PUSH1(1) - + Op.SWAP1 - + Op.SUB - + Op.DUP1 - + Op.ISZERO - + Op.ISZERO, - ) - factory_caller_address = pre.deploy_contract(code=factory_caller_code) - - return initcode, factory_address, factory_caller_address From 7ab5c836b9a931feb295fce0a88e7f70cb27cd5b Mon Sep 17 00:00:00 2001 From: spencer Date: Thu, 5 Feb 2026 12:19:37 +0000 Subject: [PATCH 126/154] feat(test-benchmark): updates and fixes for fixed opcode count (#1985) --- .github/workflows/benchmark.yaml | 7 + .../execution_testing/cli/benchmark_parser.py | 128 ++--- .../plugins/filler/tests/test_benchmarking.py | 494 +++++++++++++++++- .../plugins/shared/benchmarking.py | 231 +++++++- .../src/execution_testing/specs/benchmark.py | 61 ++- .../compute/instruction/test_arithmetic.py | 1 - .../benchmark/compute/instruction/test_log.py | 20 +- .../compute/instruction/test_system.py | 23 +- .../compute/precompile/test_ecrecover.py | 2 +- .../precompile/test_point_evaluation.py | 2 +- whitelist.txt | 4 + 11 files changed, 847 insertions(+), 126 deletions(-) diff --git a/.github/workflows/benchmark.yaml b/.github/workflows/benchmark.yaml index 532f765ce2..0e492a5932 100644 --- a/.github/workflows/benchmark.yaml +++ b/.github/workflows/benchmark.yaml @@ -40,8 +40,15 @@ jobs: enable-cache: false version: ${{ vars.UV_VERSION }} + - uses: ./.github/actions/build-evm-base + id: evm-builder + with: + type: benchmark + - name: Run benchmark unit tests run: uvx tox -e tests_benchmark_pytest_py3 + env: + EVM_BIN: ${{ steps.evm-builder.outputs.evm-bin }} sanity-checks: name: ${{ matrix.name }} diff --git a/packages/testing/src/execution_testing/cli/benchmark_parser.py b/packages/testing/src/execution_testing/cli/benchmark_parser.py index d8e400952a..e0d74817ec 100644 --- a/packages/testing/src/execution_testing/cli/benchmark_parser.py +++ b/packages/testing/src/execution_testing/cli/benchmark_parser.py @@ -6,11 +6,12 @@ Usage: uv run benchmark_parser # Update `.fixed_opcode_counts.json` - uv run benchmark_parser --check # Check for new/missing entries (CI) + uv run benchmark_parser --check # Check for 
new/missing entries """ import argparse import ast +import re import sys from pathlib import Path @@ -19,6 +20,31 @@ ) +def is_related_pattern(pattern: str, detected_patterns: set[str]) -> bool: + """ + Check if a pattern is related to any detected patterns or more specific. + Related patterns are preserved as they're intentional overrides. + """ + # Check if existing pattern is BROADER than detected + try: + compiled = re.compile(pattern) + for detected in detected_patterns: + if compiled.search(detected): + return True + except re.error: + pass + + # Check if existing pattern is MORE SPECIFIC than detected + for detected in detected_patterns: + try: + if re.search(detected, pattern): + return True + except re.error: + continue + + return False + + def get_repo_root() -> Path: """Get the repository root directory.""" current = Path.cwd() @@ -209,21 +235,16 @@ def _extract_opcode_name(self, node: ast.expr) -> str | None: return None -def scan_benchmark_tests( - base_path: Path, -) -> tuple[dict[str, list[int]], dict[str, Path]]: +def scan_benchmark_tests(base_path: Path) -> dict[str, list[float]]: """ Scan benchmark test files and extract opcode patterns. Returns: - Tuple of (config, pattern_sources) where: - - config: mapping of pattern -> opcode counts - - pattern_sources: mapping of pattern -> source file path + Mapping of pattern -> opcode counts (default [1] for new patterns). """ - config: dict[str, list[int]] = {} - pattern_sources: dict[str, Path] = {} - default_counts = [1] + config: dict[str, list[float]] = {} + default_counts: list[float] = [1.0] test_files = [ f @@ -242,12 +263,11 @@ def scan_benchmark_tests( for pattern in extractor.patterns: if pattern not in config: config[pattern] = default_counts - pattern_sources[pattern] = test_file except Exception as e: print(f"Warning: Failed to parse {test_file}: {e}") continue - return config, pattern_sources + return config def load_existing_config(config_file: Path) -> OpcodeCountsConfig: @@ -257,47 +277,12 @@ def load_existing_config(config_file: Path) -> OpcodeCountsConfig: return OpcodeCountsConfig.model_validate_json(config_file.read_bytes()) -def categorize_patterns( - config: dict[str, list[int]], pattern_sources: dict[str, Path] -) -> dict[str, list[str]]: - """ - Categorize patterns by deriving category from source file name. 
- - Example: test_arithmetic.py -> ARITHMETIC - """ - categories: dict[str, list[str]] = {} - - for pattern in config.keys(): - if pattern in pattern_sources: - source_file = pattern_sources[pattern] - file_name = source_file.stem - if file_name.startswith("test_"): - category = file_name[5:].upper() # Remove "test_" prefix - else: - category = "OTHER" - else: - category = "OTHER" - - if category not in categories: - categories[category] = [] - categories[category].append(pattern) - - return {k: sorted(v) for k, v in sorted(categories.items())} - - def generate_config_json( - config: dict[str, list[int]], - pattern_sources: dict[str, Path], - default_counts: list[int], + config: dict[str, list[float]], + default_counts: list[float], ) -> OpcodeCountsConfig: - """Generate the JSON config file content.""" - categories = categorize_patterns(config, pattern_sources) - - scenario_configs: dict[str, list[int]] = {} - for _, patterns in categories.items(): - for pattern in patterns: - scenario_configs[pattern] = config[pattern] - + """Generate the JSON config file content with sorted patterns.""" + scenario_configs = {k: config[k] for k in sorted(config.keys())} return OpcodeCountsConfig( scenario_configs=scenario_configs, default_counts=default_counts, @@ -324,7 +309,7 @@ def main() -> int: return 1 print(f"Scanning benchmark tests in {benchmark_dir}...") - detected, pattern_sources = scan_benchmark_tests(benchmark_dir) + detected = scan_benchmark_tests(benchmark_dir) print(f"Detected {len(detected)} opcode patterns") existing_file = load_existing_config(config_file) @@ -334,11 +319,28 @@ def main() -> int: detected_keys = set(detected.keys()) existing_keys = set(existing.keys()) new_patterns = sorted(detected_keys - existing_keys) - obsolete_patterns = sorted(existing_keys - detected_keys) + # Separate truly obsolete patterns from related patterns that should be + # kept + potentially_obsolete = existing_keys - detected_keys + related_patterns: set[str] = set() + obsolete_patterns: set[str] = set() + for pattern in potentially_obsolete: + if is_related_pattern(pattern, detected_keys): + related_patterns.add(pattern) + else: + obsolete_patterns.add(pattern) + + # Merge: start with detected, preserve existing counts, keep related + # patterns merged = detected.copy() for pattern, counts in existing.items(): if pattern in detected_keys: + # Preserve existing counts for detected patterns + merged[pattern] = counts + elif pattern in related_patterns: + # Keep related patterns (broader or more specific) with their + # existing counts merged[pattern] = counts print("\n" + "=" * 60) @@ -352,14 +354,21 @@ def main() -> int: if len(new_patterns) > 15: print(f" ... and {len(new_patterns) - 15} more") + if related_patterns: + print(f"\n~ Preserving {len(related_patterns)} RELATED patterns:") + for p in sorted(related_patterns)[:15]: + print(f" {p}") + if len(related_patterns) > 15: + print(f" ... and {len(related_patterns) - 15} more") + if obsolete_patterns: print(f"\n- Found {len(obsolete_patterns)} OBSOLETE patterns:") - for p in obsolete_patterns[:15]: + for p in sorted(obsolete_patterns)[:15]: print(f" {p}") if len(obsolete_patterns) > 15: print(f" ... 
and {len(obsolete_patterns) - 15} more") - if not new_patterns and not obsolete_patterns: + if not new_patterns and not obsolete_patterns and not related_patterns: print("\nConfiguration is up to date!") print("=" * 60) @@ -370,14 +379,7 @@ def main() -> int: return 1 return 0 - for pattern in obsolete_patterns: - print(f"Removing obsolete: {pattern}") - if pattern in merged: - del merged[pattern] - - content = generate_config_json( - merged, pattern_sources, existing_file.default_counts - ) + content = generate_config_json(merged, existing_file.default_counts) config_file.write_text( content.model_dump_json(exclude_defaults=True, indent=2) ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_benchmarking.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_benchmarking.py index 2138aa6a11..2dd85aa18f 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_benchmarking.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_benchmarking.py @@ -1,17 +1,27 @@ """Test the benchmarking pytest plugin for gas benchmark values.""" +import json import textwrap from pathlib import Path from typing import List +from unittest.mock import MagicMock import pytest +from execution_testing.cli.pytest_commands.plugins.shared.benchmarking import ( + OpcodeCountsConfig, +) + +# EVM binary for tests that actually fill (not just collect) +BENCHMARK_EVM_T8N = "evmone-t8n" + test_module_dummy = textwrap.dedent( """\ import pytest from execution_testing import BenchmarkTestFiller, JumpLoopGenerator, Op @pytest.mark.valid_at("Prague") + @pytest.mark.benchmark def test_dummy_benchmark_test(benchmark_test: BenchmarkTestFiller) -> None: benchmark_test( target_opcode=Op.JUMPDEST, @@ -26,6 +36,7 @@ def test_dummy_benchmark_test(benchmark_test: BenchmarkTestFiller) -> None: from execution_testing import BenchmarkTestFiller, JumpLoopGenerator, Op @pytest.mark.valid_at("Prague") + @pytest.mark.benchmark def test_dummy_no_benchmark_test(benchmark_test: BenchmarkTestFiller) -> None: benchmark_test( target_opcode=Op.JUMPDEST, @@ -40,6 +51,7 @@ def test_dummy_no_benchmark_test(benchmark_test: BenchmarkTestFiller) -> None: from execution_testing import BenchmarkTestFiller, JumpLoopGenerator, Op @pytest.mark.valid_at("Prague") + @pytest.mark.benchmark @pytest.mark.repricing def test_benchmark_with_repricing(benchmark_test: BenchmarkTestFiller) -> None: benchmark_test( @@ -48,9 +60,8 @@ def test_benchmark_with_repricing(benchmark_test: BenchmarkTestFiller) -> None: ) @pytest.mark.valid_at("Prague") - def test_benchmark_without_repricing( - benchmark_test: BenchmarkTestFiller - ) -> None: + @pytest.mark.benchmark + def test_benchmark_without_repricing(benchmark_test: BenchmarkTestFiller) -> None: benchmark_test( target_opcode=Op.JUMPDEST, code_generator=JumpLoopGenerator(attack_block=Op.JUMPDEST), @@ -64,12 +75,14 @@ def test_benchmark_without_repricing( from execution_testing import BenchmarkTestFiller, JumpLoopGenerator, Op @pytest.mark.valid_at("Prague") + @pytest.mark.benchmark def test_with_gas_benchmark_value(state_test, gas_benchmark_value: int) -> None: # This test intentionally uses state_test instead of benchmark_test # to verify that --fixed-opcode-count filters it out state_test(pre={}, post={}, tx=None) @pytest.mark.valid_at("Prague") + @pytest.mark.benchmark def test_with_benchmark_test(benchmark_test: BenchmarkTestFiller) -> None: benchmark_test( 
target_opcode=Op.JUMPDEST, @@ -81,28 +94,32 @@ def test_with_benchmark_test(benchmark_test: BenchmarkTestFiller) -> None: test_module_with_repricing_kwargs = textwrap.dedent( """\ import pytest - from execution_testing import BenchmarkTestFiller, ExtCallGenerator, Op + from execution_testing import BenchmarkTestFiller, JumpLoopGenerator, Op @pytest.mark.valid_at("Prague") + @pytest.mark.benchmark @pytest.mark.repricing(opcode=Op.ADD) @pytest.mark.parametrize("opcode", [Op.ADD, Op.SUB, Op.MUL]) def test_parametrized_with_repricing_kwargs( benchmark_test: BenchmarkTestFiller, opcode ) -> None: + # Use JUMPDEST for benchmarking; opcode param is for filtering benchmark_test( - target_opcode=opcode, - code_generator=ExtCallGenerator(attack_block=opcode), + target_opcode=Op.JUMPDEST, + code_generator=JumpLoopGenerator(attack_block=Op.JUMPDEST), ) @pytest.mark.valid_at("Prague") + @pytest.mark.benchmark @pytest.mark.repricing @pytest.mark.parametrize("opcode", [Op.ADD, Op.SUB]) def test_parametrized_with_repricing_no_kwargs( benchmark_test: BenchmarkTestFiller, opcode ) -> None: + # Use JUMPDEST for benchmarking; opcode param is for filtering benchmark_test( - target_opcode=opcode, - code_generator=ExtCallGenerator(attack_block=opcode), + target_opcode=Op.JUMPDEST, + code_generator=JumpLoopGenerator(attack_block=Op.JUMPDEST), ) """ ) @@ -476,3 +493,464 @@ def test_without_repricing_flag_collects_all_tests( assert any( "test_benchmark_without_repricing" in line for line in result.outlines ) + + +def test_fixed_opcode_count_exact_match_priority() -> None: + """ + Exact match takes priority over regex patterns. + + When using a config file, patterns are matched against test names. An exact + string match should take priority over a regex pattern that also matches. + """ + config = OpcodeCountsConfig( + scenario_configs={ + "test_dup": [10], + "test_dup.*": [1], + }, + default_counts=[99], + ) + + params = config.get_test_parameters("test_dup") + assert params[0].values[0] == 10 + + +def test_fixed_opcode_count_longest_pattern_wins() -> None: + """ + Longest matching pattern takes priority. + + When using a config file, if multiple regex patterns match a test name, the + longest pattern should win. This allows more specific patterns to override + broader ones. + """ + config = OpcodeCountsConfig( + scenario_configs={ + "test_dup.*": [1], + "test_dup.*DUP1.*": [5], + }, + default_counts=[99], + ) + + # Longer pattern should win for DUP1 + params = config.get_test_parameters( + "test_dup[fork_Prague-opcount_1K-opcode_DUP1]" + ) + assert params[0].values[0] == 5 + + # Shorter pattern should match for DUP2 + params = config.get_test_parameters( + "test_dup[fork_Prague-opcount_1K-opcode_DUP2]" + ) + assert params[0].values[0] == 1 + + +def test_fixed_opcode_count_default_fallback() -> None: + """ + Default counts are used when no pattern matches. + + When using a config file, if no pattern matches the test name, the + default_counts should be used as a fallback. + """ + config = OpcodeCountsConfig( + scenario_configs={ + "test_dup.*": [1], + }, + default_counts=[99], + ) + + params = config.get_test_parameters("test_other") + assert params[0].values[0] == 99 + + +def test_fixed_opcode_count_multiple_patterns() -> None: + """ + Multiple overlapping patterns are handled correctly. + + Verifies that multiple overlapping patterns of different lengths are + handled correctly. The most specific (longest) matching pattern wins. 
+ """ + config = OpcodeCountsConfig( + scenario_configs={ + "test_.*": [1], + "test_bitwise.*": [2], + "test_bitwise.*AND.*": [3], + }, + default_counts=[99], + ) + + # Most specific pattern should win + params = config.get_test_parameters("test_bitwise[fork_Prague-opcode_AND]") + assert params[0].values[0] == 3 + + # Middle specificity + params = config.get_test_parameters("test_bitwise[fork_Prague-opcode_OR]") + assert params[0].values[0] == 2 + + # Least specific + params = config.get_test_parameters("test_other[fork_Prague]") + assert params[0].values[0] == 1 + + +@pytest.mark.parametrize( + "cli_input,expected_counts", + [ + ("1", [1]), # Single integer + ("1,2,3", [1, 2, 3]), # Multiple integers + ("0.5", [0.5]), # Single float + ("0.1,0.5,1", [0.1, 0.5, 1]), # Multiple floats + ("1,0.5,2", [1, 0.5, 2]), # Mixed int/float + # 10 mixed values + ( + "0.1,0.25,0.5,0.75,1,1.25,1.5,1.75,2,3", + [0.1, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2, 3], + ), + ], +) +def test_fixed_opcode_count_valid_input( + cli_input: str, expected_counts: list +) -> None: + """ + Valid comma-separated numbers are accepted. + + The flag accepts comma-separated numbers (integers or floats) as default + opcode counts. This test verifies valid inputs are parsed correctly. + """ + mock_config = MagicMock() + mock_config.rootpath = Path("/tmp") + + result = OpcodeCountsConfig.from_parameter_value(mock_config, cli_input) + assert result is not None + assert result.default_counts == expected_counts + + +def test_fixed_opcode_count_invalid_input() -> None: + """ + Invalid values like test paths are rejected. + + The flag should reject invalid inputs like test paths that get accidentally + consumed by argparse. This prevents confusing errors when users forget to + specify opcode counts before the test path. + """ + mock_config = MagicMock() + mock_config.rootpath = Path("/tmp") + + with pytest.raises(pytest.UsageError) as exc_info: + OpcodeCountsConfig.from_parameter_value( + mock_config, "tests/benchmark/compute/test_foo.py" + ) + + assert "Invalid value for --fixed-opcode-count" in str(exc_info.value) + + +def test_fixed_opcode_count_missing_config() -> None: + """ + Missing config file raises UsageError with helpful message. + + When used without arguments, it expects to load config from + .fixed_opcode_counts.json. If the file is missing, a helpful UsageError + should be raised explaining where to create the config file. + """ + mock_config = MagicMock() + mock_config.rootpath = Path("/nonexistent/path") + + with pytest.raises(pytest.UsageError) as exc_info: + OpcodeCountsConfig.from_parameter_value(mock_config, "") + + assert ".fixed_opcode_counts.json" in str(exc_info.value) + assert "was not found" in str(exc_info.value) + + +def test_fixed_opcode_count_float_values() -> None: + """ + Float values are supported for sub-1K opcode iterations. + + For expensive precompiles that can't run 1000+ iterations within gas + limits, float values like 0.001 (1 opcode) or 0.5 (500 opcodes) work. 
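As a complement to the CLI-validation tests above, here is a minimal, self-contained sketch (not the plugin code; the error wording is illustrative) of how a comma-separated `--fixed-opcode-count` style value can be checked before being interpreted as counts in thousands:

```python
def parse_counts(value: str) -> list[float]:
    """Parse '1,0.5,2' into floats; reject anything that is not a number."""
    counts = []
    for part in value.split(","):
        try:
            counts.append(float(part.strip()))
        except ValueError as err:
            raise ValueError(
                f"Invalid value {value!r}: expected comma-separated numbers "
                "such as '1,10,100' or '0.25,0.5,1'"
            ) from err
    return counts


assert parse_counts("1,0.5,2") == [1.0, 0.5, 2.0]
assert parse_counts("0.1, 0.25") == [0.1, 0.25]
```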
+ """ + config = OpcodeCountsConfig( + scenario_configs={ + "test_precompile.*": [0.001, 0.01, 0.1], + }, + default_counts=[1.0], + ) + + counts = config.get_opcode_counts("test_precompile_bn128") + assert counts == [0.001, 0.01, 0.1] + + params = config.get_test_parameters("test_precompile_bn128") + assert len(params) == 3 + assert params[0].id == "opcount_0.001K" + assert params[1].id == "opcount_0.01K" + assert params[2].id == "opcount_0.1K" + + +def test_fixed_opcode_count_invalid_regex_raises_error() -> None: + """ + Invalid regex patterns raise an error. + + If a pattern in the config file contains invalid regex syntax, it should + raise a ValueError with a helpful message indicating the invalid pattern. + """ + config = OpcodeCountsConfig( + scenario_configs={ + "[invalid(regex": [10.0], # Invalid regex + "test_valid.*": [5.0], + }, + default_counts=[1.0], + ) + + # Should raise error when trying to match against invalid regex + with pytest.raises(ValueError) as exc_info: + config.get_opcode_counts("test_other") + + assert "Invalid regex pattern" in str(exc_info.value) + assert "[invalid(regex" in str(exc_info.value) + + +@pytest.mark.parametrize( + "config_counts,expected_tests,expected_ids", + [ + pytest.param([1], 2, ["opcount_1"], id="single_int"), + pytest.param( + [1, 2, 3], + 6, + ["opcount_1", "opcount_2", "opcount_3"], + id="multiple_ints", + ), + pytest.param([0.5], 2, ["opcount_0.5"], id="single_float"), + pytest.param( + [0.5, 1, 2], + 6, + ["opcount_0.5", "opcount_1", "opcount_2"], + id="multiple_floats", + ), + pytest.param( + [1, 0.5, 2], + 6, + ["opcount_1", "opcount_0.5", "opcount_2"], + id="mixed_int_float", + ), + pytest.param( + [1, 2, 3, 5], + 8, + ["opcount_1", "opcount_2", "opcount_3", "opcount_5"], + id="four_ints", + ), + ], +) +def test_fixed_opcode_count_config_file_parametrized( + pytester: pytest.Pytester, + config_counts: list, + expected_tests: int, + expected_ids: list, +) -> None: + """ + Config file opcode counts create correct test variants. + + The config file can specify single counts, multiple counts, or floats. + Each should parametrize tests correctly. + """ + setup_test_directory_structure( + pytester, test_module_dummy, "test_config_counts.py" + ) + + config_file = pytester.path / ".fixed_opcode_counts.json" + config_file.write_text( + json.dumps( + { + "scenario_configs": { + "test_dummy_benchmark_test.*": config_counts + } + } + ) + ) + + # Place --fixed-opcode-count after test path to avoid argparse consuming + # the path as the option value (nargs='?' 
behavior) + result = pytester.runpytest( + "-c", + "pytest-fill.ini", + "--fork", + "Prague", + "tests/benchmark/dummy_test_module/", + f"--evm-bin={BENCHMARK_EVM_T8N}", + "--fixed-opcode-count", + "-v", + ) + + assert result.ret == 0 + # Check expected number of tests (2 test types * len(counts)) + assert any(f"{expected_tests} passed" in line for line in result.outlines) + # Check opcode count IDs are present + for expected_id in expected_ids: + assert any(expected_id in line for line in result.outlines) + + +# Test module with parametrized test for per-parameter pattern matching +test_module_parametrized = textwrap.dedent( + """\ + import pytest + from execution_testing import BenchmarkTestFiller, JumpLoopGenerator, Op + + @pytest.mark.valid_at("Prague") + @pytest.mark.benchmark + @pytest.mark.parametrize("size", [0, 32, 256, 1024]) + def test_parametrized_benchmark( + benchmark_test: BenchmarkTestFiller, size: int + ) -> None: + benchmark_test( + target_opcode=Op.JUMPDEST, + code_generator=JumpLoopGenerator(attack_block=Op.JUMPDEST), + ) + """ +) + + +@pytest.mark.parametrize( + "config,expected_test_ids", + [ + # Single count per parameter - different counts for different sizes + pytest.param( + { + "test_parametrized_benchmark.*size_0.*": [5], + "test_parametrized_benchmark.*size_256.*": [3], + "test_parametrized_benchmark.*size_1024.*": [2], + }, + [ + # size_0->5, size_32->default(1), size_256->3, size_1024->2 + "size_0-opcount_5", + "size_32-opcount_1", + "size_256-opcount_3", + "size_1024-opcount_2", + ], + id="single_count_per_param", + ), + # Multiple counts per parameter (floats and ints) + pytest.param( + { + "test_parametrized_benchmark.*size_0.*": [0.5, 1, 2], + "test_parametrized_benchmark.*size_1024.*": [0.5, 0.75], + }, + [ + # size_0->[0.5,1,2], size_32->default[1], size_1024->[0.5,0.75] + "size_0-opcount_0.5", + "size_0-opcount_1", + "size_0-opcount_2", + "size_32-opcount_1", + "size_256-opcount_1", + "size_1024-opcount_0.5", + "size_1024-opcount_0.75", + ], + id="multiple_counts_per_param", + ), + # Per-param patterns with test_.* fallback for unmatched params + pytest.param( + { + "test_parametrized_benchmark.*size_0.*": [5], + "test_parametrized_benchmark.*size_1024.*": [10], + "test_.*": [2, 3], # Fallback for size_32, size_256 + }, + [ + # size_0 -> [5] (specific), size_32 -> [2,3] (fallback), + # size_256 -> [2,3] (fallback), size_1024 -> [10] (specific) + "size_0-opcount_5", + "size_32-opcount_2", + "size_32-opcount_3", + "size_256-opcount_2", + "size_256-opcount_3", + "size_1024-opcount_10", + ], + id="per_param_with_fallback", + ), + # All params same counts via broad pattern + pytest.param( + { + "test_parametrized_benchmark.*": [1, 2, 3], + }, + [ + # All sizes get [1, 2, 3] + "size_0-opcount_1", + "size_0-opcount_2", + "size_0-opcount_3", + "size_32-opcount_1", + "size_1024-opcount_3", + ], + id="all_same_counts", + ), + ], +) +def test_fixed_opcode_count_per_parameter_patterns( + pytester: pytest.Pytester, + config: dict, + expected_test_ids: List[str], +) -> None: + """ + Per-parameter opcode count patterns work correctly. + + Patterns like "test_foo.*size_256.*" should match tests with that specific + parameter value and apply the corresponding opcode counts. 
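The per-parameter expectations listed above amount to pruning a cartesian product of (parameter, count) combinations. A rough, standalone sketch of that idea, with invented test names and config:

```python
import re


def allowed_counts(simulated_id, scenario_configs, default_counts):
    matches = [
        (pattern, counts)
        for pattern, counts in scenario_configs.items()
        if re.search(pattern, simulated_id)
    ]
    return max(matches, key=lambda m: len(m[0]))[1] if matches else default_counts


config = {"test_x.*size_0.*": [5], "test_.*": [2, 3]}
combos = [
    (size, count)
    for size in ("size_0", "size_32")
    for count in (2, 3, 5)
    if count in allowed_counts(f"test_x[{size}]", config, [1])
]
assert combos == [("size_0", 5), ("size_32", 2), ("size_32", 3)]
```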
+ """ + setup_test_directory_structure( + pytester, test_module_parametrized, "test_param_benchmark.py" + ) + + config_file = pytester.path / ".fixed_opcode_counts.json" + config_file.write_text(json.dumps({"scenario_configs": config})) + + result = pytester.runpytest( + "-c", + "pytest-fill.ini", + "--fork", + "Prague", + "tests/benchmark/dummy_test_module/", + f"--evm-bin={BENCHMARK_EVM_T8N}", + "--fixed-opcode-count", + "-v", + ) + + assert result.ret == 0 + + # Verify expected test IDs are present + output = "\n".join(result.outlines) + for expected_id in expected_test_ids: + assert expected_id in output, ( + f"Expected '{expected_id}' in output but not found.\n" + f"Output:\n{output}" + ) + + +def test_cli_mode_ignores_per_parameter_patterns( + pytester: pytest.Pytester, +) -> None: + """ + CLI mode applies same counts to all parameters. + + When using --fixed-opcode-count=1,5 (explicit CLI values), all test + variants should get the same opcode counts regardless of their parameters. + This verifies CLI mode doesn't accidentally use per-parameter matching. + """ + setup_test_directory_structure( + pytester, test_module_parametrized, "test_cli_mode.py" + ) + + result = pytester.runpytest( + "-c", + "pytest-fill.ini", + "--fork", + "Prague", + "--fixed-opcode-count=1,5", + "tests/benchmark/dummy_test_module/", + f"--evm-bin={BENCHMARK_EVM_T8N}", + "-v", + ) + + assert result.ret == 0 + output = "\n".join(result.outlines) + + # All size variants should have both opcount_1 and opcount_5 + for size in ["size_0", "size_32", "size_256", "size_1024"]: + assert ( + f"{size}-opcount_1.0K" in output or f"{size}-opcount_1K" in output + ), f"Expected {size} with opcount_1 in output" + assert ( + f"{size}-opcount_5.0K" in output or f"{size}-opcount_5K" in output + ), f"Expected {size} with opcount_5 in output" diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/benchmarking.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/benchmarking.py index b75335333f..6e1d26414d 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/benchmarking.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/benchmarking.py @@ -41,7 +41,12 @@ def pytest_addoption(parser: pytest.Parser) -> None: const="", help=( "Opcode counts (in thousands) for benchmark tests. " - "Example: '1,10,100' runs tests with 1K, 10K, 100K opcodes. " + "Granularity rules (for ≤10%% CALL overhead): " + "cheap ops (1-2 gas): integers only, no sub-1K; " + "medium ops (3-5 gas): 0.5 increments, min 0.5K; " + "expensive ops (6+ gas): 0.25 increments, min 0.25K; " + "very expensive (100+ gas): 0.25 increments, min 0.01K. " + "Example: '0.5,1,2' runs 500, 1K, 2K opcodes. " "Without value, uses .fixed_opcode_counts.json config. " f"Cannot be used with {GasBenchmarkValues.flag}." ), @@ -134,15 +139,13 @@ class GasBenchmarkValues(RootModel, BenchmarkParametrizer): @classmethod def from_parameter_value( - cls, config: pytest.Config, value: str + cls, _config: pytest.Config, value: str ) -> Self | None: """Given the parameter value and config, return the expected object.""" - del config return cls.model_validate(value.split(",")) - def get_test_parameters(self, test_name: str) -> list[ParameterSet]: + def get_test_parameters(self, _test_name: str) -> list[ParameterSet]: """Get benchmark values. 
All tests have the same list.""" - del test_name return [ pytest.param( gas_value * 1_000_000, @@ -155,8 +158,9 @@ def get_test_parameters(self, test_name: str) -> list[ParameterSet]: class OpcodeCountsConfig(BaseModel, BenchmarkParametrizer): """Opcode counts configuration object.""" - scenario_configs: Dict[str, List[int]] = Field(default_factory=dict) - default_counts: List[int] = Field(default_factory=lambda: [1]) + scenario_configs: Dict[str, List[float]] = Field(default_factory=dict) + default_counts: List[float] = Field(default_factory=lambda: [1.0]) + uses_config_file: bool = Field(default=False) default_config_file_name: ClassVar[str] = ".fixed_opcode_counts.json" flag: ClassVar[str] = "--fixed-opcode-count" @@ -171,48 +175,171 @@ def from_parameter_value( if value == "": default_file = Path(config.rootpath) / cls.default_config_file_name if default_file.exists(): - return cls.model_validate_json(default_file.read_bytes()) + data = default_file.read_bytes() + instance = cls.model_validate_json(data) + instance.uses_config_file = True + return instance else: - pytest.UsageError( + raise pytest.UsageError( "--fixed-opcode-count was provided without a value, but " f"{cls.default_config_file_name} was not found. " "Run 'uv run benchmark_parser' to generate it, or provide " "explicit values (e.g., --fixed-opcode-count 1,10,100)." ) - return cls.model_validate({"default_counts": value.split(",")}) + # Validate that value looks like comma-separated numbers (int or float) + # This catches the case where argparse greedily consumes a test path + parts = value.split(",") + + def is_number(s: str) -> bool: + try: + float(s.strip()) + return True + except ValueError: + return False + + if not all(is_number(part) for part in parts): + raise pytest.UsageError( + f"Invalid value for --fixed-opcode-count: '{value}'. " + "Expected comma-separated numbers (e.g., '1,10,100' or " + "'0.25,0.5,1') or no value to use the config file. " + "If providing a value, use --fixed-opcode-count=VALUE " + "syntax to avoid argparse consuming test paths as the value." + ) + return cls.model_validate( + {"default_counts": parts, "uses_config_file": False} + ) - def get_test_parameters(self, test_name: str) -> list[ParameterSet]: + def get_opcode_counts(self, test_name: str) -> list[float]: """ - Get opcode counts for a test using regex pattern matching. + Get opcode counts for a test using pattern matching. + + Matching priority: + 1. Exact match in scenario_configs + 2. Regex pattern match (longest pattern wins for specificity) + 3. Default counts as fallback + + Example with config: + {"test_dup": [10], "test_dup.*": [1], "test_dup.*DUP1.*": [5]} + + - "test_dup" -> [10] (exact match) + - "test_dup[fork_Prague-opcode_DUP1]" -> [5] (longest pattern matches) + - "test_dup[fork_Prague-opcode_DUP2]" -> [1] (matches "test_dup.*") + - "test_other" -> default_counts (no match) + + Note: In config file mode, test names don't have opcount yet when this + is called - we look up the count first, then add it to the test name. 
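For readers wiring this up locally, the config file consumed above is JSON with a `scenario_configs` mapping and, optionally, `default_counts`. A hedged sketch of loading it (file name taken from the constants above; error text is illustrative only):

```python
import json
from pathlib import Path


def load_fixed_opcode_counts(root: Path) -> tuple[dict, list[float]]:
    path = root / ".fixed_opcode_counts.json"
    if not path.exists():
        raise FileNotFoundError(
            f"{path} was not found; generate it or pass explicit counts"
        )
    data = json.loads(path.read_text())
    return data.get("scenario_configs", {}), data.get("default_counts", [1.0])
```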
""" counts = self.default_counts - # Try exact match first (faster) + if test_name in self.scenario_configs: counts = self.scenario_configs[test_name] else: - # Try regex patterns + matches: list[tuple[str, list[float]]] = [] for pattern, pattern_counts in self.scenario_configs.items(): if pattern == test_name: continue try: if re.search(pattern, test_name): - counts = pattern_counts - break - except re.error: - continue + matches.append((pattern, pattern_counts)) + except re.error as e: + raise ValueError( + f"Invalid regex pattern '{pattern}' in config: {e}" + ) from e + + if matches: + matches.sort(key=lambda x: len(x[0]), reverse=True) + counts = matches[0][1] + + return counts + + def get_test_parameters(self, test_name: str) -> list[ParameterSet]: + """Get opcode counts as pytest parameters.""" + # Deduplicate while preserving order + unique_counts = list(dict.fromkeys(self.get_opcode_counts(test_name))) return [ - pytest.param( - opcode_count, - id=f"opcount_{opcode_count}K", - ) - for opcode_count in counts + pytest.param(opcode_count, id=f"opcount_{opcode_count}K") + for opcode_count in unique_counts ] + def parametrize(self, metafunc: pytest.Metafunc) -> None: + """ + Parametrize a test with opcode counts. + + In config file mode with existing parametrizations (metafunc._calls), + generates opcode counts per-parameter by matching patterns against + simulated test IDs built from existing params. + + In CLI mode (explicit counts), uses function name for pattern matching. + """ + # Check for direct or indirect use of fixed_opcode_count. + # The benchmark_test fixture depends on fixed_opcode_count, so if the + # test uses benchmark_test, we need to parametrize fixed_opcode_count. + if self.parameter_name not in metafunc.fixturenames: + if "benchmark_test" not in metafunc.fixturenames: + return + # benchmark_test uses fixed_opcode_count - add it to fixtures + metafunc.fixturenames.append(self.parameter_name) + + test_name = metafunc.function.__name__ + + if ( + self.uses_config_file + and hasattr(metafunc, "_calls") + and metafunc._calls + ): + # Config file mode with existing parametrizations: + # Build simulated IDs from existing params and match patterns + self._parametrize_with_existing_params(metafunc, test_name) + else: + # Config file mode (no existing params) or CLI mode: + # match against function name + metafunc.parametrize( + self.parameter_name, + self.get_test_parameters(test_name), + scope="function", + ) + + def _parametrize_with_existing_params( + self, metafunc: pytest.Metafunc, test_name: str + ) -> None: + """ + Parametrize opcode counts based on existing test parameters. + + For each existing parameter combination in metafunc._calls, build a + simulated test ID and match patterns to get the appropriate counts. + + We collect ALL unique counts across all parameter combinations and add + them as a simple parametrization. This creates all combinations + (cartesian product). Unwanted combinations filtered in modifyitems. + """ + # Collect opcode counts for each call (indexed by position) + all_unique_counts: set[float] = set() + + for call in metafunc._calls: + # Build simulated test ID using call.id (already formatted) + # Format: test_name[fork_--] + simulated_id = f"{test_name}[{call.id}]" if call.id else test_name + + # Get opcode counts for this simulated ID and add to unique set + counts = self.get_opcode_counts(simulated_id) + all_unique_counts.update(counts) + + # Add all unique counts as simple parametrization (multiplies with + # existing). 
Unwanted combinations filtered in collection_modifyitems + metafunc.parametrize( + self.parameter_name, + [ + pytest.param(count, id=f"opcount_{count}K") + for count in sorted(all_unique_counts) + ], + scope="function", + ) + def pytest_collection_modifyitems( config: pytest.Config, items: list[pytest.Item] ) -> None: - """Filter tests based on repricing marker.""" + """Filter tests based on repricing marker and opcode count patterns.""" gas_benchmark_value = GasBenchmarkValues.from_config(config) fixed_opcode_count = OpcodeCountsConfig.from_config(config) @@ -238,6 +365,10 @@ def pytest_collection_modifyitems( filtered.append(item) items[:] = filtered + # Filter per-parameter opcode counts if using config file mode + if fixed_opcode_count.uses_config_file: + _filter_opcode_count_combinations(items, fixed_opcode_count) + # Extract the specified flag from the command line. # If the `-m repricing` flag is not specified, or is negated, # we skip filtering tests by the repricing marker. @@ -270,8 +401,58 @@ def pytest_collection_modifyitems( items[:] = filtered +def _filter_opcode_count_combinations( + items: list[pytest.Item], opcode_config: "OpcodeCountsConfig" +) -> None: + """ + Filter test items to only keep valid opcode count combinations. + + When using config file mode with per-parameter patterns, we generate all + combinations (cartesian product) in pytest_generate_tests. Here we filter + out combinations where the opcode count doesn't match the pattern for + that specific parameter combination. + """ + filtered = [] + + for item in items: + if not hasattr(item, "callspec"): + filtered.append(item) + continue + + params = item.callspec.params + opcode_count = params.get(OpcodeCountsConfig.parameter_name) + + if opcode_count is None: + filtered.append(item) + continue + + # Build simulated test ID WITHOUT the opcode count for pattern matching + # Format: test_func[fork_X-fixture_format-params-opcount_Y] + # Target: test_func[fork_X-fixture_format-params] + test_name = item.name + + # Remove the opcode count part from the test ID for pattern matching + # Pattern: -opcount_X.XK or -opcount_XK at the end before ] + simulated_id = re.sub(r"-opcount_[\d.]+K\]$", "]", test_name) + + # Get valid counts for this parameter combination + valid_counts = opcode_config.get_opcode_counts(simulated_id) + + # Keep item only if its opcode count is valid for this combination + if opcode_count in valid_counts: + filtered.append(item) + + items[:] = filtered + + +@pytest.hookimpl(trylast=True) def pytest_generate_tests(metafunc: pytest.Metafunc) -> None: - """Generate tests for the gas benchmark values and fixed opcode counts.""" + """ + Generate tests for the gas benchmark values and fixed opcode counts. + + Uses trylast=True to run after other parametrizations so we can access + existing parameters in metafunc._calls for pattern matching. 
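The id handling in `_filter_opcode_count_combinations` above is easy to check in isolation: the opcount suffix added by parametrization is stripped before patterns are applied, so patterns written against the remaining parameter ids still match. A standalone sketch using the same regex:

```python
import re


def strip_opcount(test_id: str) -> str:
    return re.sub(r"-opcount_[\d.]+K\]$", "]", test_id)


assert (
    strip_opcount("test_log[fork_Prague-size_32-opcount_0.5K]")
    == "test_log[fork_Prague-size_32]"
)
assert strip_opcount("test_log[fork_Prague-size_32]") == "test_log[fork_Prague-size_32]"
```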
+ """ parametrizer = GasBenchmarkValues.from_config( metafunc.config ) or OpcodeCountsConfig.from_config(metafunc.config) diff --git a/packages/testing/src/execution_testing/specs/benchmark.py b/packages/testing/src/execution_testing/specs/benchmark.py index 0777d69b7c..2ba70e8059 100644 --- a/packages/testing/src/execution_testing/specs/benchmark.py +++ b/packages/testing/src/execution_testing/specs/benchmark.py @@ -54,7 +54,7 @@ class BenchmarkCodeGenerator(ABC): setup: Bytecode = field(default_factory=Bytecode) cleanup: Bytecode = field(default_factory=Bytecode) tx_kwargs: Dict[str, Any] = field(default_factory=dict) - fixed_opcode_count: int | None = None + fixed_opcode_count: float | None = None code_padding_opcode: Op | None = None _contract_address: Address | None = None _inner_iterations: int = 1000 @@ -78,10 +78,13 @@ def deploy_fix_count_contracts(self, *, pre: Alloc, fork: Fork) -> Address: "fixed_opcode_count is not set" ) # Adjust outer loop iterations based on inner iterations - # If inner is 500 instead of 1000, double the outer loop - outer_multiplier = 1000 // self._inner_iterations - iterations = self.fixed_opcode_count * outer_multiplier - + if self.fixed_opcode_count < 1.0: + # < 1000 opcodes, outer = 1 as inner already set to exact count + iterations = 1 + else: + # >= 1000: calculate outer iterations from target / inner + target_opcodes = int(self.fixed_opcode_count * 1000) + iterations = target_opcodes // self._inner_iterations prefix = Op.CALLDATACOPY( Op.PUSH0, Op.PUSH0, Op.CALLDATASIZE ) + Op.PUSH4(iterations) @@ -188,19 +191,51 @@ def generate_repeated_code( # # 1a. Calculate 'max_iterations' to fill the block. # 1b. The Inner Iteration count (N) is capped at 1000. - # 1c. If the calculated N is less than 1000, use 500 as the fallback. + # 1c. If the calculated N is less than 1000, use 250 as fallback. # --- 2. Determine Outer Iterations (M) --- # The Loop Contract's call count (M) is set to ensure the final # total execution is consistent. # - # 2a. If N is 1000: Set M = fixed_opcode_count. - # (Total ops: fixed_opcode_count * 1000) - # 2b. If N is 500: Set M = fixed_opcode_count * 2. - # (Total ops: (fixed_opcode_count * 2) * 500) + # 2a. If N=1000: M = fixed_opcode_count (Total: foc*1000) + # 2b. If N=250: M = fixed_opcode_count*4 (Total: same as above) + # + # --- 3. Sub-1K Case (fixed_opcode_count < 1.0) --- + # For Sub-1K counts (e.g., 0.25 = 250 opcodes): N = exact count, M = 1. 
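The comment block above describes the split between inner iterations (N) and the outer call count (M). Purely as illustrative arithmetic, not the generator class itself, the same rules look like this:

```python
def split_iterations(fixed_opcode_count: float, max_inner: int) -> tuple[int, int]:
    """Return (inner iterations N, outer calls M) for a count in thousands."""
    target = int(fixed_opcode_count * 1000)
    if fixed_opcode_count < 1.0:
        return min(max_inner, target), 1  # sub-1K: exact inner count, single call
    if max_inner >= 250 and target % 250 == 0:
        return 250, target // 250  # 0.25K granularity
    if max_inner >= target:
        return target, 1
    raise ValueError("count too large for one inner loop; use multiples of 0.25K")


assert split_iterations(0.5, 1000) == (500, 1)
assert split_iterations(2, 1000) == (250, 8)
assert split_iterations(0.001, 1000) == (1, 1)
```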
if self.fixed_opcode_count is not None: - inner_iterations = 1000 if max_iterations >= 1000 else 500 - self._inner_iterations = min(max_iterations, inner_iterations) + if self.fixed_opcode_count < 0.001: + raise ValueError( + f"fixed_opcode_count must be >= 0.001 (1 opcode), " + f"got {self.fixed_opcode_count}" + ) + if self.fixed_opcode_count < 1.0: + # < 1000 opcodes, inner = exact count, outer = 1 + self._inner_iterations = min( + max_iterations, int(self.fixed_opcode_count * 1000) + ) + else: + # >= 1000 opcodes: use 250 inner iterations (0.25K granularity) + target_opcodes = int(self.fixed_opcode_count * 1000) + + if max_iterations >= 250 and target_opcodes % 250 == 0: + inner_iterations = 250 + elif max_iterations >= target_opcodes: + # Use exact count as inner with outer = 1 + inner_iterations = target_opcodes + else: + suggested_lo = ((target_opcodes // 250) * 250) / 1000 + suggested_hi = ((target_opcodes // 250 + 1) * 250) / 1000 + raise ValueError( + f"fixed_opcode_count {self.fixed_opcode_count} " + f"({target_opcodes} opcodes) exceeds max contract " + f"size for this attack block.\n" + f"Contract size limit allows up to {max_iterations} " + f"opcodes ({max_iterations / 1000:.3f}K) in the " + f"inner loop.\n" + f"For counts above this limit, use multiples of 0.25K " + f"(e.g., {suggested_lo:.2f} or {suggested_hi:.2f})." + ) + self._inner_iterations = inner_iterations # TODO: Unify the PUSH0 and PUSH1 usage. iterations = ( @@ -252,7 +287,7 @@ class BenchmarkTest(BaseTest): gas_benchmark_value: int = Field( default_factory=lambda: int(Environment().gas_limit) ) - fixed_opcode_count: int | None = None + fixed_opcode_count: float | None = None target_opcode: Op | None = None code_generator: BenchmarkCodeGenerator | None = None diff --git a/tests/benchmark/compute/instruction/test_arithmetic.py b/tests/benchmark/compute/instruction/test_arithmetic.py index 765541e4e9..c6a7261fe1 100644 --- a/tests/benchmark/compute/instruction/test_arithmetic.py +++ b/tests/benchmark/compute/instruction/test_arithmetic.py @@ -185,7 +185,6 @@ def test_arithmetic( ) -@pytest.mark.repricing(mod_bits=127) @pytest.mark.parametrize("mod_bits", [255, 191, 127, 63]) @pytest.mark.parametrize("opcode", [Op.MOD, Op.SMOD]) def test_mod( diff --git a/tests/benchmark/compute/instruction/test_log.py b/tests/benchmark/compute/instruction/test_log.py index 956dc8acb2..5e904552d1 100644 --- a/tests/benchmark/compute/instruction/test_log.py +++ b/tests/benchmark/compute/instruction/test_log.py @@ -21,11 +21,11 @@ @pytest.mark.parametrize( "opcode", [ - pytest.param(Op.LOG0, id="log0"), - pytest.param(Op.LOG1, id="log1"), - pytest.param(Op.LOG2, id="log2"), - pytest.param(Op.LOG3, id="log3"), - pytest.param(Op.LOG4, id="log4"), + Op.LOG0, + Op.LOG1, + Op.LOG2, + Op.LOG3, + Op.LOG4, ], ) @pytest.mark.parametrize( @@ -88,11 +88,11 @@ def test_log( @pytest.mark.parametrize( "opcode", [ - pytest.param(Op.LOG0, id="log0"), - pytest.param(Op.LOG1, id="log1"), - pytest.param(Op.LOG2, id="log2"), - pytest.param(Op.LOG3, id="log3"), - pytest.param(Op.LOG4, id="log4"), + Op.LOG0, + Op.LOG1, + Op.LOG2, + Op.LOG3, + Op.LOG4, ], ) @pytest.mark.parametrize("mem_size", [0, 32, 256, 1024]) diff --git a/tests/benchmark/compute/instruction/test_system.py b/tests/benchmark/compute/instruction/test_system.py index 2c3667fdb5..87fbdcf67f 100644 --- a/tests/benchmark/compute/instruction/test_system.py +++ b/tests/benchmark/compute/instruction/test_system.py @@ -122,8 +122,17 @@ def test_create( ) if opcode == Op.CREATE2: - # For CREATE2, we 
provide an initial salt. - setup += Op.PUSH1(42) + # For CREATE2, load salt from storage (persist across outer loop calls) + # If storage is 0 (first call), use initial salt of 42. + # Stack after setup: [..., value, code_size, salt] + setup += ( + Op.SLOAD(0) # Load saved salt + + Op.DUP1 # Duplicate for check + + Op.ISZERO # Check if zero + + Op.PUSH1(42) # Default salt + + Op.MUL # 42 if zero, 0 if not + + Op.ADD # Add to get final salt (saved or 42) + ) attack_block = ( # For CREATE: @@ -133,10 +142,16 @@ def test_create( if opcode == Op.CREATE # For CREATE2: we manually push the arguments because we leverage the # return value of previous CREATE2 calls as salt for the next CREATE2 - # call. + # call. After CREATE2, save result to storage for next outer loop call. # - DUP4 is targeting the PUSH1(value) from the code_prefix. # - DUP3 is targeting the EXTCODESIZE value pushed in code_prefix. - else Op.DUP3 + Op.PUSH0 + Op.DUP4 + Op.CREATE2 + else Op.DUP3 + + Op.PUSH0 + + Op.DUP4 + + Op.CREATE2 + + Op.DUP1 + + Op.PUSH0 + + Op.SSTORE ) benchmark_test( diff --git a/tests/benchmark/compute/precompile/test_ecrecover.py b/tests/benchmark/compute/precompile/test_ecrecover.py index 63c4c72e08..22aca3b5ca 100644 --- a/tests/benchmark/compute/precompile/test_ecrecover.py +++ b/tests/benchmark/compute/precompile/test_ecrecover.py @@ -12,6 +12,7 @@ from tests.benchmark.compute.helpers import concatenate_parameters +@pytest.mark.repricing @pytest.mark.parametrize( "precompile_address,calldata", [ @@ -28,7 +29,6 @@ ] ), id="ecrecover", - marks=pytest.mark.repricing, ) ], ) diff --git a/tests/benchmark/compute/precompile/test_point_evaluation.py b/tests/benchmark/compute/precompile/test_point_evaluation.py index e179f27526..7597a9fa1d 100644 --- a/tests/benchmark/compute/precompile/test_point_evaluation.py +++ b/tests/benchmark/compute/precompile/test_point_evaluation.py @@ -13,6 +13,7 @@ from tests.cancun.eip4844_blobs.spec import Spec as BlobsSpec +@pytest.mark.repricing @pytest.mark.parametrize( "precompile_address,calldata", [ @@ -28,7 +29,6 @@ ] ), id="point_evaluation", - marks=pytest.mark.repricing, ), ], ) diff --git a/whitelist.txt b/whitelist.txt index 2cf28a9928..955a57adff 100644 --- a/whitelist.txt +++ b/whitelist.txt @@ -486,6 +486,7 @@ epilog eq ERC Erigon +errlines esbenp etc ETH @@ -761,6 +762,7 @@ makereport marcin marioevz markdownlint +markexpr master matchers mcopy @@ -847,6 +849,7 @@ ommers oneliner oob opc +opcount opcode's OpenSSL oprypin @@ -867,6 +870,7 @@ P7692 paradigmxyz param parametrization +parametrizations parametrize parametrized parametrizer From ea028c948e33ae2dce9c9d5b36f7e4dfa433a139 Mon Sep 17 00:00:00 2001 From: felix Date: Thu, 5 Feb 2026 14:27:56 +0000 Subject: [PATCH 127/154] fix(docs): add missing lines code coverage field to eip checklist (#2137) * fix(docs): docs ci build failed due to being in strict mode and emitting warning from missing field in eip-checking-template * fix: disable strict mode for building docs, only errors should lead to ci failure * fix: info warnings about missing navigation elements that spam ci output * PR feedback --- docs/navigation.md | 15 ++++++++++++++- .../eip_testing_checklist_template.md | 3 ++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/docs/navigation.md b/docs/navigation.md index ec245156d9..002b2b94aa 100644 --- a/docs/navigation.md +++ b/docs/navigation.md @@ -27,7 +27,10 @@ * [Testing Checklist Templates](writing_tests/checklist_templates/index.md) * [EIP Execution Layer Testing Checklist 
Template](writing_tests/checklist_templates/eip_testing_checklist_template.md) * [Post-mortems](writing_tests/post_mortems.md) - * [Tutorial: Adding a State Test](writing_tests/tutorials/state_transition.md) + * Tutorials + * [Adding a State Test](writing_tests/tutorials/state_transition.md) + * [Adding a Blockchain Test](writing_tests/tutorials/blockchain.md) + * [Opcode Metadata](writing_tests/opcode_metadata.md) * [Porting Legacy Tests](writing_tests/porting_legacy_tests.md) * [Filling Tests](filling_tests/index.md) * [Getting Started](filling_tests/getting_started.md) @@ -48,6 +51,7 @@ * [Blockchain Engine Tests](running_tests/test_formats/blockchain_test_engine.md) * [Blockchain Engine X Tests](running_tests/test_formats/blockchain_test_engine_x.md) * [Transaction Tests](running_tests/test_formats/transaction_test.md) + * [Blockchain Sync Tests](running_tests/test_formats/blockchain_test_sync.md) * [Common Types](running_tests/test_formats/common_types.md) * [Exceptions](running_tests/test_formats/exceptions.md) * [Hive](running_tests/hive/index.md) @@ -65,6 +69,7 @@ * [Execute Hive](./running_tests/execute/hive.md) * [Execute Remote](./running_tests/execute/remote.md) * [Execute Eth Config](./running_tests/execute/eth_config.md) + * [Transaction Metadata](./running_tests/execute/transaction_metadata.md) * [Useful Pytest Options](running_tests/useful_pytest_options.md) * [Developer Doc](dev/index.md) * [Managing Configurations](dev/configurations.md) @@ -75,9 +80,14 @@ * [Logging](dev/logging.md) * [Enabling Precommit Checks](dev/precommit.md) * [Running Github Actions Locally](dev/test_actions_locally.md) + * [Dependencies and Packaging](dev/deps_and_packaging.md) * [Changelog](CHANGELOG.md) + * [Changelog Section Template](changelog_section_template.md) * [Library Reference](library/index.md) * [EEST CLI Tools](library/cli/index.md) + * [eest](library/cli/eest.md) + * [evm_bytes](library/cli/evm_bytes.md) + * [extract_config](library/cli/extract_config.md) * [Execution Testing Base Types Package](library/execution_testing_base_types.md) * [Execution Testing Exceptions Package](library/execution_testing_exceptions.md) * [Execution Testing Fixtures Package](library/execution_testing_fixtures.md) @@ -88,3 +98,6 @@ * [Execution Testing VM Package](library/execution_testing_vm.md) * [Execution Testing Client CLIs Package](library/execution_testing_client_clis.md) * [Pytest Plugins](library/pytest_plugins/index.md) + * [Filler](library/pytest_plugins/filler.md) + * [Forks](library/pytest_plugins/forks.md) + * [Spec Version Checker](library/pytest_plugins/spec_version_checker.md) diff --git a/docs/writing_tests/checklist_templates/eip_testing_checklist_template.md b/docs/writing_tests/checklist_templates/eip_testing_checklist_template.md index f6ecf8d8d9..57520b2a4f 100644 --- a/docs/writing_tests/checklist_templates/eip_testing_checklist_template.md +++ b/docs/writing_tests/checklist_templates/eip_testing_checklist_template.md @@ -18,7 +18,8 @@ Depending on the changes introduced by an EIP, the following template is the min | ID | Description | Status | Tests | | ------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------ | ----- | | `general/code_coverage/eels` | Run produced tests against [EELS](https://github.com/ethereum/execution-specs) and verify that line code coverage of new added lines for 
the EIP is 100%, with only exceptions being unreachable code lines. | | | -| `general/code_coverage/test_coverage` | Run coverage on the test code itself (as a basic logic sanity check), i.e., `uv run fill --cov tests`. | | | +| `general/code_coverage/test_coverage` | Run coverage on the test code itself (as a basic logic sanity check), i.e., `uv run fill --cov tests`. | | | +| `general/code_coverage/missed_lines` | Document any lines missed in coverage reports and explain why they are acceptable (e.g., unreachable code, general infrastructure not related to the EIP). | | | | `general/code_coverage/second_client` | Optional - Run against a second client and verify sufficient code coverage over new code added for the EIP. | | | #### Fuzzing From f74aa1c5c23802bbdbf644fcb6400286c68c758e Mon Sep 17 00:00:00 2001 From: spencer Date: Thu, 5 Feb 2026 15:56:35 +0000 Subject: [PATCH 128/154] test(benchmark): skip test_blockhash pending investigation (#2145) --- tests/benchmark/compute/instruction/test_block_context.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/benchmark/compute/instruction/test_block_context.py b/tests/benchmark/compute/instruction/test_block_context.py index d31dba05c0..c2a1e6cbbc 100644 --- a/tests/benchmark/compute/instruction/test_block_context.py +++ b/tests/benchmark/compute/instruction/test_block_context.py @@ -48,6 +48,7 @@ def test_block_context_ops( ) +@pytest.mark.skip(reason="Temporarily disabled pending investigation") @pytest.mark.repricing @pytest.mark.parametrize( "index,chain_length", From e7d8ccf45a0d523623e7d478d6e7d60c863dcbfa Mon Sep 17 00:00:00 2001 From: danceratopz Date: Thu, 5 Feb 2026 17:08:00 +0100 Subject: [PATCH 129/154] chore(ci): only fill native test formats in tox's `pypy3` env (#2116) * chore(ci): don't fill derived test formats in the pypy3 tox env This means that only the native test format used by the test will be generated; all derived fixture formats get skipped, e.g., `blockchain_test_engine` and `blockchain_test_from_state_test` won't get generated. 
* chore(ci): only fill amsterdam in pypy3 tox env * chore(ci,tooling): enable --until Amsterdam for pypy3 --- tox.ini | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tox.ini b/tox.ini index 1eb80d0d5e..8ecc63d37c 100644 --- a/tox.ini +++ b/tox.ini @@ -91,7 +91,7 @@ commands = description = Fill the tests using EELS (with Python) commands = fill \ - -m "not slow and not zkevm and not benchmark" \ + -m "not slow and not benchmark" \ -n auto --maxprocesses 10 --dist=loadgroup \ --skip-index \ --cov-config=pyproject.toml \ @@ -123,12 +123,12 @@ commands = --tb=no \ --show-capture=no \ --disable-warnings \ - -m "not slow and not zkevm and not benchmark" \ + -m "not slow and not benchmark and not derived_test" \ -n auto --maxprocesses 7 --dist=loadgroup \ --basetemp="{temp_dir}/pytest" \ --log-to "{toxworkdir}/logs" \ --clean \ - --until BPO4 \ + --until Amsterdam \ {posargs} \ tests From 35f83db2f85e87a45ae4e9b13032d84b2e8d59a8 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 5 Feb 2026 20:22:43 +0100 Subject: [PATCH 130/154] fix(tests/benchmark): Max size contract deployment execution (#2149) * fix(tests/benchmark): Max size contract deployment execution * fix * fix(test-fill): cache address, don't re-calculate every time; very expensive --------- Co-authored-by: fselmo --- .../tools/tools_code/generators.py | 29 ++++-- tests/benchmark/compute/helpers.py | 95 ++++++++++++++----- .../scenario/test_unchunkified_bytecode.py | 6 +- 3 files changed, 94 insertions(+), 36 deletions(-) diff --git a/packages/testing/src/execution_testing/tools/tools_code/generators.py b/packages/testing/src/execution_testing/tools/tools_code/generators.py index 78dea8ed00..924245842d 100644 --- a/packages/testing/src/execution_testing/tools/tools_code/generators.py +++ b/packages/testing/src/execution_testing/tools/tools_code/generators.py @@ -1,7 +1,7 @@ """Code generating classes and functions.""" from dataclasses import dataclass -from typing import Any, Generator, List, Self, SupportsBytes, Tuple, Type +from typing import Any, Dict, Generator, List, Self, SupportsBytes, Tuple, Type from pydantic import Field @@ -807,6 +807,16 @@ def tx_iterations_by_gas_limit( remaining_gas -= best_iterations_gas start_iteration += best_iterations + def _intrinsic_cost_is_constant( + self, + intrinsic_cost_kwargs: Dict[str, Any], + ) -> bool: + """If none of the kwarg values is callable, return True.""" + for _, value in intrinsic_cost_kwargs.items(): + if callable(value): + return False + return True + def tx_iterations_by_total_iteration_count( self, *, @@ -828,14 +838,19 @@ def tx_iterations_by_total_iteration_count( yield total_iterations return remaining_iterations = total_iterations + best_iterations: int | None = None + constant_intrinsic_gas_cost = self._intrinsic_cost_is_constant( + intrinsic_cost_kwargs + ) while remaining_iterations > 0: - best_iterations, _ = self._binary_search_iterations( - fork=fork, - gas_limit=gas_limit_cap, - start_iteration=start_iteration, - **intrinsic_cost_kwargs, - ) + if best_iterations is None or not constant_intrinsic_gas_cost: + best_iterations, _ = self._binary_search_iterations( + fork=fork, + gas_limit=gas_limit_cap, + start_iteration=start_iteration, + **intrinsic_cost_kwargs, + ) if best_iterations >= remaining_iterations: yield remaining_iterations return diff --git a/tests/benchmark/compute/helpers.py b/tests/benchmark/compute/helpers.py index f0a32cedd0..1b77386465 100644 --- a/tests/benchmark/compute/helpers.py +++ b/tests/benchmark/compute/helpers.py 
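The generator change above avoids repeating the binary search when none of the intrinsic-cost kwargs is callable. A generic, standalone sketch of that pattern, with invented function and parameter names:

```python
def is_constant(kwargs: dict) -> bool:
    return not any(callable(v) for v in kwargs.values())


def plan_iterations(total, search, **kwargs):
    remaining, start, best = total, 0, None
    while remaining > 0:
        if best is None or not is_constant(kwargs):
            best = search(start=start, **kwargs)  # expensive; reused when constant
        step = min(best, remaining)
        yield step
        start += step
        remaining -= step


assert list(plan_iterations(10, lambda start, cap: cap, cap=4)) == [4, 4, 2]
```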
@@ -227,6 +227,9 @@ class MaxSizedContractInitcode(FixedIterationsBytecode): fork's limits. """ + _cached_address: Address + """Cached address to avoid expensive recomputation.""" + def __new__(cls, *, pre: Alloc, fork: Fork) -> Self: """ Create a new MaxSizedContractInitcode instance. @@ -292,19 +295,21 @@ def __new__(cls, *, pre: Alloc, fork: Fork) -> Self: cleanup=cleanup, iteration_count=iteration_count, ) + # Cache the address to avoid expensive recomputation + instance._cached_address = compute_deterministic_create2_address( + salt=0, + initcode=Initcode(deploy_code=instance), + fork=fork, + ) deployed_address = pre.deterministic_deploy_contract( deploy_code=instance ) - assert deployed_address == instance.address(fork=fork) + assert deployed_address == instance._cached_address return instance - def address(self, *, fork: Fork) -> Address: + def address(self) -> Address: """Get the deterministic address of the initcode.""" - return compute_deterministic_create2_address( - salt=0, - initcode=Initcode(deploy_code=self), - fork=fork, - ) + return self._cached_address class MaxSizedContractFactory(IteratingBytecode): @@ -322,6 +327,9 @@ class MaxSizedContractFactory(IteratingBytecode): initcode: MaxSizedContractInitcode """The initcode used to deploy maximum-sized contracts via CREATE2.""" + _cached_address: Address + """Cached address to avoid expensive recomputation.""" + def __new__(cls, *, pre: Alloc, fork: Fork) -> Self: """ Create a new MaxSizedContractFactory instance. @@ -337,7 +345,7 @@ def __new__(cls, *, pre: Alloc, fork: Fork) -> Self: """ initcode = MaxSizedContractInitcode(pre=pre, fork=fork) - initcode_address = initcode.address(fork=fork) + initcode_address = initcode.address() setup = ( Op.EXTCODECOPY( address=initcode_address, @@ -381,10 +389,16 @@ def __new__(cls, *, pre: Alloc, fork: Fork) -> Self: cleanup=cleanup, ) instance.initcode = initcode + # Cache the address to avoid expensive recomputation + instance._cached_address = compute_deterministic_create2_address( + salt=0, + initcode=Initcode(deploy_code=instance), + fork=fork, + ) deployed_address = pre.deterministic_deploy_contract( deploy_code=instance ) - assert deployed_address == instance.address(fork=fork) + assert deployed_address == instance._cached_address return instance def transactions_by_total_contract_count( @@ -400,33 +414,64 @@ def transactions_by_total_contract_count( given number of contracts, each capped tx properly capped by the gas limit cap of the fork. """ - to = self.address(fork=fork) + to = self.address() + + # Use a sensible hardcoded maximum for the calldata, to avoid + # binary searching. 
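The `_cached_address` fields introduced above follow a familiar memoization shape: derive the deterministic address once, reuse it afterwards. A stand-in sketch of the pattern only (the digest here is a placeholder, not the real CREATE2 derivation):

```python
from functools import cached_property
import hashlib


class DeployedArtifact:
    """Illustrates compute-once caching; not the real CREATE2 math."""

    def __init__(self, initcode: bytes, salt: int = 0):
        self.initcode = initcode
        self.salt = salt

    @cached_property
    def address(self) -> str:
        # Placeholder digest (sha256 rather than keccak) keeps the sketch
        # self-contained; the point is the value is derived once, then reused.
        inner = hashlib.sha256(self.initcode).digest()
        digest = hashlib.sha256(
            b"\xff" + self.salt.to_bytes(32, "big") + inner
        ).digest()
        return "0x" + digest[-20:].hex()


artifact = DeployedArtifact(b"\x60\x00")
assert artifact.address == artifact.address  # computed once, cached thereafter
```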
+ max_number = (2 ** (contract_count.bit_length() + 1)) - 1 + calldata_max = Hash(max_number) + Hash(max_number) def calldata(iteration_count: int, start_iteration: int) -> bytes: index_end = iteration_count + start_iteration - 1 return Hash(start_iteration) + Hash(index_end) - yield from self.transactions_by_total_iteration_count( + start_iteration: int = contract_start_index + + tx_gas_limit: int | None = None + tx_gas_cost: int | None = None + last_iteration_count: int = 0 + + for iteration_count in self.tx_iterations_by_total_iteration_count( fork=fork, total_iterations=contract_count, - start_iteration=contract_start_index, - sender=sender, - to=to, - calldata=calldata, - ) + start_iteration=start_iteration, + calldata=calldata_max, + ): + if ( + tx_gas_limit is None + or tx_gas_cost is None + or iteration_count != last_iteration_count + ): + tx_gas_limit = self.tx_gas_limit_by_iteration_count( + fork=fork, + iteration_count=iteration_count, + start_iteration=start_iteration, + calldata=calldata_max, + ) + tx_gas_cost = self.tx_gas_cost_by_iteration_count( + fork=fork, + iteration_count=iteration_count, + start_iteration=start_iteration, + calldata=calldata_max, + ) + yield TransactionWithCost( + to=to, + gas_limit=tx_gas_limit, + sender=sender, + gas_cost=tx_gas_cost, + data=calldata(iteration_count, start_iteration), + ) + start_iteration += iteration_count + last_iteration_count = iteration_count - def address(self, *, fork: Fork) -> Address: - """Get the deterministic address of the initcode.""" - return compute_deterministic_create2_address( - salt=0, - initcode=Initcode(deploy_code=self), - fork=fork, - ) + def address(self) -> Address: + """Get the deterministic address of the factory contract.""" + return self._cached_address - def created_contract_address(self, *, fork: Fork, salt: int) -> Address: + def created_contract_address(self, *, salt: int) -> Address: """Get the deterministic address of the created contract.""" return compute_create2_address( - address=self.address(fork=fork), + address=self.address(), salt=salt, initcode=self.initcode, ) diff --git a/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py b/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py index 178af76dfc..4ca7c2ead0 100644 --- a/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py +++ b/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py @@ -51,7 +51,7 @@ def test_unchunkified_bytecode( # Create the max-sized fork-dependent contract factory. max_sized_contract_factory = MaxSizedContractFactory(pre=pre, fork=fork) - factory_address = max_sized_contract_factory.address(fork=fork) + factory_address = max_sized_contract_factory.address() initcode = max_sized_contract_factory.initcode # Prepare the attack iterating bytecode. 
@@ -148,9 +148,7 @@ def calldata(iteration_count: int, start_iteration: int) -> bytes: post = {} for i in range(num_contracts): deployed_contract_address = ( - max_sized_contract_factory.created_contract_address( - fork=fork, salt=i - ) + max_sized_contract_factory.created_contract_address(salt=i) ) post[deployed_contract_address] = Account(nonce=1) From b206688babdb3bd2cb58bee5c73110972b9fc143 Mon Sep 17 00:00:00 2001 From: felipe Date: Thu, 5 Feb 2026 14:24:09 -0700 Subject: [PATCH 131/154] fix(test-ci): fix issues with recent changes to checklist (#2151) --- .github/workflows/test-checklist.yaml | 37 +++++++++++++++++++ .../checklists/eip_checklist.py | 5 +++ .../checklists/eip_checklist.pyi | 1 + 3 files changed, 43 insertions(+) create mode 100644 .github/workflows/test-checklist.yaml diff --git a/.github/workflows/test-checklist.yaml b/.github/workflows/test-checklist.yaml new file mode 100644 index 0000000000..91ab9d367b --- /dev/null +++ b/.github/workflows/test-checklist.yaml @@ -0,0 +1,37 @@ +name: Test checklist consistency + +on: + push: + branches: + - master + - mainnet + paths: &checklist_paths + - "docs/writing_tests/checklist_templates/**" + - "packages/testing/src/execution_testing/checklists/eip_checklist.py" + - "packages/testing/src/execution_testing/checklists/eip_checklist.pyi" + - "packages/testing/src/execution_testing/checklists/tests/test_checklist_template_consistency.py" + - ".github/workflows/test-checklist.yaml" + pull_request: + paths: *checklist_paths + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref || github.run_id }} + cancel-in-progress: true + +jobs: + checklist-consistency: + name: Test checklist template consistency + runs-on: ubuntu-latest + steps: + - name: Checkout ethereum/execution-specs + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + - name: Install uv ${{ vars.UV_VERSION }} and python ${{ vars.DEFAULT_PYTHON_VERSION }} + uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + version: ${{ vars.UV_VERSION }} + python-version: ${{ vars.DEFAULT_PYTHON_VERSION }} + - name: Run checklist consistency test + run: uvx tox -e tests_pytest_py3 -- -k test_checklist_template_consistency diff --git a/packages/testing/src/execution_testing/checklists/eip_checklist.py b/packages/testing/src/execution_testing/checklists/eip_checklist.py index df0f7f700f..711a831340 100644 --- a/packages/testing/src/execution_testing/checklists/eip_checklist.py +++ b/packages/testing/src/execution_testing/checklists/eip_checklist.py @@ -133,6 +133,11 @@ class TestCoverage(ChecklistItem): pass + class MissedLines(ChecklistItem): + """Document missed lines in coverage reports.""" + + pass + class SecondClient(ChecklistItem): """Second client code coverage.""" diff --git a/packages/testing/src/execution_testing/checklists/eip_checklist.pyi b/packages/testing/src/execution_testing/checklists/eip_checklist.pyi index 5219add093..6d350cf4dd 100644 --- a/packages/testing/src/execution_testing/checklists/eip_checklist.pyi +++ b/packages/testing/src/execution_testing/checklists/eip_checklist.pyi @@ -107,6 +107,7 @@ class EIPChecklist: class General(_CallableChecklistItem): class CodeCoverage(_CallableChecklistItem): Eels: _CallableChecklistItem + MissedLines: _CallableChecklistItem SecondClient: _CallableChecklistItem TestCoverage: _CallableChecklistItem From 8a5a2d5123bb89329f2d0a02ed30e8a3b0ea0574 Mon Sep 17 00:00:00 2001 From: Guruprasad Kamath 
<48196632+gurukamath@users.noreply.github.com> Date: Fri, 6 Feb 2026 01:36:47 +0100 Subject: [PATCH 132/154] feat(tests): BAL tests for 7702 delegation reset, create and access (#2097) * feat(tests): test 7702 double auth reset for self_funded tx test_bal_7702_double_auth_reset_minimal and test_bal_7702_double_auth_reset in the test cases markdown are essentially the same test with varying parameters. The commit de-duplicates the entries and implements the missing case * feat(tests): test 7702 delegation with CREATE * feat(tests): test 7702 delegation access * fix(tests): post review updates * feat(test-vm): Update MSTORE macro to use gas-accounting metadata * chore(test): remove duplicated test case covered under test_bal_call_7702_delegation_and_oog --------- Co-authored-by: fselmo --- docs/CHANGELOG.md | 1 + .../src/execution_testing/vm/opcodes.py | 16 +- .../src/execution_testing/vm/tests/test_vm.py | 49 +++++ .../test_block_access_lists_eip7702.py | 202 ++++++++++++++++-- .../test_cases.md | 7 +- 5 files changed, 255 insertions(+), 20 deletions(-) diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 0f2bab30b2..c6f5f82ba9 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -40,6 +40,7 @@ Test fixtures for use by clients are available for each release on the [Github r - ✨ Add missing benchmark configurations / opcode to benchmark tests for repricing analysis([#2006](https://github.com/ethereum/execution-specs/pull/2006)). - ✨ Port STATICCALL to CALL tests with zero and non-zero value transfer from `tests/static`, extending coverage with `pytest.mark.with_all_precompiles` ([#1960](https://github.com/ethereum/execution-specs/pull/1960)). - ✨ Add BAL tests that dequeue EIP-7251 consolidation requests. ([#2076](https://github.com/ethereum/execution-specs/pull/2076)). +- ✨ Add BAL tests for handling 7702 delegation reset and delegated create. ([#2097](https://github.com/ethereum/execution-specs/pull/2097)). ## [v5.4.0](https://github.com/ethereum/execution-spec-tests/releases/tag/v5.4.0) - 2025-12-07 diff --git a/packages/testing/src/execution_testing/vm/opcodes.py b/packages/testing/src/execution_testing/vm/opcodes.py index 31f7cf10e9..79d35860dd 100644 --- a/packages/testing/src/execution_testing/vm/opcodes.py +++ b/packages/testing/src/execution_testing/vm/opcodes.py @@ -5694,14 +5694,25 @@ def _mstore_operation( data = data.to_bytes(32, "big") data = to_bytes(data) # type: ignore bytecode = Bytecode() + current_memory_size = 0 for i in range(0, len(data), 32): chunk = data[i : i + 32] + new_memory_size = offset + 32 if len(chunk) == 32: - bytecode += Opcodes.MSTORE(offset, chunk) + bytecode += Opcodes.MSTORE( + offset, + chunk, + old_memory_size=current_memory_size, + new_memory_size=new_memory_size, + ) else: # We need to MLOAD the existing data at the offset and then # do a bitwise OR with the new data to store it in memory. - bytecode += Opcodes.MLOAD(offset) + bytecode += Opcodes.MLOAD( + offset, + old_memory_size=current_memory_size, + new_memory_size=new_memory_size, + ) # Create a mask to zero out the leftmost bytes of # the existing data. 
mask_size = 32 - len(chunk) @@ -5711,6 +5722,7 @@ def _mstore_operation( bytecode += Opcodes.OR bytecode += _stack_argument_to_bytecode(offset) bytecode += Opcodes.MSTORE + current_memory_size = new_memory_size offset += len(chunk) return bytecode diff --git a/packages/testing/src/execution_testing/vm/tests/test_vm.py b/packages/testing/src/execution_testing/vm/tests/test_vm.py index 837de4b25e..85fe28fed5 100644 --- a/packages/testing/src/execution_testing/vm/tests/test_vm.py +++ b/packages/testing/src/execution_testing/vm/tests/test_vm.py @@ -240,6 +240,55 @@ def test_macros() -> None: assert opcode != Om.OOG +@pytest.mark.parametrize( + "data,offset", + [ + pytest.param(b"", 0, id="empty"), + pytest.param(bytes(range(32)), 0, id="exactly_32_bytes_offset_0"), + pytest.param(bytes(range(64)), 0, id="exactly_64_bytes_offset_0"), + pytest.param(bytes(range(12)), 0, id="partial_12_bytes_offset_0"), + pytest.param(bytes(range(33)), 0, id="33_bytes_offset_0"), + pytest.param(bytes(range(63)), 0, id="63_bytes_offset_0"), + pytest.param(bytes(range(32)), 32, id="exactly_32_bytes_offset_32"), + pytest.param(bytes(range(12)), 64, id="partial_12_bytes_offset_64"), + ], +) +def test_mstore_macro_memory_metadata(data: bytes, offset: int) -> None: + """Test that Om.MSTORE sets memory size metadata on emitted opcodes.""" + bytecode = Om.MSTORE(data, offset) + if len(data) == 0: + assert len(bytecode.opcode_list) == 0 + return + + # Collect all memory metadata from the opcode list + memory_opcodes = [ + op + for op in bytecode.opcode_list + if op.metadata.get("new_memory_size", 0) > 0 + ] + + # At least one opcode must carry memory expansion metadata + assert len(memory_opcodes) > 0, "No opcodes with memory metadata found" + + # The maximum new_memory_size should reflect the full data stored + num_chunks = (len(data) + 31) // 32 + expected_final_memory_size = offset + num_chunks * 32 + max_new_memory = max( + op.metadata["new_memory_size"] for op in memory_opcodes + ) + assert max_new_memory == expected_final_memory_size + + # First memory-expanding opcode should have old_memory_size=0 + assert memory_opcodes[0].metadata["old_memory_size"] == 0 + + # Each subsequent memory opcode should chain: old = previous new + for i in range(1, len(memory_opcodes)): + assert ( + memory_opcodes[i].metadata["old_memory_size"] + == memory_opcodes[i - 1].metadata["new_memory_size"] + ) + + @pytest.mark.parametrize( "bytecode,expected_popped_items,expected_pushed_items," "expected_max_stack_height,expected_min_stack_height", diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py index 60676948a5..5449210dd8 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_block_access_lists_eip7702.py @@ -14,9 +14,15 @@ Block, BlockAccessListExpectation, BlockchainTestFiller, + Fork, + Initcode, Op, Transaction, Withdrawal, + compute_create_address, +) +from execution_testing import ( + Macros as Om, ) from ...prague.eip7702_set_code_tx.spec import Spec as Spec7702 @@ -690,9 +696,17 @@ def test_bal_7702_null_address_delegation_no_code_change( ) +@pytest.mark.parametrize( + "self_funded", + [ + pytest.param(False, id="sponsored"), + pytest.param(True, id="self_funded"), + ], +) def test_bal_7702_double_auth_reset( pre: Alloc, blockchain_test: BlockchainTestFiller, + self_funded: bool, ) -> None: """ Ensure 
BAL captures the net code change when multiple authorizations @@ -702,6 +716,8 @@ def test_bal_7702_double_auth_reset( 1. First auth sets delegation to CONTRACT_A 2. Second auth resets delegation to empty (address 0) + Scenario where the transaction is sponsored and self-funded are covered. + The BAL should show the NET change (empty -> empty), not intermediate states. This is a regression test for the bug where the BAL showed the first auth's code but the final state was empty. @@ -716,7 +732,7 @@ def test_bal_7702_double_auth_reset( # 1. First sets delegation to contract_a # 2. Second resets to empty tx = Transaction( - sender=relayer, + sender=alice if self_funded else relayer, to=bob, value=10, gas_limit=1_000_000, @@ -724,17 +740,27 @@ def test_bal_7702_double_auth_reset( authorization_list=[ AuthorizationTuple( address=contract_a, - nonce=0, + nonce=1 if self_funded else 0, signer=alice, ), AuthorizationTuple( address=0, # Reset to empty - nonce=1, + nonce=2 if self_funded else 1, signer=alice, ), ], ) + alice_nonce = 3 if self_funded else 2 + relayer_nonce = 1 if not self_funded else 0 + relayer_bal_expectation = ( + BalAccountExpectation( + nonce_changes=[BalNonceChange(block_access_index=1, post_nonce=1)] + ) + if not self_funded + else None + ) + blockchain_test( pre=pre, blocks=[ @@ -745,7 +771,8 @@ def test_bal_7702_double_auth_reset( alice: BalAccountExpectation( nonce_changes=[ BalNonceChange( - block_access_index=1, post_nonce=2 + block_access_index=1, + post_nonce=alice_nonce, ) ], code_changes=[], @@ -757,22 +784,16 @@ def test_bal_7702_double_auth_reset( ) ] ), - relayer: BalAccountExpectation( - nonce_changes=[ - BalNonceChange( - block_access_index=1, post_nonce=1 - ) - ], - ), + relayer: relayer_bal_expectation, contract_a: None, } ), ) ], post={ - alice: Account(nonce=2, code=b""), # Final code is empty + alice: Account(nonce=alice_nonce, code=b""), # Final code is empty bob: Account(balance=10), - relayer: Account(nonce=1), + relayer: Account(nonce=relayer_nonce), }, ) @@ -1113,3 +1134,158 @@ def test_bal_withdrawal_to_7702_delegation( blocks=[block], post=post, ) + + +@pytest.mark.with_all_create_opcodes +def test_bal_7702_delegated_create( + fork: Fork, + pre: Alloc, + blockchain_test: BlockchainTestFiller, + create_opcode: Op, +) -> None: + """ + BAL tracks EIP-7702 delegation indicator write and contract creation. + + Alice sends a type-4 (7702) tx authorizing herself to delegate to + Deployer code which executes CREATE. 
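Several of these BAL expectations compare account code against the EIP-7702 delegation designation, which the test-case table describes as `0xef0100||address(delegate)`. As a quick reference sketch (helper name invented here):

```python
def delegation_designation(delegate: bytes) -> bytes:
    """0xef0100 followed by the 20-byte delegate address."""
    assert len(delegate) == 20, "delegate must be a 20-byte address"
    return bytes.fromhex("ef0100") + delegate


code = delegation_designation(bytes.fromhex("11" * 20))
assert len(code) == 23 and code.hex().startswith("ef0100")
```

Authorizing the zero address, as in the reset test above, instead leaves the account with empty code rather than a designation.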
+ """ + # Alice (EOA) + alice_initial_balance = 10**18 # 1 ETH default + alice = pre.fund_eoa(amount=alice_initial_balance) + + # Simple init code that deploys STOP + deploy_code = Op.STOP + init_code = Initcode(deploy_code=deploy_code) + + # Deployer code: CREATE/CREATE2 and store result in slot 0 + deployer_code = Om.MSTORE(init_code) + Op.SSTORE( + 0, + create_opcode( + offset=0, + size=len(init_code), + init_code_size=len(init_code), + ), + original_value=0xDEAD, + ) + + deployer_initial_balance = 10**18 # 1 ETH default + # Deploy factory + deployer = pre.deploy_contract( + code=deployer_code, + balance=deployer_initial_balance, + storage={0x00: 0xDEAD}, # Initial value to prove SSTORE works + ) + + # Calculate what the contract address WOULD be + create_contract_address = compute_create_address( + address=deployer, + initcode=init_code, + nonce=1, + opcode=create_opcode, + ) + + tx = Transaction( + sender=alice, + to=deployer, + gas_limit=1_000_000, + authorization_list=[ + AuthorizationTuple( + address=deployer, + nonce=1, + signer=alice, + ) + ], + ) + + # Calculate gas cost + intrinsic_gas_calculator = fork.transaction_intrinsic_cost_calculator() + gsc = fork.gas_costs() + max_refund_quotient = fork.max_refund_quotient() + gas_used = ( + intrinsic_gas_calculator( + return_cost_deducted_prior_execution=True, + authorization_list_or_count=tx.authorization_list, + ) + + deployer_code.gas_cost(fork) + + init_code.execution_gas + + gsc.G_CODE_DEPOSIT_BYTE * len(deploy_code) + ) + + refund_counter = gsc.R_AUTHORIZATION_EXISTING_AUTHORITY + + effective_refund = min(refund_counter, gas_used // max_refund_quotient) + gas_used_post_refund = gas_used - effective_refund + + assert tx.max_fee_per_gas is not None + alice_expected_balance = alice_initial_balance - ( + gas_used_post_refund * tx.max_fee_per_gas + ) + + block = Block( + txs=[tx], + expected_block_access_list=BlockAccessListExpectation( + account_expectations={ + alice: BalAccountExpectation( + nonce_changes=[ + # sending the tx + delegation processing + BalNonceChange(block_access_index=1, post_nonce=2) + ], + balance_changes=[ + BalBalanceChange( + block_access_index=1, + post_balance=alice_expected_balance, + ) + ], + code_changes=[ + BalCodeChange( + block_access_index=1, + new_code=Spec7702.delegation_designation(deployer), + ), + ], + ), + deployer: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=2) + ], + # Storage changes: slot 0 = 0xDEAD → contract_address + # since CREATE returned contract_address + storage_changes=[ + BalStorageSlot( + slot=0x00, + slot_changes=[ + BalStorageChange( + block_access_index=1, + post_value=create_contract_address, + ) + ], + ) + ], + ), + create_contract_address: BalAccountExpectation( + nonce_changes=[ + BalNonceChange(block_access_index=1, post_nonce=1) + ], + code_changes=[ + BalCodeChange(block_access_index=1, new_code=Op.STOP) + ], + ), + } + ), + ) + + blockchain_test( + pre=pre, + blocks=[block], + post={ + alice: Account( + nonce=2, + balance=alice_expected_balance, + code=Spec7702.delegation_designation(deployer), + ), + deployer: Account( + nonce=2, + storage={0x00: create_contract_address}, + ), + create_contract_address: Account(nonce=1, code=Op.STOP), + }, + ) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index 3aa38e0eae..d07ae30214 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ 
b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -37,7 +37,7 @@ | `test_bal_create2_to_A_write_then_selfdestruct` | BAL records balance change for A and storage access even if a write occurred (no persistent change) | Tx0: Alice sends ETH to **A**. Tx1: Deployer `CREATE2` contract **at A**; contract does `SSTORE(B, v)` (optionally `SLOAD(B)`), then `SELFDESTRUCT(beneficiary=Y)` in the same tx. | BAL **MUST** include **A** with `balance_changes` (Tx0 fund; Tx1 outflow to `Y`). BAL **MUST** include **B** as `StorageKey` accessed, and **MUST NOT** include **B** under `storage_changes` (ephemeral write discarded because the contract was created and destroyed in the same tx). | 🟡 Planned | | `test_bal_precompile_funded` | BAL records precompile value transfer with or without balance change | Alice sends value to precompile (all precompiles) via direct transaction. Parameterized: (1) with value (1 ETH), (2) without value (0 ETH). | For with_value: BAL **MUST** include precompile with `balance_changes`. For no_value: BAL **MUST** include precompile with empty `balance_changes`. No `storage_changes` or `code_changes` in either case. | ✅ Completed | | `test_bal_precompile_call` | BAL records precompile when called via contract | Alice calls Oracle contract which calls precompile (all precompiles) via CALL opcode with 0 ETH | BAL **MUST** include Alice with `nonce_changes`, Oracle with empty changes, and precompile with empty changes. No `balance_changes`, `storage_changes`, or `code_changes` for precompile. | ✅ Completed | -| `test_bal_7702_delegated_create` | BAL tracks EIP-7702 delegation indicator write and contract creation | Alice sends a type-4 (7702) tx authorizing herself to delegate to `Deployer` code which executes `CREATE` | BAL MUST include for **Alice**: `code_changes` (delegation indicator), `nonce_changes` (increment from 7702 processing), and `balance_changes` (post-gas). For **Child**: `code_changes` (runtime bytecode) and `nonce_changes = 1`. | 🟡 Planned | +| `test_bal_7702_delegated_create` | BAL tracks EIP-7702 delegation indicator write and contract creation | Alice sends a type-4 (7702) tx authorizing herself to delegate to `Deployer` code which executes `CREATE` | BAL MUST include for **Alice**: `code_changes` (delegation indicator), `nonce_changes` (increment from 7702 processing), and `balance_changes` (post-gas). For **Child**: `code_changes` (runtime bytecode) and `nonce_changes = 1`. | ✅ Completed | | `test_bal_7702_delegation_create` | Ensure BAL captures creation of EOA delegation | Alice authorizes delegation to contract `Oracle`. Transaction sends 10 wei to Bob. Two variants: (1) Self-funded: Alice sends 7702 tx herself. (2) Sponsored: `Relayer` sends 7702 tx on Alice's behalf. | BAL **MUST** include Alice: `code_changes` (delegation designation `0xef0100\|\|address(Oracle)`),`nonce_changes` (increment). Bob: `balance_changes` (receives 10 wei). For sponsored variant, BAL **MUST** also include `Relayer`:`nonce_changes`.`Oracle` **MUST NOT** be present in BAL - the account is never accessed. | ✅ Completed | | `test_bal_7702_delegation_update` | Ensure BAL captures update of existing EOA delegation | Alice first delegates to `Oracle1`, then in second tx updates delegation to `Oracle2`. Each transaction sends 10 wei to Bob. Two variants: (1) Self-funded: Alice sends both 7702 txs herself. (2) Sponsored: `Relayer` sends both 7702 txs on Alice's behalf. 
| BAL **MUST** include Alice: first tx has `code_changes` (delegation designation `0xef0100\|\|address(Oracle1)`),`nonce_changes`. Second tx has`code_changes` (delegation designation `0xef0100\|\|address(Oracle2)`),`nonce_changes`. Bob:`balance_changes` (receives 10 wei on each tx). For sponsored variant, BAL **MUST** also include `Relayer`:`nonce_changes` for both transactions. `Oracle1` and `Oracle2` **MUST NOT** be present in BAL - accounts are never accessed. | ✅ Completed | | `test_bal_7702_delegation_clear` | Ensure BAL captures clearing of EOA delegation | Alice first delegates to `Oracle`, then in second tx clears delegation by authorizing to `0x0` address. Each transaction sends 10 wei to Bob. Two variants: (1) Self-funded: Alice sends both 7702 txs herself. (2) Sponsored: `Relayer` sends both 7702 txs on Alice's behalf. | BAL **MUST** include Alice: first tx has `code_changes` (delegation designation `0xef0100\|\|address(Oracle)`), `nonce_changes`. Second tx has `code_changes` (empty code - delegation cleared), `nonce_changes`. Bob: `balance_changes` (receives 10 wei on each tx). For sponsored variant, BAL **MUST** also include `Relayer`: `nonce_changes` for both transactions. `Oracle` and `0x0` address **MUST NOT** be present in BAL - accounts are never accessed. | ✅ Completed | @@ -46,7 +46,7 @@ | `test_bal_7702_invalid_chain_id_authorization` | Ensure BAL handles failed authorization due to wrong chain id | `Relayer` sends sponsored transaction to Bob (10 wei transfer succeeds) but Alice's authorization to delegate to `Oracle` uses incorrect chain id, causing authorization failure before account access | BAL **MUST** include Bob with `balance_changes` (receives 10 wei), Relayer with `nonce_changes`. **MUST NOT** include Alice (authorization fails before loading account) or `Oracle` (authorization failed, no delegation) | ✅ Completed | | `test_bal_7702_delegated_via_call_opcode` | Ensure BAL captures delegation target when a contract uses *CALL opcodes to call a delegated account | Pre-deployed contract `Alice` delegated to `Oracle`. `Caller` contract uses CALL/CALLCODE/DELEGATECALL/STATICCALL to call `Alice`. Bob sends transaction to `Caller`. | BAL **MUST** include Bob: `nonce_changes`. `Caller`: empty changes (account access). `Alice`: empty changes (account access - delegated account being called). `Oracle`: empty changes (delegation target access). | ✅ Completed | | `test_bal_7702_null_address_delegation` | Ensure BAL does not record spurious code changes for net-zero code operations | Alice sends transaction with authorization delegating to NULL_ADDRESS (0x0), which sets code to `b""` on an account that already has `b""` code. Transaction sends 10 wei to Bob. | BAL **MUST** include Alice with `nonce_changes` (tx nonce + auth nonce increment) but **MUST NOT** include `code_changes` (setting `b"" -> b""` is net-zero and filtered out). Bob: `balance_changes` (receives 10 wei). This ensures net-zero code change is not recorded. | ✅ Completed | -| `test_bal_7702_double_auth_reset` | Ensure BAL captures net code change when double auth resets delegation | `Relayer` sends transaction with two authorizations for Alice: (1) First auth sets delegation to `CONTRACT_A` at nonce=0, (2) Second auth resets delegation to empty (address 0) at nonce=1. Transaction sends 10 wei to Bob. Per EIP-7702, only the last authorization takes effect. 
| BAL **MUST** include Alice with `nonce_changes` (both auths increment nonce to 2) but **MUST NOT** include `code_changes` (net change is empty → empty). Bob: `balance_changes` (receives 10 wei). Relayer: `nonce_changes`. `CONTRACT_A` **MUST NOT** be in BAL (never accessed). This is a regression test for the bug where BAL showed first auth's code despite final state being empty. | ✅ Completed | +| `test_bal_7702_double_auth_reset` | Ensure BAL tracks multiple 7702 nonce increments but filters net-zero code change | Single transaction contains two EIP-7702 authorizations for `Alice`: (1) first auth sets delegation `0xef0100\|\|Oracle`, (2) second auth clears delegation back to empty. Transaction sends 10 wei to `Bob`. Two variants: (a) Self-funded: `Alice` is tx sender (one tx nonce bump + two auth bumps → nonce 0→3). (b) Sponsored: `Relayer` is tx sender (`Alice` only in auths → nonce 0→2 for `Alice`, plus one nonce bump for `Relayer`). | Variant (a): BAL **MUST** include `Alice` with `nonce_changes` 0→3. Variant (b): BAL **MUST** include `Alice` with `nonce_changes` 0→2 and `Relayer` with its own `nonce_changes`. For both variants, BAL **MUST NOT** include `code_changes` for `Alice` (net code is empty), **MUST** include `Bob` with `balance_changes` (receives 10 wei), and `Oracle` **MUST NOT** appear in BAL. | ✅ Completed | | `test_bal_7702_double_auth_swap` | Ensure BAL captures final code when double auth swaps delegation targets | `Relayer` sends transaction with two authorizations for Alice: (1) First auth sets delegation to `CONTRACT_A` at nonce=0, (2) Second auth changes delegation to `CONTRACT_B` at nonce=1. Transaction sends 10 wei to Bob. Per EIP-7702, only the last authorization takes effect. | BAL **MUST** include Alice with `nonce_changes` (both auths increment nonce to 2) and `code_changes` (final code is delegation designation for `CONTRACT_B`, not `CONTRACT_A`). Bob: `balance_changes` (receives 10 wei). Relayer: `nonce_changes`. Neither `CONTRACT_A` nor `CONTRACT_B` appear in BAL during delegation setup (never accessed). This ensures BAL shows final state, not intermediate changes. | ✅ Completed | | `test_bal_sstore_and_oog` | Ensure BAL handles OOG during SSTORE execution at various gas boundaries (EIP-2200 stipend and implicit SLOAD) | Alice calls contract that attempts `SSTORE` to cold slot `0x01`. Parameterized: (1) OOG at EIP-2200 stipend check (2300 gas after PUSH opcodes) - fails before implicit SLOAD, (2) OOG at stipend + 1 (2301 gas) - passes stipend check but fails after implicit SLOAD, (3) OOG at exact gas - 1, (4) Successful SSTORE with exact gas. | For case (1): BAL **MUST NOT** include slot `0x01` in `storage_reads` or `storage_changes` (fails before implicit SLOAD). For cases (2) and (3): BAL **MUST** include slot `0x01` in `storage_reads` (implicit SLOAD occurred) but **MUST NOT** include in `storage_changes` (write didn't complete). For case (4): BAL **MUST** include slot `0x01` in `storage_changes` only (successful write; read is filtered by builder). | ✅ Completed | | `test_bal_sstore_static_context` | Ensure BAL does not capture spurious storage access when SSTORE fails in static context | Alice calls contract with `STATICCALL` which attempts `SSTORE` to slot `0x01`. SSTORE must fail before any storage access occurs. | BAL **MUST NOT** include slot `0x01` in `storage_reads` or `storage_changes`. Static context check happens before storage access, preventing spurious reads. Alice has `nonce_changes` and `balance_changes` (gas cost). 
Target contract included with empty changes. | ✅ Completed | @@ -60,8 +60,6 @@ | `test_bal_staticcall_no_delegation_and_oog_before_target_access` | Ensure BAL handles OOG before target access and success for non-delegated STATICCALL | Parametrized: target warm/cold, memory expansion, OOG boundary (before_target_access/success). | OOG: target in BAL ONLY if pre-warmed. Success: target always in BAL. | ✅ Completed | | `test_bal_staticcall_7702_delegation_and_oog` | Ensure BAL handles OOG at all 4 boundaries for STATICCALL to 7702 delegated accounts | Parametrized: target warm/cold, delegation warm/cold, memory expansion, OOG boundary (before_target_access/after_target_access/success_minus_1/success). | OOG before: neither in BAL. OOG after & success_minus_1: target in BAL, delegation NOT in BAL (static check optimization). Success: all in BAL. | ✅ Completed | | `test_bal_extcodecopy_and_oog` | Ensure BAL handles OOG during EXTCODECOPY at various failure points | Alice calls contract that attempts `EXTCODECOPY` from cold target contract. Parameterized: (1) Successful EXTCODECOPY, (2) OOG at cold access (insufficient gas for account access), (3) OOG at memory expansion with large offset (64KB offset, gas covers cold access + copy but NOT memory expansion), (4) OOG at memory expansion boundary (256 byte offset, gas is exactly 1 less than needed). | For success case: BAL **MUST** include target contract. For all OOG cases: BAL **MUST NOT** include target contract. Gas for ALL components (cold access + copy + memory expansion) must be checked BEFORE recording account access. | ✅ Completed | -| `test_bal_oog_7702_delegated_cold_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when both accounts are cold | Alice calls cold delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (first cold load succeeds) but **MUST NOT** include `TargetContract` (second cold load fails due to OOG) | 🟡 Planned | -| `test_bal_oog_7702_delegated_warm_cold` | Ensure BAL handles OOG during EIP-7702 delegated account loading when first account is warm, second is cold | Alice calls warm delegated account Bob (7702) which delegates to cold `TargetContract` with insufficient gas for second cold load | BAL **MUST** include Bob in `account_changes` (warm load succeeds) but **MUST NOT** include `TargetContract` (cold load fails due to OOG) | 🟡 Planned | | `test_bal_multiple_balance_changes_same_account` | Ensure BAL tracks multiple balance changes to same account across transactions | Alice funds Bob (starts at 0) in tx0 with exact amount needed. Bob spends everything in tx1 to Charlie. Bob's balance: 0 → funding_amount → 0 | BAL **MUST** include Bob with two `balance_changes`: one at txIndex=1 (receives funds) and one at txIndex=2 (balance returns to 0). This tests balance tracking across two transactions. | ✅ Completed | | `test_bal_multiple_storage_writes_same_slot` | Ensure BAL tracks multiple writes to same storage slot across transactions | Alice calls contract 3 times in same block. Contract increments slot 1 on each call: 0 → 1 → 2 → 3 | BAL **MUST** include contract with slot 1 having three `slot_changes`: txIndex=1 (value 1), txIndex=2 (value 2), txIndex=3 (value 3). Each transaction's write must be recorded separately. 
| ✅ Completed | | `test_bal_nested_delegatecall_storage_writes_net_zero` | Ensure BAL correctly filters net-zero storage changes across nested DELEGATECALL frames | Parametrized by nesting depth (1-3). Root contract has slot 0 = 1. Each frame writes a different intermediate value via DELEGATECALL chain, deepest frame writes back to original value (1). Example depth=2: 1 → 2 → 3 → 1 | BAL **MUST** include root contract with `storage_reads` for slot 0 but **MUST NOT** include `storage_changes` (net-zero). All delegate contracts **MUST** have empty changes. Tests that frame merging correctly removes parent's intermediate writes when child reverts to pre-tx value. | ✅ Completed | @@ -123,7 +121,6 @@ | `test_bal_4788_empty_block` | Ensure BAL captures beacon root storage writes in empty block | Block with no transactions. At block start (pre-execution), `SYSTEM_ADDRESS` calls `BEACON_ROOTS_ADDRESS` to store parent beacon root | BAL **MUST** include at `block_access_index=0`: `BEACON_ROOTS_ADDRESS` with two `storage_changes` (timestamp slot and beacon root slot); `SYSTEM_ADDRESS` **MUST NOT** be included in BAL. No transaction-related BAL entries. | ✅ Completed | | `test_bal_4788_query` | Ensure BAL captures storage reads when querying beacon root (valid and invalid queries) with optional value transfer | Parameterized test: Block 1 stores beacon root at timestamp 12. Block 2 queries with three timestamp scenarios (valid=12, invalid non-zero=42, invalid zero=0) and value (0 or 100 wei). Valid query (timestamp=12): reads both timestamp and root slots, writes returned value. If value > 0, beacon root contract receives balance. Invalid query with non-zero timestamp (timestamp=42): reads only timestamp slot before reverting, query contract has implicit SLOAD recorded (SSTORE reverts), no value transferred. Invalid query with zero timestamp (timestamp=0): reverts immediately without any storage access, query contract has implicit SLOAD recorded, no value transferred. | Block 1 BAL: System call writes. Block 2 BAL **MUST** include at `block_access_index=0`: System call writes for block 2. Valid case (timestamp=12) at `block_access_index=1`: `BEACON_ROOTS_ADDRESS` with `storage_reads` [timestamp_slot, root_slot] and `balance_changes` if value > 0, query contract with `storage_changes`. Invalid non-zero case (timestamp=42) at `block_access_index=1`: `BEACON_ROOTS_ADDRESS` with `storage_reads` [timestamp_slot only] and NO `balance_changes` (reverted), query contract with `storage_reads` [0] and NO `storage_changes`. Invalid zero case (timestamp=0) at `block_access_index=1`: `BEACON_ROOTS_ADDRESS` with NO `storage_reads` (reverts before access) and NO `balance_changes`, query contract with `storage_reads` [0] and NO `storage_changes`. | ✅ Completed | | `test_bal_4788_selfdestruct_to_beacon_root` | Ensure BAL captures `SELFDESTRUCT` to beacon root address alongside system call storage writes | Single block: Pre-execution system call writes beacon root to storage. Transaction: Alice calls contract (pre-funded with 100 wei) that selfdestructs with `BEACON_ROOTS_ADDRESS` as beneficiary. | BAL **MUST** include at `block_access_index=0`: `BEACON_ROOTS_ADDRESS` with `storage_changes` (timestamp and root slots from system call). At `block_access_index=1`: Alice with `nonce_changes`, contract with `balance_changes` (100→0), `BEACON_ROOTS_ADDRESS` with `balance_changes` (receives 100 wei). 
| ✅ Completed | -| `test_bal_7702_double_auth_reset_minimal` | Ensure BAL tracks multiple 7702 nonce increments but filters net-zero code change | Single transaction contains two EIP-7702 authorizations for `Alice`: (1) first auth sets delegation `0xef0100\|\|Oracle`, (2) second auth clears delegation back to empty. Transaction sends 10 wei to `Bob`. Two variants: (a) Self-funded: `Alice` is tx sender (one tx nonce bump + two auth bumps → nonce 0→3). (b) Sponsored: `Relayer` is tx sender (`Alice` only in auths → nonce 0→2 for `Alice`, plus one nonce bump for `Relayer`). | Variant (a): BAL **MUST** include `Alice` with `nonce_changes` 0→3. Variant (b): BAL **MUST** include `Alice` with `nonce_changes` 0→2 and `Relayer` with its own `nonce_changes`. For both variants, BAL **MUST NOT** include `code_changes` for `Alice` (net code is empty), **MUST** include `Bob` with `balance_changes` (receives 10 wei), and `Oracle` **MUST NOT** appear in BAL. | 🟡 Planned | | `test_bal_selfdestruct_send_to_sender` | Ensure BAL tracks SELFDESTRUCT sending all funds back to the tx sender (no burn) | Pre-state: contract `C` exists from a prior transaction with non-empty code and balance = 100 wei. EOA `Alice` sends a transaction calling `C`. `C`’s code executes `SELFDESTRUCT(Alice)`. Under EIP-6780, because `C` was not created in this transaction, SELFDESTRUCT does not delete code or storage; it only transfers the entire 100 wei balance from `C` to `Alice`. Final post-state: `C` still exists with the same code and balance = 0; `Alice`’s balance increased by 100 wei (ignoring gas for this test). | BAL **MUST** include `Alice` with `nonce_changes` (tx sender) and `balance_changes` reflecting receipt of 100 wei, and **MUST** include `C` with `balance_changes` 100→0 and no `code_changes`. BAL **MUST NOT** include any other accounts. This test ensures SELFDESTRUCT-to-sender is modeled as a pure value transfer (no burn, no code deletion). | 🟡 Planned | | `test_bal_7002_clean_sweep` | Ensure BAL correctly tracks "clean sweep" where all withdrawal requests are dequeued in same block (requests ≤ MAX). Parameterized: (1) pubkey first 32 bytes zero / non-zero, (2) amount zero / non-zero | Alice sends transaction to `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS` with 1 withdrawal request. Validator pubkey has either first 32 bytes zero or non-zero. Amount is either zero or non-zero. Since 1 ≤ MAX_WITHDRAWAL_REQUESTS_PER_BLOCK, post-execution system call dequeues all requests ("clean sweep"), resetting head and tail to 0. | BAL **MUST** include Alice with `nonce_changes` at `block_access_index=1`. `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS` **MUST** have: `balance_changes` at `block_access_index=1` (receives fee), `storage_reads` for excess, head, and slot 5 (first 32 bytes of pubkey) if zero. At `block_access_index=1` (tx enqueue): `storage_changes` for count (0→1), tail (0→1), slot 4 (source address), slot 5 (first 32 bytes, **ONLY** if non-zero), slot 6. At `block_access_index=2` (post-exec dequeue): `storage_changes` for count (1→0), tail (1→0). Clean sweep invariant: when all requests dequeued, both head and tail reset to 0. | ✅ Completed | | `test_bal_7002_partial_sweep` | Ensure BAL correctly tracks queue overflow when requests exceed MAX, demonstrating partial sweep in block 1 and cleanup in block 2 | Block 1: 20 different EOAs each send withdrawal request to `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS`. Since 20 > MAX_WITHDRAWAL_REQUESTS_PER_BLOCK, only first MAX requests dequeued ("partial sweep"), leaving 4 in queue. 
Block 2: Empty block (no transactions), remaining 4 requests dequeued ("clean sweep"), queue becomes empty. | Block 1 BAL **MUST** include all 20 senders with `nonce_changes` at respective `block_access_index` (1-20). `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS` at each tx: `storage_changes` for count (increments to 20), tail (increments to 20). At `block_access_index=21` (post-exec partial dequeue): `storage_changes` for count (20→0), head (0→MAX). Partial sweep: head advances by MAX, tail stays 20, queue has 4 remaining (tail - head = 4). Block 2 BAL **MUST** include `WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS` at `block_access_index=1` (post-exec clean sweep): `storage_changes` for head (MAX→0), tail (20→0). Clean sweep: both head and tail reset to 0, queue empty. |✅ Completed | From cad8b90306b5308e5cb88c133b228f78587eac1f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=94=A1=E4=BD=B3=E8=AA=A0=20Louis=20Tsai?= <72684086+LouisTsai-Csie@users.noreply.github.com> Date: Fri, 6 Feb 2026 22:23:35 +0800 Subject: [PATCH 133/154] feat(tests): add warm and cold account query benchmark (#2138) --- tests/benchmark/compute/helpers.py | 118 ++++++---- .../compute/instruction/test_account_query.py | 213 ++++++++++++++++++ .../scenario/test_unchunkified_bytecode.py | 21 +- 3 files changed, 295 insertions(+), 57 deletions(-) diff --git a/tests/benchmark/compute/helpers.py b/tests/benchmark/compute/helpers.py index 1b77386465..c3281ed43f 100644 --- a/tests/benchmark/compute/helpers.py +++ b/tests/benchmark/compute/helpers.py @@ -8,6 +8,7 @@ EOA, Address, Alloc, + Bytecode, BytesConcatenation, FixedIterationsBytecode, Fork, @@ -221,32 +222,40 @@ def calculate_optimal_input_length( return optimal_input_length -class MaxSizedContractInitcode(FixedIterationsBytecode): +class CustomSizedContractInitcode(FixedIterationsBytecode): """ - Initcode that deploys a random and maximum-sized contract for the given - fork's limits. + Initcode that deploys a random contract with a custom size. + + If no contract size is provided, the maximum contract size for the given + fork is used. """ _cached_address: Address """Cached address to avoid expensive recomputation.""" + contract_size: int + """The size of the contract to deploy.""" - def __new__(cls, *, pre: Alloc, fork: Fork) -> Self: + def __new__( + cls, *, pre: Alloc, fork: Fork, contract_size: int | None = None + ) -> Self: """ - Create a new MaxSizedContractInitcode instance. + Create a new CustomSizedContractInitcode instance. Args: pre: The pre-allocation state where the contract will be deployed. fork: The fork to use for determining maximum contract size limits. + contract_size: The size of the contract to deploy. If None, + the maximum contract size for the fork is used. Returns: - A new MaxSizedContractInitcode instance. + A new CustomSizedContractInitcode instance. 
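+
+        Illustrative usage (a sketch; ``pre`` and ``fork`` are assumed to
+        come from the usual test fixtures):
+
+            initcode = CustomSizedContractInitcode(
+                pre=pre, fork=fork, contract_size=1024
+            )
+            # The runtime code produced by this initcode is 1024 bytes and
+            # starts with STOP; the initcode itself is pre-deployed at
+            # ``initcode.address()`` so a factory can EXTCODECOPY it.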
""" - max_contract_size = fork.max_code_size() + if contract_size is None: + contract_size = fork.max_code_size() xor_table_byte_size = XOR_TABLE_SIZE * 32 - iteration_count = ((max_contract_size - 32) // xor_table_byte_size) + 1 setup = Op.MSTORE( 0, Op.ADDRESS, @@ -254,25 +263,33 @@ def __new__(cls, *, pre: Alloc, fork: Fork) -> Self: old_memory_size=0, new_memory_size=32, ) - iterating = While( - body=( - Op.SHA3(Op.SUB(Op.MSIZE, 32), 32, data_size=32) - # Use a xor table to avoid having to call the "expensive" sha3 - # opcode as much - + sum( - ( - Op.PUSH32[xor_value] - + Op.XOR - + Op.DUP1 - + Op.MSIZE - + Op.MSTORE + iterating: While | Bytecode + if contract_size > 32: + iteration_count = ((contract_size - 32) // xor_table_byte_size) + 1 + iterating = While( + body=( + Op.SHA3(Op.SUB(Op.MSIZE, 32), 32, data_size=32) + # Use a xor table to avoid having to call the "expensive" + # sha3 opcode as much + + sum( + ( + Op.PUSH32[xor_value] + + Op.XOR + + Op.DUP1 + + Op.MSIZE + + Op.MSTORE + ) + for xor_value in XOR_TABLE ) - for xor_value in XOR_TABLE - ) - + Op.POP - ), - condition=Op.LT(Op.MSIZE, max_contract_size), - ) + + Op.POP + ), + condition=Op.LT(Op.MSIZE, contract_size), + ) + final_memory_size = (xor_table_byte_size * iteration_count) + 32 + else: + iteration_count = 0 + iterating = Bytecode() + final_memory_size = 32 cleanup = ( # Despite the whole contract has random bytecode, we need the first # opcode be a STOP so CALL-like attacks return as soon as possible. @@ -280,15 +297,15 @@ def __new__(cls, *, pre: Alloc, fork: Fork) -> Self: # are always zero, so no need to do anything but return. Op.RETURN( 0, - max_contract_size, + contract_size, # Gas accounting - code_deposit_size=max_contract_size, + code_deposit_size=contract_size, # Memory is not expanded here, but it is expanded in the loop. old_memory_size=32, - new_memory_size=(xor_table_byte_size * iteration_count) + 32, + new_memory_size=final_memory_size, ) ) - instance = super(MaxSizedContractInitcode, cls).__new__( + instance = super(CustomSizedContractInitcode, cls).__new__( cls, setup=setup, iterating=iterating, @@ -301,6 +318,7 @@ def __new__(cls, *, pre: Alloc, fork: Fork) -> Self: initcode=Initcode(deploy_code=instance), fork=fork, ) + instance.contract_size = contract_size deployed_address = pre.deterministic_deploy_contract( deploy_code=instance ) @@ -312,39 +330,49 @@ def address(self) -> Address: return self._cached_address -class MaxSizedContractFactory(IteratingBytecode): +class CustomSizedContractFactory(IteratingBytecode): """ - Factory contract that creates maximum-sized contracts. + Factory contract that creates contracts with a custom size. The contract takes two 32-byte arguments in the calldata: - start_index: the starting index of the contract to deploy - end_index: the ending index of the contract to deploy - The contract will deploy a maximum-sized contract for each index in the - range, inclusive. + The contract will deploy a contract for each index in the range, inclusive. + + If no contract size is provided, the maximum contract size for the given + fork is used. 
""" - initcode: MaxSizedContractInitcode - """The initcode used to deploy maximum-sized contracts via CREATE2.""" + initcode: CustomSizedContractInitcode + """The initcode used to deploy contracts via CREATE2.""" _cached_address: Address """Cached address to avoid expensive recomputation.""" + contract_size: int + """The size of the contracts to deploy.""" - def __new__(cls, *, pre: Alloc, fork: Fork) -> Self: + def __new__( + cls, *, pre: Alloc, fork: Fork, contract_size: int | None = None + ) -> Self: """ - Create a new MaxSizedContractFactory instance. + Create a new CustomSizedContractFactory instance. Args: pre: The pre-allocation state where the factory will be deployed. fork: The fork to use for gas calculations and contract size limits. + contract_size: The size of the contracts to deploy. If None, + the maximum contract size for the fork is used. Returns: - A new MaxSizedContractFactory instance. + A new CustomSizedContractFactory instance. """ - initcode = MaxSizedContractInitcode(pre=pre, fork=fork) + initcode = CustomSizedContractInitcode( + pre=pre, fork=fork, contract_size=contract_size + ) initcode_address = initcode.address() setup = ( Op.EXTCODECOPY( @@ -373,15 +401,10 @@ def __new__(cls, *, pre: Alloc, fork: Fork) -> Self: init_code_size=len(initcode), ) ), - condition=Op.PUSH1(1) - + Op.ADD - + Op.DUP1 - + Op.DUP3 - + Op.LT - + Op.ISZERO, + condition=Op.PUSH1(1) + Op.ADD + Op.DUP1 + Op.DUP3 + Op.GT, ) cleanup = Op.STOP - instance = super(MaxSizedContractFactory, cls).__new__( + instance = super(CustomSizedContractFactory, cls).__new__( cls, setup=setup, iterating=iterating, @@ -395,6 +418,7 @@ def __new__(cls, *, pre: Alloc, fork: Fork) -> Self: initcode=Initcode(deploy_code=instance), fork=fork, ) + instance.contract_size = initcode.contract_size deployed_address = pre.deterministic_deploy_contract( deploy_code=instance ) diff --git a/tests/benchmark/compute/instruction/test_account_query.py b/tests/benchmark/compute/instruction/test_account_query.py index 4cc9f60315..590e70b9ea 100644 --- a/tests/benchmark/compute/instruction/test_account_query.py +++ b/tests/benchmark/compute/instruction/test_account_query.py @@ -17,15 +17,18 @@ import pytest from execution_testing import ( + AccessList, Account, Address, Alloc, BenchmarkTestFiller, Block, Bytecode, + Create2PreimageLayout, ExtCallGenerator, Fork, Hash, + IteratingBytecode, JumpLoopGenerator, Op, TestPhaseManager, @@ -33,6 +36,8 @@ While, ) +from tests.benchmark.compute.helpers import CustomSizedContractFactory + @pytest.mark.repricing(contract_balance=1) @pytest.mark.parametrize("contract_balance", [0, 1]) @@ -395,3 +400,211 @@ def test_ext_account_query_cold( post=post, blocks=blocks, ) + + +@pytest.mark.parametrize( + "opcode", + [ + Op.BALANCE, + # CALL* + Op.CALL, + Op.CALLCODE, + Op.DELEGATECALL, + Op.STATICCALL, + # EXTCODE* + Op.EXTCODESIZE, + Op.EXTCODEHASH, + Op.EXTCODECOPY, + ], +) +@pytest.mark.parametrize("access_warm", [True, False]) +@pytest.mark.parametrize("mem_size", [0, 32, 256, 1024]) +@pytest.mark.parametrize( + "code_size", [0, 32, 256, 1024, pytest.param(None, id="max_code_size")] +) +@pytest.mark.parametrize("value_sent", [0, 1]) +def test_account_query( + benchmark_test: BenchmarkTestFiller, + pre: Alloc, + fork: Fork, + opcode: Op, + access_warm: bool, + mem_size: int, + code_size: int, + value_sent: int, + gas_benchmark_value: int, +) -> None: + """Benchmark scenario of accessing max-code size bytecode.""" + if opcode in (Op.EXTCODESIZE, Op.EXTCODEHASH, Op.BALANCE) and ( + mem_size != 0 or 
code_size != 0 + ): + pytest.skip(f"No memory size configuration for {opcode}") + + if opcode not in (Op.CALL, Op.CALLCODE) and value_sent > 0: + pytest.skip(f"No value configuration for {opcode}") + + if ( + opcode in (Op.CALL, Op.CALLCODE, Op.STATICCALL, Op.DELEGATECALL) + and code_size != 0 + ): + pytest.skip(f"No code size configuration for {opcode}") + + attack_gas_limit = gas_benchmark_value + + # Create the max-sized fork-dependent contract factory. + custom_sized_contract_factory = CustomSizedContractFactory( + pre=pre, fork=fork, contract_size=code_size + ) + factory_address = custom_sized_contract_factory.address() + initcode = custom_sized_contract_factory.initcode + + # Prepare the attack iterating bytecode. + # Setup is just placing the CREATE2 Preimage in memory. + create2_preimage = Create2PreimageLayout( + factory_address=factory_address, + salt=Op.CALLDATALOAD(0), + init_code_hash=initcode.keccak256(), + ) + setup_code: Bytecode = create2_preimage + + if mem_size > 96: + setup_code += Op.MSTORE8( + mem_size - 1, + 0, + # Gas accounting + old_memory_size=96, + new_memory_size=mem_size, + ) + + if opcode == Op.EXTCODECOPY: + attack_call = Op.EXTCODECOPY( + address=create2_preimage.address_op(), + dest_offset=0, + size=mem_size, + # Gas accounting + data_size=mem_size, + address_warm=access_warm, + ) + elif opcode in (Op.CALL, Op.CALLCODE): + # CALL and CALLCODE accept value parameter + attack_call = Op.POP( + opcode( + address=create2_preimage.address_op(), + value=value_sent, + args_size=mem_size, + # Gas accounting + address_warm=access_warm, + new_memory_size=max(mem_size, 96), + ) + ) + elif opcode in (Op.STATICCALL, Op.DELEGATECALL): + # STATICCALL and DELEGATECALL don't have value parameter + attack_call = Op.POP( + opcode( + address=create2_preimage.address_op(), + args_size=mem_size, + # Gas accounting + address_warm=access_warm, + new_memory_size=max(mem_size, 96), + ) + ) + else: + # BALANCE, EXTCODESIZE, EXTCODEHASH + attack_call = Op.POP( + opcode( + address=create2_preimage.address_op(), + # Gas accounting + address_warm=access_warm, + ) + ) + + loop_code = While( + body=attack_call + create2_preimage.increment_salt_op(), + ) + + attack_code = IteratingBytecode( + setup=setup_code, + iterating=loop_code, + # Since the target contract is guaranteed to have a STOP as the first + # instruction, we can use a STOP as the iterating subcall code. + iterating_subcall=Op.STOP, + ) + + # Calldata generator for each transaction of the iterating bytecode. + def calldata(iteration_count: int, start_iteration: int) -> bytes: + del iteration_count + # We only pass the start iteration index as calldata for this bytecode + return Hash(start_iteration) + + # Access list generator for warm access tests. + # When access_warm=True, include all contract addresses that will be + # accessed in each transaction to warm them up via access list. + def access_list_generator( + iteration_count: int, start_iteration: int + ) -> list[AccessList] | None: + if not access_warm: + return None + return [ + AccessList( + address=custom_sized_contract_factory.created_contract_address( + salt=i + ), + storage_keys=[], + ) + for i in range(start_iteration, start_iteration + iteration_count) + ] + + attack_address = pre.deploy_contract(code=attack_code, balance=10**21) + + # Calculate the number of contracts to be targeted. 
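+    # Each attack-loop iteration increments the CREATE2 salt, so the total
+    # number of iterations across all attack transactions equals the number
+    # of target contracts that must be pre-deployed during the setup phase.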
+ num_contracts = sum( + attack_code.tx_iterations_by_gas_limit( + fork=fork, + gas_limit=attack_gas_limit, + calldata=calldata, + access_list=access_list_generator, + ) + ) + + # Deploy num_contracts via multiple txs (each capped by tx gas limit). + with TestPhaseManager.setup(): + setup_sender = pre.fund_eoa() + contracts_deployment_txs = list( + custom_sized_contract_factory.transactions_by_total_contract_count( + fork=fork, + sender=setup_sender, + contract_count=num_contracts, + ) + ) + + with TestPhaseManager.execution(): + attack_sender = pre.fund_eoa() + attack_txs = list( + attack_code.transactions_by_gas_limit( + fork=fork, + gas_limit=attack_gas_limit, + sender=attack_sender, + to=attack_address, + calldata=calldata, + access_list=access_list_generator, + ) + ) + total_gas_cost = sum(tx.gas_cost for tx in attack_txs) + + post = {} + if custom_sized_contract_factory.contract_size > 0: + for i in range(num_contracts): + deployed_contract_address = ( + custom_sized_contract_factory.created_contract_address(salt=i) + ) + post[deployed_contract_address] = Account(nonce=1) + + benchmark_test( + pre=pre, + post=post, + blocks=[ + Block(txs=contracts_deployment_txs), + Block(txs=attack_txs), + ], + expected_benchmark_gas_used=total_gas_cost, + ) diff --git a/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py b/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py index 4ca7c2ead0..9ac2e77245 100644 --- a/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py +++ b/tests/benchmark/compute/scenario/test_unchunkified_bytecode.py @@ -7,8 +7,8 @@ from execution_testing import ( Account, Alloc, + BenchmarkTestFiller, Block, - BlockchainTestFiller, Bytecode, Create2PreimageLayout, Fork, @@ -19,7 +19,7 @@ While, ) -from tests.benchmark.compute.helpers import MaxSizedContractFactory +from tests.benchmark.compute.helpers import CustomSizedContractFactory @pytest.mark.parametrize( @@ -35,7 +35,7 @@ ], ) def test_unchunkified_bytecode( - blockchain_test: BlockchainTestFiller, + benchmark_test: BenchmarkTestFiller, pre: Alloc, fork: Fork, opcode: Op, @@ -50,9 +50,11 @@ def test_unchunkified_bytecode( attack_gas_limit = gas_benchmark_value # Create the max-sized fork-dependent contract factory. - max_sized_contract_factory = MaxSizedContractFactory(pre=pre, fork=fork) - factory_address = max_sized_contract_factory.address() - initcode = max_sized_contract_factory.initcode + custom_sized_contract_factory = CustomSizedContractFactory( + pre=pre, fork=fork + ) + factory_address = custom_sized_contract_factory.address() + initcode = custom_sized_contract_factory.initcode # Prepare the attack iterating bytecode. # Setup is just placing the CREATE2 Preimage in memory. 
@@ -125,7 +127,7 @@ def calldata(iteration_count: int, start_iteration: int) -> bytes: with TestPhaseManager.setup(): setup_sender = pre.fund_eoa() contracts_deployment_txs = list( - max_sized_contract_factory.transactions_by_total_contract_count( + custom_sized_contract_factory.transactions_by_total_contract_count( fork=fork, sender=setup_sender, contract_count=num_contracts, @@ -148,17 +150,16 @@ def calldata(iteration_count: int, start_iteration: int) -> bytes: post = {} for i in range(num_contracts): deployed_contract_address = ( - max_sized_contract_factory.created_contract_address(salt=i) + custom_sized_contract_factory.created_contract_address(salt=i) ) post[deployed_contract_address] = Account(nonce=1) - blockchain_test( + benchmark_test( pre=pre, post=post, blocks=[ Block(txs=contracts_deployment_txs), Block(txs=attack_txs), ], - exclude_full_post_state_in_output=True, expected_benchmark_gas_used=total_gas_cost, ) From a32148175b3ea1db5a34caba939627af5be60c9a Mon Sep 17 00:00:00 2001 From: CPerezz <37264926+CPerezz@users.noreply.github.com> Date: Fri, 6 Feb 2026 17:13:56 +0100 Subject: [PATCH 134/154] fix(benchmark): reduce stateful stubs to 4 representative tokens (#2141) --- tests/benchmark/stateful/bloatnet/stubs.json | 290 +---------------- .../benchmark/stateful/bloatnet/stubs_99.json | 299 ++++++++++++++++++ 2 files changed, 303 insertions(+), 286 deletions(-) create mode 100644 tests/benchmark/stateful/bloatnet/stubs_99.json diff --git a/tests/benchmark/stateful/bloatnet/stubs.json b/tests/benchmark/stateful/bloatnet/stubs.json index f10cc6c5e2..c572ef2cee 100644 --- a/tests/benchmark/stateful/bloatnet/stubs.json +++ b/tests/benchmark/stateful/bloatnet/stubs.json @@ -1,299 +1,17 @@ { "test_sload_empty_erc20_balanceof_30GB_ERC20": "0x19fc17d87D946BBA47ca276f7b06Ee5737c4679C", "test_sload_empty_erc20_balanceof_XEN": "0x06450dEe7FD2Fb8E39061434BAbCFC05599a6Fb8", - "test_sload_empty_erc20_balanceof_USDT": "0xdAC17F958D2ee523a2206206994597C13D831ec7", "test_sload_empty_erc20_balanceof_USDC": "0xA0b86991C6218B36c1d19D4a2E9Eb0CE3606EB48", - "test_sload_empty_erc20_balanceof_LPT": "0x58b6A8a3302369DAEc383334672404Ee733AB239", - "test_sload_empty_erc20_balanceof_SHIB": "0x95aD61B0a150d79219dCF64E1E6Cc01f0B64C4cE", - "test_sload_empty_erc20_balanceof_WETH": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", - "test_sload_empty_erc20_balanceof_G-CRE": "0xa3Ee21c306A700E682AbcDfE9bAA6A08F3820419", - "test_sload_empty_erc20_balanceof_MEME": "0xB131F4A55907B10d1F0A50d8Ab8FA09EC342CD74", - "test_sload_empty_erc20_balanceof_OMG": "0xd26114cD6EE289AccF82350c8d8487fedB8A0C07", - "test_sload_empty_erc20_balanceof_MATIC": "0x7d1Afa7B718fb893DB30A3abc0Cfc608AaCfEbB0", - "test_sload_empty_erc20_balanceof_stETH": "0xae7ab96520DE3A18E5e111B5EaAb095312D7fE84", - "test_sload_empty_erc20_balanceof_DAI": "0x6B175474E89094C44Da98b954EedeAC495271d0F", - "test_sload_empty_erc20_balanceof_PEPE": "0x6982508145454Ce325dDbE47a25d4eC3d2311933", - "test_sload_empty_erc20_balanceof_old": "0x0cf0ee63788A0849FE5297F3407f701E122CC023", - "test_sload_empty_erc20_balanceof_BAT": "0x0D8775F648430679A709E98d2b0Cb6250d2887EF", - "test_sload_empty_erc20_balanceof_UNI": "0x1F9840a85d5aF5bf1D1762F925BdADdC4201F984", - "test_sload_empty_erc20_balanceof_AMB": "0x4dc3643Dbc642b72C158E7F3d2FF232df61cB6CE", - "test_sload_empty_erc20_balanceof_HEX": "0x2b591e99afE9f32eAA6214f7B7629768c40eEb39", - "test_sload_empty_erc20_balanceof_CRO": "0xa0b73e1ff0b80914ab6fe0444e65848c4c34450b", - "test_sload_empty_erc20_balanceof_UCASH": 
"0x92e52a1A235d9A103D970901066CE910AAceFD37", - "test_sload_empty_erc20_balanceof_BNB": "0xB8c77482e45F1F44dE1745F52C74426C631bDd52", - "test_sload_empty_erc20_balanceof_GSE": "0xe530441f4f73bdb6dc2fa5af7c3fc5fd551ec838", - "test_sload_empty_erc20_balanceof_MANA": "0x0F5D2FB29fb7d3cFeE444A200298f468908cC942", - "test_sload_empty_erc20_balanceof_OCN": "0x4092678e4E78230F46A1534C0fBC8Fa39780892B", - "test_sload_empty_erc20_balanceof_EIGEN": "0xEC53BF9167F50cDEb3aE105F56099AaAb9061F83", - "test_sload_empty_erc20_balanceof_COMP": "0xc00e94Cb662C3520282E6f5717214004A7f26888", - "test_sload_empty_erc20_balanceof_cUSDC": "0x39AA39c021dfbaE8faC545936693aC917d5E7563", - "test_sload_empty_erc20_balanceof_sMEME": "0xc059A531B4234d05e9EF4aC51028f7E6156E2CcE", - "test_sload_empty_erc20_balanceof_SAND": "0x3845badade8e6dff049820680d1f14bd3903a5d0", - "test_sload_empty_erc20_balanceof_AAVE": "0x7Fc66500c84A76Ad7e9c93437bFc5AC33E2DDAe9", - "test_sload_empty_erc20_balanceof_ZRX": "0xE41d2489571d322189246DaFA5ebDe1F4699F498", - "test_sload_empty_erc20_balanceof_KOK": "0x9B9647431632AF44be02ddd22477Ed94d14AacAa", - "test_sload_empty_erc20_balanceof_APE": "0x4d224452801ACEd8B2F0aebe155379bb5D594381", - "test_sload_empty_erc20_balanceof_SAI": "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359", - "test_sload_empty_erc20_balanceof_GRT": "0xc944E90C64B2c07662A292be6244BDf05Cda44a7", - "test_sload_empty_erc20_balanceof_LRC": "0xBBbbCA6A901c926F240b89EacB641d8Aec7AEafD", - "test_sload_empty_erc20_balanceof_ELON": "0x761D38e5DDf6ccf6Cf7C55759d5210750B5D60F3", - "test_sload_empty_erc20_balanceof_QNT": "0x4a220E6096B25EADb88358cb44068A3248254675", - "test_sload_empty_erc20_balanceof_ONDO": "0xfAbA6f8e4a5E8Ab82F62fe7C39859FA577269BE3", - "test_sload_empty_erc20_balanceof_ENJ": "0xF629cBd94d3791c9250152BD8dfBDF380E2a3B9c", - "test_sload_empty_erc20_balanceof_FET": "0x1D287CC25dAD7cCaF76a26bc660c5F7C8E2a05BD", - "test_sload_empty_erc20_balanceof_eETH": "0x6c5024Cd4F8A59110119C56f8933403A539555EB", - "test_sload_empty_erc20_balanceof_XMX": "0x0F8c45B896784A1E408526B9300519ef8660209c", - "test_sload_empty_erc20_balanceof_FTI": "0x943ed852Dadb5C3938ECdC6883718df8142de4C8", - "test_sload_empty_erc20_balanceof_WBTC": "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", - "test_sload_empty_erc20_balanceof_LEND": "0x80fB784B7eD66730e8b1DBd9820aFD29931aab03", - "test_sload_empty_erc20_balanceof_ELEC": "0xd49ff13661451313ca1553fd6954bd1d9b6e02b9", - "test_sload_empty_erc20_balanceof_SUSHI": "0x6B3595068778DD592e39A122f4f5a5CF09C90fE2", - "test_sload_empty_erc20_balanceof_HOT": "0x6c6EE5e31d828De241282B9606C8e98Ea48526E2", - "test_sload_empty_erc20_balanceof_MITx": "0x4a527d8fc13c5203ab24ba0944f4cb14658d1db6", - "test_sload_empty_erc20_balanceof_1INCH": "0x111111111117dC0aa78b770fA6A738034120C302", - "test_sload_empty_erc20_balanceof_USDP": "0x1456688345527bE1f37E9e627DA0837D6f08C925", - "test_sload_empty_erc20_balanceof_ETHFI": "0xfe0c30065b384f05761f15d0cc899d4f9f9cc0eb", - "test_sload_empty_erc20_balanceof_POLY": "0x9992ec3cf6a55b00978cddf2b27bc6882d88d1ec", - "test_sload_empty_erc20_balanceof_AOA": "0x9ab165d795019b6d8b3e971dda91071421305e5a", - "test_sload_empty_erc20_balanceof_STORJ": "0xB64ef51C888972c908CFacf59B47C1AfBC0Ab8aC", - "test_sload_empty_erc20_balanceof_MKR": "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2", - "test_sload_empty_erc20_balanceof_AMP": "0xfF20817765cB7F73d4Bde2e66e067e58d11095c2", - "test_sload_empty_erc20_balanceof_VRA": "0xF411903cbc70a74d22900a5DE66A2dda66507255", - "test_sload_empty_erc20_balanceof_GTC": 
"0xde30da39c46104798bb5aa3fe8b9e0e1f348163f", - "test_sload_empty_erc20_balanceof_FLOKI": "0x43F11c02439E2736800433B4594994Bd43Cd066D", - "test_sload_empty_erc20_balanceof_ALT": "0x8457CA5040ad67fdebbCC8EdCE889A335Bc0fbFB", - "test_sload_empty_erc20_balanceof_IMX": "0xf57e7e7c23978c3caec3c3548e3d615c346e79ff", - "test_sload_empty_erc20_balanceof_XYO": "0x55296f69f40ea6d20e478533c15A6b08B654E758", - "test_sload_empty_erc20_balanceof_REV": "0x2ef27bf41236bd859a95209e17a43fbd26851f92", - "test_sload_empty_erc20_balanceof_FUN": "0x419d0d8bdd9af5e606ae2232ed285aff190e711b", - "test_sload_empty_erc20_balanceof_CRV": "0xD533a949740bb3306d119CC777fa900bA034cd52", - "test_sload_empty_erc20_balanceof_CHZ": "0x3506424f91fd33084466f402d5d97f05f8e3b4af", - "test_sload_empty_erc20_balanceof_SMT": "0x78Eb8DC641077F049f910659b6d580E80dC4d237", - "test_sload_empty_erc20_balanceof_SNX": "0xC011A72400E58ecD99Ee497CF89E3775d4bd732F", - "test_sload_empty_erc20_balanceof_DENT": "0x3597bfD533a99c9aa083587B074434E61Eb0A258", - "test_sload_empty_erc20_balanceof_RNDR": "0x6De037ef9aD2725EB40118Bb1702EBb27e4Aeb24", - "test_sload_empty_erc20_balanceof_SNT": "0x744d70FDBe2Ba4CF95131626614a1763DF805B9E", - "test_sload_empty_erc20_balanceof_AXS": "0xBB0E17EF65F82Ab018d8EDd776e8DD940327B28b", - "test_sload_empty_erc20_balanceof_KNC": "0xdd974D5C2e2928deA5F71b9825b8b646686BD200", - "test_sload_empty_erc20_balanceof_WEPE": "0xccB365D2e11aE4D6d74715c680f56cf58bF4bF10", - "test_sload_empty_erc20_balanceof_ZETA": "0xf091867ec603a6628ed83d274e835539d82e9cc8", - "test_sload_empty_erc20_balanceof_LYM": "0xc690f7c7fcffa6a82b79fab7508c466fefdfc8c5", - "test_sload_empty_erc20_balanceof_nCASH": "0x809826cceAb68c387726af962713b64Cb5Cb3CCA", - "test_sload_empty_erc20_balanceof_LOOKS": "0xf4d2888d29D722226FafA5d9B24F9164c092421E", - "test_sload_empty_erc20_balanceof_Monfter/Monavale": "0x275f5ad03be0fa221b4c6649b8aee09a42d9412a", - "test_sload_empty_erc20_balanceof_cETH": "0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5", - "test_sload_empty_erc20_balanceof_SALT": "0x4156D3342D5c385a87D264F90653733592000581", - "test_sload_empty_erc20_balanceof_HOGE": "0xfAd45E47083e4607302aa43c65fB3106F1cd7607", - "test_sload_empty_erc20_balanceof_REN": "0x408e41876cCCDC0F92210600ef50372656052a38", - "test_sload_empty_erc20_balanceof_ENS": "0xC56b13EBBCffa67cfB7979b900B736b3fb480D78", - "test_sload_empty_erc20_balanceof_NEXO": "0xB62132e35a6c13ee1EE0f84dC5d40bad8d815206", - "test_sload_empty_erc20_balanceof_RFR": "0xD0929d411954c47438Dc1D871dd6081F5C5e149c", - "test_sload_empty_erc20_balanceof_COFI": "0x3137619705b5fc22a3048989F983905e456B59Ab", - "test_sload_empty_erc20_balanceof_SLP": "0xcc8fa225d80b9c7d42f96e9570156c65d6cAAa25", - "test_sload_empty_erc20_balanceof_FUEL": "0xea38eaa3c86c8f9b751533ba2e562deb9acded40", - "test_sload_empty_erc20_balanceof_ENA": "0x57e114B691Db790C35207b2e685D4A43181e6061", - "test_sload_empty_erc20_balanceof_AKITA": "0x3301Ee63Fb29F863f2333Bd4466acb46CD8323E6", - "test_sload_empty_erc20_balanceof_CVC": "0x41e5560054824ea6B0732E656e3Ad64E20e94e45", - "test_sload_empty_erc20_balanceof_IHT": "0xEda8B016efa8b1161208Cf041cD86972EEE0F31E", - "test_sload_empty_erc20_balanceof_ZSC": "0x7A41e0517a5ecA4FdbC7FbebA4D4c47B9fF6DC63", - "test_sload_empty_erc20_balanceof_cbETH": "0xBe9895146f7AF43049ca1c1AE358B0541Ea49704", "test_sload_empty_erc20_balanceof_IMT": "0x13119e34e140097a507b07a5564bde1bc375d9e6", "test_sstore_erc20_approve_30GB_ERC20": "0x19fc17d87D946BBA47ca276f7b06Ee5737c4679C", "test_sstore_erc20_approve_XEN": 
"0x06450dEe7FD2Fb8E39061434BAbCFC05599a6Fb8", - "test_sstore_erc20_approve_USDT": "0xdAC17F958D2ee523a2206206994597C13D831ec7", "test_sstore_erc20_approve_USDC": "0xA0b86991C6218B36c1d19D4a2E9Eb0CE3606EB48", - "test_sstore_erc20_approve_LPT": "0x58b6A8a3302369DAEc383334672404Ee733AB239", - "test_sstore_erc20_approve_SHIB": "0x95aD61B0a150d79219dCF64E1E6Cc01f0B64C4cE", - "test_sstore_erc20_approve_WETH": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", - "test_sstore_erc20_approve_G-CRE": "0xa3Ee21c306A700E682AbcDfE9bAA6A08F3820419", - "test_sstore_erc20_approve_MEME": "0xB131F4A55907B10d1F0A50d8Ab8FA09EC342CD74", - "test_sstore_erc20_approve_OMG": "0xd26114cD6EE289AccF82350c8d8487fedB8A0C07", - "test_sstore_erc20_approve_MATIC": "0x7d1Afa7B718fb893DB30A3abc0Cfc608AaCfEbB0", - "test_sstore_erc20_approve_stETH": "0xae7ab96520DE3A18E5e111B5EaAb095312D7fE84", - "test_sstore_erc20_approve_DAI": "0x6B175474E89094C44Da98b954EedeAC495271d0F", - "test_sstore_erc20_approve_PEPE": "0x6982508145454Ce325dDbE47a25d4eC3d2311933", - "test_sstore_erc20_approve_old": "0x0cf0ee63788A0849FE5297F3407f701E122CC023", - "test_sstore_erc20_approve_BAT": "0x0D8775F648430679A709E98d2b0Cb6250d2887EF", - "test_sstore_erc20_approve_UNI": "0x1F9840a85d5aF5bf1D1762F925BdADdC4201F984", - "test_sstore_erc20_approve_AMB": "0x4dc3643Dbc642b72C158E7F3d2FF232df61cB6CE", - "test_sstore_erc20_approve_HEX": "0x2b591e99afE9f32eAA6214f7B7629768c40eEb39", - "test_sstore_erc20_approve_CRO": "0xa0b73e1ff0b80914ab6fe0444e65848c4c34450b", - "test_sstore_erc20_approve_UCASH": "0x92e52a1A235d9A103D970901066CE910AAceFD37", - "test_sstore_erc20_approve_BNB": "0xB8c77482e45F1F44dE1745F52C74426C631bDd52", - "test_sstore_erc20_approve_GSE": "0xe530441f4f73bdb6dc2fa5af7c3fc5fd551ec838", - "test_sstore_erc20_approve_MANA": "0x0F5D2FB29fb7d3cFeE444A200298f468908cC942", - "test_sstore_erc20_approve_OCN": "0x4092678e4E78230F46A1534C0fBC8Fa39780892B", - "test_sstore_erc20_approve_EIGEN": "0xEC53BF9167F50cDEb3aE105F56099AaAb9061F83", - "test_sstore_erc20_approve_COMP": "0xc00e94Cb662C3520282E6f5717214004A7f26888", - "test_sstore_erc20_approve_cUSDC": "0x39AA39c021dfbaE8faC545936693aC917d5E7563", - "test_sstore_erc20_approve_sMEME": "0xc059A531B4234d05e9EF4aC51028f7E6156E2CcE", - "test_sstore_erc20_approve_SAND": "0x3845badade8e6dff049820680d1f14bd3903a5d0", - "test_sstore_erc20_approve_AAVE": "0x7Fc66500c84A76Ad7e9c93437bFc5AC33E2DDAe9", - "test_sstore_erc20_approve_ZRX": "0xE41d2489571d322189246DaFA5ebDe1F4699F498", - "test_sstore_erc20_approve_KOK": "0x9B9647431632AF44be02ddd22477Ed94d14AacAa", - "test_sstore_erc20_approve_APE": "0x4d224452801ACEd8B2F0aebe155379bb5D594381", - "test_sstore_erc20_approve_SAI": "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359", - "test_sstore_erc20_approve_GRT": "0xc944E90C64B2c07662A292be6244BDf05Cda44a7", - "test_sstore_erc20_approve_LRC": "0xBBbbCA6A901c926F240b89EacB641d8Aec7AEafD", - "test_sstore_erc20_approve_ELON": "0x761D38e5DDf6ccf6Cf7C55759d5210750B5D60F3", - "test_sstore_erc20_approve_QNT": "0x4a220E6096B25EADb88358cb44068A3248254675", - "test_sstore_erc20_approve_ONDO": "0xfAbA6f8e4a5E8Ab82F62fe7C39859FA577269BE3", - "test_sstore_erc20_approve_ENJ": "0xF629cBd94d3791c9250152BD8dfBDF380E2a3B9c", - "test_sstore_erc20_approve_FET": "0x1D287CC25dAD7cCaF76a26bc660c5F7C8E2a05BD", - "test_sstore_erc20_approve_eETH": "0x6c5024Cd4F8A59110119C56f8933403A539555EB", - "test_sstore_erc20_approve_XMX": "0x0F8c45B896784A1E408526B9300519ef8660209c", - "test_sstore_erc20_approve_FTI": "0x943ed852Dadb5C3938ECdC6883718df8142de4C8", 
- "test_sstore_erc20_approve_WBTC": "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", - "test_sstore_erc20_approve_LEND": "0x80fB784B7eD66730e8b1DBd9820aFD29931aab03", - "test_sstore_erc20_approve_ELEC": "0xd49ff13661451313ca1553fd6954bd1d9b6e02b9", - "test_sstore_erc20_approve_SUSHI": "0x6B3595068778DD592e39A122f4f5a5CF09C90fE2", - "test_sstore_erc20_approve_HOT": "0x6c6EE5e31d828De241282B9606C8e98Ea48526E2", - "test_sstore_erc20_approve_MITx": "0x4a527d8fc13c5203ab24ba0944f4cb14658d1db6", - "test_sstore_erc20_approve_1INCH": "0x111111111117dC0aa78b770fA6A738034120C302", - "test_sstore_erc20_approve_USDP": "0x1456688345527bE1f37E9e627DA0837D6f08C925", - "test_sstore_erc20_approve_ETHFI": "0xfe0c30065b384f05761f15d0cc899d4f9f9cc0eb", - "test_sstore_erc20_approve_POLY": "0x9992ec3cf6a55b00978cddf2b27bc6882d88d1ec", - "test_sstore_erc20_approve_AOA": "0x9ab165d795019b6d8b3e971dda91071421305e5a", - "test_sstore_erc20_approve_STORJ": "0xB64ef51C888972c908CFacf59B47C1AfBC0Ab8aC", - "test_sstore_erc20_approve_MKR": "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2", - "test_sstore_erc20_approve_AMP": "0xfF20817765cB7F73d4Bde2e66e067e58d11095c2", - "test_sstore_erc20_approve_VRA": "0xF411903cbc70a74d22900a5DE66A2dda66507255", - "test_sstore_erc20_approve_GTC": "0xde30da39c46104798bb5aa3fe8b9e0e1f348163f", - "test_sstore_erc20_approve_FLOKI": "0x43F11c02439E2736800433B4594994Bd43Cd066D", - "test_sstore_erc20_approve_ALT": "0x8457CA5040ad67fdebbCC8EdCE889A335Bc0fbFB", - "test_sstore_erc20_approve_IMX": "0xf57e7e7c23978c3caec3c3548e3d615c346e79ff", - "test_sstore_erc20_approve_XYO": "0x55296f69f40ea6d20e478533c15A6b08B654E758", - "test_sstore_erc20_approve_REV": "0x2ef27bf41236bd859a95209e17a43fbd26851f92", - "test_sstore_erc20_approve_FUN": "0x419d0d8bdd9af5e606ae2232ed285aff190e711b", - "test_sstore_erc20_approve_CRV": "0xD533a949740bb3306d119CC777fa900bA034cd52", - "test_sstore_erc20_approve_CHZ": "0x3506424f91fd33084466f402d5d97f05f8e3b4af", - "test_sstore_erc20_approve_SMT": "0x78Eb8DC641077F049f910659b6d580E80dC4d237", - "test_sstore_erc20_approve_SNX": "0xC011A72400E58ecD99Ee497CF89E3775d4bd732F", - "test_sstore_erc20_approve_DENT": "0x3597bfD533a99c9aa083587B074434E61Eb0A258", - "test_sstore_erc20_approve_RNDR": "0x6De037ef9aD2725EB40118Bb1702EBb27e4Aeb24", - "test_sstore_erc20_approve_SNT": "0x744d70FDBe2Ba4CF95131626614a1763DF805B9E", - "test_sstore_erc20_approve_AXS": "0xBB0E17EF65F82Ab018d8EDd776e8DD940327B28b", - "test_sstore_erc20_approve_KNC": "0xdd974D5C2e2928deA5F71b9825b8b646686BD200", - "test_sstore_erc20_approve_WEPE": "0xccB365D2e11aE4D6d74715c680f56cf58bF4bF10", - "test_sstore_erc20_approve_ZETA": "0xf091867ec603a6628ed83d274e835539d82e9cc8", - "test_sstore_erc20_approve_LYM": "0xc690f7c7fcffa6a82b79fab7508c466fefdfc8c5", - "test_sstore_erc20_approve_nCASH": "0x809826cceAb68c387726af962713b64Cb5Cb3CCA", - "test_sstore_erc20_approve_LOOKS": "0xf4d2888d29D722226FafA5d9B24F9164c092421E", - "test_sstore_erc20_approve_Monfter/Monavale": "0x275f5ad03be0fa221b4c6649b8aee09a42d9412a", - "test_sstore_erc20_approve_cETH": "0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5", - "test_sstore_erc20_approve_SALT": "0x4156D3342D5c385a87D264F90653733592000581", - "test_sstore_erc20_approve_HOGE": "0xfAd45E47083e4607302aa43c65fB3106F1cd7607", - "test_sstore_erc20_approve_REN": "0x408e41876cCCDC0F92210600ef50372656052a38", - "test_sstore_erc20_approve_ENS": "0xC56b13EBBCffa67cfB7979b900B736b3fb480D78", - "test_sstore_erc20_approve_NEXO": "0xB62132e35a6c13ee1EE0f84dC5d40bad8d815206", - 
"test_sstore_erc20_approve_RFR": "0xD0929d411954c47438Dc1D871dd6081F5C5e149c", - "test_sstore_erc20_approve_COFI": "0x3137619705b5fc22a3048989F983905e456B59Ab", - "test_sstore_erc20_approve_SLP": "0xcc8fa225d80b9c7d42f96e9570156c65d6cAAa25", - "test_sstore_erc20_approve_FUEL": "0xea38eaa3c86c8f9b751533ba2e562deb9acded40", - "test_sstore_erc20_approve_ENA": "0x57e114B691Db790C35207b2e685D4A43181e6061", - "test_sstore_erc20_approve_AKITA": "0x3301Ee63Fb29F863f2333Bd4466acb46CD8323E6", - "test_sstore_erc20_approve_CVC": "0x41e5560054824ea6B0732E656e3Ad64E20e94e45", - "test_sstore_erc20_approve_IHT": "0xEda8B016efa8b1161208Cf041cD86972EEE0F31E", - "test_sstore_erc20_approve_ZSC": "0x7A41e0517a5ecA4FdbC7FbebA4D4c47B9fF6DC63", - "test_sstore_erc20_approve_cbETH": "0xBe9895146f7AF43049ca1c1AE358B0541Ea49704", "test_sstore_erc20_approve_IMT": "0x13119e34e140097a507b07a5564bde1bc375d9e6", "test_mixed_sload_sstore_30GB_ERC20": "0x19fc17d87D946BBA47ca276f7b06Ee5737c4679C", "test_mixed_sload_sstore_XEN": "0x06450dEe7FD2Fb8E39061434BAbCFC05599a6Fb8", - "test_mixed_sload_sstore_USDT": "0xdAC17F958D2ee523a2206206994597C13D831ec7", "test_mixed_sload_sstore_USDC": "0xA0b86991C6218B36c1d19D4a2E9Eb0CE3606EB48", - "test_mixed_sload_sstore_LPT": "0x58b6A8a3302369DAEc383334672404Ee733AB239", - "test_mixed_sload_sstore_SHIB": "0x95aD61B0a150d79219dCF64E1E6Cc01f0B64C4cE", - "test_mixed_sload_sstore_WETH": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", - "test_mixed_sload_sstore_G-CRE": "0xa3Ee21c306A700E682AbcDfE9bAA6A08F3820419", - "test_mixed_sload_sstore_MEME": "0xB131F4A55907B10d1F0A50d8Ab8FA09EC342CD74", - "test_mixed_sload_sstore_OMG": "0xd26114cD6EE289AccF82350c8d8487fedB8A0C07", - "test_mixed_sload_sstore_MATIC": "0x7d1Afa7B718fb893DB30A3abc0Cfc608AaCfEbB0", - "test_mixed_sload_sstore_stETH": "0xae7ab96520DE3A18E5e111B5EaAb095312D7fE84", - "test_mixed_sload_sstore_DAI": "0x6B175474E89094C44Da98b954EedeAC495271d0F", - "test_mixed_sload_sstore_PEPE": "0x6982508145454Ce325dDbE47a25d4eC3d2311933", - "test_mixed_sload_sstore_old": "0x0cf0ee63788A0849FE5297F3407f701E122CC023", - "test_mixed_sload_sstore_BAT": "0x0D8775F648430679A709E98d2b0Cb6250d2887EF", - "test_mixed_sload_sstore_UNI": "0x1F9840a85d5aF5bf1D1762F925BdADdC4201F984", - "test_mixed_sload_sstore_AMB": "0x4dc3643Dbc642b72C158E7F3d2FF232df61cB6CE", - "test_mixed_sload_sstore_HEX": "0x2b591e99afE9f32eAA6214f7B7629768c40eEb39", - "test_mixed_sload_sstore_CRO": "0xa0b73e1ff0b80914ab6fe0444e65848c4c34450b", - "test_mixed_sload_sstore_UCASH": "0x92e52a1A235d9A103D970901066CE910AAceFD37", - "test_mixed_sload_sstore_BNB": "0xB8c77482e45F1F44dE1745F52C74426C631bDd52", - "test_mixed_sload_sstore_GSE": "0xe530441f4f73bdb6dc2fa5af7c3fc5fd551ec838", - "test_mixed_sload_sstore_MANA": "0x0F5D2FB29fb7d3cFeE444A200298f468908cC942", - "test_mixed_sload_sstore_OCN": "0x4092678e4E78230F46A1534C0fBC8Fa39780892B", - "test_mixed_sload_sstore_EIGEN": "0xEC53BF9167F50cDEb3aE105F56099AaAb9061F83", - "test_mixed_sload_sstore_COMP": "0xc00e94Cb662C3520282E6f5717214004A7f26888", - "test_mixed_sload_sstore_cUSDC": "0x39AA39c021dfbaE8faC545936693aC917d5E7563", - "test_mixed_sload_sstore_sMEME": "0xc059A531B4234d05e9EF4aC51028f7E6156E2CcE", - "test_mixed_sload_sstore_SAND": "0x3845badade8e6dff049820680d1f14bd3903a5d0", - "test_mixed_sload_sstore_AAVE": "0x7Fc66500c84A76Ad7e9c93437bFc5AC33E2DDAe9", - "test_mixed_sload_sstore_ZRX": "0xE41d2489571d322189246DaFA5ebDe1F4699F498", - "test_mixed_sload_sstore_KOK": "0x9B9647431632AF44be02ddd22477Ed94d14AacAa", - 
"test_mixed_sload_sstore_APE": "0x4d224452801ACEd8B2F0aebe155379bb5D594381", - "test_mixed_sload_sstore_SAI": "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359", - "test_mixed_sload_sstore_GRT": "0xc944E90C64B2c07662A292be6244BDf05Cda44a7", - "test_mixed_sload_sstore_LRC": "0xBBbbCA6A901c926F240b89EacB641d8Aec7AEafD", - "test_mixed_sload_sstore_ELON": "0x761D38e5DDf6ccf6Cf7C55759d5210750B5D60F3", - "test_mixed_sload_sstore_QNT": "0x4a220E6096B25EADb88358cb44068A3248254675", - "test_mixed_sload_sstore_ONDO": "0xfAbA6f8e4a5E8Ab82F62fe7C39859FA577269BE3", - "test_mixed_sload_sstore_ENJ": "0xF629cBd94d3791c9250152BD8dfBDF380E2a3B9c", - "test_mixed_sload_sstore_FET": "0x1D287CC25dAD7cCaF76a26bc660c5F7C8E2a05BD", - "test_mixed_sload_sstore_eETH": "0x6c5024Cd4F8A59110119C56f8933403A539555EB", - "test_mixed_sload_sstore_XMX": "0x0F8c45B896784A1E408526B9300519ef8660209c", - "test_mixed_sload_sstore_FTI": "0x943ed852Dadb5C3938ECdC6883718df8142de4C8", - "test_mixed_sload_sstore_WBTC": "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", - "test_mixed_sload_sstore_LEND": "0x80fB784B7eD66730e8b1DBd9820aFD29931aab03", - "test_mixed_sload_sstore_ELEC": "0xd49ff13661451313ca1553fd6954bd1d9b6e02b9", - "test_mixed_sload_sstore_SUSHI": "0x6B3595068778DD592e39A122f4f5a5CF09C90fE2", - "test_mixed_sload_sstore_HOT": "0x6c6EE5e31d828De241282B9606C8e98Ea48526E2", - "test_mixed_sload_sstore_MITx": "0x4a527d8fc13c5203ab24ba0944f4cb14658d1db6", - "test_mixed_sload_sstore_1INCH": "0x111111111117dC0aa78b770fA6A738034120C302", - "test_mixed_sload_sstore_USDP": "0x1456688345527bE1f37E9e627DA0837D6f08C925", - "test_mixed_sload_sstore_ETHFI": "0xfe0c30065b384f05761f15d0cc899d4f9f9cc0eb", - "test_mixed_sload_sstore_POLY": "0x9992ec3cf6a55b00978cddf2b27bc6882d88d1ec", - "test_mixed_sload_sstore_AOA": "0x9ab165d795019b6d8b3e971dda91071421305e5a", - "test_mixed_sload_sstore_STORJ": "0xB64ef51C888972c908CFacf59B47C1AfBC0Ab8aC", - "test_mixed_sload_sstore_MKR": "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2", - "test_mixed_sload_sstore_AMP": "0xfF20817765cB7F73d4Bde2e66e067e58d11095c2", - "test_mixed_sload_sstore_VRA": "0xF411903cbc70a74d22900a5DE66A2dda66507255", - "test_mixed_sload_sstore_GTC": "0xde30da39c46104798bb5aa3fe8b9e0e1f348163f", - "test_mixed_sload_sstore_FLOKI": "0x43F11c02439E2736800433B4594994Bd43Cd066D", - "test_mixed_sload_sstore_ALT": "0x8457CA5040ad67fdebbCC8EdCE889A335Bc0fbFB", - "test_mixed_sload_sstore_IMX": "0xf57e7e7c23978c3caec3c3548e3d615c346e79ff", - "test_mixed_sload_sstore_XYO": "0x55296f69f40ea6d20e478533c15A6b08B654E758", - "test_mixed_sload_sstore_REV": "0x2ef27bf41236bd859a95209e17a43fbd26851f92", - "test_mixed_sload_sstore_FUN": "0x419d0d8bdd9af5e606ae2232ed285aff190e711b", - "test_mixed_sload_sstore_CRV": "0xD533a949740bb3306d119CC777fa900bA034cd52", - "test_mixed_sload_sstore_CHZ": "0x3506424f91fd33084466f402d5d97f05f8e3b4af", - "test_mixed_sload_sstore_SMT": "0x78Eb8DC641077F049f910659b6d580E80dC4d237", - "test_mixed_sload_sstore_SNX": "0xC011A72400E58ecD99Ee497CF89E3775d4bd732F", - "test_mixed_sload_sstore_DENT": "0x3597bfD533a99c9aa083587B074434E61Eb0A258", - "test_mixed_sload_sstore_RNDR": "0x6De037ef9aD2725EB40118Bb1702EBb27e4Aeb24", - "test_mixed_sload_sstore_SNT": "0x744d70FDBe2Ba4CF95131626614a1763DF805B9E", - "test_mixed_sload_sstore_AXS": "0xBB0E17EF65F82Ab018d8EDd776e8DD940327B28b", - "test_mixed_sload_sstore_KNC": "0xdd974D5C2e2928deA5F71b9825b8b646686BD200", - "test_mixed_sload_sstore_WEPE": "0xccB365D2e11aE4D6d74715c680f56cf58bF4bF10", - "test_mixed_sload_sstore_ZETA": 
"0xf091867ec603a6628ed83d274e835539d82e9cc8", - "test_mixed_sload_sstore_LYM": "0xc690f7c7fcffa6a82b79fab7508c466fefdfc8c5", - "test_mixed_sload_sstore_nCASH": "0x809826cceAb68c387726af962713b64Cb5Cb3CCA", - "test_mixed_sload_sstore_LOOKS": "0xf4d2888d29D722226FafA5d9B24F9164c092421E", - "test_mixed_sload_sstore_Monfter/Monavale": "0x275f5ad03be0fa221b4c6649b8aee09a42d9412a", - "test_mixed_sload_sstore_cETH": "0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5", - "test_mixed_sload_sstore_SALT": "0x4156D3342D5c385a87D264F90653733592000581", - "test_mixed_sload_sstore_HOGE": "0xfAd45E47083e4607302aa43c65fB3106F1cd7607", - "test_mixed_sload_sstore_REN": "0x408e41876cCCDC0F92210600ef50372656052a38", - "test_mixed_sload_sstore_ENS": "0xC56b13EBBCffa67cfB7979b900B736b3fb480D78", - "test_mixed_sload_sstore_NEXO": "0xB62132e35a6c13ee1EE0f84dC5d40bad8d815206", - "test_mixed_sload_sstore_RFR": "0xD0929d411954c47438Dc1D871dd6081F5C5e149c", - "test_mixed_sload_sstore_COFI": "0x3137619705b5fc22a3048989F983905e456B59Ab", - "test_mixed_sload_sstore_SLP": "0xcc8fa225d80b9c7d42f96e9570156c65d6cAAa25", - "test_mixed_sload_sstore_FUEL": "0xea38eaa3c86c8f9b751533ba2e562deb9acded40", - "test_mixed_sload_sstore_ENA": "0x57e114B691Db790C35207b2e685D4A43181e6061", - "test_mixed_sload_sstore_AKITA": "0x3301Ee63Fb29F863f2333Bd4466acb46CD8323E6", - "test_mixed_sload_sstore_CVC": "0x41e5560054824ea6B0732E656e3Ad64E20e94e45", - "test_mixed_sload_sstore_IHT": "0xEda8B016efa8b1161208Cf041cD86972EEE0F31E", - "test_mixed_sload_sstore_ZSC": "0x7A41e0517a5ecA4FdbC7FbebA4D4c47B9fF6DC63", - "test_mixed_sload_sstore_cbETH": "0xBe9895146f7AF43049ca1c1AE358B0541Ea49704", - "test_mixed_sload_sstore_IMT": "0x13119e34e140097a507b07a5564bde1bc375d9e6" + "test_mixed_sload_sstore_IMT": "0x13119e34e140097a507b07a5564bde1bc375d9e6", + "test_sload_empty_erc20_balanceof_STR": "0x7c4d13a8e743b036f6ba71c5dcadfe4fc9aa7a17", + "test_sstore_erc20_approve_STR": "0x7c4d13a8e743b036f6ba71c5dcadfe4fc9aa7a17", + "test_mixed_sload_sstore_STR": "0x7c4d13a8e743b036f6ba71c5dcadfe4fc9aa7a17" } diff --git a/tests/benchmark/stateful/bloatnet/stubs_99.json b/tests/benchmark/stateful/bloatnet/stubs_99.json new file mode 100644 index 0000000000..f10cc6c5e2 --- /dev/null +++ b/tests/benchmark/stateful/bloatnet/stubs_99.json @@ -0,0 +1,299 @@ +{ + "test_sload_empty_erc20_balanceof_30GB_ERC20": "0x19fc17d87D946BBA47ca276f7b06Ee5737c4679C", + "test_sload_empty_erc20_balanceof_XEN": "0x06450dEe7FD2Fb8E39061434BAbCFC05599a6Fb8", + "test_sload_empty_erc20_balanceof_USDT": "0xdAC17F958D2ee523a2206206994597C13D831ec7", + "test_sload_empty_erc20_balanceof_USDC": "0xA0b86991C6218B36c1d19D4a2E9Eb0CE3606EB48", + "test_sload_empty_erc20_balanceof_LPT": "0x58b6A8a3302369DAEc383334672404Ee733AB239", + "test_sload_empty_erc20_balanceof_SHIB": "0x95aD61B0a150d79219dCF64E1E6Cc01f0B64C4cE", + "test_sload_empty_erc20_balanceof_WETH": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", + "test_sload_empty_erc20_balanceof_G-CRE": "0xa3Ee21c306A700E682AbcDfE9bAA6A08F3820419", + "test_sload_empty_erc20_balanceof_MEME": "0xB131F4A55907B10d1F0A50d8Ab8FA09EC342CD74", + "test_sload_empty_erc20_balanceof_OMG": "0xd26114cD6EE289AccF82350c8d8487fedB8A0C07", + "test_sload_empty_erc20_balanceof_MATIC": "0x7d1Afa7B718fb893DB30A3abc0Cfc608AaCfEbB0", + "test_sload_empty_erc20_balanceof_stETH": "0xae7ab96520DE3A18E5e111B5EaAb095312D7fE84", + "test_sload_empty_erc20_balanceof_DAI": "0x6B175474E89094C44Da98b954EedeAC495271d0F", + "test_sload_empty_erc20_balanceof_PEPE": 
"0x6982508145454Ce325dDbE47a25d4eC3d2311933", + "test_sload_empty_erc20_balanceof_old": "0x0cf0ee63788A0849FE5297F3407f701E122CC023", + "test_sload_empty_erc20_balanceof_BAT": "0x0D8775F648430679A709E98d2b0Cb6250d2887EF", + "test_sload_empty_erc20_balanceof_UNI": "0x1F9840a85d5aF5bf1D1762F925BdADdC4201F984", + "test_sload_empty_erc20_balanceof_AMB": "0x4dc3643Dbc642b72C158E7F3d2FF232df61cB6CE", + "test_sload_empty_erc20_balanceof_HEX": "0x2b591e99afE9f32eAA6214f7B7629768c40eEb39", + "test_sload_empty_erc20_balanceof_CRO": "0xa0b73e1ff0b80914ab6fe0444e65848c4c34450b", + "test_sload_empty_erc20_balanceof_UCASH": "0x92e52a1A235d9A103D970901066CE910AAceFD37", + "test_sload_empty_erc20_balanceof_BNB": "0xB8c77482e45F1F44dE1745F52C74426C631bDd52", + "test_sload_empty_erc20_balanceof_GSE": "0xe530441f4f73bdb6dc2fa5af7c3fc5fd551ec838", + "test_sload_empty_erc20_balanceof_MANA": "0x0F5D2FB29fb7d3cFeE444A200298f468908cC942", + "test_sload_empty_erc20_balanceof_OCN": "0x4092678e4E78230F46A1534C0fBC8Fa39780892B", + "test_sload_empty_erc20_balanceof_EIGEN": "0xEC53BF9167F50cDEb3aE105F56099AaAb9061F83", + "test_sload_empty_erc20_balanceof_COMP": "0xc00e94Cb662C3520282E6f5717214004A7f26888", + "test_sload_empty_erc20_balanceof_cUSDC": "0x39AA39c021dfbaE8faC545936693aC917d5E7563", + "test_sload_empty_erc20_balanceof_sMEME": "0xc059A531B4234d05e9EF4aC51028f7E6156E2CcE", + "test_sload_empty_erc20_balanceof_SAND": "0x3845badade8e6dff049820680d1f14bd3903a5d0", + "test_sload_empty_erc20_balanceof_AAVE": "0x7Fc66500c84A76Ad7e9c93437bFc5AC33E2DDAe9", + "test_sload_empty_erc20_balanceof_ZRX": "0xE41d2489571d322189246DaFA5ebDe1F4699F498", + "test_sload_empty_erc20_balanceof_KOK": "0x9B9647431632AF44be02ddd22477Ed94d14AacAa", + "test_sload_empty_erc20_balanceof_APE": "0x4d224452801ACEd8B2F0aebe155379bb5D594381", + "test_sload_empty_erc20_balanceof_SAI": "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359", + "test_sload_empty_erc20_balanceof_GRT": "0xc944E90C64B2c07662A292be6244BDf05Cda44a7", + "test_sload_empty_erc20_balanceof_LRC": "0xBBbbCA6A901c926F240b89EacB641d8Aec7AEafD", + "test_sload_empty_erc20_balanceof_ELON": "0x761D38e5DDf6ccf6Cf7C55759d5210750B5D60F3", + "test_sload_empty_erc20_balanceof_QNT": "0x4a220E6096B25EADb88358cb44068A3248254675", + "test_sload_empty_erc20_balanceof_ONDO": "0xfAbA6f8e4a5E8Ab82F62fe7C39859FA577269BE3", + "test_sload_empty_erc20_balanceof_ENJ": "0xF629cBd94d3791c9250152BD8dfBDF380E2a3B9c", + "test_sload_empty_erc20_balanceof_FET": "0x1D287CC25dAD7cCaF76a26bc660c5F7C8E2a05BD", + "test_sload_empty_erc20_balanceof_eETH": "0x6c5024Cd4F8A59110119C56f8933403A539555EB", + "test_sload_empty_erc20_balanceof_XMX": "0x0F8c45B896784A1E408526B9300519ef8660209c", + "test_sload_empty_erc20_balanceof_FTI": "0x943ed852Dadb5C3938ECdC6883718df8142de4C8", + "test_sload_empty_erc20_balanceof_WBTC": "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", + "test_sload_empty_erc20_balanceof_LEND": "0x80fB784B7eD66730e8b1DBd9820aFD29931aab03", + "test_sload_empty_erc20_balanceof_ELEC": "0xd49ff13661451313ca1553fd6954bd1d9b6e02b9", + "test_sload_empty_erc20_balanceof_SUSHI": "0x6B3595068778DD592e39A122f4f5a5CF09C90fE2", + "test_sload_empty_erc20_balanceof_HOT": "0x6c6EE5e31d828De241282B9606C8e98Ea48526E2", + "test_sload_empty_erc20_balanceof_MITx": "0x4a527d8fc13c5203ab24ba0944f4cb14658d1db6", + "test_sload_empty_erc20_balanceof_1INCH": "0x111111111117dC0aa78b770fA6A738034120C302", + "test_sload_empty_erc20_balanceof_USDP": "0x1456688345527bE1f37E9e627DA0837D6f08C925", + "test_sload_empty_erc20_balanceof_ETHFI": 
"0xfe0c30065b384f05761f15d0cc899d4f9f9cc0eb", + "test_sload_empty_erc20_balanceof_POLY": "0x9992ec3cf6a55b00978cddf2b27bc6882d88d1ec", + "test_sload_empty_erc20_balanceof_AOA": "0x9ab165d795019b6d8b3e971dda91071421305e5a", + "test_sload_empty_erc20_balanceof_STORJ": "0xB64ef51C888972c908CFacf59B47C1AfBC0Ab8aC", + "test_sload_empty_erc20_balanceof_MKR": "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2", + "test_sload_empty_erc20_balanceof_AMP": "0xfF20817765cB7F73d4Bde2e66e067e58d11095c2", + "test_sload_empty_erc20_balanceof_VRA": "0xF411903cbc70a74d22900a5DE66A2dda66507255", + "test_sload_empty_erc20_balanceof_GTC": "0xde30da39c46104798bb5aa3fe8b9e0e1f348163f", + "test_sload_empty_erc20_balanceof_FLOKI": "0x43F11c02439E2736800433B4594994Bd43Cd066D", + "test_sload_empty_erc20_balanceof_ALT": "0x8457CA5040ad67fdebbCC8EdCE889A335Bc0fbFB", + "test_sload_empty_erc20_balanceof_IMX": "0xf57e7e7c23978c3caec3c3548e3d615c346e79ff", + "test_sload_empty_erc20_balanceof_XYO": "0x55296f69f40ea6d20e478533c15A6b08B654E758", + "test_sload_empty_erc20_balanceof_REV": "0x2ef27bf41236bd859a95209e17a43fbd26851f92", + "test_sload_empty_erc20_balanceof_FUN": "0x419d0d8bdd9af5e606ae2232ed285aff190e711b", + "test_sload_empty_erc20_balanceof_CRV": "0xD533a949740bb3306d119CC777fa900bA034cd52", + "test_sload_empty_erc20_balanceof_CHZ": "0x3506424f91fd33084466f402d5d97f05f8e3b4af", + "test_sload_empty_erc20_balanceof_SMT": "0x78Eb8DC641077F049f910659b6d580E80dC4d237", + "test_sload_empty_erc20_balanceof_SNX": "0xC011A72400E58ecD99Ee497CF89E3775d4bd732F", + "test_sload_empty_erc20_balanceof_DENT": "0x3597bfD533a99c9aa083587B074434E61Eb0A258", + "test_sload_empty_erc20_balanceof_RNDR": "0x6De037ef9aD2725EB40118Bb1702EBb27e4Aeb24", + "test_sload_empty_erc20_balanceof_SNT": "0x744d70FDBe2Ba4CF95131626614a1763DF805B9E", + "test_sload_empty_erc20_balanceof_AXS": "0xBB0E17EF65F82Ab018d8EDd776e8DD940327B28b", + "test_sload_empty_erc20_balanceof_KNC": "0xdd974D5C2e2928deA5F71b9825b8b646686BD200", + "test_sload_empty_erc20_balanceof_WEPE": "0xccB365D2e11aE4D6d74715c680f56cf58bF4bF10", + "test_sload_empty_erc20_balanceof_ZETA": "0xf091867ec603a6628ed83d274e835539d82e9cc8", + "test_sload_empty_erc20_balanceof_LYM": "0xc690f7c7fcffa6a82b79fab7508c466fefdfc8c5", + "test_sload_empty_erc20_balanceof_nCASH": "0x809826cceAb68c387726af962713b64Cb5Cb3CCA", + "test_sload_empty_erc20_balanceof_LOOKS": "0xf4d2888d29D722226FafA5d9B24F9164c092421E", + "test_sload_empty_erc20_balanceof_Monfter/Monavale": "0x275f5ad03be0fa221b4c6649b8aee09a42d9412a", + "test_sload_empty_erc20_balanceof_cETH": "0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5", + "test_sload_empty_erc20_balanceof_SALT": "0x4156D3342D5c385a87D264F90653733592000581", + "test_sload_empty_erc20_balanceof_HOGE": "0xfAd45E47083e4607302aa43c65fB3106F1cd7607", + "test_sload_empty_erc20_balanceof_REN": "0x408e41876cCCDC0F92210600ef50372656052a38", + "test_sload_empty_erc20_balanceof_ENS": "0xC56b13EBBCffa67cfB7979b900B736b3fb480D78", + "test_sload_empty_erc20_balanceof_NEXO": "0xB62132e35a6c13ee1EE0f84dC5d40bad8d815206", + "test_sload_empty_erc20_balanceof_RFR": "0xD0929d411954c47438Dc1D871dd6081F5C5e149c", + "test_sload_empty_erc20_balanceof_COFI": "0x3137619705b5fc22a3048989F983905e456B59Ab", + "test_sload_empty_erc20_balanceof_SLP": "0xcc8fa225d80b9c7d42f96e9570156c65d6cAAa25", + "test_sload_empty_erc20_balanceof_FUEL": "0xea38eaa3c86c8f9b751533ba2e562deb9acded40", + "test_sload_empty_erc20_balanceof_ENA": "0x57e114B691Db790C35207b2e685D4A43181e6061", + "test_sload_empty_erc20_balanceof_AKITA": 
"0x3301Ee63Fb29F863f2333Bd4466acb46CD8323E6", + "test_sload_empty_erc20_balanceof_CVC": "0x41e5560054824ea6B0732E656e3Ad64E20e94e45", + "test_sload_empty_erc20_balanceof_IHT": "0xEda8B016efa8b1161208Cf041cD86972EEE0F31E", + "test_sload_empty_erc20_balanceof_ZSC": "0x7A41e0517a5ecA4FdbC7FbebA4D4c47B9fF6DC63", + "test_sload_empty_erc20_balanceof_cbETH": "0xBe9895146f7AF43049ca1c1AE358B0541Ea49704", + "test_sload_empty_erc20_balanceof_IMT": "0x13119e34e140097a507b07a5564bde1bc375d9e6", + "test_sstore_erc20_approve_30GB_ERC20": "0x19fc17d87D946BBA47ca276f7b06Ee5737c4679C", + "test_sstore_erc20_approve_XEN": "0x06450dEe7FD2Fb8E39061434BAbCFC05599a6Fb8", + "test_sstore_erc20_approve_USDT": "0xdAC17F958D2ee523a2206206994597C13D831ec7", + "test_sstore_erc20_approve_USDC": "0xA0b86991C6218B36c1d19D4a2E9Eb0CE3606EB48", + "test_sstore_erc20_approve_LPT": "0x58b6A8a3302369DAEc383334672404Ee733AB239", + "test_sstore_erc20_approve_SHIB": "0x95aD61B0a150d79219dCF64E1E6Cc01f0B64C4cE", + "test_sstore_erc20_approve_WETH": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", + "test_sstore_erc20_approve_G-CRE": "0xa3Ee21c306A700E682AbcDfE9bAA6A08F3820419", + "test_sstore_erc20_approve_MEME": "0xB131F4A55907B10d1F0A50d8Ab8FA09EC342CD74", + "test_sstore_erc20_approve_OMG": "0xd26114cD6EE289AccF82350c8d8487fedB8A0C07", + "test_sstore_erc20_approve_MATIC": "0x7d1Afa7B718fb893DB30A3abc0Cfc608AaCfEbB0", + "test_sstore_erc20_approve_stETH": "0xae7ab96520DE3A18E5e111B5EaAb095312D7fE84", + "test_sstore_erc20_approve_DAI": "0x6B175474E89094C44Da98b954EedeAC495271d0F", + "test_sstore_erc20_approve_PEPE": "0x6982508145454Ce325dDbE47a25d4eC3d2311933", + "test_sstore_erc20_approve_old": "0x0cf0ee63788A0849FE5297F3407f701E122CC023", + "test_sstore_erc20_approve_BAT": "0x0D8775F648430679A709E98d2b0Cb6250d2887EF", + "test_sstore_erc20_approve_UNI": "0x1F9840a85d5aF5bf1D1762F925BdADdC4201F984", + "test_sstore_erc20_approve_AMB": "0x4dc3643Dbc642b72C158E7F3d2FF232df61cB6CE", + "test_sstore_erc20_approve_HEX": "0x2b591e99afE9f32eAA6214f7B7629768c40eEb39", + "test_sstore_erc20_approve_CRO": "0xa0b73e1ff0b80914ab6fe0444e65848c4c34450b", + "test_sstore_erc20_approve_UCASH": "0x92e52a1A235d9A103D970901066CE910AAceFD37", + "test_sstore_erc20_approve_BNB": "0xB8c77482e45F1F44dE1745F52C74426C631bDd52", + "test_sstore_erc20_approve_GSE": "0xe530441f4f73bdb6dc2fa5af7c3fc5fd551ec838", + "test_sstore_erc20_approve_MANA": "0x0F5D2FB29fb7d3cFeE444A200298f468908cC942", + "test_sstore_erc20_approve_OCN": "0x4092678e4E78230F46A1534C0fBC8Fa39780892B", + "test_sstore_erc20_approve_EIGEN": "0xEC53BF9167F50cDEb3aE105F56099AaAb9061F83", + "test_sstore_erc20_approve_COMP": "0xc00e94Cb662C3520282E6f5717214004A7f26888", + "test_sstore_erc20_approve_cUSDC": "0x39AA39c021dfbaE8faC545936693aC917d5E7563", + "test_sstore_erc20_approve_sMEME": "0xc059A531B4234d05e9EF4aC51028f7E6156E2CcE", + "test_sstore_erc20_approve_SAND": "0x3845badade8e6dff049820680d1f14bd3903a5d0", + "test_sstore_erc20_approve_AAVE": "0x7Fc66500c84A76Ad7e9c93437bFc5AC33E2DDAe9", + "test_sstore_erc20_approve_ZRX": "0xE41d2489571d322189246DaFA5ebDe1F4699F498", + "test_sstore_erc20_approve_KOK": "0x9B9647431632AF44be02ddd22477Ed94d14AacAa", + "test_sstore_erc20_approve_APE": "0x4d224452801ACEd8B2F0aebe155379bb5D594381", + "test_sstore_erc20_approve_SAI": "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359", + "test_sstore_erc20_approve_GRT": "0xc944E90C64B2c07662A292be6244BDf05Cda44a7", + "test_sstore_erc20_approve_LRC": "0xBBbbCA6A901c926F240b89EacB641d8Aec7AEafD", + "test_sstore_erc20_approve_ELON": 
"0x761D38e5DDf6ccf6Cf7C55759d5210750B5D60F3", + "test_sstore_erc20_approve_QNT": "0x4a220E6096B25EADb88358cb44068A3248254675", + "test_sstore_erc20_approve_ONDO": "0xfAbA6f8e4a5E8Ab82F62fe7C39859FA577269BE3", + "test_sstore_erc20_approve_ENJ": "0xF629cBd94d3791c9250152BD8dfBDF380E2a3B9c", + "test_sstore_erc20_approve_FET": "0x1D287CC25dAD7cCaF76a26bc660c5F7C8E2a05BD", + "test_sstore_erc20_approve_eETH": "0x6c5024Cd4F8A59110119C56f8933403A539555EB", + "test_sstore_erc20_approve_XMX": "0x0F8c45B896784A1E408526B9300519ef8660209c", + "test_sstore_erc20_approve_FTI": "0x943ed852Dadb5C3938ECdC6883718df8142de4C8", + "test_sstore_erc20_approve_WBTC": "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", + "test_sstore_erc20_approve_LEND": "0x80fB784B7eD66730e8b1DBd9820aFD29931aab03", + "test_sstore_erc20_approve_ELEC": "0xd49ff13661451313ca1553fd6954bd1d9b6e02b9", + "test_sstore_erc20_approve_SUSHI": "0x6B3595068778DD592e39A122f4f5a5CF09C90fE2", + "test_sstore_erc20_approve_HOT": "0x6c6EE5e31d828De241282B9606C8e98Ea48526E2", + "test_sstore_erc20_approve_MITx": "0x4a527d8fc13c5203ab24ba0944f4cb14658d1db6", + "test_sstore_erc20_approve_1INCH": "0x111111111117dC0aa78b770fA6A738034120C302", + "test_sstore_erc20_approve_USDP": "0x1456688345527bE1f37E9e627DA0837D6f08C925", + "test_sstore_erc20_approve_ETHFI": "0xfe0c30065b384f05761f15d0cc899d4f9f9cc0eb", + "test_sstore_erc20_approve_POLY": "0x9992ec3cf6a55b00978cddf2b27bc6882d88d1ec", + "test_sstore_erc20_approve_AOA": "0x9ab165d795019b6d8b3e971dda91071421305e5a", + "test_sstore_erc20_approve_STORJ": "0xB64ef51C888972c908CFacf59B47C1AfBC0Ab8aC", + "test_sstore_erc20_approve_MKR": "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2", + "test_sstore_erc20_approve_AMP": "0xfF20817765cB7F73d4Bde2e66e067e58d11095c2", + "test_sstore_erc20_approve_VRA": "0xF411903cbc70a74d22900a5DE66A2dda66507255", + "test_sstore_erc20_approve_GTC": "0xde30da39c46104798bb5aa3fe8b9e0e1f348163f", + "test_sstore_erc20_approve_FLOKI": "0x43F11c02439E2736800433B4594994Bd43Cd066D", + "test_sstore_erc20_approve_ALT": "0x8457CA5040ad67fdebbCC8EdCE889A335Bc0fbFB", + "test_sstore_erc20_approve_IMX": "0xf57e7e7c23978c3caec3c3548e3d615c346e79ff", + "test_sstore_erc20_approve_XYO": "0x55296f69f40ea6d20e478533c15A6b08B654E758", + "test_sstore_erc20_approve_REV": "0x2ef27bf41236bd859a95209e17a43fbd26851f92", + "test_sstore_erc20_approve_FUN": "0x419d0d8bdd9af5e606ae2232ed285aff190e711b", + "test_sstore_erc20_approve_CRV": "0xD533a949740bb3306d119CC777fa900bA034cd52", + "test_sstore_erc20_approve_CHZ": "0x3506424f91fd33084466f402d5d97f05f8e3b4af", + "test_sstore_erc20_approve_SMT": "0x78Eb8DC641077F049f910659b6d580E80dC4d237", + "test_sstore_erc20_approve_SNX": "0xC011A72400E58ecD99Ee497CF89E3775d4bd732F", + "test_sstore_erc20_approve_DENT": "0x3597bfD533a99c9aa083587B074434E61Eb0A258", + "test_sstore_erc20_approve_RNDR": "0x6De037ef9aD2725EB40118Bb1702EBb27e4Aeb24", + "test_sstore_erc20_approve_SNT": "0x744d70FDBe2Ba4CF95131626614a1763DF805B9E", + "test_sstore_erc20_approve_AXS": "0xBB0E17EF65F82Ab018d8EDd776e8DD940327B28b", + "test_sstore_erc20_approve_KNC": "0xdd974D5C2e2928deA5F71b9825b8b646686BD200", + "test_sstore_erc20_approve_WEPE": "0xccB365D2e11aE4D6d74715c680f56cf58bF4bF10", + "test_sstore_erc20_approve_ZETA": "0xf091867ec603a6628ed83d274e835539d82e9cc8", + "test_sstore_erc20_approve_LYM": "0xc690f7c7fcffa6a82b79fab7508c466fefdfc8c5", + "test_sstore_erc20_approve_nCASH": "0x809826cceAb68c387726af962713b64Cb5Cb3CCA", + "test_sstore_erc20_approve_LOOKS": "0xf4d2888d29D722226FafA5d9B24F9164c092421E", 
+ "test_sstore_erc20_approve_Monfter/Monavale": "0x275f5ad03be0fa221b4c6649b8aee09a42d9412a", + "test_sstore_erc20_approve_cETH": "0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5", + "test_sstore_erc20_approve_SALT": "0x4156D3342D5c385a87D264F90653733592000581", + "test_sstore_erc20_approve_HOGE": "0xfAd45E47083e4607302aa43c65fB3106F1cd7607", + "test_sstore_erc20_approve_REN": "0x408e41876cCCDC0F92210600ef50372656052a38", + "test_sstore_erc20_approve_ENS": "0xC56b13EBBCffa67cfB7979b900B736b3fb480D78", + "test_sstore_erc20_approve_NEXO": "0xB62132e35a6c13ee1EE0f84dC5d40bad8d815206", + "test_sstore_erc20_approve_RFR": "0xD0929d411954c47438Dc1D871dd6081F5C5e149c", + "test_sstore_erc20_approve_COFI": "0x3137619705b5fc22a3048989F983905e456B59Ab", + "test_sstore_erc20_approve_SLP": "0xcc8fa225d80b9c7d42f96e9570156c65d6cAAa25", + "test_sstore_erc20_approve_FUEL": "0xea38eaa3c86c8f9b751533ba2e562deb9acded40", + "test_sstore_erc20_approve_ENA": "0x57e114B691Db790C35207b2e685D4A43181e6061", + "test_sstore_erc20_approve_AKITA": "0x3301Ee63Fb29F863f2333Bd4466acb46CD8323E6", + "test_sstore_erc20_approve_CVC": "0x41e5560054824ea6B0732E656e3Ad64E20e94e45", + "test_sstore_erc20_approve_IHT": "0xEda8B016efa8b1161208Cf041cD86972EEE0F31E", + "test_sstore_erc20_approve_ZSC": "0x7A41e0517a5ecA4FdbC7FbebA4D4c47B9fF6DC63", + "test_sstore_erc20_approve_cbETH": "0xBe9895146f7AF43049ca1c1AE358B0541Ea49704", + "test_sstore_erc20_approve_IMT": "0x13119e34e140097a507b07a5564bde1bc375d9e6", + "test_mixed_sload_sstore_30GB_ERC20": "0x19fc17d87D946BBA47ca276f7b06Ee5737c4679C", + "test_mixed_sload_sstore_XEN": "0x06450dEe7FD2Fb8E39061434BAbCFC05599a6Fb8", + "test_mixed_sload_sstore_USDT": "0xdAC17F958D2ee523a2206206994597C13D831ec7", + "test_mixed_sload_sstore_USDC": "0xA0b86991C6218B36c1d19D4a2E9Eb0CE3606EB48", + "test_mixed_sload_sstore_LPT": "0x58b6A8a3302369DAEc383334672404Ee733AB239", + "test_mixed_sload_sstore_SHIB": "0x95aD61B0a150d79219dCF64E1E6Cc01f0B64C4cE", + "test_mixed_sload_sstore_WETH": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", + "test_mixed_sload_sstore_G-CRE": "0xa3Ee21c306A700E682AbcDfE9bAA6A08F3820419", + "test_mixed_sload_sstore_MEME": "0xB131F4A55907B10d1F0A50d8Ab8FA09EC342CD74", + "test_mixed_sload_sstore_OMG": "0xd26114cD6EE289AccF82350c8d8487fedB8A0C07", + "test_mixed_sload_sstore_MATIC": "0x7d1Afa7B718fb893DB30A3abc0Cfc608AaCfEbB0", + "test_mixed_sload_sstore_stETH": "0xae7ab96520DE3A18E5e111B5EaAb095312D7fE84", + "test_mixed_sload_sstore_DAI": "0x6B175474E89094C44Da98b954EedeAC495271d0F", + "test_mixed_sload_sstore_PEPE": "0x6982508145454Ce325dDbE47a25d4eC3d2311933", + "test_mixed_sload_sstore_old": "0x0cf0ee63788A0849FE5297F3407f701E122CC023", + "test_mixed_sload_sstore_BAT": "0x0D8775F648430679A709E98d2b0Cb6250d2887EF", + "test_mixed_sload_sstore_UNI": "0x1F9840a85d5aF5bf1D1762F925BdADdC4201F984", + "test_mixed_sload_sstore_AMB": "0x4dc3643Dbc642b72C158E7F3d2FF232df61cB6CE", + "test_mixed_sload_sstore_HEX": "0x2b591e99afE9f32eAA6214f7B7629768c40eEb39", + "test_mixed_sload_sstore_CRO": "0xa0b73e1ff0b80914ab6fe0444e65848c4c34450b", + "test_mixed_sload_sstore_UCASH": "0x92e52a1A235d9A103D970901066CE910AAceFD37", + "test_mixed_sload_sstore_BNB": "0xB8c77482e45F1F44dE1745F52C74426C631bDd52", + "test_mixed_sload_sstore_GSE": "0xe530441f4f73bdb6dc2fa5af7c3fc5fd551ec838", + "test_mixed_sload_sstore_MANA": "0x0F5D2FB29fb7d3cFeE444A200298f468908cC942", + "test_mixed_sload_sstore_OCN": "0x4092678e4E78230F46A1534C0fBC8Fa39780892B", + "test_mixed_sload_sstore_EIGEN": 
"0xEC53BF9167F50cDEb3aE105F56099AaAb9061F83", + "test_mixed_sload_sstore_COMP": "0xc00e94Cb662C3520282E6f5717214004A7f26888", + "test_mixed_sload_sstore_cUSDC": "0x39AA39c021dfbaE8faC545936693aC917d5E7563", + "test_mixed_sload_sstore_sMEME": "0xc059A531B4234d05e9EF4aC51028f7E6156E2CcE", + "test_mixed_sload_sstore_SAND": "0x3845badade8e6dff049820680d1f14bd3903a5d0", + "test_mixed_sload_sstore_AAVE": "0x7Fc66500c84A76Ad7e9c93437bFc5AC33E2DDAe9", + "test_mixed_sload_sstore_ZRX": "0xE41d2489571d322189246DaFA5ebDe1F4699F498", + "test_mixed_sload_sstore_KOK": "0x9B9647431632AF44be02ddd22477Ed94d14AacAa", + "test_mixed_sload_sstore_APE": "0x4d224452801ACEd8B2F0aebe155379bb5D594381", + "test_mixed_sload_sstore_SAI": "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359", + "test_mixed_sload_sstore_GRT": "0xc944E90C64B2c07662A292be6244BDf05Cda44a7", + "test_mixed_sload_sstore_LRC": "0xBBbbCA6A901c926F240b89EacB641d8Aec7AEafD", + "test_mixed_sload_sstore_ELON": "0x761D38e5DDf6ccf6Cf7C55759d5210750B5D60F3", + "test_mixed_sload_sstore_QNT": "0x4a220E6096B25EADb88358cb44068A3248254675", + "test_mixed_sload_sstore_ONDO": "0xfAbA6f8e4a5E8Ab82F62fe7C39859FA577269BE3", + "test_mixed_sload_sstore_ENJ": "0xF629cBd94d3791c9250152BD8dfBDF380E2a3B9c", + "test_mixed_sload_sstore_FET": "0x1D287CC25dAD7cCaF76a26bc660c5F7C8E2a05BD", + "test_mixed_sload_sstore_eETH": "0x6c5024Cd4F8A59110119C56f8933403A539555EB", + "test_mixed_sload_sstore_XMX": "0x0F8c45B896784A1E408526B9300519ef8660209c", + "test_mixed_sload_sstore_FTI": "0x943ed852Dadb5C3938ECdC6883718df8142de4C8", + "test_mixed_sload_sstore_WBTC": "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", + "test_mixed_sload_sstore_LEND": "0x80fB784B7eD66730e8b1DBd9820aFD29931aab03", + "test_mixed_sload_sstore_ELEC": "0xd49ff13661451313ca1553fd6954bd1d9b6e02b9", + "test_mixed_sload_sstore_SUSHI": "0x6B3595068778DD592e39A122f4f5a5CF09C90fE2", + "test_mixed_sload_sstore_HOT": "0x6c6EE5e31d828De241282B9606C8e98Ea48526E2", + "test_mixed_sload_sstore_MITx": "0x4a527d8fc13c5203ab24ba0944f4cb14658d1db6", + "test_mixed_sload_sstore_1INCH": "0x111111111117dC0aa78b770fA6A738034120C302", + "test_mixed_sload_sstore_USDP": "0x1456688345527bE1f37E9e627DA0837D6f08C925", + "test_mixed_sload_sstore_ETHFI": "0xfe0c30065b384f05761f15d0cc899d4f9f9cc0eb", + "test_mixed_sload_sstore_POLY": "0x9992ec3cf6a55b00978cddf2b27bc6882d88d1ec", + "test_mixed_sload_sstore_AOA": "0x9ab165d795019b6d8b3e971dda91071421305e5a", + "test_mixed_sload_sstore_STORJ": "0xB64ef51C888972c908CFacf59B47C1AfBC0Ab8aC", + "test_mixed_sload_sstore_MKR": "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2", + "test_mixed_sload_sstore_AMP": "0xfF20817765cB7F73d4Bde2e66e067e58d11095c2", + "test_mixed_sload_sstore_VRA": "0xF411903cbc70a74d22900a5DE66A2dda66507255", + "test_mixed_sload_sstore_GTC": "0xde30da39c46104798bb5aa3fe8b9e0e1f348163f", + "test_mixed_sload_sstore_FLOKI": "0x43F11c02439E2736800433B4594994Bd43Cd066D", + "test_mixed_sload_sstore_ALT": "0x8457CA5040ad67fdebbCC8EdCE889A335Bc0fbFB", + "test_mixed_sload_sstore_IMX": "0xf57e7e7c23978c3caec3c3548e3d615c346e79ff", + "test_mixed_sload_sstore_XYO": "0x55296f69f40ea6d20e478533c15A6b08B654E758", + "test_mixed_sload_sstore_REV": "0x2ef27bf41236bd859a95209e17a43fbd26851f92", + "test_mixed_sload_sstore_FUN": "0x419d0d8bdd9af5e606ae2232ed285aff190e711b", + "test_mixed_sload_sstore_CRV": "0xD533a949740bb3306d119CC777fa900bA034cd52", + "test_mixed_sload_sstore_CHZ": "0x3506424f91fd33084466f402d5d97f05f8e3b4af", + "test_mixed_sload_sstore_SMT": "0x78Eb8DC641077F049f910659b6d580E80dC4d237", + 
"test_mixed_sload_sstore_SNX": "0xC011A72400E58ecD99Ee497CF89E3775d4bd732F", + "test_mixed_sload_sstore_DENT": "0x3597bfD533a99c9aa083587B074434E61Eb0A258", + "test_mixed_sload_sstore_RNDR": "0x6De037ef9aD2725EB40118Bb1702EBb27e4Aeb24", + "test_mixed_sload_sstore_SNT": "0x744d70FDBe2Ba4CF95131626614a1763DF805B9E", + "test_mixed_sload_sstore_AXS": "0xBB0E17EF65F82Ab018d8EDd776e8DD940327B28b", + "test_mixed_sload_sstore_KNC": "0xdd974D5C2e2928deA5F71b9825b8b646686BD200", + "test_mixed_sload_sstore_WEPE": "0xccB365D2e11aE4D6d74715c680f56cf58bF4bF10", + "test_mixed_sload_sstore_ZETA": "0xf091867ec603a6628ed83d274e835539d82e9cc8", + "test_mixed_sload_sstore_LYM": "0xc690f7c7fcffa6a82b79fab7508c466fefdfc8c5", + "test_mixed_sload_sstore_nCASH": "0x809826cceAb68c387726af962713b64Cb5Cb3CCA", + "test_mixed_sload_sstore_LOOKS": "0xf4d2888d29D722226FafA5d9B24F9164c092421E", + "test_mixed_sload_sstore_Monfter/Monavale": "0x275f5ad03be0fa221b4c6649b8aee09a42d9412a", + "test_mixed_sload_sstore_cETH": "0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5", + "test_mixed_sload_sstore_SALT": "0x4156D3342D5c385a87D264F90653733592000581", + "test_mixed_sload_sstore_HOGE": "0xfAd45E47083e4607302aa43c65fB3106F1cd7607", + "test_mixed_sload_sstore_REN": "0x408e41876cCCDC0F92210600ef50372656052a38", + "test_mixed_sload_sstore_ENS": "0xC56b13EBBCffa67cfB7979b900B736b3fb480D78", + "test_mixed_sload_sstore_NEXO": "0xB62132e35a6c13ee1EE0f84dC5d40bad8d815206", + "test_mixed_sload_sstore_RFR": "0xD0929d411954c47438Dc1D871dd6081F5C5e149c", + "test_mixed_sload_sstore_COFI": "0x3137619705b5fc22a3048989F983905e456B59Ab", + "test_mixed_sload_sstore_SLP": "0xcc8fa225d80b9c7d42f96e9570156c65d6cAAa25", + "test_mixed_sload_sstore_FUEL": "0xea38eaa3c86c8f9b751533ba2e562deb9acded40", + "test_mixed_sload_sstore_ENA": "0x57e114B691Db790C35207b2e685D4A43181e6061", + "test_mixed_sload_sstore_AKITA": "0x3301Ee63Fb29F863f2333Bd4466acb46CD8323E6", + "test_mixed_sload_sstore_CVC": "0x41e5560054824ea6B0732E656e3Ad64E20e94e45", + "test_mixed_sload_sstore_IHT": "0xEda8B016efa8b1161208Cf041cD86972EEE0F31E", + "test_mixed_sload_sstore_ZSC": "0x7A41e0517a5ecA4FdbC7FbebA4D4c47B9fF6DC63", + "test_mixed_sload_sstore_cbETH": "0xBe9895146f7AF43049ca1c1AE358B0541Ea49704", + "test_mixed_sload_sstore_IMT": "0x13119e34e140097a507b07a5564bde1bc375d9e6" +} From c5f1dc7cfdab79cea632ff7d4e2d0bdac4c67001 Mon Sep 17 00:00:00 2001 From: XaxaxaX Date: Sat, 7 Feb 2026 03:33:23 +0530 Subject: [PATCH 135/154] docs: Update package names from ethereum_test_* to execution_testing.* (#1793) * docs: update package names from ethereum_test_* to execution_testing.* Update documentation to reflect package renaming from ethereum_test_* to execution_testing.* namespace (follow-up to PR #1654). Changes: - Update module references in eip_testing_checklist_template.md - Update directory structure in repository_overview.md All module references now use the unified execution_testing.* namespace. 
* chore: hotfix for getting started docs update post-weld --------- Co-authored-by: fselmo --- docs/getting_started/repository_overview.md | 38 +++++++++---------- .../eip_testing_checklist_template.md | 16 ++++---- 2 files changed, 27 insertions(+), 27 deletions(-) diff --git a/docs/getting_started/repository_overview.md b/docs/getting_started/repository_overview.md index 669a464707..ff08e51f13 100644 --- a/docs/getting_started/repository_overview.md +++ b/docs/getting_started/repository_overview.md @@ -4,43 +4,43 @@ The most relevant folders and files in the repo are: ```text -📁 execution-test-specs/ -├─╴📁 tests/ # test cases -│ ├── 📁 eips/ -│ ├── 📁 vm/ +📁 execution-specs/ +├─╴📁 tests/ # test cases organized by fork +│ ├── 📁 amsterdam/ +│ ├── 📁 osaka/ +│ ├── 📁 prague/ │ └── 📁 ... ├─╴📁 fixtures/ # default fixture output dir │ ├── 📁 blockchain_tests/ │ ├── 📁 blockchain_tests_engine/ │ ├── 📁 state_tests/ │ └── 📁 ... -├─╴📁 src/ # library & framework packages -│ ├── 📁 ethereum_test_fork/ -│ ├── 📁 ethereum_test_tools/ +├─╴📁 packages/ # library & framework packages +│ └── 📁 testing/ +│ └── 📁 src/ +│ └── 📁 execution_testing/ +├─╴📁 src/ # execution spec packages +│ ├── 📁 ethereum/ │ └── 📁 ... ├─╴📁 docs/ # markdown documentation -│ ├── 📁 getting_started -│ ├── 📁 dev +│ ├── 📁 getting_started/ +│ ├── 📁 dev/ │ └── 📁 ... -├─╴📁 .vscode/ # visual studio code config -│ ├── 📄 settings.recommended.json # copy to settings.json -│ ├── 📄 launch.recommended.json -│ └── 📄 extensions.json └── 📄 whitelist.txt # spellcheck dictionary ``` #### `tests/` -Contains the implementation of the Ethereum consensus tests available in this repository. +Contains the implementation of the Ethereum consensus tests available in this repository, organized by fork. + +#### `packages/` + +Contains the `execution_testing` package which provides tools to define test cases and to interface with the `evm t8n` command. Additionally, it contains packages that enable test case execution by customizing pytest which acts as the test framework. #### `src/` -Contains various packages that help to define test cases and to interface with the `evm t8n` command. Additionally, it contains some packages that enable test case execution by customizing pytest which acts as the test framework. +Contains the Ethereum execution spec packages. #### `docs/` Contains documentation configuration and source files. - -#### `.vscode/` - -See [VS Code Setup](./setup_vs_code.md). diff --git a/docs/writing_tests/checklist_templates/eip_testing_checklist_template.md b/docs/writing_tests/checklist_templates/eip_testing_checklist_template.md index 57520b2a4f..05b9616b7d 100644 --- a/docs/writing_tests/checklist_templates/eip_testing_checklist_template.md +++ b/docs/writing_tests/checklist_templates/eip_testing_checklist_template.md @@ -1040,10 +1040,10 @@ Verify, given multiple initial values, that a block is accepted or rejected depe ### Framework Changes - Add the new header field to the relevant objects: - - `ethereum_test_fixtures.FixtureHeader`. - - `ethereum_test_fixtures.FixtureExecutionPayload`. - - `ethereum_test_specs.Header`. -- Add the appropriate `header_*_required` fork method to `BaseFork` in `ethereum_test_forks`. + - `execution_testing.fixtures.FixtureHeader`. + - `execution_testing.fixtures.FixtureExecutionPayload`. + - `execution_testing.specs.Header`. +- Add the appropriate `header_*_required` fork method to `BaseFork` in `execution_testing.forks`. 
## New Block Body Field @@ -1068,10 +1068,10 @@ Verify, given multiple initial values, that a block is accepted or rejected depe ### Framework Changes - Add the new body field to the relevant objects. - - `ethereum_test_fixtures.FixtureBlockBase`. - - `ethereum_test_fixtures.FixtureEngineNewPayload`. - - `ethereum_test_specs.Block`. -- Modify `ethereum_test_specs.BlockchainTest` filling behavior to account for the new block field. + - `execution_testing.fixtures.FixtureBlockBase`. + - `execution_testing.fixtures.FixtureEngineNewPayload`. + - `execution_testing.specs.Block`. +- Modify `execution_testing.specs.BlockchainTest` filling behavior to account for the new block field. ## Gas Cost Changes From e1b871b36d6d9873c6ed1a0a58fcaf5670221abb Mon Sep 17 00:00:00 2001 From: Jochem Brouwer Date: Fri, 6 Feb 2026 23:28:23 +0100 Subject: [PATCH 136/154] feat(benchmarks): add keccak benchmark with updated memory (#1849) * feat(benchmarks): add keccak benchmark with updated memory * chore: fix lint --------- Co-authored-by: fselmo --- tests/benchmark/compute/instruction/test_keccak.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/benchmark/compute/instruction/test_keccak.py b/tests/benchmark/compute/instruction/test_keccak.py index f117a17323..874ba139de 100644 --- a/tests/benchmark/compute/instruction/test_keccak.py +++ b/tests/benchmark/compute/instruction/test_keccak.py @@ -70,17 +70,24 @@ def test_keccak_max_permutations( @pytest.mark.parametrize("mem_alloc", [b"", b"ff", b"ff" * 32]) @pytest.mark.parametrize("offset", [0, 31, 1024]) +@pytest.mark.parametrize("mem_update", [True, False]) def test_keccak( benchmark_test: BenchmarkTestFiller, offset: int, mem_alloc: bytes, + mem_update: bool, ) -> None: """Benchmark KECCAK256 instruction with diff input data and offsets.""" + code_hash = Op.SHA3(offset, Op.CALLDATASIZE) + attack_block = ( + Op.MSTORE(Op.PUSH0, code_hash) if mem_update else Op.POP(code_hash) + ) + benchmark_test( target_opcode=Op.SHA3, code_generator=JumpLoopGenerator( setup=Op.CALLDATACOPY(offset, Op.PUSH0, Op.CALLDATASIZE), - attack_block=Op.POP(Op.SHA3(offset, Op.CALLDATASIZE)), + attack_block=attack_block, tx_kwargs={"data": mem_alloc}, ), ) From 02456f2849ce5821189d3e5ad89f5b31d31e295d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Toni=20Wahrst=C3=A4tter?= <51536394+nerolation@users.noreply.github.com> Date: Sat, 7 Feb 2026 00:08:36 +0100 Subject: [PATCH 137/154] feat(tests): add test case for create2 selfdestruct and recreate (#2121) * feat(tests): add test case for create2 selfdestruct and recreate * chore: update test with parametrization for first CREATE and pre-existing balance --------- Co-authored-by: fselmo --- tests/amsterdam/eip7928_block_level_access_lists/test_cases.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md index d07ae30214..9cae353205 100644 --- a/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md +++ b/tests/amsterdam/eip7928_block_level_access_lists/test_cases.md @@ -128,3 +128,5 @@ | `test_bal_7002_request_from_contract` | Ensure BAL captures withdrawal request from contract with correct source address | Alice calls `RelayContract` which internally calls EIP-7002 system contract with withdrawal request. Withdrawal request should have `source_address = RelayContract` (not Alice). | BAL **MUST** include Alice with `nonce_changes` at `block_access_index=1`. 
BAL **MUST** include `RelayContract` with `balance_changes` (fee paid to system contract) at `block_access_index=1`. BAL **MUST** include system contract with `balance_changes`, `storage_reads`, and `storage_changes` (queue modified). Source address in withdrawal request **MUST** be `RelayContract`. Clean sweep: count and tail reset to 0 at `block_access_index=2`. | ✅ Completed | | `test_bal_7002_request_invalid` | Ensure BAL correctly handles invalid withdrawal request scenarios | Parameterized test with 8 invalid scenarios: (1) insufficient_fee (fee=0), (2) calldata_too_short (55 bytes), (3) calldata_too_long (57 bytes), (4) oog (insufficient gas), (5-7) invalid_call_type (DELEGATECALL/STATICCALL/CALLCODE), (8) contract_reverts. Tests both EOA and contract-based withdrawal requests. | BAL **MUST** include sender with `nonce_changes` at `block_access_index=1`. BAL **MUST** include system contract with `storage_reads` for slots: excess (slot 0), count (slot 1), head (slot 2), tail (slot 3). System contract **MUST NOT** have `storage_changes` (transaction failed, no queue modification). | ✅ Completed | | `test_bal_invalid_extraneous_entries` | Verify clients reject blocks with any type of extraneous BAL entries | Alice sends 100 wei to Oracle contract (which reads storage slot 0). Charlie is uninvolved in this transaction. A valid BAL is created containing nonce change for Alice, balance change and storage read for Oracle. The BAL is corrupted by adding various extraneous entries: (1) extra_nonce, (2) extra_balance, (3) extra_code, (4) extra_storage_write_touched (slot 0 - already read), (5) extra_storage_write_untouched (slot 1 - not accessed), (6) extra_storage_write_uninvolved_account (Charlie - uninvolved account), (7) extra_account_access (Charlie), (8) extra_storage_read (slot 999). Each tested at block_access_index 1 (same tx), 2 (system tx), 3 (out of bounds). | Block **MUST** be rejected with `INVALID_BLOCK_ACCESS_LIST` exception. Clients **MUST** detect any extraneous entries in BAL. | ✅ Completed | +| `test_bal_create2_selfdestruct_then_recreate_same_block` | Ensure BAL handles **(tx1) create+SELFDESTRUCT** then **(tx2) CREATE2 "resurrection"** of the *same address* in the same block | Parameterized: `@pytest.mark.with_all_create_opcodes` for Tx1 create opcode (CREATE or CREATE2), and whether **A** has a pre-existing balance or not. **Tx1:** `Factory` executes `create_opcode` to deploy contract at **A** (for CREATE2: fixed `salt`, fixed `initcode` → deterministic address). The created contract optionally does `SLOAD/SSTORE` (to prove state touches), then `SELFDESTRUCT(beneficiary=B)` **in the same tx** (so under EIP-6780 the account is actually deleted after Tx1). **Tx2:** `Factory` executes `CREATE2(salt, initcode)` to recreate **A** at the same deterministic address, and this time the runtime code persists (no SELFDESTRUCT). | BAL **MUST** include: **Tx1 (`block_access_index=1`)**: (1) `Factory` with `nonce_changes` (create opcode increments nonce), and `balance_changes` if it endows A. (2) **A** in `account_changes` (it was accessed/created) but **MUST NOT** have persistent `code_changes`, `nonce_changes`, or `storage_changes` (it ends Tx1 non-existent due to same-tx create+SELFDESTRUCT). Any attempted `SSTORE` in A before SELFDESTRUCT **MUST NOT** appear in `storage_changes` (ephemeral). If A had a pre-existing balance, it **MUST** have `balance_changes` reflecting the transfer to B. (3) `B` with `balance_changes` if A had balance transferred on SELFDESTRUCT. 
**Tx2 (`block_access_index=2`)**: (1) `Factory` with another `nonce_changes`. (2) **A** with `code_changes` (runtime bytecode present), `nonce_changes = 1`, plus any `storage_changes` performed in Tx2. (3) If Tx2 endows or transfers value, include corresponding `balance_changes` for involved accounts. | 🟡 Planned | + From 551c8f8488d7d039d6cac0812ad06de9f52e6c30 Mon Sep 17 00:00:00 2001 From: Galoretka Date: Sat, 7 Feb 2026 02:00:47 +0200 Subject: [PATCH 138/154] refactor(spec-tool): remove unused fields (#2105) * refactor: remove unused Load._network and _fork_module * refactor: remove unused Load._network and _fork_module * chore: fix lint, removing unused network field and param from call --------- Co-authored-by: fselmo --- .../evm_tools/loaders/fixture_loader.py | 7 +------ src/ethereum_spec_tools/evm_tools/t8n/__init__.py | 5 +---- tests/json_infra/helpers/load_blockchain_tests.py | 5 +---- tests/json_infra/test_ethash.py | 2 +- vulture_whitelist.py | 4 ---- 5 files changed, 4 insertions(+), 19 deletions(-) diff --git a/src/ethereum_spec_tools/evm_tools/loaders/fixture_loader.py b/src/ethereum_spec_tools/evm_tools/loaders/fixture_loader.py index ab9e1b99d9..3d7f2b6f5c 100644 --- a/src/ethereum_spec_tools/evm_tools/loaders/fixture_loader.py +++ b/src/ethereum_spec_tools/evm_tools/loaders/fixture_loader.py @@ -49,17 +49,12 @@ def json_to_block(self, json_data: Any) -> Any: class Load(BaseLoad): """Class for loading json fixtures.""" - _network: str - _fork_module: str fork: ForkLoad - def __init__(self, network: str, fork_module: str | Hardfork): - self._network = network + def __init__(self, fork_module: str | Hardfork): if isinstance(fork_module, Hardfork): self.fork = ForkLoad(fork_module) - self._fork_module = fork_module.short_name else: - self._fork_module = fork_module for fork in Hardfork.discover(): if fork.short_name == fork_module: self.fork = ForkLoad(fork) diff --git a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py index 4988ef25bc..03939e8601 100644 --- a/src/ethereum_spec_tools/evm_tools/t8n/__init__.py +++ b/src/ethereum_spec_tools/evm_tools/t8n/__init__.py @@ -256,10 +256,7 @@ def __init__( self.logger = get_stream_logger("T8N") - super().__init__( - self.options.state_fork, - fork, - ) + super().__init__(fork) self.chain_id = parse_hex_or_int(self.options.state_chainid, U64) self.alloc = Alloc(self, stdin) diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py index ec82b42f47..8bab4269dd 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -121,10 +121,7 @@ def runtest(self) -> None: f"{self.test_file}[{self.test_key}] has unrelated exceptions" ) - load = Load( - self.fork_name, - self.eels_fork, - ) + load = Load(self.eels_fork) genesis_header = load.json_to_header(json_data["genesisBlockHeader"]) parameters = [ diff --git a/tests/json_infra/test_ethash.py b/tests/json_infra/test_ethash.py index e12ae035d1..03708d011f 100644 --- a/tests/json_infra/test_ethash.py +++ b/tests/json_infra/test_ethash.py @@ -142,7 +142,7 @@ def test_pow_validation_block_headers( ).decode() block_json_data = json.loads(block_str_data) - load = Load(json_fork, eels_fork) + load = Load(eels_fork) header = load.json_to_header(block_json_data) fork_module.validate_proof_of_work(header) diff --git a/vulture_whitelist.py b/vulture_whitelist.py index 2d7509f2c5..fe0c9d48de 100644 --- a/vulture_whitelist.py +++ 
b/vulture_whitelist.py
@@ -16,7 +16,6 @@
 from ethereum_optimized.state_db import State
 from ethereum_spec_tools.docc import *
 from ethereum_spec_tools.evm_tools.daemon import _EvmToolHandler
-from ethereum_spec_tools.evm_tools.loaders.fixture_loader import Load
 from ethereum_spec_tools.evm_tools.loaders.transaction_loader import (
     TransactionLoad,
 )
@@ -83,9 +82,6 @@
 EELST8N.is_fork_supported
 EELST8N.evaluate
 
-# src/ethereum_spec_tools/loaders/fixture_loader.py
-Load._network
-
 # src/ethereum_spec_tools/loaders/transaction_loader.py
 TransactionLoad.json_to_authorizations
 TransactionLoad.json_to_chain_id

From 342c7bcfee1120b002b698cc54a471721bb4120a Mon Sep 17 00:00:00 2001
From: danceratopz
Date: Sun, 8 Feb 2026 16:10:51 +0100
Subject: [PATCH 139/154] chore(tooling): add initial claude config and skills (#2024)

Co-authored-by: Felipe Selmo
---
 .claude/commands/assess-eip.md | 45 +++++++++++++
 .claude/commands/audit-config.md | 47 +++++++++++++
 .claude/commands/edit-workflow.md | 18 +++++
 .claude/commands/eip-checklist.md | 58 ++++++++++++++++
 .claude/commands/fill-tests.md | 60 +++++++++++++++++
 .claude/commands/grammar-check.md | 59 +++++++++++++++++
 .claude/commands/implement-eip.md | 62 +++++++++++++++++
 .claude/commands/lint.md | 42 ++++++++++++
 .claude/commands/write-test.md | 66 +++++++++++++++++++
 .claude/settings.json | 31 +++++++++
 .gitignore | 3 +-
 CLAUDE.md | 62 +++++++++++++++++
 .../execution_testing/forks/forks/forks.py | 62 ++++++++++-------
 pyproject.toml | 2 +
 scripts/lint-dispatch.sh | 39 +++++++++++
 .../test_block_access_lists_eip7002.py | 5 +-
 .../istanbul/eip1344_chainid/test_chainid.py | 12 ++--
 .../eip7702_set_code_tx/test_set_code_txs.py | 38 ++++++-----
 .../eip3860_initcode/test_initcode.py | 6 +-
 19 files changed, 666 insertions(+), 51 deletions(-)
 create mode 100644 .claude/commands/assess-eip.md
 create mode 100644 .claude/commands/audit-config.md
 create mode 100644 .claude/commands/edit-workflow.md
 create mode 100644 .claude/commands/eip-checklist.md
 create mode 100644 .claude/commands/fill-tests.md
 create mode 100644 .claude/commands/grammar-check.md
 create mode 100644 .claude/commands/implement-eip.md
 create mode 100644 .claude/commands/lint.md
 create mode 100644 .claude/commands/write-test.md
 create mode 100644 .claude/settings.json
 create mode 100644 CLAUDE.md
 create mode 100755 scripts/lint-dispatch.sh

diff --git a/.claude/commands/assess-eip.md b/.claude/commands/assess-eip.md
new file mode 100644
index 0000000000..1144a9fb93
--- /dev/null
+++ b/.claude/commands/assess-eip.md
@@ -0,0 +1,45 @@
+# Assess EIP
+
+Structured assessment of EIP implementation complexity. When invoked with an EIP number or description, perform the following analysis.
+
+## 1. Classify the Change Type(s)
+
+- **New opcode** — requires: `vm/instructions/`, gas cost, `op_implementation` registration
+- **New precompile** — requires: `vm/precompiled_contracts/`, address constant, mapping, gas cost
+- **New transaction type** — requires: `transactions.py`, `fork.py` validation, exception types
+- **System contract** — requires: contract deployment in genesis, state handling
+- **Block header/body field** — requires: `blocks.py`, RLP encoding changes
+- **Gas cost change** — requires: `vm/gas.py` constant updates, possibly interpreter changes
+- **Execution layer request** — requires: request handling in `requests.py`
+- **Constraint change** — requires: validation logic in `fork.py` or `blocks.py`
+
+## 2. Estimate Scope
+
+- **Small** (1-2 files in spec, 1 test file): gas repricing, simple constraint
+- **Medium** (3-5 files in spec, 2-3 test files): new opcode, new precompile
+- **Large** (5-10 files in spec, 5+ test files): new tx type, new system contract
+- **XL** (10+ files, multi-EIP umbrella): VM overhaul (e.g., EOF)
+
+## 3. Identify Required Test Categories
+
+Map the change types to the relevant `EIPChecklist` categories (from `execution_testing.checklists.eip_checklist`). List the checklist items that need to be covered.
+
+## 4. Identify Prior Art
+
+Find similar completed EIPs in the repo to use as implementation reference:
+
+- New opcode → check recent opcode additions in latest fork's `vm/instructions/`
+- New precompile → `tests/osaka/eip7951_p256verify_precompiles/`
+- New tx type → `tests/prague/eip7702_set_code_tx/`
+- Gas changes → check `vm/gas.py` diffs between recent forks
+
+## 5. Output Structured Assessment
+
+Produce a summary with:
+
+- Change types identified
+- Estimated scope (Small / Medium / Large / XL)
+- Spec files to modify (with paths)
+- Test files to create
+- EIPChecklist categories to cover
+- Reference implementations to follow
diff --git a/.claude/commands/audit-config.md b/.claude/commands/audit-config.md
new file mode 100644
index 0000000000..932074ac9a
--- /dev/null
+++ b/.claude/commands/audit-config.md
@@ -0,0 +1,47 @@
+# Audit Config
+
+Periodic verification skill to prevent CLAUDE.md and skills from going stale. Run this manually to check freshness (e.g., after a major refactor, before a release, or when onboarding).
+
+## Checks to Perform
+
+### 1. Verify File Paths
+
+Check that every file path or directory referenced in `CLAUDE.md` and `.claude/commands/*.md` still exists. Report any broken references.
+
+### 2. Verify CLI Commands
+
+Run `--help` on referenced commands and confirm mentioned flags still exist:
+
+- `uv run fill --help`
+- `uv run ethereum-spec-new-fork --help`
+- `uv run ethereum-spec-lint --help`
+- `uv run checklist --help`
+
+### 3. Verify Code Patterns
+
+Spot-check code patterns mentioned in skills against actual code:
+
+- Does `op_implementation` dict exist in the latest fork's `vm/instructions/__init__.py`?
+- Does `PRE_COMPILED_CONTRACTS` exist in the latest fork's `vm/precompiled_contracts/mapping.py`?
+- Does the `Ops` enum exist in `vm/instructions/__init__.py`?
+- Does `FORK_CRITERIA` or equivalent exist in the latest fork's `__init__.py`?
+
+### 4. Verify Fork List
+
+Check that the fork order and default branch mentioned in `CLAUDE.md` match reality by inspecting `src/ethereum/forks/` and git branch configuration.
+
+### 5. Verify Docs References
+
+Confirm that `docs/` paths referenced in skills still exist:
+
+- `docs/writing_tests/`
+- `docs/writing_tests/opcode_metadata.md`
+- `docs/writing_tests/checklist_templates/`
+- `docs/filling_tests/`
+
+## Output
+
+Produce a summary with:
+
+- **Current**: references that are still valid
+- **Stale**: references that need updating, with suggested fixes
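+
+## Example: Path Check Sketch
+
+As a rough aid for check 1, a small script along these lines could flag broken references. The regex, file list, and glob handling below are illustrative assumptions, not a prescribed implementation:
+
+```python
+import re
+from pathlib import Path
+
+# Backtick-quoted, repo-relative paths such as `src/ethereum/forks/` or `vm/gas.py`.
+PATH_RE = re.compile(r"`([\w./*-]+/[\w./*-]*)`")
+
+
+def audit_paths(repo_root: Path) -> list[str]:
+    """Return referenced paths from CLAUDE.md and the skills that no longer exist."""
+    docs = [repo_root / "CLAUDE.md", *sorted((repo_root / ".claude/commands").glob("*.md"))]
+    stale: list[str] = []
+    for doc in docs:
+        for ref in PATH_RE.findall(doc.read_text(encoding="utf-8")):
+            if "*" in ref:
+                continue  # skip glob patterns; only literal paths are checked
+            if not (repo_root / ref.rstrip("/")).exists():
+                stale.append(f"{doc.name}: {ref}")
+    return stale
+
+
+if __name__ == "__main__":
+    for entry in audit_paths(Path(".")):
+        print("STALE:", entry)
+```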
+ +## Action Version Pinning (Required) + +All actions must be pinned to commit SHA with version comment: + +```yaml +uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 +``` + +- Never use version tags alone (`@v4` is wrong) +- Local actions (`./.github/actions/*`) are exempt from pinning + +## Validation + +Run `uvx tox -e static` before committing — this runs `actionlint` to validate YAML syntax and structure. diff --git a/.claude/commands/eip-checklist.md b/.claude/commands/eip-checklist.md new file mode 100644 index 0000000000..68170ca545 --- /dev/null +++ b/.claude/commands/eip-checklist.md @@ -0,0 +1,58 @@ +# EIP Checklist + +Guide for using the EIP testing checklist system to track test coverage. Run this skill when working on EIP test coverage or checklists. + +## What It Is + +The `EIPChecklist` class (in `execution_testing.checklists.eip_checklist`) provides a hierarchical marker system for tagging tests with what aspect of an EIP they cover. Categories include: + +- `General`, `Opcode`, `Precompile`, `SystemContract`, `TransactionType` +- `BlockHeaderField`, `BlockBodyField`, `GasCostChanges`, `GasRefundsChanges` +- `ExecutionLayerRequest`, `BlobCountChanges` + +Each category has deep sub-items (e.g., `EIPChecklist.Opcode.Test.GasUsage.Normal`). + +## Usage in Tests + +```python +@EIPChecklist.TransactionType.Test.IntrinsicValidity.GasLimit.Exact() +def test_exact_intrinsic_gas(state_test: StateTestFiller): + ... + +# Multi-EIP coverage: +@EIPChecklist.TransactionType.Test.Signature.Invalid.V.Two(eip=[2930]) +def test_invalid_v(state_test: StateTestFiller): + ... +``` + +## Generating Checklists + +Run `uv run checklist` to generate coverage reports. Template at `docs/writing_tests/checklist_templates/eip_testing_checklist_template.md`. + +## Marking Items as Externally Covered or N/A + +Create `eip_checklist_external_coverage.txt` in the EIP test directory: + +``` +general/code_coverage/eels = Covered by EELS test suite +``` + +Create `eip_checklist_not_applicable.txt` for inapplicable items: + +``` +system_contract = EIP-7702 does not introduce a system contract +precompile/ = EIP-7702 does not introduce a precompile +``` + +(trailing `/` marks entire category as N/A) + +## Completed Examples + +Reference these for patterns: + +- `tests/prague/eip7702_set_code_tx/` — comprehensive checklist for a transaction type EIP +- `tests/osaka/eip7951_p256verify_precompiles/` — precompile checklist example + +## References + +See `docs/writing_tests/checklist_templates/` for templates and detailed documentation. diff --git a/.claude/commands/fill-tests.md b/.claude/commands/fill-tests.md new file mode 100644 index 0000000000..e1f3db7967 --- /dev/null +++ b/.claude/commands/fill-tests.md @@ -0,0 +1,60 @@ +# Fill Tests + +CLI reference for the `fill` command. Run this skill before filling test fixtures. The `fill` command is pytest-based — all standard pytest flags work. + +## Basic Usage + +``` +uv run fill tests/ # Fill all tests +uv run fill tests/cancun/ --fork Cancun # Specific fork +uv run fill tests/path/to/test.py -k "test_name" # Specific test +uv run fill tests/osaka/ --until Osaka # Up to fork (inclusive) +uv run fill --collect-only tests/ # Dry run: list tests without executing +``` + +## Key Flags + +- `--fork FORK` / `--until FORK` — target specific fork or range +- `--output DIR` + `--clean` — output directory; `--clean` required when re-filling +- `-k "pattern"` — filter tests by name pattern +- `-m "marker"` — filter by pytest marker (e.g. 
`-m state_test`, `-m blockchain_test`) +- `-n auto --maxprocesses N` — parallel execution (use `--dist=loadgroup`) +- `--evm-bin PATH` — specify t8n tool (default: `ethereum-spec-evm-resolver`) +- `--verify-fixtures` — verify generated fixtures against geth blocktest +- `--generate-all-formats` — generate all fixture formats (2-phase) + +## Debugging + +- `--evm-dump-dir DIR` — dump t8n input/output for debugging +- `--traces` — collect execution traces +- `--pdb` — drop into debugger on failure +- `-vv` — verbose output; `-x` — stop on first failure; `-s` — print stdout + +## Watch Mode + +- `--watch` — re-run on file changes (clears screen between runs) +- `--watcherfall` — same but keeps output history + +## Benchmark Tests + +- Must use `-m benchmark` — benchmark tests are excluded by default +- Require evmone as backend: `--evm-bin=evmone-t8n` +- Default benchmark fork is Prague (set in `tests/benchmark/conftest.py`) +- Gas values mode: `--gas-benchmark-values 1,10,100` (values in millions of gas) +- Fixed opcode count mode: `--fixed-opcode-count 1,10,100` (values in thousands) +- These two modes are **mutually exclusive** +- Use `--generate-pre-alloc-groups` for stateful benchmarks + +## Static Tests (Legacy) + +- `uv run fill --fill-static-tests tests/static/` — fills YAML/JSON fillers from `ethereum/tests` +- Legacy only — do NOT add new static fillers. Use Python tests instead +- Useful to check if spec changes broke how legacy tests fill + +## Fixture Formats + +One test function auto-generates multiple formats: `StateFixture`, `BlockchainFixture`, `BlockchainEngineFixture`. Use `--generate-all-formats` for additional formats via 2-phase execution. + +## References + +See `docs/filling_tests/` for detailed documentation. diff --git a/.claude/commands/grammar-check.md b/.claude/commands/grammar-check.md new file mode 100644 index 0000000000..5f55a6eaa5 --- /dev/null +++ b/.claude/commands/grammar-check.md @@ -0,0 +1,59 @@ +# Grammar Check + +Audit grammar in documentation and code comments. + +## Files to Check + +Check `$ARGUMENTS` (default: `src/`). Use Glob to find: + +- `**/*.py` - check docstrings and `#` comments only +- `**/*.md` - check prose content, skip code blocks + +## What to Detect + +1. Missing prepositions ("refer the" → "refer to the", "comply the" → "comply with the") +2. Subject-verb disagreement +3. Missing articles where required +4. Incorrect word order +5. Sentence fragments in documentation +6. Double words ("the the", "is is") + +## What to Ignore + +- Code syntax and variable names +- Technical terms, EIP numbers, hex values +- Intentional shorthand in inline code comments +- Content inside code blocks (``` or indented blocks in markdown) +- URLs and email addresses + +## Output Format + +For each issue, output a clickable link with line number: + +``` +path/to/file.md:42 - "original problematic text" + Suggestion: "corrected text" + Reason: brief explanation +``` + +For issues spanning multiple lines, use range format: + +``` +path/to/file.py:15-17 - "multi-line docstring issue" + Suggestion: "corrected text" + Reason: brief explanation +``` + +## Process + +1. Find all matching files +2. For `.md` files: check full prose content, skip code blocks +3. For `.py` files: extract and check only docstrings (triple-quoted) and `#` comments +4. Group findings by file +5. End with summary: "Found N grammar issues in M files." or "No grammar issues found." 
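If it helps to script step 3, below is a minimal standard-library sketch of one way to pull only the docstrings and `#` comments out of a `.py` file; the helper name and return shape are illustrative, not part of this repository's tooling.

```python
# Illustrative helper only; not part of the repository's tooling.
import ast
import io
import tokenize


def prose_fragments(source: str) -> list[tuple[int, str]]:
    """Return (line, text) pairs for docstrings and # comments in Python source."""
    fragments: list[tuple[int, str]] = []

    # Docstrings: module, class, and (async) function bodies whose first
    # statement is a string literal.
    tree = ast.parse(source)
    for node in ast.walk(tree):
        if isinstance(
            node, (ast.Module, ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)
        ):
            docstring = ast.get_docstring(node)
            if docstring:
                fragments.append((node.body[0].lineno, docstring))

    # Comments: the tokenizer reports each `#` comment with its line number.
    for token in tokenize.generate_tokens(io.StringIO(source).readline):
        if token.type == tokenize.COMMENT:
            fragments.append((token.start[0], token.string.lstrip("# ")))

    return sorted(fragments)
```

Each `(line, text)` pair can then be grammar-checked and reported in the `path/to/file.py:line` format shown above.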

## Important

- Report findings only, do not auto-fix
- Be conservative: only flag clear errors, not style preferences
- When uncertain, skip rather than false-positive
diff --git a/.claude/commands/implement-eip.md b/.claude/commands/implement-eip.md
new file mode 100644
index 0000000000..c06c6e22d1
--- /dev/null
+++ b/.claude/commands/implement-eip.md
@@ -0,0 +1,62 @@
# Implement EIP

Patterns for implementing spec changes in `src/ethereum/forks/`. Run this skill before implementing an EIP or modifying fork code.

## Fork Directory Layout

Each fork lives at `src/ethereum/forks/<fork_name>/`. Explore the latest fork directory for current structure. Key files:

- `__init__.py` — FORK_CRITERIA, fork metadata
- `fork.py` — state transition functions
- `blocks.py` — block structure and validation
- `transactions.py` — transaction types and processing
- `state.py` — state trie operations
- `vm/instructions/__init__.py` — Ops enum + `op_implementation` dict
- `vm/gas.py` — gas constants and calculations
- `vm/precompiled_contracts/__init__.py` — precompile address constants
- `vm/precompiled_contracts/mapping.py` — `PRE_COMPILED_CONTRACTS` registry

## Import Isolation (enforced by `ethereum-spec-lint`)

- **Within same fork**: relative imports (`from . import vm`, `from .state import ...`)
- **Previous fork only**: absolute imports (`from ethereum.cancun import ...`)
- **Shared modules**: always OK (`ethereum.crypto`, `ethereum.utils`, `ethereum.exceptions`)
- **Future forks**: NEVER allowed
- **Ancient forks (2+ back)**: NEVER allowed
- Run `ethereum-spec-lint` to verify before committing

## Adding a New Opcode

1. Add to `Ops` enum in `vm/instructions/__init__.py` with hex value
2. Implement function in appropriate `vm/instructions/<category>.py` — follows pattern: STACK → GAS (`charge_gas`) → OPERATION → PROGRAM COUNTER
3. Register in `op_implementation` dict in `vm/instructions/__init__.py`
4. Add gas constant in `vm/gas.py` if needed

## Adding a New Precompile

1. Define address constant in `vm/precompiled_contracts/__init__.py` using `hex_to_address("0x...")`
2. Create implementation file `vm/precompiled_contracts/<name>.py`
3. Register in `PRE_COMPILED_CONTRACTS` dict in `vm/precompiled_contracts/mapping.py`
4. Add gas constant in `vm/gas.py`

## Adding a New Transaction Type

1. Define `@slotted_freezable @dataclass` class in `transactions.py`
2. Add to `Transaction` union type at bottom of file
3. Handle in `fork.py` validation/processing logic
4. Add exception type in `exceptions.py` if needed

## Creating a New Fork

```bash
uv run ethereum-spec-new-fork --new-fork= --template-fork=