From 9f25c60bf79bea337d9dbc92a2c0ca576fc29dbd Mon Sep 17 00:00:00 2001
From: arvidn
Date: Wed, 21 Aug 2024 12:25:36 +0200
Subject: [PATCH] change the type of ref_list to List[bytes] instead of
 List[SerializedProgram]. Also, avoid passing the block generator twice to
 the worker process for validation

---
 .../core/full_node/stores/test_block_store.py | 27 ++++++++++++-------
 chia/_tests/generator/test_rom.py             |  4 +--
 chia/_tests/util/run_block.py                 | 10 ++++---
 chia/_tests/util/test_full_block_utils.py     |  2 +-
 chia/consensus/blockchain.py                  |  5 ++--
 chia/consensus/multiprocess_validation.py     | 12 ++++-----
 chia/full_node/block_store.py                 | 17 ++++++------
 chia/full_node/mempool_check_conditions.py    |  8 +++---
 chia/simulator/block_tools.py                 |  4 +--
 chia/types/generator_types.py                 |  2 +-
 chia/util/full_block_utils.py                 |  4 +--
 tools/analyze-chain.py                        |  2 +-
 12 files changed, 53 insertions(+), 44 deletions(-)

diff --git a/chia/_tests/core/full_node/stores/test_block_store.py b/chia/_tests/core/full_node/stores/test_block_store.py
index 8a2f96454ac4..34a1c6cf4079 100644
--- a/chia/_tests/core/full_node/stores/test_block_store.py
+++ b/chia/_tests/core/full_node/stores/test_block_store.py
@@ -5,7 +5,7 @@
 import random
 import sqlite3
 from pathlib import Path
-from typing import List, cast
+from typing import List, Optional, cast
 
 import pytest
 
@@ -39,6 +39,13 @@ def use_cache(request: SubRequest) -> bool:
     return cast(bool, request.param)
 
 
+def maybe_serialize(gen: Optional[SerializedProgram]) -> Optional[bytes]:
+    if gen is None:
+        return None
+    else:
+        return bytes(gen)
+
+
 @pytest.mark.limit_consensus_modes(reason="save time")
 @pytest.mark.anyio
 async def test_block_store(tmp_dir: Path, db_version: int, bt: BlockTools, use_cache: bool) -> None:
@@ -85,7 +92,7 @@ async def test_block_store(tmp_dir: Path, db_version: int, bt: BlockTools, use_c
             assert GeneratorBlockInfo(
                 block.foliage.prev_block_hash, block.transactions_generator, block.transactions_generator_ref_list
             ) == await store.get_block_info(block.header_hash)
-            assert block.transactions_generator == await store.get_generator(block.header_hash)
+            assert maybe_serialize(block.transactions_generator) == await store.get_generator(block.header_hash)
             assert block_record == (await store.get_block_record(block_record_hh))
             await store.set_in_chain([(block_record.header_hash,)])
             await store.set_peak(block_record.header_hash)
@@ -98,7 +105,7 @@ async def test_block_store(tmp_dir: Path, db_version: int, bt: BlockTools, use_c
 
             assert await store.get_full_blocks_at([block.height]) == [block]
             if block.transactions_generator is not None:
-                assert await store.get_generators_at([block.height]) == [block.transactions_generator]
+                assert await store.get_generators_at([block.height]) == [bytes(block.transactions_generator)]
             else:
                 with pytest.raises(ValueError, match="GENERATOR_REF_HAS_NO_GENERATOR"):
                     await store.get_generators_at([block.height])
@@ -315,22 +322,24 @@ def generator(i: int) -> SerializedProgram:
         await store.set_peak(block_record.header_hash)
         new_blocks.append(block)
 
-    expected_generators = list(map(lambda x: x.transactions_generator, new_blocks[1:10]))
+    expected_generators = list(map(lambda x: maybe_serialize(x.transactions_generator), new_blocks[1:10]))
     generators = await store.get_generators_at([uint32(x) for x in range(1, 10)])
     assert generators == expected_generators
 
     # test out-of-order heights
-    expected_generators = list(map(lambda x: x.transactions_generator, [new_blocks[i] for i in [4, 8, 3, 9]]))
+    expected_generators = list(
+        map(lambda x: maybe_serialize(x.transactions_generator), [new_blocks[i] for i in [4, 8, 3, 9]])
+    )
     generators = await store.get_generators_at([uint32(4), uint32(8), uint32(3), uint32(9)])
     assert generators == expected_generators
 
     with pytest.raises(KeyError):
         await store.get_generators_at([uint32(100)])
 
-    assert await store.get_generator(blocks[2].header_hash) == new_blocks[2].transactions_generator
-    assert await store.get_generator(blocks[4].header_hash) == new_blocks[4].transactions_generator
-    assert await store.get_generator(blocks[6].header_hash) == new_blocks[6].transactions_generator
-    assert await store.get_generator(blocks[7].header_hash) == new_blocks[7].transactions_generator
+    assert await store.get_generator(blocks[2].header_hash) == maybe_serialize(new_blocks[2].transactions_generator)
+    assert await store.get_generator(blocks[4].header_hash) == maybe_serialize(new_blocks[4].transactions_generator)
+    assert await store.get_generator(blocks[6].header_hash) == maybe_serialize(new_blocks[6].transactions_generator)
+    assert await store.get_generator(blocks[7].header_hash) == maybe_serialize(new_blocks[7].transactions_generator)
 
 
 @pytest.mark.limit_consensus_modes(reason="save time")
diff --git a/chia/_tests/generator/test_rom.py b/chia/_tests/generator/test_rom.py
index 031c84290fab..410393050f8b 100644
--- a/chia/_tests/generator/test_rom.py
+++ b/chia/_tests/generator/test_rom.py
@@ -63,7 +63,7 @@ def to_sp(sexp: bytes) -> SerializedProgram:
 
 
 def block_generator() -> BlockGenerator:
-    generator_list = [to_sp(FIRST_GENERATOR), to_sp(SECOND_GENERATOR)]
+    generator_list = [FIRST_GENERATOR, SECOND_GENERATOR]
     return BlockGenerator(to_sp(COMPILED_GENERATOR_CODE), generator_list)
 
 
@@ -80,7 +80,7 @@ def block_generator() -> BlockGenerator:
 
 def run_generator(self: BlockGenerator) -> Tuple[int, Program]:
     """This mode is meant for accepting possibly soft-forked transactions into the mempool"""
-    args = Program.to([[bytes(g) for g in self.generator_refs]])
+    args = Program.to([self.generator_refs])
     return GENERATOR_MOD.run_with_cost(MAX_COST, [self.program, args])
 
 
diff --git a/chia/_tests/util/run_block.py b/chia/_tests/util/run_block.py
index 1c0e40ac9439..be8c5ed4deba 100644
--- a/chia/_tests/util/run_block.py
+++ b/chia/_tests/util/run_block.py
@@ -61,7 +61,7 @@ def npc_to_dict(npc: NPC) -> Dict[str, Any]:
 
 
 def run_generator(block_generator: BlockGenerator, constants: ConsensusConstants, max_cost: int) -> List[CAT]:
-    block_args = [bytes(a) for a in block_generator.generator_refs]
+    block_args = block_generator.generator_refs
     cost, block_result = block_generator.program.run_with_cost(max_cost, [DESERIALIZE_MOD, block_args])
 
     coin_spends = block_result.first()
@@ -126,18 +126,20 @@ def run_generator(block_generator: BlockGenerator, constants: ConsensusConstants
     return cat_list
 
 
-def ref_list_to_args(ref_list: List[uint32], root_path: Path) -> List[SerializedProgram]:
+def ref_list_to_args(ref_list: List[uint32], root_path: Path) -> List[bytes]:
     args = []
     for height in ref_list:
         with open(root_path / f"{height}.json", "rb") as f:
             program_str = json.load(f)["block"]["transactions_generator"]
-            args.append(SerializedProgram.fromhex(program_str))
+            # we need SerializedProgram to handle a possible leading 0x in the
+            # hex string
+            args.append(bytes(SerializedProgram.fromhex(program_str)))
     return args
 
 
 def run_generator_with_args(
     generator_program_hex: str,
-    generator_args: List[SerializedProgram],
+    generator_args: List[bytes],
     constants: ConsensusConstants,
     cost: uint64,
 ) -> List[CAT]:
diff --git a/chia/_tests/util/test_full_block_utils.py b/chia/_tests/util/test_full_block_utils.py
index a7242ac45275..05e6c365b92a 100644
--- a/chia/_tests/util/test_full_block_utils.py
+++ b/chia/_tests/util/test_full_block_utils.py
@@ -253,7 +253,7 @@ async def test_parser():
     for block in get_full_blocks():
         block_bytes = bytes(block)
         gen = generator_from_block(block_bytes)
-        assert gen == block.transactions_generator
+        assert gen == bytes(block.transactions_generator)
         bi = block_info_from_block(block_bytes)
         assert block.transactions_generator == bi.transactions_generator
         assert block.prev_header_hash == bi.prev_header_hash
diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py
index 455cda34c7cc..8f1585073365 100644
--- a/chia/consensus/blockchain.py
+++ b/chia/consensus/blockchain.py
@@ -35,7 +35,6 @@
 from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions
 from chia.types.block_protocol import BlockInfo
 from chia.types.blockchain_format.coin import Coin
-from chia.types.blockchain_format.serialized_program import SerializedProgram
 from chia.types.blockchain_format.sized_bytes import bytes32
 from chia.types.blockchain_format.sub_epoch_summary import SubEpochSummary
 from chia.types.blockchain_format.vdf import VDFInfo
@@ -1069,7 +1068,7 @@ async def get_block_generator(
         if len(ref_list) == 0:
             return BlockGenerator(block.transactions_generator, [])
 
-        result: List[SerializedProgram] = []
+        result: List[bytes] = []
         previous_br = await self.get_block_record_from_db(block.prev_header_hash)
         if previous_br is not None and self.height_to_hash(previous_br.height) == block.prev_header_hash:
             # We are not in a reorg, no need to look up alternate header hashes
@@ -1104,7 +1103,7 @@ async def get_block_generator(
                     ref_block = additional_height_dict[ref_height]
                     if ref_block.transactions_generator is None:
                         raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
-                    result.append(ref_block.transactions_generator)
+                    result.append(bytes(ref_block.transactions_generator))
                 elif ref_height in reorg_chain:
                     gen = await self.block_store.get_generator(reorg_chain[ref_height])
                     if gen is None:
diff --git a/chia/consensus/multiprocess_validation.py b/chia/consensus/multiprocess_validation.py
index 2fb06b1f8186..b3e2c3e776cb 100644
--- a/chia/consensus/multiprocess_validation.py
+++ b/chia/consensus/multiprocess_validation.py
@@ -52,7 +52,7 @@ def batch_pre_validate_blocks(
     constants: ConsensusConstants,
     blocks_pickled: Dict[bytes, bytes],
     full_blocks_pickled: List[bytes],
-    prev_transaction_generators: List[Optional[bytes]],
+    prev_transaction_generators: List[Optional[List[bytes]]],
     npc_results: Dict[uint32, bytes],
     check_filter: bool,
     expected_difficulty: List[uint64],
@@ -81,10 +81,10 @@ def batch_pre_validate_blocks(
                         removals, tx_additions = [], []
 
                 if block.transactions_generator is not None and npc_result is None:
-                    prev_generator_bytes = prev_transaction_generators[i]
-                    assert prev_generator_bytes is not None
+                    prev_generators = prev_transaction_generators[i]
+                    assert prev_generators is not None
                     assert block.transactions_info is not None
-                    block_generator: BlockGenerator = BlockGenerator.from_bytes(prev_generator_bytes)
+                    block_generator = BlockGenerator(block.transactions_generator, prev_generators)
                     assert block_generator.program == block.transactions_generator
                     npc_result = get_name_puzzle_conditions(
                         block_generator,
@@ -312,7 +312,7 @@ async def pre_validate_blocks_multiprocessing(
         end_i = min(i + batch_size, len(blocks))
         blocks_to_validate = blocks[i:end_i]
         b_pickled: List[bytes] = []
-        previous_generators: List[Optional[bytes]] = []
+        previous_generators: List[Optional[List[bytes]]] = []
         for block in blocks_to_validate:
             # We ONLY add blocks which are in the past, based on header hashes (which are validated later) to the
             # prev blocks dict. This is important since these blocks are assumed to be valid and are used as previous
@@ -337,7 +337,7 @@ async def pre_validate_blocks_multiprocessing(
                 )
             ]
             if block_generator is not None:
-                previous_generators.append(bytes(block_generator))
+                previous_generators.append(block_generator.generator_refs)
             else:
                 previous_generators.append(None)
 
diff --git a/chia/full_node/block_store.py b/chia/full_node/block_store.py
index 2badae0160c6..ae99845c7bd1 100644
--- a/chia/full_node/block_store.py
+++ b/chia/full_node/block_store.py
@@ -9,7 +9,6 @@
 import zstd
 
 from chia.consensus.block_record import BlockRecord
-from chia.types.blockchain_format.serialized_program import SerializedProgram
 from chia.types.blockchain_format.sized_bytes import bytes32
 from chia.types.full_block import FullBlock
 from chia.types.weight_proof import SubEpochChallengeSegment, SubEpochSegments
@@ -264,10 +263,10 @@ async def get_block_info(self, header_hash: bytes32) -> Optional[GeneratorBlockI
             b.foliage.prev_block_hash, b.transactions_generator, b.transactions_generator_ref_list
         )
 
-    async def get_generator(self, header_hash: bytes32) -> Optional[SerializedProgram]:
+    async def get_generator(self, header_hash: bytes32) -> Optional[bytes]:
         cached = self.block_cache.get(header_hash)
         if cached is not None:
-            return cached.transactions_generator
+            return None if cached.transactions_generator is None else bytes(cached.transactions_generator)
 
         formatted_str = "SELECT block, height from full_blocks WHERE header_hash=?"
         async with self.db_wrapper.reader_no_transaction() as conn:
@@ -278,19 +277,19 @@ async def get_generator(self, header_hash: bytes32) -> Optional[SerializedProgra
 
         try:
             return generator_from_block(block_bytes)
-        except Exception as e:
+        except Exception as e:  # pragma: no cover
             log.error(f"cheap parser failed for block at height {row[1]}: {e}")
             # this is defensive, on the off-chance that
             # generator_from_block() fails, fall back to the reliable
             # definition of parsing a block
             b = FullBlock.from_bytes(block_bytes)
-            return b.transactions_generator
+            return None if b.transactions_generator is None else bytes(b.transactions_generator)
 
-    async def get_generators_at(self, heights: List[uint32]) -> List[SerializedProgram]:
+    async def get_generators_at(self, heights: List[uint32]) -> List[bytes]:
         if len(heights) == 0:
             return []
 
-        generators: Dict[uint32, SerializedProgram] = {}
+        generators: Dict[uint32, bytes] = {}
         formatted_str = (
             f"SELECT block, height from full_blocks "
             f'WHERE in_main_chain=1 AND height in ({"?," * (len(heights) - 1)}?)'
@@ -302,13 +301,13 @@ async def get_generators_at(self, heights: List[uint32]) -> List[SerializedProgr
 
                 try:
                     gen = generator_from_block(block_bytes)
-                except Exception as e:
+                except Exception as e:  # pragma: no cover
                     log.error(f"cheap parser failed for block at height {row[1]}: {e}")
                     # this is defensive, on the off-chance that
                     # generator_from_block() fails, fall back to the reliable
                     # definition of parsing a block
                     b = FullBlock.from_bytes(block_bytes)
-                    gen = b.transactions_generator
+                    gen = None if b.transactions_generator is None else bytes(b.transactions_generator)
                 if gen is None:
                     raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
                 generators[uint32(row[1])] = gen
diff --git a/chia/full_node/mempool_check_conditions.py b/chia/full_node/mempool_check_conditions.py
index 14851241df66..a84df7f04ed2 100644
--- a/chia/full_node/mempool_check_conditions.py
+++ b/chia/full_node/mempool_check_conditions.py
@@ -47,7 +47,7 @@ def get_name_puzzle_conditions(
         run_block = run_block_generator
 
     try:
-        block_args = [bytes(gen) for gen in generator.generator_refs]
+        block_args = generator.generator_refs
         err, result = run_block(bytes(generator.program), block_args, max_cost, flags, constants)
         assert (err is None) != (result is None)
         if err is not None:
@@ -66,7 +66,7 @@ def get_puzzle_and_solution_for_coin(
     try:
         puzzle, solution = get_puzzle_and_solution_for_coin_rust(
             generator.program,
-            [bytes(a) for a in generator.generator_refs],
+            generator.generator_refs,
             constants.MAX_BLOCK_COST_CLVM,
             coin,
             get_flags_for_height_and_constants(height, constants),
@@ -80,7 +80,7 @@ def get_spends_for_block(generator: BlockGenerator, height: int, constants: Cons
     args = bytearray(b"\xff")
     args += bytes(DESERIALIZE_MOD)
     args += b"\xff"
-    args += bytes(Program.to([bytes(a) for a in generator.generator_refs]))
+    args += bytes(Program.to(generator.generator_refs))
     args += b"\x80\x80"
 
     _, ret = run_chia_program(
@@ -107,7 +107,7 @@ def get_spends_for_block_with_conditions(
     args = bytearray(b"\xff")
     args += bytes(DESERIALIZE_MOD)
     args += b"\xff"
-    args += bytes(Program.to([bytes(a) for a in generator.generator_refs]))
+    args += bytes(Program.to(generator.generator_refs))
     args += b"\x80\x80"
 
     flags = get_flags_for_height_and_constants(height, constants)
diff --git a/chia/simulator/block_tools.py b/chia/simulator/block_tools.py
index ac73fa7213f5..98cddd694e08 100644
--- a/chia/simulator/block_tools.py
+++ b/chia/simulator/block_tools.py
@@ -1940,7 +1940,7 @@ def compute_cost_test(generator: BlockGenerator, constants: ConsensusConstants,
     clvm_cost = 0
 
     if height >= constants.HARD_FORK_HEIGHT:
-        blocks = [bytes(g) for g in generator.generator_refs]
+        blocks = generator.generator_refs
         cost, result = generator.program._run(INFINITE_COST, MEMPOOL_MODE | ALLOW_BACKREFS, [DESERIALIZE_MOD, blocks])
         clvm_cost += cost
 
@@ -1955,7 +1955,7 @@ def compute_cost_test(generator: BlockGenerator, constants: ConsensusConstants,
             condition_cost += conditions_cost(result)
 
     else:
-        block_program_args = SerializedProgram.to([[bytes(g) for g in generator.generator_refs]])
+        block_program_args = SerializedProgram.to([generator.generator_refs])
         clvm_cost, result = GENERATOR_MOD._run(INFINITE_COST, MEMPOOL_MODE, [generator.program, block_program_args])
 
         for res in result.first().as_iter():
diff --git a/chia/types/generator_types.py b/chia/types/generator_types.py
index c60683783d6f..9198e4b13c83 100644
--- a/chia/types/generator_types.py
+++ b/chia/types/generator_types.py
@@ -19,4 +19,4 @@ def get_generator_for_block_height(self, height: uint32) -> SerializedProgram:
 @dataclass(frozen=True)
 class BlockGenerator(Streamable):
     program: SerializedProgram
-    generator_refs: List[SerializedProgram]
+    generator_refs: List[bytes]
diff --git a/chia/util/full_block_utils.py b/chia/util/full_block_utils.py
index 673e7d24e95d..2dbb31a703cd 100644
--- a/chia/util/full_block_utils.py
+++ b/chia/util/full_block_utils.py
@@ -204,7 +204,7 @@ def skip_transactions_info(buf: memoryview) -> memoryview:
     return skip_list(buf, skip_coin)
 
 
-def generator_from_block(buf: memoryview) -> Optional[SerializedProgram]:
+def generator_from_block(buf: memoryview) -> Optional[bytes]:
     buf = skip_list(buf, skip_end_of_sub_slot_bundle)  # finished_sub_slots
     buf = skip_reward_chain_block(buf)  # reward_chain_block
     buf = skip_optional(buf, skip_vdf_proof)  # challenge_chain_sp_proof
@@ -222,7 +222,7 @@ def generator_from_block(buf: memoryview) -> Optional[SerializedProgram]:
 
     buf = buf[1:]
     length = serialized_length(buf)
-    return SerializedProgram.from_bytes(bytes(buf[:length]))
+    return bytes(buf[:length])
 
 
 # this implements the BlockInfo protocol
diff --git a/tools/analyze-chain.py b/tools/analyze-chain.py
index 5803c7e7010d..0a69aed6e2da 100755
--- a/tools/analyze-chain.py
+++ b/tools/analyze-chain.py
@@ -95,7 +95,7 @@ def main(file: Path, mempool_mode: bool, start: int, end: Optional[int], call: O
             ref = c.execute("SELECT block FROM full_blocks WHERE height=? and in_main_chain=1", (h,))
             generator = generator_from_block(zstd.decompress(ref.fetchone()[0]))
             assert generator is not None
-            generator_blobs.append(bytes(generator))
+            generator_blobs.append(generator)
             ref.close()
 
         ref_lookup_time = time() - start_time
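
Illustrative note (not part of the patch): with generator_refs now typed as List[bytes], the worker in
batch_pre_validate_blocks no longer unpickles a second serialized copy of the block's program; it rebuilds the
generator from the program already carried by the pickled full block plus the previously fetched reference blobs.
A minimal sketch of that construction, using a hypothetical helper name:

    from typing import List

    from chia.types.blockchain_format.serialized_program import SerializedProgram
    from chia.types.generator_types import BlockGenerator

    def rebuild_generator(program: SerializedProgram, ref_blobs: List[bytes]) -> BlockGenerator:
        # generator_refs is now List[bytes]: referenced generators stay raw serialized
        # blobs end to end and are only wrapped where the CLVM runner needs them
        return BlockGenerator(program, ref_blobs)

In the patch itself the equivalent call is BlockGenerator(block.transactions_generator, prev_generators).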