
Commit

change the type of ref_list to List[bytes] instead of List[SerializedProgram]. Also, avoid passing the block generator twice to the worker process for validation
arvidn committed Aug 21, 2024
1 parent 49a8aab commit 9f25c60
Showing 12 changed files with 53 additions and 44 deletions.
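
At a glance, a BlockGenerator now carries its reference generators as already-serialized bytes rather than SerializedProgram objects. A minimal sketch of the caller-side shape this implies (make_block_generator is an illustrative helper, not part of the commit):

from typing import List

from chia.types.blockchain_format.serialized_program import SerializedProgram
from chia.types.generator_types import BlockGenerator


def make_block_generator(program: SerializedProgram, ref_programs: List[SerializedProgram]) -> BlockGenerator:
    # serialize each referenced generator once, at the edge; everything
    # downstream (workers, CLVM runners) now consumes plain bytes
    return BlockGenerator(program, [bytes(p) for p in ref_programs])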
27 changes: 18 additions & 9 deletions chia/_tests/core/full_node/stores/test_block_store.py
@@ -5,7 +5,7 @@
import random
import sqlite3
from pathlib import Path
from typing import List, cast
from typing import List, Optional, cast

import pytest

@@ -39,6 +39,13 @@ def use_cache(request: SubRequest) -> bool:
return cast(bool, request.param)


def maybe_serialize(gen: Optional[SerializedProgram]) -> Optional[bytes]:
if gen is None:
return None
else:
return bytes(gen)


@pytest.mark.limit_consensus_modes(reason="save time")
@pytest.mark.anyio
async def test_block_store(tmp_dir: Path, db_version: int, bt: BlockTools, use_cache: bool) -> None:
@@ -85,7 +92,7 @@ async def test_block_store(tmp_dir: Path, db_version: int, bt: BlockTools, use_c
assert GeneratorBlockInfo(
block.foliage.prev_block_hash, block.transactions_generator, block.transactions_generator_ref_list
) == await store.get_block_info(block.header_hash)
assert block.transactions_generator == await store.get_generator(block.header_hash)
assert maybe_serialize(block.transactions_generator) == await store.get_generator(block.header_hash)
assert block_record == (await store.get_block_record(block_record_hh))
await store.set_in_chain([(block_record.header_hash,)])
await store.set_peak(block_record.header_hash)
@@ -98,7 +105,7 @@ async def test_block_store(tmp_dir: Path, db_version: int, bt: BlockTools, use_c

assert await store.get_full_blocks_at([block.height]) == [block]
if block.transactions_generator is not None:
assert await store.get_generators_at([block.height]) == [block.transactions_generator]
assert await store.get_generators_at([block.height]) == [bytes(block.transactions_generator)]
else:
with pytest.raises(ValueError, match="GENERATOR_REF_HAS_NO_GENERATOR"):
await store.get_generators_at([block.height])
@@ -315,22 +322,24 @@ def generator(i: int) -> SerializedProgram:
await store.set_peak(block_record.header_hash)
new_blocks.append(block)

expected_generators = list(map(lambda x: x.transactions_generator, new_blocks[1:10]))
expected_generators = list(map(lambda x: maybe_serialize(x.transactions_generator), new_blocks[1:10]))
generators = await store.get_generators_at([uint32(x) for x in range(1, 10)])
assert generators == expected_generators

# test out-of-order heights
expected_generators = list(map(lambda x: x.transactions_generator, [new_blocks[i] for i in [4, 8, 3, 9]]))
expected_generators = list(
map(lambda x: maybe_serialize(x.transactions_generator), [new_blocks[i] for i in [4, 8, 3, 9]])
)
generators = await store.get_generators_at([uint32(4), uint32(8), uint32(3), uint32(9)])
assert generators == expected_generators

with pytest.raises(KeyError):
await store.get_generators_at([uint32(100)])

assert await store.get_generator(blocks[2].header_hash) == new_blocks[2].transactions_generator
assert await store.get_generator(blocks[4].header_hash) == new_blocks[4].transactions_generator
assert await store.get_generator(blocks[6].header_hash) == new_blocks[6].transactions_generator
assert await store.get_generator(blocks[7].header_hash) == new_blocks[7].transactions_generator
assert await store.get_generator(blocks[2].header_hash) == maybe_serialize(new_blocks[2].transactions_generator)
assert await store.get_generator(blocks[4].header_hash) == maybe_serialize(new_blocks[4].transactions_generator)
assert await store.get_generator(blocks[6].header_hash) == maybe_serialize(new_blocks[6].transactions_generator)
assert await store.get_generator(blocks[7].header_hash) == maybe_serialize(new_blocks[7].transactions_generator)


@pytest.mark.limit_consensus_modes(reason="save time")
4 changes: 2 additions & 2 deletions chia/_tests/generator/test_rom.py
@@ -63,7 +63,7 @@ def to_sp(sexp: bytes) -> SerializedProgram:


def block_generator() -> BlockGenerator:
generator_list = [to_sp(FIRST_GENERATOR), to_sp(SECOND_GENERATOR)]
generator_list = [FIRST_GENERATOR, SECOND_GENERATOR]
return BlockGenerator(to_sp(COMPILED_GENERATOR_CODE), generator_list)


@@ -80,7 +80,7 @@ def block_generator() -> BlockGenerator:

def run_generator(self: BlockGenerator) -> Tuple[int, Program]:
"""This mode is meant for accepting possibly soft-forked transactions into the mempool"""
args = Program.to([[bytes(g) for g in self.generator_refs]])
args = Program.to([self.generator_refs])
return GENERATOR_MOD.run_with_cost(MAX_COST, [self.program, args])


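Because the refs are already bytes, they can be handed to Program.to directly; the extra per-element bytes() conversion the old code did is now a no-op. A small illustrative check of that equivalence (the ref values are stand-ins, not real generators):

from chia.types.blockchain_format.program import Program

refs = [b"\x80", b"\x80"]  # stand-in serialized generators, illustrative only
assert bytes(Program.to([refs])) == bytes(Program.to([[bytes(r) for r in refs]]))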
10 changes: 6 additions & 4 deletions chia/_tests/util/run_block.py
@@ -61,7 +61,7 @@ def npc_to_dict(npc: NPC) -> Dict[str, Any]:


def run_generator(block_generator: BlockGenerator, constants: ConsensusConstants, max_cost: int) -> List[CAT]:
block_args = [bytes(a) for a in block_generator.generator_refs]
block_args = block_generator.generator_refs
cost, block_result = block_generator.program.run_with_cost(max_cost, [DESERIALIZE_MOD, block_args])

coin_spends = block_result.first()
@@ -126,18 +126,20 @@ def run_generator(block_generator: BlockGenerator, constants: ConsensusConstants
return cat_list


def ref_list_to_args(ref_list: List[uint32], root_path: Path) -> List[SerializedProgram]:
def ref_list_to_args(ref_list: List[uint32], root_path: Path) -> List[bytes]:
args = []
for height in ref_list:
with open(root_path / f"{height}.json", "rb") as f:
program_str = json.load(f)["block"]["transactions_generator"]
args.append(SerializedProgram.fromhex(program_str))
# we need SerializedProgram to handle a possible leading 0x in the
# hex string
args.append(bytes(SerializedProgram.fromhex(program_str)))
return args


def run_generator_with_args(
generator_program_hex: str,
generator_args: List[SerializedProgram],
generator_args: List[bytes],
constants: ConsensusConstants,
cost: uint64,
) -> List[CAT]:
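The detour through SerializedProgram.fromhex exists because the JSON dumps may prefix the generator hex with 0x, which bytes.fromhex rejects. A rough sketch of the idea, with an illustrative value:

from chia.types.blockchain_format.serialized_program import SerializedProgram

program_str = "0xff0180"  # illustrative; the JSON field may carry a leading 0x
blob = bytes(SerializedProgram.fromhex(program_str))
assert blob == bytes.fromhex("ff0180")  # bytes.fromhex("0xff0180") would raise ValueError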
2 changes: 1 addition & 1 deletion chia/_tests/util/test_full_block_utils.py
@@ -253,7 +253,7 @@ async def test_parser():
for block in get_full_blocks():
block_bytes = bytes(block)
gen = generator_from_block(block_bytes)
assert gen == block.transactions_generator
assert gen == bytes(block.transactions_generator)
bi = block_info_from_block(block_bytes)
assert block.transactions_generator == bi.transactions_generator
assert block.prev_header_hash == bi.prev_header_hash
5 changes: 2 additions & 3 deletions chia/consensus/blockchain.py
@@ -35,7 +35,6 @@
from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions
from chia.types.block_protocol import BlockInfo
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.serialized_program import SerializedProgram
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from chia.types.blockchain_format.vdf import VDFInfo
@@ -1069,7 +1068,7 @@ async def get_block_generator(
if len(ref_list) == 0:
return BlockGenerator(block.transactions_generator, [])

result: List[SerializedProgram] = []
result: List[bytes] = []
previous_br = await self.get_block_record_from_db(block.prev_header_hash)
if previous_br is not None and self.height_to_hash(previous_br.height) == block.prev_header_hash:
# We are not in a reorg, no need to look up alternate header hashes
@@ -1104,7 +1103,7 @@
ref_block = additional_height_dict[ref_height]
if ref_block.transactions_generator is None:
raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
result.append(ref_block.transactions_generator)
result.append(bytes(ref_block.transactions_generator))
elif ref_height in reorg_chain:
gen = await self.block_store.get_generator(reorg_chain[ref_height])
if gen is None:
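Stripped of the reorg handling shown above, the method now just collects raw reference bytes and pairs them with the block's own generator. A hedged sketch of that happy path (simplified_get_block_generator is illustrative, not the real method, and it assumes all references are already in the main chain of the store):

from typing import List

from chia.types.full_block import FullBlock
from chia.types.generator_types import BlockGenerator


async def simplified_get_block_generator(block_store, block: FullBlock) -> BlockGenerator:
    # illustrative only: the real Blockchain.get_block_generator also resolves
    # references across reorgs and from blocks not yet persisted to the store
    assert block.transactions_generator is not None
    refs: List[bytes] = await block_store.get_generators_at(list(block.transactions_generator_ref_list))
    return BlockGenerator(block.transactions_generator, refs)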
12 changes: 6 additions & 6 deletions chia/consensus/multiprocess_validation.py
@@ -52,7 +52,7 @@ def batch_pre_validate_blocks(
constants: ConsensusConstants,
blocks_pickled: Dict[bytes, bytes],
full_blocks_pickled: List[bytes],
prev_transaction_generators: List[Optional[bytes]],
prev_transaction_generators: List[Optional[List[bytes]]],
npc_results: Dict[uint32, bytes],
check_filter: bool,
expected_difficulty: List[uint64],
@@ -81,10 +81,10 @@ def batch_pre_validate_blocks(
removals, tx_additions = [], []

if block.transactions_generator is not None and npc_result is None:
prev_generator_bytes = prev_transaction_generators[i]
assert prev_generator_bytes is not None
prev_generators = prev_transaction_generators[i]
assert prev_generators is not None
assert block.transactions_info is not None
block_generator: BlockGenerator = BlockGenerator.from_bytes(prev_generator_bytes)
block_generator = BlockGenerator(block.transactions_generator, prev_generators)
assert block_generator.program == block.transactions_generator
npc_result = get_name_puzzle_conditions(
block_generator,
@@ -312,7 +312,7 @@ async def pre_validate_blocks_multiprocessing(
end_i = min(i + batch_size, len(blocks))
blocks_to_validate = blocks[i:end_i]
b_pickled: List[bytes] = []
previous_generators: List[Optional[bytes]] = []
previous_generators: List[Optional[List[bytes]]] = []
for block in blocks_to_validate:
# We ONLY add blocks which are in the past, based on header hashes (which are validated later) to the
# prev blocks dict. This is important since these blocks are assumed to be valid and are used as previous
@@ -337,7 +337,7 @@
)
]
if block_generator is not None:
previous_generators.append(bytes(block_generator))
previous_generators.append(block_generator.generator_refs)
else:
previous_generators.append(None)

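This is where the "avoid passing the block generator twice" part of the commit lands: the worker already receives the full block bytes, so only the reference blobs cross the process boundary and the BlockGenerator is reassembled on the worker side. A rough sketch of the two sides (payload_for_worker and rebuild_in_worker are hypothetical names for illustration):

from typing import List, Optional

from chia.types.full_block import FullBlock
from chia.types.generator_types import BlockGenerator


def payload_for_worker(block_generator: Optional[BlockGenerator]) -> Optional[List[bytes]]:
    # main-process side: only the reference blobs are shipped to the worker
    return None if block_generator is None else block_generator.generator_refs


def rebuild_in_worker(block: FullBlock, refs: Optional[List[bytes]]) -> Optional[BlockGenerator]:
    # worker side: the block's own generator comes from the already-pickled block,
    # so it is no longer serialized into the payload a second time
    if block.transactions_generator is None:
        return None
    assert refs is not None
    return BlockGenerator(block.transactions_generator, refs)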
17 changes: 8 additions & 9 deletions chia/full_node/block_store.py
@@ -9,7 +9,6 @@
import zstd

from chia.consensus.block_record import BlockRecord
from chia.types.blockchain_format.serialized_program import SerializedProgram
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.full_block import FullBlock
from chia.types.weight_proof import SubEpochChallengeSegment, SubEpochSegments
@@ -264,10 +263,10 @@ async def get_block_info(self, header_hash: bytes32) -> Optional[GeneratorBlockI
b.foliage.prev_block_hash, b.transactions_generator, b.transactions_generator_ref_list
)

async def get_generator(self, header_hash: bytes32) -> Optional[SerializedProgram]:
async def get_generator(self, header_hash: bytes32) -> Optional[bytes]:
cached = self.block_cache.get(header_hash)
if cached is not None:
return cached.transactions_generator
return None if cached.transactions_generator is None else bytes(cached.transactions_generator)

formatted_str = "SELECT block, height from full_blocks WHERE header_hash=?"
async with self.db_wrapper.reader_no_transaction() as conn:
@@ -278,19 +277,19 @@ async def get_generator(self, header_hash: bytes32) -> Optional[SerializedProgra

try:
return generator_from_block(block_bytes)
except Exception as e:
except Exception as e: # pragma: no cover
log.error(f"cheap parser failed for block at height {row[1]}: {e}")
# this is defensive, on the off-chance that
# generator_from_block() fails, fall back to the reliable
# definition of parsing a block
b = FullBlock.from_bytes(block_bytes)
return b.transactions_generator
return None if b.transactions_generator is None else bytes(b.transactions_generator)

async def get_generators_at(self, heights: List[uint32]) -> List[SerializedProgram]:
async def get_generators_at(self, heights: List[uint32]) -> List[bytes]:
if len(heights) == 0:
return []

generators: Dict[uint32, SerializedProgram] = {}
generators: Dict[uint32, bytes] = {}
formatted_str = (
f"SELECT block, height from full_blocks "
f'WHERE in_main_chain=1 AND height in ({"?," * (len(heights) - 1)}?)'
@@ -302,13 +301,13 @@ async def get_generators_at(self, heights: List[uint32]) -> List[SerializedProgr

try:
gen = generator_from_block(block_bytes)
except Exception as e:
except Exception as e: # pragma: no cover
log.error(f"cheap parser failed for block at height {row[1]}: {e}")
# this is defensive, on the off-chance that
# generator_from_block() fails, fall back to the reliable
# definition of parsing a block
b = FullBlock.from_bytes(block_bytes)
gen = b.transactions_generator
gen = None if b.transactions_generator is None else bytes(b.transactions_generator)
if gen is None:
raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
generators[uint32(row[1])] = gen
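The store now returns raw generator bytes; any caller that still wants a program object can convert at the edge. A hedged sketch of that pattern (as_program is a made-up helper name):

from typing import Optional

from chia.types.blockchain_format.serialized_program import SerializedProgram


def as_program(gen: Optional[bytes]) -> Optional[SerializedProgram]:
    # wrap the raw bytes from BlockStore.get_generator only where a
    # SerializedProgram object is actually required
    return None if gen is None else SerializedProgram.from_bytes(gen)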
8 changes: 4 additions & 4 deletions chia/full_node/mempool_check_conditions.py
@@ -47,7 +47,7 @@ def get_name_puzzle_conditions(
run_block = run_block_generator

try:
block_args = [bytes(gen) for gen in generator.generator_refs]
block_args = generator.generator_refs
err, result = run_block(bytes(generator.program), block_args, max_cost, flags, constants)
assert (err is None) != (result is None)
if err is not None:
@@ -66,7 +66,7 @@ def get_puzzle_and_solution_for_coin(
try:
puzzle, solution = get_puzzle_and_solution_for_coin_rust(
generator.program,
[bytes(a) for a in generator.generator_refs],
generator.generator_refs,
constants.MAX_BLOCK_COST_CLVM,
coin,
get_flags_for_height_and_constants(height, constants),
@@ -80,7 +80,7 @@ def get_spends_for_block(generator: BlockGenerator, height: int, constants: Cons
args = bytearray(b"\xff")
args += bytes(DESERIALIZE_MOD)
args += b"\xff"
args += bytes(Program.to([bytes(a) for a in generator.generator_refs]))
args += bytes(Program.to(generator.generator_refs))
args += b"\x80\x80"

_, ret = run_chia_program(
@@ -107,7 +107,7 @@ def get_spends_for_block_with_conditions(
args = bytearray(b"\xff")
args += bytes(DESERIALIZE_MOD)
args += b"\xff"
args += bytes(Program.to([bytes(a) for a in generator.generator_refs]))
args += bytes(Program.to(generator.generator_refs))
args += b"\x80\x80"

flags = get_flags_for_height_and_constants(height, constants)
4 changes: 2 additions & 2 deletions chia/simulator/block_tools.py
@@ -1940,7 +1940,7 @@ def compute_cost_test(generator: BlockGenerator, constants: ConsensusConstants,
clvm_cost = 0

if height >= constants.HARD_FORK_HEIGHT:
blocks = [bytes(g) for g in generator.generator_refs]
blocks = generator.generator_refs
cost, result = generator.program._run(INFINITE_COST, MEMPOOL_MODE | ALLOW_BACKREFS, [DESERIALIZE_MOD, blocks])
clvm_cost += cost

@@ -1955,7 +1955,7 @@ def compute_cost_test(generator: BlockGenerator, constants: ConsensusConstants,
condition_cost += conditions_cost(result)

else:
block_program_args = SerializedProgram.to([[bytes(g) for g in generator.generator_refs]])
block_program_args = SerializedProgram.to([generator.generator_refs])
clvm_cost, result = GENERATOR_MOD._run(INFINITE_COST, MEMPOOL_MODE, [generator.program, block_program_args])

for res in result.first().as_iter():
2 changes: 1 addition & 1 deletion chia/types/generator_types.py
@@ -19,4 +19,4 @@ def get_generator_for_block_height(self, height: uint32) -> SerializedProgram:
@dataclass(frozen=True)
class BlockGenerator(Streamable):
program: SerializedProgram
generator_refs: List[SerializedProgram]
generator_refs: List[bytes]
4 changes: 2 additions & 2 deletions chia/util/full_block_utils.py
@@ -204,7 +204,7 @@ def skip_transactions_info(buf: memoryview) -> memoryview:
return skip_list(buf, skip_coin)


def generator_from_block(buf: memoryview) -> Optional[SerializedProgram]:
def generator_from_block(buf: memoryview) -> Optional[bytes]:
buf = skip_list(buf, skip_end_of_sub_slot_bundle) # finished_sub_slots
buf = skip_reward_chain_block(buf) # reward_chain_block
buf = skip_optional(buf, skip_vdf_proof) # challenge_chain_sp_proof
@@ -222,7 +222,7 @@ def generator_from_block(buf: memoryview) -> Optional[SerializedProgram]:

buf = buf[1:]
length = serialized_length(buf)
return SerializedProgram.from_bytes(bytes(buf[:length]))
return bytes(buf[:length])


# this implements the BlockInfo protocol
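The cheap parser now slices the serialized generator straight out of the block's byte encoding without ever constructing a SerializedProgram. A hedged usage sketch mirroring the test_parser assertion above (extract_generator is illustrative):

from typing import Optional

from chia.types.full_block import FullBlock
from chia.util.full_block_utils import generator_from_block


def extract_generator(block: FullBlock) -> Optional[bytes]:
    # skips the preceding block fields, then returns the raw generator bytes
    gen = generator_from_block(memoryview(bytes(block)))
    if block.transactions_generator is not None:
        assert gen == bytes(block.transactions_generator)
    return gen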
2 changes: 1 addition & 1 deletion tools/analyze-chain.py
@@ -95,7 +95,7 @@ def main(file: Path, mempool_mode: bool, start: int, end: Optional[int], call: O
ref = c.execute("SELECT block FROM full_blocks WHERE height=? and in_main_chain=1", (h,))
generator = generator_from_block(zstd.decompress(ref.fetchone()[0]))
assert generator is not None
generator_blobs.append(bytes(generator))
generator_blobs.append(generator)
ref.close()

ref_lookup_time = time() - start_time
