fix syncing when extending the main chain. In this case we should not build a ForkInfo object with the coin set. It would end up taking too much memory
arvidn committed Oct 17, 2023
1 parent eb3e9ec commit 8925f25
Showing 1 changed file with 22 additions and 7 deletions.
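
The gist of the change, rephrased as a standalone sketch (Peak and IncomingBlock below are simplified stand-ins for chia's BlockRecord and FullBlock, not the real classes): a batch extends the main chain when its first block builds directly on the current peak, or when it starts with the peak itself, and only when that check fails does the sync path need a ForkInfo at all.

from dataclasses import dataclass
from typing import Optional


@dataclass
class Peak:
    # simplified stand-in for chia's BlockRecord
    header_hash: bytes


@dataclass
class IncomingBlock:
    # simplified stand-in for chia's FullBlock
    header_hash: bytes
    prev_header_hash: bytes


def extends_main_chain(peak: Optional[Peak], first_block: IncomingBlock) -> bool:
    # With no peak yet (fresh node), every batch trivially extends the chain.
    if peak is None:
        return True
    # The batch extends the main chain if its first block builds on the peak,
    # or if the first block is the current peak itself (overlapping batches).
    return peak.header_hash in (first_block.prev_header_hash, first_block.header_hash)
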
29 changes: 22 additions & 7 deletions chia/full_node/full_node.py
@@ -1061,12 +1061,6 @@ async def sync_from_fork_point(
         # normally "fork_point" or "fork_height" refers to the first common
         # block between the main chain and the fork. Here "fork_point_height"
         # seems to refer to the first diverging block
-        if fork_point_height == 0:
-            fork_info = ForkInfo(-1, -1, bytes32([0] * 32))
-        else:
-            fork_hash = self.blockchain.height_to_hash(uint32(fork_point_height - 1))
-            assert fork_hash is not None
-            fork_info = ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash)

         async def fetch_block_batches(
             batch_queue: asyncio.Queue[Optional[Tuple[WSChiaConnection, List[FullBlock]]]]
@@ -1103,6 +1097,8 @@ async def fetch_block_batches(
         async def validate_block_batches(
             inner_batch_queue: asyncio.Queue[Optional[Tuple[WSChiaConnection, List[FullBlock]]]]
         ) -> None:
+            fork_info: Optional[ForkInfo] = None
+
             while True:
                 res: Optional[Tuple[WSChiaConnection, List[FullBlock]]] = await inner_batch_queue.get()
                 if res is None:
@@ -1111,6 +1107,25 @@ async def validate_block_batches(
                 peer, blocks = res
                 start_height = blocks[0].height
                 end_height = blocks[-1].height
+
+                # in case we're validating a reorg fork (i.e. not extending the
+                # main chain), we need to record the coin set from that fork in
+                # fork_info. Otherwise validation is very expensive, especially
+                # for deep reorgs
+                peak: Optional[BlockRecord]
+                if fork_info is None:
+                    peak = self.blockchain.get_peak()
+                    extending_main_chain: bool = peak is None or (
+                        peak.header_hash == blocks[0].prev_header_hash or peak.header_hash == blocks[0].header_hash
+                    )
+                    if not extending_main_chain:
+                        if fork_point_height == 0:
+                            fork_info = ForkInfo(-1, -1, bytes32([0] * 32))
+                        else:
+                            fork_hash = self.blockchain.height_to_hash(uint32(fork_point_height - 1))
+                            assert fork_hash is not None
+                            fork_info = ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash)
+
                 success, state_change_summary, err = await self.add_block_batch(
                     blocks,
                     peer,
@@ -1125,7 +1140,7 @@
                         raise ValidationError(err, f"Failed to validate block batch {start_height} to {end_height}")
                     raise ValueError(f"Failed to validate block batch {start_height} to {end_height}")
                 self.log.info(f"Added blocks {start_height} to {end_height}")
-                peak: Optional[BlockRecord] = self.blockchain.get_peak()
+                peak = self.blockchain.get_peak()
                 if state_change_summary is not None:
                     assert peak is not None
                     # Hints must be added to the DB. The other post-processing tasks are not required when syncing
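
For reference, the lazy ForkInfo construction that the batch loop now performs, pulled out as a hedged sketch: make_fork_info is a hypothetical helper (not a function in the repository), lookup_hash stands in for self.blockchain.height_to_hash, and the local ForkInfo class only mirrors the three constructor arguments seen in the diff, with guessed field names.

from typing import Callable, Optional


class ForkInfo:
    # local stub: mirrors only the three constructor arguments used in the
    # diff above; the field names here are guesses, not the real dataclass
    def __init__(self, fork_height: int, peak_height: int, peak_hash: bytes) -> None:
        self.fork_height = fork_height
        self.peak_height = peak_height
        self.peak_hash = peak_hash


def make_fork_info(fork_point_height: int, lookup_hash: Callable[[int], Optional[bytes]]) -> ForkInfo:
    # Hypothetical helper: only worth calling when a batch does NOT extend the
    # main chain, so the fork-tracking state is never built during normal sync.
    if fork_point_height == 0:
        # Forking from genesis: sentinel height -1 and an all-zero hash.
        return ForkInfo(-1, -1, bytes(32))
    fork_hash = lookup_hash(fork_point_height - 1)
    assert fork_hash is not None
    return ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash)

Because fork_info starts out as None and is only populated on the first batch that does not extend the main chain, a node that is simply catching up on the main chain never pays the memory cost of tracking a fork's coin set, which is the point of the commit message.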
