diff --git a/crates/optimism/trie/src/backfill.rs b/crates/optimism/trie/src/backfill.rs
index 5d7808bf7be..3f0372900e2 100644
--- a/crates/optimism/trie/src/backfill.rs
+++ b/crates/optimism/trie/src/backfill.rs
@@ -197,6 +197,28 @@ async fn backfill<
     Ok(total_entries)
 }
 
+/// Save storage branches to storage.
+async fn save_storage_branches<S: OpProofsStore>(
+    storage: &S,
+    entries: Vec<(B256, StorageTrieEntry)>,
+) -> Result<(), OpProofsStorageError> {
+    // Group entries by hashed address
+    let mut by_address: HashMap<B256, Vec<(Nibbles, Option<BranchNodeCompact>)>> =
+        HashMap::default();
+    for (hashed_address, storage_entry) in entries {
+        by_address
+            .entry(hashed_address)
+            .or_default()
+            .push((storage_entry.nibbles.0, Some(storage_entry.node)));
+    }
+
+    // Store each address's storage trie branches
+    for (address, branches) in by_address {
+        storage.store_storage_branches(address, branches).await?;
+    }
+    Ok(())
+}
+
 impl<'a, Tx: DbTx, S: OpProofsStore + Send> BackfillJob<'a, Tx, S> {
     /// Create a new backfill job.
     pub const fn new(storage: S, tx: &'a Tx) -> Self {
@@ -324,31 +346,12 @@ impl<'a, Tx: DbTx, S: OpProofsStore + Send> BackfillJob<'a, Tx, S> {
         let source = StoragesTrieIter::new(start_cursor);
         let storage = &self.storage;
 
-        let save_fn =
-            async |entries: Vec<(B256, StorageTrieEntry)>| -> Result<(), OpProofsStorageError> {
-                // Group entries by hashed address
-                let mut by_address: HashMap<B256, Vec<(Nibbles, Option<BranchNodeCompact>)>> =
-                    HashMap::default();
-                for (hashed_address, storage_entry) in entries {
-                    by_address
-                        .entry(hashed_address)
-                        .or_default()
-                        .push((storage_entry.nibbles.0, Some(storage_entry.node)));
-                }
-
-                // Store each address's storage trie branches
-                for (address, branches) in by_address {
-                    storage.store_storage_branches(address, branches).await?;
-                }
-                Ok(())
-            };
-
         backfill(
             "storage trie",
             source,
             BACKFILL_STORAGE_THRESHOLD,
             BACKFILL_LOG_THRESHOLD,
-            save_fn,
+            |entries| save_storage_branches(storage, entries),
         )
         .await?;