diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3a63ba81c..4a9f57113 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -13,6 +13,7 @@
 - Now accounts for genesis are optional. Accounts directory will be overwritten, if `--force` flag is set (#420).
 - Added `GetAccountStateDelta` endpoint (#418).
 - Added `CheckNullifiersByPrefix` endpoint (#419).
+- Support multiple inflight transactions on the same account (#407).
 
 ### Fixes
 
diff --git a/crates/block-producer/src/batch_builder/batch.rs b/crates/block-producer/src/batch_builder/batch.rs
index 04dbe72dc..916b63bd8 100644
--- a/crates/block-producer/src/batch_builder/batch.rs
+++ b/crates/block-producer/src/batch_builder/batch.rs
@@ -1,19 +1,18 @@
 use std::{
-    collections::{BTreeMap, BTreeSet},
+    collections::{btree_map::Entry, BTreeMap, BTreeSet},
     mem,
 };
 
 use miden_objects::{
-    accounts::AccountId,
+    accounts::{delta::AccountUpdateDetails, AccountId},
     batches::BatchNoteTree,
-    block::BlockAccountUpdate,
     crypto::{
         hash::blake::{Blake3Digest, Blake3_256},
         merkle::MerklePath,
     },
     notes::{NoteHeader, NoteId, Nullifier},
-    transaction::{InputNoteCommitment, OutputNote, TransactionId, TxAccountUpdate},
-    Digest, MAX_NOTES_PER_BATCH,
+    transaction::{InputNoteCommitment, OutputNote, TransactionId},
+    AccountDeltaError, Digest, MAX_NOTES_PER_BATCH,
 };
 use tracing::instrument;
 
@@ -31,12 +30,45 @@ pub type BatchId = Blake3Digest<32>;
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct TransactionBatch {
     id: BatchId,
-    updated_accounts: Vec<(TransactionId, TxAccountUpdate)>,
+    updated_accounts: BTreeMap<AccountId, AccountUpdate>,
     input_notes: Vec<InputNoteCommitment>,
     output_notes_smt: BatchNoteTree,
     output_notes: Vec<OutputNote>,
 }
 
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct AccountUpdate {
+    pub init_state: Digest,
+    pub final_state: Digest,
+    pub transactions: Vec<TransactionId>,
+    pub details: AccountUpdateDetails,
+}
+
+impl AccountUpdate {
+    fn new(tx: &ProvenTransaction) -> Self {
+        Self {
+            init_state: tx.account_update().init_state_hash(),
+            final_state: tx.account_update().final_state_hash(),
+            transactions: vec![tx.id()],
+            details: tx.account_update().details().clone(),
+        }
+    }
+
+    /// Merges the transaction's update into this account update.
+    fn merge_tx(&mut self, tx: &ProvenTransaction) -> Result<(), AccountDeltaError> {
+        assert!(
+            self.final_state == tx.account_update().init_state_hash(),
+            "Transaction's initial state does not match current account state"
+        );
+
+        self.final_state = tx.account_update().final_state_hash();
+        self.transactions.push(tx.id());
+        self.details = self.details.clone().merge(tx.account_update().details().clone())?;
+
+        Ok(())
+    }
+}
+
 impl TransactionBatch {
     // CONSTRUCTORS
     // --------------------------------------------------------------------------------------------
@@ -62,14 +94,23 @@ impl TransactionBatch {
         // Populate batch output notes and updated accounts.
         let mut output_notes = OutputNoteTracker::new(&txs)?;
-        let mut updated_accounts = vec![];
+        let mut updated_accounts = BTreeMap::<AccountId, AccountUpdate>::new();
         let mut unauthenticated_input_notes = BTreeSet::new();
 
         for tx in &txs {
-            // TODO: we need to handle a possibility that a batch contains multiple transactions against
-            // the same account (e.g., transaction `x` takes account from state `A` to `B` and
-            // transaction `y` takes account from state `B` to `C`). These will need to be merged
-            // into a single "update" `A` to `C`.
-            updated_accounts.push((tx.id(), tx.account_update().clone()));
+            // Merge account updates so that state transitions A->B->C become A->C.
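+            // If the account already has an inflight update in this batch, the new transaction
+            // is merged into it via `merge_tx`; if the account deltas cannot be merged, batch
+            // building fails with `BuildBatchError::AccountUpdateError`.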
+            match updated_accounts.entry(tx.account_id()) {
+                Entry::Vacant(vacant) => {
+                    vacant.insert(AccountUpdate::new(tx));
+                },
+                Entry::Occupied(occupied) => occupied.into_mut().merge_tx(tx).map_err(|error| {
+                    BuildBatchError::AccountUpdateError {
+                        account_id: tx.account_id(),
+                        error,
+                        txs: txs.clone(),
+                    }
+                })?,
+            };
+
             // Check unauthenticated input notes for duplicates:
             for note in tx.get_unauthenticated_notes() {
                 let id = note.id();
@@ -142,20 +183,13 @@ impl TransactionBatch {
     pub fn account_initial_states(&self) -> impl Iterator<Item = (AccountId, Digest)> + '_ {
         self.updated_accounts
             .iter()
-            .map(|(_, update)| (update.account_id(), update.init_state_hash()))
+            .map(|(&account_id, update)| (account_id, update.init_state))
     }
 
     /// Returns an iterator over (account_id, details, new_state_hash) tuples for accounts that were
     /// modified in this transaction batch.
-    pub fn updated_accounts(&self) -> impl Iterator<Item = BlockAccountUpdate> + '_ {
-        self.updated_accounts.iter().map(|(transaction_id, update)| {
-            BlockAccountUpdate::new(
-                update.account_id(),
-                update.final_state_hash(),
-                update.details().clone(),
-                vec![*transaction_id],
-            )
-        })
+    pub fn updated_accounts(&self) -> impl Iterator<Item = (&AccountId, &AccountUpdate)> + '_ {
+        self.updated_accounts.iter()
     }
 
     /// Returns input notes list consumed by the transactions in this batch. Any unauthenticated
diff --git a/crates/block-producer/src/batch_builder/tests/mod.rs b/crates/block-producer/src/batch_builder/tests/mod.rs
index 173c23016..77118b5c6 100644
--- a/crates/block-producer/src/batch_builder/tests/mod.rs
+++ b/crates/block-producer/src/batch_builder/tests/mod.rs
@@ -162,12 +162,15 @@ async fn test_batch_builder_find_dangling_notes() {
         },
     ));
 
+    // An account with enough states to simulate running 2 transactions against it.
+    let account = MockPrivateAccount::<3>::from(1);
+
     let note_1 = mock_note(1);
     let note_2 = mock_note(2);
-    let tx1 = MockProvenTxBuilder::with_account_index(1)
+    let tx1 = MockProvenTxBuilder::with_account(account.id, account.states[0], account.states[1])
         .output_notes(vec![OutputNote::Full(note_1.clone())])
         .build();
-    let tx2 = MockProvenTxBuilder::with_account_index(1)
+    let tx2 = MockProvenTxBuilder::with_account(account.id, account.states[1], account.states[2])
         .unauthenticated_notes(vec![note_1.clone()])
         .output_notes(vec![OutputNote::Full(note_2.clone())])
         .build();
@@ -184,10 +187,10 @@ async fn test_batch_builder_find_dangling_notes() {
 
     let note_3 = mock_note(3);
 
-    let tx1 = MockProvenTxBuilder::with_account_index(1)
+    let tx1 = MockProvenTxBuilder::with_account(account.id, account.states[0], account.states[1])
         .unauthenticated_notes(vec![note_2.clone()])
         .build();
-    let tx2 = MockProvenTxBuilder::with_account_index(1)
+    let tx2 = MockProvenTxBuilder::with_account(account.id, account.states[1], account.states[2])
         .unauthenticated_notes(vec![note_3.clone()])
         .build();
 
diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs
index b6a64bd3e..7783d95ea 100644
--- a/crates/block-producer/src/block_builder/mod.rs
+++ b/crates/block-producer/src/block_builder/mod.rs
@@ -3,7 +3,8 @@ use std::{collections::BTreeSet, sync::Arc};
 use async_trait::async_trait;
 use miden_node_utils::formatting::{format_array, format_blake3_digest};
 use miden_objects::{
-    block::{Block, BlockAccountUpdate},
+    accounts::AccountId,
+    block::Block,
     notes::{NoteHeader, Nullifier},
     transaction::InputNoteCommitment,
 };
@@ -74,8 +75,11 @@ where
             batches = %format_array(batches.iter().map(|batch| format_blake3_digest(batch.id()))),
         );
 
-        let updated_accounts: Vec<_> =
-            batches.iter().flat_map(TransactionBatch::updated_accounts).collect();
+        let updated_account_set: BTreeSet<AccountId> = batches
+            .iter()
+            .flat_map(TransactionBatch::updated_accounts)
+            .map(|(account_id, _)| *account_id)
+            .collect();
 
         let output_notes: Vec<_> =
             batches.iter().map(TransactionBatch::output_notes).cloned().collect();
 
@@ -103,7 +107,7 @@ where
         let block_inputs = self
             .store
             .get_block_inputs(
-                updated_accounts.iter().map(BlockAccountUpdate::account_id),
+                updated_account_set.into_iter(),
                 produced_nullifiers.iter(),
                 dangling_notes.iter(),
             )
             .await
@@ -117,7 +121,7 @@ where
             return Err(BuildBlockError::UnauthenticatedNotesNotFound(missing_notes));
         }
 
-        let block_header_witness = BlockWitness::new(block_inputs, batches)?;
+        let (block_header_witness, updated_accounts) = BlockWitness::new(block_inputs, batches)?;
 
         let new_block_header = self.block_kernel.prove(block_header_witness)?;
         let block_num = new_block_header.block_num();
diff --git a/crates/block-producer/src/block_builder/prover/block_witness.rs b/crates/block-producer/src/block_builder/prover/block_witness.rs
index e2f02401c..53148abab 100644
--- a/crates/block-producer/src/block_builder/prover/block_witness.rs
+++ b/crates/block-producer/src/block_builder/prover/block_witness.rs
@@ -1,7 +1,8 @@
 use std::collections::{BTreeMap, BTreeSet};
 
 use miden_objects::{
-    accounts::AccountId,
+    accounts::{delta::AccountUpdateDetails, AccountId},
+    block::BlockAccountUpdate,
     crypto::merkle::{EmptySubtreeRoots, MerklePath, MerkleStore, MmrPeaks, SmtProof},
     notes::Nullifier,
     transaction::TransactionId,
@@ -10,6 +11,7 @@ use miden_objects::{
 };
 
 use crate::{
+    batch_builder::batch::AccountUpdate,
     block::BlockInputs,
     errors::{BlockProverError, BuildBlockError},
     TransactionBatch,
@@ -31,44 +33,13 @@ pub struct BlockWitness {
 impl BlockWitness {
     pub fn new(
-        block_inputs: BlockInputs,
+        mut block_inputs: BlockInputs,
         batches: &[TransactionBatch],
-    ) -> Result<Self, BuildBlockError> {
-        Self::validate_inputs(&block_inputs, batches)?;
-
-        let updated_accounts = {
-            let mut account_initial_states: BTreeMap<AccountId, Digest> =
-                batches.iter().flat_map(TransactionBatch::account_initial_states).collect();
-
-            let mut account_merkle_proofs: BTreeMap<AccountId, MerklePath> = block_inputs
-                .accounts
-                .into_iter()
-                .map(|(account_id, witness)| (account_id, witness.proof))
-                .collect();
-
-            batches
-                .iter()
-                .flat_map(TransactionBatch::updated_accounts)
-                .map(|update| {
-                    let initial_state_hash = account_initial_states
-                        .remove(&update.account_id())
-                        .expect("already validated that key exists");
-                    let proof = account_merkle_proofs
-                        .remove(&update.account_id())
-                        .expect("already validated that key exists");
-
-                    (
-                        update.account_id(),
-                        AccountUpdateWitness {
-                            initial_state_hash,
-                            final_state_hash: update.new_state_hash(),
-                            proof,
-                            transactions: update.transactions().to_vec(),
-                        },
-                    )
-                })
-                .collect()
-        };
+    ) -> Result<(Self, Vec<BlockAccountUpdate>), BuildBlockError> {
+        if batches.len() > MAX_BATCHES_PER_BLOCK {
+            return Err(BuildBlockError::TooManyBatchesInBlock(batches.len()));
+        }
+        Self::validate_nullifiers(&block_inputs, batches)?;
 
         let batch_created_notes_roots = batches
             .iter()
@@ -77,13 +48,87 @@ impl BlockWitness {
             .map(|(batch_index, batch)| (batch_index, batch.output_notes_root()))
             .collect();
 
-        Ok(Self {
-            updated_accounts,
-            batch_created_notes_roots,
-            produced_nullifiers: block_inputs.nullifiers,
-            chain_peaks: block_inputs.chain_peaks,
-            prev_header: block_inputs.block_header,
-        })
+        // Order account updates by account ID and each update's initial state hash.
+        //
+        // This lets us chronologically order the updates per account across batches.
+        let mut updated_accounts = BTreeMap::<AccountId, BTreeMap<Digest, AccountUpdate>>::new();
+        for (account_id, update) in batches.iter().flat_map(TransactionBatch::updated_accounts) {
+            updated_accounts
+                .entry(*account_id)
+                .or_default()
+                .insert(update.init_state, update.clone());
+        }
+
+        // Build account witnesses.
+        let mut account_witnesses = Vec::with_capacity(updated_accounts.len());
+        let mut block_updates = Vec::with_capacity(updated_accounts.len());
+
+        for (account_id, mut updates) in updated_accounts {
+            let (initial_state_hash, proof) = block_inputs
+                .accounts
+                .remove(&account_id)
+                .map(|witness| (witness.hash, witness.proof))
+                .ok_or(BuildBlockError::MissingAccountInput(account_id))?;
+
+            let mut details: Option<AccountUpdateDetails> = None;
+
+            // Chronologically chain updates for this account together using the state hashes to link them.
+            let mut transactions = Vec::new();
+            let mut current_hash = initial_state_hash;
+            while !updates.is_empty() {
+                let update = updates.remove(&current_hash).ok_or_else(|| {
+                    BuildBlockError::InconsistentAccountStateTransition(
+                        account_id,
+                        current_hash,
+                        updates.keys().copied().collect(),
+                    )
+                })?;
+
+                transactions.extend(update.transactions);
+                current_hash = update.final_state;
+
+                details = Some(match details {
+                    None => update.details,
+                    Some(details) => details.merge(update.details).map_err(|err| {
+                        BuildBlockError::AccountUpdateError { account_id, error: err }
+                    })?,
+                });
+            }
+
+            account_witnesses.push((
+                account_id,
+                AccountUpdateWitness {
+                    initial_state_hash,
+                    final_state_hash: current_hash,
+                    proof,
+                    transactions: transactions.clone(),
+                },
+            ));
+
+            block_updates.push(BlockAccountUpdate::new(
+                account_id,
+                current_hash,
+                details.expect("Must be some by now"),
+                transactions,
+            ));
+        }
+
+        if !block_inputs.accounts.is_empty() {
+            return Err(BuildBlockError::ExtraStoreData(
+                block_inputs.accounts.keys().copied().collect(),
+            ));
+        }
+
+        Ok((
+            Self {
+                updated_accounts: account_witnesses,
+                batch_created_notes_roots,
+                produced_nullifiers: block_inputs.nullifiers,
+                chain_peaks: block_inputs.chain_peaks,
+                prev_header: block_inputs.block_header,
+            },
+            block_updates,
+        ))
     }
 
     /// Converts [`BlockWitness`] into inputs to the block kernel program
@@ -106,70 +151,6 @@ impl BlockWitness {
     // HELPERS
     // ---------------------------------------------------------------------------------------------
 
-    fn validate_inputs(
-        block_inputs: &BlockInputs,
-        batches: &[TransactionBatch],
-    ) -> Result<(), BuildBlockError> {
-        if batches.len() > MAX_BATCHES_PER_BLOCK {
-            return Err(BuildBlockError::TooManyBatchesInBlock(batches.len()));
-        }
-
-        Self::validate_account_states(block_inputs, batches)?;
-        Self::validate_nullifiers(block_inputs, batches)?;
-
-        Ok(())
-    }
-
-    /// Validates that initial account states coming from the batches are the same as the account
-    /// states returned from the store
-    fn validate_account_states(
-        block_inputs: &BlockInputs,
-        batches: &[TransactionBatch],
-    ) -> Result<(), BuildBlockError> {
-        let batches_initial_states: BTreeMap<AccountId, Digest> =
-            batches.iter().flat_map(|batch| batch.account_initial_states()).collect();
-
-        let accounts_in_batches: BTreeSet<AccountId> =
-            batches_initial_states.keys().cloned().collect();
-        let accounts_in_store: BTreeSet<AccountId> =
-            block_inputs.accounts.keys().copied().collect();
-
-        if accounts_in_batches == accounts_in_store {
-            let accounts_with_different_hashes: Vec<AccountId> = block_inputs
-                .accounts
-                .iter()
-                .filter_map(|(account_id, witness)| {
-                    let hash_in_store =
witness.hash; - let hash_in_batches = batches_initial_states - .get(account_id) - .expect("we already verified that account id is contained in batches"); - - if hash_in_store == *hash_in_batches { - None - } else { - Some(*account_id) - } - }) - .collect(); - - if accounts_with_different_hashes.is_empty() { - Ok(()) - } else { - Err(BuildBlockError::InconsistentAccountStates(accounts_with_different_hashes)) - } - } else { - // The batches and store don't modify the same set of accounts - let union: BTreeSet = - accounts_in_batches.union(&accounts_in_store).cloned().collect(); - let intersection: BTreeSet = - accounts_in_batches.intersection(&accounts_in_store).cloned().collect(); - - let difference: Vec = union.difference(&intersection).cloned().collect(); - - Err(BuildBlockError::InconsistentAccountIds(difference)) - } - } - /// Validates that the nullifiers returned from the store are the same the produced nullifiers in the batches. /// Note that validation that the value of the nullifiers is `0` will be done in MASM. fn validate_nullifiers( diff --git a/crates/block-producer/src/block_builder/prover/tests.rs b/crates/block-producer/src/block_builder/prover/tests.rs index 9dacd6dcb..5c99b5f7c 100644 --- a/crates/block-producer/src/block_builder/prover/tests.rs +++ b/crates/block-producer/src/block_builder/prover/tests.rs @@ -14,10 +14,11 @@ use miden_objects::{ SMT_DEPTH, }, notes::{NoteHeader, NoteMetadata, NoteTag, NoteType}, - transaction::OutputNote, + transaction::{OutputNote, ProvenTransaction}, Felt, BLOCK_OUTPUT_NOTES_TREE_DEPTH, ONE, ZERO, }; +use self::block_witness::AccountUpdateWitness; use super::*; use crate::{ block::{AccountWitness, BlockInputs}, @@ -88,10 +89,7 @@ fn test_block_witness_validation_inconsistent_account_ids() { let block_witness_result = BlockWitness::new(block_inputs_from_store, &batches); - assert_eq!( - block_witness_result, - Err(BuildBlockError::InconsistentAccountIds(vec![account_id_1, account_id_3])) - ); + assert!(block_witness_result.is_err()); } /// Tests that `BlockWitness` constructor fails if the store and transaction batches contain a @@ -162,10 +160,108 @@ fn test_block_witness_validation_inconsistent_account_hashes() { assert_eq!( block_witness_result, - Err(BuildBlockError::InconsistentAccountStates(vec![account_id_1])) + Err(BuildBlockError::InconsistentAccountStateTransition( + account_id_1, + account_1_hash_store, + vec![account_1_hash_batches] + )) ); } +/// Creates two batches which each update the same pair of accounts. +/// +/// The transactions are ordered such that the batches cannot be chronologically ordered +/// themselves: `[tx_x0, tx_y1], [tx_y0, tx_x1]`. This test ensures that the witness is +/// produced correctly as if for a single batch: `[tx_x0, tx_x1, tx_y0, tx_y1]`. 
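+///
+/// The expected result is a single `AccountUpdateWitness` per account, spanning the account's
+/// first initial state to its last final state and listing its transactions in order.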
+#[test] +fn test_block_witness_multiple_batches_per_account() { + let x_account_id = + AccountId::new_unchecked(Felt::new(ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN)); + let y_account_id = AccountId::new_unchecked(Felt::new(ACCOUNT_ID_OFF_CHAIN_SENDER)); + + let x_hashes = [ + Digest::new((0..4).map(Felt::new).collect::>().try_into().unwrap()), + Digest::new((4..8).map(Felt::new).collect::>().try_into().unwrap()), + Digest::new((8..12).map(Felt::new).collect::>().try_into().unwrap()), + ]; + let y_hashes = [ + Digest::new((12..16).map(Felt::new).collect::>().try_into().unwrap()), + Digest::new((16..20).map(Felt::new).collect::>().try_into().unwrap()), + Digest::new((20..24).map(Felt::new).collect::>().try_into().unwrap()), + ]; + + let x_txs = [ + MockProvenTxBuilder::with_account(x_account_id, x_hashes[0], x_hashes[1]).build(), + MockProvenTxBuilder::with_account(x_account_id, x_hashes[1], x_hashes[2]).build(), + ]; + let y_txs = [ + MockProvenTxBuilder::with_account(y_account_id, y_hashes[0], y_hashes[1]).build(), + MockProvenTxBuilder::with_account(y_account_id, y_hashes[1], y_hashes[2]).build(), + ]; + + let x_proof = MerklePath::new(vec![Digest::new( + (24..28).map(Felt::new).collect::>().try_into().unwrap(), + )]); + let y_proof = MerklePath::new(vec![Digest::new( + (28..32).map(Felt::new).collect::>().try_into().unwrap(), + )]); + + let block_inputs_from_store: BlockInputs = { + let block_header = BlockHeader::mock(0, None, None, &[]); + let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); + + let x_witness = AccountWitness { + hash: x_hashes[0], + proof: x_proof.clone(), + }; + let y_witness = AccountWitness { + hash: y_hashes[0], + proof: y_proof.clone(), + }; + let accounts = BTreeMap::from_iter([(x_account_id, x_witness), (y_account_id, y_witness)]); + + BlockInputs { + block_header, + chain_peaks, + accounts, + nullifiers: Default::default(), + found_unauthenticated_notes: Default::default(), + } + }; + + let batches = { + let batch_1 = + TransactionBatch::new(vec![x_txs[0].clone(), y_txs[1].clone()], Default::default()) + .unwrap(); + let batch_2 = + TransactionBatch::new(vec![y_txs[0].clone(), x_txs[1].clone()], Default::default()) + .unwrap(); + + vec![batch_1, batch_2] + }; + + let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); + let account_witnesses = block_witness.updated_accounts.into_iter().collect::>(); + + let x_expected = AccountUpdateWitness { + initial_state_hash: x_hashes[0], + final_state_hash: *x_hashes.last().unwrap(), + proof: x_proof, + transactions: x_txs.iter().map(ProvenTransaction::id).collect(), + }; + + let y_expected = AccountUpdateWitness { + initial_state_hash: y_hashes[0], + final_state_hash: *y_hashes.last().unwrap(), + proof: y_proof, + transactions: y_txs.iter().map(ProvenTransaction::id).collect(), + }; + + let expected = [(x_account_id, x_expected), (y_account_id, y_expected)].into(); + + assert_eq!(account_witnesses, expected); +} + // ACCOUNT ROOT TESTS // ================================================================================================= @@ -241,7 +337,7 @@ async fn test_compute_account_root_success() { vec![batch_1, batch_2] }; - let block_witness = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); + let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); let block_prover = BlockProver::new(); let block_header = block_prover.prove(block_witness).unwrap(); @@ -314,7 +410,7 @@ async fn test_compute_account_root_empty_batches() { 
.unwrap(); let batches = Vec::new(); - let block_witness = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); + let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); let block_prover = BlockProver::new(); let block_header = block_prover.prove(block_witness).unwrap(); @@ -348,7 +444,7 @@ async fn test_compute_note_root_empty_batches_success() { let batches: Vec = Vec::new(); - let block_witness = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); + let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); let block_prover = BlockProver::new(); let block_header = block_prover.prove(block_witness).unwrap(); @@ -383,7 +479,7 @@ async fn test_compute_note_root_empty_notes_success() { vec![batch] }; - let block_witness = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); + let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); let block_prover = BlockProver::new(); let block_header = block_prover.prove(block_witness).unwrap(); @@ -458,7 +554,7 @@ async fn test_compute_note_root_success() { vec![batch_1, batch_2] }; - let block_witness = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); + let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); let block_prover = BlockProver::new(); let block_header = block_prover.prove(block_witness).unwrap(); @@ -609,7 +705,7 @@ async fn test_compute_nullifier_root_empty_success() { .await .unwrap(); - let block_witness = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); + let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); let block_prover = BlockProver::new(); let block_header = block_prover.prove(block_witness).unwrap(); @@ -670,7 +766,7 @@ async fn test_compute_nullifier_root_success() { .await .unwrap(); - let block_witness = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); + let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); let block_prover = BlockProver::new(); let block_header = block_prover.prove(block_witness).unwrap(); diff --git a/crates/block-producer/src/errors.rs b/crates/block-producer/src/errors.rs index bec167e09..c68808172 100644 --- a/crates/block-producer/src/errors.rs +++ b/crates/block-producer/src/errors.rs @@ -5,7 +5,8 @@ use miden_objects::{ crypto::merkle::{MerkleError, MmrError}, notes::{NoteId, Nullifier}, transaction::{ProvenTransaction, TransactionId}, - Digest, TransactionInputError, BLOCK_OUTPUT_NOTES_BATCH_TREE_DEPTH, MAX_NOTES_PER_BATCH, + AccountDeltaError, Digest, TransactionInputError, BLOCK_OUTPUT_NOTES_BATCH_TREE_DEPTH, + MAX_NOTES_PER_BATCH, }; use miden_processor::ExecutionError; use thiserror::Error; @@ -15,11 +16,6 @@ use thiserror::Error; #[derive(Debug, PartialEq, Eq, Error)] pub enum VerifyTxError { - /// The account that the transaction modifies has already been modified and isn't yet committed - /// to a block - #[error("Account {0} was already modified by other transaction")] - AccountAlreadyModifiedByOtherTx(AccountId), - /// Another transaction already consumed the notes with given nullifiers #[error("Input notes with given nullifiers were already consumed by another transaction")] InputNotesAlreadyConsumed(Vec), @@ -31,10 +27,10 @@ pub enum VerifyTxError { UnauthenticatedNotesNotFound(Vec), /// The account's initial hash did not match the current account's hash - #[error("Incorrect account's initial hash ({tx_initial_account_hash}, 
stored: {})", format_opt(.store_account_hash.as_ref()))]
+    #[error("Incorrect account's initial hash ({tx_initial_account_hash}, current: {})", format_opt(.current_account_hash.as_ref()))]
     IncorrectAccountInitialHash {
         tx_initial_account_hash: Digest,
-        store_account_hash: Option<Digest>,
+        current_account_hash: Option<Digest>,
     },
 
     /// Failed to retrieve transaction inputs from the store
@@ -96,6 +92,13 @@ pub enum BuildBatchError {
         output_hash: Digest,
         txs: Vec<ProvenTransaction>,
     },
+
+    #[error("Failed to merge transaction delta into account {account_id}: {error}")]
+    AccountUpdateError {
+        account_id: AccountId,
+        error: AccountDeltaError,
+        txs: Vec<ProvenTransaction>,
+    },
 }
 
 impl BuildBatchError {
@@ -108,6 +111,7 @@ impl BuildBatchError {
             BuildBatchError::DuplicateOutputNote(_, txs) => txs,
             BuildBatchError::UnauthenticatedNotesNotFound(_, txs) => txs,
             BuildBatchError::NoteHashesMismatch { txs, .. } => txs,
+            BuildBatchError::AccountUpdateError { txs, .. } => txs,
         }
     }
 }
@@ -171,10 +175,12 @@ pub enum BuildBlockError {
     ApplyBlockFailed(#[from] ApplyBlockError),
     #[error("failed to get block inputs from store: {0}")]
     GetBlockInputsFailed(#[from] BlockInputsError),
-    #[error("transaction batches and store don't modify the same account IDs. Offending accounts: {0:?}")]
-    InconsistentAccountIds(Vec<AccountId>),
-    #[error("transaction batches and store contain different hashes for some accounts. Offending accounts: {0:?}")]
-    InconsistentAccountStates(Vec<AccountId>),
+    #[error("store did not produce data for account: {0}")]
+    MissingAccountInput(AccountId),
+    #[error("store produced extra account data. Offending accounts: {0:?}")]
+    ExtraStoreData(Vec<AccountId>),
+    #[error("no matching state transition found for account {0}. Current account state is {1}, remaining updates: {2:?}")]
+    InconsistentAccountStateTransition(AccountId, Digest, Vec<Digest>),
     #[error("transaction batches and store don't produce the same nullifiers. Offending nullifiers: {0:?}")]
     InconsistentNullifiers(Vec<Nullifier>),
     #[error("unauthenticated transaction notes not found in the store or in outputs of other transactions in the block: {0:?}")]
@@ -184,6 +190,11 @@ pub enum BuildBlockError {
         BLOCK_OUTPUT_NOTES_BATCH_TREE_DEPTH
     )]
     TooManyBatchesInBlock(usize),
+    #[error("Failed to merge transaction delta into account {account_id}: {error}")]
+    AccountUpdateError {
+        account_id: AccountId,
+        error: AccountDeltaError,
+    },
 }
 
 // Transaction inputs errors
diff --git a/crates/block-producer/src/state_view/account_state.rs b/crates/block-producer/src/state_view/account_state.rs
new file mode 100644
index 000000000..b3c538cc9
--- /dev/null
+++ b/crates/block-producer/src/state_view/account_state.rs
@@ -0,0 +1,123 @@
+use std::collections::{BTreeMap, VecDeque};
+
+use miden_objects::{accounts::AccountId, Digest};
+
+use crate::errors::VerifyTxError;
+
+/// Tracks the list of inflight account updates.
+///
+/// New transactions can be registered with [Self::verify_and_add]. States that are no longer
+/// considered inflight (e.g. due to being applied) may be removed using [Self::remove].
+///
+/// Both functions perform safety checks to ensure the states match what we expect.
+#[derive(Debug, Default)]
+pub struct InflightAccountStates(BTreeMap<AccountId, VecDeque<Digest>>);
+
+impl InflightAccountStates {
+    /// Verifies that the provided initial state matches the latest inflight account state (if any).
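+    ///
+    /// For example, if the inflight chain for an account is `A -> B -> C`, only a transaction
+    /// whose initial state is `C` passes this check; any other initial state is rejected with
+    /// [VerifyTxError::IncorrectAccountInitialHash].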
+    pub fn verify_update(&self, id: AccountId, init_state: Digest) -> Result<(), VerifyTxError> {
+        if let Some(latest) = self.get(id) {
+            if latest != &init_state {
+                return Err(VerifyTxError::IncorrectAccountInitialHash {
+                    tx_initial_account_hash: init_state,
+                    current_account_hash: Some(*latest),
+                });
+            }
+        }
+
+        Ok(())
+    }
+
+    /// [Verifies](Self::verify_update) the update and appends it to the list of inflight account
+    /// updates.
+    pub fn verify_and_add(
+        &mut self,
+        id: AccountId,
+        init_state: Digest,
+        final_state: Digest,
+    ) -> Result<(), VerifyTxError> {
+        let states = self.0.entry(id).or_default();
+
+        // Ensure the latest state matches the new initial state.
+        if let Some(latest) = states.back() {
+            if latest != &init_state {
+                return Err(VerifyTxError::IncorrectAccountInitialHash {
+                    tx_initial_account_hash: init_state,
+                    current_account_hash: Some(*latest),
+                });
+            }
+        }
+
+        states.push_back(final_state);
+
+        Ok(())
+    }
+
+    /// Remove state transitions from earliest up until a state that matches the given
+    /// final state. Returns an error if no match was found.
+    ///
+    /// In other words, if an account has state transitions `a->b->c->d` then calling `remove(b)`
+    /// would leave behind `c->d`.
+    pub fn remove(&mut self, id: AccountId, final_state: Digest) -> Result<(), ()> {
+        let states = self.0.get_mut(&id).ok_or(())?;
+        let Some(idx) = states.iter().position(|x| x == &final_state) else {
+            return Err(());
+        };
+
+        states.drain(..=idx);
+        // Prevent infinite growth by removing entries which have no
+        // inflight state changes.
+        if states.is_empty() {
+            self.0.remove(&id);
+        }
+
+        Ok(())
+    }
+
+    /// The latest value of the given account.
+    pub fn get(&self, id: AccountId) -> Option<&Digest> {
+        self.0.get(&id).and_then(|states| states.back())
+    }
+
+    /// Whether the given account has any inflight state updates.
+    #[cfg(test)]
+    pub fn contains(&self, id: AccountId) -> bool {
+        self.0.contains_key(&id)
+    }
+
+    /// Number of accounts with inflight transactions.
+    #[cfg(test)]
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use miden_air::Felt;
+    use miden_objects::accounts::AccountId;
+
+    use super::*;
+
+    #[test]
+    fn account_states_must_chain() {
+        let account: AccountId = AccountId::new_unchecked(Felt::new(10));
+        const ONE: Digest = Digest::new([Felt::new(1), Felt::new(1), Felt::new(1), Felt::new(1)]);
+        const TWO: Digest = Digest::new([Felt::new(2), Felt::new(2), Felt::new(2), Felt::new(2)]);
+        const THREE: Digest = Digest::new([Felt::new(3), Felt::new(3), Felt::new(3), Felt::new(3)]);
+        let mut uut = InflightAccountStates::default();
+
+        assert!(uut.verify_and_add(account, Digest::default(), ONE).is_ok());
+        assert!(uut.verify_and_add(account, ONE, TWO).is_ok());
+        assert!(uut.verify_and_add(account, TWO, THREE).is_ok());
+        assert!(uut.verify_and_add(account, TWO, ONE).is_err());
+
+        assert!(uut.remove(account, TWO).is_ok());
+        // Repeat removal should fail since this is no longer present.
+        assert!(uut.remove(account, TWO).is_err());
+        assert!(uut.remove(account, THREE).is_ok());
+
+        // Check that cleanup is performed.
+ assert!(uut.0.is_empty()); + } +} diff --git a/crates/block-producer/src/state_view/mod.rs b/crates/block-producer/src/state_view/mod.rs index c90f74757..91a3cae4e 100644 --- a/crates/block-producer/src/state_view/mod.rs +++ b/crates/block-producer/src/state_view/mod.rs @@ -3,7 +3,6 @@ use std::{collections::BTreeSet, sync::Arc}; use async_trait::async_trait; use miden_node_utils::formatting::format_array; use miden_objects::{ - accounts::AccountId, block::Block, notes::{NoteId, Nullifier}, transaction::OutputNote, @@ -13,6 +12,7 @@ use miden_tx::TransactionVerifier; use tokio::sync::RwLock; use tracing::{debug, instrument}; +use self::account_state::InflightAccountStates; use crate::{ errors::VerifyTxError, store::{ApplyBlock, ApplyBlockError, Store, TransactionInputs}, @@ -20,6 +20,7 @@ use crate::{ ProvenTransaction, COMPONENT, }; +mod account_state; #[cfg(test)] mod tests; @@ -29,9 +30,8 @@ pub struct DefaultStateView { /// Enables or disables the verification of transaction proofs in `verify_tx` verify_tx_proofs: bool, - /// The account ID of accounts being modified by transactions currently in the block production - /// pipeline. We currently ensure that only 1 tx/block modifies any given account (issue: #186). - accounts_in_flight: Arc>>, + /// The account states modified by transactions currently in the block production pipeline. + accounts_in_flight: Arc>, /// The nullifiers of notes consumed by transactions currently in the block production pipeline. nullifiers_in_flight: Arc>>, @@ -92,7 +92,12 @@ where // identify a set of unauthenticated input notes which are not in the store yet; we'll use // this set to verify that these notes are currently in flight (i.e., they are output notes // of one of the inflight transactions) - let tx_inputs = self.store.get_tx_inputs(candidate_tx).await?; + let mut tx_inputs = self.store.get_tx_inputs(candidate_tx).await?; + // The latest inflight account state takes precedence since this is the current block being constructed. + if let Some(inflight) = self.accounts_in_flight.read().await.get(candidate_tx.account_id()) + { + tx_inputs.account_hash = Some(*inflight); + } let missing_notes = ensure_tx_inputs_constraints(candidate_tx, tx_inputs)?; // Re-check in-flight transaction constraints, and if verification passes, register @@ -113,8 +118,11 @@ where &missing_notes, )?; - // Success! 
Register transaction as successfully verified - locked_accounts_in_flight.insert(candidate_tx.account_id()); + locked_accounts_in_flight.verify_and_add( + candidate_tx.account_id(), + candidate_tx.account_update().init_state_hash(), + candidate_tx.account_update().final_state_hash(), + )?; locked_nullifiers_in_flight.extend(&mut candidate_tx.get_nullifiers()); locked_notes_in_flight.extend(candidate_tx.output_notes().iter().map(OutputNote::id)); } @@ -141,8 +149,9 @@ where // Remove account ids of transactions in block for update in block.updated_accounts() { - let was_in_flight = locked_accounts_in_flight.remove(&update.account_id()); - debug_assert!(was_in_flight); + locked_accounts_in_flight + .remove(update.account_id(), update.new_state_hash()) + .expect("Account update should be valid"); } // Remove new nullifiers of transactions in block @@ -165,25 +174,26 @@ where // ------------------------------------------------------------------------------------------------- /// Ensures the constraints related to in-flight transactions: -/// - the candidate transaction doesn't modify the same account as an existing in-flight -/// transaction (issue: #186) +/// - the candidate tx's initial state matches the latest inflight state (if any) /// - no note's nullifier in candidate tx's consumed notes is already contained in /// `already_consumed_nullifiers` /// - all notes in `tx_notes_not_in_store` are currently in flight +/// +/// The account state is not verified as it is performed by [InflightAccountStates]. #[instrument(target = "miden-block-producer", skip_all, err)] fn ensure_in_flight_constraints( candidate_tx: &ProvenTransaction, - accounts_in_flight: &BTreeSet, + accounts_in_flight: &InflightAccountStates, already_consumed_nullifiers: &BTreeSet, notes_in_flight: &BTreeSet, tx_notes_not_in_store: &[NoteId], ) -> Result<(), VerifyTxError> { - debug!(target: COMPONENT, accounts_in_flight = %format_array(accounts_in_flight), already_consumed_nullifiers = %format_array(already_consumed_nullifiers)); + debug!(target: COMPONENT, already_consumed_nullifiers = %format_array(already_consumed_nullifiers)); - // Check account id hasn't been modified yet - if accounts_in_flight.contains(&candidate_tx.account_id()) { - return Err(VerifyTxError::AccountAlreadyModifiedByOtherTx(candidate_tx.account_id())); - } + accounts_in_flight.verify_update( + candidate_tx.account_id(), + candidate_tx.account_update().init_state_hash(), + )?; // Check no consumed notes were already consumed let infracting_nullifiers: Vec = { @@ -230,7 +240,7 @@ fn ensure_tx_inputs_constraints( if candidate_tx.account_update().init_state_hash() != store_account_hash { return Err(VerifyTxError::IncorrectAccountInitialHash { tx_initial_account_hash: candidate_tx.account_update().init_state_hash(), - store_account_hash: Some(store_account_hash), + current_account_hash: Some(store_account_hash), }); } }, @@ -241,7 +251,7 @@ fn ensure_tx_inputs_constraints( if candidate_tx.account_update().init_state_hash() != Digest::default() { return Err(VerifyTxError::IncorrectAccountInitialHash { tx_initial_account_hash: candidate_tx.account_update().init_state_hash(), - store_account_hash: None, + current_account_hash: None, }); } }, diff --git a/crates/block-producer/src/state_view/tests/apply_block.rs b/crates/block-producer/src/state_view/tests/apply_block.rs index 9d01970f4..fa1fb9596 100644 --- a/crates/block-producer/src/state_view/tests/apply_block.rs +++ b/crates/block-producer/src/state_view/tests/apply_block.rs @@ -103,7 +103,7 @@ async fn 
test_apply_block_ab2() { // Only the first account should still be in flight assert_eq!(accounts_still_in_flight.len(), 1); - assert!(accounts_still_in_flight.contains(&accounts[0].id)); + assert!(accounts_still_in_flight.contains(accounts[0].id)); } /// Tests requirement AB3 diff --git a/crates/block-producer/src/state_view/tests/verify_tx.rs b/crates/block-producer/src/state_view/tests/verify_tx.rs index faec60205..85b8b2761 100644 --- a/crates/block-producer/src/state_view/tests/verify_tx.rs +++ b/crates/block-producer/src/state_view/tests/verify_tx.rs @@ -99,7 +99,7 @@ async fn test_verify_tx_vt1() { verify_tx_result, Err(VerifyTxError::IncorrectAccountInitialHash { tx_initial_account_hash: account.states[1], - store_account_hash: Some(account.states[0]), + current_account_hash: Some(account.states[0]), }) ); } @@ -171,8 +171,8 @@ async fn test_verify_tx_vt4() { let tx1 = MockProvenTxBuilder::with_account(account.id, account.states[0], account.states[1]).build(); - // Notice: tx2 modifies the same account as tx1, even though from a different initial state, - // which is currently disallowed + // Notice: tx2 follows tx1, using the same account and with an initial state matching the final state of the first. + // We expect both to pass. let tx2 = MockProvenTxBuilder::with_account(account.id, account.states[1], account.states[2]).build(); @@ -182,10 +182,7 @@ async fn test_verify_tx_vt4() { assert!(verify_tx1_result.is_ok()); let verify_tx2_result = state_view.verify_tx(&tx2).await; - assert_eq!( - verify_tx2_result, - Err(VerifyTxError::AccountAlreadyModifiedByOtherTx(account.id)) - ); + assert!(verify_tx2_result.is_ok()); } /// Verifies requirement VT5 diff --git a/crates/block-producer/src/test_utils/block.rs b/crates/block-producer/src/test_utils/block.rs index 44c961014..e5c841abc 100644 --- a/crates/block-producer/src/test_utils/block.rs +++ b/crates/block-producer/src/test_utils/block.rs @@ -29,8 +29,8 @@ pub async fn build_expected_block_header( batches.iter().flat_map(TransactionBatch::updated_accounts).collect(); let new_account_root = { let mut store_accounts = store.accounts.read().await.clone(); - for update in updated_accounts { - store_accounts.insert(update.account_id().into(), update.new_state_hash().into()); + for (&account_id, update) in updated_accounts { + store_accounts.insert(account_id.into(), update.final_state.into()); } store_accounts.root() @@ -76,14 +76,14 @@ pub async fn build_actual_block_header( let block_inputs_from_store: BlockInputs = store .get_block_inputs( - updated_accounts.iter().map(|update| update.account_id()), + updated_accounts.iter().map(|(&account_id, _)| account_id), produced_nullifiers.iter(), iter::empty(), ) .await .unwrap(); - let block_witness = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); + let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); BlockProver::new().prove(block_witness).unwrap() } diff --git a/crates/block-producer/src/txqueue/tests/mod.rs b/crates/block-producer/src/txqueue/tests/mod.rs index 49ac130e6..62c92bd57 100644 --- a/crates/block-producer/src/txqueue/tests/mod.rs +++ b/crates/block-producer/src/txqueue/tests/mod.rs @@ -22,7 +22,7 @@ struct TransactionValidatorFailure; #[async_trait] impl TransactionValidator for TransactionValidatorFailure { async fn verify_tx(&self, tx: &ProvenTransaction) -> Result<(), VerifyTxError> { - Err(VerifyTxError::AccountAlreadyModifiedByOtherTx(tx.account_id())) + Err(VerifyTxError::InvalidTransactionProof(tx.id())) } } @@ -111,12 
+111,13 @@ async fn test_build_batch_success() { // a batch will include up to `batch_size` transactions let mut txs = Vec::new(); - for _ in 0..batch_size { + for i in 0..batch_size { + let tx = MockProvenTxBuilder::with_account_index(i as u32).build(); tx_queue .add_transaction(tx.clone()) .await .expect("Transaction queue is running"); - txs.push(tx.clone()) + txs.push(tx); } tokio::time::advance(build_batch_frequency).await; let batch = receiver.try_recv().expect("Queue not empty"); @@ -130,7 +131,8 @@ async fn test_build_batch_success() { // the transaction queue eagerly produces batches let mut txs = Vec::new(); - for _ in 0..(2 * batch_size + 1) { + for i in 0..(2 * batch_size + 1) { + let tx = MockProvenTxBuilder::with_account_index(i as u32).build(); tx_queue .add_transaction(tx.clone()) .await