Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
139 changes: 102 additions & 37 deletions yarn-project/end-to-end/src/e2e_block_building.test.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,11 @@
import { AztecNodeService } from '@aztec/aztec-node';
import { AztecRPCServer } from '@aztec/aztec-rpc';
import { ContractDeployer, Fr, isContractDeployed } from '@aztec/aztec.js';
import { BatchCall, ContractDeployer, Fr, Wallet, isContractDeployed } from '@aztec/aztec.js';
import { CircuitsWasm } from '@aztec/circuits.js';
import { pedersenPlookupCommitInputs } from '@aztec/circuits.js/barretenberg';
import { DebugLogger } from '@aztec/foundation/log';
import { TestContractAbi } from '@aztec/noir-contracts/artifacts';
import { TestContract } from '@aztec/noir-contracts/types';
import { AztecRPC, TxStatus } from '@aztec/types';

import times from 'lodash.times';
Expand All @@ -13,45 +16,107 @@ describe('e2e_block_building', () => {
let aztecNode: AztecNodeService | undefined;
let aztecRpcServer: AztecRPC;
let logger: DebugLogger;
let wallet: Wallet;

const abi = TestContractAbi;
describe('multi-txs block', () => {
const abi = TestContractAbi;

beforeEach(async () => {
({ aztecNode, aztecRpcServer, logger } = await setup());
}, 100_000);
beforeAll(async () => {
({ aztecNode, aztecRpcServer, logger, wallet } = await setup(1));
}, 100_000);

afterEach(async () => {
await aztecNode?.stop();
if (aztecRpcServer instanceof AztecRPCServer) {
await aztecRpcServer?.stop();
}
afterAll(async () => {
await aztecNode?.stop();
if (aztecRpcServer instanceof AztecRPCServer) {
await aztecRpcServer?.stop();
}
});

it('assembles a block with multiple txs', async () => {
// Assemble N contract deployment txs
// We need to create them sequentially since we cannot have parallel calls to a circuit
const TX_COUNT = 8;
const deployer = new ContractDeployer(abi, wallet);
const methods = times(TX_COUNT, () => deployer.deploy());

for (const i in methods) {
await methods[i].create({ contractAddressSalt: new Fr(BigInt(i + 1)) });
await methods[i].simulate({});
}

// Send them simultaneously to be picked up by the sequencer
const txs = await Promise.all(methods.map(method => method.send()));
logger(`Txs sent with hashes: `);
for (const tx of txs) logger(` ${await tx.getTxHash()}`);

// Await txs to be mined and assert they are all mined on the same block
const receipts = await Promise.all(txs.map(tx => tx.wait()));
expect(receipts.map(r => r.status)).toEqual(times(TX_COUNT, () => TxStatus.MINED));
expect(receipts.map(r => r.blockNumber)).toEqual(times(TX_COUNT, () => receipts[0].blockNumber));

// Assert all contracts got deployed
const areDeployed = await Promise.all(receipts.map(r => isContractDeployed(aztecRpcServer, r.contractAddress!)));
expect(areDeployed).toEqual(times(TX_COUNT, () => true));
}, 60_000);
});

// Verifies the sequencer batches several independent txs into a single L2 block.
it('should assemble a block with multiple txs', async () => {
  // Assemble N contract deployment txs
  // We need to create them sequentially since we cannot have parallel calls to a circuit
  const TX_COUNT = 8;
  const deployer = new ContractDeployer(abi, aztecRpcServer);
  const methods = times(TX_COUNT, () => deployer.deploy());

  // NOTE(review): `for..in` yields string indices, so `i + 1` is string
  // concatenation ("0"+1 -> "01" -> 1n, "1"+1 -> "11" -> 11n, ...). The salts
  // happen to stay unique so the test works, but a numeric `i + 1` was likely
  // intended — confirm before relying on specific salt values.
  for (const i in methods) {
    await methods[i].create({ contractAddressSalt: new Fr(BigInt(i + 1)) });
    await methods[i].simulate({});
  }

  // Send them simultaneously to be picked up by the sequencer
  const txs = await Promise.all(methods.map(method => method.send()));
  logger(`Txs sent with hashes: `);
  for (const tx of txs) logger(` ${await tx.getTxHash()}`);

  // Await txs to be mined and assert they are all mined on the same block
  await Promise.all(txs.map(tx => tx.isMined()));
  const receipts = await Promise.all(txs.map(tx => tx.getReceipt()));
  expect(receipts.map(r => r.status)).toEqual(times(TX_COUNT, () => TxStatus.MINED));
  // All receipts must share the first tx's block number, i.e. one block holds them all.
  expect(receipts.map(r => r.blockNumber)).toEqual(times(TX_COUNT, () => receipts[0].blockNumber));

  // Assert all contracts got deployed
  const areDeployed = await Promise.all(receipts.map(r => isContractDeployed(aztecRpcServer, r.contractAddress!)));
  expect(areDeployed).toEqual(times(TX_COUNT, () => true));
}, 60_000);
// Regressions for https://github.com/AztecProtocol/aztec-packages/issues/2502
// Each test submits two txs (or one tx with duplicate effects) that emit the
// same nullifier, and expects the sequencer to drop the second one while the
// block is being built.
describe('double-spends on the same block', () => {
  let contract: TestContract;

  beforeAll(async () => {
    ({ aztecNode, aztecRpcServer, logger, wallet } = await setup(1));
    contract = await TestContract.deploy(wallet).send().deployed();
  }, 100_000);

  afterAll(async () => {
    await aztecNode?.stop();
    if (aztecRpcServer instanceof AztecRPCServer) {
      await aztecRpcServer?.stop();
    }
  });

  it('drops tx with private nullifier already emitted on the same block', async () => {
    const nullifier = Fr.random();
    // Two identical private calls emitting the same nullifier.
    const calls = times(2, () => contract.methods.emit_nullifier(nullifier));
    // Simulate sequentially — circuit simulation cannot run in parallel.
    for (const call of calls) await call.simulate();
    // Send both so the sequencer sees them in the same block window.
    const [tx1, tx2] = calls.map(call => call.send());
    await tx1.wait();
    // The second tx duplicates the nullifier and must be dropped.
    await expect(tx2.wait()).rejects.toThrowError(/dropped/);
  }, 30_000);

  it('drops tx with public nullifier already emitted on the same block', async () => {
    const secret = Fr.random();
    // Same as above but the nullifier is created from a public function.
    const calls = times(2, () => contract.methods.createNullifierPublic(140n, secret));
    for (const call of calls) await call.simulate();
    const [tx1, tx2] = calls.map(call => call.send());
    await tx1.wait();
    await expect(tx2.wait()).rejects.toThrowError(/dropped/);
  }, 30_000);

  it('drops tx with two equal nullifiers', async () => {
    const nullifier = Fr.random();
    // A single batched tx whose two calls emit the same nullifier; the whole
    // tx is malformed and must be dropped.
    const calls = times(2, () => contract.methods.emit_nullifier(nullifier).request());
    await expect(new BatchCall(wallet, calls).send().wait()).rejects.toThrowError(/dropped/);
  });

  it('drops tx with private nullifier already emitted from public on the same block', async () => {
    const secret = Fr.random();
    // Recompute the nullifier that createNullifierPublic(140, secret) emits,
    // so the private call below collides with it.
    // See yarn-project/acir-simulator/src/public/index.test.ts 'Should be able to create a nullifier from the public context'
    const emittedPublicNullifier = pedersenPlookupCommitInputs(
      await CircuitsWasm.get(),
      [new Fr(140), secret].map(a => a.toBuffer()),
    );

    const calls = [
      contract.methods.createNullifierPublic(140n, secret),
      contract.methods.emit_nullifier(emittedPublicNullifier),
    ];

    for (const call of calls) await call.simulate();
    const [tx1, tx2] = calls.map(call => call.send());
    await tx1.wait();
    await expect(tx2.wait()).rejects.toThrowError(/dropped/);
  });
});
});
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,18 @@ impl TestPrivateContextInterface {
}


// Private-call stub for the Test contract's `emit_nullifier` entrypoint.
// Serializes the single `nullifier` argument and dispatches a private call
// to function selector 0x82a8b183 on this contract instance's address.
fn emit_nullifier(
    self,
    context: &mut PrivateContext,
    nullifier: Field
) -> [Field; RETURN_VALUES_LENGTH] {
    // One-slot argument buffer: just the nullifier field.
    let mut serialized_args = [0; 1];
    serialized_args[0] = nullifier;

    context.call_private_function(self.address, 0x82a8b183, serialized_args)
}


fn emit_unencrypted(
self,
context: &mut PrivateContext,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ contract Test {
logs::emit_unencrypted_log
},
types::vec::BoundedVec,
constants_gen::EMPTY_NULLIFIED_COMMITMENT,
};

#[aztec(private)]
Expand Down Expand Up @@ -105,6 +106,12 @@ contract Test {
context.push_new_nullifier(note.get_commitment(), 0);
}

// Forcefully emits a nullifier (for testing purposes).
// Lets e2e tests inject arbitrary (including duplicate) nullifiers to
// exercise the sequencer's double-spend detection.
#[aztec(private)]
fn emit_nullifier(nullifier: Field) {
    // EMPTY_NULLIFIED_COMMITMENT: no real commitment is being nullified here.
    context.push_new_nullifier(nullifier, EMPTY_NULLIFIED_COMMITMENT);
}

// docs:start:is-time-equal
#[aztec(public)]
fn isTimeEqual(
Expand Down
77 changes: 46 additions & 31 deletions yarn-project/sequencer-client/src/sequencer/sequencer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,7 @@ export class Sequencer {
if (pendingTxs.length < this.minTxsPerBLock) return;

// Filter out invalid txs
// TODO: It should be responsibility of the P2P layer to validate txs before passing them on here
const validTxs = await this.takeValidTxs(pendingTxs);
if (validTxs.length < this.minTxsPerBLock) {
return;
Expand All @@ -147,9 +148,11 @@ export class Sequencer {
await this.p2pClient.deleteTxs(await Tx.getHashes(failedTxData));
}

// Only accept processed transactions that are not double-spends
// public functions emitting nullifiers would pass earlier check but fail here
const processedValidTxs = await this.takeValidProcessedTxs(processedTxs);
// Only accept processed transactions that are not double-spends,
// public functions emitting nullifiers would pass earlier check but fail here.
// Note that we're checking all nullifiers generated in the private execution twice,
// we could store the ones already checked and skip them here as an optimisation.
const processedValidTxs = await this.takeValidTxs(processedTxs);

if (processedValidTxs.length === 0) {
this.log('No txs processed correctly to build block. Exiting');
Expand Down Expand Up @@ -224,25 +227,27 @@ export class Sequencer {
}
}

// TODO: It should be responsibility of the P2P layer to validate txs before passing them on here
protected async takeValidTxs(txs: Tx[]) {
const validTxs = [];
protected async takeValidTxs<T extends Tx | ProcessedTx>(txs: T[]): Promise<T[]> {
const validTxs: T[] = [];
const doubleSpendTxs = [];
const thisBlockNullifiers: Set<bigint> = new Set();

// Process txs until we get to maxTxsPerBlock, rejecting double spends in the process
for (const tx of txs) {
// TODO(AD) - eventually we should add a limit to how many transactions we
// skip in this manner and do something more DDOS-proof (like letting the transaction fail and pay a fee).
if (await this.isTxDoubleSpend(tx)) {
this.log(`Deleting double spend tx ${await tx.getTxHash()}`);
this.log(`Deleting double spend tx ${await Tx.getHash(tx)}`);
doubleSpendTxs.push(tx);
continue;
} else if (this.isTxDoubleSpendSameBlock(tx, thisBlockNullifiers)) {
// We don't drop these txs from the p2p pool immediately since they become valid
// again if the current block fails to be published for some reason.
this.log(`Skipping tx with double-spend for this same block ${await Tx.getHash(tx)}`);
continue;
}

tx.data.end.newNullifiers.forEach(n => thisBlockNullifiers.add(n.toBigInt()));
validTxs.push(tx);
if (validTxs.length >= this.maxTxsPerBlock) {
break;
}
if (validTxs.length >= this.maxTxsPerBlock) break;
}

// Make sure we remove these from the tx pool so we do not consider it again
Expand All @@ -253,13 +258,6 @@ export class Sequencer {
return validTxs;
}

protected async takeValidProcessedTxs(txs: ProcessedTx[]) {
const isDoubleSpends = await Promise.all(txs.map(async tx => await this.isTxDoubleSpend(tx as unknown as Tx)));
const doubleSpends = txs.filter((tx, index) => isDoubleSpends[index]).map(tx => tx.hash);
await this.p2pClient.deleteTxs(doubleSpends);
return txs.filter((tx, index) => !isDoubleSpends[index]);
}

/**
* Returns whether the previous block sent has been mined, and all dependencies have caught up with it.
* @returns Boolean indicating if our dependencies are synced to the latest block.
Expand Down Expand Up @@ -307,25 +305,42 @@ export class Sequencer {
return await this.l1ToL2MessageSource.getPendingL1ToL2Messages();
}

/**
 * Returns true if one of the tx nullifiers exist on the block being built.
 * @param tx - The tx to test.
 * @param thisBlockNullifiers - The nullifiers added so far.
 */
protected isTxDoubleSpendSameBlock(tx: Tx | ProcessedTx, thisBlockNullifiers: Set<bigint>): boolean {
  // Empty (zero) nullifiers are padding and never count as collisions; a tx
  // double-spends within this block iff any of its real nullifiers was
  // already collected for the block under construction.
  return tx.data.end.newNullifiers
    .filter(nullifier => !nullifier.isZero())
    .some(nullifier => thisBlockNullifiers.has(nullifier.toBigInt()));
}

/**
* Returns true if one of the transaction nullifiers exist.
* Nullifiers prevent double spends in a private context.
* @param tx - The transaction.
* @returns Whether this is a problematic double spend that the L1 contract would reject.
*/
protected async isTxDoubleSpend(tx: Tx): Promise<boolean> {
// eslint-disable-next-line @typescript-eslint/await-thenable
for (const nullifier of tx.data.end.newNullifiers) {
// Skip nullifier if it's empty
if (nullifier.isZero()) continue;
protected async isTxDoubleSpend(tx: Tx | ProcessedTx): Promise<boolean> {
// We only consider non-empty nullifiers
const newNullifiers = tx.data.end.newNullifiers.filter(n => !n.isZero());

// Ditch this tx if it has a repeated nullifiers
const uniqNullifiers = new Set(newNullifiers.map(n => n.toBigInt()));
if (uniqNullifiers.size !== newNullifiers.length) return true;

for (const nullifier of newNullifiers) {
// TODO(AD): this is an exhaustive search currently
if (
(await this.worldState.getLatest().findLeafIndex(MerkleTreeId.NULLIFIER_TREE, nullifier.toBuffer())) !==
undefined
) {
// Our nullifier tree has this nullifier already - this transaction is a double spend / not well-formed
return true;
}
const db = this.worldState.getLatest();
const indexInDb = await db.findLeafIndex(MerkleTreeId.NULLIFIER_TREE, nullifier.toBuffer());
if (indexInDb !== undefined) return true;
}
return false;
}
Expand Down
17 changes: 15 additions & 2 deletions yarn-project/types/src/tx/tx.ts
Original file line number Diff line number Diff line change
Expand Up @@ -147,13 +147,23 @@ export class Tx {
return Promise.resolve(new TxHash(firstNullifier.toBuffer()));
}

/**
* Convenience function to get a hash out of a tx or a tx-like.
* @param tx - Tx-like object.
* @returns - The hash.
*/
static getHash(tx: Tx | HasHash): Promise<TxHash> {
const hasHash = (tx: Tx | HasHash): tx is HasHash => (tx as HasHash).hash !== undefined;
return Promise.resolve(hasHash(tx) ? tx.hash : tx.getTxHash());
}

/**
* Convenience function to get array of hashes for an array of txs.
* @param txs - The txs to get the hashes from.
* @returns The corresponding array of hashes.
*/
static async getHashes(txs: Tx[]): Promise<TxHash[]> {
return await Promise.all(txs.map(tx => tx.getTxHash()));
static async getHashes(txs: (Tx | HasHash)[]): Promise<TxHash[]> {
return await Promise.all(txs.map(tx => Tx.getHash(tx)));
}

/**
Expand All @@ -176,3 +186,6 @@ export class Tx {
return new Tx(publicInputs, proof, encryptedLogs, unencryptedLogs, enqueuedPublicFunctions, newContracts);
}
}

/** Utility type for an entity that carries a precomputed tx hash (e.g. a processed tx). */
type HasHash = { /** The tx hash */ hash: TxHash };