diff --git a/ci3/tmux_split b/ci3/tmux_split index 1db3ce92f6fe..28b0d68dc769 100755 --- a/ci3/tmux_split +++ b/ci3/tmux_split @@ -17,10 +17,13 @@ session_name=$1 tmux kill-session -t "$session_name" 2>/dev/null || true # Start a new tmux session with log level set +# Passing through env vars from run_native_testnet.sh otherwise they end up unset tmux new-session -d -s "$session_name" -e LOG_LEVEL=${LOG_LEVEL:-"debug"} \ -e OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-} \ -e OTEL_EXPORTER_OTLP_METRICS_ENDPOINT=${OTEL_EXPORTER_OTLP_METRICS_ENDPOINT:-} \ -e OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-} \ + -e L1_CONSENSUS_HOST_URL=${L1_CONSENSUS_HOST_URL:-} \ + -e ETHEREUM_HOST=${ETHEREUM_HOST:-} \ -e LOG_JSON=${LOG_JSON:-} shift 1 diff --git a/spartan/aztec-network/eth-devnet/run-locally.sh b/spartan/aztec-network/eth-devnet/run-locally.sh index c0fc71b8f284..5a53730a0089 100755 --- a/spartan/aztec-network/eth-devnet/run-locally.sh +++ b/spartan/aztec-network/eth-devnet/run-locally.sh @@ -3,4 +3,4 @@ REPO_ROOT=$(git rev-parse --show-toplevel) ${REPO_ROOT}/spartan/aztec-network/eth-devnet/create.sh -(cd ${REPO_ROOT}/spartan/aztec-network/eth-devnet && docker compose build &&docker compose up) +(cd ${REPO_ROOT}/spartan/aztec-network/eth-devnet && docker compose build && docker compose up) diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts index 080c41812c40..bde78daa4071 100644 --- a/yarn-project/archiver/src/archiver/data_retrieval.ts +++ b/yarn-project/archiver/src/archiver/data_retrieval.ts @@ -2,7 +2,7 @@ import { type BlobSinkClientInterface } from '@aztec/blob-sink/client'; import { Body, InboxLeaf, L2Block } from '@aztec/circuit-types'; import { AppendOnlyTreeSnapshot, BlockHeader, Fr, Proof } from '@aztec/circuits.js'; import { asyncPool } from '@aztec/foundation/async-pool'; -import { Blob } from '@aztec/foundation/blob'; +import { Blob, 
BlobDeserializationError } from '@aztec/foundation/blob'; import { type EthAddress } from '@aztec/foundation/eth-address'; import { type ViemSignature } from '@aztec/foundation/eth-signature'; import { type Logger, createLogger } from '@aztec/foundation/log'; @@ -109,6 +109,7 @@ export async function processL2BlockProposedLogs( blobHashes, l2BlockNumber, rollup.address, + logger, ); const l1: L1PublishedData = { @@ -201,6 +202,7 @@ async function getBlockFromRollupTx( blobHashes: Buffer[], // WORKTODO(md): buffer32? l2BlockNum: bigint, rollupAddress: Hex, + logger: Logger, ): Promise { const { input: forwarderData, blockHash } = await publicClient.getTransaction({ hash: txHash }); @@ -232,13 +234,25 @@ async function getBlockFromRollupTx( ]; const header = BlockHeader.fromBuffer(Buffer.from(hexToBytes(decodedArgs.header))); - const blobBodies = await blobSinkClient.getBlobSidecar(blockHash, blobHashes); if (blobBodies.length === 0) { throw new NoBlobBodiesFoundError(Number(l2BlockNum)); } - const blockFields = blobBodies.flatMap(b => b.toEncodedFields()); + // TODO(#9101): Once calldata is removed, we can remove this field encoding and update + // Body.fromBlobFields to accept blob buffers directly + let blockFields: Fr[]; + try { + blockFields = blobBodies.flatMap(b => b.toEncodedFields()); + } catch (err: any) { + if (err instanceof BlobDeserializationError) { + logger.fatal(err.message); + } else { + logger.fatal('Unable to sync: failed to decode fetched blob, this blob was likely not created by us'); + } + throw err; + } + // TODO(#9101): Retreiving the block body from calldata is a temporary soln before we have // either a beacon chain client or link to some blob store. Web2 is ok because we will // verify the block body vs the blob as below. 
diff --git a/yarn-project/blob-sink/src/client/http.test.ts b/yarn-project/blob-sink/src/client/http.test.ts index 6af0e4f587bb..5230ead1733f 100644 --- a/yarn-project/blob-sink/src/client/http.test.ts +++ b/yarn-project/blob-sink/src/client/http.test.ts @@ -1,4 +1,4 @@ -import { Blob, makeEncodedBlob } from '@aztec/foundation/blob'; +import { Blob, makeEncodedBlob, makeUnencodedBlob } from '@aztec/foundation/blob'; import { Fr } from '@aztec/foundation/fields'; import { jest } from '@jest/globals'; @@ -43,8 +43,11 @@ describe('HttpBlobSinkClient', () => { describe('Mock Ethereum Clients', () => { let blobSinkServer: BlobSinkServer; - let testBlob: Blob; - let testBlobHash: Buffer; + let testEncodedBlob: Blob; + let testEncodedBlobHash: Buffer; + + let testNonEncodedBlob: Blob; + let testNonEncodedBlobHash: Buffer; // A blob to be ignored when requesting blobs // - we do not include it's blobHash in our queries @@ -59,10 +62,13 @@ describe('HttpBlobSinkClient', () => { const MOCK_SLOT_NUMBER = 1; beforeEach(async () => { - testBlob = await makeEncodedBlob(3); - testBlobHash = testBlob.getEthVersionedBlobHash(); + testEncodedBlob = await makeEncodedBlob(3); + testEncodedBlobHash = testEncodedBlob.getEthVersionedBlobHash(); testBlobIgnore = await makeEncodedBlob(3); + + testNonEncodedBlob = await makeUnencodedBlob(3); + testNonEncodedBlobHash = testNonEncodedBlob.getEthVersionedBlobHash(); }); const startExecutionHostServer = (): Promise => { @@ -89,14 +95,16 @@ describe('HttpBlobSinkClient', () => { res.end( JSON.stringify({ data: [ + // Correctly encoded blob { index: 0, - blob: `0x${Buffer.from(testBlob.data).toString('hex')}`, + blob: `0x${Buffer.from(testEncodedBlob.data).toString('hex')}`, // eslint-disable-next-line camelcase - kzg_commitment: `0x${testBlob.commitment.toString('hex')}`, + kzg_commitment: `0x${testEncodedBlob.commitment.toString('hex')}`, // eslint-disable-next-line camelcase - kzg_proof: `0x${testBlob.proof.toString('hex')}`, + kzg_proof: 
`0x${testEncodedBlob.proof.toString('hex')}`, }, + // Correctly encoded blob, but we do not ask for it in the client { index: 1, blob: `0x${Buffer.from(testBlobIgnore.data).toString('hex')}`, @@ -105,6 +113,15 @@ describe('HttpBlobSinkClient', () => { // eslint-disable-next-line camelcase kzg_proof: `0x${testBlobIgnore.proof.toString('hex')}`, }, + // Incorrectly encoded blob + { + index: 2, + blob: `0x${Buffer.from(testNonEncodedBlob.data).toString('hex')}`, + // eslint-disable-next-line camelcase + kzg_commitment: `0x${testNonEncodedBlob.commitment.toString('hex')}`, + // eslint-disable-next-line camelcase + kzg_proof: `0x${testNonEncodedBlob.proof.toString('hex')}`, + }, ], }), ); @@ -147,11 +164,11 @@ describe('HttpBlobSinkClient', () => { l1RpcUrl: `http://localhost:${executionHostPort}`, }); - const success = await client.sendBlobsToBlobSink('0x1234', [testBlob]); + const success = await client.sendBlobsToBlobSink('0x1234', [testEncodedBlob]); expect(success).toBe(true); - const retrievedBlobs = await client.getBlobSidecar('0x1234', [testBlobHash]); - expect(retrievedBlobs).toEqual([testBlob]); + const retrievedBlobs = await client.getBlobSidecar('0x1234', [testEncodedBlobHash]); + expect(retrievedBlobs).toEqual([testEncodedBlob]); // Check that the blob sink was called with the correct block hash and no index expect(blobSinkSpy).toHaveBeenCalledWith('0x1234', undefined); @@ -160,25 +177,30 @@ describe('HttpBlobSinkClient', () => { // When the consensus host is responding, we should request blobs from the consensus host // based on the slot number it('should request based on slot where consensus host is provided', async () => { - blobSinkServer = new BlobSinkServer({ - port: 0, + await startExecutionHostServer(); + await startConsensusHostServer(); + + const client = new HttpBlobSinkClient({ + l1RpcUrl: `http://localhost:${executionHostPort}`, + l1ConsensusHostUrl: `http://localhost:${consensusHostPort}`, }); - await blobSinkServer.start(); + const 
retrievedBlobs = await client.getBlobSidecar('0x1234', [testEncodedBlobHash]); + expect(retrievedBlobs).toEqual([testEncodedBlob]); + }); + + it('Even if we ask for non-encoded blobs, we should only get encoded blobs', async () => { await startExecutionHostServer(); await startConsensusHostServer(); const client = new HttpBlobSinkClient({ - blobSinkUrl: `http://localhost:${blobSinkServer.port}`, l1RpcUrl: `http://localhost:${executionHostPort}`, l1ConsensusHostUrl: `http://localhost:${consensusHostPort}`, }); - const success = await client.sendBlobsToBlobSink('0x1234', [testBlob]); - expect(success).toBe(true); - - const retrievedBlobs = await client.getBlobSidecar('0x1234', [testBlobHash]); - expect(retrievedBlobs).toEqual([testBlob]); + const retrievedBlobs = await client.getBlobSidecar('0x1234', [testEncodedBlobHash, testNonEncodedBlobHash]); + // We should only get the correctly encoded blob + expect(retrievedBlobs).toEqual([testEncodedBlob]); }); }); }); diff --git a/yarn-project/blob-sink/src/client/http.ts b/yarn-project/blob-sink/src/client/http.ts index 203f6323d7b6..4641126e9864 100644 --- a/yarn-project/blob-sink/src/client/http.ts +++ b/yarn-project/blob-sink/src/client/http.ts @@ -1,4 +1,4 @@ -import { Blob, type BlobJson } from '@aztec/foundation/blob'; +import { Blob, BlobDeserializationError, type BlobJson } from '@aztec/foundation/blob'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { makeBackoff, retry } from '@aztec/foundation/retry'; @@ -15,13 +15,20 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface { this.config = config ?? 
getBlobSinkConfigFromEnv(); this.log = createLogger('aztec:blob-sink-client'); this.fetch = async (...args: Parameters<typeof fetch>): Promise<Response> => { - return await retry(() => fetch(...args), `Fetching ${args[0]}`, makeBackoff([1, 1, 3]), this.log); + return await retry( + () => fetch(...args), + `Fetching ${args[0]}`, + makeBackoff([1, 1, 3]), + this.log, + /*failSilently=*/ true, + ); }; } public async sendBlobsToBlobSink(blockHash: string, blobs: Blob[]): Promise<boolean> { // TODO(md): for now we are assuming the indexes of the blobs will be 0, 1, 2 // When in reality they will not, but for testing purposes this is fine + // Right now we fetch everything, then filter out the blobs that we don't want if (!this.config.blobSinkUrl) { this.log.verbose('No blob sink url configured'); return false; } @@ -49,7 +56,10 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface { this.log.error('Failed to send blobs to blob sink', res.status); return false; } catch (err) { - this.log.error(`Error sending blobs to blob sink`, err); + this.log.warn(`Blob sink url configured, but unable to send blobs`, { + blobSinkUrl: this.config.blobSinkUrl, + blockHash, + }); return false; } } @@ -72,10 +82,14 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface { */ public async getBlobSidecar(blockHash: string, blobHashes: Buffer[], indices?: number[]): Promise<Blob[]> { let blobs: Blob[] = []; + if (this.config.blobSinkUrl) { this.log.debug('Getting blob sidecar from blob sink'); - blobs = await this.getBlobSidecarFrom(this.config.blobSinkUrl, blockHash, indices); + blobs = await this.getBlobSidecarFrom(this.config.blobSinkUrl, blockHash, blobHashes, indices); this.log.debug(`Got ${blobs.length} blobs from blob sink`); + if (blobs.length > 0) { + return blobs; + } } if (blobs.length == 0 && this.config.l1ConsensusHostUrl) { @@ -86,7 +100,7 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface { }); const slotNumber = await this.getSlotNumber(blockHash); if (slotNumber) { -
const blobs = await this.getBlobSidecarFrom(this.config.l1ConsensusHostUrl, slotNumber, indices); + const blobs = await this.getBlobSidecarFrom(this.config.l1ConsensusHostUrl, slotNumber, blobHashes, indices); this.log.debug(`Got ${blobs.length} blobs from consensus host`); if (blobs.length > 0) { return blobs; @@ -94,10 +108,6 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface { } } - if (blobs.length > 0) { - return filterRelevantBlobs(blobs, blobHashes); - } - this.log.verbose('No blob sources available'); return []; } @@ -105,9 +115,9 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface { public async getBlobSidecarFrom( hostUrl: string, blockHashOrSlot: string | number, + blobHashes: Buffer[], indices?: number[], ): Promise<Blob[]> { - // TODO(md): right now we assume all blobs are ours, this will not yet work on sepolia try { let baseUrl = `${hostUrl}/eth/v1/beacon/blob_sidecars/${blockHashOrSlot}`; if (indices && indices.length > 0) { @@ -122,14 +132,42 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface { if (res.ok) { const body = await res.json(); - const blobs = await Promise.all(body.data.map((b: BlobJson) => Blob.fromJson(b))); - return blobs; + const preFilteredBlobsPromise = body.data + // Filter out blobs that did not come from our rollup + .filter((b: BlobJson) => { + const commitment = Buffer.from(b.kzg_commitment.slice(2), 'hex'); + const blobHash = Blob.getEthVersionedBlobHash(commitment); + return blobHashes.some(hash => hash.equals(blobHash)); + }) + // Attempt to deserialise the blob + // If we cannot decode it, then it is malicious and we should not use it + .map(async (b: BlobJson): Promise<Blob | undefined> => { + try { + return await Blob.fromJson(b); + } catch (err) { + if (err instanceof BlobDeserializationError) { + this.log.warn(`Failed to deserialise blob`, { commitment: b.kzg_commitment }); + return undefined; + } + throw err; + } + }); + + // Second map is async, so we need to await it + const
preFilteredBlobs = await Promise.all(preFilteredBlobsPromise); + + // Filter out blobs that did not deserialise + const filteredBlobs = preFilteredBlobs.filter((b: Blob | undefined) => { + return b !== undefined; + }); + + return filteredBlobs; } this.log.debug(`Unable to get blob sidecar`, res.status); return []; } catch (err: any) { - this.log.error(`Unable to get blob sidecar`, err.message); + this.log.warn(`Unable to get blob sidecar from ${hostUrl}`, err.message); return []; } } @@ -210,19 +248,6 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface { } } -/** - * Filter blobs based on a list of blob hashes - * @param blobs - * @param blobHashes - * @returns - */ -function filterRelevantBlobs(blobs: Blob[], blobHashes: Buffer[]): Blob[] { - return blobs.filter(blob => { - const blobHash = blob.getEthVersionedBlobHash(); - return blobHashes.some(hash => hash.equals(blobHash)); - }); -} - function getBeaconNodeFetchOptions(url: string, config: BlobSinkConfig) { let formattedUrl = url; if (config.l1ConsensusHostApiKey && !config.l1ConsensusHostApiKeyHeader) { diff --git a/yarn-project/foundation/src/blob/blob.ts b/yarn-project/foundation/src/blob/blob.ts index 3c363b3b2dbb..e1504f392534 100644 --- a/yarn-project/foundation/src/blob/blob.ts +++ b/yarn-project/foundation/src/blob/blob.ts @@ -5,7 +5,8 @@ import type { Blob as BlobBuffer } from 'c-kzg'; import { poseidon2Hash, sha256 } from '../crypto/index.js'; import { Fr } from '../fields/index.js'; import { BufferReader, serializeToBuffer } from '../serialize/index.js'; -import { deserializeEncodedBlobFields, extractBlobFieldsFromBuffer } from './encoding.js'; +import { deserializeEncodedBlobToFields, extractBlobFieldsFromBuffer } from './encoding.js'; +import { BlobDeserializationError } from './errors.js'; import { type BlobJson } from './interface.js'; /* eslint-disable import/no-named-as-default-member */ @@ -43,10 +44,18 @@ export class Blob { * @param blob - The buffer to create the Blob 
from. * @param multiBlobFieldsHash - The fields hash to use for the Blob. * @returns A Blob created from the buffer. + * + * @throws If unable to deserialize the blob. */ static fromEncodedBlobBuffer(blob: BlobBuffer, multiBlobFieldsHash?: Fr): Promise<Blob> { - const fields: Fr[] = deserializeEncodedBlobFields(blob); - return Blob.fromFields(fields, multiBlobFieldsHash); + try { + const fields: Fr[] = deserializeEncodedBlobToFields(blob); + return Blob.fromFields(fields, multiBlobFieldsHash); + } catch (err) { + throw new BlobDeserializationError( + `Failed to create Blob from encoded blob buffer, this blob was likely not created by us`, + ); + } } /** @@ -141,9 +150,17 @@ export class Blob { * @dev This method takes into account trailing zeros * * @returns The encoded fields from the blob. + * + * @throws If unable to deserialize the blob. */ toEncodedFields(): Fr[] { - return deserializeEncodedBlobFields(this.data); + try { + return deserializeEncodedBlobToFields(this.data); + } catch (err) { + throw new BlobDeserializationError( + `Failed to deserialize encoded blob fields, this blob was likely not created by us`, + ); + } } /** diff --git a/yarn-project/foundation/src/blob/encoding.ts b/yarn-project/foundation/src/blob/encoding.ts index f568274dd202..c9b63b242802 100644 --- a/yarn-project/foundation/src/blob/encoding.ts +++ b/yarn-project/foundation/src/blob/encoding.ts @@ -42,7 +42,7 @@ export const TX_EFFECT_PREFIX_BYTE_LENGTH = TX_START_PREFIX_BYTES_LENGTH + 7; * @param blob - The blob buffer to deserialize. * @returns An array of field elements. 
*/ -export function deserializeEncodedBlobFields(blob: BlobBuffer): Fr[] { +export function deserializeEncodedBlobToFields(blob: BlobBuffer): Fr[] { // Convert blob buffer to array of field elements const reader = BufferReader.asReader(blob); const array = reader.readArray(blob.length >> 5, Fr); // >> 5 = / 32 (bytes per field) diff --git a/yarn-project/foundation/src/blob/errors.ts b/yarn-project/foundation/src/blob/errors.ts new file mode 100644 index 000000000000..c696e626771f --- /dev/null +++ b/yarn-project/foundation/src/blob/errors.ts @@ -0,0 +1,6 @@ +export class BlobDeserializationError extends Error { + constructor(message: string) { + super(message); + this.name = 'BlobDeserializationError'; + } +} diff --git a/yarn-project/foundation/src/blob/index.ts b/yarn-project/foundation/src/blob/index.ts index 3a9c91ea1f7d..c1e0b9bd7852 100644 --- a/yarn-project/foundation/src/blob/index.ts +++ b/yarn-project/foundation/src/blob/index.ts @@ -7,6 +7,7 @@ export * from './blob.js'; export * from './mocks.js'; export * from './encoding.js'; export * from './interface.js'; +export * from './errors.js'; try { loadTrustedSetup(); diff --git a/yarn-project/foundation/src/blob/mocks.ts b/yarn-project/foundation/src/blob/mocks.ts index 7d4464b83399..5ce53b0485b7 100644 --- a/yarn-project/foundation/src/blob/mocks.ts +++ b/yarn-project/foundation/src/blob/mocks.ts @@ -21,10 +21,28 @@ function encodeFirstField(length: number): Fr { ); } +/** + * Make an encoded blob with the given length + * + * This will deserialise correctly in the archiver + * @param length + * @returns + */ export function makeEncodedBlob(length: number): Promise<Blob> { return Blob.fromFields([encodeFirstField(length + 1), ...Array.from({ length: length }, () => Fr.random())]); } +/** + * Make an unencoded blob with the given length + * + * This will fail deserialisation in the archiver + * @param length + * @returns + */ +export function makeUnencodedBlob(length: number): Promise<Blob> { + return 
Blob.fromFields([...Array.from({ length: length }, () => Fr.random())]); +} + export function makeEncodedBlobFields(fields: Fr[]): Promise<Blob> { return Blob.fromFields([encodeFirstField(fields.length + 1), ...fields]); }