diff --git a/yarn-project/archiver/src/archiver-sync.test.ts b/yarn-project/archiver/src/archiver-sync.test.ts index 76a12af1f559..bbe5f3aa236e 100644 --- a/yarn-project/archiver/src/archiver-sync.test.ts +++ b/yarn-project/archiver/src/archiver-sync.test.ts @@ -380,7 +380,7 @@ describe('Archiver Sync', () => { }); // Create a random blob that doesn't match the checkpoint - const randomBlob = makeRandomBlob(3); + const randomBlob = await makeRandomBlob(3); // Override blob client to return the random blob instead of the correct one blobClient.getBlobSidecar.mockResolvedValue([randomBlob]); diff --git a/yarn-project/archiver/src/test/fake_l1_state.ts b/yarn-project/archiver/src/test/fake_l1_state.ts index 4afb84902926..e55a234b544b 100644 --- a/yarn-project/archiver/src/test/fake_l1_state.ts +++ b/yarn-project/archiver/src/test/fake_l1_state.ts @@ -195,9 +195,9 @@ export class FakeL1State { this.addMessages(checkpointNumber, messagesL1BlockNumber, messages); // Create the transaction and blobs - const tx = this.makeRollupTx(checkpoint, signers); - const blobHashes = this.makeVersionedBlobHashes(checkpoint); - const blobs = this.makeBlobsFromCheckpoint(checkpoint); + const tx = await this.makeRollupTx(checkpoint, signers); + const blobHashes = await this.makeVersionedBlobHashes(checkpoint); + const blobs = await this.makeBlobsFromCheckpoint(checkpoint); // Store the checkpoint data this.checkpoints.push({ @@ -539,14 +539,14 @@ export class FakeL1State { })); } - private makeRollupTx(checkpoint: Checkpoint, signers: Secp256k1Signer[]): Transaction { + private async makeRollupTx(checkpoint: Checkpoint, signers: Secp256k1Signer[]): Promise { const attestations = signers .map(signer => makeCheckpointAttestationFromCheckpoint(checkpoint, signer)) .map(attestation => CommitteeAttestation.fromSignature(attestation.signature)) .map(committeeAttestation => committeeAttestation.toViem()); const header = checkpoint.header.toViem(); - const blobInput = 
getPrefixedEthBlobCommitments(getBlobsPerL1Block(checkpoint.toBlobFields())); + const blobInput = getPrefixedEthBlobCommitments(await getBlobsPerL1Block(checkpoint.toBlobFields())); const archive = toHex(checkpoint.archive.root.toBuffer()); const attestationsAndSigners = new CommitteeAttestationsAndSigners( attestations.map(attestation => CommitteeAttestation.fromViem(attestation)), @@ -595,13 +595,13 @@ export class FakeL1State { } as Transaction; } - private makeVersionedBlobHashes(checkpoint: Checkpoint): `0x${string}`[] { - return getBlobsPerL1Block(checkpoint.toBlobFields()).map( + private async makeVersionedBlobHashes(checkpoint: Checkpoint): Promise<`0x${string}`[]> { + return (await getBlobsPerL1Block(checkpoint.toBlobFields())).map( b => `0x${b.getEthVersionedBlobHash().toString('hex')}` as `0x${string}`, ); } - private makeBlobsFromCheckpoint(checkpoint: Checkpoint): Blob[] { - return getBlobsPerL1Block(checkpoint.toBlobFields()); + private async makeBlobsFromCheckpoint(checkpoint: Checkpoint): Promise { + return await getBlobsPerL1Block(checkpoint.toBlobFields()); } } diff --git a/yarn-project/blob-client/src/blobstore/blob_store_test_suite.ts b/yarn-project/blob-client/src/blobstore/blob_store_test_suite.ts index e7f9df2e627e..3786f1e3bfff 100644 --- a/yarn-project/blob-client/src/blobstore/blob_store_test_suite.ts +++ b/yarn-project/blob-client/src/blobstore/blob_store_test_suite.ts @@ -13,7 +13,7 @@ export function describeBlobStore(getBlobStore: () => Promise) { it('should store and retrieve a blob by hash', async () => { // Create a test blob with random fields const testFields = [Fr.random(), Fr.random(), Fr.random()]; - const blob = Blob.fromFields(testFields); + const blob = await Blob.fromFields(testFields); const blobHash = blob.getEthVersionedBlobHash(); // Store the blob @@ -29,8 +29,8 @@ export function describeBlobStore(getBlobStore: () => Promise) { it('should handle multiple blobs stored and retrieved by their hashes', async () => { // 
Create two different blobs - const blob1 = Blob.fromFields([Fr.random(), Fr.random()]); - const blob2 = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); + const blob1 = await Blob.fromFields([Fr.random(), Fr.random()]); + const blob2 = await Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); const blobHash1 = blob1.getEthVersionedBlobHash(); const blobHash2 = blob2.getEthVersionedBlobHash(); @@ -57,9 +57,9 @@ export function describeBlobStore(getBlobStore: () => Promise) { it('should handle retrieving subset of stored blobs', async () => { // Store multiple blobs - const blob1 = Blob.fromFields([Fr.random()]); - const blob2 = Blob.fromFields([Fr.random()]); - const blob3 = Blob.fromFields([Fr.random()]); + const blob1 = await Blob.fromFields([Fr.random()]); + const blob2 = await Blob.fromFields([Fr.random()]); + const blob3 = await Blob.fromFields([Fr.random()]); await blobStore.addBlobs([blob1, blob2, blob3]); @@ -75,7 +75,7 @@ export function describeBlobStore(getBlobStore: () => Promise) { }); it('should handle duplicate blob hashes in request', async () => { - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); const blobHash = blob.getEthVersionedBlobHash(); await blobStore.addBlobs([blob]); @@ -91,8 +91,8 @@ export function describeBlobStore(getBlobStore: () => Promise) { it('should overwrite blob when storing with same hash', async () => { // Create two blobs that will have the same hash (same content) const fields = [Fr.random(), Fr.random()]; - const blob1 = Blob.fromFields(fields); - const blob2 = Blob.fromFields(fields); + const blob1 = await Blob.fromFields(fields); + const blob2 = await Blob.fromFields(fields); const blobHash = blob1.getEthVersionedBlobHash(); diff --git a/yarn-project/blob-client/src/client/http.test.ts b/yarn-project/blob-client/src/client/http.test.ts index 5d3c6b16422e..c5c7d8eb832f 100644 --- a/yarn-project/blob-client/src/client/http.test.ts +++ 
b/yarn-project/blob-client/src/client/http.test.ts @@ -15,7 +15,7 @@ import { HttpBlobClient } from './http.js'; describe('HttpBlobClient', () => { it('should handle no sources configured', async () => { const client = new HttpBlobClient({}); - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); const blobHash = blob.getEthVersionedBlobHash(); const success = await client.sendBlobsToFilestore([blob]); @@ -40,11 +40,11 @@ describe('HttpBlobClient', () => { let latestSlotNumber: number; let missedSlots: number[]; - beforeEach(() => { + beforeEach(async () => { latestSlotNumber = 1; missedSlots = []; - testBlobs = Array.from({ length: 2 }, () => makeRandomBlob(3)); + testBlobs = await Promise.all(Array.from({ length: 2 }, () => makeRandomBlob(3))); testBlobsHashes = testBlobs.map(b => b.getEthVersionedBlobHash()); blobData = testBlobs.map(b => b.toJSON()); @@ -292,7 +292,7 @@ describe('HttpBlobClient', () => { }); // Create a blob that has mismatch data and commitment. 
- const randomBlobs = Array.from({ length: 2 }, () => makeRandomBlob(3)); + const randomBlobs = await Promise.all(Array.from({ length: 2 }, () => makeRandomBlob(3))); const incorrectBlob = new Blob(randomBlobs[0].data, randomBlobs[1].commitment); const incorrectBlobHash = incorrectBlob.getEthVersionedBlobHash(); // Update blobData to include the incorrect blob @@ -312,7 +312,7 @@ describe('HttpBlobClient', () => { it('should accumulate blobs across all three sources (filestore, consensus, archive)', async () => { // Create three blobs for testing - const blobs = Array.from({ length: 3 }, () => makeRandomBlob(3)); + const blobs = await Promise.all(Array.from({ length: 3 }, () => makeRandomBlob(3))); const blobHashes = blobs.map(b => b.getEthVersionedBlobHash()); // Blob 0 only in filestore @@ -368,7 +368,7 @@ describe('HttpBlobClient', () => { it('should preserve blob order when requesting multiple blobs', async () => { // Create three distinct blobs - const blobs = Array.from({ length: 3 }, () => makeRandomBlob(3)); + const blobs = await Promise.all(Array.from({ length: 3 }, () => makeRandomBlob(3))); const blobHashes = blobs.map(b => b.getEthVersionedBlobHash()); // Add all blobs to filestore @@ -477,7 +477,7 @@ describe('HttpBlobClient', () => { it('should return only one blob when multiple blobs with the same blobHash exist on a block', async () => { // Create a blob data array with two blobs that have the same commitment (thus same blobHash) - const blob = makeRandomBlob(3); + const blob = await makeRandomBlob(3); const blobHash = blob.getEthVersionedBlobHash(); const duplicateBlobData = [blob.toJSON(), blob.toJSON()]; @@ -503,7 +503,7 @@ describe('HttpBlobClient', () => { l1ConsensusHostUrls: [`http://localhost:${consensusHostPort}`], }); - const blob = makeRandomBlob(3); + const blob = await makeRandomBlob(3); const blobHash = blob.getEthVersionedBlobHash(); const blobJson = blob.toJSON(); @@ -616,8 +616,8 @@ describe('HttpBlobClient FileStore Integration', 
() => { let testBlobs: Blob[]; let testBlobsHashes: Buffer[]; - beforeEach(() => { - testBlobs = Array.from({ length: 2 }, () => makeRandomBlob(3)); + beforeEach(async () => { + testBlobs = await Promise.all(Array.from({ length: 2 }, () => makeRandomBlob(3))); testBlobsHashes = testBlobs.map(b => b.getEthVersionedBlobHash()); }); diff --git a/yarn-project/blob-client/src/client/http.ts b/yarn-project/blob-client/src/client/http.ts index a27fd2ea1d98..5d626933261a 100644 --- a/yarn-project/blob-client/src/client/http.ts +++ b/yarn-project/blob-client/src/client/http.ts @@ -215,8 +215,8 @@ export class HttpBlobClient implements BlobClientInterface { const getFilledBlobs = (): Blob[] => resultBlobs.filter((b): b is Blob => b !== undefined); // Helper to fill in results from fetched blobs - const fillResults = (fetchedBlobs: BlobJson[]): Blob[] => { - const blobs = processFetchedBlobs(fetchedBlobs, blobHashes, this.log); + const fillResults = async (fetchedBlobs: BlobJson[]): Promise => { + const blobs = await processFetchedBlobs(fetchedBlobs, blobHashes, this.log); // Fill in any missing positions with matching blobs for (let i = 0; i < blobHashes.length; i++) { if (resultBlobs[i] === undefined) { @@ -269,7 +269,7 @@ export class HttpBlobClient implements BlobClientInterface { ...ctx, }); const blobs = await this.getBlobsFromHost(l1ConsensusHostUrl, slotNumber, l1ConsensusHostIndex); - const result = fillResults(blobs); + const result = await fillResults(blobs); this.log.debug( `Got ${blobs.length} blobs from consensus host (total: ${result.length}/${blobHashes.length})`, { slotNumber, l1ConsensusHostUrl, ...ctx }, @@ -312,7 +312,7 @@ export class HttpBlobClient implements BlobClientInterface { this.log.debug('No blobs found from archive client', archiveCtx); } else { this.log.trace(`Got ${allBlobs.length} blobs from archive client before filtering`, archiveCtx); - const result = fillResults(allBlobs); + const result = await fillResults(allBlobs); this.log.debug( `Got 
${allBlobs.length} blobs from archive client (total: ${result.length}/${blobHashes.length})`, archiveCtx, @@ -345,7 +345,7 @@ export class HttpBlobClient implements BlobClientInterface { */ private async tryFileStores( getMissingBlobHashes: () => Buffer[], - fillResults: (blobs: BlobJson[]) => Blob[], + fillResults: (blobs: BlobJson[]) => Promise, ctx: { blockHash: string; blobHashes: string[] }, ): Promise { // Shuffle clients for load distribution @@ -366,7 +366,7 @@ export class HttpBlobClient implements BlobClientInterface { }); const blobs = await client.getBlobsByHashes(blobHashStrings); if (blobs.length > 0) { - const result = fillResults(blobs); + const result = await fillResults(blobs); this.log.debug( `Got ${blobs.length} blobs from filestore (total: ${result.length}/${ctx.blobHashes.length})`, { @@ -388,7 +388,7 @@ export class HttpBlobClient implements BlobClientInterface { l1ConsensusHostIndex?: number, ): Promise { const blobs = await this.getBlobsFromHost(hostUrl, blockHashOrSlot, l1ConsensusHostIndex); - return processFetchedBlobs(blobs, blobHashes, this.log).filter((b): b is Blob => b !== undefined); + return (await processFetchedBlobs(blobs, blobHashes, this.log)).filter((b): b is Blob => b !== undefined); } public async getBlobsFromHost( @@ -616,7 +616,11 @@ function parseBlobJson(data: any): BlobJson { // Returns an array that maps each blob hash to the corresponding blob, or undefined if the blob is not found // or the data does not match the commitment. 
-function processFetchedBlobs(blobs: BlobJson[], blobHashes: Buffer[], logger: Logger): (Blob | undefined)[] { +async function processFetchedBlobs( + blobs: BlobJson[], + blobHashes: Buffer[], + logger: Logger, +): Promise<(Blob | undefined)[]> { const requestedBlobHashes = new Set(blobHashes.map(bufferToHex)); const hashToBlob = new Map(); for (const blobJson of blobs) { @@ -626,7 +630,7 @@ function processFetchedBlobs(blobs: BlobJson[], blobHashes: Buffer[], logger: Lo } try { - const blob = Blob.fromJson(blobJson); + const blob = await Blob.fromJson(blobJson); hashToBlob.set(hashHex, blob); } catch (err) { // If the above throws, it's likely that the blob commitment does not match the hash of the blob data. diff --git a/yarn-project/blob-client/src/client/tests.ts b/yarn-project/blob-client/src/client/tests.ts index 826aa286781f..d85430f4fb51 100644 --- a/yarn-project/blob-client/src/client/tests.ts +++ b/yarn-project/blob-client/src/client/tests.ts @@ -28,7 +28,7 @@ export function runBlobClientTests( }); it('should send and retrieve blobs by hash', async () => { - const blob = makeRandomBlob(5); + const blob = await makeRandomBlob(5); const blobHash = blob.getEthVersionedBlobHash(); await client.sendBlobsToFilestore([blob]); @@ -39,7 +39,7 @@ export function runBlobClientTests( }); it('should handle multiple blobs', async () => { - const blobs = Array.from({ length: 3 }, () => makeRandomBlob(7)); + const blobs = await Promise.all(Array.from({ length: 3 }, () => makeRandomBlob(7))); const blobHashes = blobs.map(blob => blob.getEthVersionedBlobHash()); await client.sendBlobsToFilestore(blobs); diff --git a/yarn-project/blob-client/src/filestore/filestore_blob_client.test.ts b/yarn-project/blob-client/src/filestore/filestore_blob_client.test.ts index fe30df599709..872f37cb20b4 100644 --- a/yarn-project/blob-client/src/filestore/filestore_blob_client.test.ts +++ b/yarn-project/blob-client/src/filestore/filestore_blob_client.test.ts @@ -74,7 +74,7 @@ 
describe('FileStoreBlobClient', () => { describe('saveBlob', () => { it('should save a blob to the filestore', async () => { - const blob = Blob.fromFields([Fr.random(), Fr.random()]); + const blob = await Blob.fromFields([Fr.random(), Fr.random()]); const versionedHash = `0x${blob.getEthVersionedBlobHash().toString('hex')}`; await client.saveBlob(blob); @@ -88,7 +88,7 @@ describe('FileStoreBlobClient', () => { }); it('should skip saving if blob already exists and skipIfExists=true', async () => { - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); const versionedHash = `0x${blob.getEthVersionedBlobHash().toString('hex')}`; // Save first time @@ -107,7 +107,7 @@ describe('FileStoreBlobClient', () => { }); it('should overwrite if skipIfExists=false', async () => { - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); const versionedHash = `0x${blob.getEthVersionedBlobHash().toString('hex')}`; // Save first time @@ -130,8 +130,8 @@ describe('FileStoreBlobClient', () => { describe('saveBlobs', () => { it('should save multiple blobs', async () => { - const blob1 = Blob.fromFields([Fr.random()]); - const blob2 = Blob.fromFields([Fr.random()]); + const blob1 = await Blob.fromFields([Fr.random()]); + const blob2 = await Blob.fromFields([Fr.random()]); await client.saveBlobs([blob1, blob2]); @@ -145,7 +145,7 @@ describe('FileStoreBlobClient', () => { describe('getBlobsByHashes', () => { it('should retrieve blobs by their versioned hashes', async () => { - const blob = Blob.fromFields([Fr.random(), Fr.random()]); + const blob = await Blob.fromFields([Fr.random(), Fr.random()]); const versionedHash = `0x${blob.getEthVersionedBlobHash().toString('hex')}`; await client.saveBlob(blob); @@ -163,8 +163,8 @@ describe('FileStoreBlobClient', () => { }); it('should retrieve multiple blobs', async () => { - const blob1 = Blob.fromFields([Fr.random()]); - const blob2 = 
Blob.fromFields([Fr.random()]); + const blob1 = await Blob.fromFields([Fr.random()]); + const blob2 = await Blob.fromFields([Fr.random()]); await client.saveBlobs([blob1, blob2]); @@ -177,7 +177,7 @@ describe('FileStoreBlobClient', () => { }); it('should skip blobs that fail to parse', async () => { - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); const hash = `0x${blob.getEthVersionedBlobHash().toString('hex')}`; // Save invalid JSON @@ -191,7 +191,7 @@ describe('FileStoreBlobClient', () => { describe('exists', () => { it('should return true if blob exists', async () => { - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); const versionedHash = `0x${blob.getEthVersionedBlobHash().toString('hex')}`; await client.saveBlob(blob); @@ -240,14 +240,14 @@ describe('FileStoreBlobClient', () => { const readOnlyStore = new MockReadOnlyFileStore(); const readOnlyClient = new FileStoreBlobClient(readOnlyStore, basePath); - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); await expect(readOnlyClient.saveBlob(blob)).rejects.toThrow('FileStore is read-only'); }); it('should be able to read from read-only store', async () => { const files = new Map(); - const blob = Blob.fromFields([Fr.random()]); + const blob = await Blob.fromFields([Fr.random()]); const versionedHash = `0x${blob.getEthVersionedBlobHash().toString('hex')}`; const path = `${basePath}/blobs/${versionedHash}.data`; diff --git a/yarn-project/blob-lib/src/blob.test.ts b/yarn-project/blob-lib/src/blob.test.ts index 4939baa09f84..3053eb1d077d 100644 --- a/yarn-project/blob-lib/src/blob.test.ts +++ b/yarn-project/blob-lib/src/blob.test.ts @@ -58,10 +58,10 @@ describe('blob', () => { // This test ensures that the noir blob lib correctly matches the kzg lib const blobFields = Array(400).fill(new Fr(3)); const blobFieldsHash = await poseidon2Hash(blobFields); - const blob = 
Blob.fromFields(blobFields); + const blob = await Blob.fromFields(blobFields); const challengeZ = await blob.computeChallengeZ(blobFieldsHash); - const { y } = blob.evaluate(challengeZ, true /* verifyProof */); + const { y } = await blob.evaluate(challengeZ, true /* verifyProof */); expect(blob.commitment.toString('hex')).toMatchInlineSnapshot( `"b2803d5fe972914ba3616033e2748bbaa6dbcddefc3721a54895a7a45e77504dd1a971c7e8d8292be943d05bccebcfea"`, @@ -88,10 +88,10 @@ describe('blob', () => { // This test ensures that the noir blob lib correctly matches the kzg lib const blobFields = Array.from({ length: FIELDS_PER_BLOB }).map((_, i) => new Fr(i + 2)); const blobFieldsHash = await poseidon2Hash(blobFields); - const blob = Blob.fromFields(blobFields); + const blob = await Blob.fromFields(blobFields); const challengeZ = await blob.computeChallengeZ(blobFieldsHash); - const { y } = blob.evaluate(challengeZ, true /* verifyProof */); + const { y } = await blob.evaluate(challengeZ, true /* verifyProof */); expect(blob.commitment.toString('hex')).toMatchInlineSnapshot( `"ac771dea41e29fc2b7016c32731602c0812548ba0f491864a4e03fdb94b8d3d195faad1967cdf005acf73088b0e8474a"`, @@ -114,15 +114,15 @@ describe('blob', () => { ); }); - it('should serialize and deserialize a blob', () => { - const blob = makeRandomBlob(5); + it('should serialize and deserialize a blob', async () => { + const blob = await makeRandomBlob(5); const blobBuffer = blob.toBuffer(); expect(Blob.fromBuffer(blobBuffer)).toEqual(blob); }); - it('should create a blob from a JSON object', () => { - const blob = makeRandomBlob(7); + it('should create a blob from a JSON object', async () => { + const blob = await makeRandomBlob(7); const blobJson = blob.toJSON(); - expect(Blob.fromJson(blobJson)).toEqual(blob); + expect(await Blob.fromJson(blobJson)).toEqual(blob); }); }); diff --git a/yarn-project/blob-lib/src/blob.ts b/yarn-project/blob-lib/src/blob.ts index 25f0b087e773..230b2ff957a9 100644 --- 
a/yarn-project/blob-lib/src/blob.ts +++ b/yarn-project/blob-lib/src/blob.ts @@ -42,8 +42,8 @@ export class Blob { * * @throws If data does not match the expected length (BYTES_PER_BLOB). */ - static fromBlobBuffer(data: Uint8Array): Blob { - const commitment = computeBlobCommitment(data); + static async fromBlobBuffer(data: Uint8Array): Promise { + const commitment = await computeBlobCommitment(data); return new Blob(data, commitment); } @@ -55,13 +55,13 @@ export class Blob { * @param fields - The array of fields to create the Blob from. * @returns A Blob created from the array of fields. */ - static fromFields(fields: Fr[]): Blob { + static async fromFields(fields: Fr[]): Promise { if (fields.length > FIELDS_PER_BLOB) { throw new Error(`Attempted to overfill blob with ${fields.length} fields. The maximum is ${FIELDS_PER_BLOB}.`); } const data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB); - const commitment = computeBlobCommitment(data); + const commitment = await computeBlobCommitment(data); return new Blob(data, commitment); } @@ -88,9 +88,9 @@ export class Blob { * @param json - The JSON object to create the Blob from. * @returns A Blob created from the JSON object. */ - static fromJson(json: BlobJson): Blob { + static async fromJson(json: BlobJson): Promise { const blobBuffer = Buffer.from(json.blob.slice(2), 'hex'); - const blob = Blob.fromBlobBuffer(blobBuffer); + const blob = await Blob.fromBlobBuffer(blobBuffer); if (blob.commitment.toString('hex') !== json.kzg_commitment.slice(2)) { throw new Error('KZG commitment does not match'); @@ -134,9 +134,9 @@ export class Blob { * y: BLS12Fr - Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. * proof: Buffer - KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). 
*/ - evaluate(challengeZ: Fr, verifyProof = false) { + async evaluate(challengeZ: Fr, verifyProof = false) { const kzg = getKzg(); - const res = kzg.computeKzgProof(this.data, challengeZ.toBuffer()); + const res = await kzg.asyncComputeKzgProof(this.data, challengeZ.toBuffer()); if (verifyProof && !kzg.verifyKzgProof(this.commitment, challengeZ.toBuffer(), res[1], res[0])) { throw new Error(`KZG proof did not verify.`); } diff --git a/yarn-project/blob-lib/src/blob_batching.test.ts b/yarn-project/blob-lib/src/blob_batching.test.ts index 1568be1ef2ea..11e2da774411 100644 --- a/yarn-project/blob-lib/src/blob_batching.test.ts +++ b/yarn-project/blob-lib/src/blob_batching.test.ts @@ -24,9 +24,9 @@ const trustedSetup = JSON.parse( ); describe('Blob Batching', () => { - it.each([10, 100, 400])('our BLS library should correctly commit to a blob of %p items', size => { + it.each([10, 100, 400])('our BLS library should correctly commit to a blob of %p items', async size => { const blobFields = [new Fr(size)].concat(Array.from({ length: size - 1 }).map((_, i) => new Fr(size + i))); - const ourBlob = Blob.fromFields(blobFields); + const ourBlob = await Blob.fromFields(blobFields); const point = BLS12Point.decompress(ourBlob.commitment); @@ -49,7 +49,7 @@ describe('Blob Batching', () => { it('should construct and verify 1 blob', async () => { // Initialize 400 fields. This test shows that a single blob works with batching methods. 
const blobFields = Array.from({ length: 400 }, (_, i) => new Fr(i + 123)); - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); expect(blobs.length).toBe(1); const onlyBlob = blobs[0]; @@ -66,7 +66,7 @@ describe('Blob Batching', () => { const commitment = BLS12Point.decompress(onlyBlob.commitment); // 'Batched' evaluation - const { y, proof } = onlyBlob.evaluate(finalZ); + const { y, proof } = await onlyBlob.evaluate(finalZ); const q = BLS12Point.decompress(proof); const finalBlobCommitmentsHash = sha256ToField([onlyBlob.commitment]); @@ -134,7 +134,7 @@ describe('Blob Batching', () => { blobFields[numBlobFields - 1] = encodeCheckpointEndMarker({ numBlobFields }); } - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); expect(blobs.length).toBe(numBlobs); const finalChallenges = await BatchedBlobAccumulator.precomputeBatchedBlobChallenges([blobFields]); @@ -153,7 +153,7 @@ describe('Blob Batching', () => { // Batched evaluation // NB: we share the same finalZ between blobs - const proofObjects = blobs.map(b => b.evaluate(finalZ)); + const proofObjects = await Promise.all(blobs.map(b => b.evaluate(finalZ))); const evalYs = proofObjects.map(({ y }) => y); const qs = proofObjects.map(({ proof }) => BLS12Point.decompress(proof)); diff --git a/yarn-project/blob-lib/src/blob_batching.ts b/yarn-project/blob-lib/src/blob_batching.ts index 25b58416426a..d4f1b058622b 100644 --- a/yarn-project/blob-lib/src/blob_batching.ts +++ b/yarn-project/blob-lib/src/blob_batching.ts @@ -109,7 +109,7 @@ export class BatchedBlobAccumulator { for (const blobFields of blobFieldsPerCheckpoint) { // Compute the hash of all the fields in the block. const blobFieldsHash = await computeBlobFieldsHash(blobFields); - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); for (const blob of blobs) { // Compute the challenge z for each blob and accumulate it. 
const challengeZ = await blob.computeChallengeZ(blobFieldsHash); @@ -126,7 +126,7 @@ export class BatchedBlobAccumulator { } // Now we have a shared challenge for all blobs, evaluate them... - const proofObjects = allBlobs.map(b => b.evaluate(z)); + const proofObjects = await Promise.all(allBlobs.map(b => b.evaluate(z))); const evaluations = await Promise.all(proofObjects.map(({ y }) => hashNoirBigNumLimbs(y))); // ...and find the challenge for the linear combination of blobs. let gamma = evaluations[0]; @@ -145,7 +145,7 @@ export class BatchedBlobAccumulator { * @returns An updated blob accumulator. */ async accumulateBlob(blob: Blob, blobFieldsHash: Fr) { - const { proof, y: thisY } = blob.evaluate(this.finalBlobChallenges.z); + const { proof, y: thisY } = await blob.evaluate(this.finalBlobChallenges.z); const thisC = BLS12Point.decompress(blob.commitment); const thisQ = BLS12Point.decompress(proof); const blobChallengeZ = await blob.computeChallengeZ(blobFieldsHash); @@ -192,7 +192,7 @@ export class BatchedBlobAccumulator { * @returns An updated blob accumulator. 
*/ async accumulateFields(blobFields: Fr[]) { - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); if (blobs.length > BLOBS_PER_CHECKPOINT) { throw new Error( diff --git a/yarn-project/blob-lib/src/blob_utils.test.ts b/yarn-project/blob-lib/src/blob_utils.test.ts index ba7b9120d5b7..c1e5cbf0820d 100644 --- a/yarn-project/blob-lib/src/blob_utils.test.ts +++ b/yarn-project/blob-lib/src/blob_utils.test.ts @@ -7,33 +7,33 @@ import { makeCheckpointBlobData } from './encoding/fixtures.js'; import { BlobDeserializationError } from './errors.js'; describe('blob fields encoding', () => { - it('can process correct encoding for a single blob', () => { + it('can process correct encoding for a single blob', async () => { const checkpointBlobData = makeCheckpointBlobData(); const blobFields = encodeCheckpointBlobData(checkpointBlobData); expect(blobFields.length).toBeLessThan(FIELDS_PER_BLOB); - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); expect(blobs.length).toBe(1); const decoded = decodeCheckpointBlobDataFromBlobs(blobs); expect(decoded).toEqual(checkpointBlobData); }); - it('can process correct encoding for multiple blobs', () => { + it('can process correct encoding for multiple blobs', async () => { const checkpointBlobData = makeCheckpointBlobData({ numBlocks: 2, numTxsPerBlock: 1, isFullTx: true }); const blobFields = encodeCheckpointBlobData(checkpointBlobData); expect(blobFields.length).toBeGreaterThan(FIELDS_PER_BLOB); - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); expect(blobs.length).toBeGreaterThan(1); const decoded = decodeCheckpointBlobDataFromBlobs(blobs); expect(decoded).toEqual(checkpointBlobData); }); - it('throws processing random blob data', () => { + it('throws processing random blob data', async () => { const blobFields = Array.from({ length: 10 }, () => Fr.random()); - const blobs = 
getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); expect(blobs.length).toBe(1); expect(() => decodeCheckpointBlobDataFromBlobs(blobs)).toThrow(BlobDeserializationError); diff --git a/yarn-project/blob-lib/src/blob_utils.ts b/yarn-project/blob-lib/src/blob_utils.ts index 14a380b4446b..d0d23804fe43 100644 --- a/yarn-project/blob-lib/src/blob_utils.ts +++ b/yarn-project/blob-lib/src/blob_utils.ts @@ -30,14 +30,16 @@ export function getPrefixedEthBlobCommitments(blobs: Blob[]): `0x${string}` { * * @throws If the number of fields does not match what's indicated by the checkpoint prefix. */ -export function getBlobsPerL1Block(fields: Fr[]): Blob[] { +export async function getBlobsPerL1Block(fields: Fr[]): Promise { if (!fields.length) { throw new Error('Cannot create blobs from empty fields.'); } const numBlobs = Math.ceil(fields.length / FIELDS_PER_BLOB); - return Array.from({ length: numBlobs }, (_, i) => - Blob.fromFields(fields.slice(i * FIELDS_PER_BLOB, (i + 1) * FIELDS_PER_BLOB)), + return await Promise.all( + Array.from({ length: numBlobs }, (_, i) => + Blob.fromFields(fields.slice(i * FIELDS_PER_BLOB, (i + 1) * FIELDS_PER_BLOB)), + ), ); } diff --git a/yarn-project/blob-lib/src/hash.ts b/yarn-project/blob-lib/src/hash.ts index 31f83e6b8524..a1ae9040fc88 100644 --- a/yarn-project/blob-lib/src/hash.ts +++ b/yarn-project/blob-lib/src/hash.ts @@ -44,12 +44,12 @@ export async function computeBlobFieldsHash(fields: Fr[]): Promise { return sponge.squeeze(); } -export function computeBlobCommitment(data: Uint8Array): Buffer { +export async function computeBlobCommitment(data: Uint8Array): Promise { if (data.length !== BYTES_PER_BLOB) { throw new Error(`Expected ${BYTES_PER_BLOB} bytes per blob. 
Got ${data.length}.`); } - return Buffer.from(getKzg().blobToKzgCommitment(data)); + return Buffer.from(await getKzg().asyncBlobToKzgCommitment(data)); } /** diff --git a/yarn-project/blob-lib/src/testing.ts b/yarn-project/blob-lib/src/testing.ts index 8b7bf19740cd..759f7da0790d 100644 --- a/yarn-project/blob-lib/src/testing.ts +++ b/yarn-project/blob-lib/src/testing.ts @@ -89,6 +89,6 @@ export function makeFinalBlobBatchingChallenges(seed = 1) { * @param length * @returns */ -export function makeRandomBlob(length: number): Blob { +export function makeRandomBlob(length: number): Promise { return Blob.fromFields([...Array.from({ length: length }, () => Fr.random())]); } diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts index 186b3b02c12d..f2f78d843213 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts @@ -86,12 +86,12 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { }); describe('blocks', () => { - const getBlobs = (serializedTx: `0x${string}`) => { + const getBlobs = async (serializedTx: `0x${string}`) => { const parsedTx = parseTransaction(serializedTx); if (parsedTx.sidecars === false) { throw new Error('No sidecars found in tx'); } - return parsedTx.sidecars!.map(sidecar => Blob.fromBlobBuffer(hexToBuffer(sidecar.blob))); + return await Promise.all(parsedTx.sidecars!.map(sidecar => Blob.fromBlobBuffer(hexToBuffer(sidecar.blob)))); }; /** Returns the last synced checkpoint number for a node */ @@ -383,7 +383,7 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { // We also need to send the blob to the sink, so the node can get it logger.warn(`Sending blobs to blob client`); - const blobs = getBlobs(l2BlockTx); + const blobs = await getBlobs(l2BlockTx); const blobClient = createBlobClient(context.config); await 
blobClient.sendBlobsToFilestore(blobs); diff --git a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts index 9b36f58493ff..c28b6532bace 100644 --- a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts @@ -457,7 +457,7 @@ describe('L1Publisher integration', () => { blockSource.getL1ToL2Messages.mockResolvedValueOnce(currentL1ToL2Messages); const checkpointBlobFields = checkpoint.toBlobFields(); - const blockBlobs = getBlobsPerL1Block(checkpointBlobFields); + const blockBlobs = await getBlobsPerL1Block(checkpointBlobFields); let prevBlobAccumulatorHash = (await rollup.getCurrentBlobCommitmentsHash()).toBuffer(); diff --git a/yarn-project/noir-protocol-circuits-types/src/utils/server/foreign_call_handler.ts b/yarn-project/noir-protocol-circuits-types/src/utils/server/foreign_call_handler.ts index ccc3d73ee1c5..33e40d554ee6 100644 --- a/yarn-project/noir-protocol-circuits-types/src/utils/server/foreign_call_handler.ts +++ b/yarn-project/noir-protocol-circuits-types/src/utils/server/foreign_call_handler.ts @@ -94,7 +94,7 @@ export async function foreignCallHandler(name: string, args: ForeignCallInput[]) ); } - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); blobs.forEach((blob, i) => { const injected = kzgCommitments[i]; const calculated = BLS12Point.decompress(blob.commitment); diff --git a/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts b/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts index 883e75dc2102..cb789075c3f8 100644 --- a/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts +++ b/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts @@ -242,7 +242,7 @@ export class LightweightCheckpointBuilder { const newArchive = 
this.lastArchives[this.lastArchives.length - 1]; - const blobs = getBlobsPerL1Block(this.blobFields); + const blobs = await getBlobsPerL1Block(this.blobFields); const blobsHash = computeBlobsHashFromBlobs(blobs); const inHash = computeInHashFromL1ToL2Messages(this.l1ToL2Messages); diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index 13d29d31bc98..01c0726d70bb 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -253,8 +253,8 @@ export function getPublicChonkVerifierPrivateInputsFromTx(tx: Tx | ProcessedTx, // Build "hints" as the private inputs for the checkpoint root rollup circuit. // The `blobCommitments` will be accumulated and checked in the root rollup against the `finalBlobChallenges`. // The `blobsHash` will be validated on L1 against the submitted blob data. -export const buildBlobHints = (blobFields: Fr[]) => { - const blobs = getBlobsPerL1Block(blobFields); +export const buildBlobHints = async (blobFields: Fr[]) => { + const blobs = await getBlobsPerL1Block(blobFields); const blobCommitments = getBlobCommitmentsFromBlobs(blobs); const blobsHash = computeBlobsHashFromBlobs(blobs); return { blobCommitments, blobs, blobsHash }; diff --git a/yarn-project/prover-client/src/orchestrator/block_building_helpers.test.ts b/yarn-project/prover-client/src/orchestrator/block_building_helpers.test.ts index e38d60e8ccb6..5f1755f192fe 100644 --- a/yarn-project/prover-client/src/orchestrator/block_building_helpers.test.ts +++ b/yarn-project/prover-client/src/orchestrator/block_building_helpers.test.ts @@ -16,7 +16,7 @@ describe('buildBlobHints', () => { encodeCheckpointEndMarker({ numBlobFields: blobFieldsWithoutEndMarker.length + 1 }), ]); - const { blobCommitments, blobsHash, blobs } = buildBlobHints(blobFields); + const { blobCommitments, blobsHash, blobs 
} = await buildBlobHints(blobFields); expect(blobs.length).toBe(1); const onlyBlob = blobs[0]; @@ -37,7 +37,7 @@ describe('buildBlobHints', () => { const zStr = challengeZ.toString(); expect(zStr).toMatchInlineSnapshot(`"0x11d6daed56531bd5c5acf341663d21089bb96913f4e716dca3cdb01b8d5735a3"`); - const proof = onlyBlob.evaluate(challengeZ, true /* verifyProof */); + const proof = await onlyBlob.evaluate(challengeZ, true /* verifyProof */); const yStr = proof.y.toString(); expect(yStr).toMatchInlineSnapshot(`"0x6033e46c697b3de1a5ddedb940ae6ccdb6efc0adeb255336b0220d3fd4b76720"`); diff --git a/yarn-project/prover-client/src/orchestrator/checkpoint-proving-state.ts b/yarn-project/prover-client/src/orchestrator/checkpoint-proving-state.ts index 7b92edcd2d6e..32a813c6bc25 100644 --- a/yarn-project/prover-client/src/orchestrator/checkpoint-proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/checkpoint-proving-state.ts @@ -85,7 +85,7 @@ export class CheckpointProvingState { typeof L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH >, public parentEpoch: EpochProvingState, - private onBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => void, + private onBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => Promise, ) { this.blockProofs = new UnbalancedTreeStore(totalNumBlocks); this.firstBlockNumber = BlockNumber(headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber + 1); @@ -245,7 +245,7 @@ export class CheckpointProvingState { this.endBlobAccumulator = await accumulateBlobs(this.blobFields!, startBlobAccumulator); this.startBlobAccumulator = startBlobAccumulator; - this.onBlobAccumulatorSet(this); + await this.onBlobAccumulatorSet(this); return this.endBlobAccumulator; } @@ -271,7 +271,7 @@ export class CheckpointProvingState { return this.totalNumBlocks === 1 ? 
'rollup-checkpoint-root-single-block' : 'rollup-checkpoint-root'; } - public getCheckpointRootRollupInputs() { + public async getCheckpointRootRollupInputs() { const proofs = this.#getChildProofsForRoot(); const nonEmptyProofs = proofs.filter(p => !!p); if (proofs.length !== nonEmptyProofs.length) { @@ -287,7 +287,7 @@ export class CheckpointProvingState { // `blobFields` must've been set if `startBlobAccumulator` is set (in `accumulateBlobs`). const blobFields = this.blobFields!; - const { blobCommitments, blobsHash } = buildBlobHints(blobFields); + const { blobCommitments, blobsHash } = await buildBlobHints(blobFields); const hints = CheckpointRootRollupHints.from({ previousBlockHeader: this.headerOfLastBlockInPreviousCheckpoint, diff --git a/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts b/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts index 97cbc1013827..a551082873c6 100644 --- a/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts @@ -76,7 +76,7 @@ export class EpochProvingState { public readonly epochNumber: EpochNumber, public readonly totalNumCheckpoints: number, private readonly finalBlobBatchingChallenges: FinalBlobBatchingChallenges, - private onCheckpointBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => void, + private onCheckpointBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => Promise, private completionCallback: (result: ProvingResult) => void, private rejectionCallback: (reason: string) => void, ) { diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 4ac203a6c271..23a9f040a550 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -898,9 +898,9 @@ export class ProvingOrchestrator implements EpochProver { await 
this.verifyBuiltBlockAgainstSyncedState(provingState); if (checkpointProvingState.totalNumBlocks === 1) { - this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState); + await this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState); } else { - this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation); + await this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation); } // We are finished with the block at this point, ensure the fork is cleaned up @@ -1009,14 +1009,14 @@ export class ProvingOrchestrator implements EpochProver { }, signal => this.prover.getBlockMergeRollupProof(inputs, signal, provingState.epochNumber), ), - result => { + async result => { provingState.setBlockMergeRollupProof(location, result); - this.checkAndEnqueueNextBlockMergeRollup(provingState, location); + await this.checkAndEnqueueNextBlockMergeRollup(provingState, location); }, ); } - private enqueueCheckpointRootRollup(provingState: CheckpointProvingState) { + private async enqueueCheckpointRootRollup(provingState: CheckpointProvingState) { if (!provingState.verifyState()) { this.logger.debug('Not running checkpoint root rollup. 
State no longer valid.'); return; @@ -1031,7 +1031,7 @@ export class ProvingOrchestrator implements EpochProver { this.logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`); - const inputs = provingState.getCheckpointRootRollupInputs(); + const inputs = await provingState.getCheckpointRootRollupInputs(); this.deferredProving( provingState, @@ -1191,25 +1191,28 @@ export class ProvingOrchestrator implements EpochProver { this.enqueueBlockRootRollup(provingState); } - private checkAndEnqueueNextBlockMergeRollup(provingState: CheckpointProvingState, currentLocation: TreeNodeLocation) { + private async checkAndEnqueueNextBlockMergeRollup( + provingState: CheckpointProvingState, + currentLocation: TreeNodeLocation, + ) { if (!provingState.isReadyForBlockMerge(currentLocation)) { return; } const parentLocation = provingState.getParentLocation(currentLocation); if (parentLocation.level === 0) { - this.checkAndEnqueueCheckpointRootRollup(provingState); + await this.checkAndEnqueueCheckpointRootRollup(provingState); } else { this.enqueueBlockMergeRollup(provingState, parentLocation); } } - private checkAndEnqueueCheckpointRootRollup(provingState: CheckpointProvingState) { + private async checkAndEnqueueCheckpointRootRollup(provingState: CheckpointProvingState) { if (!provingState.isReadyForCheckpointRoot()) { return; } - this.enqueueCheckpointRootRollup(provingState); + await this.enqueueCheckpointRootRollup(provingState); } private checkAndEnqueueNextCheckpointMergeRollup(provingState: EpochProvingState, currentLocation: TreeNodeLocation) { diff --git a/yarn-project/prover-node/src/job/epoch-proving-job.ts b/yarn-project/prover-node/src/job/epoch-proving-job.ts index 8983a5047bdb..499049701e89 100644 --- a/yarn-project/prover-node/src/job/epoch-proving-job.ts +++ b/yarn-project/prover-node/src/job/epoch-proving-job.ts @@ -149,7 +149,9 @@ export class EpochProvingJob implements Traceable { try { const blobFieldsPerCheckpoint = 
this.checkpoints.map(checkpoint => checkpoint.toBlobFields()); + this.log.info(`Blob fields per checkpoint: ${timer.ms()}ms`); const finalBlobBatchingChallenges = await buildFinalBlobChallenges(blobFieldsPerCheckpoint); + this.log.info(`Final blob batching challenges: ${timer.ms()}ms`); this.prover.startNewEpoch(epochNumber, epochSizeCheckpoints, finalBlobBatchingChallenges); await this.prover.startChonkVerifierCircuits(Array.from(this.txs.values())); diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts index 789dae9a37eb..d944412263d5 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts @@ -207,7 +207,7 @@ describe('SequencerPublisher', () => { it('bundles propose and vote tx to l1', async () => { const checkpoint = new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber); - const expectedBlobs = getBlobsPerL1Block(checkpoint.toBlobFields()); + const expectedBlobs = await getBlobsPerL1Block(checkpoint.toBlobFields()); await publisher.enqueueProposeCheckpoint(checkpoint, CommitteeAttestationsAndSigners.empty(), Signature.empty()); const { govPayload, voteSig } = mockGovernancePayload(); diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts index a88d42f05029..bbc0335d29c8 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts @@ -643,7 +643,7 @@ export class SequencerPublisher { ): Promise { const ts = BigInt((await this.l1TxUtils.getBlock()).timestamp + this.ethereumSlotDuration); const blobFields = checkpoint.toBlobFields(); - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); const blobInput 
= getPrefixedEthBlobCommitments(blobs); const args = [ @@ -953,7 +953,7 @@ export class SequencerPublisher { const checkpointHeader = checkpoint.header; const blobFields = checkpoint.toBlobFields(); - const blobs = getBlobsPerL1Block(blobFields); + const blobs = await getBlobsPerL1Block(blobFields); const proposeTxArgs: L1ProcessArgs = { header: checkpointHeader, diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 29c5d497576e..8bb3c3f773f2 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -765,7 +765,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) } const blobFields = blocks.flatMap(b => b.toBlobFields()); - const blobs: Blob[] = getBlobsPerL1Block(blobFields); + const blobs: Blob[] = await getBlobsPerL1Block(blobFields); await this.blobClient.sendBlobsToFilestore(blobs); this.log.debug(`Uploaded ${blobs.length} blobs to filestore for checkpoint at slot ${proposal.slotNumber}`, { ...proposalInfo,