Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions yarn-project/blob-sink/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,9 @@
"name": "@aztec/blob-sink",
"version": "0.1.0",
"type": "module",
"bin": {
"blob-sink-client": "./dest/client/bin/index.js"
},
"exports": {
"./server": "./dest/server/index.js",
"./client": "./dest/client/index.js",
Expand Down
13 changes: 8 additions & 5 deletions yarn-project/blob-sink/src/archive/blobscan_archive_client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ export const BlobscanBlockResponseSchema = z
commitment: z.string(),
proof: z.string(),
size: z.number().int(),
index: z.number().int(), // This is the index within the tx, not within the block!
index: z.number().int().optional(), // This is the index within the tx, not within the block!
}),
),
}),
Expand Down Expand Up @@ -59,11 +59,14 @@ export class BlobscanArchiveClient implements BlobArchiveClient {
this.baseUrl = baseUrl.replace(/^https?:\/\//, '');
}

/** Returns the base URL this client queries (scheme was stripped at construction). */
public getBaseUrl(): string {
  return this.baseUrl;
}

public async getBlobsFromBlock(blockId: string): Promise<BlobJson[] | undefined> {
const response = await this.fetch(
`https://${this.baseUrl}/blocks/${blockId}?type=canonical&expand=blob%2Cblob_data`,
this.fetchOpts,
);
const url = `https://${this.baseUrl}/blocks/${blockId}?type=canonical&expand=blob%2Cblob_data`;
this.logger.trace(`Fetching blobs for block ${blockId} from ${url}`);
const response = await this.fetch(url, this.fetchOpts);

if (response.status === 404) {
this.logger.debug(`No blobs found for block ${blockId} at ${this.baseUrl}`);
Expand Down
14 changes: 14 additions & 0 deletions yarn-project/blob-sink/src/archive/config.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
import { type L1ReaderConfig, l1ReaderConfigMappings } from '@aztec/ethereum';
import { type ConfigMappingsType, pickConfigMappings } from '@aztec/foundation/config';

/**
 * Configuration for an external blob archive API (shared by the blob-sink
 * client and server configs). Pulls in the L1 chain id from the shared
 * L1 reader config instead of redeclaring it.
 */
export type BlobSinkArchiveApiConfig = {
  // Base URL of the archive API, if one is configured.
  archiveApiUrl?: string;
} & Partial<Pick<L1ReaderConfig, 'l1ChainId'>>;

/** Env-var mappings for BlobSinkArchiveApiConfig. */
export const blobSinkArchiveApiConfigMappings: ConfigMappingsType<BlobSinkArchiveApiConfig> = {
  archiveApiUrl: {
    env: 'BLOB_SINK_ARCHIVE_API_URL',
    description: 'The URL of the archive API',
  },
  // Reuse the canonical l1ChainId mapping so the env var name stays consistent.
  ...pickConfigMappings(l1ReaderConfigMappings, ['l1ChainId']),
};
1 change: 1 addition & 0 deletions yarn-project/blob-sink/src/archive/interface.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,5 @@ import type { BlobJson } from '@aztec/blob-lib';
/** A client for fetching historical blob data from an external archive (eg blobscan). */
export interface BlobArchiveClient {
  /** Returns the raw blob data for the given id, or undefined if not found. */
  getBlobData(id: string): Promise<Buffer | undefined>;
  /** Returns all blobs in the given block, or undefined if the block is unknown. */
  getBlobsFromBlock(blockId: string): Promise<BlobJson[] | undefined>;
  /** Returns the base URL of the archive API this client talks to. */
  getBaseUrl(): string;
}
36 changes: 36 additions & 0 deletions yarn-project/blob-sink/src/client/bin/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
#!/usr/bin/env node

/* eslint-disable no-console */
import { createLogger } from '@aztec/foundation/log';
import { bufferToHex, hexToBuffer } from '@aztec/foundation/string';

import { getBlobSinkConfigFromEnv } from '../config.js';
import { createBlobSinkClient } from '../factory.js';

/**
 * CLI entry point: fetches blob sidecars for a given L1 block hash,
 * optionally filtered by a list of versioned blob hashes passed as
 * extra arguments, and prints each blob as JSON to stdout.
 */
async function main() {
  const log = createLogger('blob-sink-client');

  // argv[0] is node, argv[1] is the script path; the block hash is the first real arg.
  const [, , blockHash, ...rawBlobHashes] = process.argv;
  if (!blockHash) {
    log.error('Please provide a block hash as an argument.');
    process.exit(1);
  }

  const blobHashes = rawBlobHashes.map(hexToBuffer);
  log.info(`Fetching blobs for block hash ${blockHash}`);
  if (blobHashes.length > 0) {
    log.info(`Filtering by blob hashes ${blobHashes.map(bufferToHex).join(', ')}`);
  }

  const client = createBlobSinkClient(getBlobSinkConfigFromEnv());
  const blobs = await client.getBlobSidecar(blockHash, blobHashes);
  log.info(`Got ${blobs.length} blobs`);
  blobs.forEach(blob => console.log(blob.toJson()));
}

// Example usage:
// $ L1_CHAIN_ID=11155111 LOG_LEVEL=trace yarn blob-sink-client 0x7d81980a40426c40544f0f729ada953be406730b877b5865d6cdc35cc8f9c84e 0x010657f37554c781402a22917dee2f75def7ab966d7b770905398eba3c444014
main().catch(err => {
  console.error(err);
  process.exit(1);
});
5 changes: 4 additions & 1 deletion yarn-project/blob-sink/src/client/config.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
import { type ConfigMappingsType, getConfigFromMappings } from '@aztec/foundation/config';

import { type BlobSinkArchiveApiConfig, blobSinkArchiveApiConfigMappings } from '../archive/config.js';

/**
* The configuration for the blob sink client
*/
export interface BlobSinkConfig {
export interface BlobSinkConfig extends BlobSinkArchiveApiConfig {
/**
* The URL of the blob sink
*/
Expand Down Expand Up @@ -54,6 +56,7 @@ export const blobSinkConfigMapping: ConfigMappingsType<BlobSinkConfig> = {
description:
'The header name for the L1 consensus client API key, if needed. Added as "<api-key-header>: <api-key>"',
},
...blobSinkArchiveApiConfigMappings,
};

/**
Expand Down
2 changes: 1 addition & 1 deletion yarn-project/blob-sink/src/client/factory.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import type { BlobSinkClientInterface } from './interface.js';
import { LocalBlobSinkClient } from './local.js';

export function createBlobSinkClient(config?: BlobSinkConfig): BlobSinkClientInterface {
if (!config?.blobSinkUrl && !config?.l1ConsensusHostUrl) {
if (!config?.blobSinkUrl && !config?.l1ConsensusHostUrl && !config?.archiveApiUrl && !config?.l1ChainId) {
const blobStore = new MemoryBlobStore();
return new LocalBlobSinkClient(blobStore);
}
Expand Down
83 changes: 49 additions & 34 deletions yarn-project/blob-sink/src/client/http.test.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { Blob } from '@aztec/blob-lib';
import { Blob, type BlobJson } from '@aztec/blob-lib';
import { makeEncodedBlob, makeUnencodedBlob } from '@aztec/blob-lib/testing';
import { Fr } from '@aztec/foundation/fields';

Expand Down Expand Up @@ -60,6 +60,8 @@ describe('HttpBlobSinkClient', () => {
let consensusHostServer: http.Server | undefined = undefined;
let consensusHostPort: number | undefined = undefined;

let blobData: BlobJson[];

const MOCK_SLOT_NUMBER = 1;

beforeEach(async () => {
Expand All @@ -70,6 +72,36 @@ describe('HttpBlobSinkClient', () => {

testNonEncodedBlob = await makeUnencodedBlob(3);
testNonEncodedBlobHash = testNonEncodedBlob.getEthVersionedBlobHash();

blobData = [
// Correctly encoded blob
{
index: 0,
blob: `0x${Buffer.from(testEncodedBlob.data).toString('hex')}`,
// eslint-disable-next-line camelcase
kzg_commitment: `0x${testEncodedBlob.commitment.toString('hex')}`,
// eslint-disable-next-line camelcase
kzg_proof: `0x${testEncodedBlob.proof.toString('hex')}`,
},
// Correctly encoded blob, but we do not ask for it in the client
{
index: 1,
blob: `0x${Buffer.from(testBlobIgnore.data).toString('hex')}`,
// eslint-disable-next-line camelcase
kzg_commitment: `0x${testBlobIgnore.commitment.toString('hex')}`,
// eslint-disable-next-line camelcase
kzg_proof: `0x${testBlobIgnore.proof.toString('hex')}`,
},
// Incorrectly encoded blob
{
index: 2,
blob: `0x${Buffer.from(testNonEncodedBlob.data).toString('hex')}`,
// eslint-disable-next-line camelcase
kzg_commitment: `0x${testNonEncodedBlob.commitment.toString('hex')}`,
// eslint-disable-next-line camelcase
kzg_proof: `0x${testNonEncodedBlob.proof.toString('hex')}`,
},
];
});

const startExecutionHostServer = (): Promise<void> => {
Expand Down Expand Up @@ -98,39 +130,7 @@ describe('HttpBlobSinkClient', () => {
res.end(JSON.stringify({ error: 'Not Found' }));
} else {
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(
JSON.stringify({
data: [
// Correctly encoded blob
{
index: 0,
blob: `0x${Buffer.from(testEncodedBlob.data).toString('hex')}`,
// eslint-disable-next-line camelcase
kzg_commitment: `0x${testEncodedBlob.commitment.toString('hex')}`,
// eslint-disable-next-line camelcase
kzg_proof: `0x${testEncodedBlob.proof.toString('hex')}`,
},
// Correctly encoded blob, but we do not ask for it in the client
{
index: 1,
blob: `0x${Buffer.from(testBlobIgnore.data).toString('hex')}`,
// eslint-disable-next-line camelcase
kzg_commitment: `0x${testBlobIgnore.commitment.toString('hex')}`,
// eslint-disable-next-line camelcase
kzg_proof: `0x${testBlobIgnore.proof.toString('hex')}`,
},
// Incorrectly encoded blob
{
index: 2,
blob: `0x${Buffer.from(testNonEncodedBlob.data).toString('hex')}`,
// eslint-disable-next-line camelcase
kzg_commitment: `0x${testNonEncodedBlob.commitment.toString('hex')}`,
// eslint-disable-next-line camelcase
kzg_proof: `0x${testNonEncodedBlob.proof.toString('hex')}`,
},
],
}),
);
res.end(JSON.stringify({ data: blobData }));
}
} else {
res.writeHead(404, { 'Content-Type': 'application/json' });
Expand Down Expand Up @@ -238,5 +238,20 @@ describe('HttpBlobSinkClient', () => {
expect.any(Object),
);
});

it('should fall back to archive client', async () => {
const client = new TestHttpBlobSinkClient({ archiveApiUrl: `http://api.blobscan.com` });
const archiveSpy = jest.spyOn(client.getArchiveClient(), 'getBlobsFromBlock').mockResolvedValue(blobData);

const retrievedBlobs = await client.getBlobSidecar('0x1234', [testEncodedBlobHash]);
expect(retrievedBlobs).toEqual([testEncodedBlob]);
expect(archiveSpy).toHaveBeenCalledWith('0x1234');
});
});
});

// Test-only subclass exposing the protected archiveClient so tests can spy
// on the archive fallback path.
class TestHttpBlobSinkClient extends HttpBlobSinkClient {
  public getArchiveClient() {
    // Non-null assertion: tests construct this client with archiveApiUrl set,
    // so the archive client is always created.
    return this.archiveClient!;
  }
}
46 changes: 34 additions & 12 deletions yarn-project/blob-sink/src/client/http.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,21 @@ import { bufferToHex } from '@aztec/foundation/string';

import { type RpcBlock, createPublicClient, fallback, http } from 'viem';

import { createBlobArchiveClient } from '../archive/factory.js';
import type { BlobArchiveClient } from '../archive/interface.js';
import { outboundTransform } from '../encoding/index.js';
import { type BlobSinkConfig, getBlobSinkConfigFromEnv } from './config.js';
import type { BlobSinkClientInterface } from './interface.js';

export class HttpBlobSinkClient implements BlobSinkClientInterface {
private readonly log: Logger;
private readonly config: BlobSinkConfig;
private readonly fetch: typeof fetch;
protected readonly log: Logger;
protected readonly config: BlobSinkConfig;
protected readonly archiveClient: BlobArchiveClient | undefined;
protected readonly fetch: typeof fetch;

constructor(config?: BlobSinkConfig) {
this.config = config ?? getBlobSinkConfigFromEnv();
this.archiveClient = createBlobArchiveClient(this.config);
this.log = createLogger('aztec:blob-sink-client');
this.fetch = async (...args: Parameters<typeof fetch>): Promise<Response> => {
return await retry(
Expand Down Expand Up @@ -74,9 +78,9 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface {
* If requesting from the beacon node, we send the slot number
*
* 1. First attempts to get blobs from a configured blob sink
* 2. If no blob sink is configured, attempts to get blobs from a configured consensus host
*
* 3. If none configured, fails
* 2. On failure, attempts to get blobs from a configured consensus host
* 3. On failure, attempts to get blobs from an archive client (eg blobscan)
* 4. Else, fails
*
* @param blockHash - The block hash
* @param indices - The indices of the blobs to get
Expand All @@ -89,6 +93,7 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface {
const ctx = { blockHash, blobHashes: blobHashes.map(bufferToHex), indices };

if (blobSinkUrl) {
this.log.trace(`Attempting to get blobs from blob sink`, { blobSinkUrl, ...ctx });
blobs = await this.getBlobSidecarFrom(blobSinkUrl, blockHash, blobHashes, indices);
this.log.debug(`Got ${blobs.length} blobs from blob sink`, { blobSinkUrl, ...ctx });
if (blobs.length > 0) {
Expand All @@ -98,21 +103,37 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface {

if (blobs.length == 0 && l1ConsensusHostUrl) {
// The beacon api can query by slot number, so we get that first
const consensusCtx = { l1ConsensusHostUrl, ...ctx };
this.log.trace(`Attempting to get slot number for block hash`, consensusCtx);
const slotNumber = await this.getSlotNumber(blockHash);
this.log.debug(`Got slot number ${slotNumber} from consensus host for querying blobs`, {
blockHash,
l1ConsensusHostUrl,
});
this.log.debug(`Got slot number ${slotNumber} from consensus host for querying blobs`, consensusCtx);
if (slotNumber) {
this.log.trace(`Attempting to get blobs from consensus host`, { slotNumber, ...consensusCtx });
const blobs = await this.getBlobSidecarFrom(l1ConsensusHostUrl, slotNumber, blobHashes, indices);
this.log.debug(`Got ${blobs.length} blobs from consensus host`, { l1ConsensusHostUrl, slotNumber, ...ctx });
this.log.debug(`Got ${blobs.length} blobs from consensus host`, { slotNumber, ...consensusCtx });
if (blobs.length > 0) {
return blobs;
}
}
}

this.log.verbose('No blob sources available');
if (blobs.length == 0 && this.archiveClient) {
const archiveCtx = { archiveUrl: this.archiveClient.getBaseUrl(), ...ctx };
this.log.trace(`Attempting to get blobs from archive`, archiveCtx);
const allBlobs = await this.archiveClient.getBlobsFromBlock(blockHash);
if (!allBlobs) {
this.log.debug('No blobs found from archive client', archiveCtx);
return [];
}
this.log.trace(`Got ${allBlobs.length} blobs from archive client before filtering`, archiveCtx);
blobs = await getRelevantBlobs(allBlobs, blobHashes, this.log);
this.log.debug(`Got ${blobs.length} blobs from archive client`, archiveCtx);
if (blobs.length > 0) {
return blobs;
}
}

this.log.debug('No blob sources available');
return [];
}

Expand Down Expand Up @@ -234,6 +255,7 @@ async function getRelevantBlobs(data: any, blobHashes: Buffer[], logger: Logger)
.filter((b: BlobJson) => {
const commitment = Buffer.from(b.kzg_commitment.slice(2), 'hex');
const blobHash = Blob.getEthVersionedBlobHash(commitment);
logger.trace(`Filtering blob with hash ${blobHash.toString('hex')}`);
return blobHashes.some(hash => hash.equals(blobHash));
})
// Attempt to deserialise the blob
Expand Down
13 changes: 6 additions & 7 deletions yarn-project/blob-sink/src/server/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,13 @@ import {
import { type ConfigMappingsType, getConfigFromMappings, pickConfigMappings } from '@aztec/foundation/config';
import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config';

import { type BlobSinkArchiveApiConfig, blobSinkArchiveApiConfigMappings } from '../archive/config.js';

export type BlobSinkConfig = {
port?: number;
archiveApiUrl?: string;
dataStoreConfig?: DataStoreConfig;
} & Partial<Pick<L1ReaderConfig, 'l1ChainId' | 'l1RpcUrls'> & Pick<L1ContractAddresses, 'rollupAddress'>>;
} & BlobSinkArchiveApiConfig &
Partial<Pick<L1ReaderConfig, 'l1RpcUrls'> & Pick<L1ContractAddresses, 'rollupAddress'>>;

export const blobSinkConfigMappings: ConfigMappingsType<BlobSinkConfig> = {
port: {
Expand All @@ -22,11 +24,8 @@ export const blobSinkConfigMappings: ConfigMappingsType<BlobSinkConfig> = {
...dataConfigMappings,
description: 'The configuration for the data store',
},
archiveApiUrl: {
env: 'BLOB_SINK_ARCHIVE_API_URL',
description: 'The URL of the archive API',
},
...pickConfigMappings(l1ReaderConfigMappings, ['l1ChainId', 'l1RpcUrls']),
...blobSinkArchiveApiConfigMappings,
...pickConfigMappings(l1ReaderConfigMappings, ['l1RpcUrls']),
...pickConfigMappings(l1ContractAddressesMapping, ['rollupAddress']),
};

Expand Down
2 changes: 2 additions & 0 deletions yarn-project/yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -406,6 +406,8 @@ __metadata:
typescript: "npm:^5.0.4"
viem: "npm:2.23.7"
zod: "npm:^3.23.8"
bin:
blob-sink-client: ./dest/client/bin/index.js
languageName: unknown
linkType: soft

Expand Down