Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions spartan/scripts/deploy_network.sh
Original file line number Diff line number Diff line change
Expand Up @@ -522,6 +522,8 @@ FISHERMAN_MNEMONIC_START_INDEX = ${FISHERMAN_MNEMONIC_START_INDEX}
FULL_NODE_REPLICAS = ${FULL_NODE_REPLICAS:-1}

PROVER_FAILED_PROOF_STORE = "${PROVER_FAILED_PROOF_STORE}"
PROVER_PROOF_STORE = "${PROVER_PROOF_STORE:-}"
PROVER_BROKER_DEBUG_REPLAY_ENABLED = ${PROVER_BROKER_DEBUG_REPLAY_ENABLED:-false}
DEPLOY_ARCHIVAL_NODE = ${DEPLOY_ARCHIVAL_NODE}
PROVER_REPLICAS = ${PROVER_REPLICAS}

Expand Down
3 changes: 3 additions & 0 deletions spartan/terraform/deploy-aztec-infra/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -311,6 +311,7 @@ locals {
"node.node.proverRealProofs" = var.PROVER_REAL_PROOFS
"node.node.logLevel" = var.LOG_LEVEL
"node.node.env.PROVER_FAILED_PROOF_STORE" = var.PROVER_FAILED_PROOF_STORE
"node.node.env.PROVER_PROOF_STORE" = var.PROVER_PROOF_STORE
"node.node.env.DEBUG_FORCE_TX_PROOF_VERIFICATION" = var.DEBUG_FORCE_TX_PROOF_VERIFICATION
"node.node.env.KEY_INDEX_START" = var.PROVER_PUBLISHER_MNEMONIC_START_INDEX
"node.node.env.PUBLISHER_KEY_INDEX_START" = var.PROVER_PUBLISHER_MNEMONIC_START_INDEX
Expand All @@ -324,13 +325,15 @@ locals {
"broker.node.proverRealProofs" = var.PROVER_REAL_PROOFS
"broker.node.logLevel" = var.LOG_LEVEL
"broker.node.env.BOOTSTRAP_NODES" = "asdf"
"broker.node.env.PROVER_BROKER_DEBUG_REPLAY_ENABLED" = var.PROVER_BROKER_DEBUG_REPLAY_ENABLED
"agent.node.proverRealProofs" = var.PROVER_REAL_PROOFS
"agent.node.env.PROVER_AGENT_POLL_INTERVAL_MS" = var.PROVER_AGENT_POLL_INTERVAL_MS
"agent.replicaCount" = var.PROVER_REPLICAS
"agent.node.env.BOOTSTRAP_NODES" = "asdf"
"agent.node.env.PROVER_AGENT_COUNT" = var.PROVER_AGENTS_PER_PROVER
"agent.node.env.PROVER_TEST_DELAY_TYPE" = var.PROVER_TEST_DELAY_TYPE
"agent.node.env.PROVER_AGENT_PROOF_TYPES" = join(",", var.PROVER_AGENT_PROOF_TYPES)
"agent.node.env.PROVER_PROOF_STORE" = var.PROVER_PROOF_STORE
"agent.node.otelIncludeMetrics" = var.PROVER_AGENT_INCLUDE_METRICS
"agent.node.logLevel" = var.LOG_LEVEL
"node.node.env.L1_PRIORITY_FEE_BUMP_PERCENTAGE" = var.PROVER_L1_PRIORITY_FEE_BUMP_PERCENTAGE
Expand Down
13 changes: 13 additions & 0 deletions spartan/terraform/deploy-aztec-infra/variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -576,6 +576,19 @@ variable "PROVER_FAILED_PROOF_STORE" {
default = ""
}

# Remote store backing prover inputs/outputs; the empty-string default means "not configured".
variable "PROVER_PROOF_STORE" {
  type        = string
  nullable    = false
  default     = ""
  description = "Optional GCS/S3/file URI to store proof inputs and outputs (e.g. gs://bucket/path, s3://bucket/path, file:///path)"
}

# Feature flag for the broker's debug-replay mode; forwarded to the broker pod as
# the PROVER_BROKER_DEBUG_REPLAY_ENABLED env var.
variable "PROVER_BROKER_DEBUG_REPLAY_ENABLED" {
  description = "Enable debug replay mode for the prover broker to replay proving jobs from stored inputs"
  type        = bool
  # Consistent with the sibling PROVER_PROOF_STORE variable: reject an explicit
  # null so downstream interpolation always sees a concrete bool.
  nullable = false
  default  = false
}

variable "RPC_REPLICAS" {
description = "The number of RPC replicas"
type = string
Expand Down
4 changes: 2 additions & 2 deletions yarn-project/aztec/src/cli/cmds/start_prover_agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@ import { Agent, makeUndiciFetch } from '@aztec/foundation/json-rpc/undici';
import type { LogFn } from '@aztec/foundation/log';
import { buildServerCircuitProver } from '@aztec/prover-client';
import {
InlineProofStore,
type ProverAgentConfig,
ProvingAgent,
createProofStore,
createProvingJobBrokerClient,
proverAgentConfigMappings,
} from '@aztec/prover-client/broker';
Expand Down Expand Up @@ -55,7 +55,7 @@ export async function startProverAgent(

const telemetry = await initTelemetryClient(extractRelevantOptions(options, telemetryClientConfigMappings, 'tel'));
const prover = await buildServerCircuitProver(config, telemetry);
const proofStore = new InlineProofStore();
const proofStore = await createProofStore(config.proofStore);
const agents = times(
config.proverAgentCount,
() => new ProvingAgent(broker, proofStore, prover, config.proverAgentProofTypes, config.proverAgentPollIntervalMs),
Expand Down
6 changes: 5 additions & 1 deletion yarn-project/aztec/src/cli/cmds/start_prover_broker.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import type { LogFn } from '@aztec/foundation/log';
import {
type ProverBrokerConfig,
ProvingJobBrokerSchema,
ProvingJobBrokerSchemaWithDebug,
createAndStartProvingBroker,
proverBrokerConfigMappings,
} from '@aztec/prover-client/broker';
Expand Down Expand Up @@ -59,7 +60,10 @@ export async function startProverBroker(
);
}

services.proverBroker = [broker, ProvingJobBrokerSchema];
services.proverBroker = [
broker,
config.proverBrokerDebugReplayEnabled ? ProvingJobBrokerSchemaWithDebug : ProvingJobBrokerSchema,
];
signalHandlers.push(() => broker.stop());

return { broker, config };
Expand Down
2 changes: 2 additions & 0 deletions yarn-project/foundation/src/config/env_var.ts
Original file line number Diff line number Diff line change
Expand Up @@ -156,8 +156,10 @@ export type EnvVar =
| 'PROVER_BROKER_BATCH_INTERVAL_MS'
| 'PROVER_BROKER_BATCH_SIZE'
| 'PROVER_BROKER_MAX_EPOCHS_TO_KEEP_RESULTS_FOR'
| 'PROVER_BROKER_DEBUG_REPLAY_ENABLED'
| 'PROVER_CANCEL_JOBS_ON_STOP'
| 'PROVER_COORDINATION_NODE_URLS'
| 'PROVER_PROOF_STORE'
| 'PROVER_FAILED_PROOF_STORE'
| 'PROVER_NODE_FAILED_EPOCH_STORE'
| 'PROVER_NODE_DISABLE_PROOF_PUBLISH'
Expand Down
14 changes: 6 additions & 8 deletions yarn-project/prover-client/src/prover-client/prover-client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,20 +29,16 @@ export class ProverClient implements EpochProverManager {
private running = false;
private agents: ProvingAgent[] = [];

private proofStore: ProofStore;
private failedProofStore: ProofStore | undefined;

private constructor(
private config: ProverClientConfig,
private worldState: ForkMerkleTreeOperations & ReadonlyWorldStateAccess,
private orchestratorClient: ProvingJobProducer,
private proofStore: ProofStore,
private failedProofStore: ProofStore | undefined,
private agentClient?: ProvingJobConsumer,
private telemetry: TelemetryClient = getTelemetryClient(),
private log: Logger = createLogger('prover-client:tx-prover'),
) {
this.proofStore = new InlineProofStore();
this.failedProofStore = this.config.failedProofStore ? createProofStore(this.config.failedProofStore) : undefined;
}
) {}

public createEpochProver(): EpochProver {
const bindings = this.log.getBindings();
Expand Down Expand Up @@ -118,7 +114,9 @@ export class ProverClient implements EpochProverManager {
broker: ProvingJobBroker,
telemetry: TelemetryClient = getTelemetryClient(),
) {
const prover = new ProverClient(config, worldState, broker, broker, telemetry);
const proofStore = await createProofStore(config.proofStore);
const failedProofStore = config.failedProofStore ? await createProofStore(config.failedProofStore) : undefined;
const prover = new ProverClient(config, worldState, broker, proofStore, failedProofStore, broker, telemetry);
await prover.start();
return prover;
}
Expand Down
13 changes: 13 additions & 0 deletions yarn-project/prover-client/src/proving_broker/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ export const ProverBrokerConfig = z.object({
proverBrokerBatchIntervalMs: z.number().int().nonnegative(),
/** The maximum number of epochs to keep results for */
proverBrokerMaxEpochsToKeepResultsFor: z.number().int().nonnegative(),
/** Enable debug replay mode for replaying proving jobs from stored inputs */
proverBrokerDebugReplayEnabled: z.boolean(),
});

export type ProverBrokerConfig = z.infer<typeof ProverBrokerConfig> &
Expand Down Expand Up @@ -74,6 +76,11 @@ export const proverBrokerConfigMappings: ConfigMappingsType<ProverBrokerConfig>
parseEnv: (val: string | undefined) => (val ? +val : undefined),
description: "The size of the prover broker's database. Will override the dataStoreMapSizeKb if set.",
},
proverBrokerDebugReplayEnabled: {
env: 'PROVER_BROKER_DEBUG_REPLAY_ENABLED',
description: 'Enable debug replay mode for replaying proving jobs from stored inputs',
...booleanConfigHelper(false),
},
...dataConfigMappings,
...l1ReaderConfigMappings,
...pickConfigMappings(chainConfigMappings, ['rollupVersion']),
Expand Down Expand Up @@ -102,6 +109,8 @@ export const ProverAgentConfig = z.object({
proverTestVerificationDelayMs: z.number().optional(),
/** Whether to abort pending proving jobs when the orchestrator is cancelled */
cancelJobsOnStop: z.boolean(),
/** Where to store proving results. Must be accessible to both prover node and agents. If not set will inline-encode the parameters */
proofStore: z.string().optional(),
});

export type ProverAgentConfig = z.infer<typeof ProverAgentConfig>;
Expand Down Expand Up @@ -162,4 +171,8 @@ export const proverAgentConfigMappings: ConfigMappingsType<ProverAgentConfig> =
'When false (default), jobs remain in the broker queue and can be reused on restart/reorg.',
...booleanConfigHelper(false),
},
proofStore: {
env: 'PROVER_PROOF_STORE',
description: 'Optional proof input store for the prover',
},
};
Original file line number Diff line number Diff line change
@@ -1,42 +1,20 @@
import { createLogger } from '@aztec/foundation/log';
import { createFileStore } from '@aztec/stdlib/file-store';

import { GoogleCloudStorageProofStore } from './gcs_proof_store.js';
import { FileStoreProofStore } from './file_store_proof_store.js';
import { InlineProofStore } from './inline_proof_store.js';
import type { ProofStore } from './proof_store.js';

export function createProofStore(config: string | undefined, logger = createLogger('prover-client:proof-store')) {
if (config === undefined) {
export async function createProofStore(
config: string | undefined,
logger = createLogger('prover-client:proof-store'),
): Promise<ProofStore> {
if (!config) {
logger.info('Creating inline proof store');
return new InlineProofStore();
} else if (config.startsWith('gs://')) {
try {
const url = new URL(config);
const bucket = url.host;
const path = url.pathname.replace(/^\/+/, '');
logger.info(`Creating google cloud proof store at ${bucket}`, { bucket, path });
return new GoogleCloudStorageProofStore(bucket, path);
} catch {
throw new Error(
`Invalid google cloud proof store definition: '${config}'. Supported values are 'gs://bucket-name/path/to/store'.`,
);
}
} else {
throw new Error(`Unknown proof store config: '${config}'. Supported values are 'gs://bucket-name/path/to/store'.`);
}
}

export function createProofStoreForUri(
uri: string,
logger = createLogger('prover-client:proof-store'),
): Pick<ProofStore, 'getProofInput' | 'getProofOutput'> {
if (uri.startsWith('data://')) {
return createProofStore(undefined, logger);
} else if (uri.startsWith('gs://')) {
const url = new URL(uri);
const basePath = url.pathname.replace(/^\/+/, '').split('/').slice(0, -3);
url.pathname = basePath.join('/');
return createProofStore(uri, logger);
} else {
throw new Error(`Unknown proof store config: '${uri}'. Supported protocols are 'data://' and 'gs://'.`);
}
const fileStore = await createFileStore(config, logger);
logger.info(`Creating file store proof store at ${config}`);
return new FileStoreProofStore(fileStore);
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc';
import type { FileStore } from '@aztec/stdlib/file-store';
import {
type ProofUri,
type ProvingJobId,
type ProvingJobInputs,
type ProvingJobInputsMap,
ProvingJobResult,
type ProvingJobResultsMap,
getProvingJobInputClassFor,
} from '@aztec/stdlib/interfaces/server';
import { ProvingRequestType } from '@aztec/stdlib/proofs';

import type { ProofStore } from './proof_store.js';

// Top-level directories inside the file store for proving-job inputs and outputs.
const INPUTS_PATH = 'inputs';
const OUTPUTS_PATH = 'outputs';

/**
 * Proof store backed by a generic FileStore, letting any backend the FileStore
 * abstraction supports (GCS, S3, local filesystem) hold proving job data.
 *
 * Layout: inputs are written to `inputs/<request type name>/<job id>` as raw
 * serialized buffers; results go to `outputs/<request type name>/<job id>.json`
 * as JSON. The request type is recovered from the URI path when reading inputs.
 */
export class FileStoreProofStore implements ProofStore {
  constructor(private readonly fileStore: FileStore) {}

  /**
   * Serializes and persists the inputs for a proving job.
   * @returns The URI the inputs were stored at.
   */
  async saveProofInput<T extends ProvingRequestType>(
    id: ProvingJobId,
    type: T,
    inputs: ProvingJobInputsMap[T],
  ): Promise<ProofUri> {
    const location = [INPUTS_PATH, ProvingRequestType[type], id].join('/');
    const storedAt = await this.fileStore.save(location, inputs.toBuffer());
    return storedAt as ProofUri;
  }

  /**
   * Persists a proving job result as a JSON document.
   * @returns The URI the result was stored at.
   */
  async saveProofOutput<T extends ProvingRequestType>(
    id: ProvingJobId,
    type: T,
    result: ProvingJobResultsMap[T],
  ): Promise<ProofUri> {
    const payload = { type, result } as ProvingJobResult;
    const location = [OUTPUTS_PATH, ProvingRequestType[type], `${id}.json`].join('/');
    const storedAt = await this.fileStore.save(location, Buffer.from(jsonStringify(payload), 'utf-8'));
    return storedAt as ProofUri;
  }

  /**
   * Reads back proof inputs previously written by saveProofInput, using the
   * request type embedded in the URI path to pick the deserializer.
   * @throws Error wrapping any read/parse failure with the offending URI.
   */
  async getProofInput(uri: ProofUri): Promise<ProvingJobInputs> {
    try {
      const data = await this.fileStore.read(uri);
      const type = this.extractTypeFromUri(uri);
      const inputs = getProvingJobInputClassFor(type).fromBuffer(data);
      return { inputs, type } as ProvingJobInputs;
    } catch (err) {
      throw new Error(`Error getting proof input at ${uri}: ${err}`);
    }
  }

  /**
   * Reads back a proving job result previously written by saveProofOutput.
   * @throws Error wrapping any read/parse failure with the offending URI.
   */
  async getProofOutput(uri: ProofUri): Promise<ProvingJobResult> {
    try {
      const raw = await this.fileStore.read(uri);
      return jsonParseWithSchema(raw.toString('utf-8'), ProvingJobResult);
    } catch (err) {
      throw new Error(`Error getting proof output at ${uri}: ${err}`);
    }
  }

  /**
   * Recovers the ProvingRequestType from a stored URI: the type name is the
   * second-to-last path segment (the last segment is the job id / file name).
   */
  private extractTypeFromUri(uri: string): ProvingRequestType {
    const segments = new URL(uri).pathname.split('/').filter(Boolean);
    const typeName = segments.at(-2);
    // Truthy guard matches the write path: names are non-empty enum member names.
    const parsed = typeName ? ProvingRequestType[typeName as keyof typeof ProvingRequestType] : undefined;
    if (parsed === undefined) {
      throw new Error(`Unrecognized proof type ${typeName} in URI ${uri}`);
    }
    return parsed;
  }
}

This file was deleted.

Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
export * from './proof_store.js';
export * from './inline_proof_store.js';
export * from './factory.js';
export * from './gcs_proof_store.js';
export * from './file_store_proof_store.js';
Loading
Loading