diff --git a/.github/workflows/publish-aztec-packages.yml b/.github/workflows/publish-aztec-packages.yml index 9da6aad15897..54983f29ea10 100644 --- a/.github/workflows/publish-aztec-packages.yml +++ b/.github/workflows/publish-aztec-packages.yml @@ -191,6 +191,36 @@ jobs: run: | earthly-ci --no-output --push ./yarn-project+export-cli-wallet --DIST_TAG=${{ env.GIT_COMMIT }} --ARCH=arm64 + build-blob-sink-x86: + needs: [configure, build-aztec-x86] + runs-on: ${{ needs.configure.outputs.username }}-x86 + steps: + - uses: actions/checkout@v4 + with: + ref: "${{ env.GIT_COMMIT }}" + - uses: ./.github/ci-setup-action + with: + concurrency_key: build-aztec-blob-sink-x86_64 + dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" + - name: Build & push aztec blob sink image x86-64 + run: | + earthly-ci --no-output --push ./yarn-project+export-blob-sink --DIST_TAG=${{ env.GIT_COMMIT }} --ARCH=x86_64 + + build-blob-sink-arm: + needs: [configure, build-aztec-arm] + runs-on: ${{ needs.configure.outputs.username }}-arm + steps: + - uses: actions/checkout@v4 + with: + ref: "${{ env.GIT_COMMIT }}" + - uses: ./.github/ci-setup-action + with: + concurrency_key: build-aztec-blob-sink-arm64 + dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" + - name: Build & push aztec blob sink image arm + run: | + earthly-ci --no-output --push ./yarn-project+export-blob-sink --DIST_TAG=${{ env.GIT_COMMIT }} --ARCH=arm64 + publish-manifests: needs: - configure diff --git a/aztec-up/bin/aztec-install b/aztec-up/bin/aztec-install index 470664f70400..51c1bcb41ab6 100755 --- a/aztec-up/bin/aztec-install +++ b/aztec-up/bin/aztec-install @@ -107,6 +107,7 @@ if [ -z "${SKIP_PULL:-}" ]; then pull_container aztec-nargo pull_container aztec pull_container cli-wallet + pull_container blob-sink fi # Download the Docker Compose file. Used by aztec. 
diff --git a/aztec-up/bin/docker-compose.sandbox.yml b/aztec-up/bin/docker-compose.sandbox.yml index 999aa5676850..184f85fb3787 100644 --- a/aztec-up/bin/docker-compose.sandbox.yml +++ b/aztec-up/bin/docker-compose.sandbox.yml @@ -15,6 +15,13 @@ services: FORK_BLOCK_NUMBER: ANVIL_PORT: ${ANVIL_PORT:-8545} + # TODO: add a readiness probe to this + # TODO: delete all of these extra images and just have one, then symlink them??? + blob-sink: + image: aztecprotocol/blob-sink + ports: + - "${BLOB_SINK_PORT:-5052}:${BLOB_SINK_PORT:-5052}" + aztec: image: "aztecprotocol/aztec" ports: @@ -29,6 +36,7 @@ services: SEQ_TX_POLLING_INTERVAL_MS: 50 WS_BLOCK_CHECK_INTERVAL_MS: 50 ARCHIVER_VIEM_POLLING_INTERVAL_MS: 500 + SEQ_BLOB_SINK_URL: http://blob-sink:${BLOB_SINK_PORT:-5052} PXE_PORT: ${PXE_PORT:-8080} PORT: ${AZTEC_NODE_PORT:-8080} TEST_ACCOUNTS: ${TEST_ACCOUNTS:-true} @@ -36,4 +44,5 @@ services: - ./log:/usr/src/yarn-project/aztec/log:rw depends_on: - ethereum + - blob-sink command: "start --sandbox" diff --git a/boxes/docker-compose.yml b/boxes/docker-compose.yml index a19c72fbc17a..60d571a0d23b 100644 --- a/boxes/docker-compose.yml +++ b/boxes/docker-compose.yml @@ -18,9 +18,17 @@ services: SEQ_TX_POLLING_INTERVAL_MS: 50 WS_BLOCK_CHECK_INTERVAL_MS: 50 ARCHIVER_VIEM_POLLING_INTERVAL_MS: 500 + SEQ_BLOB_SINK_URL: http://blob-sink:5052 depends_on: - ethereum + blob-sink: + image: aztecprotocol/blob-sink:${AZTEC_DOCKER_TAG:-latest} + environment: + PORT: 5052 + DEBUG: "aztec:*" + DEBUG_COLORS: "true" + boxes: image: aztecprotocol/boxes:${AZTEC_DOCKER_TAG:-latest} entrypoint: > diff --git a/build_manifest.yml b/build_manifest.yml index d469a72fd34f..dbffc1c63aeb 100644 --- a/build_manifest.yml +++ b/build_manifest.yml @@ -242,6 +242,13 @@ cli-wallet: - yarn-project multiarch: buildx +blob-sink: + buildDir: yarn-project + projectDir: yarn-project/blob-sink + dependencies: + - yarn-project + multiarch: buildx + # Builds all the boxes. 
They are then independently tested in the container. boxes: buildDir: boxes diff --git a/scripts/run_native_testnet.sh b/scripts/run_native_testnet.sh index 4cf6d83900a1..2e6bc2c433ba 100755 --- a/scripts/run_native_testnet.sh +++ b/scripts/run_native_testnet.sh @@ -128,7 +128,8 @@ BASE_CMD="INTERLEAVED=$INTERLEAVED ./yarn-project/end-to-end/scripts/native_netw \"./validators.sh $NUM_VALIDATORS\" \ $PROVER_SCRIPT \ ./pxe.sh \ - ./transaction-bot.sh" + ./transaction-bot.sh \ + ./blob-sink.sh" # Execute the command eval $BASE_CMD diff --git a/spartan/aztec-network/files/config/setup-service-addresses.sh b/spartan/aztec-network/files/config/setup-service-addresses.sh index 063c84a16e5d..0a0280763b56 100644 --- a/spartan/aztec-network/files/config/setup-service-addresses.sh +++ b/spartan/aztec-network/files/config/setup-service-addresses.sh @@ -57,6 +57,8 @@ if [ "${ETHEREUM_EXTERNAL_HOST}" != "" ]; then ETHEREUM_ADDR="${ETHEREUM_EXTERNAL_HOST}" elif [ "${NETWORK_PUBLIC}" = "true" ]; then ETHEREUM_ADDR=$(get_service_address "ethereum" "${ETHEREUM_PORT}") +elif [ "${ETHEREUM__NAMESPACE}" != "" ]; then + ETHEREUM_ADDR="http://${ETHEREUM_EL_SERVICE_NAME}.${ETHEREUM__NAMESPACE}:${ETHEREUM_PORT}" else ETHEREUM_ADDR="http://${SERVICE_NAME}-ethereum.${NAMESPACE}:${ETHEREUM_PORT}" fi diff --git a/spartan/aztec-network/templates/_helpers.tpl b/spartan/aztec-network/templates/_helpers.tpl index 9191dab8895a..7e66acdc7b9f 100644 --- a/spartan/aztec-network/templates/_helpers.tpl +++ b/spartan/aztec-network/templates/_helpers.tpl @@ -143,8 +143,14 @@ Service Address Setup Container value: "{{ .Values.telemetry.otelCollectorEndpoint }}" - name: EXTERNAL_ETHEREUM_HOST value: "{{ .Values.ethereum.externalHost }}" + - name: ETHEREUM__NAMESPACE + value: "{{ .Values.ethereum.namespace }}" + - name: ETHEREUM_EL_SERVICE_NAME + value: "{{ .Values.ethereum.elServiceName }}" + - name: ETHEREUM_CL_SERVICE_NAME + value: "{{ .Values.ethereum.clServiceName }}" - name: ETHEREUM_PORT - value: "{{ 
.Values.ethereum.service.port }}" + value: "{{ .Values.ethereum.elPort }}" - name: EXTERNAL_BOOT_NODE_HOST value: "{{ .Values.bootNode.externalHost }}" - name: BOOT_NODE_PORT diff --git a/spartan/aztec-network/templates/reth.yaml b/spartan/aztec-network/templates/reth.yaml deleted file mode 100644 index 938d48b649ab..000000000000 --- a/spartan/aztec-network/templates/reth.yaml +++ /dev/null @@ -1,147 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ include "aztec-network.fullname" . }}-ethereum - labels: - {{- include "aztec-network.labels" . | nindent 4 }} -spec: - replicas: {{ .Values.ethereum.replicas }} - selector: - matchLabels: - {{- include "aztec-network.selectorLabels" . | nindent 6 }} - app: ethereum - template: - metadata: - labels: - {{- include "aztec-network.selectorLabels" . | nindent 8 }} - app: ethereum - spec: - {{- if .Values.network.public }} - hostNetwork: true - {{- end }} - initContainers: - - name: prepare-genesis - image: node:18-alpine - command: ["/bin/sh", "-c"] - args: - - | - cd /tmp - npm init -y - npm install ethers@6 - cat > derive.js << 'EOF' - const { ethers } = require('ethers'); - const fs = require('fs'); - - async function main() { - const mnemonic = process.env.DEPLOYMENT_MNEMONIC; - const wallet = ethers.Wallet.fromPhrase(mnemonic); - - const genesis = JSON.parse(fs.readFileSync('/genesis-template/genesis.json', 'utf8')); - - genesis.alloc[wallet.address] = { - balance: '0x3635c9adc5dea00000' // 1000 ETH in wei - }; - - // We rely on the deterministic deployment proxy to deploy the contracts - // It comes preloaded on anvil (https://book.getfoundry.sh/tutorials/create2-tutorial) - // But we need to do it ourselves for reth - // Addresses/tx in https://github.com/Arachnid/deterministic-deployment-proxy/tree/master - const deployer = '0x3fab184622dc19b6109349b94811493bf2a45362' - genesis.alloc[deployer] = { - balance: '0x3635c9adc5dea00000' // 1000 ETH in wei - }; - - 
fs.writeFileSync('/genesis-output/genesis.json', JSON.stringify(genesis, null, 2)); - } - - main().catch(console.error); - EOF - node derive.js - env: - - name: DEPLOYMENT_MNEMONIC - value: {{ .Values.aztec.l1DeploymentMnemonic }} - volumeMounts: - - name: genesis-template - mountPath: /genesis-template - - name: genesis-output - mountPath: /genesis-output - containers: - - name: ethereum - image: "{{ .Values.images.reth.image }}" - imagePullPolicy: {{ .Values.images.reth.pullPolicy }} - command: ["/bin/sh", "-c"] - args: - - >- - reth node {{ include "helpers.flag" (list "http.addr" "0.0.0.0") }} - {{- include "helpers.flag" (list "http.port" .Values.ethereum.service.port) }} - {{- include "helpers.flag" (list "builder.gaslimit" .Values.ethereum.gasLimit) }} - {{- include "helpers.flag" (list "txpool.gas-limit" .Values.ethereum.gasLimit) }} - {{- include "helpers.flag" (list "dev.block-time" .Values.ethereum.blockTime) }} - --chain /genesis/genesis.json - --datadir /data - --dev - ports: - - containerPort: {{ .Values.ethereum.service.port }} - name: reth - volumeMounts: - - name: shared-volume - mountPath: /data - - name: genesis-output - mountPath: /genesis - resources: - {{- toYaml .Values.ethereum.resources | nindent 12 }} - volumes: - - name: shared-volume - persistentVolumeClaim: - claimName: {{ include "aztec-network.fullname" . }}-ethereum-pvc - - name: genesis-template - configMap: - name: {{ include "aztec-network.fullname" . }}-reth-genesis - - name: genesis-output - emptyDir: {} -{{if not .Values.network.public }} ---- -apiVersion: v1 -kind: Service -metadata: - name: {{ include "aztec-network.fullname" . }}-ethereum - labels: - {{- include "aztec-network.labels" . | nindent 4 }} -spec: - type: {{ .Values.ethereum.service.type }} - selector: - {{- include "aztec-network.selectorLabels" . 
| nindent 4 }} - app: ethereum - ports: - - protocol: TCP - port: {{ .Values.ethereum.service.port }} - targetPort: {{ .Values.ethereum.service.targetPort }} - {{- if and (eq .Values.ethereum.service.type "NodePort") .Values.ethereum.service.nodePort }} - nodePort: {{ .Values.ethereum.service.nodePort }} - {{- end }} -{{ end }} ---- -apiVersion: v1 -kind: ConfigMap -metadata: - name: {{ include "aztec-network.fullname" . }}-reth-genesis - labels: - {{- include "aztec-network.labels" . | nindent 4 }} -data: - genesis.json: | - {{ .Files.Get "files/config/genesis.json" | nindent 4 }} ---- -{{- if gt (.Values.ethereum.replicas | int) 0 }} -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: {{ include "aztec-network.fullname" . }}-ethereum-pvc - labels: - {{- include "aztec-network.labels" . | nindent 4 }} -spec: - accessModes: ["ReadWriteOnce"] - resources: - requests: - storage: {{ .Values.ethereum.storage }} -{{- end }} ---- \ No newline at end of file diff --git a/spartan/aztec-network/templates/setup-l2-contracts.yaml b/spartan/aztec-network/templates/setup-l2-contracts.yaml index 218db443662c..440f473428df 100644 --- a/spartan/aztec-network/templates/setup-l2-contracts.yaml +++ b/spartan/aztec-network/templates/setup-l2-contracts.yaml @@ -82,10 +82,17 @@ spec: value: "{{ .Values.network.public }}" - name: NAMESPACE value: {{ .Release.Namespace }} + # TODO: clean up these variables + - name: ETHEREUM_HOST + value: "http://${ETHEREUM_EL_SERVICE_NAME}.${ETHEREUM__NAMESPACE}:${ETHEREUM_PORT}" - name: EXTERNAL_ETHEREUM_HOST value: "{{ .Values.ethereum.externalHost }}" + - name: ETHEREUM_EL_SERVICE_NAME + value: "{{ .Values.ethereum.elServiceName }}" + - name: ETHEREUM_CL_SERVICE_NAME + value: "{{ .Values.ethereum.clServiceName }}" - name: ETHEREUM_PORT - value: "{{ .Values.ethereum.service.port }}" + value: "{{ .Values.ethereum.elPort }}" - name: EXTERNAL_BOOT_NODE_HOST value: "{{ .Values.bootNode.externalHost }}" - name: BOOT_NODE_PORT diff --git 
a/spartan/aztec-network/values.yaml b/spartan/aztec-network/values.yaml index 26cf80bb5f7e..a17a841197a4 100644 --- a/spartan/aztec-network/values.yaml +++ b/spartan/aztec-network/values.yaml @@ -185,30 +185,14 @@ bot: cpu: "200m" ethereum: - externalHost: "" - replicas: 1 chainId: 1337 - blockTime: 12sec # 1 billion gas limit # helps ensure we can deploy public contracts gasLimit: "1000000000" - args: "" - service: - type: ClusterIP - port: 8545 - targetPort: 8545 - nodePort: "" - readinessProbe: - initialDelaySeconds: 5 - periodSeconds: 10 - timeoutSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - resources: - requests: - memory: "2Gi" - cpu: "200m" - storage: "80Gi" + namespace: "kt-ethereum-testnet" + elServiceName: el-1-reth-lighthouse + clServiceName: cl-1-lighthouse-reth + elPort: 8545 proverAgent: service: diff --git a/spartan/ethereum-testnet/README.md b/spartan/ethereum-testnet/README.md new file mode 100644 index 000000000000..bd2da9730d54 --- /dev/null +++ b/spartan/ethereum-testnet/README.md @@ -0,0 +1,20 @@ +## Ethereum Testnet + +This directory contains scripts to install a local Ethereum testnet using Kurtosis. + +To install: +```bash +./install_kurtosis.sh +``` + +To deploy the testnet: +```bash +export ENCLAVE_NAME=ethereum-testnet # defaults to this +./deploy.sh +``` +This will deploy a testnet with into the namespace `kt-{ENCLAVE_NAME}`. Defaults to `kt-ethereum-testnet`. 
+ +To destroy the testnet: +```bash +./teardown.sh +``` diff --git a/spartan/ethereum-testnet/config/kurtosis-config.yml b/spartan/ethereum-testnet/config/kurtosis-config.yml new file mode 100644 index 000000000000..597db865181c --- /dev/null +++ b/spartan/ethereum-testnet/config/kurtosis-config.yml @@ -0,0 +1,11 @@ +config-version: 2 +should-send-metrics: false +kurtosis-clusters: + docker: + type: "docker" + kind: + type: "kubernetes" + config: + kubernetes-cluster-name: "kind-kind" + storage-class: "standard" + enclave-size-in-megabytes: 10 diff --git a/spartan/ethereum-testnet/deploy.sh b/spartan/ethereum-testnet/deploy.sh new file mode 100755 index 000000000000..f673ee352bcf --- /dev/null +++ b/spartan/ethereum-testnet/deploy.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +REPO=$(git rev-parse --show-toplevel) + +ENCLAVE_NAME=${ENCLAVE_NAME:-"ethereum-testnet"} + +# Check if the kurtosis CLI is installed +if ! command -v kurtosis &> /dev/null +then + echo "Kurtosis CLI not found, run ${REPO}/scripts/install_kurtosis.sh" + exit 1 +fi + +# Run the kurtosis gateway in the background +kurtosis gateway start & + +function cleanup() { + # kill everything in our process group except our process + trap - SIGTERM && kill -9 $(pgrep -g $$ | grep -v $$) $(jobs -p) $STERN_PID &>/dev/null || true +} +trap cleanup SIGINT SIGTERM EXIT + +echo "Deploying Ethereum Testnet" +kurtosis run --enclave ${ENCLAVE_NAME} github.com/ethpandaops/ethereum-package --args-file ${REPO}/spartan/ethereum-testnet/network_params.yaml --image-download always + +echo "Ethereum Testnet deployed successfully" diff --git a/spartan/ethereum-testnet/install_kurtosis.sh b/spartan/ethereum-testnet/install_kurtosis.sh new file mode 100755 index 000000000000..495f0f2ccb73 --- /dev/null +++ b/spartan/ethereum-testnet/install_kurtosis.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +REPO=$(git rev-parse --show-toplevel) + +if command -v kurtosis &> /dev/null; then + echo "Kurtosis CLI already installed" + exit 0 +fi + +echo 
"Installing Kurtosis CLI" + +# Installed from a their custom apt source +echo "deb [trusted=yes] https://apt.fury.io/kurtosis-tech/ /" | sudo tee /etc/apt/sources.list.d/kurtosis.list +sudo apt update +sudo apt install kurtosis-cli + +# Install Kurtosis config +echo "Installing Kurtosis config" +cp ${REPO}/config/kurtosis-config.yml "$(kurtosis config path)" + +# Set kurtosis to use the cluster configuration +echo "Setting Kurtosis to kind cluster configuration" +kurtosis cluster set kind + +echo "Kurtosis installed successfully" diff --git a/spartan/ethereum-testnet/kurtosis-config.yaml b/spartan/ethereum-testnet/kurtosis-config.yaml new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/spartan/ethereum-testnet/network_params.yaml b/spartan/ethereum-testnet/network_params.yaml new file mode 100644 index 000000000000..9f11ed536294 --- /dev/null +++ b/spartan/ethereum-testnet/network_params.yaml @@ -0,0 +1,12 @@ +participants: + - el_type: reth + el_image: ghcr.io/paradigmxyz/reth + cl_type: lighthouse + cl_image: sigp/lighthouse:latest + cl_min_cpu: 2 + cl_max_cpu: 4 +network_params: + network_id: "1337" + seconds_per_slot: 12 + genesis_gaslimit: 1000000000000 + prefunded_accounts: '{"0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266": { "balance": "1000000ETH" },"0x70997970C51812dc3A010C7d01b50e0d17dc79C8": { "balance": "1000000ETH" },"0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC": { "balance": "1000000ETH" },"0x90F79bf6EB2c4f870365E785982E1f101E93b906": { "balance": "1000000ETH" },"0x15d34AAf54267DB7D7c367839AAf71A00a2C6A65": { "balance": "1000000ETH" },"0x9965507D1a55bcC2695C58ba16FB37d819B0A4dc": { "balance": "1000000ETH" },"0x976EA74026E726554dB657fA54763abd0C3a0aa9": { "balance": "1000000ETH" },"0x14dC79964da2C08b23698B3D3cc7Ca32193d9955": {"balance": "1000000ETH"},"0x23618e81E3f5cdF7f54C3d65f7FBc0aBf5B21E8f": {"balance": "1000000ETH"},"0xa0Ee7A142d267C1f36714E4a8F75612F20a79720": {"balance": "1000000ETH"},"0xBcd4042DE499D14e55001CcbB24a551F3b954096": 
{"balance": "1000000ETH"},"0x71bE63f3384f5fb98995898A86B02Fb2426c5788": {"balance": "1000000ETH"},"0xFABB0ac9d68B0B445fB7357272Ff202C5651694a": {"balance": "1000000ETH"},"0x1CBd3b2770909D4e10f157cABC84C7264073C9Ec": {"balance": "1000000ETH"},"0xdF3e18d64BC6A983f673Ab319CCaE4f1a57C7097": {"balance": "1000000ETH"},"0xcd3B766CCDd6AE721141F452C550Ca635964ce71": {"balance": "1000000ETH"},"0x2546BcD3c84621e976D8185a91A922aE77ECEc30": {"balance": "1000000ETH"},"0xbDA5747bFD65F08deb54cb465eB87D40e51B197E": {"balance": "1000000ETH"},"0xdD2FD4581271e230360230F9337D5c0430Bf44C0": {"balance": "1000000ETH"},"0x8626f6940E2eb28930eFb4CeF49B2d1F2C9C1199": {"balance": "1000000ETH"},"0x09DB0a93B389bEF724429898f539AEB7ac2Dd55f": {"balance": "1000000ETH"},"0x02484cb50AAC86Eae85610D6f4Bf026f30f6627D": {"balance": "1000000ETH"},"0x08135Da0A343E492FA2d4282F2AE34c6c5CC1BbE": {"balance": "1000000ETH"},"0x5E661B79FE2D3F6cE70F5AAC07d8Cd9abb2743F1": {"balance": "1000000ETH"},"0x61097BA76cD906d2ba4FD106E757f7Eb455fc295": {"balance": "1000000ETH"},"0xDf37F81dAAD2b0327A0A50003740e1C935C70913": {"balance": "1000000ETH"},"0x553BC17A05702530097c3677091C5BB47a3a7931": {"balance": "1000000ETH"},"0x87BdCE72c06C21cd96219BD8521bDF1F42C78b5e": {"balance": "1000000ETH"},"0x40Fc963A729c542424cD800349a7E4Ecc4896624": {"balance": "1000000ETH"},"0x9DCCe783B6464611f38631e6C851bf441907c710": {"balance": "1000000ETH"},"0x1BcB8e569EedAb4668e55145Cfeaf190902d3CF2": {"balance": "1000000ETH"},"0x8263Fce86B1b78F95Ab4dae11907d8AF88f841e7": {"balance": "1000000ETH"},"0xcF2d5b3cBb4D7bF04e3F7bFa8e27081B52191f91": {"balance": "1000000ETH"},"0x86c53Eb85D0B7548fea5C4B4F82b4205C8f6Ac18": {"balance": "1000000ETH"},"0x1aac82773CB722166D7dA0d5b0FA35B0307dD99D": {"balance": "1000000ETH"},"0x2f4f06d218E426344CFE1A83D53dAd806994D325": {"balance": "1000000ETH"},"0x1003ff39d25F2Ab16dBCc18EcE05a9B6154f65F4": {"balance": "1000000ETH"},"0x9eAF5590f2c84912A08de97FA28d0529361Deb9E": {"balance": 
"1000000ETH"},"0x11e8F3eA3C6FcF12EcfF2722d75CEFC539c51a1C": {"balance": "1000000ETH"},"0x7D86687F980A56b832e9378952B738b614A99dc6": {"balance": "1000000ETH"},"0x9eF6c02FB2ECc446146E05F1fF687a788a8BF76d": {"balance": "1000000ETH"},"0x08A2DE6F3528319123b25935C92888B16db8913E": {"balance": "1000000ETH"},"0xe141C82D99D85098e03E1a1cC1CdE676556fDdE0": {"balance": "1000000ETH"},"0x4b23D303D9e3719D6CDf8d172Ea030F80509ea15": {"balance": "1000000ETH"},"0xC004e69C5C04A223463Ff32042dd36DabF63A25a": {"balance": "1000000ETH"},"0x5eb15C0992734B5e77c888D713b4FC67b3D679A2": {"balance": "1000000ETH"},"0x7Ebb637fd68c523613bE51aad27C35C4DB199B9c": {"balance": "1000000ETH"},"0x3c3E2E178C69D4baD964568415a0f0c84fd6320A": {"balance": "1000000ETH"}}' diff --git a/spartan/ethereum-testnet/reference/network_params.yaml b/spartan/ethereum-testnet/reference/network_params.yaml new file mode 100644 index 000000000000..7111382ac0b4 --- /dev/null +++ b/spartan/ethereum-testnet/reference/network_params.yaml @@ -0,0 +1,198 @@ +# +# Reference network params for the Ethereum Testnet +# These are the values accepted in network_params.yaml +# +participants: +# EL + - el_type: geth + el_image: ethereum/client-go:latest + el_log_level: "" + el_extra_env_vars: {} + el_extra_labels: {} + el_extra_params: [] + el_tolerations: [] + el_volume_size: 0 + el_min_cpu: 0 + el_max_cpu: 0 + el_min_mem: 0 + el_max_mem: 0 +# CL + cl_type: lighthouse + cl_image: sigp/lighthouse:latest-unstable + cl_log_level: "" + cl_extra_env_vars: {} + cl_extra_labels: {} + cl_extra_params: [] + cl_tolerations: [] + cl_volume_size: 0 + cl_min_cpu: 0 + cl_max_cpu: 0 + cl_min_mem: 0 + cl_max_mem: 0 + supernode: false + use_separate_vc: true +# Validator + vc_type: lighthouse + vc_image: sigp/lighthouse:latest-unstable + vc_log_level: "" + vc_count: 1 + vc_extra_env_vars: {} + vc_extra_labels: {} + vc_extra_params: [] + vc_tolerations: [] + vc_min_cpu: 0 + vc_max_cpu: 0 + vc_min_mem: 0 + vc_max_mem: 0 + validator_count: null + 
use_remote_signer: false +# Remote signer + remote_signer_type: web3signer + remote_signer_image: consensys/web3signer:latest + remote_signer_extra_env_vars: {} + remote_signer_extra_labels: {} + remote_signer_extra_params: [] + remote_signer_tolerations: [] + remote_signer_min_cpu: 0 + remote_signer_max_cpu: 0 + remote_signer_min_mem: 0 + remote_signer_max_mem: 0 +# participant specific + node_selectors: {} + tolerations: [] + count: 2 + snooper_enabled: false + ethereum_metrics_exporter_enabled: false + xatu_sentry_enabled: false + prometheus_config: + scrape_interval: 15s + labels: {} + blobber_enabled: false + blobber_extra_params: [] + builder_network_params: null + keymanager_enabled: false +network_params: + network: kurtosis + network_id: "3151908" + deposit_contract_address: "0x4242424242424242424242424242424242424242" + seconds_per_slot: 12 + num_validator_keys_per_node: 64 + preregistered_validator_keys_mnemonic: + "giant issue aisle success illegal bike spike + question tent bar rely arctic volcano long crawl hungry vocal artwork sniff fantasy + very lucky have athlete" + preregistered_validator_count: 0 + genesis_delay: 20 + genesis_gaslimit: 30000000 + max_per_epoch_activation_churn_limit: 8 + churn_limit_quotient: 65536 + ejection_balance: 16000000000 + eth1_follow_distance: 2048 + min_validator_withdrawability_delay: 256 + shard_committee_period: 256 + deneb_fork_epoch: 0 + electra_fork_epoch: 100000000 + fulu_fork_epoch: 100000001 + eip7594_fork_epoch: 100000002 + eip7594_fork_version: "0x60000038" + network_sync_base_url: https://snapshots.ethpandaops.io/ + data_column_sidecar_subnet_count: 128 + samples_per_slot: 8 + custody_requirement: 4 + max_blobs_per_block: 6 + additional_preloaded_contracts: {} + devnet_repo: ethpandaops + prefunded_accounts: {} +additional_services: [] +dora_params: + image: "" +tx_spammer_params: + tx_spammer_extra_args: [] +goomy_blob_params: + goomy_blob_args: [] +prometheus_params: + storage_tsdb_retention_time: "1d" + 
storage_tsdb_retention_size: "512MB" + min_cpu: 10 + max_cpu: 1000 + min_mem: 128 + max_mem: 2048 +grafana_params: + additional_dashboards: [] + min_cpu: 10 + max_cpu: 1000 + min_mem: 128 + max_mem: 2048 +assertoor_params: + image: "" + run_stability_check: false + run_block_proposal_check: false + run_transaction_test: false + run_blob_transaction_test: false + run_opcodes_transaction_test: false + run_lifecycle_test: false + tests: [] +wait_for_finalization: false +global_log_level: info +snooper_enabled: false +ethereum_metrics_exporter_enabled: false +parallel_keystore_generation: false +disable_peer_scoring: false +persistent: false +mev_type: null +mev_params: + mev_relay_image: flashbots/mev-boost-relay + mev_builder_image: ethpandaops/flashbots-builder:main + mev_builder_cl_image: sigp/lighthouse:latest + mev_boost_image: flashbots/mev-boost + mev_boost_args: ["mev-boost", "--relay-check"] + mev_relay_api_extra_args: [] + mev_relay_housekeeper_extra_args: [] + mev_relay_website_extra_args: [] + mev_builder_extra_args: [] + mev_builder_prometheus_config: + scrape_interval: 15s + labels: {} + mev_flood_image: flashbots/mev-flood + mev_flood_extra_args: [] + mev_flood_seconds_per_bundle: 15 + custom_flood_params: + interval_between_transactions: 1 +xatu_sentry_enabled: false +xatu_sentry_params: + xatu_sentry_image: ethpandaops/xatu-sentry + xatu_server_addr: localhost:8000 + xatu_server_tls: false + xatu_server_headers: {} + beacon_subscriptions: + - attestation + - block + - chain_reorg + - finalized_checkpoint + - head + - voluntary_exit + - contribution_and_proof + - blob_sidecar +apache_port: 40000 +global_tolerations: [] +global_node_selectors: {} +keymanager_enabled: false +checkpoint_sync_enabled: false +checkpoint_sync_url: "" +port_publisher: + nat_exit_ip: KURTOSIS_IP_ADDR_PLACEHOLDER + el: + enabled: false + public_port_start: 32000 + cl: + enabled: false + public_port_start: 33000 + vc: + enabled: false + public_port_start: 34000 + remote_signer: 
+ enabled: false + public_port_start: 35000 + additional_services: + enabled: false + public_port_start: 36000 \ No newline at end of file diff --git a/spartan/ethereum-testnet/teardown.sh b/spartan/ethereum-testnet/teardown.sh new file mode 100755 index 000000000000..74f28f0ca110 --- /dev/null +++ b/spartan/ethereum-testnet/teardown.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +ENCLAVE_NAME=${ENCLAVE_NAME:-"ethereum-testnet"} + +# Run the kurtosis gateway in the background +kurtosis gateway start &>/dev/null & + +function cleanup() { + # kill everything in our process group except our process + trap - SIGTERM && kill -9 $(pgrep -g $$ | grep -v $$) $(jobs -p) $STERN_PID &>/dev/null || true +} +trap cleanup SIGINT SIGTERM EXIT + +kurtosis enclave rm -f ${ENCLAVE_NAME} \ No newline at end of file diff --git a/spartan/scripts/setup_local_k8s.sh b/spartan/scripts/setup_local_k8s.sh index 8068ce867ae3..48532a779e49 100755 --- a/spartan/scripts/setup_local_k8s.sh +++ b/spartan/scripts/setup_local_k8s.sh @@ -63,3 +63,4 @@ kubectl config use-context kind-kind || true "$SCRIPT_DIR"/../chaos-mesh/install.sh "$SCRIPT_DIR"/../metrics/install-kind.sh +"$SCRIPT_DIR"/../ethereum-testnet/install_kurtosis.sh diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index adf74380994a..6174cd060e39 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -148,6 +148,23 @@ export-cli-wallet: ARG ARCH SAVE IMAGE --push aztecprotocol/cli-wallet:${DIST_TAG}${ARCH:+-$ARCH} +blob-sink-build: + FROM +cli-base + RUN yarn workspaces focus @aztec/blob-sink --production && yarn cache clean + SAVE ARTIFACT /usr/src /usr/src + +blob-sink: + FROM ubuntu:noble + RUN apt update && apt install nodejs curl -y && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + COPY +blob-sink-build/usr/src /usr/src + ENTRYPOINT ["node", "/usr/src/yarn-project/blob-sink/dest/run.js"] + +export-blob-sink: + FROM +blob-sink + ARG DIST_TAG="latest" + ARG ARCH + SAVE IMAGE --push 
aztecprotocol/blob-sink:${DIST_TAG}${ARCH:+-$ARCH} + aztec-prod: FROM +cli-base RUN yarn workspaces focus @aztec/aztec --production && yarn cache clean @@ -265,6 +282,11 @@ export-aztec: FROM +aztec SAVE IMAGE aztecprotocol/aztec:$EARTHLY_GIT_HASH +export-e2e-blob-sink: + ARG EARTHLY_GIT_HASH + FROM +blob-sink + SAVE IMAGE aztecprotocol/blob-sink:$EARTHLY_GIT_HASH + export-aztec-arch: FROM +aztec ARG DIST_TAG="latest" @@ -285,6 +307,7 @@ export-end-to-end-arch: export-e2e-test-images: BUILD +export-aztec BUILD +export-end-to-end + BUILD +export-e2e-blob-sink export-images-arch: ARG DIST_TAG="latest" diff --git a/yarn-project/blob-sink/.eslintrc.cjs b/yarn-project/blob-sink/.eslintrc.cjs new file mode 100644 index 000000000000..e659927475c0 --- /dev/null +++ b/yarn-project/blob-sink/.eslintrc.cjs @@ -0,0 +1 @@ +module.exports = require('@aztec/foundation/eslint'); diff --git a/yarn-project/blob-sink/Dockerfile b/yarn-project/blob-sink/Dockerfile new file mode 100644 index 000000000000..610ea2b398f9 --- /dev/null +++ b/yarn-project/blob-sink/Dockerfile @@ -0,0 +1,7 @@ +## TODO THIS + +FROM aztecprotocol/yarn-project AS yarn-project + +RUN apt update + +ENTRYPOINT ["node", "/usr/src/yarn-project/blob-sink/dest/bin/run.js"] diff --git a/yarn-project/blob-sink/README.md b/yarn-project/blob-sink/README.md new file mode 100644 index 000000000000..32d9b9d59b30 --- /dev/null +++ b/yarn-project/blob-sink/README.md @@ -0,0 +1,7 @@ +## Blob Sink + +A HTTP api that emulated the https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars API. + +## When is this used? + +This service will run alongside end to end tests to capture the blob transactions that are sent alongside a `propose` transaction. 
\ No newline at end of file diff --git a/yarn-project/blob-sink/package.json b/yarn-project/blob-sink/package.json new file mode 100644 index 000000000000..414d398dca26 --- /dev/null +++ b/yarn-project/blob-sink/package.json @@ -0,0 +1,160 @@ +{ + "name": "@aztec/blob-sink", + "version": "0.1.0", + "type": "module", + "exports": { + ".": "./dest/index.js" + }, + "inherits": [ + "../package.common.json" + ], + "scripts": { + "build": "yarn clean && tsc -b", + "build:dev": "tsc -b --watch", + "clean": "rm -rf ./dest .tsbuildinfo", + "formatting": "run -T prettier --check ./src && run -T eslint ./src", + "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests" + }, + "jest": { + "moduleNameMapper": { + "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" + }, + "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", + "rootDir": "./src", + "transform": { + "^.+\\.tsx?$": [ + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" + } + } + } + ] + }, + "extensionsToTreatAsEsm": [ + ".ts" + ], + "scripts": { + "build": "yarn clean && tsc -b", + "start": "node ./dest/bin/run.js", + "build:dev": "tsc -b --watch", + "clean": "rm -rf ./dest .tsbuildinfo", + "formatting": "run -T prettier --check ./src && run -T eslint ./src", + "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests" + }, + "jest": { + "moduleNameMapper": { + "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" + }, + "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", + "rootDir": "./src", + "transform": { + "^.+\\.tsx?$": [ + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" + } + } + } + ] + }, + "extensionsToTreatAsEsm": [ + ".ts" + 
], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] + ] + }, + "dependencies": { + "@aztec/circuit-types": "workspace:^", + "@aztec/foundation": "workspace:^", + "@aztec/kv-store": "workspace:*", + "@aztec/telemetry-client": "workspace:*", + "express": "^4.21.1", + "source-map-support": "^0.5.21", + "tslib": "^2.4.0", + "zod": "^3.23.8" + }, + "devDependencies": { + "@jest/globals": "^29.5.0", + "@types/jest": "^29.5.0", + "@types/memdown": "^3.0.0", + "@types/node": "^18.7.23", + "@types/source-map-support": "^0.5.10", + "@types/supertest": "^6.0.2", + "jest": "^29.5.0", + "jest-mock-extended": "^3.0.3", + "supertest": "^7.0.0", + "ts-node": "^10.9.1", + "typescript": "^5.0.4" + }, + "files": [ + "dest", + "src", + "!*.test.*" + ], + "testTimeout": 30000, + "setupFiles": [ + "../../foundation/src/jest/setup.mjs" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] + ] + }, + "dependencies": { + "@aztec/circuit-types": "workspace:^", + "@aztec/foundation": "workspace:^", + "@aztec/kv-store": "workspace:*", + "@aztec/telemetry-client": "workspace:*", + "express": "^4.21.1", + "source-map-support": "^0.5.21", + "tslib": "^2.4.0", + "zod": "^3.23.8" + }, + "devDependencies": { + "@jest/globals": "^29.5.0", + "@types/jest": "^29.5.0", + "@types/memdown": "^3.0.0", + "@types/node": "^18.7.23", + "@types/source-map-support": "^0.5.10", + "@types/supertest": "^6.0.2", + "jest": "^29.5.0", + "jest-mock-extended": "^3.0.3", + "supertest": "^7.0.0", + "ts-node": "^10.9.1", + "typescript": "^5.0.4" + }, + "files": [ + "dest", + "src", + "!*.test.*" + ], + "types": "./dest/index.d.ts", + "engines": { + "node": ">=18" + } +} diff --git a/yarn-project/blob-sink/src/blob-sink.test.ts b/yarn-project/blob-sink/src/blob-sink.test.ts new file mode 100644 index 000000000000..03f240edc041 --- /dev/null +++ b/yarn-project/blob-sink/src/blob-sink.test.ts @@ -0,0 +1,91 @@ +import { Blob } from '@aztec/foundation/blob'; +import { Fr } from 
'@aztec/foundation/fields'; + +import request from 'supertest'; + +import { BlobSinkServer } from './server.js'; + +describe('BlobSinkService', () => { + let service: BlobSinkServer; + + beforeEach(async () => { + service = new BlobSinkServer({ + port: 0, // Using port 0 lets the OS assign a random available port + }); + await service.start(); + }); + + afterEach(async () => { + await service.stop(); + }); + + it('should store and retrieve a blob sidecar', async () => { + // Create a test blob + const testFields = [Fr.random(), Fr.random(), Fr.random()]; + const blob = Blob.fromFields(testFields); + const blockId = '0x1234'; + + // Post the blob + const postResponse = await request(service.getApp()) + .post('/blob_sidecar') + .send({ + // eslint-disable-next-line camelcase + block_id: blockId, + blobs: [ + { + index: 0, + blob: blob.toBuffer(), + }, + ], + }); + + expect(postResponse.status).toBe(200); + + // Retrieve the blob + const getResponse = await request(service.getApp()).get(`/eth/v1/beacon/blob_sidecars/${blockId}`); + + expect(getResponse.status).toBe(200); + + // Convert the response blob back to a Blob object and verify it matches + const retrievedBlobs = getResponse.body.data; + + const retrievedBlob = Blob.fromBuffer(Buffer.from(retrievedBlobs[0].blob, 'hex')); + expect(retrievedBlob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + }); + + it('should return an error if the block ID is invalid (POST)', async () => { + const response = await request(service.getApp()).post('/blob_sidecar').send({ + // eslint-disable-next-line camelcase + block_id: undefined, + }); + + expect(response.status).toBe(400); + }); + + it('should return an error if the block ID is invalid (GET)', async () => { + const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/invalid-id'); + + expect(response.status).toBe(400); + }); + + it('should return 404 
for non-existent blob', async () => { + const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/0x999999'); + + expect(response.status).toBe(404); + }); + + it('should reject invalid block IDs', async () => { + const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/invalid-id'); + + expect(response.status).toBe(400); + expect(response.body.error).toBe('Invalid block_id parameter'); + }); + + it('should reject negative block IDs', async () => { + const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/-123'); + + expect(response.status).toBe(400); + expect(response.body.error).toBe('Invalid block_id parameter'); + }); +}); diff --git a/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts b/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts new file mode 100644 index 000000000000..28c851f003d2 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts @@ -0,0 +1,94 @@ +import { Blob } from '@aztec/foundation/blob'; +import { Fr } from '@aztec/foundation/fields'; + +import { BlobWithIndex } from '../types/index.js'; +import { type BlobStore } from './interface.js'; + +export function describeBlobStore(getBlobStore: () => BlobStore) { + let blobStore: BlobStore; + + beforeEach(() => { + blobStore = getBlobStore(); + }); + + it('should store and retrieve a blob', async () => { + // Create a test blob with random fields + const testFields = [Fr.random(), Fr.random(), Fr.random()]; + const blob = Blob.fromFields(testFields); + const blockId = '12345'; + const blobWithIndex = new BlobWithIndex(blob, 0); + + // Store the blob + await blobStore.addBlobSidecars(blockId, [blobWithIndex]); + + // Retrieve the blob + const retrievedBlobs = await blobStore.getBlobSidecars(blockId); + const [retrievedBlob] = retrievedBlobs!; + + // Verify the blob was retrieved and matches + expect(retrievedBlob).toBeDefined(); + 
expect(retrievedBlob.blob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.blob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + }); + + it('should return undefined for non-existent blob', async () => { + const nonExistentBlob = await blobStore.getBlobSidecars('999999'); + expect(nonExistentBlob).toBeUndefined(); + }); + + it('should handle multiple blobs with different block IDs', async () => { + // Create two different blobs + const blob1 = Blob.fromFields([Fr.random(), Fr.random()]); + const blob2 = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); + const blobWithIndex1 = new BlobWithIndex(blob1, 0); + const blobWithIndex2 = new BlobWithIndex(blob2, 0); + + // Store both blobs + await blobStore.addBlobSidecars('1', [blobWithIndex1]); + await blobStore.addBlobSidecars('2', [blobWithIndex2]); + + // Retrieve and verify both blobs + const retrieved1 = await blobStore.getBlobSidecars('1'); + const retrieved2 = await blobStore.getBlobSidecars('2'); + const [retrievedBlob1] = retrieved1!; + const [retrievedBlob2] = retrieved2!; + + expect(retrievedBlob1.blob.commitment.toString('hex')).toBe(blob1.commitment.toString('hex')); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + }); + + it('should overwrite blob when using same block ID', async () => { + // Create two different blobs + const originalBlob = Blob.fromFields([Fr.random()]); + const newBlob = Blob.fromFields([Fr.random(), Fr.random()]); + const blockId = '1'; + const originalBlobWithIndex = new BlobWithIndex(originalBlob, 0); + const newBlobWithIndex = new BlobWithIndex(newBlob, 0); + + // Store original blob + await blobStore.addBlobSidecars(blockId, [originalBlobWithIndex]); + + // Overwrite with new blob + await blobStore.addBlobSidecars(blockId, [newBlobWithIndex]); + + // Retrieve and verify it's the new blob + const retrievedBlobs = await blobStore.getBlobSidecars(blockId); + const [retrievedBlob] 
= retrievedBlobs!; + expect(retrievedBlob.blob.commitment.toString('hex')).toBe(newBlob.commitment.toString('hex')); + expect(retrievedBlob.blob.commitment.toString('hex')).not.toBe(originalBlob.commitment.toString('hex')); + }); + + it('should handle multiple blobs with the same block ID', async () => { + const blob1 = Blob.fromFields([Fr.random()]); + const blob2 = Blob.fromFields([Fr.random()]); + const blobWithIndex1 = new BlobWithIndex(blob1, 0); + const blobWithIndex2 = new BlobWithIndex(blob2, 0); + + await blobStore.addBlobSidecars('1', [blobWithIndex1, blobWithIndex2]); + const retrievedBlobs = await blobStore.getBlobSidecars('1'); + const [retrievedBlob1, retrievedBlob2] = retrievedBlobs!; + + expect(retrievedBlob1.blob.commitment.toString('hex')).toBe(blob1.commitment.toString('hex')); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + }); +} diff --git a/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts b/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts new file mode 100644 index 000000000000..8b523dbaef14 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts @@ -0,0 +1,8 @@ +import { openTmpStore } from '@aztec/kv-store/lmdb'; + +import { describeBlobStore } from './blob_store_test_suite.js'; +import { DiskBlobStore } from './disk_blob_store.js'; + +describe('DiskBlobStore', () => { + describeBlobStore(() => new DiskBlobStore(openTmpStore())); +}); diff --git a/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts b/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts new file mode 100644 index 000000000000..34ac8e2ec735 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts @@ -0,0 +1,25 @@ +import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; + +import { type BlobWithIndex, BlobsWithIndexes } from '../types/index.js'; +import { type BlobStore } from './interface.js'; + +export class DiskBlobStore implements 
BlobStore { + blobs: AztecMap; + + constructor(store: AztecKVStore) { + this.blobs = store.openMap('blobs'); + } + + public getBlobSidecars(blockId: string): Promise { + const blobBuffer = this.blobs.get(`${blockId}`); + if (!blobBuffer) { + return Promise.resolve(undefined); + } + return Promise.resolve(BlobsWithIndexes.fromBuffer(blobBuffer).blobs); + } + + public async addBlobSidecars(blockId: string, blobSidecars: BlobWithIndex[]): Promise { + await this.blobs.set(blockId, new BlobsWithIndexes(blobSidecars).toBuffer()); + return Promise.resolve(); + } +} diff --git a/yarn-project/blob-sink/src/blobstore/index.ts b/yarn-project/blob-sink/src/blobstore/index.ts new file mode 100644 index 000000000000..fd3901930cfc --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/index.ts @@ -0,0 +1,3 @@ +export * from './memory_blob_store.js'; +export * from './disk_blob_store.js'; +export * from './interface.js'; diff --git a/yarn-project/blob-sink/src/blobstore/interface.ts b/yarn-project/blob-sink/src/blobstore/interface.ts new file mode 100644 index 000000000000..9a6e3d26d8c1 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/interface.ts @@ -0,0 +1,12 @@ +import { type BlobWithIndex } from '../types/index.js'; + +export interface BlobStore { + /** + * Get a blob by block id + */ + getBlobSidecars: (blockId: string) => Promise; + /** + * Add a blob to the store + */ + addBlobSidecars: (blockId: string, blobSidecars: BlobWithIndex[]) => Promise; +} diff --git a/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts b/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts new file mode 100644 index 000000000000..2f13926cd1a4 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts @@ -0,0 +1,6 @@ +import { describeBlobStore } from './blob_store_test_suite.js'; +import { MemoryBlobStore } from './memory_blob_store.js'; + +describe('MemoryBlobStore', () => { + describeBlobStore(() => new MemoryBlobStore()); +}); diff --git 
a/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts b/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts new file mode 100644 index 000000000000..23ed7274edbd --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts @@ -0,0 +1,19 @@ +import { type BlobWithIndex, BlobsWithIndexes } from '../types/index.js'; +import { type BlobStore } from './interface.js'; + +export class MemoryBlobStore implements BlobStore { + private blobs: Map = new Map(); + + public getBlobSidecars(blockId: string): Promise { + const blobBuffer = this.blobs.get(blockId); + if (!blobBuffer) { + return Promise.resolve(undefined); + } + return Promise.resolve(BlobsWithIndexes.fromBuffer(blobBuffer).blobs); + } + + public addBlobSidecars(blockId: string, blobSidecars: BlobWithIndex[]): Promise { + this.blobs.set(blockId, new BlobsWithIndexes(blobSidecars).toBuffer()); + return Promise.resolve(); + } +} diff --git a/yarn-project/blob-sink/src/config.ts b/yarn-project/blob-sink/src/config.ts new file mode 100644 index 000000000000..0dfb0e57c128 --- /dev/null +++ b/yarn-project/blob-sink/src/config.ts @@ -0,0 +1,30 @@ +import { type ConfigMappingsType, getConfigFromMappings } from '@aztec/foundation/config'; +import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config'; + +export type BlobSinkConfig = { + port?: number; + // otelMetricsCollectorUrl?: string; +} & DataStoreConfig; + +export const blobSinkConfigMappings: ConfigMappingsType = { + port: { + env: 'BLOB_SINK_PORT', + description: 'The port to run the blob sink server on', + }, + ...dataConfigMappings, + + // TODO: bring otel endpoints back + // otelMetricsCollectorUrl: { + // env: 'OTEL_METRICS_COLLECTOR_URL', + // description: 'The URL of the OTLP metrics collector', + // }, +}; + +/** + * Returns the blob sink configuration from the environment variables. + * Note: If an environment variable is not set, the default value is used. + * @returns The blob sink configuration. 
+ */
+export function getBlobSinkConfigFromEnv(): BlobSinkConfig {
+  return getConfigFromMappings(blobSinkConfigMappings);
+}
diff --git a/yarn-project/blob-sink/src/factory.ts b/yarn-project/blob-sink/src/factory.ts
new file mode 100644
index 000000000000..88d4c1d75b2d
--- /dev/null
+++ b/yarn-project/blob-sink/src/factory.ts
@@ -0,0 +1,25 @@
+import { type AztecKVStore } from '@aztec/kv-store';
+import { createStore } from '@aztec/kv-store/lmdb';
+
+import { type BlobSinkConfig } from './config.js';
+import { BlobSinkServer } from './server.js';
+
+// If data store settings are provided, the store is created and returned.
+// Otherwise, undefined is returned and an in memory store will be used.
+async function getDataStoreConfig(config?: BlobSinkConfig): Promise<AztecKVStore | undefined> {
+  if (!config?.dataDirectory) {
+    return undefined;
+  }
+  return await createStore('blob-sink', config);
+}
+
+// TODO: telemetry client config too
+
+/**
+ * Creates a blob sink service from the provided config.
+ */
+export async function createBlobSinkServer(config?: BlobSinkConfig): Promise<BlobSinkServer> {
+  const store = await getDataStoreConfig(config);
+
+  return new BlobSinkServer(config, store);
+}
diff --git a/yarn-project/blob-sink/src/index.ts b/yarn-project/blob-sink/src/index.ts
new file mode 100644
index 000000000000..25844130c2f7
--- /dev/null
+++ b/yarn-project/blob-sink/src/index.ts
@@ -0,0 +1,3 @@
+export * from './server.js';
+export * from './config.js';
+export * from './factory.js';
diff --git a/yarn-project/blob-sink/src/metrics.ts b/yarn-project/blob-sink/src/metrics.ts
new file mode 100644
index 000000000000..fae9efc7ef6c
--- /dev/null
+++ b/yarn-project/blob-sink/src/metrics.ts
@@ -0,0 +1,27 @@
+import { type Histogram, Metrics, type TelemetryClient, type UpDownCounter } from '@aztec/telemetry-client';
+
+import { type BlobWithIndex } from './types/blob_with_index.js';
+
+export class BlobSinkMetrics {
+  /** The number of blobs in the blob store */
+  private objectsInBlobStore:
UpDownCounter; + + /** Tracks blob size */ + private blobSize: Histogram; + + constructor(telemetry: TelemetryClient) { + const name = 'BlobSink'; + this.objectsInBlobStore = telemetry.getMeter(name).createUpDownCounter(Metrics.BLOB_SINK_OBJECTS_IN_BLOB_STORE, { + description: 'The current number of blobs in the blob store', + }); + + this.blobSize = telemetry.getMeter(name).createHistogram(Metrics.BLOB_SINK_BLOB_SIZE, { + description: 'The size of blobs in the blob store', + }); + } + + public recordBlobReciept(blobs: BlobWithIndex[]) { + this.objectsInBlobStore.add(blobs.length); + blobs.forEach(b => this.blobSize.record(b.blob.getSize())); + } +} diff --git a/yarn-project/blob-sink/src/run.ts b/yarn-project/blob-sink/src/run.ts new file mode 100644 index 000000000000..02c240691c92 --- /dev/null +++ b/yarn-project/blob-sink/src/run.ts @@ -0,0 +1,25 @@ +// Run a standalone blob sink server +import { createLogger } from '@aztec/foundation/log'; + +import { getBlobSinkConfigFromEnv } from './config.js'; +import { BlobSinkServer } from './server.js'; + +const logger = createLogger('aztec:blob-sink'); + +async function main() { + const config = getBlobSinkConfigFromEnv(); + const blobSinkServer = new BlobSinkServer(config); + + await blobSinkServer.start(); + + const stop = async () => { + logger.debug('Stopping Blob Sink...'); + await blobSinkServer.stop(); + logger.info('Node stopped'); + process.exit(0); + }; + process.on('SIGTERM', stop); + process.on('SIGINT', stop); +} + +void main(); diff --git a/yarn-project/blob-sink/src/server.ts b/yarn-project/blob-sink/src/server.ts new file mode 100644 index 000000000000..b281c6215a69 --- /dev/null +++ b/yarn-project/blob-sink/src/server.ts @@ -0,0 +1,170 @@ +import { Blob } from '@aztec/foundation/blob'; +import { type Logger, createLogger } from '@aztec/foundation/log'; +import { type AztecKVStore } from '@aztec/kv-store'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } 
from '@aztec/telemetry-client/noop'; + +import express, { type Express, type Request, type Response, json } from 'express'; +import { type Server } from 'http'; +import { z } from 'zod'; + +import { type BlobStore, DiskBlobStore } from './blobstore/index.js'; +import { MemoryBlobStore } from './blobstore/memory_blob_store.js'; +import { type BlobSinkConfig } from './config.js'; +import { BlobSinkMetrics } from './metrics.js'; +import { BlobWithIndex } from './types/index.js'; + +// For now, the block ID is the aztec block ID where the blobs were added. +const blockIdSchema = z.coerce + .string() + .regex(/^0x[0-9a-fA-F]{0,64}$/) + .max(66); + +/** + * Example usage: + * const service = new BlobSinkService({ port: 5052 }); + * await service.start(); + * ... later ... + * await service.stop(); + */ +export class BlobSinkServer { + private app: Express; + private server: Server | null = null; + private blobStore: BlobStore; + private readonly port: number; + private metrics: BlobSinkMetrics; + private log: Logger = createLogger('aztec:blob-sink'); + + constructor( + config?: Partial, + store?: AztecKVStore, + telemetry: TelemetryClient = new NoopTelemetryClient(), + ) { + this.port = config?.port ?? 5052; // 5052 is beacon chain default http port + this.app = express(); + + // Setup middleware + this.app.use(json({ limit: '1mb' })); // Increase the limit to allow for a blob to be sent + + this.metrics = new BlobSinkMetrics(telemetry); + + this.blobStore = store === undefined ? 
new MemoryBlobStore() : new DiskBlobStore(store); + + // Setup routes + this.setupRoutes(); + } + + private setupRoutes() { + this.app.get('/eth/v1/beacon/blob_sidecars/:block_id', this.handleBlobSidecar.bind(this)); + this.app.post('/blob_sidecar', this.handlePostBlobSidecar.bind(this)); + } + + private async handleBlobSidecar(req: Request, res: Response) { + // eslint-disable-next-line camelcase + const { block_id } = req.params; + + try { + // eslint-disable-next-line camelcase + const parsedBlockId = blockIdSchema.parse(block_id); + + if (!parsedBlockId) { + res.status(400).json({ + error: 'Invalid block_id parameter', + }); + return; + } + + const blobs = await this.blobStore.getBlobSidecars(parsedBlockId.toString()); + + if (!blobs) { + res.status(404).json({ error: 'Blob not found' }); + return; + } + + res.json({ + version: 'deneb', + data: blobs.map(blob => blob.toJSON()), + }); + } catch (error) { + if (error instanceof z.ZodError) { + res.status(400).json({ + error: 'Invalid block_id parameter', + details: error.errors, + }); + } else { + res.status(500).json({ + error: 'Internal server error', + }); + } + } + } + + private async handlePostBlobSidecar(req: Request, res: Response) { + // eslint-disable-next-line camelcase + const { block_id, blobs } = req.body; + + try { + // eslint-disable-next-line camelcase + const parsedBlockId = blockIdSchema.parse(block_id); + if (!parsedBlockId) { + res.status(400).json({ + error: 'Invalid block_id parameter', + }); + return; + } + + this.log.info(`Received blob sidecar for block ${parsedBlockId}`); + + // TODO: tidy up the blob parsing + const blobObjects: BlobWithIndex[] = blobs.map( + (b: { index: number; blob: { type: string; data: string } }) => + new BlobWithIndex(Blob.fromBuffer(Buffer.from(b.blob.data)), b.index), + ); + + await this.blobStore.addBlobSidecars(parsedBlockId.toString(), blobObjects); + this.metrics.recordBlobReciept(blobObjects); + + this.log.info(`Blob sidecar stored successfully for block 
${parsedBlockId}`); + + res.json({ message: 'Blob sidecar stored successfully' }); + } catch (error) { + res.status(400).json({ + error: 'Invalid blob data', + }); + } + } + + public start(): Promise { + return new Promise(resolve => { + this.server = this.app.listen(this.port, () => { + this.log.info(`Server is running on http://localhost:${this.port}`); + resolve(); + }); + }); + } + + public stop(): Promise { + this.log.info('Stopping blob sink'); + return new Promise((resolve, reject) => { + if (!this.server) { + resolve(); + this.log.info('Blob sink already stopped'); + return; + } + + this.server.close(err => { + if (err) { + reject(err); + return; + } + this.server = null; + this.log.info('Blob sink stopped'); + resolve(); + }); + }); + } + + public getApp(): Express { + return this.app; + } +} diff --git a/yarn-project/blob-sink/src/types/blob_with_index.test.ts b/yarn-project/blob-sink/src/types/blob_with_index.test.ts new file mode 100644 index 000000000000..d29c6b98b887 --- /dev/null +++ b/yarn-project/blob-sink/src/types/blob_with_index.test.ts @@ -0,0 +1,31 @@ +import { Blob } from '@aztec/foundation/blob'; +import { Fr } from '@aztec/foundation/fields'; + +import { BlobWithIndex, BlobsWithIndexes } from './blob_with_index.js'; + +describe('BlobWithIndex Serde', () => { + it('should serialize and deserialize', () => { + const blob = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); + const blobWithIndex = new BlobWithIndex(blob, 0); + const serialized = blobWithIndex.toBuffer(); + + const deserialized = BlobWithIndex.fromBuffer(serialized); + + expect(blobWithIndex).toEqual(deserialized); + }); +}); + +describe('BlobsWithIndexes Serde', () => { + it('should serialize and deserialize', () => { + const blobs = [ + new BlobWithIndex(Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]), 0), + new BlobWithIndex(Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]), 1), + ]; + const blobsWithIndexes = new BlobsWithIndexes(blobs); + + const 
serialized = blobsWithIndexes.toBuffer(); + const deserialized = BlobsWithIndexes.fromBuffer(serialized); + + expect(deserialized).toEqual(blobsWithIndexes); + }); +}); diff --git a/yarn-project/blob-sink/src/types/blob_with_index.ts b/yarn-project/blob-sink/src/types/blob_with_index.ts new file mode 100644 index 000000000000..b58d3b8b34ce --- /dev/null +++ b/yarn-project/blob-sink/src/types/blob_with_index.ts @@ -0,0 +1,47 @@ +import { Blob } from '@aztec/foundation/blob'; +import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; + +/** Serialized an array of blobs with their indexes to be stored at a given block id */ +export class BlobsWithIndexes { + constructor(public blobs: BlobWithIndex[]) {} + + public toBuffer(): Buffer { + return serializeToBuffer(this.blobs.length, this.blobs); + } + + public static fromBuffer(buffer: Buffer | BufferReader): BlobsWithIndexes { + const reader = BufferReader.asReader(buffer); + return new BlobsWithIndexes(reader.readArray(reader.readNumber(), BlobWithIndex)); + } +} + +/** We store blobs alongside their index in the block */ +export class BlobWithIndex { + constructor( + /** The blob */ + public blob: Blob, + /** The index of the blob in the block */ + public index: number, + ) {} + + public toBuffer(): Buffer { + return serializeToBuffer([this.blob, this.index]); + } + + public static fromBuffer(buffer: Buffer | BufferReader): BlobWithIndex { + const reader = BufferReader.asReader(buffer); + return new BlobWithIndex(reader.readObject(Blob), reader.readNumber()); + } + + // Follows the structure the beacon node api expects + public toJSON(): { blob: string; index: number; kzg_commitment: string; kzg_proof: string } { + return { + blob: this.blob.toBuffer().toString('hex'), + index: this.index, + // eslint-disable-next-line camelcase + kzg_commitment: this.blob.commitment.toString('hex'), + // eslint-disable-next-line camelcase + kzg_proof: this.blob.proof.toString('hex'), + }; + } +} diff --git 
a/yarn-project/blob-sink/src/types/index.ts b/yarn-project/blob-sink/src/types/index.ts new file mode 100644 index 000000000000..396b8fc805ed --- /dev/null +++ b/yarn-project/blob-sink/src/types/index.ts @@ -0,0 +1 @@ +export * from './blob_with_index.js'; diff --git a/yarn-project/blob-sink/tsconfig.json b/yarn-project/blob-sink/tsconfig.json new file mode 100644 index 000000000000..535eabe58633 --- /dev/null +++ b/yarn-project/blob-sink/tsconfig.json @@ -0,0 +1,23 @@ +{ + "extends": "..", + "compilerOptions": { + "outDir": "dest", + "rootDir": "src", + "tsBuildInfoFile": ".tsbuildinfo" + }, + "references": [ + { + "path": "../circuit-types" + }, + { + "path": "../foundation" + }, + { + "path": "../kv-store" + }, + { + "path": "../telemetry-client" + } + ], + "include": ["src"] +} diff --git a/yarn-project/circuits.js/src/structs/blob_public_inputs.test.ts b/yarn-project/circuits.js/src/structs/blob_public_inputs.test.ts index 9dc9c42d7be6..20d3f7e145ed 100644 --- a/yarn-project/circuits.js/src/structs/blob_public_inputs.test.ts +++ b/yarn-project/circuits.js/src/structs/blob_public_inputs.test.ts @@ -20,7 +20,7 @@ describe('BlobPublicInputs', () => { }); it('converts correctly from Blob class', () => { - const blob = new Blob(Array(400).fill(new Fr(3))); + const blob = Blob.fromFields(Array(400).fill(new Fr(3))); const converted = BlobPublicInputs.fromBlob(blob); expect(converted.z).toEqual(blob.challengeZ); expect(Buffer.from(converted.y.toString(16), 'hex')).toEqual(blob.evaluationY); @@ -55,7 +55,7 @@ describe('BlockBlobPublicInputs', () => { }); it('converts correctly from Blob class', () => { - const blobs = Array.from({ length: BLOBS_PER_BLOCK }, (_, i) => new Blob(Array(400).fill(new Fr(i + 1)))); + const blobs = Array.from({ length: BLOBS_PER_BLOCK }, (_, i) => Blob.fromFields(Array(400).fill(new Fr(i + 1)))); const converted = BlockBlobPublicInputs.fromBlobs(blobs); converted.inner.forEach((blobPI, i) => { expect(blobPI.z).toEqual(blobs[i].challengeZ); 
diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index 4a31d3e2ee15..8d4e80e014f3 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -30,6 +30,7 @@ "@aztec/aztec-node": "workspace:^", "@aztec/aztec.js": "workspace:^", "@aztec/bb-prover": "workspace:^", + "@aztec/blob-sink": "workspace:^", "@aztec/bot": "workspace:^", "@aztec/circuit-types": "workspace:^", "@aztec/circuits.js": "workspace:^", diff --git a/yarn-project/end-to-end/scripts/docker-compose.yml b/yarn-project/end-to-end/scripts/docker-compose.yml index eae41741bb50..238f976a5363 100644 --- a/yarn-project/end-to-end/scripts/docker-compose.yml +++ b/yarn-project/end-to-end/scripts/docker-compose.yml @@ -19,11 +19,19 @@ services: SEQ_TX_POLLING_INTERVAL_MS: 50 WS_BLOCK_CHECK_INTERVAL_MS: 50 ARCHIVER_VIEM_POLLING_INTERVAL_MS: 500 + SEQ_BLOB_SINK_URL: http://blob-sink:${BLOB_SINK_PORT:-5052} ENABLE_GAS: ${ENABLE_GAS:-} HARDWARE_CONCURRENCY: ${HARDWARE_CONCURRENCY:-} expose: - '8080' + blob-sink: + image: aztecprotocol/blob-sink + environment: + PORT: ${BLOB_SINK_PORT:-5052} + DEBUG: 'aztec:*' + DEBUG_COLORS: 1 + end-to-end: image: aztecprotocol/end-to-end:${AZTEC_DOCKER_TAG:-latest} environment: diff --git a/yarn-project/end-to-end/scripts/native-network/blob-sink.sh b/yarn-project/end-to-end/scripts/native-network/blob-sink.sh new file mode 100755 index 000000000000..b9ae64820030 --- /dev/null +++ b/yarn-project/end-to-end/scripts/native-network/blob-sink.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -eu + +REPO=$(git rev-parse --show-toplevel) + +# Starts the Blob Sink +export PORT=${BLOB_SINK_PORT:-5052} +export DEBUG=${DEBUG:-"aztec:*"} +export DEBUG_COLORS=${DEBUG_COLORS:-1} + +node --no-warnings "$REPO"/yarn-project/blob-sink/dest/run.js diff --git a/yarn-project/end-to-end/scripts/native-network/prover-node.sh b/yarn-project/end-to-end/scripts/native-network/prover-node.sh index 08a4c748855f..a68c1f6da55a 100755 --- 
a/yarn-project/end-to-end/scripts/native-network/prover-node.sh +++ b/yarn-project/end-to-end/scripts/native-network/prover-node.sh @@ -40,6 +40,7 @@ export PROVER_AGENT_ENABLED="true" export PROVER_PUBLISHER_PRIVATE_KEY=${PROVER_PUBLISHER_PRIVATE_KEY:-"0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"} export PROVER_COORDINATION_NODE_URL="http://127.0.0.1:8080" export AZTEC_NODE_URL="http://127.0.0.1:8080" +export PROVER_BLOB_SINK_URL="http://127.0.0.1:${BLOB_SINK_PORT:-5052}" export OTEL_RESOURCE_ATTRIBUTES="service.name=prover-node-${PORT}" export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-}" diff --git a/yarn-project/end-to-end/scripts/native-network/validator.sh b/yarn-project/end-to-end/scripts/native-network/validator.sh index ecc04b6eedba..ee8c37cef5b8 100755 --- a/yarn-project/end-to-end/scripts/native-network/validator.sh +++ b/yarn-project/end-to-end/scripts/native-network/validator.sh @@ -68,6 +68,7 @@ export P2P_TCP_ANNOUNCE_ADDR="127.0.0.1:$P2P_PORT" export P2P_UDP_ANNOUNCE_ADDR="127.0.0.1:$P2P_PORT" export P2P_TCP_LISTEN_ADDR="0.0.0.0:$P2P_PORT" export P2P_UDP_LISTEN_ADDR="0.0.0.0:$P2P_PORT" +export SEQ_BLOB_SINK_URL="http://127.0.0.1:${BLOB_SINK_PORT:-5052}" export OTEL_RESOURCE_ATTRIBUTES="service.name=validator-node-${PORT}" export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-}" diff --git a/yarn-project/end-to-end/scripts/network_test.sh b/yarn-project/end-to-end/scripts/network_test.sh index 7c2089bb494e..ab33a55d86ae 100755 --- a/yarn-project/end-to-end/scripts/network_test.sh +++ b/yarn-project/end-to-end/scripts/network_test.sh @@ -45,6 +45,16 @@ if ! 
docker image ls --format '{{.Repository}}:{{.Tag}}' | grep -q "aztecprotoco exit 1 fi +# Setup kurtosis ethereum testnet +function setup_kurtosis_ethereum_testnet() { + echo "Setting up kurtosis ethereum testnet..." + $REPO/spartan/ethereum-testnet/deploy.sh +} + +# Start ethereum testnet setup in background and save its PID +setup_kurtosis_ethereum_testnet & +ETHEREUM_SETUP_PID=$! + # Load the Docker images into kind kind load docker-image aztecprotocol/aztec:$AZTEC_DOCKER_TAG @@ -129,6 +139,14 @@ if [ -z "${CHAOS_VALUES:-}" ]; then kubectl delete networkchaos --all --all-namespaces fi +# Wait for ethereum testnet setup to complete before deploying chart +echo "Waiting for ethereum testnet setup to complete..." +wait $ETHEREUM_SETUP_PID +if [ $? -ne 0 ]; then + echo "Ethereum testnet setup failed" + exit 1 +fi + # Install the Helm chart helm upgrade --install spartan "$REPO/spartan/aztec-network/" \ --namespace "$NAMESPACE" \ @@ -165,6 +183,8 @@ if ! handle_network_shaping; then fi fi + +# Now run the docker command docker run --rm --network=host \ -v ~/.kube:/root/.kube \ -e K8S=local \ diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index d8030191abb0..d4600d37b677 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -70,6 +70,11 @@ config.l1RpcUrl = config.l1RpcUrl || 'http://127.0.0.1:8545'; const numberOfConsecutiveBlocks = 2; +const BLOB_SINK_PORT = 5052; +const BLOB_SINK_URL = `http://localhost:${BLOB_SINK_PORT}`; + +// TODO(md): THE FIRST PLACE TO TEST THE BLOB SINK E2E IS HERE + describe('L1Publisher integration', () => { let publicClient: PublicClient; let walletClient: WalletClient; @@ -186,6 +191,7 @@ describe('L1Publisher integration', () => { l1ChainId: 31337, viemPollingIntervalMS: 100, ethereumSlotDuration: config.ethereumSlotDuration, 
+ blobSinkUrl: BLOB_SINK_URL, }, new NoopTelemetryClient(), ); diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index d4a5563fa37c..a1ddccc0c288 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -381,6 +381,9 @@ describe('e2e_synching', () => { l1ChainId: 31337, viemPollingIntervalMS: 100, ethereumSlotDuration: ETHEREUM_SLOT_DURATION, + + // TODO(md): update + blobSinkUrl: 'http://localhost:5052', }, new NoopTelemetryClient(), ); diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 5581397e60cd..029e96d2ea77 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -15,6 +15,7 @@ import { type Wallet, } from '@aztec/aztec.js'; import { deployInstance, registerContractClass } from '@aztec/aztec.js/deployment'; +import { type BlobSinkServer, createBlobSinkServer } from '@aztec/blob-sink'; import { type DeployL1ContractsArgs, createL1Clients, getL1ContractsConfigEnvVars, l1Artifacts } from '@aztec/ethereum'; import { startAnvil } from '@aztec/ethereum/test'; import { asyncMap } from '@aztec/foundation/async-map'; @@ -28,6 +29,7 @@ import { createAndStartTelemetryClient, getConfigEnvVars as getTelemetryConfig } import { type Anvil } from '@viem/anvil'; import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; import { copySync, removeSync } from 'fs-extra/esm'; +import getPort from 'get-port'; import { join } from 'path'; import { type Hex, getContract } from 'viem'; import { mnemonicToAccount } from 'viem/accounts'; @@ -51,6 +53,7 @@ export type SubsystemsContext = { watcher: AnvilTestWatcher; cheatCodes: CheatCodes; dateProvider: TestDateProvider; + blobSink: BlobSinkServer; }; type SnapshotEntry = { @@ -250,6 +253,7 @@ async function teardown(context: 
SubsystemsContext | undefined) { await context.acvmConfig?.cleanup(); await context.anvil.stop(); await context.watcher.stop(); + await context.blobSink.stop(); } catch (err) { getLogger().error('Error during teardown', err); } @@ -271,10 +275,21 @@ async function setupFromFresh( ): Promise { logger.verbose(`Initializing state...`); + const blobSinkPort = await getPort(); + // Fetch the AztecNode config. // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. const aztecNodeConfig: AztecNodeConfig & SetupOptions = { ...getConfigEnvVars(), ...opts }; aztecNodeConfig.dataDirectory = statePath; + aztecNodeConfig.blobSinkUrl = `http://127.0.0.1:${blobSinkPort}`; + + // Setup blob sink service + const blobSink = await createBlobSinkServer({ + port: blobSinkPort, + dataDirectory: statePath, + dataStoreMapSizeKB: aztecNodeConfig.dataStoreMapSizeKB, + }); + await blobSink.start(); // Start anvil. We go via a wrapper script to ensure if the parent dies, anvil dies. logger.verbose('Starting anvil...'); @@ -390,6 +405,7 @@ async function setupFromFresh( watcher, cheatCodes, dateProvider, + blobSink, }; } @@ -399,12 +415,23 @@ async function setupFromFresh( async function setupFromState(statePath: string, logger: Logger): Promise { logger.verbose(`Initializing with saved state at ${statePath}...`); + // Run the blob sink on a random port + const blobSinkPort = await getPort(); + // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. const aztecNodeConfig: AztecNodeConfig & SetupOptions = JSON.parse( readFileSync(`${statePath}/aztec_node_config.json`, 'utf-8'), reviver, ); aztecNodeConfig.dataDirectory = statePath; + aztecNodeConfig.blobSinkUrl = `http://127.0.0.1:${blobSinkPort}`; + + const blobSink = await createBlobSinkServer({ + port: blobSinkPort, + dataDirectory: statePath, + dataStoreMapSizeKB: aztecNodeConfig.dataStoreMapSizeKB, + }); + await blobSink.start(); // Start anvil. 
We go via a wrapper script to ensure if the parent dies, anvil dies. const { anvil, rpcUrl } = await startAnvil(); @@ -473,6 +500,7 @@ async function setupFromState(statePath: string, logger: Logger): Promise Promise; }; @@ -360,6 +365,15 @@ export async function setup( return await setupWithRemoteEnvironment(publisherHdAccount!, config, logger, numberOfAccounts); } + // Blob sink service - blobs get posted here and served from here + const blobSinkPort = await getPort(); + const blobSink = await createBlobSinkServer({ + port: blobSinkPort, + dataDirectory: undefined, + dataStoreMapSizeKB: 1, // will be unused, set as default + }); + config.blobSinkUrl = `http://127.0.0.1:${blobSinkPort}`; + const deployL1ContractsValues = opts.deployL1ContractsValues ?? (await setupL1Contracts(config.l1RpcUrl, publisherHdAccount!, logger, opts, chain)); @@ -457,6 +471,7 @@ export async function setup( await anvil?.stop(); await watcher.stop(); + await blobSink?.stop(); }; return { @@ -472,6 +487,7 @@ export async function setup( sequencer, watcher, dateProvider, + blobSink, teardown, }; } diff --git a/yarn-project/end-to-end/tsconfig.json b/yarn-project/end-to-end/tsconfig.json index 08932fbdb4a5..a8117b5a5dbc 100644 --- a/yarn-project/end-to-end/tsconfig.json +++ b/yarn-project/end-to-end/tsconfig.json @@ -21,6 +21,9 @@ { "path": "../bb-prover" }, + { + "path": "../blob-sink" + }, { "path": "../bot" }, diff --git a/yarn-project/foundation/src/blob/blob.test.ts b/yarn-project/foundation/src/blob/blob.test.ts index e4a5746ec06f..da4caa8fc74c 100644 --- a/yarn-project/foundation/src/blob/blob.test.ts +++ b/yarn-project/foundation/src/blob/blob.test.ts @@ -78,15 +78,19 @@ describe('blob', () => { // This test ensures that the Blob class correctly matches the c-kzg lib // The values here are used to test Noir's blob evaluation in noir-projects/noir-protocol-circuits/crates/blob/src/blob.nr -> test_400 const blobItems = Array(400).fill(new Fr(3)); - const ourBlob = new Blob(blobItems); 
+ const ourBlob = Blob.fromFields(blobItems); const blobItemsHash = poseidon2Hash(Array(400).fill(new Fr(3))); expect(blobItemsHash).toEqual(ourBlob.fieldsHash); - expect(blobToKzgCommitment(ourBlob.data)).toEqual(ourBlob.commitment); + + // We add zeros before getting commitment as we do not store the blob along with + // all of the zeros + const dataWithZeros = Buffer.concat([ourBlob.data], BYTES_PER_BLOB); + expect(blobToKzgCommitment(dataWithZeros)).toEqual(ourBlob.commitment); const z = poseidon2Hash([blobItemsHash, ...ourBlob.commitmentToFields()]); expect(z).toEqual(ourBlob.challengeZ); - const res = computeKzgProof(ourBlob.data, ourBlob.challengeZ.toBuffer()); + const res = computeKzgProof(dataWithZeros, ourBlob.challengeZ.toBuffer()); expect(res[0]).toEqual(ourBlob.proof); expect(res[1]).toEqual(ourBlob.evaluationY); @@ -112,8 +116,9 @@ describe('blob', () => { const blobItemsHash = poseidon2Hash(blobItems); const blobs = Blob.getBlobs(blobItems); blobs.forEach(ourBlob => { - // const ourBlob = new Blob(blobItems.slice(j * FIELD_ELEMENTS_PER_BLOB, (j + 1) * FIELD_ELEMENTS_PER_BLOB), blobItemsHash); + // const ourBlob = Blob.fromFields(blobItems.slice(j * FIELD_ELEMENTS_PER_BLOB, (j + 1) * FIELD_ELEMENTS_PER_BLOB), blobItemsHash); expect(blobItemsHash).toEqual(ourBlob.fieldsHash); + expect(blobToKzgCommitment(ourBlob.data)).toEqual(ourBlob.commitment); const z = poseidon2Hash([blobItemsHash, ...ourBlob.commitmentToFields()]); @@ -132,4 +137,11 @@ describe('blob', () => { expect(isValid).toBe(true); }); }); + + it('Should serialise and deserialise a blob', () => { + const blob = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); + const blobBuffer = blob.toBuffer(); + const deserialisedBlob = Blob.fromBuffer(blobBuffer); + expect(blob.fieldsHash.equals(deserialisedBlob.fieldsHash)).toBe(true); + }); }); diff --git a/yarn-project/foundation/src/blob/index.ts b/yarn-project/foundation/src/blob/index.ts index 6c1651f4c56a..381e7141c095 100644 --- 
a/yarn-project/foundation/src/blob/index.ts +++ b/yarn-project/foundation/src/blob/index.ts @@ -3,7 +3,7 @@ import type { Blob as BlobBuffer } from 'c-kzg'; import { poseidon2Hash, sha256 } from '../crypto/index.js'; import { Fr } from '../fields/index.js'; -import { serializeToBuffer } from '../serialize/index.js'; +import { BufferReader, serializeToBuffer } from '../serialize/index.js'; // Importing directly from 'c-kzg' does not work, ignoring import/no-named-as-default-member err: /* eslint-disable import/no-named-as-default-member */ @@ -36,48 +36,48 @@ export const VERSIONED_HASH_VERSION_KZG = 0x01; * A class to create, manage, and prove EVM blobs. */ export class Blob { - /** The blob to be broadcast on L1 in bytes form. */ - public readonly data: BlobBuffer; - /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */ - public readonly fieldsHash: Fr; - /** Challenge point z (= H(H(tx_effects), kzgCommmitment). Used such that p(z) = y. */ - public readonly challengeZ: Fr; - /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */ - public readonly evaluationY: Buffer; - /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */ - public readonly commitment: Buffer; - /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */ - public readonly proof: Buffer; - constructor( - /** All fields to be broadcast in the blob. */ - fields: Fr[], - /** If we want to broadcast more fields than fit into a blob, we hash those and used it as the fieldsHash across all blobs. - * This is much simpler and cheaper in the circuit to do, but MUST BE CHECKED before injecting here. - */ - multiBlobFieldsHash?: Fr, - ) { + /** The blob to be broadcast on L1 in bytes form. 
*/ + public readonly data: BlobBuffer, + /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */ + public readonly fieldsHash: Fr, + /** Challenge point z (= H(H(tx_effects), kzgCommmitment). Used such that p(z) = y. */ + public readonly challengeZ: Fr, + /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */ + public readonly evaluationY: Buffer, + /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */ + public readonly commitment: Buffer, + /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */ + public readonly proof: Buffer, + ) {} + + static fromFields(fields: Fr[], multiBlobFieldsHash?: Fr): Blob { if (fields.length > FIELD_ELEMENTS_PER_BLOB) { throw new Error( `Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELD_ELEMENTS_PER_BLOB}`, ); } - this.data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB); + // TODO: do not store zeros + const dataWithoutZeros = serializeToBuffer(fields); + const data = Buffer.concat([dataWithoutZeros], BYTES_PER_BLOB); + // This matches the output of SpongeBlob.squeeze() in the blob circuit - this.fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : poseidon2Hash(fields); - this.commitment = Buffer.from(blobToKzgCommitment(this.data)); - this.challengeZ = poseidon2Hash([this.fieldsHash, ...this.commitmentToFields()]); - const res = computeKzgProof(this.data, this.challengeZ.toBuffer()); - if (!verifyKzgProof(this.commitment, this.challengeZ.toBuffer(), res[1], res[0])) { + const fieldsHash = multiBlobFieldsHash ? 
multiBlobFieldsHash : poseidon2Hash(fields); + const commitment = Buffer.from(blobToKzgCommitment(data)); + const challengeZ = poseidon2Hash([fieldsHash, ...commitmentToFields(commitment)]); + const res = computeKzgProof(data, challengeZ.toBuffer()); + if (!verifyKzgProof(commitment, challengeZ.toBuffer(), res[1], res[0])) { throw new Error(`KZG proof did not verify.`); } - this.proof = Buffer.from(res[0]); - this.evaluationY = Buffer.from(res[1]); + const proof = Buffer.from(res[0]); + const evaluationY = Buffer.from(res[1]); + + return new Blob(dataWithoutZeros, fieldsHash, challengeZ, evaluationY, commitment, proof); } // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48] commitmentToFields(): [Fr, Fr] { - return [new Fr(this.commitment.subarray(0, 31)), new Fr(this.commitment.subarray(31, 48))]; + return commitmentToFields(this.commitment); } // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers @@ -93,6 +93,49 @@ export class Blob { return hash; } + toBuffer(): Buffer { + return Buffer.from( + serializeToBuffer( + this.data.length, + this.data, + this.fieldsHash, + this.challengeZ, + this.evaluationY.length, + this.evaluationY, + this.commitment.length, + this.commitment, + this.proof.length, + this.proof, + ), + ); + } + + static fromBuffer(buf: Buffer | BufferReader): Blob { + const reader = BufferReader.asReader(buf); + return new Blob( + reader.readUint8Array(), + reader.readObject(Fr), + reader.readObject(Fr), + reader.readBuffer(), + reader.readBuffer(), + reader.readBuffer(), + ); + } + + /** + * Pad the blob data to it's full size before posting + */ + get fullData(): BlobBuffer { + return Buffer.concat([this.data], BYTES_PER_BLOB); + } + + /** + * Get the size of the blob in bytes + */ + getSize() { + return this.data.length; + } + // Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile: // * input[:32] - versioned_hash // * input[32:64] 
- z @@ -145,8 +188,13 @@ export class Blob { const res = []; for (let i = 0; i < numBlobs; i++) { const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ? fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB; - res.push(new Blob(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash)); + res.push(Blob.fromFields(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash)); } return res; } } + +// 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48] +function commitmentToFields(commitment: Buffer): [Fr, Fr] { + return [new Fr(commitment.subarray(0, 31)), new Fr(commitment.subarray(31, 48))]; +} diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 6fe164390660..86d5e9d0cea4 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -36,6 +36,7 @@ export type EnvVar = | 'BOT_TX_MINED_WAIT_SECONDS' | 'BOT_MAX_CONSECUTIVE_ERRORS' | 'BOT_STOP_WHEN_UNHEALTHY' + | 'BLOB_SINK_PORT' | 'COINBASE' | 'DATA_DIRECTORY' | 'DATA_STORE_MAP_SIZE_KB' @@ -100,6 +101,7 @@ export type EnvVar = | 'P2P_UDP_ANNOUNCE_ADDR' | 'P2P_UDP_LISTEN_ADDR' | 'PEER_ID_PRIVATE_KEY' + | 'PROVER_BLOB_SINK_URL' | 'PROOF_VERIFIER_L1_START_BLOCK' | 'PROOF_VERIFIER_POLL_INTERVAL_MS' | 'PROVER_AGENT_ENABLED' @@ -137,6 +139,7 @@ export type EnvVar = | 'ROLLUP_CONTRACT_ADDRESS' | 'SEQ_ALLOWED_SETUP_FN' | 'SEQ_ALLOWED_TEARDOWN_FN' + | 'SEQ_BLOB_SINK_URL' | 'SEQ_MAX_BLOCK_SIZE_IN_BYTES' | 'SEQ_MAX_SECONDS_BETWEEN_BLOCKS' | 'SEQ_MAX_TX_PER_BLOCK' diff --git a/yarn-project/foundation/src/serialize/buffer_reader.ts b/yarn-project/foundation/src/serialize/buffer_reader.ts index 7abe3f59336f..84b2ea86277e 100644 --- a/yarn-project/foundation/src/serialize/buffer_reader.ts +++ b/yarn-project/foundation/src/serialize/buffer_reader.ts @@ -307,6 +307,20 @@ export class BufferReader { return this.readBytes(size); } + /** + * Reads a buffer from the current position of the reader 
and advances the index. + * The method first reads the size (number) of bytes to be read, and then returns + * a Uint8Array with that size containing the bytes. Useful for reading variable-length + * binary data encoded as (size, data) format. + * + * @returns A Uint8Array containing the read bytes. + */ + public readUint8Array(): Uint8Array { + const size = this.readNumber(); + this.#rangeCheck(size); + return this.readBytes(size); + } + /** * Reads and constructs a map object from the current buffer using the provided deserializer. * The method reads the number of entries in the map, followed by iterating through each key-value pair. diff --git a/yarn-project/foundation/src/serialize/serialize.ts b/yarn-project/foundation/src/serialize/serialize.ts index 6698a7081e2b..fc2638ac3e74 100644 --- a/yarn-project/foundation/src/serialize/serialize.ts +++ b/yarn-project/foundation/src/serialize/serialize.ts @@ -109,6 +109,7 @@ export function deserializeField(buf: Buffer, offset = 0) { export type Bufferable = | boolean | Buffer + | Uint8Array | number | bigint | string diff --git a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts index dfc5df7f105b..cb3abd077e3f 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts @@ -4,7 +4,7 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { AztecKVTxPool } from './aztec_kv_tx_pool.js'; import { describeTxPool } from './tx_pool_test_suite.js'; -describe('In-Memory TX pool', () => { +describe('KV TX pool', () => { let txPool: AztecKVTxPool; beforeEach(() => { txPool = new AztecKVTxPool(openTmpStore(), new NoopTelemetryClient()); diff --git a/yarn-project/package.json b/yarn-project/package.json index 7d31e95a5c88..8205824f3713 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -12,7 +12,7 @@ "format": "yarn prettier
--cache -w .", "test": "FORCE_COLOR=true yarn workspaces foreach --exclude @aztec/aztec3-packages --exclude @aztec/end-to-end --exclude @aztec/prover-client -p -v run test && yarn workspaces foreach --include @aztec/end-to-end -p -v run test:unit", "build": "FORCE_COLOR=true yarn workspaces foreach --parallel --topological-dev --verbose --exclude @aztec/aztec3-packages --exclude @aztec/docs run build", - "build:fast": "cd foundation && yarn build && cd ../l1-artifacts && yarn build && cd ../circuits.js && yarn build && cd .. && yarn generate && tsc -b", + "build:fast": "cd foundation && yarn build && cd ../l1-artifacts && yarn build && cd ../circuits.js && yarn build && cd .. && yarn generate && echo 'Running tsc...' && tsc -b && echo 'tsc done.'", "build:dev": "./watch.sh", "generate": "FORCE_COLOR=true yarn workspaces foreach --parallel --topological-dev --verbose run generate", "clean": "yarn workspaces foreach -p -v run clean" @@ -26,6 +26,7 @@ "aztec-node", "validator-client", "bb-prover", + "blob-sink", "bot", "builder", "pxe", diff --git a/yarn-project/sequencer-client/package.json b/yarn-project/sequencer-client/package.json index 334377750069..8d3f351aa764 100644 --- a/yarn-project/sequencer-client/package.json +++ b/yarn-project/sequencer-client/package.json @@ -60,6 +60,7 @@ "@types/node": "^18.7.23", "concurrently": "^7.6.0", "eslint": "^8.37.0", + "express": "^4.21.1", "jest": "^29.5.0", "jest-mock-extended": "^3.0.3", "levelup": "^5.1.1", diff --git a/yarn-project/sequencer-client/src/publisher/config.ts b/yarn-project/sequencer-client/src/publisher/config.ts index 367f2aa66779..d77efa57ca2a 100644 --- a/yarn-project/sequencer-client/src/publisher/config.ts +++ b/yarn-project/sequencer-client/src/publisher/config.ts @@ -24,6 +24,11 @@ export type PublisherConfig = L1TxUtilsConfig & { * The interval to wait between publish retries. */ l1PublishRetryIntervalMS: number; + + /** + * The URL of the blob sink. 
+ */ + blobSinkUrl?: string; }; export const getTxSenderConfigMappings: ( @@ -72,6 +77,11 @@ export const getPublisherConfigMappings: ( description: 'The interval to wait between publish retries.', }, ...l1TxUtilsConfigMappings, + blobSinkUrl: { + env: `${scope}_BLOB_SINK_URL`, + description: 'The URL of the blob sink.', + parseEnv: (val?: string) => val, + }, }); export function getPublisherConfigFromEnv(scope: 'PROVER' | 'SEQ'): PublisherConfig { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 64ac88119d50..839dbcc4c25b 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -13,6 +13,9 @@ import { sleep } from '@aztec/foundation/sleep'; import { RollupAbi } from '@aztec/l1-artifacts'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; +import { jest } from '@jest/globals'; +import express, { json } from 'express'; +import { type Server } from 'http'; import { type MockProxy, mock } from 'jest-mock-extended'; import { type GetTransactionReceiptReturnType, @@ -68,6 +71,9 @@ class MockRollupContract { } } +const BLOB_SINK_PORT = 5052; +const BLOB_SINK_URL = `http://localhost:${BLOB_SINK_PORT}`; + describe('L1Publisher', () => { let rollupContractRead: MockProxy; let rollupContractWrite: MockProxy; @@ -85,11 +91,16 @@ describe('L1Publisher', () => { let blockHash: Buffer; let body: Buffer; + let mockBlobSinkServer: Server | undefined = undefined; + + // An l1 publisher with some private methods exposed let publisher: L1Publisher; const GAS_GUESS = 300_000n; beforeEach(() => { + mockBlobSinkServer = undefined; + l2Block = L2Block.random(42); header = l2Block.header.toBuffer(); @@ -112,6 +123,7 @@ describe('L1Publisher', () => { publicClient = mock(); l1TxUtils = mock(); const config = { + blobSinkUrl: BLOB_SINK_URL, l1RpcUrl: `http://127.0.0.1:8545`, 
l1ChainId: 1, publisherPrivateKey: `0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80`, @@ -138,18 +150,61 @@ describe('L1Publisher', () => { (l1TxUtils as any).estimateGas.mockResolvedValue(GAS_GUESS); }); + const closeServer = (server: Server): Promise => { + return new Promise((resolve, reject) => { + server.close(err => { + if (err) { + reject(err); + return; + } + resolve(); + }); + }); + }; + + afterEach(async () => { + if (mockBlobSinkServer) { + await closeServer(mockBlobSinkServer); + mockBlobSinkServer = undefined; + } + }); + + // Run a mock blob sink in the background, and test that the correct data is sent to it + const expectBlobsAreSentToBlobSink = (_blockId: string, blobs: Blob[]) => { + const sendToBlobSinkSpy = jest.spyOn(publisher as any, 'sendBlobsToBlobSink'); + + const app = express(); + app.use(json({ limit: '10mb' })); + + app.post('/blob_sidecar', (req, res) => { + const blobsBuffers = req.body.blobs.map((b: { index: number; blob: { type: string; data: string } }) => + Blob.fromBuffer(Buffer.from(b.blob.data)), + ); + + expect(blobsBuffers).toEqual(blobs); + res.status(200).send(); + }); + + mockBlobSinkServer = app.listen(BLOB_SINK_PORT); + + return sendToBlobSinkSpy; + }; + it('publishes and propose l2 block to l1', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash); - const result = await publisher.proposeL2Block(l2Block); - - expect(result).toEqual(true); - const kzg = Blob.getViemKzgInstance(); const blobs = Blob.getBlobs(l2Block.body.toBlobFields()); + // Check the blobs were forwarded to the blob sink service + const sendToBlobSinkSpy = expectBlobsAreSentToBlobSink(blockHash.toString('hex'), blobs); + + const result = await publisher.proposeL2Block(l2Block); + + expect(result).toEqual(true); + const blobInput = Blob.getEthBlobEvaluationInputs(blobs); const args = [ @@ -173,8 +228,13 
@@ describe('L1Publisher', () => { data: encodeFunctionData({ abi: rollupContract.abi, functionName: 'propose', args }), }, { fixedGas: GAS_GUESS + L1Publisher.PROPOSE_GAS_GUESS }, - { blobs: blobs.map(b => b.data), kzg, maxFeePerBlobGas: 10000000000n }, + { blobs: blobs.map(b => b.fullData), kzg, maxFeePerBlobGas: 10000000000n }, ); + + expect(sendToBlobSinkSpy).toHaveBeenCalledTimes(1); + // If this does not return true, then the mocked server will have errored, and + // the expects that run there will have failed + expect(sendToBlobSinkSpy).toHaveReturnedWith(Promise.resolve(true)); }); it('does not retry if sending a propose tx fails', async () => { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index f2580de6a6f2..133daeaf47b4 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -103,6 +103,8 @@ export type MinimalTransactionReceipt = { logs: any[]; /** Block number in which this tx was mined. */ blockNumber: bigint; + /** The block hash in which this tx was mined */ + blockHash: `0x${string}`; }; /** Arguments to the process method of the rollup contract */ @@ -171,6 +173,8 @@ export class L1Publisher { protected account: PrivateKeyAccount; protected ethereumSlotDuration: bigint; + private blobSinkUrl: string | undefined; + // @note - with blobs, the below estimate seems too large. // Total used for full block from int_l1_pub e2e test: 1m (of which 86k is 1x blob) // Total used for emptier block from above test: 429k (of which 84k is 1x blob) @@ -185,6 +189,7 @@ export class L1Publisher { ) { this.sleepTimeMs = config?.l1PublishRetryIntervalMS ?? 
60_000; this.ethereumSlotDuration = BigInt(config.ethereumSlotDuration); + this.blobSinkUrl = config.blobSinkUrl; this.metrics = new L1PublisherMetrics(client, 'L1Publisher'); const { l1RpcUrl: rpcUrl, l1ChainId: chainId, publisherPrivateKey, l1Contracts } = config; @@ -537,15 +542,18 @@ export class L1Publisher { const consensusPayload = new ConsensusPayload(block.header, block.archive.root, txHashes ?? []); const digest = getHashedSignaturePayload(consensusPayload, SignatureDomainSeperator.blockAttestation); + + const blobs = Blob.getBlobs(block.body.toBlobFields()); const proposeTxArgs = { header: block.header.toBuffer(), archive: block.archive.root.toBuffer(), blockHash: block.header.hash().toBuffer(), body: block.body.toBuffer(), - blobs: Blob.getBlobs(block.body.toBlobFields()), + blobs, attestations, txHashes: txHashes ?? [], }; + // Publish body and propose block (if not already published) if (this.interrupted) { this.log.verbose('L2 block data syncing interrupted while processing blocks.', ctx); @@ -588,6 +596,21 @@ export class L1Publisher { }; this.log.verbose(`Published L2 block to L1 rollup contract`, { ...stats, ...ctx }); this.metrics.recordProcessBlockTx(timer.ms(), stats); + + // Send the blobs to the blob sink + this.sendBlobsToBlobSink(receipt.blockHash, blobs).then( + (success: boolean) => { + if (success) { + this.log.info('Successfully sent blobs to blob sink'); + } else { + this.log.error('Failed to send blobs to blob sink'); + } + }, + _err => { + this.log.error('Failed to send blobs to blob sink'); + }, + ); + return true; } @@ -946,7 +969,9 @@ export class L1Publisher { fixedGas: gas, }, { - blobs: encodedData.blobs.map(b => b.data), + // TODO(md): remove full data field, just use snappy compression when + // sending the sink server + blobs: encodedData.blobs.map(b => b.fullData), kzg, maxFeePerBlobGas: 10000000000n, //This is 10 gwei, taken from DEFAULT_MAX_FEE_PER_GAS }, @@ -989,7 +1014,7 @@ export class L1Publisher { }, { fixedGas: gas 
}, { - blobs: encodedData.blobs.map(b => b.data), + blobs: encodedData.blobs.map(b => b.fullData), kzg, maxFeePerBlobGas: 10000000000n, //This is 10 gwei, taken from DEFAULT_MAX_FEE_PER_GAS }, @@ -1033,6 +1058,7 @@ export class L1Publisher { gasPrice: receipt.effectiveGasPrice, logs: receipt.logs, blockNumber: receipt.blockNumber, + blockHash: receipt.blockHash, }; } @@ -1048,9 +1074,51 @@ export class L1Publisher { protected async sleepOrInterrupted() { await this.interruptibleSleep.sleep(this.sleepTimeMs); } + + /** + * Send blobs to the blob sink + * + * If a blob sink url is configured, then we send blobs to the blob sink + * - for now we use the blockHash as the identifier for the blobs; + * In the future this will move to be the beacon block id - which takes a bit more work + * to calculate and will need to be mocked in e2e tests + */ + protected async sendBlobsToBlobSink(blockHash: string, blobs: Blob[]): Promise { + // TODO(md): for now we are assuming the indexes of the blobs will be 0, 1, 2 + // When in reality they will not, but for testing purposes this is fine + if (!this.blobSinkUrl) { + this.log.verbose('No blob sink url configured'); + return false; + } + + this.log.verbose(`Sending ${blobs.length} blobs to blob sink`); + try { + const res = await fetch(`${this.blobSinkUrl}/blob_sidecar`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + // eslint-disable-next-line camelcase + block_id: blockHash, + blobs: blobs.map((b, i) => ({ blob: b.toBuffer(), index: i })), + }), + }); + + if (res.ok) { + return true; + } + + this.log.error('Failed to send blobs to blob sink', res.status); + return false; + } catch (err) { + this.log.error(`Error sending blobs to blob sink`, err); + return false; + } + } } -/** +/* * Returns cost of calldata usage in Ethereum. * @param data - Calldata. * @returns 4 for each zero byte, 16 for each nonzero. 
diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index 84fe768da50c..d4889c3b2d98 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -6,6 +6,9 @@ * @see {@link https://opentelemetry.io/docs/specs/semconv/general/metrics/ | OpenTelemetry Metrics} for naming conventions. */ +export const BLOB_SINK_OBJECTS_IN_BLOB_STORE = 'aztec.blob_sink.objects_in_blob_store'; +export const BLOB_SINK_BLOB_SIZE = 'aztec.blob_sink.blob_size'; + /** How long it takes to simulate a circuit */ export const CIRCUIT_SIMULATION_DURATION = 'aztec.circuit.simulation.duration'; export const CIRCUIT_SIMULATION_INPUT_SIZE = 'aztec.circuit.simulation.input_size'; diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 5d3e2c5a5f38..d010efc2b999 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -331,6 +331,32 @@ __metadata: languageName: node linkType: soft +"@aztec/blob-sink@workspace:^, @aztec/blob-sink@workspace:blob-sink": + version: 0.0.0-use.local + resolution: "@aztec/blob-sink@workspace:blob-sink" + dependencies: + "@aztec/circuit-types": "workspace:^" + "@aztec/foundation": "workspace:^" + "@aztec/kv-store": "workspace:*" + "@aztec/telemetry-client": "workspace:*" + "@jest/globals": ^29.5.0 + "@types/jest": ^29.5.0 + "@types/memdown": ^3.0.0 + "@types/node": ^18.7.23 + "@types/source-map-support": ^0.5.10 + "@types/supertest": ^6.0.2 + express: ^4.21.1 + jest: ^29.5.0 + jest-mock-extended: ^3.0.3 + source-map-support: ^0.5.21 + supertest: ^7.0.0 + ts-node: ^10.9.1 + tslib: ^2.4.0 + typescript: ^5.0.4 + zod: ^3.23.8 + languageName: unknown + linkType: soft + "@aztec/bot@workspace:^, @aztec/bot@workspace:bot": version: 0.0.0-use.local resolution: "@aztec/bot@workspace:bot" @@ -528,6 +554,7 @@ __metadata: "@aztec/aztec-node": "workspace:^" "@aztec/aztec.js": "workspace:^" "@aztec/bb-prover": "workspace:^" + "@aztec/blob-sink": "workspace:^" 
"@aztec/bot": "workspace:^" "@aztec/circuit-types": "workspace:^" "@aztec/circuits.js": "workspace:^" @@ -791,7 +818,7 @@ __metadata: languageName: unknown linkType: soft -"@aztec/kv-store@workspace:^, @aztec/kv-store@workspace:kv-store": +"@aztec/kv-store@workspace:*, @aztec/kv-store@workspace:^, @aztec/kv-store@workspace:kv-store": version: 0.0.0-use.local resolution: "@aztec/kv-store@workspace:kv-store" dependencies: @@ -1182,6 +1209,7 @@ __metadata: "@types/node": ^18.7.23 concurrently: ^7.6.0 eslint: ^8.37.0 + express: ^4.21.1 jest: ^29.5.0 jest-mock-extended: ^3.0.3 levelup: ^5.1.1 @@ -1234,7 +1262,7 @@ __metadata: languageName: unknown linkType: soft -"@aztec/telemetry-client@workspace:^, @aztec/telemetry-client@workspace:telemetry-client": +"@aztec/telemetry-client@workspace:*, @aztec/telemetry-client@workspace:^, @aztec/telemetry-client@workspace:telemetry-client": version: 0.0.0-use.local resolution: "@aztec/telemetry-client@workspace:telemetry-client" dependencies: @@ -5668,6 +5696,18 @@ __metadata: languageName: node linkType: hard +"@types/superagent@npm:^8.1.0": + version: 8.1.9 + resolution: "@types/superagent@npm:8.1.9" + dependencies: + "@types/cookiejar": ^2.1.5 + "@types/methods": ^1.1.4 + "@types/node": "*" + form-data: ^4.0.0 + checksum: 530d8c2e87706315c82c8c9696500c40621de3353bc54ea9b104947f3530243abf54d0a49a6ae219d4947606a102ceb94bedfc43b9cc49f74069a18cbb3be8e2 + languageName: node + linkType: hard + "@types/supertest@npm:^2.0.12": version: 2.0.16 resolution: "@types/supertest@npm:2.0.16" @@ -5677,6 +5717,16 @@ __metadata: languageName: node linkType: hard +"@types/supertest@npm:^6.0.2": + version: 6.0.2 + resolution: "@types/supertest@npm:6.0.2" + dependencies: + "@types/methods": ^1.1.4 + "@types/superagent": ^8.1.0 + checksum: 1eafa472665757a6fd984439d11f388ae0480c6d243a6884066c474c4e0357de5373316488da503b1690c3163e075ca8c64c0c4853b3bb7deb09e05d1b64e556 + languageName: node + linkType: hard + "@types/wrap-ansi@npm:^3.0.0": version: 3.0.0 
resolution: "@types/wrap-ansi@npm:3.0.0" @@ -10654,7 +10704,7 @@ __metadata: languageName: node linkType: hard -"express@npm:^4.19.2": +"express@npm:^4.19.2, express@npm:^4.21.1": version: 4.21.1 resolution: "express@npm:4.21.1" dependencies: @@ -11027,6 +11077,17 @@ __metadata: languageName: node linkType: hard +"formidable@npm:^3.5.1": + version: 3.5.2 + resolution: "formidable@npm:3.5.2" + dependencies: + dezalgo: ^1.0.4 + hexoid: ^2.0.0 + once: ^1.4.0 + checksum: 7c7972e8a15d45e6d2315a54d77f0900e5c610aff9b5730de326e2b34630604e1eff6c9d666e5504fba4c8818ccaed682d76a4fdb718b160c6afa2c250bf6a76 + languageName: node + linkType: hard + "forwarded@npm:0.2.0": version: 0.2.0 resolution: "forwarded@npm:0.2.0" @@ -11625,6 +11686,13 @@ __metadata: languageName: node linkType: hard +"hexoid@npm:^2.0.0": + version: 2.0.0 + resolution: "hexoid@npm:2.0.0" + checksum: 69a92b2bcd7c81c16557de017c59511643e3cb1f0d6e9e9b705859b798bfd059088e4d3cc85e9fe0a9e431007430f15393303c3e74320b5c4c28cb64fc7d8bb4 + languageName: node + linkType: hard + "hmac-drbg@npm:^1.0.1": version: 1.0.1 resolution: "hmac-drbg@npm:1.0.1" @@ -18255,6 +18323,23 @@ __metadata: languageName: node linkType: hard +"superagent@npm:^9.0.1": + version: 9.0.2 + resolution: "superagent@npm:9.0.2" + dependencies: + component-emitter: ^1.3.0 + cookiejar: ^2.1.4 + debug: ^4.3.4 + fast-safe-stringify: ^2.1.1 + form-data: ^4.0.0 + formidable: ^3.5.1 + methods: ^1.1.2 + mime: 2.6.0 + qs: ^6.11.0 + checksum: f471461b21f034d844fd0aca332128d61e3afb75c2ee5950f3339f2a3b5ca8b23e2861224f19ad9b43f21c9184d28b7d9384af5a4fde64fdef479efdb15036db + languageName: node + linkType: hard + "supertest@npm:^6.3.3": version: 6.3.4 resolution: "supertest@npm:6.3.4" @@ -18265,6 +18350,16 @@ __metadata: languageName: node linkType: hard +"supertest@npm:^7.0.0": + version: 7.0.0 + resolution: "supertest@npm:7.0.0" + dependencies: + methods: ^1.1.2 + superagent: ^9.0.1 + checksum: 
974743aa511ec0f387135dfca05e378f6202366c81f0850dfbcc2c3d6fc690e856dda27e175c70db38510e21d87f331c0f62e1a942afea4c447953c647c26c8b + languageName: node + linkType: hard + "supports-color@npm:^2.0.0": version: 2.0.0 resolution: "supports-color@npm:2.0.0"