From 8cf4982175da82575087bb28590a3a71b88aaf6e Mon Sep 17 00:00:00 2001 From: Maddiaa <47148561+Maddiaa0@users.noreply.github.com> Date: Tue, 28 Jan 2025 01:04:17 +0800 Subject: [PATCH] feat(archiver): read blobs from cl (#11273) --- scripts/run_native_testnet.sh | 46 ++- .../files/config/setup-service-addresses.sh | 12 +- spartan/aztec-network/templates/_helpers.tpl | 4 + .../archiver/src/archiver/archiver.test.ts | 18 ++ .../archiver/src/archiver/archiver.ts | 13 +- yarn-project/archiver/src/archiver/config.ts | 8 +- .../archiver/src/archiver/data_retrieval.ts | 26 +- yarn-project/archiver/src/archiver/errors.ts | 5 + .../aztec-node/src/aztec-node/config.ts | 2 + .../aztec-node/src/aztec-node/server.ts | 2 +- .../aztec/src/cli/aztec_start_options.ts | 10 + yarn-project/blob-sink/README.md | 11 + yarn-project/blob-sink/package.json | 4 +- .../src/client/blob-sink-client-tests.ts | 12 +- yarn-project/blob-sink/src/client/config.ts | 44 +++ yarn-project/blob-sink/src/client/factory.ts | 7 +- .../blob-sink/src/client/http.test.ts | 138 +++++++- yarn-project/blob-sink/src/client/http.ts | 146 ++++++++- yarn-project/blob-sink/src/client/index.ts | 1 + yarn-project/blob-sink/src/encoding/index.ts | 21 ++ yarn-project/blob-sink/src/server/config.ts | 24 +- yarn-project/blob-sink/src/server/run.ts | 26 ++ .../blob-sink/src/server/server.test.ts | 31 +- yarn-project/blob-sink/src/server/server.ts | 38 ++- .../blob-sink/src/types/blob_with_index.ts | 13 +- yarn-project/circuit-types/src/tx_effect.ts | 7 +- .../scripts/native-network/blob-sink.sh | 11 + .../scripts/native-network/boot-node.sh | 10 +- .../native-network/deploy-l1-contracts.sh | 17 +- .../native-network/deploy-l2-contracts.sh | 12 +- .../scripts/native-network/prover-node.sh | 8 +- .../end-to-end/scripts/native-network/pxe.sh | 9 +- .../native-network/utils/get-chain-id.sh | 6 + .../scripts/native-network/validator.sh | 13 +- .../src/e2e_prover/e2e_prover_test.ts | 2 +- .../end-to-end/src/e2e_synching.test.ts | 9 +- .../end-to-end/src/fixtures/fixtures.ts | 2 + yarn-project/end-to-end/src/fixtures/utils.ts | 4 +- yarn-project/foundation/src/blob/blob.test.ts | 9 +- yarn-project/foundation/src/blob/blob.ts | 294 ++++++++++++++++++ yarn-project/foundation/src/blob/encoding.ts | 98 ++++++ yarn-project/foundation/src/blob/index.ts | 188 +---------- yarn-project/foundation/src/blob/interface.ts | 11 + yarn-project/foundation/src/blob/mocks.ts | 30 ++ yarn-project/foundation/src/config/env_var.ts | 6 +- .../src/promise/running-promise.test.ts | 26 +- .../foundation/src/promise/running-promise.ts | 5 +- .../src/serialize/field_reader.test.ts | 24 ++ .../foundation/src/serialize/field_reader.ts | 35 ++- yarn-project/prover-node/src/factory.ts | 2 +- .../sequencer-client/src/publisher/config.ts | 2 +- .../src/publisher/l1-publisher.test.ts | 9 +- .../src/publisher/l1-publisher.ts | 20 +- yarn-project/telemetry-client/package.json | 2 +- yarn-project/yarn.lock | 1 + 55 files changed, 1236 insertions(+), 298 deletions(-) create mode 100644 yarn-project/archiver/src/archiver/errors.ts create mode 100644 yarn-project/blob-sink/src/client/config.ts create mode 100644 yarn-project/blob-sink/src/encoding/index.ts create mode 100644 yarn-project/blob-sink/src/server/run.ts create mode 100755 yarn-project/end-to-end/scripts/native-network/blob-sink.sh create mode 100755 yarn-project/end-to-end/scripts/native-network/utils/get-chain-id.sh create mode 100644 yarn-project/foundation/src/blob/blob.ts create mode 100644 
yarn-project/foundation/src/blob/encoding.ts create mode 100644 yarn-project/foundation/src/blob/interface.ts create mode 100644 yarn-project/foundation/src/blob/mocks.ts diff --git a/scripts/run_native_testnet.sh b/scripts/run_native_testnet.sh index 4cf6d83900a..544635123a3 100755 --- a/scripts/run_native_testnet.sh +++ b/scripts/run_native_testnet.sh @@ -32,7 +32,11 @@ PROVER_SCRIPT="\"./prover-node.sh 8078 false\"" NUM_VALIDATORS=3 INTERLEAVED=false METRICS=false +DISABLE_BLOB_SINK=false LOG_LEVEL="info" +ETHEREUM_HOST= +L1_CONSENSUS_HOST_URL= + OTEL_COLLECTOR_ENDPOINT=${OTEL_COLLECTOR_ENDPOINT:-"http://localhost:4318"} # Function to display help message @@ -49,6 +53,9 @@ display_help() { echo " -i Run interleaved (default: $INTERLEAVED)" echo " -m Run with metrics (default: $METRICS) will use $OTEL_COLLECTOR_ENDPOINT as default otel endpoint" echo " -c Specify the otel collector endpoint (default: $OTEL_COLLECTOR_ENDPOINT)" + echo " -b Disable the blob sink (default: false)" + echo " -e Specify the ethereum host url (default: $ETHEREUM_HOST)" + echo " -cl Specify the l1 consensus host url (default: $L1_CONSENSUS_HOST_URL)" echo echo "Example:" echo " $0 -t ./test-4epochs.sh -val 5 -v" @@ -97,6 +104,18 @@ while [[ $# -gt 0 ]]; do OTEL_COLLECTOR_ENDPOINT="$2" shift 2 ;; + -e) + ETHEREUM_HOST="$2" + shift 2 + ;; + -cl) + L1_CONSENSUS_HOST_URL="$2" + shift 2 + ;; + -b) + DISABLE_BLOB_SINK=true + shift + ;; *) echo "Invalid option: $1" >&2 display_help @@ -115,6 +134,28 @@ if $METRICS; then export LOG_JSON=1 fi +# If an ethereum rpc url is provided, use it +if [ -n "$ETHEREUM_HOST" ]; then + export ETHEREUM_HOST +fi +if [ -n "$L1_CONSENSUS_HOST_URL" ]; then + export L1_CONSENSUS_HOST_URL +fi + +# If an ethereum url has been provided, do not run the ethereum.sh script +if [ -n "$ETHEREUM_HOST" ]; then + ETHEREUM_SCRIPT="" +else + ETHEREUM_SCRIPT="./ethereum.sh" +fi + +# If the blob sink is disabled, do not run the blob-sink.sh script +if $DISABLE_BLOB_SINK; then + BLOB_SINK_SCRIPT="" +else + BLOB_SINK_SCRIPT="./blob-sink.sh" +fi + # Go to repo root cd $(git rev-parse --show-toplevel) @@ -124,11 +165,12 @@ BASE_CMD="INTERLEAVED=$INTERLEAVED ./yarn-project/end-to-end/scripts/native_netw \"./deploy-l1-contracts.sh $NUM_VALIDATORS\" \ ./deploy-l2-contracts.sh \ ./boot-node.sh \ - ./ethereum.sh \ + $ETHEREUM_SCRIPT \ \"./validators.sh $NUM_VALIDATORS\" \ $PROVER_SCRIPT \ ./pxe.sh \ - ./transaction-bot.sh" + ./transaction-bot.sh \ + $BLOB_SINK_SCRIPT" # Execute the command eval $BASE_CMD diff --git a/spartan/aztec-network/files/config/setup-service-addresses.sh b/spartan/aztec-network/files/config/setup-service-addresses.sh index 05934ad5916..83118f1bf2b 100644 --- a/spartan/aztec-network/files/config/setup-service-addresses.sh +++ b/spartan/aztec-network/files/config/setup-service-addresses.sh @@ -52,7 +52,7 @@ get_service_address() { echo "http://${NODE_IP}:${PORT}" } -# Configure Ethereum address +# Configure Ethereum execution client address if [ "${EXTERNAL_ETHEREUM_HOST}" != "" ]; then ETHEREUM_ADDR="${EXTERNAL_ETHEREUM_HOST}" elif [ "${NETWORK_PUBLIC}" = "true" ]; then @@ -61,6 +61,15 @@ else ETHEREUM_ADDR="http://${SERVICE_NAME}-eth-execution.${NAMESPACE}:${ETHEREUM_PORT}" fi +# Configure Ethereum Consensus address +if [ "${EXTERNAL_ETHEREUM_CONSENSUS_HOST}" != "" ]; then + ETHEREUM_CONSENSUS_ADDR="${EXTERNAL_ETHEREUM_CONSENSUS_HOST}" +elif [ "${NETWORK_PUBLIC}" = "true" ]; then + ETHEREUM_CONSENSUS_ADDR=$(get_service_address "eth-beacon" "${ETHEREUM_CONSENSUS_PORT}") +else + 
ETHEREUM_CONSENSUS_ADDR="http://${SERVICE_NAME}-eth-beacon.${NAMESPACE}:${ETHEREUM_CONSENSUS_PORT}" +fi + # Configure Boot Node address if [ "${BOOT_NODE_EXTERNAL_HOST}" != "" ]; then BOOT_NODE_ADDR="${BOOT_NODE_EXTERNAL_HOST}" @@ -93,6 +102,7 @@ fi # Write addresses to file for sourcing echo "export ETHEREUM_HOST=${ETHEREUM_ADDR}" >> /shared/config/service-addresses +echo "export L1_CONSENSUS_HOST_URL=${ETHEREUM_CONSENSUS_ADDR}" >> /shared/config/service-addresses echo "export BOOT_NODE_HOST=${BOOT_NODE_ADDR}" >> /shared/config/service-addresses echo "export PROVER_NODE_HOST=${PROVER_NODE_ADDR}" >> /shared/config/service-addresses echo "export PROVER_BROKER_HOST=${PROVER_BROKER_ADDR}" >> /shared/config/service-addresses diff --git a/spartan/aztec-network/templates/_helpers.tpl b/spartan/aztec-network/templates/_helpers.tpl index 9f11457e714..23713329ce3 100644 --- a/spartan/aztec-network/templates/_helpers.tpl +++ b/spartan/aztec-network/templates/_helpers.tpl @@ -145,6 +145,10 @@ Service Address Setup Container value: "{{ .Values.ethereum.externalHost }}" - name: ETHEREUM_PORT value: "{{ .Values.ethereum.execution.service.port }}" + - name: EXTERNAL_ETHEREUM_CONSENSUS_HOST + value: "{{ .Values.ethereum.beacon.externalHost }}" + - name: ETHEREUM_CONSENSUS_PORT + value: "{{ .Values.ethereum.beacon.service.port }}" - name: EXTERNAL_BOOT_NODE_HOST value: "{{ .Values.bootNode.externalHost }}" - name: BOOT_NODE_PORT diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index bd21698f8d2..0eeca5e43ad 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -196,6 +196,8 @@ describe('Archiver', () => { mockInbox.read.totalMessagesInserted.mockResolvedValueOnce(2n).mockResolvedValueOnce(6n); + blocks.forEach(b => blobSinkClient.getBlobSidecar.mockResolvedValueOnce([makeBlobFromBlock(b)])); + makeMessageSentEvent(98n, 1n, 0n); makeMessageSentEvent(99n, 1n, 1n); makeL2BlockProposedEvent(101n, 1n, blocks[0].archive.root.toString()); @@ -292,6 +294,7 @@ describe('Archiver', () => { makeL2BlockProposedEvent(90n, 3n, badArchive); rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); + blocks.forEach(b => blobSinkClient.getBlobSidecar.mockResolvedValueOnce([makeBlobFromBlock(b)])); await archiver.start(false); @@ -330,6 +333,7 @@ describe('Archiver', () => { makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()); rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); + blocks.forEach(b => blobSinkClient.getBlobSidecar.mockResolvedValueOnce([makeBlobFromBlock(b)])); await archiver.start(false); @@ -378,6 +382,7 @@ describe('Archiver', () => { makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()); rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); + blocks.forEach(b => blobSinkClient.getBlobSidecar.mockResolvedValueOnce([makeBlobFromBlock(b)])); await archiver.start(false); @@ -424,6 +429,7 @@ describe('Archiver', () => { mockRollup.read.status.mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, l2Block.archive.root.toString(), GENESIS_ROOT]); makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString()); rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); + blocks.forEach(b => blobSinkClient.getBlobSidecar.mockResolvedValueOnce([makeBlobFromBlock(b)])); await archiver.start(false); @@ -454,7 +460,9 @@ describe('Archiver', () 
=> { publicClient.getBlockNumber.mockResolvedValueOnce(l1BlockForL2Block); mockRollup.read.status.mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, l2Block.archive.root.toString(), GENESIS_ROOT]); makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString()); + rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); + blocks.forEach(b => blobSinkClient.getBlobSidecar.mockResolvedValueOnce([makeBlobFromBlock(b)])); await archiver.start(false); @@ -557,3 +565,13 @@ function makeRollupTx(l2Block: L2Block) { }); return { input } as Transaction; } + +/** + * Blob response to be returned from the blob sink based on the expected block. + * @param block - The block. + * @returns The blob. + */ +function makeBlobFromBlock(block: L2Block) { + const blob = block.body.toBlobFields(); + return Blob.fromFields(blob); +} diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 410110d51e4..88269d6b863 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -68,6 +68,7 @@ import { import { type ArchiverDataStore, type ArchiverL1SynchPoint } from './archiver_store.js'; import { type ArchiverConfig } from './config.js'; import { retrieveBlocksFromRollup, retrieveL1ToL2Messages } from './data_retrieval.js'; +import { NoBlobBodiesFoundError } from './errors.js'; import { ArchiverInstrumentation } from './instrumentation.js'; import { type DataRetrieval } from './structs/data_retrieval.js'; import { type L1Published } from './structs/published.js'; @@ -117,7 +118,7 @@ export class Archiver implements ArchiveSource, Traceable { private readonly l1Addresses: { rollupAddress: EthAddress; inboxAddress: EthAddress; registryAddress: EthAddress }, readonly dataStore: ArchiverDataStore, private readonly config: { pollingIntervalMs: number; batchSize: number }, - private readonly _blobSinkClient: BlobSinkClientInterface, + private readonly blobSinkClient: BlobSinkClientInterface, private readonly instrumentation: ArchiverInstrumentation, private readonly l1constants: L1RollupConstants, private readonly log: Logger = createLogger('archiver'), @@ -200,7 +201,12 @@ export class Archiver implements ArchiveSource, Traceable { await this.sync(blockUntilSynced); } - this.runningPromise = new RunningPromise(() => this.sync(false), this.log, this.config.pollingIntervalMs); + this.runningPromise = new RunningPromise(() => this.sync(false), this.log, this.config.pollingIntervalMs, [ + // Ignored errors are not logged to the console + // We ignore NoBlobBodiesFound as the message may not have been passed to the blob sink yet + NoBlobBodiesFoundError, + ]); + + this.runningPromise.start(); } @@ -470,9 +476,12 @@ export class Archiver implements ArchiveSource, Traceable { [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber); this.log.trace(`Retrieving L2 blocks from L1 block ${searchStartBlock} to ${searchEndBlock}`); + + // TODO(md): Retrieve from blob sink then from consensus client, then from peers const retrievedBlocks = await retrieveBlocksFromRollup( this.rollup, this.publicClient, + this.blobSinkClient, searchStartBlock, // TODO(palla/reorg): If the L2 reorg was due to an L1 reorg, we need to start search earlier searchEndBlock, this.log, diff --git a/yarn-project/archiver/src/archiver/config.ts b/yarn-project/archiver/src/archiver/config.ts index 3f86dd1c12b..52a2aac3d72 100644 --- a/yarn-project/archiver/src/archiver/config.ts +++ 
b/yarn-project/archiver/src/archiver/config.ts @@ -22,7 +22,7 @@ export type ArchiverConfig = { archiverUrl?: string; /** URL for an L1 consensus client */ - l1ConsensusClientUrl: string; + l1ConsensusHostUrl?: string; /** The polling interval in ms for retrieving new L2 blocks and encrypted logs. */ archiverPollingIntervalMS?: number; @@ -47,10 +47,10 @@ export const archiverConfigMappings: ConfigMappingsType = { description: 'URL for an archiver service. If set, will return an archiver client as opposed to starting a new one.', }, - l1ConsensusClientUrl: { - env: 'L1_CONSENSUS_CLIENT_URL', + l1ConsensusHostUrl: { + env: 'L1_CONSENSUS_HOST_URL', description: 'URL for an L1 consensus client.', - parseEnv: (val: string) => (val ? val : 'http://localhost:5052'), + parseEnv: (val: string) => val, }, archiverPollingIntervalMS: { env: 'ARCHIVER_POLLING_INTERVAL_MS', diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts index ccd3a44b80b..366240c326f 100644 --- a/yarn-project/archiver/src/archiver/data_retrieval.ts +++ b/yarn-project/archiver/src/archiver/data_retrieval.ts @@ -1,3 +1,4 @@ +import { type BlobSinkClientInterface } from '@aztec/blob-sink/client'; import { Body, InboxLeaf, L2Block } from '@aztec/circuit-types'; import { AppendOnlyTreeSnapshot, BlockHeader, Fr, Proof } from '@aztec/circuits.js'; import { asyncPool } from '@aztec/foundation/async-pool'; @@ -20,6 +21,7 @@ import { hexToBytes, } from 'viem'; +import { NoBlobBodiesFoundError } from './errors.js'; import { type DataRetrieval } from './structs/data_retrieval.js'; import { type L1Published, type L1PublishedData } from './structs/published.js'; @@ -35,6 +37,7 @@ import { type L1Published, type L1PublishedData } from './structs/published.js'; export async function retrieveBlocksFromRollup( rollup: GetContractReturnType>, publicClient: PublicClient, + blobSinkClient: BlobSinkClientInterface, searchStartBlock: bigint, searchEndBlock: bigint, logger: Logger = createLogger('archiver'), @@ -63,7 +66,13 @@ export async function retrieveBlocksFromRollup( `Got ${l2BlockProposedLogs.length} L2 block processed logs for L2 blocks ${l2BlockProposedLogs[0].args.blockNumber}-${lastLog.args.blockNumber} between L1 blocks ${searchStartBlock}-${searchEndBlock}`, ); - const newBlocks = await processL2BlockProposedLogs(rollup, publicClient, l2BlockProposedLogs, logger); + const newBlocks = await processL2BlockProposedLogs( + rollup, + publicClient, + blobSinkClient, + l2BlockProposedLogs, + logger, + ); retrievedBlocks.push(...newBlocks); searchStartBlock = lastLog.blockNumber! + 1n; } while (searchStartBlock <= searchEndBlock); @@ -80,6 +89,7 @@ export async function retrieveBlocksFromRollup( export async function processL2BlockProposedLogs( rollup: GetContractReturnType>, publicClient: PublicClient, + blobSinkClient: BlobSinkClientInterface, logs: GetContractEventsReturnType, logger: Logger, ): Promise[]> { @@ -91,7 +101,7 @@ export async function processL2BlockProposedLogs( // The value from the event and contract will match only if the block is in the chain. 
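// (If they differ, the proposal event came from a reorged or pruned branch, so the block is skipped rather than fetching its blobs.)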
if (archive === archiveFromChain) { - const block = await getBlockFromRollupTx(publicClient, log.transactionHash!, l2BlockNumber); + const block = await getBlockFromRollupTx(publicClient, blobSinkClient, log.transactionHash!, l2BlockNumber); const l1: L1PublishedData = { blockNumber: log.blockNumber, @@ -127,10 +137,12 @@ export async function getL1BlockTime(publicClient: PublicClient, blockNumber: bi */ async function getBlockFromRollupTx( publicClient: PublicClient, + blobSinkClient: BlobSinkClientInterface, txHash: `0x${string}`, l2BlockNum: bigint, ): Promise { - const { input: data } = await publicClient.getTransaction({ hash: txHash }); + const { input: data, blockHash } = await publicClient.getTransaction({ hash: txHash }); + const { functionName, args } = decodeFunctionData({ abi: RollupAbi, data }); const allowedMethods = ['propose', 'proposeAndClaim']; @@ -156,12 +168,18 @@ async function getBlockFromRollupTx( ]; const header = BlockHeader.fromBuffer(Buffer.from(hexToBytes(decodedArgs.header))); + + const blobBodies = await blobSinkClient.getBlobSidecar(blockHash); + if (blobBodies.length === 0) { + throw new NoBlobBodiesFoundError(Number(l2BlockNum)); + } + + const blockFields = blobBodies.flatMap(b => b.toEncodedFields()); // TODO(#9101): Retreiving the block body from calldata is a temporary soln before we have // either a beacon chain client or link to some blob store. Web2 is ok because we will // verify the block body vs the blob as below. const blockBody = Body.fromBuffer(Buffer.from(hexToBytes(bodyHex))); - const blockFields = blockBody.toBlobFields(); // TODO(#9101): The below reconstruction is currently redundant, but once we extract blobs will be the way to construct blocks. // The blob source will give us blockFields, and we must construct the body from them: // TODO(#8954): When logs are refactored into fields, we won't need to inject them here. diff --git a/yarn-project/archiver/src/archiver/errors.ts b/yarn-project/archiver/src/archiver/errors.ts new file mode 100644 index 00000000000..6be4fb73beb --- /dev/null +++ b/yarn-project/archiver/src/archiver/errors.ts @@ -0,0 +1,5 @@ +export class NoBlobBodiesFoundError extends Error { + constructor(l2BlockNum: number) { + super(`No blob bodies found for block ${l2BlockNum}`); + } +} diff --git a/yarn-project/aztec-node/src/aztec-node/config.ts b/yarn-project/aztec-node/src/aztec-node/config.ts index 0489f18e909..826898e6377 100644 --- a/yarn-project/aztec-node/src/aztec-node/config.ts +++ b/yarn-project/aztec-node/src/aztec-node/config.ts @@ -1,4 +1,5 @@ import { type ArchiverConfig, archiverConfigMappings } from '@aztec/archiver/config'; +import { type BlobSinkConfig } from '@aztec/blob-sink/client'; import { type ConfigMappingsType, booleanConfigHelper, getConfigFromMappings } from '@aztec/foundation/config'; import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config'; import { type P2PConfig, p2pConfigMappings } from '@aztec/p2p/config'; @@ -17,6 +18,7 @@ export { sequencerClientConfigMappings, SequencerClientConfig }; * The configuration the aztec node. 
*/ export type AztecNodeConfig = ArchiverConfig & + BlobSinkConfig & SequencerClientConfig & ValidatorClientConfig & ProverClientConfig & diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 62a075c88c1..333a2bc59cc 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -152,7 +152,7 @@ export class AztecNodeService implements AztecNode, Traceable { const telemetry = deps.telemetry ?? getTelemetryClient(); const log = deps.logger ?? createLogger('node'); const dateProvider = deps.dateProvider ?? new DateProvider(); - const blobSinkClient = deps.blobSinkClient ?? createBlobSinkClient(config.blobSinkUrl); + const blobSinkClient = deps.blobSinkClient ?? createBlobSinkClient(config); const ethereumChain = createEthereumChain(config.l1RpcUrl, config.l1ChainId); //validate that the actual chain id matches that specified in configuration if (config.l1ChainId !== ethereumChain.chainInfo.id) { diff --git a/yarn-project/aztec/src/cli/aztec_start_options.ts b/yarn-project/aztec/src/cli/aztec_start_options.ts index 7758502a9a5..944a2e58782 100644 --- a/yarn-project/aztec/src/cli/aztec_start_options.ts +++ b/yarn-project/aztec/src/cli/aztec_start_options.ts @@ -1,6 +1,7 @@ import { type ArchiverConfig, archiverConfigMappings } from '@aztec/archiver/config'; import { faucetConfigMapping } from '@aztec/aztec-faucet/config'; import { sequencerClientConfigMappings } from '@aztec/aztec-node/config'; +import { blobSinkConfigMapping } from '@aztec/blob-sink/client'; import { botConfigMappings } from '@aztec/bot/config'; import { type ConfigMapping, @@ -251,6 +252,15 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { }, ...getOptions('sequencer', sequencerClientConfigMappings), ], + BLOB_SINK: [ + { + flag: '--blob-sink', + description: 'Starts Aztec Blob Sink with options', + defaultValue: undefined, + envVar: undefined, + }, + ...getOptions('blobSink', blobSinkConfigMapping), + ], 'PROVER NODE': [ { flag: '--prover-node', diff --git a/yarn-project/blob-sink/README.md b/yarn-project/blob-sink/README.md index 649e8eab867..3e6d4541e25 100644 --- a/yarn-project/blob-sink/README.md +++ b/yarn-project/blob-sink/README.md @@ -19,3 +19,14 @@ Blobs are only held in the L1 consensus layer for a period of ~3 weeks, the blob ### How? The blob sink is a simple HTTP server that can be run alongside the e2e tests. It will store the blobs in a local file system and provide an API to query for them. + +### Configurations + +If no blob sink url or consensus host url is provided: +A local version of the blob sink will be used. This stores blobs in a local file system. 
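The choice between this local mode and the two remote modes described next mirrors `createBlobSinkClient` in `src/client/factory.ts` (included in this diff). A minimal usage sketch, with ports taken from this PR's script defaults (illustrative only, not required values):

```ts
import { createBlobSinkClient } from '@aztec/blob-sink/client';

// No URLs configured: a LocalBlobSinkClient backed by an in-memory blob store.
const local = createBlobSinkClient();

// Blob sink URL configured: blobs are requested from the sink by L1 block hash.
const viaSink = createBlobSinkClient({ blobSinkUrl: 'http://localhost:5053' });

// Consensus host configured: the block hash is first resolved to a slot number
// via the execution client, then blobs are requested from the beacon node by slot.
const viaBeacon = createBlobSinkClient({
  l1RpcUrl: 'http://localhost:8545',
  l1ConsensusHostUrl: 'http://localhost:5052',
});
```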
+ +Blob sink url is provided: +If requesting from the blob sink, we send the blockHash + +Consensus host url is provided: +If requesting from the beacon node, we send the slot number diff --git a/yarn-project/blob-sink/package.json b/yarn-project/blob-sink/package.json index daa2c07ca27..e5d0bf98247 100644 --- a/yarn-project/blob-sink/package.json +++ b/yarn-project/blob-sink/package.json @@ -4,7 +4,8 @@ "type": "module", "exports": { "./server": "./dest/server/index.js", - "./client": "./dest/client/index.js" + "./client": "./dest/client/index.js", + "./encoding": "./dest/encoding/index.js" }, "inherits": [ "../package.common.json" ], @@ -56,6 +57,7 @@ "@aztec/kv-store": "workspace:*", "@aztec/telemetry-client": "workspace:*", "express": "^4.21.1", + "snappy": "^7.2.2", "source-map-support": "^0.5.21", "tslib": "^2.4.0", "zod": "^3.23.8" diff --git a/yarn-project/blob-sink/src/client/blob-sink-client-tests.ts b/yarn-project/blob-sink/src/client/blob-sink-client-tests.ts index 3e77a5ffe6b..bea94533769 100644 --- a/yarn-project/blob-sink/src/client/blob-sink-client-tests.ts +++ b/yarn-project/blob-sink/src/client/blob-sink-client-tests.ts @@ -1,5 +1,4 @@ -import { Blob } from '@aztec/foundation/blob'; -import { Fr } from '@aztec/foundation/fields'; +import { makeEncodedBlob } from '@aztec/foundation/blob'; import { type BlobSinkClientInterface } from './interface.js'; @@ -25,8 +24,7 @@ export function runBlobSinkClientTests( }); it('should send and retrieve blobs', async () => { - const testFields = [Fr.random(), Fr.random(), Fr.random()]; - const blob = Blob.fromFields(testFields); + const blob = makeEncodedBlob(3); const blockId = '0x1234'; const success = await client.sendBlobsToBlobSink(blockId, [blob]); @@ -39,11 +37,7 @@ }); it('should handle multiple blobs', async () => { - const blobs = [ - Blob.fromFields([Fr.random(), Fr.random()]), - Blob.fromFields([Fr.random(), Fr.random()]), - Blob.fromFields([Fr.random(), Fr.random()]), - ]; + const blobs = [makeEncodedBlob(2), makeEncodedBlob(2), makeEncodedBlob(2)]; const blockId = '0x5678'; const success = await client.sendBlobsToBlobSink(blockId, blobs); diff --git a/yarn-project/blob-sink/src/client/config.ts b/yarn-project/blob-sink/src/client/config.ts new file mode 100644 index 00000000000..deebc326a67 --- /dev/null +++ b/yarn-project/blob-sink/src/client/config.ts @@ -0,0 +1,44 @@ +import { type ConfigMappingsType, getConfigFromMappings } from '@aztec/foundation/config'; + +/** + * The configuration for the blob sink client + */ +export interface BlobSinkConfig { + /** + * The URL of the blob sink + */ + blobSinkUrl?: string; + + /** + * The URL of the L1 RPC Execution client + */ + l1RpcUrl?: string; + + /** + * The URL of the L1 consensus client + */ + l1ConsensusHostUrl?: string; +} + +export const blobSinkConfigMapping: ConfigMappingsType = { + blobSinkUrl: { + env: 'BLOB_SINK_URL', + description: 'The URL of the blob sink', + }, + l1RpcUrl: { + env: 'ETHEREUM_HOST', + description: 'The URL of the L1 RPC Execution client', + }, + l1ConsensusHostUrl: { + env: 'L1_CONSENSUS_HOST_URL', + description: 'The URL of the L1 consensus client', + }, +}; + +/** + * Returns the blob sink configuration from the environment variables. + * @returns The blob sink configuration. 
+ */ +export function getBlobSinkConfigFromEnv(): BlobSinkConfig { + return getConfigFromMappings(blobSinkConfigMapping); +} diff --git a/yarn-project/blob-sink/src/client/factory.ts b/yarn-project/blob-sink/src/client/factory.ts index 796746a0ed8..16f8eaa3ac4 100644 --- a/yarn-project/blob-sink/src/client/factory.ts +++ b/yarn-project/blob-sink/src/client/factory.ts @@ -1,13 +1,14 @@ import { MemoryBlobStore } from '../blobstore/memory_blob_store.js'; +import { type BlobSinkConfig } from './config.js'; import { HttpBlobSinkClient } from './http.js'; import { type BlobSinkClientInterface } from './interface.js'; import { LocalBlobSinkClient } from './local.js'; -export function createBlobSinkClient(blobSinkUrl?: string): BlobSinkClientInterface { - if (!blobSinkUrl) { +export function createBlobSinkClient(config?: BlobSinkConfig): BlobSinkClientInterface { + if (!config?.blobSinkUrl && !config?.l1ConsensusHostUrl) { const blobStore = new MemoryBlobStore(); return new LocalBlobSinkClient(blobStore); } - return new HttpBlobSinkClient(blobSinkUrl); + return new HttpBlobSinkClient(config); } diff --git a/yarn-project/blob-sink/src/client/http.test.ts b/yarn-project/blob-sink/src/client/http.test.ts index 046fb0811d0..f652c719535 100644 --- a/yarn-project/blob-sink/src/client/http.test.ts +++ b/yarn-project/blob-sink/src/client/http.test.ts @@ -1,6 +1,10 @@ -import { Blob } from '@aztec/foundation/blob'; +import { Blob, makeEncodedBlob } from '@aztec/foundation/blob'; import { Fr } from '@aztec/foundation/fields'; +import { jest } from '@jest/globals'; +import http from 'http'; +import { type AddressInfo } from 'net'; + import { BlobSinkServer } from '../server/server.js'; import { runBlobSinkClientTests } from './blob-sink-client-tests.js'; import { HttpBlobSinkClient } from './http.js'; @@ -12,7 +16,9 @@ describe('HttpBlobSinkClient', () => { }); await server.start(); - const client = new HttpBlobSinkClient(`http://localhost:${server.port}`); + const client = new HttpBlobSinkClient({ + blobSinkUrl: `http://localhost:${server.port}`, + }); return { client, @@ -23,7 +29,7 @@ describe('HttpBlobSinkClient', () => { }); it('should handle server connection errors gracefully', async () => { - const client = new HttpBlobSinkClient('http://localhost:12345'); // Invalid port + const client = new HttpBlobSinkClient({ blobSinkUrl: 'http://localhost:12345' }); // Invalid port const blob = Blob.fromFields([Fr.random()]); const success = await client.sendBlobsToBlobSink('0x1234', [blob]); @@ -32,4 +38,130 @@ describe('HttpBlobSinkClient', () => { const retrievedBlobs = await client.getBlobSidecar('0x1234'); expect(retrievedBlobs).toEqual([]); }); + + describe('Mock Ethereum Clients', () => { + let blobSinkServer: BlobSinkServer; + + let testBlob: Blob; + + let executionHostServer: http.Server | undefined = undefined; + let executionHostPort: number | undefined = undefined; + + let consensusHostServer: http.Server | undefined = undefined; + let consensusHostPort: number | undefined = undefined; + + const MOCK_SLOT_NUMBER = 1; + + beforeEach(() => { + testBlob = makeEncodedBlob(3); + }); + + const startExecutionHostServer = (): Promise => { + executionHostServer = http.createServer((req, res) => { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ result: { parentBeaconBlockRoot: '0x1234' } })); + }); + + return new Promise((resolve, _reject) => { + executionHostServer?.listen(0, () => { + executionHostPort = (executionHostServer?.address() as AddressInfo).port; + 
resolve(); + }); + }); + }; + + const startConsensusHostServer = (): Promise => { + consensusHostServer = http.createServer((req, res) => { + if (req.url?.includes('/eth/v1/beacon/headers/')) { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ data: { header: { message: { slot: MOCK_SLOT_NUMBER } } } })); + } else if (req.url?.includes('/eth/v1/beacon/blob_sidecars/')) { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end( + JSON.stringify({ + data: [ + { + index: 0, + blob: `0x${Buffer.from(testBlob.data).toString('hex')}`, + // eslint-disable-next-line camelcase + kzg_commitment: `0x${testBlob.commitment.toString('hex')}`, + // eslint-disable-next-line camelcase + kzg_proof: `0x${testBlob.proof.toString('hex')}`, + }, + ], + }), + ); + } else { + res.writeHead(404, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Not Found' })); + } + }); + + return new Promise((resolve, _reject) => { + consensusHostServer?.listen(0, () => { + consensusHostPort = (consensusHostServer?.address() as AddressInfo).port; + resolve(); + }); + }); + }; + + afterEach(async () => { + await blobSinkServer?.stop(); + executionHostServer?.close(); + consensusHostServer?.close(); + + executionHostPort = undefined; + consensusHostPort = undefined; + }); + + // When the consensus host is not responding, we should still be able to request blobs with the block hash + it('should handle no consensus host', async () => { + blobSinkServer = new BlobSinkServer({ + port: 0, + }); + await blobSinkServer.start(); + + const blobSinkSpy = jest.spyOn((blobSinkServer as any).blobStore, 'getBlobSidecars'); + + await startExecutionHostServer(); + + const client = new HttpBlobSinkClient({ + blobSinkUrl: `http://localhost:${blobSinkServer.port}`, + l1RpcUrl: `http://localhost:${executionHostPort}`, + }); + + const success = await client.sendBlobsToBlobSink('0x1234', [testBlob]); + expect(success).toBe(true); + + const retrievedBlobs = await client.getBlobSidecar('0x1234'); + expect(retrievedBlobs).toEqual([testBlob]); + + // Check that the blob sink was called with the correct block hash and no index + expect(blobSinkSpy).toHaveBeenCalledWith('0x1234', undefined); + }); + + // When the consensus host is responding, we should request blobs from the consensus host + // based on the slot number + it('should request based on slot where consensus host is provided', async () => { + blobSinkServer = new BlobSinkServer({ + port: 0, + }); + await blobSinkServer.start(); + + await startExecutionHostServer(); + await startConsensusHostServer(); + + const client = new HttpBlobSinkClient({ + blobSinkUrl: `http://localhost:${blobSinkServer.port}`, + l1RpcUrl: `http://localhost:${executionHostPort}`, + l1ConsensusHostUrl: `http://localhost:${consensusHostPort}`, + }); + + const success = await client.sendBlobsToBlobSink('0x1234', [testBlob]); + expect(success).toBe(true); + + const retrievedBlobs = await client.getBlobSidecar('0x1234'); + expect(retrievedBlobs).toEqual([testBlob]); + }); + }); }); diff --git a/yarn-project/blob-sink/src/client/http.ts b/yarn-project/blob-sink/src/client/http.ts index 444772a2e9c..d49f13a4647 100644 --- a/yarn-project/blob-sink/src/client/http.ts +++ b/yarn-project/blob-sink/src/client/http.ts @@ -1,26 +1,30 @@ -import { Blob } from '@aztec/foundation/blob'; +import { Blob, type BlobJson } from '@aztec/foundation/blob'; import { type Logger, createLogger } from '@aztec/foundation/log'; +import { outboundTransform } from 
'../encoding/index.js'; +import { type BlobSinkConfig, getBlobSinkConfigFromEnv } from './config.js'; import { type BlobSinkClientInterface } from './interface.js'; export class HttpBlobSinkClient implements BlobSinkClientInterface { private readonly log: Logger; + private readonly config: BlobSinkConfig; - constructor(private readonly blobSinkUrl: string) { + constructor(config?: BlobSinkConfig) { + this.config = config ?? getBlobSinkConfigFromEnv(); this.log = createLogger('aztec:blob-sink-client'); } public async sendBlobsToBlobSink(blockHash: string, blobs: Blob[]): Promise { // TODO(md): for now we are assuming the indexes of the blobs will be 0, 1, 2 // When in reality they will not, but for testing purposes this is fine - if (!this.blobSinkUrl) { + if (!this.config.blobSinkUrl) { this.log.verbose('No blob sink url configured'); return false; } this.log.verbose(`Sending ${blobs.length} blobs to blob sink`); try { - const res = await fetch(`${this.blobSinkUrl}/blob_sidecar`, { + const res = await fetch(`${this.config.blobSinkUrl}/blob_sidecar`, { method: 'POST', headers: { 'Content-Type': 'application/json', @@ -28,7 +32,8 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface { body: JSON.stringify({ // eslint-disable-next-line camelcase block_id: blockHash, - blobs: blobs.map((b, i) => ({ blob: b.toBuffer(), index: i })), + // Snappy compress the blob buffer + blobs: blobs.map((b, i) => ({ blob: outboundTransform(b.toBuffer()), index: i })), }), }); @@ -44,14 +49,57 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface { } } + /** + * Get the blob sidecar + * + * If requesting from the blob sink, we send the blockHash + * If requesting from the beacon node, we send the slot number + * + * 1. First attempts to get blobs from a configured blob sink + * 2. If no blob sink is configured, attempts to get blobs from a configured consensus host + + * // TODO(md): blow up? + * 3. 
If none configured, fails + * + * @param blockHash - The block hash + * @param indices - The indices of the blobs to get + * @returns The blobs + */ public async getBlobSidecar(blockHash: string, indices?: number[]): Promise { - if (!this.blobSinkUrl) { - this.log.verbose('No blob sink url configured'); - return []; + if (this.config.blobSinkUrl) { + this.log.debug('Getting blob sidecar from blob sink'); + const blobs = await this.getBlobSidecarFrom(this.config.blobSinkUrl, blockHash, indices); + if (blobs.length > 0) { + this.log.debug(`Got ${blobs.length} blobs from blob sink`); + return blobs; + } } + if (this.config.l1ConsensusHostUrl) { + // The beacon api can query by slot number, so we get that first + this.log.debug('Getting slot number from consensus host'); + const slotNumber = await this.getSlotNumber(blockHash); + if (slotNumber) { + const blobs = await this.getBlobSidecarFrom(this.config.l1ConsensusHostUrl, slotNumber, indices); + if (blobs.length > 0) { + this.log.debug(`Got ${blobs.length} blobs from consensus host`); + return blobs; + } + } + } + + this.log.verbose('No blob sources available'); + return []; + } + + public async getBlobSidecarFrom( + hostUrl: string, + blockHashOrSlot: string | number, + indices?: number[], + ): Promise { + // TODO(md): right now we assume all blobs are ours, this will not yet work on sepolia try { - let url = `${this.blobSinkUrl}/eth/v1/beacon/blob_sidecars/${blockHash}`; + let url = `${hostUrl}/eth/v1/beacon/blob_sidecars/${blockHashOrSlot}`; if (indices && indices.length > 0) { url += `?indices=${indices.join(',')}`; } @@ -60,17 +108,85 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface { if (res.ok) { const body = await res.json(); - const blobs = body.data.map((b: { blob: string; index: number }) => - Blob.fromBuffer(Buffer.from(b.blob, 'hex')), - ); + const blobs = body.data.map((b: BlobJson) => Blob.fromJson(b)); return blobs; } - this.log.error('Failed to get blob sidecar', res.status); + this.log.debug(`Unable to get blob sidecar`, res.status); return []; - } catch (err) { - this.log.error(`Error getting blob sidecar`, err); + } catch (err: any) { + this.log.error(`Unable to get blob sidecar`, err.message); return []; } } + + /** + * Get the slot number from the consensus host + * As of eip-4788, the parentBeaconBlockRoot is included in the execution layer. + * This allows us to query the consensus layer for the slot number of the parent block, which we will then use + * to request blobs from the consensus layer. + * + * If this returns undefined, it means that we are not connected to a real consensus host, and we should + * query blobs with the blockHash. 
+ * + * If this returns a number, then we should query blobs with the slot number + * + * @param blockHash - The block hash + * @returns The slot number + */ + private async getSlotNumber(blockHash: string): Promise { + if (!this.config.l1ConsensusHostUrl) { + this.log.debug('No consensus host url configured'); + return undefined; + } + + if (!this.config.l1RpcUrl) { + this.log.debug('No execution host url configured'); + return undefined; + } + + // Ping execution node to get the parentBeaconBlockRoot for this block + let parentBeaconBlockRoot: string | undefined; + try { + const res = await fetch(`${this.config.l1RpcUrl}`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + jsonrpc: '2.0', + method: 'eth_getBlockByHash', + params: [blockHash, /*tx flag*/ false], + id: 1, + }), + }); + + if (res.ok) { + const body = await res.json(); + parentBeaconBlockRoot = body.result.parentBeaconBlockRoot; + } + } catch (err) { + this.log.error(`Error getting parent beacon block root`, err); + } + + if (!parentBeaconBlockRoot) { + this.log.error(`No parent beacon block root found for block ${blockHash}`); + return undefined; + } + + // Query beacon chain to get the slot number for that block root + try { + const res = await fetch(`${this.config.l1ConsensusHostUrl}/eth/v1/beacon/headers/${parentBeaconBlockRoot}`); + if (res.ok) { + const body = await res.json(); + + // Add one to get the slot number of the original block hash + return Number(body.data.header.message.slot) + 1; + } + } catch (err) { + this.log.error(`Error getting slot number`, err); + } + + return undefined; + } } diff --git a/yarn-project/blob-sink/src/client/index.ts b/yarn-project/blob-sink/src/client/index.ts index 30172c11e39..b25b5fcd0f8 100644 --- a/yarn-project/blob-sink/src/client/index.ts +++ b/yarn-project/blob-sink/src/client/index.ts @@ -2,3 +2,4 @@ export * from './http.js'; export * from './local.js'; export * from './interface.js'; export * from './factory.js'; +export * from './config.js'; diff --git a/yarn-project/blob-sink/src/encoding/index.ts b/yarn-project/blob-sink/src/encoding/index.ts new file mode 100644 index 00000000000..d41989c8f35 --- /dev/null +++ b/yarn-project/blob-sink/src/encoding/index.ts @@ -0,0 +1,21 @@ +import { compressSync, uncompressSync } from 'snappy'; + +/** + * Snappy decompress the blob buffer + * + * @param data - The blob buffer + * @returns The decompressed blob buffer + */ +export function inboundTransform(data: Buffer): Buffer { + return Buffer.from(uncompressSync(data, { asBuffer: true })); +} + +/** + * Snappy compress the blob buffer + * + * @param data - The blob buffer + * @returns The compressed blob buffer + */ +export function outboundTransform(data: Buffer): Buffer { + return Buffer.from(compressSync(data)); +} diff --git a/yarn-project/blob-sink/src/server/config.ts b/yarn-project/blob-sink/src/server/config.ts index e18311f9f1d..faad4dc7b9c 100644 --- a/yarn-project/blob-sink/src/server/config.ts +++ b/yarn-project/blob-sink/src/server/config.ts @@ -1,7 +1,27 @@ -import { type DataStoreConfig } from '@aztec/kv-store/config'; +import { type ConfigMappingsType, getConfigFromMappings } from '@aztec/foundation/config'; +import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config'; export interface BlobSinkConfig { port?: number; dataStoreConfig?: DataStoreConfig; - otelMetricsCollectorUrl?: string; +} + +export const blobSinkConfigMappings: ConfigMappingsType = { + port: { + env: 'BLOB_SINK_PORT', + 
description: 'The port to run the blob sink server on', + }, + dataStoreConfig: { + ...dataConfigMappings, + description: 'The configuration for the data store', + }, +}; + +/** + * Returns the blob sink configuration from the environment variables. + * Note: If an environment variable is not set, the default value is used. + * @returns The blob sink configuration. + */ +export function getBlobSinkConfigFromEnv(): BlobSinkConfig { + return getConfigFromMappings(blobSinkConfigMappings); } diff --git a/yarn-project/blob-sink/src/server/run.ts b/yarn-project/blob-sink/src/server/run.ts new file mode 100644 index 00000000000..3c0239d5c9c --- /dev/null +++ b/yarn-project/blob-sink/src/server/run.ts @@ -0,0 +1,26 @@ +// Run a standalone blob sink server +import { createLogger } from '@aztec/foundation/log'; + +import { getBlobSinkConfigFromEnv } from './config.js'; +import { createBlobSinkServer } from './factory.js'; + +const logger = createLogger('aztec:blob-sink'); + +async function main() { + const config = getBlobSinkConfigFromEnv(); + + const blobSinkServer = await createBlobSinkServer(config); + + await blobSinkServer.start(); + + const stop = async () => { + logger.debug('Stopping Blob Sink...'); + await blobSinkServer.stop(); + logger.info('Node stopped'); + process.exit(0); + }; + process.on('SIGTERM', stop); + process.on('SIGINT', stop); +} + +void main(); diff --git a/yarn-project/blob-sink/src/server/server.test.ts b/yarn-project/blob-sink/src/server/server.test.ts index 5ee0debe280..cc0571e9500 100644 --- a/yarn-project/blob-sink/src/server/server.test.ts +++ b/yarn-project/blob-sink/src/server/server.test.ts @@ -1,8 +1,8 @@ -import { Blob } from '@aztec/foundation/blob'; -import { Fr } from '@aztec/foundation/fields'; +import { Blob, makeEncodedBlob } from '@aztec/foundation/blob'; import request from 'supertest'; +import { outboundTransform } from '../encoding/index.js'; import { BlobSinkServer } from './server.js'; describe('BlobSinkService', () => { @@ -20,10 +20,8 @@ describe('BlobSinkService', () => { }); describe('should store and retrieve a blob sidecar', () => { - const testFields = [Fr.random(), Fr.random(), Fr.random()]; - const testFields2 = [Fr.random(), Fr.random(), Fr.random()]; - const blob = Blob.fromFields(testFields); - const blob2 = Blob.fromFields(testFields2); + const blob = makeEncodedBlob(3); + const blob2 = makeEncodedBlob(3); const blockId = '0x1234'; beforeEach(async () => { @@ -36,11 +34,11 @@ describe('BlobSinkService', () => { blobs: [ { index: 0, - blob: blob.toBuffer(), + blob: outboundTransform(blob.toBuffer()), }, { index: 1, - blob: blob2.toBuffer(), + blob: outboundTransform(blob2.toBuffer()), }, ], }); @@ -56,13 +54,16 @@ describe('BlobSinkService', () => { // Convert the response blob back to a Blob object and verify it matches const retrievedBlobs = getResponse.body.data; + const retrievedBlob = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex')); + const retrievedBlob2 = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[1].blob.slice(2), 'hex')); - const retrievedBlob = Blob.fromBuffer(Buffer.from(retrievedBlobs[0].blob, 'hex')); - const retrievedBlob2 = Blob.fromBuffer(Buffer.from(retrievedBlobs[1].blob, 'hex')); expect(retrievedBlob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); expect(retrievedBlob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + expect(retrievedBlob.proof.toString('hex')).toBe(blob.proof.toString('hex')); + 
expect(retrievedBlob2.fieldsHash.toString()).toBe(blob2.fieldsHash.toString()); expect(retrievedBlob2.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + expect(retrievedBlob2.proof.toString('hex')).toBe(blob2.proof.toString('hex')); }); it('should retrieve specific indicies', async () => { @@ -75,12 +76,15 @@ describe('BlobSinkService', () => { expect(getWithIndicies.body.data.length).toBe(2); const retrievedBlobs = getWithIndicies.body.data; - const retrievedBlob = Blob.fromBuffer(Buffer.from(retrievedBlobs[0].blob, 'hex')); - const retrievedBlob2 = Blob.fromBuffer(Buffer.from(retrievedBlobs[1].blob, 'hex')); + const retrievedBlob = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex')); + const retrievedBlob2 = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[1].blob.slice(2), 'hex')); expect(retrievedBlob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); expect(retrievedBlob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + expect(retrievedBlob.proof.toString('hex')).toBe(blob.proof.toString('hex')); + expect(retrievedBlob2.fieldsHash.toString()).toBe(blob2.fieldsHash.toString()); expect(retrievedBlob2.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + expect(retrievedBlob2.proof.toString('hex')).toBe(blob2.proof.toString('hex')); }); it('should retrieve a single index', async () => { @@ -90,9 +94,10 @@ describe('BlobSinkService', () => { expect(getWithIndicies.body.data.length).toBe(1); const retrievedBlobs = getWithIndicies.body.data; - const retrievedBlob = Blob.fromBuffer(Buffer.from(retrievedBlobs[0].blob, 'hex')); + const retrievedBlob = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex')); expect(retrievedBlob.fieldsHash.toString()).toBe(blob2.fieldsHash.toString()); expect(retrievedBlob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + expect(retrievedBlob.proof.toString('hex')).toBe(blob2.proof.toString('hex')); }); }); diff --git a/yarn-project/blob-sink/src/server/server.ts b/yarn-project/blob-sink/src/server/server.ts index 2103750bbd1..a4fe4177afd 100644 --- a/yarn-project/blob-sink/src/server/server.ts +++ b/yarn-project/blob-sink/src/server/server.ts @@ -10,6 +10,7 @@ import { z } from 'zod'; import { type BlobStore, DiskBlobStore } from '../blobstore/index.js'; import { MemoryBlobStore } from '../blobstore/memory_blob_store.js'; +import { inboundTransform } from '../encoding/index.js'; import { type PostBlobSidecarRequest, blockIdSchema, indicesSchema } from '../types/api.js'; import { BlobWithIndex } from '../types/index.js'; import { type BlobSinkConfig } from './config.js'; @@ -47,10 +48,30 @@ export class BlobSinkServer { } private setupRoutes() { + // TODO(md): needed? + this.app.get('/eth/v1/beacon/headers/:block_id', this.handleGetBlockHeader.bind(this)); this.app.get('/eth/v1/beacon/blob_sidecars/:block_id', this.handleGetBlobSidecar.bind(this)); this.app.post('/blob_sidecar', this.handlePostBlobSidecar.bind(this)); } + // TODO(md): needed? + /** + * This is a placeholder for the block header endpoint. + * It is not supported by the blob sink. 
+ * + * The blob sink http client will ping this endpoint to check if it is talking to a beacon node + * or a blob sink + * + * @param _req - The request object + * @param res - The response object + */ + private handleGetBlockHeader(_req: Request, res: Response) { + res.status(400).json({ + error: 'Not Supported', + }); + return; + } + private async handleGetBlobSidecar(req: Request, res: Response) { // eslint-disable-next-line camelcase const { block_id } = req.params; @@ -130,8 +151,23 @@ export class BlobSinkServer { } } + /** + * Parse the blob data + * + * The blob sink http client will compress the blobs it sends + * + * @param blobs - The blob data + * @returns The parsed blob data + */ private parseBlobData(blobs: PostBlobSidecarRequest['blobs']): BlobWithIndex[] { - return blobs.map(({ index, blob }) => new BlobWithIndex(Blob.fromBuffer(Buffer.from(blob.data)), index)); + return blobs.map( + ({ index, blob }) => + new BlobWithIndex( + // Snappy decompress the blob buffer + Blob.fromBuffer(inboundTransform(Buffer.from(blob.data))), + index, + ), + ); } public start(): Promise { diff --git a/yarn-project/blob-sink/src/types/blob_with_index.ts b/yarn-project/blob-sink/src/types/blob_with_index.ts index 60446f2ff16..e3a217be528 100644 --- a/yarn-project/blob-sink/src/types/blob_with_index.ts +++ b/yarn-project/blob-sink/src/types/blob_with_index.ts @@ -1,4 +1,4 @@ -import { Blob } from '@aztec/foundation/blob'; +import { Blob, type BlobJson } from '@aztec/foundation/blob'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; /** Serialized an array of blobs with their indexes to be stored at a given block id */ @@ -38,14 +38,7 @@ export class BlobWithIndex { } // Follows the structure the beacon node api expects - public toJSON(): { blob: string; index: number; kzg_commitment: string; kzg_proof: string } { - return { - blob: this.blob.toBuffer().toString('hex'), - index: this.index, - // eslint-disable-next-line camelcase - kzg_commitment: this.blob.commitment.toString('hex'), - // eslint-disable-next-line camelcase - kzg_proof: this.blob.proof.toString('hex'), - }; + public toJSON(): BlobJson { + return this.blob.toJson(this.index); } } diff --git a/yarn-project/circuit-types/src/tx_effect.ts b/yarn-project/circuit-types/src/tx_effect.ts index 59ce8624480..6255d13c274 100644 --- a/yarn-project/circuit-types/src/tx_effect.ts +++ b/yarn-project/circuit-types/src/tx_effect.ts @@ -20,10 +20,10 @@ import { REVERT_CODE_PREFIX, RevertCode, TX_FEE_PREFIX, - TX_START_PREFIX, } from '@aztec/circuits.js'; import { type FieldsOf, makeTuple, makeTupleAsync } from '@aztec/foundation/array'; import { toBufferBE } from '@aztec/foundation/bigint-buffer'; +import { TX_EFFECT_PREFIX_BYTE_LENGTH, TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH } from '@aztec/foundation/blob'; import { padArrayEnd } from '@aztec/foundation/collection'; import { sha256Trunc } from '@aztec/foundation/crypto'; import { jsonStringify } from '@aztec/foundation/json-rpc'; @@ -44,11 +44,6 @@ import { TxHash } from './tx/tx_hash.js'; export { RevertCodeEnum } from '@aztec/circuits.js'; -// These are helper constants to decode tx effects from blob encoded fields -const TX_START_PREFIX_BYTES_LENGTH = TX_START_PREFIX.toString(16).length / 2; -// 7 bytes for: | 0 | txlen[0] | txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revertCode | -const TX_EFFECT_PREFIX_BYTE_LENGTH = TX_START_PREFIX_BYTES_LENGTH + 7; - export class TxEffect { constructor( /** diff --git 
a/yarn-project/end-to-end/scripts/native-network/blob-sink.sh b/yarn-project/end-to-end/scripts/native-network/blob-sink.sh new file mode 100755 index 00000000000..2c5348a8865 --- /dev/null +++ b/yarn-project/end-to-end/scripts/native-network/blob-sink.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -eu + +REPO=$(git rev-parse --show-toplevel) + +# Starts the Blob Sink +export BLOB_SINK_PORT=${BLOB_SINK_PORT:-5053} +export DEBUG=${DEBUG:-"aztec:*"} +export DEBUG_COLORS=${DEBUG_COLORS:-1} + +node --no-warnings "$REPO"/yarn-project/blob-sink/dest/server/run.js \ No newline at end of file diff --git a/yarn-project/end-to-end/scripts/native-network/boot-node.sh b/yarn-project/end-to-end/scripts/native-network/boot-node.sh index b70d0bc1ea4..d84c1886d44 100755 --- a/yarn-project/end-to-end/scripts/native-network/boot-node.sh +++ b/yarn-project/end-to-end/scripts/native-network/boot-node.sh @@ -1,12 +1,15 @@ #!/bin/bash set -eu +REPO=$(git rev-parse --show-toplevel) + # Get the name of the script without the path and extension SCRIPT_NAME=$(basename "$0" .sh) # Redirect stdout and stderr to .log while also printing to the console exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log" >&2) + # Starts the Boot Node # Set environment variables @@ -14,8 +17,10 @@ export PORT=${PORT:-"8080"} export DEBUG=${DEBUG:-""} export LOG_LEVEL=${LOG_LEVEL:-"verbose"} export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} +export L1_CONSENSUS_HOST_URL=${L1_CONSENSUS_HOST_URL:-} export P2P_ENABLED="true" export VALIDATOR_DISABLED="true" +export BLOB_SINK_URL="http://127.0.0.1:${BLOB_SINK_PORT:-5053}" export SEQ_MAX_SECONDS_BETWEEN_BLOCKS="0" export SEQ_MIN_TX_PER_BLOCK="1" export P2P_TCP_ANNOUNCE_ADDR="127.0.0.1:40400" @@ -28,7 +33,6 @@ export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT="${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-}" export OTEL_RESOURCE_ATTRIBUTES="service.name=boot-node" export VALIDATOR_PRIVATE_KEY=${VALIDATOR_PRIVATE_KEY:-"0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a"} -REPO=$(git rev-parse --show-toplevel) echo "Waiting for l1 contracts to be deployed..." until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env ]; do @@ -38,6 +42,10 @@ echo "Done waiting." source "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env +# Get the chain ID from the Ethereum node +source "$REPO"/yarn-project/end-to-end/scripts/native-network/utils/get-chain-id.sh +export L1_CHAIN_ID=${L1_CHAIN_ID:-31337} + function filter_noise() { grep -Ev "node_getProvenBlockNumber|getBlocks|Last block mined|Running random nodes query|Not creating block because not enough txs in the pool|Peers to connect" } diff --git a/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh b/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh index 9c87ef3332c..d26ebf0321b 100755 --- a/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh +++ b/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh @@ -22,7 +22,7 @@ else fi export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} -export L1_CHAIN_ID=${L1_CHAIN_ID:-"31337"} +# Remove hardcoded L1_CHAIN_ID and fetch it from the node export PRIVATE_KEY=${PRIVATE_KEY:-""} export SALT=${SALT:-"1337"} @@ -34,6 +34,9 @@ until curl -s -X POST -H 'Content-Type: application/json' \ done echo "Done waiting." 
+# Fetch chain ID from the Ethereum node +source "$REPO"/yarn-project/end-to-end/scripts/native-network/utils/get-chain-id.sh + # Construct base command COMMAND="node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js \ deploy-l1-contracts \ @@ -47,7 +50,17 @@ COMMAND="node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/ # Add private key if provided [ -n "$PRIVATE_KEY" ] && COMMAND="$COMMAND --private-key $PRIVATE_KEY" -output=$($COMMAND) +MAX_RETRIES=5 +RETRY_DELAY=24 + +for attempt in $(seq 1 $MAX_RETRIES); do + output=$(eval $COMMAND) && break + echo "Attempt $attempt failed. Retrying in $RETRY_DELAY seconds..." + sleep "$RETRY_DELAY" +done || { + echo "All l1 contract deploy attempts failed." + exit 1 +} echo "$output" diff --git a/yarn-project/end-to-end/scripts/native-network/deploy-l2-contracts.sh b/yarn-project/end-to-end/scripts/native-network/deploy-l2-contracts.sh index a38d0584479..aef9240473d 100755 --- a/yarn-project/end-to-end/scripts/native-network/deploy-l2-contracts.sh +++ b/yarn-project/end-to-end/scripts/native-network/deploy-l2-contracts.sh @@ -2,6 +2,7 @@ # Get the name of the script without the path and extension SCRIPT_NAME=$(basename "$0" .sh) +REPO=$(git rev-parse --show-toplevel) # Redirect stdout and stderr to .log while also printing to the console exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log" >&2) @@ -19,14 +20,19 @@ until curl -s -X POST -H 'content-type: application/json' \ done echo "Done waiting." +# Get the chain ID from the Ethereum node +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} +source "$REPO"/yarn-project/end-to-end/scripts/native-network/utils/get-chain-id.sh +export L1_CHAIN_ID=${L1_CHAIN_ID:-31337} + # TODO(AD): Add option for prover-enabled mode -ARGS="--skipProofWait" +ARGS="--skipProofWait --l1-chain-id $L1_CHAIN_ID" # Deploy L2 contracts export AZTEC_NODE_URL="http://127.0.0.1:8080" export PXE_URL="http://127.0.0.1:8079" -node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js setup-protocol-contracts $ARGS +node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js setup-protocol-contracts $ARGS echo "Deployed L2 contracts" # Use file just as done signal echo "" > state/l2-contracts.env -echo "Wrote to state/l2-contracts.env to signal completion" \ No newline at end of file +echo "Wrote to state/l2-contracts.env to signal completion" diff --git a/yarn-project/end-to-end/scripts/native-network/prover-node.sh b/yarn-project/end-to-end/scripts/native-network/prover-node.sh index 08a4c748855..32998b5e43d 100755 --- a/yarn-project/end-to-end/scripts/native-network/prover-node.sh +++ b/yarn-project/end-to-end/scripts/native-network/prover-node.sh @@ -17,6 +17,11 @@ echo "Waiting for l1 contracts to be deployed..." until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env ]; do sleep 1 done + +# Get the chain ID from the Ethereum node +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} +source "$REPO"/yarn-project/end-to-end/scripts/native-network/utils/get-chain-id.sh + echo "Waiting for Aztec Node..." 
until curl -s http://127.0.0.1:8080/status >/dev/null; do sleep 1 @@ -34,11 +39,12 @@ export BOOTSTRAP_NODES=$(echo "$output" | grep -oP 'Node ENR: \K.*') # Set environment variables export LOG_LEVEL=${LOG_LEVEL:-"verbose"} export DEBUG=${DEBUG:-""} -export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} +export L1_CONSENSUS_HOST_URL=${L1_CONSENSUS_HOST_URL:-} export PROVER_AGENT_COUNT="1" export PROVER_AGENT_ENABLED="true" export PROVER_PUBLISHER_PRIVATE_KEY=${PROVER_PUBLISHER_PRIVATE_KEY:-"0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"} export PROVER_COORDINATION_NODE_URL="http://127.0.0.1:8080" +export BLOB_SINK_URL="http://127.0.0.1:${BLOB_SINK_PORT:-5053}" export AZTEC_NODE_URL="http://127.0.0.1:8080" export OTEL_RESOURCE_ATTRIBUTES="service.name=prover-node-${PORT}" export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOINT:-}" diff --git a/yarn-project/end-to-end/scripts/native-network/pxe.sh b/yarn-project/end-to-end/scripts/native-network/pxe.sh index ee1fbca03e5..ce4992a91c0 100755 --- a/yarn-project/end-to-end/scripts/native-network/pxe.sh +++ b/yarn-project/end-to-end/scripts/native-network/pxe.sh @@ -3,6 +3,7 @@ set -eu # Get the name of the script without the path and extension SCRIPT_NAME=$(basename "$0" .sh) +REPO=$(git rev-parse --show-toplevel) # Redirect stdout and stderr to .log while also printing to the console exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log" >&2) @@ -10,6 +11,7 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname # Starts the PXE (Private eXecution Environment) service # Set environment variables export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} +export L1_CONSENSUS_HOST_URL=${L1_CONSENSUS_HOST_URL:-} export AZTEC_NODE_URL=${AZTEC_NODE_URL:-"http://127.0.0.1:8080"} export VALIDATOR_NODE_URL=${VALIDATOR_NODE_URL:-"http://127.0.0.1:8081"} export LOG_LEVEL=${LOG_LEVEL:-"verbose"} @@ -18,6 +20,11 @@ echo "Waiting for Aztec Node..." until curl -s $AZTEC_NODE_URL/status >/dev/null; do sleep 1 done + +# Get the chain ID from the Ethereum node +source "$REPO"/yarn-project/end-to-end/scripts/native-network/utils/get-chain-id.sh +export L1_CHAIN_ID=${L1_CHAIN_ID:-31337} + # We need to also wait for the validator, as the initial node cannot # Produce blocks on it's own echo "Waiting for Validator 0..." 
@@ -31,4 +38,4 @@ function filter_noise() { } # Start the PXE service -node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js start --port=8079 --pxe 2>&1 | filter_noise +node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --port=8079 --pxe 2>&1 | filter_noise diff --git a/yarn-project/end-to-end/scripts/native-network/utils/get-chain-id.sh b/yarn-project/end-to-end/scripts/native-network/utils/get-chain-id.sh new file mode 100755 index 00000000000..13ee498f521 --- /dev/null +++ b/yarn-project/end-to-end/scripts/native-network/utils/get-chain-id.sh @@ -0,0 +1,6 @@ + +export L1_CHAIN_ID=$(curl -s -X POST -H "Content-Type: application/json" \ + --data '{"jsonrpc":"2.0","method":"eth_chainId","params":[],"id":1}' \ + $ETHEREUM_HOST | grep -o '"result":"0x[^"]*"' | cut -d'"' -f4 | xargs printf "%d\n") + +echo "Using L1 chain ID: $L1_CHAIN_ID" \ No newline at end of file diff --git a/yarn-project/end-to-end/scripts/native-network/validator.sh b/yarn-project/end-to-end/scripts/native-network/validator.sh index f2b7a7ee5a5..adb98ab5931 100755 --- a/yarn-project/end-to-end/scripts/native-network/validator.sh +++ b/yarn-project/end-to-end/scripts/native-network/validator.sh @@ -29,6 +29,13 @@ until curl -s http://127.0.0.1:8080/status >/dev/null; do done echo "Done waiting." +# Ethereum host required for the chain id script +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} + +# Get the chain ID from the Ethereum node +source "$REPO"/yarn-project/end-to-end/scripts/native-network/utils/get-chain-id.sh +export L1_CHAIN_ID=${L1_CHAIN_ID:-31337} + # Set the boot node URL BOOT_NODE_URL="http://127.0.0.1:8080" @@ -51,7 +58,9 @@ export L1_PRIVATE_KEY=$VALIDATOR_PRIVATE_KEY export SEQ_PUBLISHER_PRIVATE_KEY=$VALIDATOR_PRIVATE_KEY export DEBUG=${DEBUG:-""} export LOG_LEVEL=${LOG_LEVEL:-"verbose"} -export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} +export L1_CONSENSUS_HOST_URL=${L1_CONSENSUS_HOST_URL:-} + + # Automatically detect if we're using Anvil if curl -s -H "Content-Type: application/json" -X POST --data '{"method":"web3_clientVersion","params":[],"id":49,"jsonrpc":"2.0"}' $ETHEREUM_HOST | jq .result | grep -q anvil; then @@ -68,6 +77,8 @@ export P2P_TCP_ANNOUNCE_ADDR="127.0.0.1:$P2P_PORT" export P2P_UDP_ANNOUNCE_ADDR="127.0.0.1:$P2P_PORT" export P2P_TCP_LISTEN_ADDR="0.0.0.0:$P2P_PORT" export P2P_UDP_LISTEN_ADDR="0.0.0.0:$P2P_PORT" +export BLOB_SINK_URL="http://127.0.0.1:${BLOB_SINK_PORT:-5053}" +export L1_CHAIN_ID=${L1_CHAIN_ID:-31337} export OTEL_RESOURCE_ATTRIBUTES="service.name=validator-node-${PORT}" export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-}" diff --git a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts index d95739c68ad..5619dc6c463 100644 --- a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts @@ -163,7 +163,7 @@ export class FullProverTest { blobSink: this.blobSink, } = this.context); - const blobSinkClient = createBlobSinkClient(`http://localhost:${this.blobSink.port}`); + const blobSinkClient = createBlobSinkClient({ blobSinkUrl: `http://localhost:${this.blobSink.port}` }); // Configure a full prover PXE let acvmConfig: Awaited<ReturnType<typeof getACVMConfig>> | undefined; diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts
index 2283214ce27..4eaa8430317 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -62,6 +62,7 @@ import { createWorldStateSynchronizer } from '@aztec/world-state'; import * as fs from 'fs'; import { getContract } from 'viem'; +import { DEFAULT_BLOB_SINK_PORT } from './fixtures/fixtures.js'; import { addAccounts } from './fixtures/snapshot_manager.js'; import { mintTokensToPrivate } from './fixtures/token_utils.js'; import { type EndToEndContext, getPrivateKeyFromIndex, setup, setupPXEService } from './fixtures/utils.js'; @@ -382,7 +383,9 @@ describe('e2e_synching', () => { await (sequencer as any).stop(); await watcher?.stop(); - const blobSinkClient = createBlobSinkClient(`http://localhost:${blobSink?.port ?? 5052}`); + const blobSinkClient = createBlobSinkClient({ + blobSinkUrl: `http://localhost:${blobSink?.port ?? DEFAULT_BLOB_SINK_PORT}`, + }); const sequencerPK: `0x${string}` = `0x${getPrivateKeyFromIndex(0)!.toString('hex')}`; const publisher = new L1Publisher( @@ -500,7 +503,9 @@ describe('e2e_synching', () => { await aztecNode.stop(); } - const blobSinkClient = createBlobSinkClient(`http://localhost:${opts.blobSink?.port ?? 5052}`); + const blobSinkClient = createBlobSinkClient({ + blobSinkUrl: `http://localhost:${opts.blobSink?.port ?? DEFAULT_BLOB_SINK_PORT}`, + }); const archiver = await createArchiver(opts.config!, blobSinkClient, { blockUntilSync: true, }); diff --git a/yarn-project/end-to-end/src/fixtures/fixtures.ts b/yarn-project/end-to-end/src/fixtures/fixtures.ts index 7fec6f90d39..48980db0e21 100644 --- a/yarn-project/end-to-end/src/fixtures/fixtures.ts +++ b/yarn-project/end-to-end/src/fixtures/fixtures.ts @@ -24,3 +24,5 @@ export const NO_L1_TO_L2_MSG_ERROR = export const STATIC_CALL_STATE_MODIFICATION_ERROR = /Static call cannot update the state, emit L2->L1 messages or generate logs.*/; export const STATIC_CONTEXT_ASSERTION_ERROR = /Assertion failed: Function .* can only be called statically.*/; + +export const DEFAULT_BLOB_SINK_PORT = '5052'; diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index c2c43a22af5..26af6f38cb5 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -470,7 +470,7 @@ export async function setup( const telemetry = getTelemetryClient(opts.telemetryConfig); - const blobSinkClient = createBlobSinkClient(config.blobSinkUrl); + const blobSinkClient = createBlobSinkClient(config); const publisher = new TestL1Publisher(config, { blobSinkClient }); const aztecNode = await AztecNodeService.createAndSync(config, { publisher, @@ -718,7 +718,7 @@ export async function createAndSyncProverNode( stop: () => Promise.resolve(), }; - const blobSinkClient = createBlobSinkClient(); + const blobSinkClient = createBlobSinkClient(aztecNodeConfig); // Creating temp store and archiver for simulated prover node const archiverConfig = { ...aztecNodeConfig, dataDirectory }; const archiver = await createArchiver(archiverConfig, blobSinkClient, { diff --git a/yarn-project/foundation/src/blob/blob.test.ts b/yarn-project/foundation/src/blob/blob.test.ts index da4caa8fc74..c730277e355 100644 --- a/yarn-project/foundation/src/blob/blob.test.ts +++ b/yarn-project/foundation/src/blob/blob.test.ts @@ -3,7 +3,7 @@ import type { Blob as BlobBuffer, Bytes48, KZGProof } from 'c-kzg'; import { poseidon2Hash } from '../crypto/index.js'; import { Fr } from '../fields/index.js'; -import { Blob } from 
'./index.js'; +import { Blob, makeEncodedBlob } from './index.js'; // Importing directly from 'c-kzg' does not work, ignoring import/no-named-as-default-member err: /* eslint-disable import/no-named-as-default-member */ @@ -144,4 +144,11 @@ describe('blob', () => { const deserialisedBlob = Blob.fromBuffer(blobBuffer); expect(blob.fieldsHash.equals(deserialisedBlob.fieldsHash)).toBe(true); }); + + it('Should create a blob from a JSON object', () => { + const blob = makeEncodedBlob(3); + const blobJson = blob.toJson(); + const deserialisedBlob = Blob.fromJson(blobJson); + expect(blob.fieldsHash.equals(deserialisedBlob.fieldsHash)).toBe(true); + }); }); diff --git a/yarn-project/foundation/src/blob/blob.ts b/yarn-project/foundation/src/blob/blob.ts new file mode 100644 index 00000000000..36446de4602 --- /dev/null +++ b/yarn-project/foundation/src/blob/blob.ts @@ -0,0 +1,294 @@ +// Importing directly from 'c-kzg' does not work, ignoring import/no-named-as-default-member err: +import cKzg from 'c-kzg'; +import type { Blob as BlobBuffer } from 'c-kzg'; + +import { poseidon2Hash, sha256 } from '../crypto/index.js'; +import { Fr } from '../fields/index.js'; +import { BufferReader, serializeToBuffer } from '../serialize/index.js'; +import { deserializeEncodedBlobFields, extractBlobFieldsFromBuffer } from './encoding.js'; +import { type BlobJson } from './interface.js'; + +/* eslint-disable import/no-named-as-default-member */ +const { BYTES_PER_BLOB, FIELD_ELEMENTS_PER_BLOB, blobToKzgCommitment, computeKzgProof, verifyKzgProof } = cKzg; + +// The prefix to the EVM blobHash, defined here: https://eips.ethereum.org/EIPS/eip-4844#specification +export const VERSIONED_HASH_VERSION_KZG = 0x01; + +/** + * A class to create, manage, and prove EVM blobs. + */ +export class Blob { + constructor( + /** The blob to be broadcast on L1 in bytes form. */ + public readonly data: BlobBuffer, + /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */ + public readonly fieldsHash: Fr, + /** Challenge point z (= H(H(tx_effects), kzgCommitment)). Used such that p(z) = y. */ + public readonly challengeZ: Fr, + /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */ + public readonly evaluationY: Buffer, + /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */ + public readonly commitment: Buffer, + /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */ + public readonly proof: Buffer, + ) {} + + /** + * The transaction encoding determines where the meaningful data in an encoded blob ends. + * This is required whenever the blob's raw buffer has trailing zeros that must not contribute to the fieldsHash. + * + * See `./encoding.ts` for more details. + * + * This method creates a Blob from an encoded buffer. + * @param blob - The buffer to create the Blob from. + * @param multiBlobFieldsHash - The fields hash to use for the Blob. + * @returns A Blob created from the buffer. + */ + static fromEncodedBlobBuffer(blob: BlobBuffer, multiBlobFieldsHash?: Fr): Blob { + const fields: Fr[] = deserializeEncodedBlobFields(blob); + return Blob.fromFields(fields, multiBlobFieldsHash); + } + + /** + * Create a Blob from an array of fields. + * + * @param fields - The array of fields to create the Blob from. + * @param multiBlobFieldsHash - The fields hash to use for the Blob.
+ * @returns A Blob created from the array of fields. + */ + static fromFields(fields: Fr[], multiBlobFieldsHash?: Fr): Blob { + if (fields.length > FIELD_ELEMENTS_PER_BLOB) { + throw new Error( + `Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELD_ELEMENTS_PER_BLOB}`, + ); + } + + const data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB); + + // This matches the output of SpongeBlob.squeeze() in the blob circuit + const fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : poseidon2Hash(fields); + const commitment = Buffer.from(blobToKzgCommitment(data)); + const challengeZ = poseidon2Hash([fieldsHash, ...commitmentToFields(commitment)]); + const res = computeKzgProof(data, challengeZ.toBuffer()); + if (!verifyKzgProof(commitment, challengeZ.toBuffer(), res[1], res[0])) { + throw new Error(`KZG proof did not verify.`); + } + const proof = Buffer.from(res[0]); + const evaluationY = Buffer.from(res[1]); + + return new Blob(data, fieldsHash, challengeZ, evaluationY, commitment, proof); + } + + /** + * Create a Blob from a JSON object. + * + * Blobs will be in this form when requested from the blob sink, or from + * the beacon chain via `getBlobSidecars` + * https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars + * + * @dev WARNING: the JSON `blob` field carries the encoded blob buffer + * + * @param json - The JSON object to create the Blob from. + * @returns A Blob created from the JSON object. + */ + static fromJson(json: BlobJson): Blob { + const blobBuffer = Buffer.from(json.blob.slice(2), 'hex'); + + const blob = Blob.fromEncodedBlobBuffer(blobBuffer); + + if (blob.commitment.toString('hex') !== json.kzg_commitment.slice(2)) { + throw new Error('KZG commitment does not match'); + } + + // We do not check the proof, as it will be different if the challenge is shared + // across multiple blobs + + return blob; + } + + /** + * Get the JSON representation of the blob. + * + * @dev WARNING: the JSON `blob` field carries the encoded blob buffer + * @param index - optional - The index of the blob in the block. + * @returns The JSON representation of the blob. + */ + toJson(index?: number): BlobJson { + return { + blob: `0x${Buffer.from(this.data).toString('hex')}`, + index, + // eslint-disable-next-line camelcase + kzg_commitment: `0x${this.commitment.toString('hex')}`, + // eslint-disable-next-line camelcase + kzg_proof: `0x${this.proof.toString('hex')}`, + }; + } + + /** + * Get the fields from the blob. + * + * @dev WARNING: this method ignores the tx encoding and simply trims all trailing zeros + * + * @returns The fields from the blob. + */ + toFields(): Fr[] { + return extractBlobFieldsFromBuffer(this.data); + } + + /** + * Get the encoded fields from the blob. + * + * @dev This method uses the tx encoding, so trailing zeros that belong to a transaction are preserved + * + * @returns The encoded fields from the blob. + */ + toEncodedFields(): Fr[] { + return deserializeEncodedBlobFields(this.data); + } + + /** + * Get the commitment fields from the blob. + * + * The 48-byte commitment is encoded into two field elements: + * +------------------+------------------+ + * | Field Element 1 | Field Element 2 | + * | [bytes 0-30] | [bytes 31-47] | + * +------------------+------------------+ + * | 31 bytes | 17 bytes | + * +------------------+------------------+ + * @returns The commitment fields from the blob.
+ */ + commitmentToFields(): [Fr, Fr] { + return commitmentToFields(this.commitment); + } + + // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers + getEthVersionedBlobHash(): Buffer { + const hash = sha256(this.commitment); + hash[0] = VERSIONED_HASH_VERSION_KZG; + return hash; + } + + static getEthVersionedBlobHash(commitment: Buffer): Buffer { + const hash = sha256(commitment); + hash[0] = VERSIONED_HASH_VERSION_KZG; + return hash; + } + + /** + * Get the buffer representation of the ENTIRE blob. + * + * @dev WARNING: this buffer contains all metadata as well as the data itself + * + * @returns The buffer representation of the blob. + */ + toBuffer(): Buffer { + return Buffer.from( + serializeToBuffer( + this.data.length, + this.data, + this.fieldsHash, + this.challengeZ, + this.evaluationY.length, + this.evaluationY, + this.commitment.length, + this.commitment, + this.proof.length, + this.proof, + ), + ); + } + + /** + * Create a Blob from a buffer. + * + * @dev WARNING: the buffer must contain all metadata as well as the data itself + * + * @param buf - The buffer to create the Blob from. + * @returns A Blob created from the buffer. + */ + static fromBuffer(buf: Buffer | BufferReader): Blob { + const reader = BufferReader.asReader(buf); + return new Blob( + reader.readUint8Array(), + reader.readObject(Fr), + reader.readObject(Fr), + reader.readBuffer(), + reader.readBuffer(), + reader.readBuffer(), + ); + } + + /** + * Get the size of the blob in bytes + */ + getSize() { + return this.data.length; + } + + /** + * Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile: + * + * input[:32] - versioned_hash + * input[32:64] - z + * input[64:96] - y + * input[96:144] - commitment C + * input[144:192] - proof (a commitment to the quotient polynomial q(X)) + * + * See https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile + */ + getEthBlobEvaluationInputs(): `0x${string}` { + const buf = Buffer.concat([ + this.getEthVersionedBlobHash(), + this.challengeZ.toBuffer(), + this.evaluationY, + this.commitment, + this.proof, + ]); + return `0x${buf.toString('hex')}`; + } + + static getEthBlobEvaluationInputs(blobs: Blob[]): `0x${string}` { + let buf = Buffer.alloc(0); + blobs.forEach(blob => { + buf = Buffer.concat([ + buf, + blob.getEthVersionedBlobHash(), + blob.challengeZ.toBuffer(), + blob.evaluationY, + blob.commitment, + blob.proof, + ]); + }); + // For multiple blobs, we prefix the number of blobs: + const lenBuf = Buffer.alloc(1); + lenBuf.writeUint8(blobs.length); + buf = Buffer.concat([lenBuf, buf]); + return `0x${buf.toString('hex')}`; + } + + static getViemKzgInstance() { + return { + blobToKzgCommitment: cKzg.blobToKzgCommitment, + computeBlobKzgProof: cKzg.computeBlobKzgProof, + }; + } + + // Returns as many blobs as we require to broadcast the given fields + // Assumes we share the fields hash between all blobs + static getBlobs(fields: Fr[]): Blob[] { + const numBlobs = Math.max(Math.ceil(fields.length / FIELD_ELEMENTS_PER_BLOB), 1); + const multiBlobFieldsHash = poseidon2Hash(fields); + const res = []; + for (let i = 0; i < numBlobs; i++) { + const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ?
fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB; + res.push(Blob.fromFields(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash)); + } + return res; + } +} + +// 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48] +function commitmentToFields(commitment: Buffer): [Fr, Fr] { + return [new Fr(commitment.subarray(0, 31)), new Fr(commitment.subarray(31, 48))]; +} diff --git a/yarn-project/foundation/src/blob/encoding.ts b/yarn-project/foundation/src/blob/encoding.ts new file mode 100644 index 00000000000..f568274dd20 --- /dev/null +++ b/yarn-project/foundation/src/blob/encoding.ts @@ -0,0 +1,98 @@ +import { Fr } from '@aztec/foundation/fields'; +import { BufferReader, FieldReader } from '@aztec/foundation/serialize'; + +import type { Blob as BlobBuffer } from 'c-kzg'; + +// This will appear as 0x74785f7374617274 in logs +export const TX_START_PREFIX = 8392562855083340404n; +// These are helper constants to decode tx effects from blob encoded fields +export const TX_START_PREFIX_BYTES_LENGTH = TX_START_PREFIX.toString(16).length / 2; +// 7 bytes for: | 0 | txlen[0] | txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revertCode | +export const TX_EFFECT_PREFIX_BYTE_LENGTH = TX_START_PREFIX_BYTES_LENGTH + 7; + +/** + * Deserializes a blob buffer into an array of field elements. + * + * Blobs are converted into BN254 fields to perform a poseidon2 hash on them (fieldsHash). + * This method is sparse, meaning it does not include trailing zeros at the end of the blob. + * + * However, we cannot simply trim the zeros from the end of the blob, as some logs may include zeros + * within them. + * If we end on a set of zeros, such as the log below: + * length 7: [ a, b, c, d, e, 0, 0] + * + * we will end up with the incorrect hash if we trim the zeros from the end. + * + * Each transaction's logs contain a TX start prefix, which includes a marker string followed + * by the length (in field elements) of the transaction's logs. + * + * This function finds the end of the last transaction's logs, and returns the array up to this point. + * + * We search for a series of Tx Prefixes, progressing the cursor in the field reader until we hit + * a field that is not a Tx Prefix; this indicates that we have reached the end of the last transaction's logs. + * + * +------------------+------------------+------------------+------------------+ + * | TX1 Start Prefix | TX1 Log Fields | TX2 Start Prefix | Padded zeros | + * | [3 a,b,c] | [3, a, b, c] | [5 d,e,f,0,0] | [0, 0, 0, .., 0] | + * +------------------+------------------+------------------+------------------+ + * ^ + * | + * Function reads until here -------------------------------- + * + * @param blob - The blob buffer to deserialize. + * @returns An array of field elements.
+ */ +export function deserializeEncodedBlobFields(blob: BlobBuffer): Fr[] { + // Convert blob buffer to array of field elements + const reader = BufferReader.asReader(blob); + const array = reader.readArray(blob.length >> 5, Fr); // >> 5 = / 32 (bytes per field) + const fieldReader = FieldReader.asReader(array); + + // Walk the transactions until we hit the trailing zeros + while (!fieldReader.isFinished()) { + const currentField = fieldReader.peekField(); + + // Stop when we hit a zero field + if (!currentField || currentField.isZero()) { + break; + } + + // Skip over this transaction's fields (the encoded length includes the prefix field) + const len = getLengthFromFirstField(currentField); + fieldReader.skip(len); + } + + // Return the array up to the end of the last transaction + return array.slice(0, fieldReader.cursor); +} + +export function getLengthFromFirstField(firstField: Fr): number { + const buf = firstField.toBuffer().subarray(-TX_EFFECT_PREFIX_BYTE_LENGTH); + return new Fr(buf.subarray(TX_START_PREFIX_BYTES_LENGTH + 1, TX_START_PREFIX_BYTES_LENGTH + 3)).toNumber(); +} + +/** + * Extract the fields from a blob buffer without taking the tx encoding into account: + * all trailing zeros are trimmed, even ones that are part of a transaction's logs. + * + * +------------------+------------------+------------------+------------------+ + * | | | | Padded zeros | + * | [3 a,b,c] | [3, a, b, c] | [5 d,e,f,0,0] | [0, 0, 0, .., 0] | + * +------------------+------------------+------------------+------------------+ + * ^ + * | + * Function reads until here ---------------------- + */ +export function extractBlobFieldsFromBuffer(blob: BlobBuffer): Fr[] { + const reader = BufferReader.asReader(blob); + const array = reader.readArray(blob.length >> 5, Fr); + + // Find the index of the last non-zero field + let lastNonZeroIndex = array.length - 1; + while (lastNonZeroIndex >= 0 && array[lastNonZeroIndex].isZero()) { + lastNonZeroIndex--; + } + + // Return the trimmed array + return array.slice(0, lastNonZeroIndex + 1); +} diff --git a/yarn-project/foundation/src/blob/index.ts b/yarn-project/foundation/src/blob/index.ts index dddb124f1d3..3a9c91ea1f7 100644 --- a/yarn-project/foundation/src/blob/index.ts +++ b/yarn-project/foundation/src/blob/index.ts @@ -1,21 +1,12 @@ import cKzg from 'c-kzg'; -import type { Blob as BlobBuffer } from 'c-kzg'; -import { poseidon2Hash, sha256 } from '../crypto/index.js'; -import { Fr } from '../fields/index.js'; -import { BufferReader, serializeToBuffer } from '../serialize/index.js'; - -// Importing directly from 'c-kzg' does not work, ignoring import/no-named-as-default-member err: /* eslint-disable import/no-named-as-default-member */ +const { loadTrustedSetup } = cKzg; -const { - BYTES_PER_BLOB, - FIELD_ELEMENTS_PER_BLOB, - blobToKzgCommitment, - computeKzgProof, - loadTrustedSetup, - verifyKzgProof, -} = cKzg; +export * from './blob.js'; +export * from './mocks.js'; +export * from './encoding.js'; +export * from './interface.js'; try { loadTrustedSetup(); @@ -28,172 +19,3 @@ try { } catch (error: any) { throw new Error(error); } } - -// The prefix to the EVM blobHash, defined here: https://eips.ethereum.org/EIPS/eip-4844#specification -export const VERSIONED_HASH_VERSION_KZG = 0x01; - -/** - * A class to create, manage, and prove EVM blobs. - */ -export class Blob { - constructor( - /** The blob to be broadcast on L1 in bytes form. */ - public readonly data: BlobBuffer, - /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects.
*/ - public readonly fieldsHash: Fr, - /** Challenge point z (= H(H(tx_effects), kzgCommmitment). Used such that p(z) = y. */ - public readonly challengeZ: Fr, - /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */ - public readonly evaluationY: Buffer, - /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */ - public readonly commitment: Buffer, - /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */ - public readonly proof: Buffer, - ) {} - - static fromFields(fields: Fr[], multiBlobFieldsHash?: Fr): Blob { - if (fields.length > FIELD_ELEMENTS_PER_BLOB) { - throw new Error( - `Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELD_ELEMENTS_PER_BLOB}`, - ); - } - const dataWithoutZeros = serializeToBuffer(fields); - const data = Buffer.concat([dataWithoutZeros], BYTES_PER_BLOB); - - // This matches the output of SpongeBlob.squeeze() in the blob circuit - const fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : poseidon2Hash(fields); - const commitment = Buffer.from(blobToKzgCommitment(data)); - const challengeZ = poseidon2Hash([fieldsHash, ...commitmentToFields(commitment)]); - const res = computeKzgProof(data, challengeZ.toBuffer()); - if (!verifyKzgProof(commitment, challengeZ.toBuffer(), res[1], res[0])) { - throw new Error(`KZG proof did not verify.`); - } - const proof = Buffer.from(res[0]); - const evaluationY = Buffer.from(res[1]); - - return new Blob(dataWithoutZeros, fieldsHash, challengeZ, evaluationY, commitment, proof); - } - - // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48] - commitmentToFields(): [Fr, Fr] { - return commitmentToFields(this.commitment); - } - - // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers - getEthVersionedBlobHash(): Buffer { - const hash = sha256(this.commitment); - hash[0] = VERSIONED_HASH_VERSION_KZG; - return hash; - } - - static getEthVersionedBlobHash(commitment: Buffer): Buffer { - const hash = sha256(commitment); - hash[0] = VERSIONED_HASH_VERSION_KZG; - return hash; - } - - toBuffer(): Buffer { - return Buffer.from( - serializeToBuffer( - this.data.length, - this.data, - this.fieldsHash, - this.challengeZ, - this.evaluationY.length, - this.evaluationY, - this.commitment.length, - this.commitment, - this.proof.length, - this.proof, - ), - ); - } - - static fromBuffer(buf: Buffer | BufferReader): Blob { - const reader = BufferReader.asReader(buf); - return new Blob( - reader.readUint8Array(), - reader.readObject(Fr), - reader.readObject(Fr), - reader.readBuffer(), - reader.readBuffer(), - reader.readBuffer(), - ); - } - - /** - * Pad the blob data to it's full size before posting - */ - get dataWithZeros(): BlobBuffer { - return Buffer.concat([this.data], BYTES_PER_BLOB); - } - - /** - * Get the size of the blob in bytes - */ - getSize() { - return this.data.length; - } - - // Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile: - // * input[:32] - versioned_hash - // * input[32:64] - z - // * input[64:96] - y - // * input[96:144] - commitment C - // * input[144:192] - proof (a commitment to the quotient polynomial q(X)) - // See https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile - getEthBlobEvaluationInputs(): `0x${string}` { - const buf = Buffer.concat([ - this.getEthVersionedBlobHash(), - 
this.challengeZ.toBuffer(), - this.evaluationY, - this.commitment, - this.proof, - ]); - return `0x${buf.toString('hex')}`; - } - - static getEthBlobEvaluationInputs(blobs: Blob[]): `0x${string}` { - let buf = Buffer.alloc(0); - blobs.forEach(blob => { - buf = Buffer.concat([ - buf, - blob.getEthVersionedBlobHash(), - blob.challengeZ.toBuffer(), - blob.evaluationY, - blob.commitment, - blob.proof, - ]); - }); - // For multiple blobs, we prefix the number of blobs: - const lenBuf = Buffer.alloc(1); - lenBuf.writeUint8(blobs.length); - buf = Buffer.concat([lenBuf, buf]); - return `0x${buf.toString('hex')}`; - } - - static getViemKzgInstance() { - return { - blobToKzgCommitment: cKzg.blobToKzgCommitment, - computeBlobKzgProof: cKzg.computeBlobKzgProof, - }; - } - - // Returns as many blobs as we require to broadcast the given fields - // Assumes we share the fields hash between all blobs - static getBlobs(fields: Fr[]): Blob[] { - const numBlobs = Math.max(Math.ceil(fields.length / FIELD_ELEMENTS_PER_BLOB), 1); - const multiBlobFieldsHash = poseidon2Hash(fields); - const res = []; - for (let i = 0; i < numBlobs; i++) { - const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ? fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB; - res.push(Blob.fromFields(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash)); - } - return res; - } -} - -// 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48] -function commitmentToFields(commitment: Buffer): [Fr, Fr] { - return [new Fr(commitment.subarray(0, 31)), new Fr(commitment.subarray(31, 48))]; -} diff --git a/yarn-project/foundation/src/blob/interface.ts b/yarn-project/foundation/src/blob/interface.ts new file mode 100644 index 00000000000..61864793a64 --- /dev/null +++ b/yarn-project/foundation/src/blob/interface.ts @@ -0,0 +1,11 @@ +/** + * The relevant parts of a response from https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars + */ +export interface BlobJson { + blob: string; + index?: number; + // eslint-disable-next-line camelcase + kzg_commitment: string; + // eslint-disable-next-line camelcase + kzg_proof: string; +} diff --git a/yarn-project/foundation/src/blob/mocks.ts b/yarn-project/foundation/src/blob/mocks.ts new file mode 100644 index 00000000000..fd7890eebd9 --- /dev/null +++ b/yarn-project/foundation/src/blob/mocks.ts @@ -0,0 +1,30 @@ +import { toBufferBE } from '@aztec/foundation/bigint-buffer'; +import { Fr } from '@aztec/foundation/fields'; + +import { Blob } from './blob.js'; +import { TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH } from './encoding.js'; + +// TODO: copied from circuit-types tx effect +function encodeFirstField(length: number): Fr { + const lengthBuf = Buffer.alloc(2); + lengthBuf.writeUInt16BE(length, 0); + return new Fr( + Buffer.concat([ + toBufferBE(TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH), + Buffer.alloc(1), + lengthBuf, + Buffer.alloc(1), + Buffer.from([1]), + Buffer.alloc(1), + Buffer.alloc(1), + ]), + ); +} + +export function makeEncodedBlob(length: number): Blob { + return Blob.fromFields([encodeFirstField(length + 1), ...Array.from({ length }, () => Fr.random())]); +} + +export function makeEncodedBlobFields(fields: Fr[]): Blob { + return Blob.fromFields([encodeFirstField(fields.length + 1), ...fields]); +} diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index dbab1ffb24f..e6f082a9ad0 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++
b/yarn-project/foundation/src/config/env_var.ts @@ -16,6 +16,8 @@ export type EnvVar = | 'BB_SKIP_CLEANUP' | 'BB_WORKING_DIRECTORY' | 'BOOTSTRAP_NODES' + | 'BLOB_SINK_PORT' + | 'BLOB_SINK_URL' | 'BOT_DA_GAS_LIMIT' | 'BOT_FEE_PAYMENT_METHOD' | 'BOT_FLUSH_SETUP_TRANSACTIONS' @@ -51,7 +53,7 @@ export type EnvVar = | 'GOVERNANCE_PROPOSER_PAYLOAD_ADDRESS' | 'INBOX_CONTRACT_ADDRESS' | 'L1_CHAIN_ID' - | 'L1_CONSENSUS_CLIENT_URL' + | 'L1_CONSENSUS_HOST_URL' | 'L1_PRIVATE_KEY' | 'L2_QUEUE_SIZE' | 'LOG_ELAPSED_TIME' @@ -102,7 +104,6 @@ export type EnvVar = | 'P2P_UDP_LISTEN_ADDR' | 'P2P_ARCHIVED_TX_LIMIT' | 'PEER_ID_PRIVATE_KEY' - | 'PROVER_BLOB_SINK_URL' | 'PROOF_VERIFIER_L1_START_BLOCK' | 'PROOF_VERIFIER_POLL_INTERVAL_MS' | 'PROVER_AGENT_ENABLED' @@ -142,7 +143,6 @@ export type EnvVar = | 'REGISTRY_CONTRACT_ADDRESS' | 'ROLLUP_CONTRACT_ADDRESS' | 'SEQ_ALLOWED_SETUP_FN' - | 'SEQ_BLOB_SINK_URL' | 'SEQ_MAX_BLOCK_SIZE_IN_BYTES' | 'SEQ_MAX_TX_PER_BLOCK' | 'SEQ_MIN_TX_PER_BLOCK' diff --git a/yarn-project/foundation/src/promise/running-promise.test.ts b/yarn-project/foundation/src/promise/running-promise.test.ts index f3ebe10d973..709e91c144a 100644 --- a/yarn-project/foundation/src/promise/running-promise.test.ts +++ b/yarn-project/foundation/src/promise/running-promise.test.ts @@ -1,3 +1,5 @@ +import { jest } from '@jest/globals'; + import { type Logger, createLogger } from '../log/pino-logger.js'; import { sleep } from '../sleep/index.js'; import { RunningPromise } from './running-promise.js'; @@ -49,10 +51,32 @@ describe('RunningPromise', () => { await fn(); throw new Error('ouch'); }; + const loggerSpy = jest.spyOn(logger, 'error'); runningPromise = new RunningPromise(failingFn, logger, 50); runningPromise.start(); - await sleep(90); + await sleep(150); + expect(counter).toEqual(1); + expect(loggerSpy).toHaveBeenCalledTimes(1); + }); + + class IgnoredError extends Error { + constructor() { + super('ignored'); + this.name = 'IgnoredError'; + } + } + + it('handles ignored errors', async () => { + const failingFn = async () => { + await fn(); + throw new IgnoredError(); + }; + const loggerSpy = jest.spyOn(logger, 'error'); + runningPromise = new RunningPromise(failingFn, logger, 50, [IgnoredError]); + runningPromise.start(); + await sleep(150); expect(counter).toEqual(1); + expect(loggerSpy).not.toHaveBeenCalled(); }); }); }); diff --git a/yarn-project/foundation/src/promise/running-promise.ts b/yarn-project/foundation/src/promise/running-promise.ts index 093f572d371..8039863a639 100644 --- a/yarn-project/foundation/src/promise/running-promise.ts +++ b/yarn-project/foundation/src/promise/running-promise.ts @@ -17,6 +17,7 @@ export class RunningPromise { private fn: () => void | Promise<void>, private logger = createLogger('running-promise'), private pollingIntervalMS = 10000, + private ignoredErrors: (new (...args: any[]) => Error)[] = [], ) {} /** @@ -35,7 +36,9 @@ export class RunningPromise { try { await this.fn(); } catch (err) { - this.logger.error('Error in running promise', err); + if (err instanceof Error && !this.ignoredErrors.some(ErrorType => err instanceof ErrorType)) { + this.logger.error('Error in running promise', err); + } } // If an immediate run had been requested *before* the function started running, resolve the request.
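The new ignoredErrors constructor argument shown above lets a poller swallow expected failure classes without spamming the logs. A minimal usage sketch, with imports matching the test file above; the TransientError class and the polling body are illustrative, not part of this PR:

import { createLogger } from '../log/pino-logger.js';
import { RunningPromise } from './running-promise.js';

// Illustrative error type: the loop keeps polling but stays silent when it is thrown.
class TransientError extends Error {}

const poller = new RunningPromise(
  async () => {
    // Poll an upstream service here; throw TransientError while it is merely not ready.
  },
  createLogger('example-poller'),
  1000, // polling interval in ms
  [TransientError], // error classes to skip logging for
);
poller.start();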
diff --git a/yarn-project/foundation/src/serialize/field_reader.test.ts b/yarn-project/foundation/src/serialize/field_reader.test.ts index 7a242bae1b0..575429a97d5 100644 --- a/yarn-project/foundation/src/serialize/field_reader.test.ts +++ b/yarn-project/foundation/src/serialize/field_reader.test.ts @@ -28,6 +28,30 @@ describe('field reader', () => { }); }); + describe('cursor', () => { + it('should return the current cursor position', () => { + expect(reader.cursor).toBe(0); + reader.readField(); + expect(reader.cursor).toBe(1); + }); + + it('should not progress when peeking', () => { + reader.peekField(); + expect(reader.cursor).toBe(0); + }); + }); + + describe('skip', () => { + it('should skip n fields', () => { + reader.skip(2); + expect(reader.readField()).toEqual(new Fr(23)); + }); + + it('should throw if skipping more fields than in the reader', () => { + expect(() => reader.skip(FIELDS.length + 1)).toThrow('Not enough fields to be consumed.'); + }); + }); + describe('readFq', () => { it('should get Fq from buffer', () => { expect(reader.readFq()).toEqual(Fq.fromHighLow(new Fr(0), new Fr(1))); diff --git a/yarn-project/foundation/src/serialize/field_reader.ts b/yarn-project/foundation/src/serialize/field_reader.ts index 98e80c80909..b9c593615a5 100644 --- a/yarn-project/foundation/src/serialize/field_reader.ts +++ b/yarn-project/foundation/src/serialize/field_reader.ts @@ -34,6 +34,27 @@ export class FieldReader { return new FieldReader(fields); } + /** + * Skips the next n fields. + * + * @param n - The number of fields to skip. + */ + public skip(n: number) { + if (this.index + n > this.length) { + throw new Error('Not enough fields to be consumed.'); + } + this.index += n; + } + + /** + * Returns the current cursor position. + * + * @returns The current cursor position. + */ + public get cursor() { + return this.index; + } + /** * Reads a single field from the array. * @@ -46,6 +67,18 @@ export class FieldReader { return this.fields[this.index++]; } + /** + * Peeks at the next field without advancing the cursor. + * + * @returns A field. + */ + public peekField(): Fr { + if (this.index === this.length) { + throw new Error('Not enough fields to be consumed.'); + } + return this.fields[this.index]; + } + /** * Reads a Fq from the array. * @@ -146,6 +179,6 @@ export class FieldReader { * @returns A bool. */ public isFinished(): boolean { - return this.index === this.length; + return this.index >= this.length; } } diff --git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index c129933070d..fca8b4027f5 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -39,7 +39,7 @@ export async function createProverNode( } = {}, ) { const telemetry = deps.telemetry ?? getTelemetryClient(); - const blobSinkClient = deps.blobSinkClient ?? createBlobSinkClient(config.blobSinkUrl); + const blobSinkClient = deps.blobSinkClient ?? createBlobSinkClient(config); const log = deps.log ?? createLogger('prover-node'); const archiver = deps.archiver ?? 
(await createArchiver(config, blobSinkClient, { blockUntilSync: true }, telemetry)); log.verbose(`Created archiver and synced to block ${await archiver.getBlockNumber()}`); diff --git a/yarn-project/sequencer-client/src/publisher/config.ts b/yarn-project/sequencer-client/src/publisher/config.ts index d77efa57ca2..dfb97ea3ecf 100644 --- a/yarn-project/sequencer-client/src/publisher/config.ts +++ b/yarn-project/sequencer-client/src/publisher/config.ts @@ -78,7 +78,7 @@ export const getPublisherConfigMappings: ( }, ...l1TxUtilsConfigMappings, blobSinkUrl: { - env: `${scope}_BLOB_SINK_URL`, + env: 'BLOB_SINK_URL', description: 'The URL of the blob sink.', parseEnv: (val?: string) => val, }, diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 4c296d88e81..67d0edc1eb1 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -1,4 +1,5 @@ import { HttpBlobSinkClient } from '@aztec/blob-sink/client'; +import { inboundTransform } from '@aztec/blob-sink/encoding'; import { L2Block } from '@aztec/circuit-types'; import { EthAddress } from '@aztec/circuits.js'; import { @@ -72,7 +73,7 @@ class MockRollupContract { } } -const BLOB_SINK_PORT = 5052; +const BLOB_SINK_PORT = 50525; const BLOB_SINK_URL = `http://localhost:${BLOB_SINK_PORT}`; describe('L1Publisher', () => { @@ -102,7 +103,7 @@ describe('L1Publisher', () => { beforeEach(async () => { mockBlobSinkServer = undefined; - blobSinkClient = new HttpBlobSinkClient(BLOB_SINK_URL); + blobSinkClient = new HttpBlobSinkClient({ blobSinkUrl: BLOB_SINK_URL }); l2Block = await L2Block.random(42); @@ -184,7 +185,7 @@ describe('L1Publisher', () => { app.post('/blob_sidecar', (req, res) => { const blobsBuffers = req.body.blobs.map((b: { index: number; blob: { type: string; data: string } }) => - Blob.fromBuffer(Buffer.from(b.blob.data)), + Blob.fromBuffer(inboundTransform(Buffer.from(b.blob.data))), ); expect(blobsBuffers).toEqual(blobs); @@ -241,7 +242,7 @@ describe('L1Publisher', () => { }, // val + (val * 20n) / 100n { gasLimit: 1_000_000n + GAS_GUESS + ((1_000_000n + GAS_GUESS) * 20n) / 100n }, - { blobs: expectedBlobs.map(b => b.dataWithZeros), kzg }, + { blobs: expectedBlobs.map(b => b.data), kzg }, ); expect(sendToBlobSinkSpy).toHaveBeenCalledTimes(1); diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 2252458fc95..947678fe291 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -210,7 +210,7 @@ export class L1Publisher { this.ethereumSlotDuration = BigInt(config.ethereumSlotDuration); const telemetry = deps.telemetry ?? getTelemetryClient(); - this.blobSinkClient = deps.blobSinkClient ?? createBlobSinkClient(config.blobSinkUrl); + this.blobSinkClient = deps.blobSinkClient ?? 
createBlobSinkClient(config); this.metrics = new L1PublisherMetrics(telemetry, 'L1Publisher'); @@ -630,6 +630,11 @@ export class L1Publisher { // Tx was mined successfully if (receipt.status === 'success') { + // Send the blobs to the blob sink + this.sendBlobsToBlobSink(receipt.blockHash, blobs).catch(_err => { + this.log.error('Failed to send blobs to blob sink'); + }); + const tx = await this.getTransactionStats(receipt.transactionHash); const stats: L1PublishBlockStats = { gasPrice: receipt.effectiveGasPrice, @@ -644,11 +649,6 @@ export class L1Publisher { this.log.verbose(`Published L2 block to L1 rollup contract`, { ...stats, ...ctx }); this.metrics.recordProcessBlockTx(timer.ms(), stats); - // Send the blobs to the blob sink - this.sendBlobsToBlobSink(receipt.blockHash, blobs).catch(_err => { - this.log.error('Failed to send blobs to blob sink'); - }); - return true; } @@ -663,7 +663,7 @@ export class L1Publisher { address: this.rollupContract.address, }, { - blobs: proposeTxArgs.blobs.map(b => b.dataWithZeros), + blobs: proposeTxArgs.blobs.map(b => b.data), kzg, maxFeePerBlobGas: gasPrice.maxFeePerBlobGas ?? 10000000000n, }, @@ -985,7 +985,7 @@ export class L1Publisher { }, {}, { - blobs: encodedData.blobs.map(b => b.dataWithZeros), + blobs: encodedData.blobs.map(b => b.data), kzg, }, ); @@ -1102,7 +1102,7 @@ export class L1Publisher { gasLimit: this.l1TxUtils.bumpGasLimit(simulationResult + blobEvaluationGas), }, { - blobs: encodedData.blobs.map(b => b.dataWithZeros), + blobs: encodedData.blobs.map(b => b.data), kzg, }, ); @@ -1182,7 +1182,7 @@ export class L1Publisher { gasLimit: this.l1TxUtils.bumpGasLimit(simulationResult + blobEvaluationGas), }, { - blobs: encodedData.blobs.map(b => b.dataWithZeros), + blobs: encodedData.blobs.map(b => b.data), kzg, }, ); diff --git a/yarn-project/telemetry-client/package.json b/yarn-project/telemetry-client/package.json index 887088da917..d2f76ca021e 100644 --- a/yarn-project/telemetry-client/package.json +++ b/yarn-project/telemetry-client/package.json @@ -88,4 +88,4 @@ "../../foundation/src/jest/setup.mjs" ] } -} +} \ No newline at end of file diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 1b24beabaac..3a9045f4188 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -382,6 +382,7 @@ __metadata: express: "npm:^4.21.1" jest: "npm:^29.5.0" jest-mock-extended: "npm:^3.0.3" + snappy: "npm:^7.2.2" source-map-support: "npm:^0.5.21" supertest: "npm:^7.0.0" ts-node: "npm:^10.9.1"
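Taken together, the foundation changes above give blobs a beacon-style JSON wire format plus an encoding-aware deserializer. A short round-trip sketch using only APIs introduced in this diff (makeEncodedBlob, toJson, fromJson, toEncodedFields); the '@aztec/foundation/blob' import path is assumed from the package's export layout:

import { Blob, makeEncodedBlob } from '@aztec/foundation/blob';

// Build a mock blob: a tx-start prefix field followed by 3 random fields.
const blob = makeEncodedBlob(3);

// Serialize it the way the blob sink or the beacon getBlobSidecars API would return it.
const json = blob.toJson(0);

// Reconstruct it; fromJson re-derives the KZG commitment and checks it matches.
const roundTripped = Blob.fromJson(json);
console.log(blob.fieldsHash.equals(roundTripped.fieldsHash)); // true
console.log(roundTripped.toEncodedFields().length); // 4: the prefix field plus 3 data fields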