From abc6b19b9483b02aa233ce844f01cfb876102531 Mon Sep 17 00:00:00 2001
From: Santiago Palladino
Date: Fri, 16 Aug 2024 12:11:40 -0300
Subject: [PATCH] feat: Deterministic deployments for L1 (#8031)

This PR does multiple things. In retrospect, it should have been split into multiple PRs, but here we are.

## L1 contracts

Updates the L1 contracts that depended on `msg.sender` during deployment to instead accept the owner as a constructor argument. This lets us deploy our contracts through the [deterministic deployment proxy](https://github.com/Arachnid/deterministic-deployment-proxy) (already deployed to mainnet, and to anvil by default) using a salt; without the explicit owner argument, the factory contract would be appointed as owner. Different salts then yield different deployment addresses (see the sketch at the end of this message). With the previous setup we would always get the same deployment addresses on a fresh anvil instance, since contract addresses on Ethereum are derived from the sender address and an incrementing nonce, and we consistently use the same deployer address.

## Creating data stores

Refactors every call that creates a new store so it goes through `initStoreForRollup`, which clears the lmdb store if we detect it was created for a different rollup contract address (we should arguably key this on an Aztec chain ID, though maybe the rollup contract address should **be** the chain ID? Anyway, that's another discussion). Combined with a rollup contract address that can differ on each deployment, this means our components automatically reset their db when connected to a different L1 rollup.

## Creating dirs

Adds recursive mkdir calls wherever they were missing, so we no longer need an init container in our ECS tasks just to create data or working dirs.

## Deployments

Removes the forced EFS replacement on each task, so tasks can reuse the same filesystem, which should greatly improve deployment times (from about 2 minutes to 10 seconds). Given the changes above, we can rely on the rollup contract address changing for our components to init a new db as needed. To ensure we get a fresh rollup address on each deployment, we bind the deployment salt to the GitHub run id, which should be unique.
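For reference, here is a minimal sketch of the salted deployment flow, mirroring the viem-based `deployL1Contract` changes in this patch. The proxy address and the viem calls come from the patch itself; the RPC URL, the default anvil dev key, and the `abi`/`bytecode`/`args`/`saltNumber` parameters are illustrative placeholders rather than values from this repo.

```ts
import {
  type Abi,
  type Hex,
  concatHex,
  createPublicClient,
  createWalletClient,
  encodeDeployData,
  getContractAddress,
  http,
  numberToHex,
  padHex,
} from 'viem';
import { privateKeyToAccount } from 'viem/accounts';
import { foundry } from 'viem/chains';

// Arachnid's deterministic deployment proxy (same address on every chain).
const DETERMINISTIC_DEPLOYER: Hex = '0x4e59b44847b379578588920cA78FbF26c0B4956C';

// Illustrative only: default anvil dev account and local RPC endpoint.
const account = privateKeyToAccount('0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f40ff80');
const walletClient = createWalletClient({ account, chain: foundry, transport: http('http://localhost:8545') });
const publicClient = createPublicClient({ chain: foundry, transport: http('http://localhost:8545') });

export async function deployDeterministic(abi: Abi, bytecode: Hex, args: unknown[], saltNumber: number) {
  // The salt (e.g. the GitHub run id) is left-padded to 32 bytes.
  const salt = padHex(numberToHex(saltNumber), { size: 32 });

  // Constructor args (including the explicit owner) are baked into the init code, so the
  // CREATE2 address depends only on the proxy address, the salt, and that init code.
  const initCode = encodeDeployData({ abi, bytecode, args });
  const address = getContractAddress({ from: DETERMINISTIC_DEPLOYER, salt, bytecode: initCode, opcode: 'CREATE2' });

  // Idempotent: skip the deployment if code is already present at the predicted address.
  const existing = await publicClient.getBytecode({ address });
  if (!existing || existing === '0x') {
    // The proxy expects the 32-byte salt followed by the init code as calldata.
    const hash = await walletClient.sendTransaction({ to: DETERMINISTIC_DEPLOYER, data: concatHex([salt, initCode]) });
    await publicClient.waitForTransactionReceipt({ hash });
  }
  return address;
}
```

With the same salt and init code every deployment lands on the same addresses, and changing the salt (here, the run id) yields a fresh set of addresses, including a fresh rollup address that triggers the db reset described above.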
--- .github/workflows/devnet-deploys.yml | 5 +- l1-contracts/src/core/Rollup.sol | 5 +- .../interfaces/messagebridge/IRegistry.sol | 2 + .../src/core/messagebridge/Registry.sol | 18 ++- l1-contracts/test/Registry.t.sol | 2 +- l1-contracts/test/Rollup.t.sol | 6 +- l1-contracts/test/portals/TokenPortal.t.sol | 7 +- l1-contracts/test/portals/UniswapPortal.t.sol | 7 +- l1-contracts/test/sparta/DevNet.t.sol | 6 +- l1-contracts/test/sparta/Sparta.t.sol | 6 +- yarn-project/aztec/src/cli/cli.ts | 1 + .../aztec/src/cli/cmds/start_archiver.ts | 10 +- yarn-project/aztec/src/sandbox.ts | 1 + yarn-project/aztec/terraform/node/main.tf | 16 -- .../aztec/terraform/prover-node/main.tf | 16 -- .../src/prover/bb_private_kernel_prover.ts | 10 +- .../bb-prover/src/verifier/bb_verifier.ts | 1 + .../cli/src/cmds/devnet/bootstrap_network.ts | 16 +- .../cli/src/cmds/l1/deploy_l1_contracts.ts | 3 +- yarn-project/cli/src/cmds/l1/index.ts | 2 + yarn-project/cli/src/utils/aztec.ts | 2 + .../src/fixtures/setup_l1_contracts.ts | 1 + yarn-project/end-to-end/src/fixtures/utils.ts | 1 + .../ethereum/src/deploy_l1_contracts.ts | 151 ++++++++++-------- yarn-project/kv-store/src/lmdb/store.ts | 6 +- yarn-project/kv-store/src/utils.ts | 19 +-- yarn-project/prover-node/src/prover-node.ts | 1 + .../pxe/src/pxe_service/create_pxe_service.ts | 38 +++-- 28 files changed, 188 insertions(+), 171 deletions(-) diff --git a/.github/workflows/devnet-deploys.yml b/.github/workflows/devnet-deploys.yml index ac21ccdb446..a85c040f70f 100644 --- a/.github/workflows/devnet-deploys.yml +++ b/.github/workflows/devnet-deploys.yml @@ -401,6 +401,7 @@ jobs: --private-key ${{ env.CONTRACT_PUBLISHER_PRIVATE_KEY }} \ --rpc-url https://${{ env.DEPLOY_TAG }}-mainnet-fork.aztec.network:8545/${{ env.API_KEY }} \ --l1-chain-id ${{ env.L1_CHAIN_ID }} \ + --salt ${{ github.run_id }} \ --json | tee ./l1_contracts.json # upload contract addresses to S3 @@ -436,7 +437,7 @@ jobs: working-directory: ./yarn-project/aztec/terraform/node run: | terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/aztec-node" - terraform apply -input=false -auto-approve -replace="aws_efs_file_system.node_data_store" -var="NODE_P2P_TCP_PORT=${{ needs.set-network.outputs.node_tcp_range_start }}" -var="NODE_P2P_UDP_PORT=${{ needs.set-network.outputs.node_udp_range_start }}" + terraform apply -input=false -auto-approve -var="NODE_P2P_TCP_PORT=${{ needs.set-network.outputs.node_tcp_range_start }}" -var="NODE_P2P_UDP_PORT=${{ needs.set-network.outputs.node_udp_range_start }}" - name: Deploy Aztec Prover Nodes working-directory: ./yarn-project/aztec/terraform/prover-node @@ -454,7 +455,7 @@ jobs: working-directory: ./yarn-project/aztec/terraform/pxe run: | terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/pxe" - terraform apply -input=false -auto-approve -replace="aws_efs_file_system.pxe_data_store" + terraform apply -input=false -auto-approve bootstrap: runs-on: ubuntu-latest diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index f7460dd321f..3f53f193acf 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -75,8 +75,9 @@ contract Rollup is Leonidas, IRollup, ITestRollup { IRegistry _registry, IAvailabilityOracle _availabilityOracle, IERC20 _fpcJuice, - bytes32 _vkTreeRoot - ) Leonidas(msg.sender) { + bytes32 _vkTreeRoot, + address _ares + ) Leonidas(_ares) { verifier = new MockVerifier(); REGISTRY = _registry; AVAILABILITY_ORACLE = _availabilityOracle; diff --git 
a/l1-contracts/src/core/interfaces/messagebridge/IRegistry.sol b/l1-contracts/src/core/interfaces/messagebridge/IRegistry.sol index 3de70063340..707dd1ed704 100644 --- a/l1-contracts/src/core/interfaces/messagebridge/IRegistry.sol +++ b/l1-contracts/src/core/interfaces/messagebridge/IRegistry.sol @@ -31,4 +31,6 @@ interface IRegistry { // docs:start:registry_number_of_versions function numberOfVersions() external view returns (uint256); // docs:end:registry_number_of_versions + + function isRollupRegistered(address _rollup) external view returns (bool); } diff --git a/l1-contracts/src/core/messagebridge/Registry.sol b/l1-contracts/src/core/messagebridge/Registry.sol index 98cb60a4b89..013f63df759 100644 --- a/l1-contracts/src/core/messagebridge/Registry.sol +++ b/l1-contracts/src/core/messagebridge/Registry.sol @@ -24,10 +24,10 @@ contract Registry is IRegistry, Ownable { mapping(uint256 version => DataStructures.RegistrySnapshot snapshot) internal snapshots; mapping(address rollup => uint256 version) internal rollupToVersion; - constructor() Ownable(msg.sender) { + constructor(address _owner) Ownable(_owner) { // Inserts a "dead" rollup at version 0 // This is simply done to make first version 1, which fits better with the rest of the system - upgrade(address(0xdead)); + _upgrade(address(0xdead)); } /** @@ -49,6 +49,16 @@ contract Registry is IRegistry, Ownable { return version; } + /** + * @notice Returns whther the rollup is registered + * @param _rollup - The address of the rollup contract + * @return Whether the rollup is registered + */ + function isRollupRegistered(address _rollup) external view override(IRegistry) returns (bool) { + (, bool exists) = _getVersionFor(_rollup); + return exists; + } + /** * @notice Fetches a snapshot of the registry indicated by `version` * @dev the version is 0 indexed, so the first snapshot is version 0. 
@@ -87,6 +97,10 @@ contract Registry is IRegistry, Ownable { * @return The version of the new snapshot */ function upgrade(address _rollup) public override(IRegistry) onlyOwner returns (uint256) { + return _upgrade(_rollup); + } + + function _upgrade(address _rollup) internal returns (uint256) { (, bool exists) = _getVersionFor(_rollup); if (exists) revert Errors.Registry__RollupAlreadyRegistered(_rollup); diff --git a/l1-contracts/test/Registry.t.sol b/l1-contracts/test/Registry.t.sol index df6b03e7eb3..9935e77040b 100644 --- a/l1-contracts/test/Registry.t.sol +++ b/l1-contracts/test/Registry.t.sol @@ -15,7 +15,7 @@ contract RegistryTest is Test { Registry internal registry; function setUp() public { - registry = new Registry(); + registry = new Registry(address(this)); } function testConstructorSetup() public { diff --git a/l1-contracts/test/Rollup.t.sol b/l1-contracts/test/Rollup.t.sol index 3591e67cfb2..2d3d349b357 100644 --- a/l1-contracts/test/Rollup.t.sol +++ b/l1-contracts/test/Rollup.t.sol @@ -51,10 +51,12 @@ contract RollupTest is DecoderBase { vm.warp(initialTime); } - registry = new Registry(); + registry = new Registry(address(this)); availabilityOracle = new AvailabilityOracle(); portalERC20 = new PortalERC20(); - rollup = new Rollup(registry, availabilityOracle, IERC20(address(portalERC20)), bytes32(0)); + rollup = new Rollup( + registry, availabilityOracle, IERC20(address(portalERC20)), bytes32(0), address(this) + ); inbox = Inbox(address(rollup.INBOX())); outbox = Outbox(address(rollup.OUTBOX())); diff --git a/l1-contracts/test/portals/TokenPortal.t.sol b/l1-contracts/test/portals/TokenPortal.t.sol index d8f2b504c1f..476b72701ad 100644 --- a/l1-contracts/test/portals/TokenPortal.t.sol +++ b/l1-contracts/test/portals/TokenPortal.t.sol @@ -59,10 +59,11 @@ contract TokenPortalTest is Test { uint256 internal l2BlockNumber = 69; function setUp() public { - registry = new Registry(); + registry = new Registry(address(this)); portalERC20 = new PortalERC20(); - rollup = - new Rollup(registry, new AvailabilityOracle(), IERC20(address(portalERC20)), bytes32(0)); + rollup = new Rollup( + registry, new AvailabilityOracle(), IERC20(address(portalERC20)), bytes32(0), address(this) + ); inbox = rollup.INBOX(); outbox = rollup.OUTBOX(); diff --git a/l1-contracts/test/portals/UniswapPortal.t.sol b/l1-contracts/test/portals/UniswapPortal.t.sol index 61d3b8fede1..104b7664530 100644 --- a/l1-contracts/test/portals/UniswapPortal.t.sol +++ b/l1-contracts/test/portals/UniswapPortal.t.sol @@ -54,10 +54,11 @@ contract UniswapPortalTest is Test { uint256 forkId = vm.createFork(vm.rpcUrl("mainnet_fork")); vm.selectFork(forkId); - registry = new Registry(); + registry = new Registry(address(this)); PortalERC20 portalERC20 = new PortalERC20(); - rollup = - new Rollup(registry, new AvailabilityOracle(), IERC20(address(portalERC20)), bytes32(0)); + rollup = new Rollup( + registry, new AvailabilityOracle(), IERC20(address(portalERC20)), bytes32(0), address(this) + ); registry.upgrade(address(rollup)); portalERC20.mint(address(rollup), 1000000); diff --git a/l1-contracts/test/sparta/DevNet.t.sol b/l1-contracts/test/sparta/DevNet.t.sol index 12e64ec9614..cb591d3be32 100644 --- a/l1-contracts/test/sparta/DevNet.t.sol +++ b/l1-contracts/test/sparta/DevNet.t.sol @@ -57,10 +57,12 @@ contract DevNetTest is DecoderBase { vm.warp(initialTime); } - registry = new Registry(); + registry = new Registry(address(this)); availabilityOracle = new AvailabilityOracle(); portalERC20 = new PortalERC20(); - rollup = new 
Rollup(registry, availabilityOracle, IERC20(address(portalERC20)), bytes32(0)); + rollup = new Rollup( + registry, availabilityOracle, IERC20(address(portalERC20)), bytes32(0), address(this) + ); inbox = Inbox(address(rollup.INBOX())); outbox = Outbox(address(rollup.OUTBOX())); diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index ea3146b6446..eb8638e070a 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -60,10 +60,12 @@ contract SpartaTest is DecoderBase { vm.warp(initialTime); } - registry = new Registry(); + registry = new Registry(address(this)); availabilityOracle = new AvailabilityOracle(); portalERC20 = new PortalERC20(); - rollup = new Rollup(registry, availabilityOracle, IERC20(address(portalERC20)), bytes32(0)); + rollup = new Rollup( + registry, availabilityOracle, IERC20(address(portalERC20)), bytes32(0), address(this) + ); inbox = Inbox(address(rollup.INBOX())); outbox = Outbox(address(rollup.OUTBOX())); diff --git a/yarn-project/aztec/src/cli/cli.ts b/yarn-project/aztec/src/cli/cli.ts index 4242e07c644..1690f7e8149 100644 --- a/yarn-project/aztec/src/cli/cli.ts +++ b/yarn-project/aztec/src/cli/cli.ts @@ -99,6 +99,7 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge process.exit(1); } } + installSignalHandlers(debugLogger.info, signalHandlers); if (services.length) { diff --git a/yarn-project/aztec/src/cli/cmds/start_archiver.ts b/yarn-project/aztec/src/cli/cmds/start_archiver.ts index c3464fdbad9..ff3690ae66d 100644 --- a/yarn-project/aztec/src/cli/cmds/start_archiver.ts +++ b/yarn-project/aztec/src/cli/cmds/start_archiver.ts @@ -7,8 +7,7 @@ import { } from '@aztec/archiver'; import { createDebugLogger } from '@aztec/aztec.js'; import { type ServerList } from '@aztec/foundation/json-rpc/server'; -import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; -import { initStoreForRollup } from '@aztec/kv-store/utils'; +import { createStore } from '@aztec/kv-store/utils'; import { createAndStartTelemetryClient, getConfigEnvVars as getTelemetryClientConfig, @@ -22,11 +21,8 @@ export const startArchiver = async (options: any, signalHandlers: (() => Promise const archiverConfig = extractRelevantOptions(options, archiverConfigMappings); const storeLog = createDebugLogger('aztec:archiver:lmdb'); - const store = await initStoreForRollup( - AztecLmdbStore.open(archiverConfig.dataDirectory, false), - archiverConfig.l1Contracts.rollupAddress, - storeLog, - ); + const rollupAddress = archiverConfig.l1Contracts.rollupAddress; + const store = await createStore(archiverConfig, rollupAddress, storeLog); const archiverStore = new KVArchiverDataStore(store, archiverConfig.maxLogs); const telemetry = createAndStartTelemetryClient(getTelemetryClientConfig()); diff --git a/yarn-project/aztec/src/sandbox.ts b/yarn-project/aztec/src/sandbox.ts index c5fa92eec3a..c3f18cd4c60 100644 --- a/yarn-project/aztec/src/sandbox.ts +++ b/yarn-project/aztec/src/sandbox.ts @@ -132,6 +132,7 @@ export async function deployContractsToL1( l2FeeJuiceAddress: FeeJuiceAddress, vkTreeRoot: getVKTreeRoot(), assumeProvenUntil: opts.assumeProvenUntilBlockNumber, + salt: undefined, }), ); diff --git a/yarn-project/aztec/terraform/node/main.tf b/yarn-project/aztec/terraform/node/main.tf index 9df0305751d..b0242406c40 100644 --- a/yarn-project/aztec/terraform/node/main.tf +++ b/yarn-project/aztec/terraform/node/main.tf @@ -148,18 +148,6 @@ resource "aws_ecs_task_definition" "aztec-node" { } 
container_definitions = jsonencode([ - { - name = "init-container" - image = "amazonlinux:latest" - essential = false - command = ["sh", "-c", "mkdir -p ${local.data_dir}/node_${count.index + 1}/data ${local.data_dir}/node_${count.index + 1}/temp"] - mountPoints = [ - { - containerPath = local.data_dir - sourceVolume = "efs-data-store" - } - ] - }, { name = "${var.DEPLOY_TAG}-aztec-node-${count.index + 1}" image = "${var.DOCKERHUB_ACCOUNT}/aztec:${var.IMAGE_TAG}" @@ -380,10 +368,6 @@ resource "aws_ecs_task_definition" "aztec-node" { } ] dependsOn = [ - { - containerName = "init-container" - condition = "COMPLETE" - } ] logConfiguration = { logDriver = "awslogs" diff --git a/yarn-project/aztec/terraform/prover-node/main.tf b/yarn-project/aztec/terraform/prover-node/main.tf index f4cc8660022..b81ebaf76b7 100644 --- a/yarn-project/aztec/terraform/prover-node/main.tf +++ b/yarn-project/aztec/terraform/prover-node/main.tf @@ -148,18 +148,6 @@ resource "aws_ecs_task_definition" "aztec-prover-node" { } container_definitions = jsonencode([ - { - name = "init-container" - image = "amazonlinux:latest" - essential = false - command = ["sh", "-c", "mkdir -p ${local.data_dir}/prover_node_${count.index + 1}/data ${local.data_dir}/prover_node_${count.index + 1}/temp"] - mountPoints = [ - { - containerPath = local.data_dir - sourceVolume = "efs-data-store" - } - ] - }, { name = "${var.DEPLOY_TAG}-aztec-prover-node-${count.index + 1}" image = "${var.DOCKERHUB_ACCOUNT}/aztec:${var.IMAGE_TAG}" @@ -246,10 +234,6 @@ resource "aws_ecs_task_definition" "aztec-prover-node" { } ] dependsOn = [ - { - containerName = "init-container" - condition = "COMPLETE" - } ] logConfiguration = { logDriver = "awslogs" diff --git a/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts b/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts index 250801ed477..885eb37007a 100644 --- a/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts +++ b/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts @@ -22,7 +22,7 @@ import { } from '@aztec/circuits.js'; import { siloNoteHash } from '@aztec/circuits.js/hash'; import { runInDirectory } from '@aztec/foundation/fs'; -import { createDebugLogger } from '@aztec/foundation/log'; +import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; import { ClientCircuitArtifacts, @@ -57,6 +57,7 @@ import { executeBbClientIvcProof, verifyProof, } from '../bb/execute.js'; +import { type BBConfig } from '../config.js'; import { mapProtocolArtifactNameToCircuitName } from '../stats.js'; import { extractVkData } from '../verification_key/verification_key_data.js'; @@ -73,13 +74,18 @@ export class BBNativePrivateKernelProver implements PrivateKernelProver { Promise >(); - constructor( + private constructor( private bbBinaryPath: string, private bbWorkingDirectory: string, private skipCleanup: boolean, private log = createDebugLogger('aztec:bb-native-prover'), ) {} + public static async new(config: BBConfig, log?: DebugLogger) { + await fs.mkdir(config.bbWorkingDirectory, { recursive: true }); + return new BBNativePrivateKernelProver(config.bbBinaryPath, config.bbWorkingDirectory, !!config.bbSkipCleanup, log); + } + private async _createClientIvcProof( directory: string, acirs: Buffer[], diff --git a/yarn-project/bb-prover/src/verifier/bb_verifier.ts b/yarn-project/bb-prover/src/verifier/bb_verifier.ts index 52ddf8129ab..8cfc2688de1 100644 --- a/yarn-project/bb-prover/src/verifier/bb_verifier.ts +++ 
b/yarn-project/bb-prover/src/verifier/bb_verifier.ts @@ -37,6 +37,7 @@ export class BBCircuitVerifier implements ClientProtocolCircuitVerifier { initialCircuits: ProtocolArtifact[] = [], logger = createDebugLogger('aztec:bb-verifier'), ) { + await fs.mkdir(config.bbWorkingDirectory, { recursive: true }); const keys = new Map>(); for (const circuit of initialCircuits) { const vkData = await this.generateVerificationKey( diff --git a/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts b/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts index ba9c908dcff..f76fe467596 100644 --- a/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts +++ b/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts @@ -112,20 +112,8 @@ async function deployERC20({ walletClient, publicClient }: L1Clients) { contractBytecode: TokenPortalBytecode, }; - const erc20Address = await deployL1Contract( - walletClient, - publicClient, - erc20.contractAbi, - erc20.contractBytecode, - [], - ); - const portalAddress = await deployL1Contract( - walletClient, - publicClient, - portal.contractAbi, - portal.contractBytecode, - [], - ); + const erc20Address = await deployL1Contract(walletClient, publicClient, erc20.contractAbi, erc20.contractBytecode); + const portalAddress = await deployL1Contract(walletClient, publicClient, portal.contractAbi, portal.contractBytecode); return { erc20Address, diff --git a/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts b/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts index afdf69c44f6..659d1c1e187 100644 --- a/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts +++ b/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts @@ -7,11 +7,12 @@ export async function deployL1Contracts( chainId: number, privateKey: string | undefined, mnemonic: string, + salt: number | undefined, json: boolean, log: LogFn, debugLogger: DebugLogger, ) { - const { l1ContractAddresses } = await deployAztecContracts(rpcUrl, chainId, privateKey, mnemonic, debugLogger); + const { l1ContractAddresses } = await deployAztecContracts(rpcUrl, chainId, privateKey, mnemonic, salt, debugLogger); if (json) { log( diff --git a/yarn-project/cli/src/cmds/l1/index.ts b/yarn-project/cli/src/cmds/l1/index.ts index ebb7c5da1e2..febe2a446ca 100644 --- a/yarn-project/cli/src/cmds/l1/index.ts +++ b/yarn-project/cli/src/cmds/l1/index.ts @@ -30,6 +30,7 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL 'test test test test test test test test test test test junk', ) .addOption(l1ChainIdOption) + .option('--salt ', 'The optional salt to use in deployment', arg => parseInt(arg)) .option('--json', 'Output the contract addresses in JSON format') .action(async options => { const { deployL1Contracts } = await import('./deploy_l1_contracts.js'); @@ -38,6 +39,7 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL options.l1ChainId, options.privateKey, options.mnemonic, + options.salt, options.json, log, debugLogger, diff --git a/yarn-project/cli/src/utils/aztec.ts b/yarn-project/cli/src/utils/aztec.ts index ddb32a74aa0..ec413f2689d 100644 --- a/yarn-project/cli/src/utils/aztec.ts +++ b/yarn-project/cli/src/utils/aztec.ts @@ -56,6 +56,7 @@ export async function deployAztecContracts( chainId: number, privateKey: string | undefined, mnemonic: string, + salt: number | undefined, debugLogger: DebugLogger, ): Promise { const { @@ -116,6 +117,7 @@ export async function deployAztecContracts( return await deployL1Contracts(chain.rpcUrl, account, chain.chainInfo, debugLogger, l1Artifacts, { 
l2FeeJuiceAddress: FeeJuiceAddress, vkTreeRoot: getVKTreeRoot(), + salt, }); } diff --git a/yarn-project/end-to-end/src/fixtures/setup_l1_contracts.ts b/yarn-project/end-to-end/src/fixtures/setup_l1_contracts.ts index 4e37c77f3b6..4cc48f528fa 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_l1_contracts.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_l1_contracts.ts @@ -62,6 +62,7 @@ export const setupL1Contracts = async ( const l1Data = await deployL1Contracts(l1RpcUrl, account, foundry, logger, l1Artifacts, { l2FeeJuiceAddress: FeeJuiceAddress, vkTreeRoot: getVKTreeRoot(), + salt: undefined, }); return l1Data; diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 0161ff2c2aa..160449b6df7 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -146,6 +146,7 @@ export const setupL1Contracts = async ( const l1Data = await deployL1Contracts(l1RpcUrl, account, foundry, logger, l1Artifacts, { l2FeeJuiceAddress: FeeJuiceAddress, vkTreeRoot: getVKTreeRoot(), + salt: undefined, }); return l1Data; diff --git a/yarn-project/ethereum/src/deploy_l1_contracts.ts b/yarn-project/ethereum/src/deploy_l1_contracts.ts index 00a5faff592..c493809c632 100644 --- a/yarn-project/ethereum/src/deploy_l1_contracts.ts +++ b/yarn-project/ethereum/src/deploy_l1_contracts.ts @@ -11,11 +11,16 @@ import { type HttpTransport, type PublicClient, type WalletClient, + concatHex, createPublicClient, createWalletClient, + encodeDeployData, getAddress, getContract, + getContractAddress, http, + numberToHex, + padHex, } from 'viem'; import { type HDAccount, type PrivateKeyAccount, mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'; import { foundry } from 'viem/chains'; @@ -141,7 +146,7 @@ export const deployL1Contracts = async ( chain: Chain, logger: DebugLogger, contractsToDeploy: L1ContractArtifactsForDeployment, - args: { l2FeeJuiceAddress: AztecAddress; vkTreeRoot: Fr; assumeProvenUntil?: number }, + args: { l2FeeJuiceAddress: AztecAddress; vkTreeRoot: Fr; assumeProvenUntil?: number; salt: number | undefined }, ): Promise => { // We are assuming that you are running this on a local anvil node which have 1s block times // To align better with actual deployment, we update the block interval to 12s @@ -162,55 +167,29 @@ export const deployL1Contracts = async ( } logger.info(`Set block interval to ${interval}`); - logger.debug('Deploying contracts...'); + logger.info(`Deploying contracts from ${account.address.toString()}...`); - const walletClient = createWalletClient({ - account, - chain, - transport: http(rpcUrl), - }); - const publicClient = createPublicClient({ - chain, - transport: http(rpcUrl), - }); + const walletClient = createWalletClient({ account, chain, transport: http(rpcUrl) }); + const publicClient = createPublicClient({ chain, transport: http(rpcUrl) }); + const deployer = new L1Deployer(walletClient, publicClient, args.salt, logger); - const registryAddress = await deployL1Contract( - walletClient, - publicClient, - contractsToDeploy.registry.contractAbi, - contractsToDeploy.registry.contractBytecode, - ); + const registryAddress = await deployer.deploy(contractsToDeploy.registry, [account.address.toString()]); logger.info(`Deployed Registry at ${registryAddress}`); - const availabilityOracleAddress = await deployL1Contract( - walletClient, - publicClient, - contractsToDeploy.availabilityOracle.contractAbi, - contractsToDeploy.availabilityOracle.contractBytecode, - ); + 
const availabilityOracleAddress = await deployer.deploy(contractsToDeploy.availabilityOracle); logger.info(`Deployed AvailabilityOracle at ${availabilityOracleAddress}`); - const feeJuiceAddress = await deployL1Contract( - walletClient, - publicClient, - contractsToDeploy.feeJuice.contractAbi, - contractsToDeploy.feeJuice.contractBytecode, - ); + const feeJuiceAddress = await deployer.deploy(contractsToDeploy.feeJuice); logger.info(`Deployed Fee Juice at ${feeJuiceAddress}`); - const rollupAddress = await deployL1Contract( - walletClient, - publicClient, - contractsToDeploy.rollup.contractAbi, - contractsToDeploy.rollup.contractBytecode, - [ - getAddress(registryAddress.toString()), - getAddress(availabilityOracleAddress.toString()), - getAddress(feeJuiceAddress.toString()), - args.vkTreeRoot.toString(), - ], - ); + const rollupAddress = await deployer.deploy(contractsToDeploy.rollup, [ + getAddress(registryAddress.toString()), + getAddress(availabilityOracleAddress.toString()), + getAddress(feeJuiceAddress.toString()), + args.vkTreeRoot.toString(), + account.address.toString(), + ]); logger.info(`Deployed Rollup at ${rollupAddress}`); // Set initial blocks as proven if requested @@ -253,15 +232,15 @@ export const deployL1Contracts = async ( abi: contractsToDeploy.registry.contractAbi, client: walletClient, }); - await registryContract.write.upgrade([getAddress(rollupAddress.toString())], { account }); + if (!(await registryContract.read.isRollupRegistered([getAddress(rollupAddress.toString())]))) { + await registryContract.write.upgrade([getAddress(rollupAddress.toString())], { account }); + logger.verbose(`Upgraded registry contract at ${registryAddress} to rollup ${rollupAddress}`); + } else { + logger.verbose(`Registry ${registryAddress} has already registered rollup ${rollupAddress}`); + } // this contract remains uninitialized because at this point we don't know the address of the Fee Juice on L2 - const feeJuicePortalAddress = await deployL1Contract( - walletClient, - publicClient, - contractsToDeploy.feeJuicePortal.contractAbi, - contractsToDeploy.feeJuicePortal.contractBytecode, - ); + const feeJuicePortalAddress = await deployer.deploy(contractsToDeploy.feeJuicePortal); logger.info(`Deployed Gas Portal at ${feeJuicePortalAddress}`); @@ -310,6 +289,33 @@ export const deployL1Contracts = async ( }; }; +class L1Deployer { + private salt: Hex | undefined; + constructor( + private walletClient: WalletClient, + private publicClient: PublicClient, + maybeSalt: number | undefined, + private logger: DebugLogger, + ) { + this.salt = maybeSalt ? padHex(numberToHex(maybeSalt), { size: 32 }) : undefined; + } + + deploy( + params: { contractAbi: Narrow; contractBytecode: Hex }, + args: readonly unknown[] = [], + ): Promise { + return deployL1Contract( + this.walletClient, + this.publicClient, + params.contractAbi, + params.contractBytecode, + args, + this.salt, + this.logger, + ); + } +} + // docs:start:deployL1Contract /** * Helper function to deploy ETH contracts. 
@@ -326,23 +332,40 @@ export async function deployL1Contract( abi: Narrow, bytecode: Hex, args: readonly unknown[] = [], + maybeSalt?: Hex, + logger?: DebugLogger, ): Promise { - const hash = await walletClient.deployContract({ - abi, - bytecode, - args, - }); - - const receipt = await publicClient.waitForTransactionReceipt({ hash, pollingInterval: 100 }); - const contractAddress = receipt.contractAddress; - if (!contractAddress) { - throw new Error( - `No contract address found in receipt: ${JSON.stringify(receipt, (_, val) => - typeof val === 'bigint' ? String(val) : val, - )}`, - ); + if (maybeSalt) { + const salt = padHex(maybeSalt, { size: 32 }); + const deployer: Hex = '0x4e59b44847b379578588920cA78FbF26c0B4956C'; + const calldata = encodeDeployData({ abi, bytecode, args }); + const address = getContractAddress({ from: deployer, salt, bytecode: calldata, opcode: 'CREATE2' }); + const existing = await publicClient.getBytecode({ address }); + + if (existing === undefined || existing === '0x') { + const hash = await walletClient.sendTransaction({ + to: deployer, + data: concatHex([salt, calldata]), + }); + logger?.verbose(`Deploying contract with salt ${salt} to address ${address} in tx ${hash}`); + await publicClient.waitForTransactionReceipt({ hash, pollingInterval: 100 }); + } else { + logger?.verbose(`Skipping existing deployment of contract with salt ${salt} to address ${address}`); + } + return EthAddress.fromString(address); + } else { + const hash = await walletClient.deployContract({ abi, bytecode, args }); + logger?.verbose(`Deploying contract in tx ${hash}`); + const receipt = await publicClient.waitForTransactionReceipt({ hash, pollingInterval: 100 }); + const contractAddress = receipt.contractAddress; + if (!contractAddress) { + throw new Error( + `No contract address found in receipt: ${JSON.stringify(receipt, (_, val) => + typeof val === 'bigint' ? 
String(val) : val, + )}`, + ); + } + return EthAddress.fromString(contractAddress); } - - return EthAddress.fromString(receipt.contractAddress!); } // docs:end:deployL1Contract diff --git a/yarn-project/kv-store/src/lmdb/store.ts b/yarn-project/kv-store/src/lmdb/store.ts index bbcf341859d..885e49ce477 100644 --- a/yarn-project/kv-store/src/lmdb/store.ts +++ b/yarn-project/kv-store/src/lmdb/store.ts @@ -1,5 +1,6 @@ import { createDebugLogger } from '@aztec/foundation/log'; +import { mkdirSync } from 'fs'; import { mkdtemp } from 'fs/promises'; import { type Database, type Key, type RootDatabase, open } from 'lmdb'; import { tmpdir } from 'os'; @@ -60,7 +61,10 @@ export class AztecLmdbStore implements AztecKVStore { ephemeral: boolean = false, log = createDebugLogger('aztec:kv-store:lmdb'), ): AztecLmdbStore { - log.verbose(`Opening LMDB database at ${path || 'temporary location'}`); + log.debug(`Opening LMDB database at ${path || 'temporary location'}`); + if (path) { + mkdirSync(path, { recursive: true }); + } const rootDb = open({ path, noSync: ephemeral }); return new AztecLmdbStore(rootDb, ephemeral, path); } diff --git a/yarn-project/kv-store/src/utils.ts b/yarn-project/kv-store/src/utils.ts index 0c96bf918d9..3c33d571e77 100644 --- a/yarn-project/kv-store/src/utils.ts +++ b/yarn-project/kv-store/src/utils.ts @@ -5,17 +5,13 @@ import { type AztecKVStore } from './interfaces/store.js'; import { AztecLmdbStore } from './lmdb/store.js'; export function createStore( - config: { dataDirectory: string | undefined }, + config: { dataDirectory: string | undefined } | (string | undefined), rollupAddress: EthAddress, log: Logger = createDebugLogger('aztec:kv-store'), ) { - if (config.dataDirectory) { - log.info(`Using data directory: ${config.dataDirectory}`); - } else { - log.info('Using ephemeral data directory'); - } - - return initStoreForRollup(AztecLmdbStore.open(config.dataDirectory, false), rollupAddress, log); + const dataDirectory = typeof config === 'string' ? config : config?.dataDirectory; + log.info(dataDirectory ? `Creating data store at directory ${dataDirectory}` : 'Creating ephemeral data store'); + return initStoreForRollup(AztecLmdbStore.open(dataDirectory, false), rollupAddress, log); } /** @@ -38,9 +34,10 @@ export async function initStoreForRollup( const storedRollupAddressString = rollupAddressValue.get(); if (typeof storedRollupAddressString !== 'undefined' && storedRollupAddressString !== rollupAddressString) { - log?.warn( - `Rollup address mismatch: expected ${rollupAddress}, found ${rollupAddressValue}. Clearing entire database...`, - ); + log?.warn(`Rollup address mismatch. 
Clearing entire database...`, { + expected: rollupAddressString, + found: storedRollupAddressString, + }); await store.clear(); } diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts index 8c9f7a21fb1..358505889ff 100644 --- a/yarn-project/prover-node/src/prover-node.ts +++ b/yarn-project/prover-node/src/prover-node.ts @@ -124,6 +124,7 @@ export class ProverNode { } // Fast forward world state to right before the target block and get a fork + this.log.verbose(`Creating proving job for block ${fromBlock}`); const db = await this.worldState.syncImmediateAndFork(fromBlock - 1, true); // Create a processor using the forked world state diff --git a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts index 62c15081acc..30d96288cca 100644 --- a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts @@ -3,8 +3,7 @@ import { type AztecNode, type PrivateKernelProver } from '@aztec/circuit-types'; import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { KeyStore } from '@aztec/key-store'; -import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; -import { initStoreForRollup } from '@aztec/kv-store/utils'; +import { createStore } from '@aztec/kv-store/utils'; import { getCanonicalAuthRegistry } from '@aztec/protocol-contracts/auth-registry'; import { getCanonicalClassRegisterer } from '@aztec/protocol-contracts/class-registerer'; import { getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; @@ -43,25 +42,10 @@ export async function createPXEService( const keyStorePath = config.dataDirectory ? join(config.dataDirectory, 'pxe_key_store') : undefined; const l1Contracts = await aztecNode.getL1ContractAddresses(); - const keyStore = new KeyStore(await initStoreForRollup(AztecLmdbStore.open(keyStorePath), l1Contracts.rollupAddress)); - const db = new KVPxeDatabase(await initStoreForRollup(AztecLmdbStore.open(pxeDbPath), l1Contracts.rollupAddress)); - - // (@PhilWindle) Temporary validation until WASM is implemented - let prover: PrivateKernelProver | undefined = proofCreator; - if (!prover) { - if (config.proverEnabled && (!config.bbBinaryPath || !config.bbWorkingDirectory)) { - throw new Error(`Prover must be configured with binary path and working directory`); - } - prover = !config.proverEnabled - ? new TestPrivateKernelProver() - : new BBNativePrivateKernelProver( - config.bbBinaryPath!, - config.bbWorkingDirectory!, - !!config.bbSkipCleanup, - createDebugLogger('aztec:pxe:bb-native-prover' + (logSuffix ? `:${logSuffix}` : '')), - ); - } + const keyStore = new KeyStore(await createStore(keyStorePath, l1Contracts.rollupAddress)); + const db = new KVPxeDatabase(await createStore(pxeDbPath, l1Contracts.rollupAddress)); + const prover = proofCreator ?? 
(await createProver(config, logSuffix)); const server = new PXEService(keyStore, aztecNode, db, prover, config, logSuffix); for (const contract of [ getCanonicalClassRegisterer(), @@ -77,3 +61,17 @@ export async function createPXEService( await server.start(); return server; } + +function createProver(config: PXEServiceConfig, logSuffix?: string) { + if (!config.proverEnabled) { + return new TestPrivateKernelProver(); + } + + // (@PhilWindle) Temporary validation until WASM is implemented + if (!config.bbBinaryPath || !config.bbWorkingDirectory) { + throw new Error(`Prover must be configured with binary path and working directory`); + } + const bbConfig = config as Required> & PXEServiceConfig; + const log = createDebugLogger('aztec:pxe:bb-native-prover' + (logSuffix ? `:${logSuffix}` : '')); + return BBNativePrivateKernelProver.new(bbConfig, log); +}