feat: Handle reorgs on sequencer #9201

Merged: 13 commits on Oct 21, 2024
38 changes: 25 additions & 13 deletions l1-contracts/src/core/Rollup.sol
@@ -127,7 +127,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup {
    * @dev Will revert if there is nothing to prune or if the chain is not ready to be pruned
    */
   function prune() external override(IRollup) {
-    require(_canPrune(), Errors.Rollup__NothingToPrune());
+    require(canPrune(), Errors.Rollup__NothingToPrune());
     _prune();
   }

@@ -315,20 +315,23 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup {
   {
     Slot slot = getSlotAt(_ts);
 
-    Slot lastSlot = blocks[tips.pendingBlockNumber].slotNumber;
+    // Consider if a prune will hit in this slot
+    uint256 pendingBlockNumber = _canPruneAt(_ts) ? tips.provenBlockNumber : tips.pendingBlockNumber;
+
+    Slot lastSlot = blocks[pendingBlockNumber].slotNumber;
 
     require(slot > lastSlot, Errors.Rollup__SlotAlreadyInChain(lastSlot, slot));
 
-    // Make sure that the proposer is up to date
-    bytes32 tipArchive = archive();
+    // Make sure that the proposer is up to date and on the right chain (ie no reorgs)
+    bytes32 tipArchive = blocks[pendingBlockNumber].archive;
     require(tipArchive == _archive, Errors.Rollup__InvalidArchive(tipArchive, _archive));
 
     SignatureLib.Signature[] memory sigs = new SignatureLib.Signature[](0);
     DataStructures.ExecutionFlags memory flags =
       DataStructures.ExecutionFlags({ignoreDA: true, ignoreSignatures: true});
     _validateLeonidas(slot, sigs, _archive, flags);
 
-    return (slot, tips.pendingBlockNumber + 1);
+    return (slot, pendingBlockNumber + 1);
   }
 
   /**
@@ -417,7 +420,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup {
     SignatureLib.Signature[] memory _signatures,
     bytes calldata _body
   ) public override(IRollup) {
-    if (_canPrune()) {
+    if (canPrune()) {
       _prune();
     }
     bytes32 txsEffectsHash = TxsDecoder.decode(_body);
@@ -733,15 +736,19 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup {
     emit PrunedPending(tips.provenBlockNumber, pending);
   }
 
-  function _canPrune() internal view returns (bool) {
+  function canPrune() public view returns (bool) {
+    return _canPruneAt(Timestamp.wrap(block.timestamp));
+  }
+
+  function _canPruneAt(Timestamp _ts) internal view returns (bool) {
     if (
       tips.pendingBlockNumber == tips.provenBlockNumber
         || tips.pendingBlockNumber <= assumeProvenThroughBlockNumber
     ) {
       return false;
     }
 
-    Slot currentSlot = getCurrentSlot();
+    Slot currentSlot = getSlotAt(_ts);
     Epoch oldestPendingEpoch = getEpochForBlock(tips.provenBlockNumber + 1);
     Slot startSlotOfPendingEpoch = oldestPendingEpoch.toSlots();
 
@@ -780,7 +787,11 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup {
     bytes32 _txEffectsHash,
     DataStructures.ExecutionFlags memory _flags
   ) internal view {
-    _validateHeaderForSubmissionBase(_header, _currentTime, _txEffectsHash, _flags);
+    uint256 pendingBlockNumber =
+      _canPruneAt(_currentTime) ? tips.provenBlockNumber : tips.pendingBlockNumber;
+    _validateHeaderForSubmissionBase(
+      _header, _currentTime, _txEffectsHash, pendingBlockNumber, _flags
+    );
     _validateHeaderForSubmissionSequencerSelection(
       Slot.wrap(_header.globalVariables.slotNumber), _signatures, _digest, _currentTime, _flags
     );
@@ -846,6 +857,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup {
     HeaderLib.Header memory _header,
     Timestamp _currentTime,
     bytes32 _txsEffectsHash,
+    uint256 _pendingBlockNumber,
     DataStructures.ExecutionFlags memory _flags
   ) internal view {
     require(
@@ -859,20 +871,20 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup {
     );
 
     require(
-      _header.globalVariables.blockNumber == tips.pendingBlockNumber + 1,
+      _header.globalVariables.blockNumber == _pendingBlockNumber + 1,
       Errors.Rollup__InvalidBlockNumber(
-        tips.pendingBlockNumber + 1, _header.globalVariables.blockNumber
+        _pendingBlockNumber + 1, _header.globalVariables.blockNumber
       )
     );
 
-    bytes32 tipArchive = archive();
+    bytes32 tipArchive = blocks[_pendingBlockNumber].archive;
     require(
       tipArchive == _header.lastArchive.root,
       Errors.Rollup__InvalidArchive(tipArchive, _header.lastArchive.root)
     );
 
     Slot slot = Slot.wrap(_header.globalVariables.slotNumber);
-    Slot lastSlot = blocks[tips.pendingBlockNumber].slotNumber;
+    Slot lastSlot = blocks[_pendingBlockNumber].slotNumber;
     require(slot > lastSlot, Errors.Rollup__SlotAlreadyInChain(lastSlot, slot));
 
     Timestamp timestamp = getTimestampForSlot(slot);
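
The validation changes above all follow one rule: if a prune would hit, the effective tip is the proven block, not the pending one. A minimal TypeScript sketch of how an off-chain component can mirror that rule, with the viem-style read handle modeled on the archiver calls later in this diff (treat the exact call shapes as assumptions):

// Sketch: pick the tip the next block builds on, mirroring _canPruneAt's rule.
type RollupReader = { canPrune: (opts: { blockNumber: bigint }) => Promise<boolean> };

async function effectiveTip(
  rollup: RollupReader,
  provenBlockNumber: bigint,
  pendingBlockNumber: bigint,
  l1BlockNumber: bigint,
): Promise<bigint> {
  // If a prune would hit on the next submission, build on the proven tip.
  const willPrune = await rollup.canPrune({ blockNumber: l1BlockNumber });
  return willPrune ? provenBlockNumber : pendingBlockNumber;
}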
2 changes: 2 additions & 0 deletions l1-contracts/src/core/interfaces/IRollup.sol
@@ -32,6 +32,8 @@ interface IRollup {
 
   function prune() external;
 
+  function canPrune() external view returns (bool);
+
   function claimEpochProofRight(EpochProofQuoteLib.SignedEpochProofQuote calldata _quote) external;
 
   function propose(
5 changes: 4 additions & 1 deletion yarn-project/archiver/src/archiver/archiver.test.ts
@@ -298,7 +298,7 @@ describe('Archiver', () => {
     expect(loggerSpy).toHaveBeenNthCalledWith(2, `No blocks to retrieve from ${1n} to ${50n}`);
   }, 10_000);
 
-  it('Handle L2 reorg', async () => {
+  it('handles L2 reorg', async () => {
     const loggerSpy = jest.spyOn((archiver as any).log, 'verbose');
 
     let latestBlockNum = await archiver.getBlockNumber();
@@ -378,6 +378,9 @@ describe('Archiver', () => {
     // The random blocks don't include contract instances nor classes, so we cannot look for those here.
   }, 10_000);
 
+  // TODO(palla/reorg): Add a unit test for the archiver handleEpochPrune
+  xit('handles an upcoming L2 prune', () => {});
+
   // logs should be created in order of how archiver syncs.
   const mockGetLogs = (logs: {
     messageSent?: ReturnType<typeof makeMessageSentEventWithIndexInL2BlockSubtree>[];
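
A sketch of how the skipped test above might look once implemented, following the suite's mocking style; the rollupRead and store handles are assumed stand-ins for the suite's actual fixtures:

// Hypothetical shape for the TODO above: make canPrune() report true and expect
// the archiver to unwind back to the proven tip on its next sync iteration.
it('handles an upcoming L2 prune', async () => {
  rollupRead.canPrune.mockResolvedValue(true); // assumed mock of rollup.read.canPrune
  await archiver.start(true);
  // With a pending chain ahead of the proven tip, the archiver should unwind it.
  expect(store.unwindBlocks).toHaveBeenCalled();
});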
62 changes: 49 additions & 13 deletions yarn-project/archiver/src/archiver/archiver.ts
@@ -246,7 +246,16 @@ export class Archiver implements ArchiveSource {
     await this.handleL1ToL2Messages(blockUntilSynced, messagesSynchedTo, currentL1BlockNumber);
 
     // ********** Events that are processed per L2 block **********
-    await this.handleL2blocks(blockUntilSynced, blocksSynchedTo, currentL1BlockNumber);
+    if (currentL1BlockNumber > blocksSynchedTo) {
+      // First we retrieve new L2 blocks
+      const { provenBlockNumber } = await this.handleL2blocks(blockUntilSynced, blocksSynchedTo, currentL1BlockNumber);
+      // And then we prune the current epoch if it'd reorg on next submission.
+      // Note that we don't do this before retrieving L2 blocks because we may need to retrieve
+      // blocks from more than 2 epochs ago, so we want to make sure we have the latest view of
+      // the chain locally before we start unwinding stuff. This can be optimized by figuring out
+      // up to which point we're pruning, and then requesting L2 blocks up to that point only.
+      await this.handleEpochPrune(provenBlockNumber, currentL1BlockNumber);
+    }
 
     // Store latest l1 block number and timestamp seen. Used for epoch and slots calculations.
     if (!this.l1BlockNumber || this.l1BlockNumber < currentL1BlockNumber) {
@@ -255,6 +264,27 @@ export class Archiver implements ArchiveSource {
     }
   }
 
+  /** Checks if there'd be a reorg for the next block submission and starts pruning now. */
+  private async handleEpochPrune(provenBlockNumber: bigint, currentL1BlockNumber: bigint) {
+    const localPendingBlockNumber = BigInt(await this.getBlockNumber());
+
+    const canPrune =
+      localPendingBlockNumber > provenBlockNumber &&
+      (await this.rollup.read.canPrune({ blockNumber: currentL1BlockNumber }));

[Review comment from Contributor]
You will be running into the same issue as much of the other simulation: the timestamp for this is in the past. Essentially, you figure out that you can prune, but only after the fact. In reality, you already know that it will be pruned when the next block lands, so you should probably do the same as those others. @just-mitch ran into a similar issue with the proving claims. #9193

[Review comment from Collaborator]
Filed #9308.

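A sketch of the approach the reviewer suggests, assuming a hypothetical external view canPruneAt(uint256) wrapping the internal _canPruneAt shown above (the contract in this PR only exposes canPrune(); see #9308):

// Hypothetical: ask whether a block landing in the *next* slot would prune,
// rather than whether one landing at the last seen L1 timestamp would have.
const ASSUMED_SLOT_DURATION_S = 36n; // illustrative value only

async function nextBlockWillPrune(
  rollup: { canPruneAt: (args: [bigint]) => Promise<boolean> }, // assumed wrapper
  lastSeenL1Timestamp: bigint,
): Promise<boolean> {
  return rollup.canPruneAt([lastSeenL1Timestamp + ASSUMED_SLOT_DURATION_S]);
}
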
+    if (canPrune) {
+      this.log.verbose(`L2 prune will occur on next submission. Rolling back to last proven block.`);
+      const blocksToUnwind = localPendingBlockNumber - provenBlockNumber;
+      this.log.verbose(
+        `Unwinding ${blocksToUnwind} block${blocksToUnwind > 1n ? 's' : ''} from block ${localPendingBlockNumber}`,
+      );
+      await this.store.unwindBlocks(Number(localPendingBlockNumber), Number(blocksToUnwind));
+      // TODO(palla/reorg): Do we need to set the block synched L1 block number here?
+      // Seems like the next iteration should handle this.
+      // await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
+    }
+  }

   private async handleL1ToL2Messages(
     blockUntilSynced: boolean,
     messagesSynchedTo: bigint,
@@ -291,11 +321,11 @@ export class Archiver implements ArchiveSource {
     );
   }

-  private async handleL2blocks(blockUntilSynced: boolean, blocksSynchedTo: bigint, currentL1BlockNumber: bigint) {
-    if (currentL1BlockNumber <= blocksSynchedTo) {
-      return;
-    }
-
+  private async handleL2blocks(
+    blockUntilSynced: boolean,
+    blocksSynchedTo: bigint,
+    currentL1BlockNumber: bigint,
+  ): Promise<{ provenBlockNumber: bigint }> {
     const localPendingBlockNumber = BigInt(await this.getBlockNumber());
     const [
       provenBlockNumber,
@@ -304,7 +334,7 @@ export class Archiver implements ArchiveSource {
       pendingArchive,
       archiveForLocalPendingBlockNumber,
       provenEpochNumber,
-    ] = await this.rollup.read.status([localPendingBlockNumber]);
+    ] = await this.rollup.read.status([localPendingBlockNumber], { blockNumber: currentL1BlockNumber });

     const updateProvenBlock = async () => {
       const localBlockForDestinationProvenBlockNumber = await this.getBlock(Number(provenBlockNumber));
@@ -326,7 +356,7 @@ export class Archiver implements ArchiveSource {
     if (noBlocks) {
       await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
       this.log.verbose(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
-      return;
+      return { provenBlockNumber };
     }
 
     await updateProvenBlock();
@@ -343,7 +373,7 @@ export class Archiver implements ArchiveSource {
     if (noBlockSinceLast) {
       await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
       this.log.verbose(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
-      return;
+      return { provenBlockNumber };
     }
 
     const localPendingBlockInChain = archiveForLocalPendingBlockNumber === localPendingBlock.archive.root.toString();
@@ -383,16 +413,16 @@ export class Archiver implements ArchiveSource {
       this.rollup,
       this.publicClient,
       blockUntilSynced,
-      blocksSynchedTo + 1n,
+      blocksSynchedTo + 1n, // TODO(palla/reorg): If the L2 reorg was due to an L1 reorg, we need to start search earlier
       currentL1BlockNumber,
       this.log,
     );
 
     if (retrievedBlocks.length === 0) {
       // We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based off infura.
       // See further details in earlier comments.
-      this.log.verbose(`Retrieved no new blocks from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
-      return;
+      this.log.verbose(`Retrieved no new L2 blocks from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
+      return { provenBlockNumber };
     }
 
     this.log.debug(
@@ -410,6 +440,7 @@ export class Archiver implements ArchiveSource {
 
     const timer = new Timer();
     await this.store.addBlocks(retrievedBlocks);
+
     // Important that we update AFTER inserting the blocks.
     await updateProvenBlock();
     this.instrumentation.processNewBlocks(
@@ -418,6 +449,8 @@ export class Archiver implements ArchiveSource {
     );
     const lastL2BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].data.number;
     this.log.verbose(`Processed ${retrievedBlocks.length} new L2 blocks up to ${lastL2BlockNumber}`);
+
+    return { provenBlockNumber };
   }
 
   /**
@@ -497,7 +530,10 @@ export class Archiver implements ArchiveSource {
     const [_startTimestamp, endTimestamp] = getTimestampRangeForEpoch(epochNumber, this.l1constants);
 
     // For this computation, we throw in a few extra seconds just for good measure,
-    // since we know the next L1 block won't be mined within this range
+    // since we know the next L1 block won't be mined within this range. Remember that
+    // l1Timestamp is the timestamp of the last L1 block we've seen, so this 3s relies on
+    // the fact that L1 won't mine two blocks within 3s of each other.
+    // TODO(palla/reorg): Is the above a safe assumption?
     const leeway = 3n;
     return l1Timestamp + leeway >= endTimestamp;
   }
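
A self-contained sketch of the leeway check above; the epoch/slot constants are assumptions standing in for this.l1constants, and only the 3s leeway rule is taken verbatim from the code:

// Assumed layout: epochs are EPOCH_SLOTS slots of SLOT_SECONDS each, anchored at GENESIS_TS.
const GENESIS_TS = 1_700_000_000n; // assumption for illustration
const SLOT_SECONDS = 36n; // assumption
const EPOCH_SLOTS = 32n; // assumption

function epochEndTimestamp(epoch: bigint): bigint {
  return GENESIS_TS + (epoch + 1n) * EPOCH_SLOTS * SLOT_SECONDS;
}

function isEpochComplete(epoch: bigint, lastSeenL1Timestamp: bigint): boolean {
  // Holds as long as L1 never mines two blocks within 3 seconds of each other.
  return lastSeenL1Timestamp + 3n >= epochEndTimestamp(epoch);
}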
22 changes: 17 additions & 5 deletions yarn-project/aztec.js/src/utils/cheat_codes.ts
@@ -125,7 +125,7 @@ export class EthCheatCodes {
     if (res.error) {
       throw new Error(`Error mining: ${res.error.message}`);
     }
-    this.logger.verbose(`Mined ${numberOfBlocks} blocks`);
+    this.logger.verbose(`Mined ${numberOfBlocks} L1 blocks`);
   }
 
   /**
@@ -150,7 +150,7 @@ export class EthCheatCodes {
     if (res.error) {
       throw new Error(`Error setting block interval: ${res.error.message}`);
     }
-    this.logger.verbose(`Set block interval to ${interval}`);
+    this.logger.verbose(`Set L1 block interval to ${interval}`);
   }
 
   /**
@@ -162,7 +162,7 @@ export class EthCheatCodes {
     if (res.error) {
       throw new Error(`Error setting next block timestamp: ${res.error.message}`);
     }
-    this.logger.verbose(`Set next block timestamp to ${timestamp}`);
+    this.logger.verbose(`Set L1 next block timestamp to ${timestamp}`);
   }
 
   /**
@@ -175,7 +175,7 @@ export class EthCheatCodes {
       throw new Error(`Error warping: ${res.error.message}`);
     }
     await this.mine();
-    this.logger.verbose(`Warped to ${timestamp}`);
+    this.logger.verbose(`Warped L1 timestamp to ${timestamp}`);
   }
 
   /**
@@ -228,7 +228,7 @@ export class EthCheatCodes {
     if (res.error) {
       throw new Error(`Error setting storage for contract ${contract} at ${slot}: ${res.error.message}`);
     }
-    this.logger.verbose(`Set storage for contract ${contract} at ${slot} to ${value}`);
+    this.logger.verbose(`Set L1 storage for contract ${contract} at ${slot} to ${value}`);
   }
 
   /**
@@ -329,6 +329,18 @@ export class RollupCheatCodes {
     this.logger.verbose(`Advanced to next epoch`);
   }
 
+  /**
+   * Warps L1 time forward by the equivalent of the given number of slots.
+   * @param howMany - The number of slots to advance.
+   */
+  public async advanceSlots(howMany: number) {
+    const l1Timestamp = Number((await this.client.getBlock()).timestamp);
+    const timeToWarp = howMany * AZTEC_SLOT_DURATION;
+    await this.ethCheatCodes.warp(l1Timestamp + timeToWarp);
+    const [slot, epoch] = await Promise.all([this.getSlot(), this.getEpoch()]);
+    this.logger.verbose(`Advanced ${howMany} slots up to slot ${slot} in epoch ${epoch}`);
+  }
+
   /** Returns the current proof claim (if any) */
   public async getProofClaim(): Promise<EpochProofClaim | undefined> {
     // REFACTOR: This code is duplicated from l1-publisher
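
A hypothetical e2e usage of the new cheat code, tying it back to the prune flow in Rollup.sol; construction of RollupCheatCodes is elided and assumed to match the surrounding test harness:

declare const rollupCheats: RollupCheatCodes; // assumed to be set up by the harness
// Warp far enough that the oldest pending epoch can no longer be proven in time.
await rollupCheats.advanceSlots(64); // two 32-slot epochs; epoch length is an assumption
// Rollup.canPrune() should now return true, and the next propose() call will prune
// the unproven chain, at which point the archiver's handleEpochPrune unwinds the
// matching L2 blocks locally.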
3 changes: 2 additions & 1 deletion yarn-project/circuit-types/src/interfaces/world_state.ts
@@ -1,3 +1,4 @@
+import { type L2BlockId } from '../l2_block_source.js';
 import type { MerkleTreeReadOperations, MerkleTreeWriteOperations } from './merkle_tree_operations.js';
 
 /**
@@ -21,7 +22,7 @@ export interface WorldStateSynchronizerStatus {
   /**
    * The block number that the world state synchronizer is synced to.
    */
-  syncedToL2Block: number;
+  syncedToL2Block: L2BlockId;
 }
 
 /**