diff --git a/packages/protocol/contracts/L1/TaikoToken.sol b/packages/protocol/contracts/L1/TaikoToken.sol
index f1e75920611..82248509d1d 100644
--- a/packages/protocol/contracts/L1/TaikoToken.sol
+++ b/packages/protocol/contracts/L1/TaikoToken.sol
@@ -25,8 +25,8 @@ import { PausableUpgradeable } from
 import { Proxied } from "../common/Proxied.sol";
 
 /// @title TaikoToken
-/// @notice The TaikoToken (TKO) is used for proposing blocks and also for
-/// staking in the Taiko protocol. It is an ERC20 token with 8 decimal places of
+/// @notice The TaikoToken (TKO) is used in the protocol as prover collateral
+/// in the form of bonds. It is an ERC20 token with 18 decimal places of
 /// precision.
 contract TaikoToken is
     EssentialContract,
@@ -109,7 +109,7 @@ contract TaikoToken is
         uint256 amount
     )
         public
-        onlyFromNamed("erc20_vault")
+        onlyFromNamed2("erc20_vault", "taiko")
     {
         _burn(from, amount);
     }
diff --git a/packages/protocol/contracts/L1/libs/LibDepositing.sol b/packages/protocol/contracts/L1/libs/LibDepositing.sol
index 0c2c97f9981..daf7b073809 100644
--- a/packages/protocol/contracts/L1/libs/LibDepositing.sol
+++ b/packages/protocol/contracts/L1/libs/LibDepositing.sol
@@ -61,6 +61,9 @@ library LibDepositing {
             })
         );
 
+        // Unchecked is safe:
+        // - numEthDeposits is a uint64 (max ~1.8 * 1e19), so even at one
+        //   deposit per second it would take hundreds of billions of years to overflow
         unchecked {
             state.slotA.numEthDeposits++;
         }
@@ -106,6 +109,12 @@ library LibDepositing {
                 });
 
                 uint96 _fee = deposits[i].amount > fee ? fee : deposits[i].amount;
+
+                // Unchecked is safe:
+                // - _fee cannot be bigger than deposits[i].amount
+                // - all values share the same range (uint96) except the loop
+                //   counter, which cannot realistically grow that large because
+                //   the function would run out of gas long before
                 unchecked {
                     deposits[i].amount -= _fee;
                     totalFee += _fee;
@@ -118,6 +127,10 @@ library LibDepositing {
         state.ethDeposits[state.slotA.numEthDeposits
             % config.ethDepositRingBufferSize] = _encodeEthDeposit(feeRecipient, totalFee);
+
+        // Unchecked is safe:
+        // - numEthDeposits is a uint64 (max ~1.8 * 1e19), so even at one
+        //   deposit per second it would take hundreds of billions of years to overflow
         unchecked {
             state.slotA.numEthDeposits++;
         }
@@ -138,6 +151,13 @@ library LibDepositing {
         view
         returns (bool)
     {
+        // Unchecked is safe:
+        // - both numEthDeposits and state.slotA.nextEthDepositToProcess are
+        //   indexes: the former counts all deposits and the latter points to
+        //   the next deposit to process, so nextEthDepositToProcess can never
+        //   exceed numEthDeposits
+        // - ethDepositRingBufferSize cannot be 0 (its validity is checked in
+        //   LibVerifying)
         unchecked {
             return amount >= config.ethDepositMinAmount
                 && amount <= config.ethDepositMaxAmount
diff --git a/packages/protocol/contracts/L1/libs/LibProposing.sol b/packages/protocol/contracts/L1/libs/LibProposing.sol
index 372622ff631..b99886954b6 100644
--- a/packages/protocol/contracts/L1/libs/LibProposing.sol
+++ b/packages/protocol/contracts/L1/libs/LibProposing.sol
@@ -83,6 +83,7 @@ library LibProposing {
         TaikoToken tt = TaikoToken(resolver.resolve("taiko_token", false));
 
         if (state.taikoTokenBalances[assignment.prover] >= config.proofBond) {
+            // Safe: see the check above
             unchecked {
                 state.taikoTokenBalances[assignment.prover] -= config.proofBond;
             }
@@ -127,6 +128,11 @@ library LibProposing {
         uint256 reward;
         if (config.proposerRewardPerSecond > 0 && config.proposerRewardMax > 0)
         {
+            // Unchecked is safe:
+            // - block.timestamp is always greater than block.proposedAt
+            //   (the block was proposed in the past)
+            // - state.taikoTokenBalances[addr] is a uint256, which can hold
+            //   the entire token supply, so adding the reward cannot overflow
             unchecked {
                 uint256 blockTime = block.timestamp
                     - state.blocks[(b.numBlocks - 1) % config.blockRingBufferSize]
@@ -152,6 +158,10 @@ library LibProposing {
         }
 
         // Init the metadata
+        // Unchecked is safe:
+        // - the arithmetic only mixes values of the same type
+        // - the increment (state.slotB.numBlocks++) cannot realistically
+        //   overflow a uint64, even at one proposal per second
         unchecked {
             meta.id = b.numBlocks;
             meta.timestamp = uint64(block.timestamp);
diff --git a/packages/protocol/contracts/L1/libs/LibProving.sol b/packages/protocol/contracts/L1/libs/LibProving.sol
index f7429827624..7658ec0bee0 100644
--- a/packages/protocol/contracts/L1/libs/LibProving.sol
+++ b/packages/protocol/contracts/L1/libs/LibProving.sol
@@ -90,6 +90,9 @@ library LibProving {
 
         if (tid == 0) {
             tid = blk.nextTransitionId;
+            // Unchecked is safe:
+            // - it is unrealistic that 2**32 different fork choices would be
+            //   proven for one block with none of them valid
             unchecked {
                 ++blk.nextTransitionId;
             }
diff --git a/packages/protocol/contracts/L1/libs/LibTaikoToken.sol b/packages/protocol/contracts/L1/libs/LibTaikoToken.sol
index 487b30de6b6..047136d804e 100644
--- a/packages/protocol/contracts/L1/libs/LibTaikoToken.sol
+++ b/packages/protocol/contracts/L1/libs/LibTaikoToken.sol
@@ -25,7 +25,7 @@ library LibTaikoToken {
     {
         uint256 balance = state.taikoTokenBalances[msg.sender];
         if (balance < amount) revert L1_INSUFFICIENT_TOKEN();
-
+        // Unchecked is safe per the check above
         unchecked {
             state.taikoTokenBalances[msg.sender] -= amount;
         }
diff --git a/packages/protocol/contracts/L1/libs/LibVerifying.sol b/packages/protocol/contracts/L1/libs/LibVerifying.sol
index 270621ceac3..11f453ce8a3 100644
--- a/packages/protocol/contracts/L1/libs/LibVerifying.sol
+++ b/packages/protocol/contracts/L1/libs/LibVerifying.sol
@@ -62,6 +62,10 @@ library LibVerifying {
                 >= type(uint96).max / config.ethDepositMaxCountPerBlock
         ) revert L1_INVALID_CONFIG();
 
+        // Unchecked is safe:
+        // - the assignments stay within their types' ranges
+        // - block.timestamp will stay within the uint64 range for the
+        //   foreseeable future
         unchecked {
             uint64 timeNow = uint64(block.timestamp);
@@ -114,6 +118,11 @@
         TaikoData.Transition memory tz;
 
         uint64 processed;
+
+        // Unchecked is safe:
+        // - the assignments stay within their types' ranges
+        // - blockId and processed are uint64 counters that cannot realistically
+        //   overflow, even if we verify one block every second
         unchecked {
             ++blockId;
diff --git a/packages/protocol/contracts/L2/TaikoL2.sol b/packages/protocol/contracts/L2/TaikoL2.sol
index 8bacf134d9e..1193e77793b 100644
--- a/packages/protocol/contracts/L2/TaikoL2.sol
+++ b/packages/protocol/contracts/L2/TaikoL2.sol
@@ -277,6 +277,8 @@ contract TaikoL2 is EssentialContract, TaikoL2Signer, ICrossChainSync {
         returns (bytes32 prevPIH, bytes32 currPIH)
     {
         bytes32[256] memory inputs;
+
+        // Unchecked is safe: the ring-buffer index math below cannot overflow.
         unchecked {
             // Put the previous 255 blockhashes (excluding the parent's) into a
             // ring buffer.
@@ -307,6 +309,10 @@ contract TaikoL2 is EssentialContract, TaikoL2Signer, ICrossChainSync {
         view
         returns (uint256 _basefee, uint64 _gasExcess)
     {
+        // Unchecked is safe because:
+        // - gasExcess is capped at uint64 max, so multiplying it by a
+        //   uint32 value cannot overflow a uint256
+        // - 'excess' is at least as large as 'issued'
         unchecked {
             uint256 issued = timeSinceParent * config.gasIssuedPerSecond;
             uint256 excess = (uint256(gasExcess) + parentGasUsed).max(issued);
diff --git a/packages/protocol/contracts/actors_privileges_deployments.md b/packages/protocol/contracts/actors_privileges_deployments.md
new file mode 100644
index 00000000000..18f954d3199
--- /dev/null
+++ b/packages/protocol/contracts/actors_privileges_deployments.md
@@ -0,0 +1,67 @@
+# Actors, Privileges, and Upgradeable Procedures Documentation
+
+## Introduction
+
+This document provides a comprehensive overview of the actors involved in the smart contract system and outlines their respective privileges and roles.
+Different `roles` (we call them `domains`) are granted via the `AddressManager` contract's `setAddress()` function. The idea is very similar to Optimism's `AddressManager`, except that we use `chainId + domainName` as the key for a given address. We need this because, for bridging purposes, the destination chain's bridge address has to be included when signaling the message hash, so that the hash is tamper-proof.
+Every contract that needs role-based authentication inherits from the `AddressResolver` contract, which serves as a 'middleman/lookup' by querying the `AddressManager` to determine whether a given address is allowed to act on behalf of a domain.
+
+## 1. Domains (≈role per chainId)
+
+In the context of the smart contract system, various actors play distinct roles. Each actor is associated with specific responsibilities and privileges within the system. When a function uses the `onlyFromNamed` or `onlyFromNamed2` modifier, access is checked through the aforementioned contracts (`AddressResolver` and `AddressManager`); a single function can currently grant access to at most two domains (this might change when e.g. a `DAO` is set up). A simplified sketch of this lookup pattern is shown below.
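+
+The following is a minimal, illustrative sketch of that pattern (the contract and function names are shortened stand-ins for the real `AddressManager` and `AddressResolver`, not the actual implementation):
+
+```solidity
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.20;
+
+// Sketch only: AddressManager stores (chainId, name) => address, and a
+// resolver-style modifier compares msg.sender against that registry.
+contract SimplifiedAddressManager {
+    mapping(uint256 => mapping(bytes32 => address)) private _addresses;
+
+    // onlyOwner in the real contract
+    function setAddress(uint256 chainId, bytes32 name, address addr) external {
+        _addresses[chainId][name] = addr;
+    }
+
+    function getAddress(uint256 chainId, bytes32 name) external view returns (address) {
+        return _addresses[chainId][name];
+    }
+}
+
+abstract contract SimplifiedResolver {
+    error RESOLVER_DENIED();
+
+    SimplifiedAddressManager internal _manager;
+
+    // Roughly what onlyFromNamed does: reject callers that are not registered
+    // under the given domain name for the current chain.
+    modifier onlyFromNamed(bytes32 name) {
+        if (msg.sender != _manager.getAddress(block.chainid, name)) {
+            revert RESOLVER_DENIED();
+        }
+        _;
+    }
+}
+```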
+
+### 1.1 Taiko
+
+- **Role**: This domain role is given to the TaikoL1 smart contract.
+- **Privileges**:
+  - Possibility to mint/burn the Taiko token
+  - Possibility to mint/burn ERC20 tokens (I think we should remove this privilege)
+
+### 1.2 Bridge
+
+- **Role**: This domain role is given to the Bridge smart contracts (on both chains).
+- **Privileges**:
+  - The right to trigger transferring/minting the tokens on the destination chain (be it ERC20, ERC721, or ERC1155) from the vault contracts
+  - The right to trigger releasing the custodied assets on the source chain (if bridging is not successful)
+
+### 1.3 ERCXXX_Vault
+
+- **Role**: This role is given to the respective token vault contracts (ERC20, ERC721, ERC1155).
+- **Privileges**:
+  - As part of token bridging, the possibility to burn and mint the respective standard tokens (no minting or burning outside of bridging)
+
+## 2. Different access modifiers
+
+Besides the `onlyFromNamed` and `onlyFromNamed2` modifiers, we have others such as:
+
+### 2.1 onlyOwner
+
+- **Description**: Only the owner is granted access.
+- **Associated contracts**: TaikoToken, AddressManager, EtherVault
+
+### 2.2 onlyAuthorized
+
+- **Description**: Only an address authorized by the owner is granted access; the address shall be a smart contract (`Bridge` in our case).
+- **Associated contracts**: EtherVault
+
+## 3. Upgradeable Procedures
+
+The smart contract system incorporates upgradeable procedures to ensure flexibility and security. These procedures adhere to the following principles:
+
+### 3.1 Deployment Scripts
+
+- Deployment scripts are visible in the `packages/protocol/scripts` folder, encompassing both deployment and upgrade scripts for easy reference and replication.
+
+### 3.2 Transparent Upgradeability
+
+- Upgradeability is based on the Transparent Upgradeability Proxy by OpenZeppelin, ensuring that contract upgrades are secure and transparent to all stakeholders.
+
+### 3.3 Ownership Transition
+
+- Currently, on testnets, some privileges (like `onlyOwner`) are assigned to externally owned accounts (EOAs) for easier testing. However, it is essential to note that `TimeLockController` contracts will be the owners at a later stage, as illustrated by the sketch below.
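+
+As a rough illustration (assuming OpenZeppelin's `TimelockController` is used for this purpose; the helper contract and parameter values below are hypothetical, not part of the protocol):
+
+```solidity
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.20;
+
+import { TimelockController } from
+    "@openzeppelin/contracts/governance/TimelockController.sol";
+
+// Sketch: queueing a proxy upgrade through a TimelockController so the
+// enforced delay (e.g. 7 days) gives users time to exit before it takes effect.
+// Assumes the timelock is the proxy's admin and the caller holds the
+// timelock's proposer/executor roles.
+contract TimelockUpgradeExample {
+    function queueUpgrade(
+        TimelockController timelock,
+        address proxy,
+        address newImplementation
+    )
+        external
+    {
+        bytes memory data =
+            abi.encodeWithSignature("upgradeTo(address)", newImplementation);
+        // Schedules the call; it only becomes executable after the timelock's
+        // configured minimum delay has elapsed.
+        timelock.schedule(
+            proxy, 0, data, bytes32(0), bytes32(0), timelock.getMinDelay()
+        );
+    }
+
+    function executeUpgrade(
+        TimelockController timelock,
+        address proxy,
+        address newImplementation
+    )
+        external
+    {
+        bytes memory data =
+            abi.encodeWithSignature("upgradeTo(address)", newImplementation);
+        // Reverts if called before the scheduled delay has passed.
+        timelock.execute(proxy, 0, data, bytes32(0), bytes32(0));
+    }
+}
+```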
+
+## Conclusion
+
+Clear documentation of actors and their privileges, combined with robust upgradeable procedures, is essential for smart contract systems, especially for based rollups. This documentation ensures that all stakeholders understand their roles and responsibilities within the system and guarantees its adaptability and security over time.
+
+Please ensure that this document is kept up to date as changes are made to the smart contract system and its actors or privileges.
diff --git a/packages/protocol/contracts/libs/Lib1559Math.sol b/packages/protocol/contracts/libs/Lib1559Math.sol
index 0ac8b76f01a..99c09bc4a8a 100644
--- a/packages/protocol/contracts/libs/Lib1559Math.sol
+++ b/packages/protocol/contracts/libs/Lib1559Math.sol
@@ -13,6 +13,11 @@ import { SafeCastUpgradeable } from
 /// @title Lib1559Math
 /// @dev This library provides utilities related to the L2 EIP-1559
 /// implementation.
+/// See the formulas described in the whitepaper
+/// https://taikoxyz.github.io/taiko-mono/taiko-whitepaper.pdf
+/// in section "9.6. Rate Limiting using EIP-1559".
+/// Additional info about the arithmetic formula:
+/// https://github.com/taikoxyz/taiko-mono/blob/main/packages/protocol/docs/L2EIP1559.md
 library Lib1559Math {
     using SafeCastUpgradeable for uint256;
diff --git a/packages/protocol/incident_response_plan.md b/packages/protocol/incident_response_plan.md
new file mode 100644
index 00000000000..7a5a742fc6c
--- /dev/null
+++ b/packages/protocol/incident_response_plan.md
@@ -0,0 +1,57 @@
+# Incident Response Plan
+
+This document outlines the incident response plan for our smart contract system, addressing both ChainOps- and SmartContract-related incidents. It provides a list of potential incidents and instructions on how to handle them effectively.
+
+## ChainOps-Related Incidents
+
+### 1. Congested Network
+
+**Description**: A congested network can lead to slow transaction confirmations, higher gas fees, and slashed provers, impacting the performance of the rollup.
+
+**Response**:
+
+1. Check Grafana Alerts: Monitor the Grafana dashboard at [Grafana Dashboard](https://grafana.test.taiko.xyz/) for alerts related to network congestion.
+2. Engineer on Duty: The engineer on duty should be alerted automatically through the monitoring system.
+3. Mitigation: If network congestion is detected, consider adjusting gas prices or scheduling transactions during off-peak times.
+
+### 2. Chain Head Number Stops Increasing
+
+**Description**: When the chain head number stops increasing, it indicates a potential issue with the operation of the network.
+
+**Response**:
+
+1. Grafana Alerts: Monitor Grafana for alerts regarding the chain head number.
+2. Engineer on Duty: The engineer on duty should receive automatic alerts.
+3. Investigation: Investigate the root cause by analyzing blockchain data and logs.
+4. Collaboration: Collaborate with blockchain network administrators if necessary for a solution.
+
+### 3. Latest Verified Block Number Stops Increasing
+
+**Description**: A halt in the increase of the latest verified block number may indicate a problem with the operation of the network.
+
+**Response**:
+
+1. Grafana Alerts: Keep an eye on Grafana alerts regarding the latest verified block number.
+2. Engineer on Duty: The engineer on duty should be automatically notified.
+3. Troubleshooting: Investigate the node's syncing process and take corrective actions to ensure it resumes.
+
+## SmartContract-Related Incidents
+
+### 1. Unforeseeable Smart Contract Issue
+
+**Description**: Unforeseeable issues with the smart contracts may arise that were not identified during the audit.
+
+**Response**:
+
+1. Incident Report: Create a detailed incident report, including the symptoms, affected contracts, and any relevant transaction or event data.
+2. Escalation: Notify the development and audit teams for immediate attention.
+3. Isolation: If necessary, isolate the affected smart contracts or functions to prevent further damage.
+4. Analysis: Collaborate with the audit team to analyze and diagnose the issue.
+5. Resolution: Implement necessary fixes, upgrades, or rollbacks as per the audit team's recommendations.
+6. Communication: Keep stakeholders informed throughout the incident resolution process.
+
+## Conclusion
+
+This incident response plan ensures that potential incidents, whether related to ChainOps or SmartContracts, are promptly detected and addressed. The plan relies on monitoring tools like Grafana and the availability of an engineer on duty. In the case of unforeseeable smart contract issues, a systematic incident resolution process is in place to minimize the impact on the system's functionality and security.
+
+Regular testing and review of this plan are recommended to ensure its effectiveness in responding to incidents as the system evolves.
diff --git a/packages/protocol/script/upgrade/SetAddressManager.s.sol b/packages/protocol/script/upgrade/SetAddressManager.s.sol
new file mode 100644
index 00000000000..924943c4e34
--- /dev/null
+++ b/packages/protocol/script/upgrade/SetAddressManager.s.sol
@@ -0,0 +1,42 @@
+// SPDX-License-Identifier: MIT
+//  _____     _ _         _         _
+// |_   _|_ _(_) |_____  | |   __ _| |__ ___
+//   | |/ _` | | / / _ \ | |__/ _` | '_ (_-<
+//   |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/
+
+pragma solidity ^0.8.20;
+
+import "forge-std/Script.sol";
+import "forge-std/console2.sol";
+import "@openzeppelin/contracts-upgradeable/utils/math/SafeCastUpgradeable.sol";
+import "./UpgradeScript.s.sol";
+
+interface IEssentialContract {
+    function setAddressManager(address newAddressManager) external;
+}
+/// @notice Each contract that inherits EssentialContract has a
+/// setAddressManager() setter. In case the AddressManager needs to be
+/// changed, this script provides a quick way to update it.
+///
+/// Invocation example:
+/// forge script SetAddressManager --sig "run(address,address)"
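+/// A hypothetical full invocation (placeholder arguments and standard forge
+/// flags; adjust to your environment):
+/// forge script SetAddressManager --sig "run(address,address)" \
+///     <essentialContract> <newAddressManager> \
+///     --rpc-url $RPC_URL --private-key $PRIVATE_KEY --broadcast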
+///
+
+contract SetAddressManager is UpgradeScript {
+    function run(
+        address essentialContract,
+        address newAddressManager
+    )
+        external
+        setUp
+    {
+        IEssentialContract(essentialContract).setAddressManager(
+            newAddressManager
+        );
+        console2.log(
+            essentialContract,
+            " contract set a new AddressManager address:",
+            address(newAddressManager)
+        );
+    }
+}
diff --git a/packages/protocol/script/upgrade/TransferOwnership.s.sol b/packages/protocol/script/upgrade/TransferOwnership.s.sol
new file mode 100644
index 00000000000..b8398947f58
--- /dev/null
+++ b/packages/protocol/script/upgrade/TransferOwnership.s.sol
@@ -0,0 +1,45 @@
+// SPDX-License-Identifier: MIT
+//  _____     _ _         _         _
+// |_   _|_ _(_) |_____  | |   __ _| |__ ___
+//   | |/ _` | | / / _ \ | |__/ _` | '_ (_-<
+//   |_|\__,_|_|_\_\___/ |____\__,_|_.__/__/
+
+pragma solidity ^0.8.20;
+
+import "forge-std/Script.sol";
+import "forge-std/console2.sol";
+import "@openzeppelin/contracts-upgradeable/utils/math/SafeCastUpgradeable.sol";
+import "./UpgradeScript.s.sol";
+
+interface IOwnable {
+    function transferOwnership(address newOwner) external;
+}
+/// @notice A "single" owner is not desirable for protocols, so we need to
+/// transfer ownership. However, transferring ownership to a multisig alone
+/// does not help much if the protocol wants to give users some time to
+/// exit before an upgrade becomes effective, so implementing a delay
+/// (L2Beat prefers 7 days) is essential.
+/// The usual approach is the following:
+/// 1. Transfer ownership to a TimeLockController contract, which enforces
+///    the delay
+/// 2. The ownership of the TimeLockController contract shall be a multisig/DAO
+
+/// Invocation example:
+/// forge script TransferOwnership --sig "run(address,address)"
+///
+contract TransferOwnership is UpgradeScript {
+    function run(
+        address contractAddr,
+        address timeLockContract
+    )
+        external
+        setUp
+    {
+        IOwnable(contractAddr).transferOwnership(timeLockContract);
+        console2.log(
+            contractAddr,
+            " contract has a new owner:",
+            address(timeLockContract)
+        );
+    }
+}
diff --git a/packages/protocol/test/L1/TaikoL1.sim.sol b/packages/protocol/test/L1/TaikoL1.sim.sol
deleted file mode 100644
index 7076ce7ad59..00000000000
--- a/packages/protocol/test/L1/TaikoL1.sim.sol
+++ /dev/null
@@ -1,775 +0,0 @@
-// SPDX-License-Identifier: MIT
-pragma solidity ^0.8.20;
-
-import { Test } from "forge-std/Test.sol";
-import { console2 } from "forge-std/console2.sol";
-import { TaikoData } from "../../contracts/L1/TaikoData.sol";
-import { TaikoL1 } from "../../contracts/L1/TaikoL1.sol";
-import { Strings } from "@openzeppelin/contracts/utils/Strings.sol";
-import { TaikoL1TestBase } from "./TaikoL1TestBase.sol";
-
-/// @dev Warning: this test will take 7-10 minutes and require 1GB memory.
-/// `pnpm sim` -contract TaikoL1_b is TaikoL1 { - function getConfig() - public - pure - override - returns (TaikoData.Config memory config) - { - config = TaikoL1.getConfig(); - - config.blockTxListExpiry = 0; - config.blockMaxProposals = 1100; - config.blockRingBufferSize = 1200; - config.blockMaxVerificationsPerTx = 10; - config.proofRegularCooldown = 5 minutes; - config.proofOracleCooldown = 3 minutes; - config.skipProverAssignmentVerificaiton = true; - config.proofBond = 1e18; // 1 Taiko token - config.proposerRewardPerSecond = 1e15; // 0.001 Taiko token - } -} - -contract Verifier { - fallback(bytes calldata) external returns (bytes memory) { - return bytes.concat(keccak256("taiko")); - } -} - -contract TaikoL1Simulation is TaikoL1TestBase { - // Need to bring variable declaration here - to avoid stack too deep - // Initial salt for semi-random generation - uint256 salt = 2_195_684_615_435_261_315_311; - // Can play to adjust - uint256 blocksToSimulate = 4000; - // RandomNumber - pseudo random but fine - uint256 newRandomWithoutSalt; - - ////////////////////////////////////////// - // TUNABLE PARAMS // - ////////////////////////////////////////// - // This means block proposals will be averaged out (long term if random - // function is random enough) to 18s - // It is fine it simulates that we do not necessarily put Taiko block at - // every 12s, but on average around every x1.5 of ETH block - // Meaninig we have less blocks / sec. (We should test what happens if - // quicker!) - uint256 nextBlockTime = 8 seconds; - uint256 minDiffToBlockPropTime = 8 seconds; - - // This means block provings will be averaged out (long term if random - // function is random enough) to 200s - uint256 startBlockProposeTime = 70 seconds; - uint256 upperDevToBlockProveTime = 40 seconds; - uint256 secondsToSimulate = blocksToSimulate * 18; //Because of the expected - // average blocktimestamp - we can tweak it obv. 
- ////////////////////////////////////////// - // TUNABLE PARAMS END // - ////////////////////////////////////////// - uint256 maxTime = 0; - uint256 totalDiffsProp = 0; - uint256 totalDiffsProve = 0; - uint256 lastTimestampProp = 0; - uint256 lastTimestampProve = 0; - // Need to map a second to a proofTIme, and might be possible that multiple - // proofs coming in the same block - mapping(uint256 proofTimeSecond => uint256[] arrivalIdxOfBlockIds) private - _proofTimeToBlockIndexes; - // Pre-calculate propose and prove timestamp - uint64[] blocksProposedTimestamp = new uint64[](blocksToSimulate); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - - bytes32[] parentHashes = new bytes32[](blocksToSimulate); - bytes32[] blockHashes = new bytes32[](blocksToSimulate); - bytes32[] signalRoots = new bytes32[](blocksToSimulate); - uint32[] gasLimits = new uint32[](blocksToSimulate); - - function deployTaikoL1() internal override returns (TaikoL1 taikoL1) { - taikoL1 = new TaikoL1_b(); - } - - function setUp() public override { - TaikoL1TestBase.setUp(); - registerAddress(L1.getVerifierName(100), address(new Verifier())); - } - - // A real world scenario - function testGeneratingManyRandomBlocksNonConsecutive() external { - uint256 time = block.timestamp; - - assertEq(time, 1); - - giveEthAndTko(Bob, 1e9 ether, 10_000 ether); - - TaikoData.BlockMetadata[] memory metas = new TaikoData.BlockMetadata[]( - blocksToSimulate - ); - - // Determine every timestamp of the block we want to simulate - console2.log("BlockId, ProofTime"); - for (uint256 i = 0; i < blocksToSimulate; i++) { - newRandomWithoutSalt = uint256( - keccak256( - abi.encodePacked( - block.prevrandao, - msg.sender, - block.timestamp, - i, - newRandomWithoutSalt, - salt - ) - ) - ); - blocksProposedTimestamp[i] = uint64( - pickRandomNumber( - newRandomWithoutSalt, - nextBlockTime, - (minDiffToBlockPropTime + 1) - ) - ); - nextBlockTime = blocksProposedTimestamp[i] + minDiffToBlockPropTime; - - // Avg. calculation - if (lastTimestampProp > 0) { - totalDiffsProp += blocksProposedTimestamp[i] - lastTimestampProp; - } - - lastTimestampProp = blocksProposedTimestamp[i]; - // We need this info to extract / export !! - //console2.log("Time of PROPOSAL is:", blocksProposedTimestamp[i]); - salt = uint256( - keccak256( - abi.encodePacked( - nextBlockTime, salt, i, newRandomWithoutSalt - ) - ) - ); - - uint64 proofTimePerBlockI = uint64( - pickRandomNumber( - newRandomWithoutSalt, - (nextBlockTime + startBlockProposeTime), - (upperDevToBlockProveTime + 1) - ) - ); - - if (proofTimePerBlockI > maxTime) { - maxTime = proofTimePerBlockI; - } - - if (lastTimestampProve > 0) { - totalDiffsProve += proofTimePerBlockI - lastTimestampProp; - } - lastTimestampProve = proofTimePerBlockI; - // It is possible that proof for block N+1 comes before N, so we - // need to keep track of that. Because - // the proofs per block is related to propose of that same block - // (index). - _proofTimeToBlockIndexes[proofTimePerBlockI].push(i); - - // We need this info to extract / export !! 
- console2.log(i + 1, ";", proofTimePerBlockI - lastTimestampProp); - salt = - uint256(keccak256(abi.encodePacked(proofTimePerBlockI, salt))); - } - - uint256 proposedIndex; - - console2.log("Last second:", maxTime); - console2.log( - "Average proposal time: ", totalDiffsProp / blocksToSimulate - ); - console2.log("Average proof time: ", totalDiffsProve / blocksToSimulate); - printVariableHeaders(); - //It is a divider / marker for the parser - console2.log("!-----------------------------"); - printVariables(); - // This is a way we can de-couple proposing from proving - for ( - uint256 secondsElapsed = 0; - secondsElapsed <= maxTime; - secondsElapsed++ - ) { - newRandomWithoutSalt = uint256( - keccak256( - abi.encodePacked( - newRandomWithoutSalt, - block.prevrandao, - secondsElapsed, - msg.sender, - block.timestamp, - salt - ) - ) - ); - - // We are proposing here - if ( - secondsElapsed == blocksProposedTimestamp[proposedIndex] - && proposedIndex < blocksToSimulate - ) { - //console2.log("FOR CYCLE: Time of PROPOSAL is:", - // blocksProposedTimestamp[proposedIndex]); - uint32 gasLimit = uint32( - pickRandomNumber( - newRandomWithoutSalt, 100e3, (3_000_000 - 100_000 + 1) - ) - ); // 100K to 30M - salt = uint256(keccak256(abi.encodePacked(gasLimit, salt))); - - if (proposedIndex == 0) { - parentHashes[proposedIndex] = GENESIS_BLOCK_HASH; - } else { - parentHashes[proposedIndex] = blockHashes[proposedIndex - 1]; - } - - salt = uint256(keccak256(abi.encodePacked(salt))); - - uint24 txListSize = uint24( - pickRandomNumber( - newRandomWithoutSalt, 1, conf.blockMaxTxListBytes - ) //Actually (conf.blockMaxTxListBytes-1)+1 but that's the - // same - ); - salt = uint256(keccak256(abi.encodePacked(txListSize, salt))); - - blockHashes[proposedIndex] = bytes32( - pickRandomNumber(newRandomWithoutSalt, 0, type(uint256).max) - ); - salt = uint256( - keccak256( - abi.encodePacked(blockHashes[proposedIndex], salt) - ) - ); - - signalRoots[proposedIndex] = bytes32( - pickRandomNumber(newRandomWithoutSalt, 0, type(uint256).max) - ); - salt = uint256( - keccak256( - abi.encodePacked(signalRoots[proposedIndex], salt) - ) - ); - - metas[proposedIndex] = - proposeBlock(Alice, Bob, gasLimit, txListSize); - - if (proposedIndex < blocksToSimulate - 1) proposedIndex++; - - printVariables(); - } - - // We are proving here - if (_proofTimeToBlockIndexes[secondsElapsed].length > 0) { - //console2.log("Duplicates check"); - for ( - uint256 i; - i < _proofTimeToBlockIndexes[secondsElapsed].length; - i++ - ) { - uint256 blockId = - _proofTimeToBlockIndexes[secondsElapsed][i]; - - proveBlock( - Bob, - Bob, - metas[blockId], - parentHashes[blockId], - blockHashes[blockId], - signalRoots[blockId] - ); - } - } - - // Increment time with 1 seconds - vm.warp(block.timestamp + 1); - //Log every 12 sec - if (block.timestamp % 12 == 0) { - printVariables(); - } - } - console2.log("-----------------------------!"); - } - - // 90% slow proofs (around 30 mins or so) and 10% (around 1-5 mins ) - function test_90percent_slow_10percent_quick() external { - uint256 time = block.timestamp; - - uint256 startBlockProposeTime_quick = 60 seconds; // For the 10% where - // it is 'quick' - uint256 upperDevToBlockProveTime_quick = 240 seconds; // For the 10% - // where it is quick - - assertEq(time, 1); - - giveEthAndTko(Bob, 1e6 ether, 10_000 ether); - - TaikoData.BlockMetadata[] memory metas = new TaikoData.BlockMetadata[]( - blocksToSimulate - ); - - // Determine every timestamp of the block we want to simulate - console2.log("BlockId, 
ProofTime"); - for (uint256 i = 0; i < blocksToSimulate; i++) { - newRandomWithoutSalt = uint256( - keccak256( - abi.encodePacked( - block.prevrandao, - msg.sender, - block.timestamp, - i, - newRandomWithoutSalt, - salt - ) - ) - ); - blocksProposedTimestamp[i] = uint64( - pickRandomNumber( - newRandomWithoutSalt, - nextBlockTime, - (minDiffToBlockPropTime + 1) - ) - ); - nextBlockTime = blocksProposedTimestamp[i] + minDiffToBlockPropTime; - - // Avg. calculation - if (lastTimestampProp > 0) { - totalDiffsProp += blocksProposedTimestamp[i] - lastTimestampProp; - } - - lastTimestampProp = blocksProposedTimestamp[i]; - // We need this info to extract / export !! - //console2.log("Time of PROPOSAL is:", blocksProposedTimestamp[i]); - salt = uint256( - keccak256( - abi.encodePacked( - nextBlockTime, salt, i, newRandomWithoutSalt - ) - ) - ); - uint64 proofTimePerBlockI; - if (i % 10 == 0) { - // A very quick proof this case - proofTimePerBlockI = uint64( - pickRandomNumber( - newRandomWithoutSalt, - (nextBlockTime + startBlockProposeTime_quick), - (upperDevToBlockProveTime_quick + 1) - ) - ); - - if (proofTimePerBlockI > maxTime) { - maxTime = proofTimePerBlockI; - } - } else { - proofTimePerBlockI = uint64( - pickRandomNumber( - newRandomWithoutSalt, - (nextBlockTime + startBlockProposeTime), - (upperDevToBlockProveTime + 1) - ) - ); - - if (proofTimePerBlockI > maxTime) { - maxTime = proofTimePerBlockI; - } - } - - if (lastTimestampProve > 0) { - totalDiffsProve += proofTimePerBlockI - lastTimestampProp; - } - lastTimestampProve = proofTimePerBlockI; - // It is possible that proof for block N+1 comes before N, so we - // need to keep track of that. Because - // the proofs per block is related to propose of that same block - // (index). - _proofTimeToBlockIndexes[proofTimePerBlockI].push(i); - - // We need this info to extract / export !! 
- console2.log(i + 1, ";", proofTimePerBlockI - lastTimestampProp); - salt = - uint256(keccak256(abi.encodePacked(proofTimePerBlockI, salt))); - } - - uint256 proposedIndex; - - console2.log("Last second:", maxTime); - console2.log( - "Average proposal time: ", totalDiffsProp / blocksToSimulate - ); - console2.log("Average proof time: ", totalDiffsProve / blocksToSimulate); - printVariableHeaders(); - //It is a divider / marker for the parser - console2.log("!-----------------------------"); - printVariables(); - // This is a way we can de-couple proposing from proving - for ( - uint256 secondsElapsed = 0; - secondsElapsed <= maxTime; - secondsElapsed++ - ) { - newRandomWithoutSalt = uint256( - keccak256( - abi.encodePacked( - newRandomWithoutSalt, - block.prevrandao, - secondsElapsed, - msg.sender, - block.timestamp, - salt - ) - ) - ); - - // We are proposing here - if ( - secondsElapsed == blocksProposedTimestamp[proposedIndex] - && proposedIndex < blocksToSimulate - ) { - //console2.log("FOR CYCLE: Time of PROPOSAL is:", - // blocksProposedTimestamp[proposedIndex]); - uint32 gasLimit = uint32( - pickRandomNumber( - newRandomWithoutSalt, 100e3, (3_000_000 - 100_000 + 1) - ) - ); // 100K to 30M - salt = uint256(keccak256(abi.encodePacked(gasLimit, salt))); - - if (proposedIndex == 0) { - parentHashes[proposedIndex] = GENESIS_BLOCK_HASH; - } else { - parentHashes[proposedIndex] = blockHashes[proposedIndex - 1]; - } - - salt = uint256(keccak256(abi.encodePacked(salt))); - - uint24 txListSize = uint24( - pickRandomNumber( - newRandomWithoutSalt, 1, conf.blockMaxTxListBytes - ) //Actually (conf.blockMaxTxListBytes-1)+1 but that's the - // same - ); - salt = uint256(keccak256(abi.encodePacked(txListSize, salt))); - - blockHashes[proposedIndex] = bytes32( - pickRandomNumber(newRandomWithoutSalt, 0, type(uint256).max) - ); - salt = uint256( - keccak256( - abi.encodePacked(blockHashes[proposedIndex], salt) - ) - ); - - signalRoots[proposedIndex] = bytes32( - pickRandomNumber(newRandomWithoutSalt, 0, type(uint256).max) - ); - salt = uint256( - keccak256( - abi.encodePacked(signalRoots[proposedIndex], salt) - ) - ); - - metas[proposedIndex] = - proposeBlock(Alice, Bob, gasLimit, txListSize); - - if (proposedIndex < blocksToSimulate - 1) proposedIndex++; - - printVariables(); - } - - // We are proving here - if (_proofTimeToBlockIndexes[secondsElapsed].length > 0) { - //console2.log("Duplicates check"); - for ( - uint256 i; - i < _proofTimeToBlockIndexes[secondsElapsed].length; - i++ - ) { - uint256 blockId = - _proofTimeToBlockIndexes[secondsElapsed][i]; - - proveBlock( - Bob, - Bob, - metas[blockId], - parentHashes[blockId], - blockHashes[blockId], - signalRoots[blockId] - ); - } - } - - // Increment time with 1 seconds - vm.warp(block.timestamp + 1); - //Log every 12 sec - if (block.timestamp % 12 == 0) { - printVariables(); - } - } - console2.log("-----------------------------!"); - } - - // 90% slow proofs (around 30 mins or so) and 10% (around 1-5 mins ) - function test_90percent_quick_10percent_slow() external { - uint256 time = block.timestamp; - uint256 startBlockProposeTime_quick = 60 seconds; // For the 10% where - // it is 'quick' - uint256 upperDevToBlockProveTime_quick = 240 seconds; // For the 10% - // where it is quick - - assertEq(time, 1); - - giveEthAndTko(Bob, 1e6 ether, 10_000 ether); - - TaikoData.BlockMetadata[] memory metas = new TaikoData.BlockMetadata[]( - blocksToSimulate - ); - - // Determine every timestamp of the block we want to simulate - console2.log("BlockId, 
ProofTime"); - for (uint256 i = 0; i < blocksToSimulate; i++) { - newRandomWithoutSalt = uint256( - keccak256( - abi.encodePacked( - block.prevrandao, - msg.sender, - block.timestamp, - i, - newRandomWithoutSalt, - salt - ) - ) - ); - blocksProposedTimestamp[i] = uint64( - pickRandomNumber( - newRandomWithoutSalt, - nextBlockTime, - (minDiffToBlockPropTime + 1) - ) - ); - nextBlockTime = blocksProposedTimestamp[i] + minDiffToBlockPropTime; - - // Avg. calculation - if (lastTimestampProp > 0) { - totalDiffsProp += blocksProposedTimestamp[i] - lastTimestampProp; - } - - lastTimestampProp = blocksProposedTimestamp[i]; - // We need this info to extract / export !! - //console2.log("Time of PROPOSAL is:", blocksProposedTimestamp[i]); - salt = uint256( - keccak256( - abi.encodePacked( - nextBlockTime, salt, i, newRandomWithoutSalt - ) - ) - ); - - uint64 proofTimePerBlockI; - if (i % 10 == 0) { - // 10% 'slow proofs' - proofTimePerBlockI = uint64( - pickRandomNumber( - newRandomWithoutSalt, - (nextBlockTime + startBlockProposeTime), - (upperDevToBlockProveTime + 1) - ) - ); - - if (proofTimePerBlockI > maxTime) { - maxTime = proofTimePerBlockI; - } - } else { - // A very quick proof this case - proofTimePerBlockI = uint64( - pickRandomNumber( - newRandomWithoutSalt, - (nextBlockTime + startBlockProposeTime_quick), - (upperDevToBlockProveTime_quick + 1) - ) - ); - - if (proofTimePerBlockI > maxTime) { - maxTime = proofTimePerBlockI; - } - } - - if (proofTimePerBlockI > maxTime) { - maxTime = proofTimePerBlockI; - } - - if (lastTimestampProve > 0) { - totalDiffsProve += proofTimePerBlockI - lastTimestampProp; - } - lastTimestampProve = proofTimePerBlockI; - // It is possible that proof for block N+1 comes before N, so we - // need to keep track of that. Because - // the proofs per block is related to propose of that same block - // (index). - _proofTimeToBlockIndexes[proofTimePerBlockI].push(i); - - // We need this info to extract / export !! 
- console2.log(i + 1, ";", proofTimePerBlockI - lastTimestampProp); - salt = - uint256(keccak256(abi.encodePacked(proofTimePerBlockI, salt))); - } - - uint256 proposedIndex; - - console2.log("Last second:", maxTime); - console2.log( - "Average proposal time: ", totalDiffsProp / blocksToSimulate - ); - console2.log("Average proof time: ", totalDiffsProve / blocksToSimulate); - printVariableHeaders(); - //It is a divider / marker for the parser - console2.log("!-----------------------------"); - printVariables(); - // This is a way we can de-couple proposing from proving - for ( - uint256 secondsElapsed = 0; - secondsElapsed <= maxTime; - secondsElapsed++ - ) { - newRandomWithoutSalt = uint256( - keccak256( - abi.encodePacked( - newRandomWithoutSalt, - block.prevrandao, - secondsElapsed, - msg.sender, - block.timestamp, - salt - ) - ) - ); - - // We are proposing here - if ( - secondsElapsed == blocksProposedTimestamp[proposedIndex] - && proposedIndex < blocksToSimulate - ) { - //console2.log("FOR CYCLE: Time of PROPOSAL is:", - // blocksProposedTimestamp[proposedIndex]); - uint32 gasLimit = uint32( - pickRandomNumber( - newRandomWithoutSalt, 100e3, (3_000_000 - 100_000 + 1) - ) - ); // 100K to 30M - salt = uint256(keccak256(abi.encodePacked(gasLimit, salt))); - - if (proposedIndex == 0) { - parentHashes[proposedIndex] = GENESIS_BLOCK_HASH; - } else { - parentHashes[proposedIndex] = blockHashes[proposedIndex - 1]; - } - - salt = uint256(keccak256(abi.encodePacked(salt))); - - uint24 txListSize = uint24( - pickRandomNumber( - newRandomWithoutSalt, 1, conf.blockMaxTxListBytes - ) //Actually (conf.blockMaxTxListBytes-1)+1 but that's the - // same - ); - salt = uint256(keccak256(abi.encodePacked(txListSize, salt))); - - blockHashes[proposedIndex] = bytes32( - pickRandomNumber(newRandomWithoutSalt, 0, type(uint256).max) - ); - salt = uint256( - keccak256( - abi.encodePacked(blockHashes[proposedIndex], salt) - ) - ); - - signalRoots[proposedIndex] = bytes32( - pickRandomNumber(newRandomWithoutSalt, 0, type(uint256).max) - ); - salt = uint256( - keccak256( - abi.encodePacked(signalRoots[proposedIndex], salt) - ) - ); - - metas[proposedIndex] = - proposeBlock(Alice, Bob, gasLimit, txListSize); - - if (proposedIndex < blocksToSimulate - 1) proposedIndex++; - - printVariables(); - } - - // We are proving here - if (_proofTimeToBlockIndexes[secondsElapsed].length > 0) { - //console2.log("Duplicates check"); - for ( - uint256 i; - i < _proofTimeToBlockIndexes[secondsElapsed].length; - i++ - ) { - uint256 blockId = - _proofTimeToBlockIndexes[secondsElapsed][i]; - - proveBlock( - Bob, - Bob, - metas[blockId], - parentHashes[blockId], - blockHashes[blockId], - signalRoots[blockId] - ); - } - } - - // Increment time with 1 seconds - vm.warp(block.timestamp + 1); - //Log every 12 sec - if (block.timestamp % 12 == 0) { - printVariables(); - } - } - console2.log("-----------------------------!"); - } - - function printVariableHeaders() internal view { - string memory str = string.concat( - "\nlogCount,", - "time,", - "lastVerifiedBlockId,", - "numBlocks,", - "numAuctions," - ); - console2.log(str); - } - - function printVariables() internal { - TaikoData.StateVariables memory vars = L1.getStateVariables(); - string memory str = string.concat( - Strings.toString(logCount++), - ";", - Strings.toString(block.timestamp), - ";", - Strings.toString(vars.lastVerifiedBlockId), - ";", - Strings.toString(vars.numBlocks) - ); - console2.log(str); - } - - // Semi-random number generator - function pickRandomNumber( - 
uint256 randomNum, - uint256 lowerLimit, - uint256 diffBtwLowerAndUpperLimit - ) - internal - view - returns (uint256) - { - randomNum = uint256(keccak256(abi.encodePacked(randomNum, salt))); - return (lowerLimit + (randomNum % diffBtwLowerAndUpperLimit)); - } -} diff --git a/packages/protocol/test/L1/TaikoL1TestBase.sol b/packages/protocol/test/L1/TaikoL1TestBase.sol index 988b6d18b8a..0a1ff63749e 100644 --- a/packages/protocol/test/L1/TaikoL1TestBase.sol +++ b/packages/protocol/test/L1/TaikoL1TestBase.sol @@ -86,6 +86,9 @@ abstract contract TaikoL1TestBase is TestBase { ); // Set protocol broker + registerAddress("taiko", address(this)); + tko.mint(address(this), 1e9 ether); + registerAddress("taiko", address(L1)); L1.init(address(addressManager), GENESIS_BLOCK_HASH); diff --git a/packages/protocol/test/L1/TaikoToken.t.sol b/packages/protocol/test/L1/TaikoToken.t.sol new file mode 100644 index 00000000000..2af49e681b1 --- /dev/null +++ b/packages/protocol/test/L1/TaikoToken.t.sol @@ -0,0 +1,263 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import { TestBase } from "../TestBase.sol"; +import { AddressManager } from "../../contracts/common/AddressManager.sol"; +import { AddressResolver } from "../../contracts/common/AddressResolver.sol"; +import { TaikoToken } from "../../contracts/L1/TaikoToken.sol"; +import { TransparentUpgradeableProxy } from + "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol"; + +contract TaikoTokenTest is TestBase { + bytes32 GENESIS_BLOCK_HASH; + + address public tokenOwner; + address public taikoL1; + + AddressManager public addressManager; + TransparentUpgradeableProxy public tokenProxy; + TaikoToken public tko; + TaikoToken public tkoUpgradedImpl; + + function setUp() public { + GENESIS_BLOCK_HASH = getRandomBytes32(); + + tokenOwner = getRandomAddress(); + taikoL1 = getRandomAddress(); + + addressManager = new AddressManager(); + addressManager.init(); + _registerAddress("taiko", taikoL1); + + tko = new TaikoToken(); + + address[] memory premintRecipients = new address[](2); + premintRecipients[0] = Yasmine; + premintRecipients[1] = Zachary; + + uint256[] memory premintAmounts = new uint256[](2); + premintAmounts[0] = 5 ether; + premintAmounts[1] = 5 ether; + + tokenProxy = _deployViaProxy( + address(tko), + bytes.concat( + tko.init.selector, + abi.encode( + address(addressManager), + "Taiko Token", + "TKO", + premintRecipients, + premintAmounts + ) + ) + ); + + tko = TaikoToken(address(tokenProxy)); + } + + function test_TaikoToken_proper_premint() public { + assertEq(tko.balanceOf(Yasmine), 5 ether); + + assertEq(tko.balanceOf(Zachary), 5 ether); + } + + function test_TaikoToken_upgrade() public { + tkoUpgradedImpl = new TaikoToken(); + + vm.prank(tokenOwner); + tokenProxy.upgradeTo(address(tkoUpgradedImpl)); + + // Check if balance is still same + assertEq(tko.balanceOf(Yasmine), 5 ether); + assertEq(tko.balanceOf(Zachary), 5 ether); + } + + function test_TaikoToken_upgrade_without_admin_rights() public { + tkoUpgradedImpl = new TaikoToken(); + + vm.expectRevert(); + tokenProxy.upgradeTo(address(tkoUpgradedImpl)); + } + + function test_TaikoToken_mint() public { + assertEq(tko.balanceOf(Emma), 0 ether); + + uint256 amountToMint = 1 ether; + vm.prank(taikoL1); + tko.mint(Emma, amountToMint); + assertEq(tko.balanceOf(Emma), amountToMint); + } + + function test_TaikoToken_mint_invalid_address() public { + vm.prank(taikoL1); + vm.expectRevert("ERC20: mint to the zero address"); + tko.mint(address(0), 1 ether); + } + + 
function test_TaikoToken_mint_not_taiko_l1() public { + vm.expectRevert(AddressResolver.RESOLVER_DENIED.selector); + tko.mint(Emma, 1 ether); + } + + function test_TaikoToken_burn() public { + uint256 amountToMint = 1 ether; + vm.prank(taikoL1); + tko.mint(Emma, amountToMint); + assertEq(tko.balanceOf(Emma), amountToMint); + + vm.prank(taikoL1); + tko.burn(Emma, amountToMint); + assertEq(tko.balanceOf(Emma), 0); + } + + function test_TaikoToken_burn_not_taiko_l1() public { + vm.expectRevert(AddressResolver.RESOLVER_DENIED.selector); + tko.burn(address(0), 1 ether); + } + + function test_TaikoToken_burn_amount_exceeded() public { + uint256 amountToMint = 1 ether; + vm.prank(taikoL1); + tko.mint(Emma, amountToMint); + assertEq(tko.balanceOf(Emma), amountToMint); + } + + function test_TaikoToken_transfer() public { + uint256 amountToMint = 1 ether; + vm.prank(taikoL1); + tko.mint(Emma, amountToMint); + assertEq(tko.balanceOf(Emma), amountToMint); + + vm.prank(Emma); + tko.transfer(David, amountToMint); + + assertEq(tko.balanceOf(Emma), 0); + assertEq(tko.balanceOf(David), amountToMint); + } + + function test_TaikoToken_transfer_invalid_address() public { + uint256 amountToMint = 1 ether; + vm.prank(taikoL1); + tko.mint(Emma, amountToMint); + assertEq(tko.balanceOf(Emma), amountToMint); + + vm.prank(Emma); + vm.expectRevert("ERC20: transfer to the zero address"); + tko.transfer(address(0), amountToMint); + } + + function test_TaikoToken_transfer_to_contract_address() public { + uint256 amountToMint = 1 ether; + vm.prank(taikoL1); + tko.mint(Emma, amountToMint); + assertEq(tko.balanceOf(Emma), amountToMint); + + vm.prank(Emma); + vm.expectRevert(TaikoToken.TKO_INVALID_ADDR.selector); + tko.transfer(address(tko), amountToMint); + } + + function test_TaikoToken_transfer_amount_exceeded() public { + uint256 amountToMint = 1 ether; + uint256 amountToTransfer = 2 ether; + vm.prank(taikoL1); + tko.mint(Emma, amountToMint); + assertEq(tko.balanceOf(Emma), amountToMint); + + vm.prank(Emma); + vm.expectRevert(); + tko.transfer(address(tko), amountToTransfer); + assertEq(tko.balanceOf(Emma), amountToMint); + } + + function test_TaikoToken_transferFrom() public { + uint256 amountToMint = 1 ether; + vm.prank(taikoL1); + tko.mint(Emma, amountToMint); + assertEq(tko.balanceOf(Emma), amountToMint); + + vm.prank(Emma); + tko.approve(David, 1 ether); + + vm.prank(David); + tko.transferFrom(Emma, David, amountToMint); + + assertEq(tko.balanceOf(Emma), 0); + assertEq(tko.balanceOf(David), amountToMint); + } + + function test_TaikoToken_transferFrom_to_is_invalid() public { + uint256 amountToMint = 1 ether; + vm.prank(taikoL1); + tko.mint(Emma, amountToMint); + assertEq(tko.balanceOf(Emma), amountToMint); + + vm.prank(Emma); + tko.approve(David, 1 ether); + + vm.prank(David); + vm.expectRevert("ERC20: transfer to the zero address"); + tko.transferFrom(Emma, address(0), amountToMint); + } + + function test_TaikoToken_transferFrom_to_is_the_contract() public { + uint256 amountToMint = 1 ether; + vm.prank(taikoL1); + tko.mint(Emma, amountToMint); + assertEq(tko.balanceOf(Emma), amountToMint); + + vm.prank(Emma); + tko.approve(David, 1 ether); + + vm.prank(David); + vm.expectRevert(TaikoToken.TKO_INVALID_ADDR.selector); + tko.transferFrom(Emma, address(tko), amountToMint); + } + + function test_TaikoToken_transferFrom_from_is_invalid() public { + uint256 amountToMint = 1 ether; + vm.prank(taikoL1); + tko.mint(Emma, amountToMint); + assertEq(tko.balanceOf(Emma), amountToMint); + + vm.prank(Emma); + tko.approve(David, 1 
ether); + + vm.prank(David); + // transferFrom(address(0)) will always throw has no allowance + vm.expectRevert("ERC20: insufficient allowance"); + tko.transferFrom(address(0), David, amountToMint); + } + + function test_TaikoToken_transferFrom_amount_exceeded() public { + uint256 amountToMint = 1 ether; + uint256 amountToTransfer = 2 ether; + vm.prank(taikoL1); + tko.mint(Emma, amountToMint); + assertEq(tko.balanceOf(Emma), amountToMint); + + vm.prank(Emma); + vm.expectRevert(); + tko.transfer(address(tko), amountToTransfer); + assertEq(tko.balanceOf(Emma), amountToMint); + } + + function _registerAddress(bytes32 nameHash, address addr) private { + addressManager.setAddress(block.chainid, nameHash, addr); + } + + function _deployViaProxy( + address implementation, + bytes memory data + ) + private + returns (TransparentUpgradeableProxy) + { + return new TransparentUpgradeableProxy( + implementation, + tokenOwner, + data + ); + } +}