Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

core/types/rollup.go: add unscaled version of EstimatedL1Size and rename EstimatedDASize #424

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 2 additions & 4 deletions core/txpool/legacypool/legacypool.go
Original file line number Diff line number Diff line change
Expand Up @@ -570,8 +570,7 @@ func (pool *LegacyPool) Pending(filter txpool.PendingFilter) map[common.Address]
}
if filter.MaxDATxSize != nil && !pool.locals.contains(addr) {
for i, tx := range txs {
estimate := types.EstimatedL1SizeScaled(tx.RollupCostData())
estimate = estimate.Div(estimate, big.NewInt(1e6))
estimate := tx.RollupCostData().EstimatedDASize()
if estimate.Cmp(filter.MaxDATxSize) > 0 {
log.Debug("filtering tx that exceeds max da tx size",
"hash", tx.Hash(), "txda", estimate, "dalimit", filter.MaxDATxSize)
Expand All @@ -583,8 +582,7 @@ func (pool *LegacyPool) Pending(filter txpool.PendingFilter) map[common.Address]
if len(txs) > 0 {
lazies := make([]*txpool.LazyTransaction, len(txs))
for i := 0; i < len(txs); i++ {
daBytes := types.EstimatedL1SizeScaled(txs[i].RollupCostData())
daBytes = daBytes.Div(daBytes, big.NewInt(1e6))
daBytes := txs[i].RollupCostData().EstimatedDASize()
lazies[i] = &txpool.LazyTransaction{
Pool: pool,
Hash: txs[i].Hash(),
Expand Down
17 changes: 12 additions & 5 deletions core/types/rollup_cost.go
Original file line number Diff line number Diff line change
Expand Up @@ -364,7 +364,7 @@ func NewL1CostFuncFjord(l1BaseFee, l1BlobBaseFee, baseFeeScalar, blobFeeScalar *
calldataCostPerByte := new(big.Int).Mul(scaledL1BaseFee, sixteen)
blobCostPerByte := new(big.Int).Mul(blobFeeScalar, l1BlobBaseFee)
l1FeeScaled := new(big.Int).Add(calldataCostPerByte, blobCostPerByte)
estimatedSize := EstimatedL1SizeScaled(costData)
estimatedSize := costData.estimatedDASizeScaled()
l1CostScaled := new(big.Int).Mul(estimatedSize, l1FeeScaled)
l1Cost := new(big.Int).Div(l1CostScaled, fjordDivisor)

Expand All @@ -375,10 +375,10 @@ func NewL1CostFuncFjord(l1BaseFee, l1BlobBaseFee, baseFeeScalar, blobFeeScalar *
}
}

// EstimatedL1Size estimates the number of bytes the transaction will occupy in its L1 batch using
// the Fjord linear regression model, and returns this value scaled up by 1e6.
func EstimatedL1SizeScaled(costData RollupCostData) *big.Int {
fastLzSize := new(big.Int).SetUint64(costData.FastLzSize)
// estimatedDASizeScaled estimates the number of bytes the transaction will occupy in the DA batch using the Fjord
// linear regression model, and returns this value scaled up by 1e6.
func (cd RollupCostData) estimatedDASizeScaled() *big.Int {
fastLzSize := new(big.Int).SetUint64(cd.FastLzSize)
estimatedSize := new(big.Int).Add(L1CostIntercept, new(big.Int).Mul(L1CostFastlzCoef, fastLzSize))

if estimatedSize.Cmp(MinTransactionSizeScaled) < 0 {
Expand All @@ -387,6 +387,13 @@ func EstimatedL1SizeScaled(costData RollupCostData) *big.Int {
return estimatedSize
}

// EstimatedDASize estimates the number of bytes the transaction will occupy in its DA batch using the Fjord linear
// regression model.
func (cd RollupCostData) EstimatedDASize() *big.Int {
	scaled := cd.estimatedDASizeScaled()
	// Undo the 1e6 scaling applied by estimatedDASizeScaled; integer division rounds toward zero.
	return scaled.Div(scaled, big.NewInt(1e6))
}

func extractEcotoneFeeParams(l1FeeParams []byte) (l1BaseFeeScalar, l1BlobBaseFeeScalar *big.Int) {
offset := scalarSectionStart
l1BaseFeeScalar = new(big.Int).SetBytes(l1FeeParams[offset : offset+4])
Expand Down