fix(rollup-relayer): catch errors (#1427)
colinlyguo authored Jul 10, 2024
1 parent c812288 commit 8471838
Showing 5 changed files with 62 additions and 5 deletions.
2 changes: 1 addition & 1 deletion common/version/version.go
@@ -5,7 +5,7 @@ import (
 	"runtime/debug"
 )
 
-var tag = "v4.4.26"
+var tag = "v4.4.27"
 
 var commit = func() string {
 	if info, ok := debug.ReadBuildInfo(); ok {
3 changes: 2 additions & 1 deletion rollup/internal/controller/watcher/batch_proposer.go
@@ -240,7 +240,8 @@ func (p *BatchProposer) proposeBatch() error {
 		batch.Chunks = append(batch.Chunks, chunk)
 		metrics, calcErr := utils.CalculateBatchMetrics(&batch, codecVersion)
 
-		if errors.Is(calcErr, &encoding.CompressedDataCompatibilityError{}) {
+		var compressErr *encoding.CompressedDataCompatibilityError
+		if errors.As(calcErr, &compressErr) {
 			if i == 0 {
 				// The first chunk fails compressed data compatibility check, manual fix is needed.
 				return fmt.Errorf("the first chunk fails compressed data compatibility check; start block number: %v, end block number: %v", dbChunks[0].StartBlockNumber, dbChunks[0].EndBlockNumber)
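For context on the fix: errors.Is reports a match only when an error in the chain compares equal to the target (or the error defines its own Is method), so checking against a freshly allocated &encoding.CompressedDataCompatibilityError{} pointer could never succeed. errors.As matches by type instead and assigns the matched error, which is what the proposers and the metrics helpers switch to in this commit. Below is a minimal self-contained sketch of the difference, using a stand-in error type rather than the real da-codec one:

package main

import (
	"errors"
	"fmt"
)

// compatError is a stand-in that mimics the shape of
// encoding.CompressedDataCompatibilityError (an error struct carried by
// pointer); it is not the real da-codec type.
type compatError struct {
	Err error
}

func (e *compatError) Error() string { return "compressed data incompatible: " + e.Err.Error() }
func (e *compatError) Unwrap() error { return e.Err }

func main() {
	// Simulate an estimation failure wrapped with extra context via %w.
	err := fmt.Errorf("estimate blob size: %w", &compatError{Err: errors.New("blob overflow")})

	// errors.Is compares against a concrete target value: two distinct
	// *compatError pointers are never equal, and the type defines no Is
	// method, so this prints false even though the error is in the chain.
	fmt.Println(errors.Is(err, &compatError{Err: errors.New("blob overflow")})) // false

	// errors.As matches by type: it walks the chain and assigns the first
	// *compatError it finds, which is the pattern the fix adopts.
	var target *compatError
	fmt.Println(errors.As(err, &target)) // true; target now points at the wrapped *compatError
}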
3 changes: 2 additions & 1 deletion rollup/internal/controller/watcher/chunk_proposer.go
@@ -242,7 +242,8 @@ func (p *ChunkProposer) proposeChunk() error {
 		chunk.Blocks = append(chunk.Blocks, block)
 
 		metrics, calcErr := utils.CalculateChunkMetrics(&chunk, codecVersion)
-		if errors.Is(calcErr, &encoding.CompressedDataCompatibilityError{}) {
+		var compressErr *encoding.CompressedDataCompatibilityError
+		if errors.As(calcErr, &compressErr) {
 			if i == 0 {
 				// The first block fails compressed data compatibility check, manual fix is needed.
 				return fmt.Errorf("the first block fails compressed data compatibility check; block number: %v", block.Header.Number)
6 changes: 4 additions & 2 deletions rollup/internal/utils/utils.go
@@ -92,7 +92,8 @@ func CalculateChunkMetrics(chunk *encoding.Chunk, codecVersion encoding.CodecVer
 		metrics.L1CommitUncompressedBatchBytesSize, metrics.L1CommitBlobSize, err = codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk)
 		metrics.EstimateBlobSizeTime = time.Since(start)
 		if err != nil {
-			if errors.Is(err, &encoding.CompressedDataCompatibilityError{}) {
+			var compressErr *encoding.CompressedDataCompatibilityError
+			if errors.As(err, &compressErr) {
 				return nil, err
 			} else {
 				return nil, fmt.Errorf("failed to estimate codecv2 chunk L1 commit batch size and blob size: %w", err)
@@ -176,7 +177,8 @@ func CalculateBatchMetrics(batch *encoding.Batch, codecVersion encoding.CodecVer
 		metrics.L1CommitUncompressedBatchBytesSize, metrics.L1CommitBlobSize, err = codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(batch)
 		metrics.EstimateBlobSizeTime = time.Since(start)
 		if err != nil {
-			if errors.Is(err, &encoding.CompressedDataCompatibilityError{}) {
+			var compressErr *encoding.CompressedDataCompatibilityError
+			if errors.As(err, &compressErr) {
 				return nil, err
 			} else {
 				return nil, fmt.Errorf("failed to estimate codecv2 batch L1 commit batch size and blob size: %w", err)
53 changes: 53 additions & 0 deletions rollup/internal/utils/utils_test.go
@@ -0,0 +1,53 @@
package utils

import (
	"errors"
	"math/big"
	"testing"

	"github.com/agiledragon/gomonkey/v2"
	"github.com/scroll-tech/da-codec/encoding"
	"github.com/scroll-tech/da-codec/encoding/codecv2"
	"github.com/scroll-tech/go-ethereum/common"
	"github.com/scroll-tech/go-ethereum/core/types"
	"github.com/stretchr/testify/assert"
)

// regression test
func TestCompressedDataCompatibilityErrorCatching(t *testing.T) {
	block := &encoding.Block{
		Header: &types.Header{
			Number: big.NewInt(0),
		},
		RowConsumption: &types.RowConsumption{},
	}
	chunk := &encoding.Chunk{
		Blocks: []*encoding.Block{block},
	}
	batch := &encoding.Batch{
		Index:                      0,
		TotalL1MessagePoppedBefore: 0,
		ParentBatchHash:            common.Hash{},
		Chunks:                     []*encoding.Chunk{chunk},
	}

	patchGuard1 := gomonkey.ApplyFunc(codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize, func(b *encoding.Chunk) (uint64, uint64, error) {
		return 0, 0, &encoding.CompressedDataCompatibilityError{Err: errors.New("test-error-1")}
	})
	defer patchGuard1.Reset()

	var compressErr *encoding.CompressedDataCompatibilityError

	_, err := CalculateChunkMetrics(chunk, encoding.CodecV2)
	assert.Error(t, err)
	assert.ErrorAs(t, err, &compressErr)

	patchGuard2 := gomonkey.ApplyFunc(codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize, func(b *encoding.Batch) (uint64, uint64, error) {
		return 0, 0, &encoding.CompressedDataCompatibilityError{Err: errors.New("test-error-2")}
	})
	defer patchGuard2.Reset()

	_, err = CalculateBatchMetrics(batch, encoding.CodecV2)
	assert.Error(t, err)
	assert.ErrorAs(t, err, &compressErr)
}
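A usage note on the new test: gomonkey patches the target functions by rewriting their entry points at runtime, so it typically needs inlining disabled to take effect reliably, for example running the package tests with go test -gcflags=all=-l; the exact invocation used by this repository's CI is not shown here.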
