
Commit 57f05bf

Measure time spent on encoding and the compaction ratio
Add metrics to measure the time spent on CBOR and ZSTD encoding/decoding, and the compaction ratio achieved by ZSTD. Fixes #863
1 parent 30fac2e commit 57f05bf
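
For context, a minimal usage sketch that is not part of this commit: it assumes a caller inside the go-f3 module (the encoding package is internal), and the message type is a hypothetical stand-in for a cbor-gen generated type. With this change, every Encode/Decode call on the ZSTD codec also records the new timing and compaction-ratio metrics.

package main

import (
    "fmt"
    "io"

    "github.com/filecoin-project/go-f3/internal/encoding"
)

// message is a hypothetical payload type; in practice the codec is used with
// cbor-gen generated types that implement MarshalCBOR/UnmarshalCBOR.
type message struct{ payload []byte }

func (m *message) MarshalCBOR(w io.Writer) error { _, err := w.Write(m.payload); return err }
func (m *message) UnmarshalCBOR(r io.Reader) error {
    var err error
    m.payload, err = io.ReadAll(r)
    return err
}

func main() {
    codec, err := encoding.NewZSTD[*message]()
    if err != nil {
        panic(err)
    }
    // Encode CBOR-encodes then zstd-compresses; Decode reverses it. With this
    // commit, each call also records f3_internal_encoding_time and
    // f3_internal_encoding_zstd_compaction_ratio.
    compressed, err := codec.Encode(&message{payload: []byte("hello f3")})
    if err != nil {
        panic(err)
    }
    var decoded message
    if err := codec.Decode(compressed, &decoded); err != nil {
        panic(err)
    }
    fmt.Printf("round-tripped %q\n", decoded.payload)
}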

File tree

4 files changed: +136 -10 lines

internal/encoding/encoding.go (+73 -10)
@@ -2,11 +2,16 @@ package encoding
 
 import (
     "bytes"
+    "context"
     "fmt"
+    "reflect"
     "sync"
+    "time"
 
     "github.com/klauspost/compress/zstd"
     cbg "github.com/whyrusleeping/cbor-gen"
+    "go.opentelemetry.io/otel/attribute"
+    "go.opentelemetry.io/otel/metric"
 )
 
 // maxDecompressedSize is the default maximum amount of memory allocated by the
@@ -37,15 +42,29 @@ func NewCBOR[T CBORMarshalUnmarshaler]() *CBOR[T] {
     return &CBOR[T]{}
 }
 
-func (c *CBOR[T]) Encode(m T) ([]byte, error) {
+func (c *CBOR[T]) Encode(m T) (_ []byte, _err error) {
+    defer func(start time.Time) {
+        if _err != nil {
+            metrics.encodingTime.Record(context.Background(),
+                time.Since(start).Seconds(),
+                metric.WithAttributeSet(attrSetCborEncode))
+        }
+    }(time.Now())
     var out bytes.Buffer
     if err := m.MarshalCBOR(&out); err != nil {
         return nil, err
     }
     return out.Bytes(), nil
 }
 
-func (c *CBOR[T]) Decode(v []byte, t T) error {
+func (c *CBOR[T]) Decode(v []byte, t T) (_err error) {
+    defer func(start time.Time) {
+        if _err != nil {
+            metrics.encodingTime.Record(context.Background(),
+                time.Since(start).Seconds(),
+                metric.WithAttributeSet(attrSetCborDecode))
+        }
+    }(time.Now())
     r := bytes.NewReader(v)
     return t.UnmarshalCBOR(r)
 }
@@ -54,6 +73,9 @@ type ZSTD[T CBORMarshalUnmarshaler] struct {
     cborEncoding *CBOR[T]
     compressor   *zstd.Encoder
     decompressor *zstd.Decoder
+
+    metricAttr       attribute.KeyValue
+    metricAttrLoader sync.Once
 }
 
 func NewZSTD[T CBORMarshalUnmarshaler]() (*ZSTD[T], error) {
@@ -74,26 +96,67 @@ func NewZSTD[T CBORMarshalUnmarshaler]() (*ZSTD[T], error) {
     }, nil
 }
 
-func (c *ZSTD[T]) Encode(m T) ([]byte, error) {
-    cborEncoded, err := c.cborEncoding.Encode(m)
-    if len(cborEncoded) > maxDecompressedSize {
+func (c *ZSTD[T]) Encode(t T) (_ []byte, _err error) {
+    decompressed, err := c.cborEncoding.Encode(t)
+    if len(decompressed) > maxDecompressedSize {
         // Error out early if the encoded value is too large to be decompressed.
-        return nil, fmt.Errorf("encoded value cannot exceed maximum size: %d > %d", len(cborEncoded), maxDecompressedSize)
+        return nil, fmt.Errorf("encoded value cannot exceed maximum size: %d > %d", len(decompressed), maxDecompressedSize)
     }
     if err != nil {
         return nil, err
     }
-    compressed := c.compressor.EncodeAll(cborEncoded, make([]byte, 0, len(cborEncoded)))
+
+    compressed := c.compress(decompressed)
+    c.meterCompactionRate(t, len(decompressed), len(compressed))
     return compressed, nil
 }
 
-func (c *ZSTD[T]) Decode(v []byte, t T) error {
+func (c *ZSTD[T]) Decode(compressed []byte, t T) error {
     buf := bufferPool.Get().(*[]byte)
     defer bufferPool.Put(buf)
 
-    cborEncoded, err := c.decompressor.DecodeAll(v, (*buf)[:0])
+    decompressed, err := c.decompressInto(compressed, (*buf)[:0])
     if err != nil {
         return err
     }
-    return c.cborEncoding.Decode(cborEncoded, t)
+    c.meterCompactionRate(t, len(decompressed), len(compressed))
+    return c.cborEncoding.Decode(decompressed, t)
+}
+
+func (c *ZSTD[T]) compress(decompressed []byte) []byte {
+    defer func(start time.Time) {
+        metrics.encodingTime.Record(context.Background(),
+            time.Since(start).Seconds(),
+            metric.WithAttributeSet(attrSetZstdEncode))
+    }(time.Now())
+    return c.compressor.EncodeAll(decompressed, make([]byte, 0, len(decompressed)))
+}
+
+func (c *ZSTD[T]) decompressInto(compressed []byte, buf []byte) (_ []byte, _err error) {
+    defer func(start time.Time) {
+        if _err != nil {
+            metrics.encodingTime.Record(context.Background(),
+                time.Since(start).Seconds(),
+                metric.WithAttributeSet(attrSetZstdDecode))
+        }
+    }(time.Now())
+    return c.decompressor.DecodeAll(compressed, buf)
+}
+
+func (c *ZSTD[T]) meterCompactionRate(target T, decompressedSize, compressedSize int) {
+    compactionRatio := float64(decompressedSize) / float64(compressedSize)
+    metrics.zstdCompactionRatio.Record(context.Background(), compactionRatio, metric.WithAttributes(c.getMetricAttribute(target)))
+}
+
+func (c *ZSTD[T]) getMetricAttribute(t T) attribute.KeyValue {
+    c.metricAttrLoader.Do(func() {
+        const key = "type"
+        switch target := reflect.TypeOf(t); {
+        case target.Kind() == reflect.Ptr:
+            c.metricAttr = attribute.String(key, target.Elem().Name())
+        default:
+            c.metricAttr = attribute.String(key, target.Name())
        }
    })
    return c.metricAttr
 }
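
The instrumentation above relies on a small deferred-closure idiom: time.Now() is evaluated immediately and bound as the deferred function's argument, and the named _err result lets the closure observe how the call ended once the function returns. Below is a standalone, illustrative sketch of the same pattern (not part of this commit), with a Printf standing in for the histogram Record call.

package main

import (
    "errors"
    "fmt"
    "time"
)

// timedDecode mirrors the pattern used in encoding.go: the start time is
// captured when the defer statement runs, and the deferred closure executes
// after the named return value _err has been set.
func timedDecode(fail bool) (_err error) {
    defer func(start time.Time) {
        // In the real code this is metrics.encodingTime.Record(...) with the
        // appropriate codec/action attribute set.
        fmt.Printf("decode took %v, err=%v\n", time.Since(start), _err)
    }(time.Now())

    time.Sleep(5 * time.Millisecond) // stand-in for the actual decoding work
    if fail {
        return errors.New("boom")
    }
    return nil
}

func main() {
    _ = timedDecode(false)
    _ = timedDecode(true)
}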
New file (+7)

@@ -0,0 +1,7 @@
+package encoding
+
+import "go.opentelemetry.io/otel/attribute"
+
+// GetMetricAttribute returns the attribute for metric collection, exported for
+// testing purposes.
+func (c *ZSTD[T]) GetMetricAttribute(t T) attribute.KeyValue { return c.getMetricAttribute(t) }

internal/encoding/encoding_test.go (+18)
@@ -9,6 +9,7 @@ import (
     "github.com/klauspost/compress/zstd"
     "github.com/stretchr/testify/require"
     cbg "github.com/whyrusleeping/cbor-gen"
+    "go.opentelemetry.io/otel/attribute"
 )
 
 var (
@@ -77,3 +78,20 @@ func TestZSTDLimits(t *testing.T) {
     var dest testValue
     require.ErrorContains(t, subject.Decode(tooLargeACompression, &dest), "decompressed size exceeds configured limit")
 }
+
+func TestZSTD_GetMetricAttribute(t *testing.T) {
+    t.Run("By Pointer", func(t *testing.T) {
+        subject, err := encoding.NewZSTD[*testValue]()
+        require.NoError(t, err)
+        require.Equal(t, attribute.String("type", "testValue"), subject.GetMetricAttribute(&testValue{}))
+    })
+    t.Run("By Value", func(t *testing.T) {
+        type anotherTestValue struct {
+            cbg.CBORUnmarshaler
+            cbg.CBORMarshaler
+        }
+        subject, err := encoding.NewZSTD[anotherTestValue]()
+        require.NoError(t, err)
+        require.Equal(t, attribute.String("type", "anotherTestValue"), subject.GetMetricAttribute(anotherTestValue{}))
+    })
+}
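
A hypothetical companion test, not part of this commit, sketching how the recorded histograms could be asserted with an in-memory reader from the OTel SDK. It reuses the testValue fixture already defined in encoding_test.go and would additionally import "context", "go.opentelemetry.io/otel", sdkmetric "go.opentelemetry.io/otel/sdk/metric", and "go.opentelemetry.io/otel/sdk/metric/metricdata".

func TestZSTD_RecordsEncodingMetrics(t *testing.T) {
    // Register an in-memory reader as the global meter provider; the encoding
    // package resolves its meter through otel.Meter, which delegates to the
    // provider registered here.
    reader := sdkmetric.NewManualReader()
    otel.SetMeterProvider(sdkmetric.NewMeterProvider(sdkmetric.WithReader(reader)))

    subject, err := encoding.NewZSTD[*testValue]()
    require.NoError(t, err)
    _, err = subject.Encode(&testValue{})
    require.NoError(t, err)

    // Collect whatever has been recorded and check that the encoding
    // instruments produced at least one data point.
    var rm metricdata.ResourceMetrics
    require.NoError(t, reader.Collect(context.Background(), &rm))
    require.NotEmpty(t, rm.ScopeMetrics)
}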

internal/encoding/metrics.go (new file, +38)
@@ -0,0 +1,38 @@
+package encoding
+
+import (
+    "github.com/filecoin-project/go-f3/internal/measurements"
+    "go.opentelemetry.io/otel"
+    "go.opentelemetry.io/otel/attribute"
+    "go.opentelemetry.io/otel/metric"
+)
+
+var (
+    attrCodecCbor     = attribute.String("codec", "cbor")
+    attrCodecZstd     = attribute.String("codec", "zstd")
+    attrActionEncode  = attribute.String("action", "encode")
+    attrActionDecode  = attribute.String("action", "decode")
+    attrSetCborEncode = attribute.NewSet(attrCodecCbor, attrActionEncode)
+    attrSetCborDecode = attribute.NewSet(attrCodecCbor, attrActionDecode)
+    attrSetZstdEncode = attribute.NewSet(attrCodecZstd, attrActionEncode)
+    attrSetZstdDecode = attribute.NewSet(attrCodecZstd, attrActionDecode)
+
+    meter = otel.Meter("f3/internal/encoding")
+
+    metrics = struct {
+        encodingTime        metric.Float64Histogram
+        zstdCompactionRatio metric.Float64Histogram
+    }{
+        encodingTime: measurements.Must(meter.Float64Histogram(
+            "f3_internal_encoding_time",
+            metric.WithDescription("The time spent on encoding/decoding in seconds."),
+            metric.WithUnit("s"),
+            metric.WithExplicitBucketBoundaries(0.001, 0.003, 0.005, 0.01, 0.03, 0.05, 0.1, 0.3, 0.5, 1.0, 2.0, 5.0, 10.0),
+        )),
+        zstdCompactionRatio: measurements.Must(meter.Float64Histogram(
+            "f3_internal_encoding_zstd_compaction_ratio",
+            metric.WithDescription("The ratio of compressed to uncompressed data size for zstd encoding."),
+            metric.WithExplicitBucketBoundaries(0.0, 0.1, 0.2, 0.5, 1.0, 2.0, 3.0, 4.0, 5.0, 10.0),
+        )),
+    }
+)
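
For completeness, a minimal sketch, not part of this commit, of registering a global meter provider so that the histograms above are actually exported. The stdout exporter is only an illustrative choice; any OTel metric exporter is wired the same way.

package main

import (
    "context"
    "time"

    "go.opentelemetry.io/otel"
    "go.opentelemetry.io/otel/exporters/stdout/stdoutmetric"
    sdkmetric "go.opentelemetry.io/otel/sdk/metric"
)

func main() {
    exporter, err := stdoutmetric.New()
    if err != nil {
        panic(err)
    }
    // Periodically collect and export everything recorded through the global
    // meter, including f3_internal_encoding_time and
    // f3_internal_encoding_zstd_compaction_ratio.
    provider := sdkmetric.NewMeterProvider(
        sdkmetric.WithReader(sdkmetric.NewPeriodicReader(exporter,
            sdkmetric.WithInterval(10*time.Second))),
    )
    defer func() { _ = provider.Shutdown(context.Background()) }()
    otel.SetMeterProvider(provider)

    // ... run the application; the encoding package's metrics are now exported.
}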
