
Commit be6f549

addtl linting
Signed-off-by: Owen Diehl <[email protected]>
1 parent 4625589 commit be6f549


6 files changed: +9 -37 lines


pkg/bloomcompactor/spec.go

+1 -1

@@ -115,7 +115,7 @@ func (s *SimpleBloomGenerator) populator(ctx context.Context) v1.BloomPopulatorF
             Chunks: toAdd,
         })

-        s.tokenizer.Populate(series, srcBlooms, chunkItersWithFP.itr, ch)
+        s.tokenizer.Populate(srcBlooms, chunkItersWithFP.itr, ch)

         if s.reporter != nil {
             s.reporter(series.Fingerprint)

pkg/storage/bloom/v1/bloom_tokenizer.go

+2 -4

@@ -5,11 +5,12 @@ import (

     "github.com/go-kit/log/level"

-    "github.com/grafana/loki/pkg/push"
     "github.com/grafana/loki/v3/pkg/iter"
     "github.com/grafana/loki/v3/pkg/logproto"
     "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter"

+    "github.com/grafana/loki/pkg/push"
+
     "github.com/grafana/loki/v3/pkg/util/encoding"
     util_log "github.com/grafana/loki/v3/pkg/util/log"
 )
@@ -97,7 +98,6 @@ func (bt *BloomTokenizer) newBloom() *Bloom {
 }

 func (bt *BloomTokenizer) Populate(
-    series *Series,
     blooms SizedIterator[*Bloom],
     chks Iterator[ChunkRefWithIter],
     ch chan *BloomCreation,
@@ -163,8 +163,6 @@ func (bt *BloomTokenizer) Populate(
         SourceBytesAdded: bytesAdded,
     }
     close(ch)
-    return
-
 }

 // addChunkToBloom adds the tokens from the given chunk to the given bloom.
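
Net effect of the tokenizer change: Populate no longer takes a *Series. Callers pass only the bloom iterator, the chunk iterator, and an output channel, and keep any per-series bookkeeping (such as the fingerprint reported in spec.go above) on their side. Below is a minimal caller sketch under the new signature, modeled on the populateAndConsumeBloom test helper further down; it is not part of this commit, it assumes it lives in the v1 package, and it assumes SourceBytesAdded is an integer count, which the diff does not state.

package v1

import "github.com/grafana/dskit/multierror"

// consumeBloomCreations is a hypothetical helper, not part of this commit.
// It shows the new call shape: launch Populate in a goroutine and drain the
// channel until Populate closes it, collecting errors rather than returning
// early so the producer never blocks on a send.
func consumeBloomCreations(
    bt *BloomTokenizer,
    blooms SizedIterator[*Bloom],
    chks Iterator[ChunkRefWithIter],
) (int, error) {
    ch := make(chan *BloomCreation)
    go bt.Populate(blooms, chks, ch) // no *Series argument anymore

    var (
        bytesAdded int // assumes SourceBytesAdded is an int
        errs       multierror.MultiError
    )
    for creation := range ch { // loop ends when Populate closes ch
        if creation.Err != nil {
            errs = append(errs, creation.Err)
            continue
        }
        bytesAdded += creation.SourceBytesAdded
    }
    return bytesAdded, errs.Err()
}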

pkg/storage/bloom/v1/bloom_tokenizer_test.go

+2 -29

@@ -8,8 +8,6 @@ import (
     "testing"
     "time"

-    "github.com/prometheus/prometheus/model/labels"
-
     "github.com/grafana/dskit/multierror"

     "github.com/grafana/loki/pkg/push"
@@ -101,8 +99,6 @@ func TestTokenizerPopulate(t *testing.T) {
     bt := NewBloomTokenizer(DefaultNGramLength, DefaultNGramSkip, 0, metrics)

     sbf := filter.NewScalableBloomFilter(1024, 0.01, 0.8)
-    var lbsList []labels.Labels
-    lbsList = append(lbsList, labels.FromStrings("foo", "bar"))

     memChunk := chunkenc.NewMemChunk(chunkenc.ChunkFormatV4, chunkenc.EncSnappy, chunkenc.ChunkHeadFormatFor(chunkenc.ChunkFormatV4), 256000, 1500000)
     _ = memChunk.Append(&push.Entry{
@@ -121,13 +117,9 @@ func TestTokenizerPopulate(t *testing.T) {
     bloom := Bloom{
         ScalableBloomFilter: *sbf,
     }
-    series := Series{
-        Fingerprint: model.Fingerprint(lbsList[0].Hash()),
-    }

     blooms, err := populateAndConsumeBloom(
         bt,
-        series,
         NewSliceIter([]*Bloom{&bloom}),
         NewSliceIter([]ChunkRefWithIter{{Ref: ChunkRef{},
             Itr: itr}}),
@@ -147,9 +139,6 @@ func TestBloomTokenizerPopulateWithoutPreexistingBloom(t *testing.T) {
     var testLine = "this is a log line"
     bt := NewBloomTokenizer(DefaultNGramLength, DefaultNGramSkip, 0, metrics)

-    var lbsList []labels.Labels
-    lbsList = append(lbsList, labels.FromStrings("foo", "bar"))
-
     memChunk := chunkenc.NewMemChunk(chunkenc.ChunkFormatV4, chunkenc.EncSnappy, chunkenc.ChunkHeadFormatFor(chunkenc.ChunkFormatV4), 256000, 1500000)
     _ = memChunk.Append(&push.Entry{
         Timestamp: time.Unix(0, 1),
@@ -164,13 +153,8 @@ func TestBloomTokenizerPopulateWithoutPreexistingBloom(t *testing.T) {
     )
     require.Nil(t, err)

-    series := Series{
-        Fingerprint: model.Fingerprint(lbsList[0].Hash()),
-    }
-
     blooms, err := populateAndConsumeBloom(
         bt,
-        series,
         NewEmptyIter[*Bloom](),
         NewSliceIter([]ChunkRefWithIter{{Ref: ChunkRef{},
             Itr: itr}}),
@@ -227,11 +211,6 @@ func TestTokenizerPopulateWontExceedMaxSize(t *testing.T) {
     itr, err := chunkRefItrFromLines(line)
     require.NoError(t, err)
     go bt.Populate(
-        &Series{
-            Chunks: ChunkRefs{
-                {},
-            },
-        },
         NewSliceIter([]*Bloom{
             {
                 *filter.NewScalableBloomFilter(1024, 0.01, 0.8),
@@ -258,13 +237,12 @@ func TestTokenizerPopulateWontExceedMaxSize(t *testing.T) {

 func populateAndConsumeBloom(
     bt *BloomTokenizer,
-    s Series,
     blooms SizedIterator[*Bloom],
     chks Iterator[ChunkRefWithIter],
 ) (res []*Bloom, err error) {
     var e multierror.MultiError
     ch := make(chan *BloomCreation)
-    go bt.Populate(&s, blooms, chks, ch)
+    go bt.Populate(blooms, chks, ch)
     for x := range ch {
         if x.Err != nil {
             e = append(e, x.Err)
@@ -281,8 +259,6 @@ func BenchmarkPopulateSeriesWithBloom(b *testing.B) {
     bt := NewBloomTokenizer(DefaultNGramLength, DefaultNGramSkip, 0, metrics)

     sbf := filter.NewScalableBloomFilter(1024, 0.01, 0.8)
-    var lbsList []labels.Labels
-    lbsList = append(lbsList, labels.FromStrings("foo", "bar"))

     memChunk := chunkenc.NewMemChunk(chunkenc.ChunkFormatV4, chunkenc.EncSnappy, chunkenc.ChunkHeadFormatFor(chunkenc.ChunkFormatV4), 256000, 1500000)
     _ = memChunk.Append(&push.Entry{
@@ -301,12 +277,9 @@ func BenchmarkPopulateSeriesWithBloom(b *testing.B) {
     bloom := Bloom{
         ScalableBloomFilter: *sbf,
     }
-    series := Series{
-        Fingerprint: model.Fingerprint(lbsList[0].Hash()),
-    }
+
     _, err = populateAndConsumeBloom(
         bt,
-        series,
         NewSliceIter([]*Bloom{&bloom}),
         NewSliceIter([]ChunkRefWithIter{{Ref: ChunkRef{},
             Itr: itr}}),

pkg/storage/bloom/v1/index.go

+1 -1

@@ -421,7 +421,7 @@ func (s *SeriesWithOffsets) Decode(
     var (
         err        error
         lastEnd    model.Time
-        lastOffset BloomOffset = previousOffset
+        lastOffset = previousOffset
     )
     for i := range s.Offsets {
         err = s.Offsets[i].Decode(dec, lastOffset)
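
The index.go hunk drops an explicit type that the initializer already implies, the kind of declaration reported by revive's var-declaration check (the commit message only says "linting", so the exact rule is an assumption). A generic, self-contained illustration with made-up names, not Loki code:

package main

import "time"

func main() {
    // Typically flagged: the time.Duration type merely repeats what the
    // right-hand side already establishes.
    var explicit time.Duration = 5 * time.Second

    // Preferred form after the lint fix: let the initializer carry the type.
    var inferred = 5 * time.Second

    _ = explicit
    _ = inferred
}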

pkg/storage/bloom/v1/util.go

+2 -2

@@ -16,8 +16,8 @@ const (
     magicNumber = uint32(0xCA7CAFE5)
     // Add new versions below
     V1 Version = iota
-    // V2 supports single series blooms encoded over multipe pages
-    // to accomodate larger single series
+    // V2 supports single series blooms encoded over multiple pages
+    // to accommodate larger single series
     V2
 )

pkg/storage/bloom/v1/versioned_builder.go

+1

@@ -129,6 +129,7 @@ type SeriesWithBloom struct {
     Bloom *Bloom
 }

+//nolint:revive
 type V1Builder struct {
     opts BlockOptions

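The //nolint:revive directive added above is the standard golangci-lint way of silencing a linter for the declaration that immediately follows; the diff does not say which revive rule fires on V1Builder, so the reason given in this sketch is only an example. A hypothetical, non-Loki illustration of the directive form:

package example

import "fmt"

// V1Thing is a hypothetical type, not from Loki. The directive below tells
// golangci-lint to ignore revive findings for the declaration it precedes;
// an optional trailing comment can record why.
//
//nolint:revive // name kept for parity with the on-disk format version
type V1Thing struct {
    ID int
}

// Describe renders the thing for logs.
func Describe(t V1Thing) string {
    return fmt.Sprintf("v1 thing %d", t.ID)
}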