
Commit 638b3e5

only skip bloom pages when there is no error
Signed-off-by: Owen Diehl <[email protected]>
1 parent 3ca36fa commit 638b3e5
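
In plain terms: BloomPageDecoder previously reported a too-large bloom page by returning ErrPageTooLarge alongside skip=true, so callers could not tell a benign skip from a real failure. After this commit a too-large page yields skip=true with a nil error, and callers skip a series only when no error occurred. A minimal self-contained sketch of the new three-value contract (decodePage and its parameters are hypothetical stand-ins, not the actual Loki code):

package main

import (
	"errors"
	"fmt"
)

const maxPageSize = 64 << 20 // mirrors DefaultMaxPageSize (64MB)

// decodePage is a stand-in for BloomPageDecoder's new contract: a page
// larger than the limit is reported as skip=true with a nil error, while
// real decode failures return skip=false and a non-nil error.
func decodePage(pageLen int, corrupt bool) (data []byte, skip bool, err error) {
	if pageLen > maxPageSize {
		return nil, true, nil // too large: skip, but not an error
	}
	if corrupt {
		return nil, false, errors.New("decoding bloom page")
	}
	return make([]byte, pageLen), false, nil
}

func main() {
	cases := []struct {
		len     int
		corrupt bool
	}{
		{128 << 20, false}, // too large: skipped, no error
		{1024, true},       // decode failure: error, not a skip
		{1024, false},      // normal load
	}
	for _, tc := range cases {
		_, skip, err := decodePage(tc.len, tc.corrupt)
		fmt.Printf("skip=%v err=%v\n", skip, err)
	}
}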

5 files changed: +15, -12 lines


pkg/storage/bloom/v1/block.go (+1, -1)

@@ -144,7 +144,7 @@ func (bq *BlockQuerier) Seek(fp model.Fingerprint) error {
 func (bq *BlockQuerier) Next() bool {
 	for bq.series.Next() {
 		series := bq.series.At()
-		if ok := bq.blooms.LoadOffset(series.Offset); !ok {
+		if skip := bq.blooms.LoadOffset(series.Offset); skip {
 			// can't seek to the desired bloom, likely because the page was too large to load
 			// so we skip this series and move on to the next
 			continue
pkg/storage/bloom/v1/bloom.go (+1, -2)

@@ -17,7 +17,6 @@ import (
 // gateways to OOM.
 // Figure out a decent default maximum page size that we can process.
 var DefaultMaxPageSize = 64 << 20 // 64MB
-var ErrPageTooLarge = errors.Errorf("bloom page too large")

 type Bloom struct {
 	filter.ScalableBloomFilter
@@ -290,7 +289,7 @@ func (b *BloomBlock) BloomPageDecoder(r io.ReadSeeker, pageIdx int, maxPageSize
 	if page.Len > maxPageSize {
 		metrics.pagesSkipped.WithLabelValues(pageTypeBloom, skipReasonTooLarge).Inc()
 		metrics.bytesSkipped.WithLabelValues(pageTypeBloom, skipReasonTooLarge).Add(float64(page.DecompressedLen))
-		return nil, true, ErrPageTooLarge
+		return nil, true, nil
 	}

 	if _, err = r.Seek(int64(page.Offset), io.SeekStart); err != nil {

pkg/storage/bloom/v1/bloom_querier.go (+8, -5)

@@ -42,7 +42,9 @@ func (it *LazyBloomIter) ensureInit() {
 	}
 }

-func (it *LazyBloomIter) LoadOffset(offset BloomOffset) (ok bool) {
+// LoadOffset returns whether the bloom page at the given offset should
+// be skipped (due to being too large) _and_ there's no error
+func (it *LazyBloomIter) LoadOffset(offset BloomOffset) (skip bool) {
 	it.ensureInit()

 	// if we need a different page or the current page hasn't been loaded,
@@ -61,21 +63,22 @@ func (it *LazyBloomIter) LoadOffset(offset BloomOffset) (ok bool) {
 			return false
 		}
 		decoder, skip, err := it.b.blooms.BloomPageDecoder(r, offset.Page, it.m, it.b.metrics)
-		if skip {
-			return false
-		}
 		if err != nil {
 			it.err = errors.Wrap(err, "loading bloom page")
 			return false
 		}

+		if skip {
+			return true
+		}
+
 		it.curPageIndex = offset.Page
 		it.curPage = decoder

 	}

 	it.curPage.Seek(offset.ByteOffset)
-	return true
+	return false
 }

 func (it *LazyBloomIter) Next() bool {
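
Note the inverted boolean: LoadOffset used to return ok (true meant the page loaded), whereas it now returns skip (true means move on to the next series), with decode errors stashed on the iterator instead of being folded into the boolean. A hedged sketch of the caller-side pattern this enables, using an assumed interface rather than the real LazyBloomIter:

// bloomIter is a hypothetical slice of the iterator surface, assumed
// here purely for illustration.
type bloomIter interface {
	LoadOffset(offset int) (skip bool) // true: page too large, skip the series
	Err() error                        // non-nil: a real decode/seek failure
}

// scan shows the intended call pattern: a skip is benign and the loop
// keeps going, while an error (surfaced via Err) is terminal.
func scan(it bloomIter, offsets []int) error {
	for _, off := range offsets {
		if it.LoadOffset(off) {
			continue // page too large: skip this series, keep scanning
		}
		if err := it.Err(); err != nil {
			return err // a genuine failure: stop
		}
		// ... query the loaded bloom page here ...
	}
	return nil
}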

pkg/storage/bloom/v1/fuse.go (+3, -2)

@@ -99,11 +99,12 @@ func (fq *FusedQuerier) Run() error {
 		}

 		// Now that we've found the series, we need to find the unpack the bloom
-		ok := fq.bq.blooms.LoadOffset(series.Offset)
-		if !ok {
+		skip := fq.bq.blooms.LoadOffset(series.Offset)
+		if skip {
 			// could not seek to the desired bloom,
 			// likely because the page was too large to load
 			fq.noRemovals(nextBatch, fp)
+			continue
 		}

 		if !fq.bq.blooms.Next() {
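
The added continue is the behavioral half of this hunk: previously a skipped series fell through to the blooms.Next() call below and was processed as though its page had loaded. A small sketch of the fixed loop shape, with placeholder types standing in for the FusedQuerier internals:

// seriesRef and pageLoader are placeholders, not the actual types.
type seriesRef struct{ Offset int }

type pageLoader interface {
	LoadOffset(offset int) (skip bool)
}

// runBatch answers a skipped series conservatively (no chunks removed)
// and then abandons it; without the continue, the loop would go on to
// touch a page that was never loaded.
func runBatch(l pageLoader, batch []seriesRef, noRemovals, process func(seriesRef)) {
	for _, s := range batch {
		if l.LoadOffset(s.Offset) { // skip: page too large
			noRemovals(s) // respond conservatively
			continue      // do not process the unloaded page
		}
		process(s)
	}
}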

pkg/storage/bloom/v1/fuse_test.go (+2, -2)

@@ -229,10 +229,10 @@ func TestLazyBloomIter_Seek_ResetError(t *testing.T) {
 			seekable = false
 		}
 		if !seekable {
-			require.False(t, querier.blooms.LoadOffset(series.Offset))
+			require.True(t, querier.blooms.LoadOffset(series.Offset))
 			continue
 		}
-		require.True(t, querier.blooms.LoadOffset(series.Offset))
+		require.False(t, querier.blooms.LoadOffset(series.Offset))
 		require.True(t, querier.blooms.Next())
 		require.NoError(t, querier.blooms.Err())
 	}
