@@ -288,6 +288,45 @@ func BenchmarkPopulateSeriesWithBloom(b *testing.B) {
 	}
 }
 
+func TestTokenizerClearsCacheBetweenPopulateCalls(t *testing.T) {
+	bt := NewBloomTokenizer(DefaultNGramLength, DefaultNGramSkip, 0, NewMetrics(nil))
+	line := "foobarbazz"
+	var blooms []*Bloom
+
+	for i := 0; i < 2; i++ {
+		ch := make(chan *BloomCreation)
+		itr, err := chunkRefItrFromLines(line)
+		require.NoError(t, err)
+		go bt.Populate(
+			NewEmptyIter[*Bloom](),
+			NewSliceIter([]ChunkRefWithIter{
+				{
+					Ref: ChunkRef{},
+					Itr: itr,
+				},
+			}),
+			ch,
+		)
+		var ct int
+		for created := range ch {
+			blooms = append(blooms, created.Bloom)
+			ct++
+		}
+		// ensure we created one bloom for each call
+		require.Equal(t, 1, ct)
+
+	}
+
+	for _, bloom := range blooms {
+		toks := bt.lineTokenizer.Tokens(line)
+		for toks.Next() {
+			token := toks.At()
+			require.True(t, bloom.Test(token))
+		}
+		require.NoError(t, toks.Err())
+	}
+}
+
 func BenchmarkMapClear(b *testing.B) {
 	bt := NewBloomTokenizer(DefaultNGramLength, DefaultNGramSkip, 0, metrics)
 	for i := 0; i < b.N; i++ {
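For context on what this test protects: if the tokenizer reuses a per-call dedup cache across `Populate` calls without resetting it, tokens recorded while building the first bloom are skipped when building the second, so the second bloom would fail the membership checks above. Below is a minimal, self-contained sketch of that failure mode and the reset that prevents it; `tokenCache`, `clear`, and `seen` are illustrative names, not identifiers from this repository:

```go
package main

import "fmt"

// tokenCache is an illustrative stand-in for a tokenizer's per-call dedup
// cache. If it is not cleared between Populate calls, tokens already seen
// during the first call are silently skipped during the second, and the
// second bloom filter ends up missing them.
type tokenCache map[string]struct{}

// clear resets the cache in place. The range-and-delete loop over the whole
// map is recognized by the Go compiler and lowered to a single runtime clear.
func (c tokenCache) clear() {
	for k := range c {
		delete(c, k)
	}
}

// seen reports whether tok was already recorded, recording it as a side effect.
func (c tokenCache) seen(tok string) bool {
	if _, ok := c[tok]; ok {
		return true
	}
	c[tok] = struct{}{}
	return false
}

func main() {
	cache := tokenCache{}
	for call := 1; call <= 2; call++ {
		cache.clear() // without this reset, the second call would add zero tokens
		added := 0
		for _, tok := range []string{"foo", "oob", "oba"} {
			if !cache.seen(tok) {
				added++
			}
		}
		fmt.Printf("call %d added %d tokens\n", call, added)
	}
}
```

Clearing the map in place rather than reallocating it keeps the backing storage warm across calls, which is presumably the behavior the `BenchmarkMapClear` benchmark in the surrounding context measures.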