diff --git a/internal/core/data.go b/internal/core/data.go index 122d255c5f..d84186826c 100644 --- a/internal/core/data.go +++ b/internal/core/data.go @@ -17,40 +17,22 @@ import ( ) // Span is a range of keys from [Start, End). -type Span interface { - // Start returns the starting key of the Span. - Start() keys.DataStoreKey - // End returns the ending key of the Span. - End() keys.DataStoreKey - // Compare returns -1 if the provided span is less, 0 if it is equal, and 1 if its greater. - Compare(Span) SpanComparisonResult -} +type Span struct { + // Start represents the starting key of the Span. + Start keys.Walkable -type span struct { - start keys.DataStoreKey - end keys.DataStoreKey + // End represents the ending key of the Span. + End keys.Walkable } -var _ Span = span{} - // NewSpan creates a new Span from the provided start and end keys. -func NewSpan(start, end keys.DataStoreKey) Span { - return span{ - start: start, - end: end, +func NewSpan(start, end keys.Walkable) Span { + return Span{ + Start: start, + End: end, } } -// Start returns the starting key of the Span. -func (s span) Start() keys.DataStoreKey { - return s.start -} - -// End returns the ending key of the Span. -func (s span) End() keys.DataStoreKey { - return s.end -} - // SpanComparisonResult is the result of comparing two spans. type SpanComparisonResult uint @@ -73,18 +55,18 @@ const ( // Compares two spans returning how the compare to each other. // If the end of one span is adjacent to the other (with no gap possible) // then those ends are considered equal. -func (this span) Compare(other Span) SpanComparisonResult { +func (this Span) Compare(other Span) SpanComparisonResult { if this == other { return Equal } - thisStart := this.start.ToString() - thisEnd := this.end.ToString() - otherStart := other.Start().ToString() - otherEnd := other.End().ToString() + thisStart := this.Start.ToString() + thisEnd := this.End.ToString() + otherStart := other.Start.ToString() + otherEnd := other.End.ToString() if thisStart < otherStart { - if thisEnd == otherStart || isAdjacent(this.end, other.Start()) { + if thisEnd == otherStart || isAdjacent(this.End, other.Start) { return StartBeforeEndEqualToStart } @@ -133,33 +115,19 @@ func (this span) Compare(other Span) SpanComparisonResult { } } - if thisStart == otherEnd || isAdjacent(this.start, other.End()) { + if thisStart == otherEnd || isAdjacent(this.Start, other.End) { return StartEqualToEndEndAfter } return After } -func isAdjacent(this keys.DataStoreKey, other keys.DataStoreKey) bool { +func isAdjacent(this keys.Walkable, other keys.Walkable) bool { return len(this.ToString()) == len(other.ToString()) && (this.PrefixEnd().ToString() == other.ToString() || this.ToString() == other.PrefixEnd().ToString()) } -// Spans is a collection of individual spans. -type Spans struct { - HasValue bool - Value []Span -} - -// NewSpans creates a new Spans from the provided spans. -func NewSpans(spans ...Span) Spans { - return Spans{ - HasValue: true, - Value: spans, - } -} - // Merges an unordered, potentially overlapping and/or duplicated collection of Spans into // a unique set in ascending order, where overlapping spans are merged into a single span. // Will handle spans with keys of different lengths, where one might be a prefix of another. 
@@ -186,7 +154,7 @@ func MergeAscending(spans []Span) []Span { } // Then we insert - newArray[i] = NewSpan(span.Start(), span.End()) + newArray[i] = NewSpan(span.Start, span.End) // Move the values prior to the new one across for j := 0; j < i; j++ { @@ -197,12 +165,12 @@ func MergeAscending(spans []Span) []Span { // Exit the unique-span loop, this span has been handled i = len(uniqueSpans) case StartBeforeEndEqualToStart, StartBeforeEndWithin, StartBeforeEndEqual: - uniqueSpans[i] = NewSpan(span.Start(), uniqueSpan.End()) + uniqueSpans[i] = NewSpan(span.Start, uniqueSpan.End) uniqueSpanFound = true i++ case StartBeforeEndAfter: - uniqueSpans = removeBefore(uniqueSpans, i, span.End().ToString()) - uniqueSpans[i] = NewSpan(span.Start(), span.End()) + uniqueSpans = removeBefore(uniqueSpans, i, span.End.ToString()) + uniqueSpans[i] = NewSpan(span.Start, span.End) uniqueSpanFound = true // Exit the unique-span loop, this span has been handled i = len(uniqueSpans) @@ -211,8 +179,8 @@ func MergeAscending(spans []Span) []Span { // Do nothing, span is contained within an existing unique-span i = len(uniqueSpans) case StartEqualEndAfter, StartWithinEndAfter, StartEqualToEndEndAfter: - uniqueSpans = removeBefore(uniqueSpans, i, span.End().ToString()) - uniqueSpans[i] = NewSpan(uniqueSpan.Start(), span.End()) + uniqueSpans = removeBefore(uniqueSpans, i, span.End.ToString()) + uniqueSpans[i] = NewSpan(uniqueSpan.Start, span.End) uniqueSpanFound = true // Exit the unique-span loop, this span has been handled i = len(uniqueSpans) @@ -234,7 +202,7 @@ func MergeAscending(spans []Span) []Span { func removeBefore(spans []Span, startIndex int, end string) []Span { indexOfLastMatchingItem := -1 for i := startIndex; i < len(spans); i++ { - if spans[i].End().ToString() <= end { + if spans[i].End.ToString() <= end { indexOfLastMatchingItem = i } } diff --git a/internal/core/data_test.go b/internal/core/data_test.go index d55851b795..154441819e 100644 --- a/internal/core/data_test.go +++ b/internal/core/data_test.go @@ -34,8 +34,8 @@ func TestMergeAscending_ReturnsSingle_GivenSingle(t *testing.T) { result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSecondBeforeFirst_GivenKeysInReverseOrder(t *testing.T) { @@ -52,10 +52,10 @@ func TestMergeAscending_ReturnsSecondBeforeFirst_GivenKeysInReverseOrder(t *test result := MergeAscending(input) assert.Len(t, result, 2) - assert.Equal(t, start2, result[0].Start()) - assert.Equal(t, end2, result[0].End()) - assert.Equal(t, start1, result[1].Start()) - assert.Equal(t, end1, result[1].End()) + assert.Equal(t, start2, result[0].Start) + assert.Equal(t, end2, result[0].End) + assert.Equal(t, start1, result[1].Start) + assert.Equal(t, end1, result[1].End) } func TestMergeAscending_ReturnsItemsInOrder_GivenKeysInMixedOrder(t *testing.T) { @@ -75,13 +75,13 @@ func TestMergeAscending_ReturnsItemsInOrder_GivenKeysInMixedOrder(t *testing.T) result := MergeAscending(input) assert.Len(t, result, 3) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) // Span 3 should be returned between one and two - assert.Equal(t, start3, result[1].Start()) - assert.Equal(t, end3, result[1].End()) - assert.Equal(t, start2, result[2].Start()) - assert.Equal(t, end2, result[2].End()) + 
assert.Equal(t, start3, result[1].Start) + assert.Equal(t, end3, result[1].End) + assert.Equal(t, start2, result[2].Start) + assert.Equal(t, end2, result[2].End) } func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndEqualToStart(t *testing.T) { @@ -97,8 +97,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndEqualToStart(t *testing result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start2, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start2, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentToStart(t *testing.T) { @@ -114,8 +114,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentToStart(t *test result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start2, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start2, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndWithin(t *testing.T) { @@ -131,8 +131,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndWithin(t *testing.T) { result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start2, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start2, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndWithin(t *testing.T) { @@ -148,8 +148,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndWithin(t *testing.T) result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start2, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start2, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndWithinEndPrefix(t *testing.T) { @@ -165,8 +165,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndWithinEndPrefix(t *test result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start2, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start2, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndWithinEndPrefix(t *testing.T) { @@ -182,8 +182,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndWithinEndPrefix(t *te result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start2, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start2, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndEqual(t *testing.T) { @@ -199,8 +199,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndEqual(t *testing.T) { result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start2, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start2, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentAndBefore(t *testing.T) { @@ -216,8 +216,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentAndBefore(t *te result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start2, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start2, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentAndGreater(t 
*testing.T) { @@ -233,8 +233,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartBeforeEndAdjacentAndGreater(t *t result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start2, result[0].Start()) - assert.Equal(t, end2, result[0].End()) + assert.Equal(t, start2, result[0].Start) + assert.Equal(t, end2, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndEqual(t *testing.T) { @@ -250,8 +250,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndEqual(t *testing.T) { result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start2, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start2, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndAdjacentAndBefore(t *testing.T) { @@ -267,8 +267,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndAdjacentAndBefore(t * result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start2, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start2, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndAdjacentAndAfter(t *testing.T) { @@ -284,8 +284,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartPrefixesEndAdjacentAndAfter(t *t result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start2, result[0].Start()) - assert.Equal(t, end2, result[0].End()) + assert.Equal(t, start2, result[0].Start) + assert.Equal(t, end2, result[0].End) } func TestMergeAscending_ReturnsMiddleSpansMerged_GivenSpanCoveringMiddleSpans(t *testing.T) { @@ -310,13 +310,13 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenSpanCoveringMiddleSpans(t result := MergeAscending(input) assert.Len(t, result, 3) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) // Spans 2 and 3 are within span 5 - assert.Equal(t, start5, result[1].Start()) - assert.Equal(t, end5, result[1].End()) - assert.Equal(t, start4, result[2].Start()) - assert.Equal(t, end4, result[2].End()) + assert.Equal(t, start5, result[1].Start) + assert.Equal(t, end5, result[1].End) + assert.Equal(t, start4, result[2].Start) + assert.Equal(t, end4, result[2].End) } func TestMergeAscending_ReturnsSingle_GivenStartEqualEndWithin(t *testing.T) { @@ -332,8 +332,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartEqualEndWithin(t *testing.T) { result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartEqualEndWithinEndPrefix(t *testing.T) { @@ -349,8 +349,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartEqualEndWithinEndPrefix(t *testi result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenDuplicates(t *testing.T) { @@ -364,8 +364,8 @@ func TestMergeAscending_ReturnsSingle_GivenDuplicates(t *testing.T) { result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start1, result[0].Start) + 
assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartWithinEndWithin(t *testing.T) { @@ -381,8 +381,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndWithin(t *testing.T) { result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartWithinEndWithinEndPrefix(t *testing.T) { @@ -398,8 +398,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndWithinEndPrefix(t *test result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartWithinEndEqual(t *testing.T) { @@ -415,8 +415,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndEqual(t *testing.T) { result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartWithinEndAdjacentAndBefore(t *testing.T) { @@ -432,8 +432,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndAdjacentAndBefore(t *te result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartWithinEndAdjacentAndAfter(t *testing.T) { @@ -449,8 +449,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartWithinEndAdjacentAndAfter(t *tes result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end2, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end2, result[0].End) } func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartEqualEndAfterSpanCoveringMiddleSpans( @@ -477,13 +477,13 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartEqualEndAfterSpanCove result := MergeAscending(input) assert.Len(t, result, 3) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) // Spans 2 and 3 are within span 5 - assert.Equal(t, start5, result[1].Start()) - assert.Equal(t, end5, result[1].End()) - assert.Equal(t, start4, result[2].Start()) - assert.Equal(t, end4, result[2].End()) + assert.Equal(t, start5, result[1].Start) + assert.Equal(t, end5, result[1].End) + assert.Equal(t, start4, result[2].Start) + assert.Equal(t, end4, result[2].End) } func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartWithinEndAfterSpanCoveringMiddleSpans( @@ -510,12 +510,12 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartWithinEndAfterSpanCov result := MergeAscending(input) assert.Len(t, result, 3) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) - assert.Equal(t, start2, result[1].Start()) - assert.Equal(t, end5, result[1].End()) - assert.Equal(t, start4, result[2].Start()) - assert.Equal(t, end4, result[2].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) + assert.Equal(t, start2, result[1].Start) + 
assert.Equal(t, end5, result[1].End) + assert.Equal(t, start4, result[2].Start) + assert.Equal(t, end4, result[2].End) } func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartEqualToEndEndAfterSpanCoveringMiddleSpans( @@ -542,12 +542,12 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartEqualToEndEndAfterSpa result := MergeAscending(input) assert.Len(t, result, 3) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) - assert.Equal(t, start2, result[1].Start()) - assert.Equal(t, end5, result[1].End()) - assert.Equal(t, start4, result[2].Start()) - assert.Equal(t, end4, result[2].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) + assert.Equal(t, start2, result[1].Start) + assert.Equal(t, end5, result[1].End) + assert.Equal(t, start4, result[2].Start) + assert.Equal(t, end4, result[2].End) } func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartAdjacentAndBeforeEndEndAfterSpanCoveringMiddleSpans( @@ -574,12 +574,12 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartAdjacentAndBeforeEndE result := MergeAscending(input) assert.Len(t, result, 3) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) - assert.Equal(t, start2, result[1].Start()) - assert.Equal(t, end5, result[1].End()) - assert.Equal(t, start4, result[2].Start()) - assert.Equal(t, end4, result[2].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) + assert.Equal(t, start2, result[1].Start) + assert.Equal(t, end5, result[1].End) + assert.Equal(t, start4, result[2].Start) + assert.Equal(t, end4, result[2].End) } func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartAdjacentAndAfterEndEndAfterSpanCoveringMiddleSpans( @@ -606,12 +606,12 @@ func TestMergeAscending_ReturnsMiddleSpansMerged_GivenStartAdjacentAndAfterEndEn result := MergeAscending(input) assert.Len(t, result, 3) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) - assert.Equal(t, start2, result[1].Start()) - assert.Equal(t, end5, result[1].End()) - assert.Equal(t, start4, result[2].Start()) - assert.Equal(t, end4, result[2].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) + assert.Equal(t, start2, result[1].Start) + assert.Equal(t, end5, result[1].End) + assert.Equal(t, start4, result[2].Start) + assert.Equal(t, end4, result[2].End) } func TestMergeAscending_ReturnsTwoItems_GivenSecondItemAfterFirst(t *testing.T) { @@ -627,10 +627,10 @@ func TestMergeAscending_ReturnsTwoItems_GivenSecondItemAfterFirst(t *testing.T) result := MergeAscending(input) assert.Len(t, result, 2) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end1, result[0].End()) - assert.Equal(t, start2, result[1].Start()) - assert.Equal(t, end2, result[1].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end1, result[0].End) + assert.Equal(t, start2, result[1].Start) + assert.Equal(t, end2, result[1].End) } func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndEqual(t *testing.T) { @@ -646,8 +646,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndEqual(t * result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end2, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end2, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndAdjacentAndAfter( @@ -665,8 +665,8 @@ 
func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndAdjacentA result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end2, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end2, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndAfter(t *testing.T) { @@ -682,8 +682,8 @@ func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndBeforeEndEndAfter(t * result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end2, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end2, result[0].End) } func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndAfterEndEndAfter(t *testing.T) { @@ -699,6 +699,6 @@ func TestMergeAscending_ReturnsSingle_GivenStartAdjacentAndAfterEndEndAfter(t *t result := MergeAscending(input) assert.Len(t, result, 1) - assert.Equal(t, start1, result[0].Start()) - assert.Equal(t, end2, result[0].End()) + assert.Equal(t, start1, result[0].Start) + assert.Equal(t, end2, result[0].End) } diff --git a/internal/db/collection_get.go b/internal/db/collection_get.go index f2db5f0f8c..e68df05df4 100644 --- a/internal/db/collection_get.go +++ b/internal/db/collection_get.go @@ -73,7 +73,7 @@ func (c *collection) get( // construct target DS key from DocID. targetKey := base.MakeDataStoreKeyWithCollectionAndDocID(c.Description(), primaryKey.DocID) // run the doc fetcher - err = df.Start(ctx, core.NewSpans(core.NewSpan(targetKey, targetKey.PrefixEnd()))) + err = df.Start(ctx, core.NewSpan(targetKey, targetKey.PrefixEnd())) if err != nil { _ = df.Close() return nil, err diff --git a/internal/db/collection_index.go b/internal/db/collection_index.go index b1baad8369..f268e14f2e 100644 --- a/internal/db/collection_index.go +++ b/internal/db/collection_index.go @@ -317,7 +317,7 @@ func (c *collection) iterateAllDocs( return errors.Join(err, df.Close()) } start := base.MakeDataStoreKeyWithCollectionDescription(c.Description()) - spans := core.NewSpans(core.NewSpan(start, start.PrefixEnd())) + spans := core.NewSpan(start, start.PrefixEnd()) err = df.Start(ctx, spans) if err != nil { diff --git a/internal/db/fetcher/dag.go b/internal/db/fetcher/dag.go index 395354fc08..723b821a97 100644 --- a/internal/db/fetcher/dag.go +++ b/internal/db/fetcher/dag.go @@ -12,21 +12,17 @@ package fetcher import ( "context" - "sort" - "strings" "github.com/ipfs/go-cid" dsq "github.com/ipfs/go-datastore/query" "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/internal/core" "github.com/sourcenetwork/defradb/internal/keys" ) // HeadFetcher is a utility to incrementally fetch all the MerkleCRDT heads of a given doc/field. type HeadFetcher struct { - spans core.Spans fieldId immutable.Option[string] kvIter dsq.Results @@ -35,33 +31,13 @@ type HeadFetcher struct { func (hf *HeadFetcher) Start( ctx context.Context, txn datastore.Txn, - spans core.Spans, + prefix keys.HeadStoreKey, fieldId immutable.Option[string], ) error { - if len(spans.Value) == 0 { - spans = core.NewSpans( - core.NewSpan( - keys.DataStoreKey{}, - keys.DataStoreKey{}.PrefixEnd(), - ), - ) - } - - if len(spans.Value) > 1 { - // if we have multiple spans, we need to sort them by their start position - // so we can do a single iterative sweep - sort.Slice(spans.Value, func(i, j int) bool { - // compare by strings if i < j. 
- // apply the '!= df.reverse' to reverse the sort - // if we need to - return (strings.Compare(spans.Value[i].Start().ToString(), spans.Value[j].Start().ToString()) < 0) - }) - } - hf.spans = spans hf.fieldId = fieldId q := dsq.Query{ - Prefix: hf.spans.Value[0].Start().ToString(), + Prefix: prefix.ToString(), Orders: []dsq.Order{dsq.OrderByKey{}}, } diff --git a/internal/db/fetcher/errors.go b/internal/db/fetcher/errors.go index 22f0c8b182..0a8a7d21b4 100644 --- a/internal/db/fetcher/errors.go +++ b/internal/db/fetcher/errors.go @@ -18,39 +18,37 @@ import ( ) const ( - errFieldIdNotFound string = "unable to find SchemaFieldDescription for given FieldId" - errFailedToDecodeCIDForVFetcher string = "failed to decode CID for VersionedFetcher" - errFailedToSeek string = "seek failed" - errFailedToMergeState string = "failed merging state" - errVFetcherFailedToFindBlock string = "(version fetcher) failed to find block in blockstore" - errVFetcherFailedToGetBlock string = "(version fetcher) failed to get block in blockstore" - errVFetcherFailedToWriteBlock string = "(version fetcher) failed to write block to blockstore" - errVFetcherFailedToDecodeNode string = "(version fetcher) failed to decode protobuf" - errVFetcherFailedToGetDagLink string = "(version fetcher) failed to get node link from DAG" - errFailedToGetDagNode string = "failed to get DAG Node" - errMissingMapper string = "missing document mapper" - errInvalidInOperatorValue string = "invalid _in/_nin value" - errInvalidFilterOperator string = "invalid filter operator is provided" - errNotSupportedKindByIndex string = "kind is not supported by index" - errUnexpectedTypeValue string = "unexpected type value" + errFieldIdNotFound string = "unable to find SchemaFieldDescription for given FieldId" + errFailedToSeek string = "seek failed" + errFailedToMergeState string = "failed merging state" + errVFetcherFailedToFindBlock string = "(version fetcher) failed to find block in blockstore" + errVFetcherFailedToGetBlock string = "(version fetcher) failed to get block in blockstore" + errVFetcherFailedToWriteBlock string = "(version fetcher) failed to write block to blockstore" + errVFetcherFailedToDecodeNode string = "(version fetcher) failed to decode protobuf" + errVFetcherFailedToGetDagLink string = "(version fetcher) failed to get node link from DAG" + errFailedToGetDagNode string = "failed to get DAG Node" + errMissingMapper string = "missing document mapper" + errInvalidInOperatorValue string = "invalid _in/_nin value" + errInvalidFilterOperator string = "invalid filter operator is provided" + errNotSupportedKindByIndex string = "kind is not supported by index" + errUnexpectedTypeValue string = "unexpected type value" ) var ( - ErrFieldIdNotFound = errors.New(errFieldIdNotFound) - ErrFailedToDecodeCIDForVFetcher = errors.New(errFailedToDecodeCIDForVFetcher) - ErrFailedToSeek = errors.New(errFailedToSeek) - ErrFailedToMergeState = errors.New(errFailedToMergeState) - ErrVFetcherFailedToFindBlock = errors.New(errVFetcherFailedToFindBlock) - ErrVFetcherFailedToGetBlock = errors.New(errVFetcherFailedToGetBlock) - ErrVFetcherFailedToWriteBlock = errors.New(errVFetcherFailedToWriteBlock) - ErrVFetcherFailedToDecodeNode = errors.New(errVFetcherFailedToDecodeNode) - ErrVFetcherFailedToGetDagLink = errors.New(errVFetcherFailedToGetDagLink) - ErrFailedToGetDagNode = errors.New(errFailedToGetDagNode) - ErrMissingMapper = errors.New(errMissingMapper) - ErrSingleSpanOnly = errors.New("spans must contain only a single entry") - 
ErrInvalidInOperatorValue = errors.New(errInvalidInOperatorValue) - ErrInvalidFilterOperator = errors.New(errInvalidFilterOperator) - ErrUnexpectedTypeValue = errors.New(errUnexpectedTypeValue) + ErrFieldIdNotFound = errors.New(errFieldIdNotFound) + ErrFailedToSeek = errors.New(errFailedToSeek) + ErrFailedToMergeState = errors.New(errFailedToMergeState) + ErrVFetcherFailedToFindBlock = errors.New(errVFetcherFailedToFindBlock) + ErrVFetcherFailedToGetBlock = errors.New(errVFetcherFailedToGetBlock) + ErrVFetcherFailedToWriteBlock = errors.New(errVFetcherFailedToWriteBlock) + ErrVFetcherFailedToDecodeNode = errors.New(errVFetcherFailedToDecodeNode) + ErrVFetcherFailedToGetDagLink = errors.New(errVFetcherFailedToGetDagLink) + ErrFailedToGetDagNode = errors.New(errFailedToGetDagNode) + ErrMissingMapper = errors.New(errMissingMapper) + ErrSingleSpanOnly = errors.New("spans must contain only a single entry") + ErrInvalidInOperatorValue = errors.New(errInvalidInOperatorValue) + ErrInvalidFilterOperator = errors.New(errInvalidFilterOperator) + ErrUnexpectedTypeValue = errors.New(errUnexpectedTypeValue) ) // NewErrFieldIdNotFound returns an error indicating that the given FieldId was not found. @@ -58,11 +56,6 @@ func NewErrFieldIdNotFound(fieldId uint32) error { return errors.New(errFieldIdNotFound, errors.NewKV("FieldId", fieldId)) } -// NewErrFailedToDecodeCIDForVFetcher returns an error indicating that the given CID could not be decoded. -func NewErrFailedToDecodeCIDForVFetcher(inner error) error { - return errors.Wrap(errFailedToDecodeCIDForVFetcher, inner) -} - // NewErrFailedToSeek returns an error indicating that the given target could not be seeked to. func NewErrFailedToSeek(target any, inner error) error { return errors.Wrap(errFailedToSeek, inner, errors.NewKV("Target", target)) diff --git a/internal/db/fetcher/fetcher.go b/internal/db/fetcher/fetcher.go index 62a03a4d17..0ca828c4b2 100644 --- a/internal/db/fetcher/fetcher.go +++ b/internal/db/fetcher/fetcher.go @@ -72,7 +72,7 @@ type Fetcher interface { reverse bool, showDeleted bool, ) error - Start(ctx context.Context, spans core.Spans) error + Start(ctx context.Context, spans ...core.Span) error FetchNext(ctx context.Context) (EncodedDocument, ExecInfo, error) Close() error } @@ -98,7 +98,7 @@ type DocumentFetcher struct { deletedDocs bool txn datastore.Txn - spans core.Spans + spans []core.Span order []dsq.Order curSpanIndex int @@ -243,7 +243,7 @@ func (df *DocumentFetcher) init( return nil } -func (df *DocumentFetcher) Start(ctx context.Context, spans core.Spans) error { +func (df *DocumentFetcher) Start(ctx context.Context, spans ...core.Span) error { err := df.start(ctx, spans, false) if err != nil { return err @@ -257,7 +257,7 @@ func (df *DocumentFetcher) Start(ctx context.Context, spans core.Spans) error { } // Start implements DocumentFetcher. 
-func (df *DocumentFetcher) start(ctx context.Context, spans core.Spans, withDeleted bool) error { +func (df *DocumentFetcher) start(ctx context.Context, spans []core.Span, withDeleted bool) error { if df.col == nil { return client.NewErrUninitializeProperty("DocumentFetcher", "CollectionDescription") } @@ -267,22 +267,31 @@ func (df *DocumentFetcher) start(ctx context.Context, spans core.Spans, withDele df.deletedDocs = withDeleted - if !spans.HasValue { // no specified spans so create a prefix scan key for the entire collection + if len(spans) == 0 { // no specified spans so create a prefix scan key for the entire collection start := base.MakeDataStoreKeyWithCollectionDescription(df.col.Description()) if withDeleted { start = start.WithDeletedFlag() } else { start = start.WithValueFlag() } - df.spans = core.NewSpans(core.NewSpan(start, start.PrefixEnd())) + df.spans = []core.Span{core.NewSpan(start, start.PrefixEnd())} } else { - valueSpans := make([]core.Span, len(spans.Value)) - for i, span := range spans.Value { - // We can only handle value keys, so here we ensure we only read value keys + valueSpans := make([]core.Span, len(spans)) + for i, span := range spans { if withDeleted { - valueSpans[i] = core.NewSpan(span.Start().WithDeletedFlag(), span.End().WithDeletedFlag()) + // DocumentFetcher only ever receives document keys + //nolint:forcetypeassert + valueSpans[i] = core.NewSpan( + span.Start.(keys.DataStoreKey).WithDeletedFlag(), + span.End.(keys.DataStoreKey).WithDeletedFlag(), + ) } else { - valueSpans[i] = core.NewSpan(span.Start().WithValueFlag(), span.End().WithValueFlag()) + // DocumentFetcher only ever receives document keys + //nolint:forcetypeassert + valueSpans[i] = core.NewSpan( + span.Start.(keys.DataStoreKey).WithValueFlag(), + span.End.(keys.DataStoreKey).WithValueFlag(), + ) } } @@ -292,7 +301,7 @@ func (df *DocumentFetcher) start(ctx context.Context, spans core.Spans, withDele spans[i], spans[j] = spans[j], spans[i] } } - df.spans = core.NewSpans(spans...) + df.spans = spans } df.curSpanIndex = -1 @@ -309,7 +318,7 @@ func (df *DocumentFetcher) start(ctx context.Context, spans core.Spans, withDele func (df *DocumentFetcher) startNextSpan(ctx context.Context) (bool, error) { nextSpanIndex := df.curSpanIndex + 1 - if nextSpanIndex >= len(df.spans.Value) { + if nextSpanIndex >= len(df.spans) { return false, nil } @@ -330,8 +339,8 @@ func (df *DocumentFetcher) startNextSpan(ctx context.Context) (bool, error) { } } - span := df.spans.Value[nextSpanIndex] - df.kvResultsIter, err = df.kvIter.IteratePrefix(ctx, span.Start().ToDS(), span.End().ToDS()) + span := df.spans[nextSpanIndex] + df.kvResultsIter, err = df.kvIter.IteratePrefix(ctx, span.Start.ToDS(), span.End.ToDS()) if err != nil { return false, err } diff --git a/internal/db/fetcher/indexer.go b/internal/db/fetcher/indexer.go index 4d370146ed..3f7b82b6e0 100644 --- a/internal/db/fetcher/indexer.go +++ b/internal/db/fetcher/indexer.go @@ -124,9 +124,9 @@ outer: return err } -func (f *IndexFetcher) Start(ctx context.Context, spans core.Spans) error { +func (f *IndexFetcher) Start(ctx context.Context, spans ...core.Span) error { if f.indexIter == nil { - return f.docFetcher.Start(ctx, spans) + return f.docFetcher.Start(ctx, spans...)
} return f.indexIter.Init(ctx, f.txn.Datastore()) } @@ -192,8 +192,8 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo if len(f.docFields) > 0 { targetKey := base.MakeDataStoreKeyWithCollectionAndDocID(f.col.Description(), string(f.doc.id)) - spans := core.NewSpans(core.NewSpan(targetKey, targetKey.PrefixEnd())) - err := f.docFetcher.Start(ctx, spans) + span := core.NewSpan(targetKey, targetKey.PrefixEnd()) + err := f.docFetcher.Start(ctx, span) if err != nil { return nil, ExecInfo{}, err } diff --git a/internal/db/fetcher/mocks/fetcher.go b/internal/db/fetcher/mocks/fetcher.go index 2dac3b0079..396bf67345 100644 --- a/internal/db/fetcher/mocks/fetcher.go +++ b/internal/db/fetcher/mocks/fetcher.go @@ -202,16 +202,23 @@ func (_c *Fetcher_Init_Call) RunAndReturn(run func(context.Context, immutable.Op } // Start provides a mock function with given fields: ctx, spans -func (_m *Fetcher) Start(ctx context.Context, spans core.Spans) error { - ret := _m.Called(ctx, spans) +func (_m *Fetcher) Start(ctx context.Context, spans ...core.Span) error { + _va := make([]interface{}, len(spans)) + for _i := range spans { + _va[_i] = spans[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) if len(ret) == 0 { panic("no return value specified for Start") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, core.Spans) error); ok { - r0 = rf(ctx, spans) + if rf, ok := ret.Get(0).(func(context.Context, ...core.Span) error); ok { + r0 = rf(ctx, spans...) } else { r0 = ret.Error(0) } @@ -226,14 +233,21 @@ type Fetcher_Start_Call struct { // Start is a helper method to define mock.On call // - ctx context.Context -// - spans core.Spans -func (_e *Fetcher_Expecter) Start(ctx interface{}, spans interface{}) *Fetcher_Start_Call { - return &Fetcher_Start_Call{Call: _e.mock.On("Start", ctx, spans)} +// - spans ...core.Span +func (_e *Fetcher_Expecter) Start(ctx interface{}, spans ...interface{}) *Fetcher_Start_Call { + return &Fetcher_Start_Call{Call: _e.mock.On("Start", + append([]interface{}{ctx}, spans...)...)} } -func (_c *Fetcher_Start_Call) Run(run func(ctx context.Context, spans core.Spans)) *Fetcher_Start_Call { +func (_c *Fetcher_Start_Call) Run(run func(ctx context.Context, spans ...core.Span)) *Fetcher_Start_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(core.Spans)) + variadicArgs := make([]core.Span, len(args)-1) + for i, a := range args[1:] { + if a != nil { + variadicArgs[i] = a.(core.Span) + } + } + run(args[0].(context.Context), variadicArgs...) }) return _c } @@ -243,7 +257,7 @@ func (_c *Fetcher_Start_Call) Return(_a0 error) *Fetcher_Start_Call { return _c } -func (_c *Fetcher_Start_Call) RunAndReturn(run func(context.Context, core.Spans) error) *Fetcher_Start_Call { +func (_c *Fetcher_Start_Call) RunAndReturn(run func(context.Context, ...core.Span) error) *Fetcher_Start_Call { _c.Call.Return(run) return _c } diff --git a/internal/db/fetcher/versioned.go b/internal/db/fetcher/versioned.go index b10b6ca84f..baa3acfcfb 100644 --- a/internal/db/fetcher/versioned.go +++ b/internal/db/fetcher/versioned.go @@ -89,8 +89,7 @@ type VersionedFetcher struct { root datastore.Rootstore store datastore.Txn - dsKey keys.DataStoreKey - version cid.Cid + dsKey keys.DataStoreKey queuedCids *list.List @@ -153,42 +152,37 @@ func (vf *VersionedFetcher) Init( } // Start serializes the correct state according to the Key and CID. 
-func (vf *VersionedFetcher) Start(ctx context.Context, spans core.Spans) error { +func (vf *VersionedFetcher) Start(ctx context.Context, spans ...core.Span) error { if vf.col == nil { return client.NewErrUninitializeProperty("VersionedFetcher", "CollectionDescription") } - if len(spans.Value) != 1 { + if len(spans) != 1 { return ErrSingleSpanOnly } - // For the VersionedFetcher, the spans needs to be in the format - // Span{Start: DocID, End: CID} - dk := spans.Value[0].Start() - cidRaw := spans.Value[0].End() - if dk.DocID == "" { + // VersionedFetcher only ever receives a headstore key + //nolint:forcetypeassert + prefix := spans[0].Start.(keys.HeadStoreKey) + dk := prefix.DocID + cid := prefix.Cid + if dk == "" { return client.NewErrUninitializeProperty("Spans", "DocID") - } else if cidRaw.DocID == "" { // todo: dont abuse DataStoreKey/Span like this! + } else if !cid.Defined() { return client.NewErrUninitializeProperty("Spans", "CID") } - // decode cidRaw from core.Key to cid.Cid - // need to remove '/' prefix from the core.Key - - c, err := cid.Decode(cidRaw.DocID) - if err != nil { - return NewErrFailedToDecodeCIDForVFetcher(err) - } - vf.ctx = ctx - vf.dsKey = dk.WithCollectionRoot(vf.col.Description().RootID) - vf.version = c + vf.dsKey = keys.DataStoreKey{ + CollectionRootID: vf.col.Description().RootID, + DocID: dk, + } - if err := vf.seekTo(vf.version); err != nil { - return NewErrFailedToSeek(c, err) + if err := vf.seekTo(cid); err != nil { + return NewErrFailedToSeek(cid, err) } - return vf.DocumentFetcher.Start(ctx, core.Spans{}) + return vf.DocumentFetcher.Start(ctx) } // Rootstore returns the rootstore of the VersionedFetcher. @@ -217,7 +211,7 @@ func (vf *VersionedFetcher) SeekTo(ctx context.Context, c cid.Cid) error { return err } - return vf.DocumentFetcher.Start(ctx, core.Spans{}) + return vf.DocumentFetcher.Start(ctx) } // seekTo seeks to the given CID version by stepping through the CRDT state graph from the beginning @@ -421,9 +415,3 @@ func (vf *VersionedFetcher) Close() error { return vf.DocumentFetcher.Close() } - -// NewVersionedSpan creates a new VersionedSpan from a DataStoreKey and a version CID. -func NewVersionedSpan(dsKey keys.DataStoreKey, version cid.Cid) core.Spans { - // Todo: Dont abuse DataStoreKey for version cid! - return core.NewSpans(core.NewSpan(dsKey, keys.DataStoreKey{DocID: version.String()})) -} diff --git a/internal/db/fetcher_test.go b/internal/db/fetcher_test.go index 01b5ff065f..48c159d3c0 100644 --- a/internal/db/fetcher_test.go +++ b/internal/db/fetcher_test.go @@ -16,13 +16,12 @@ import ( "github.com/stretchr/testify/assert" - "github.com/sourcenetwork/defradb/internal/core" "github.com/sourcenetwork/defradb/internal/db/fetcher" ) func TestFetcherStartWithoutInit(t *testing.T) { ctx := context.Background() df := new(fetcher.DocumentFetcher) - err := df.Start(ctx, core.Spans{}) + err := df.Start(ctx) assert.Error(t, err) } diff --git a/internal/keys/datastore_doc.go b/internal/keys/datastore_doc.go index cd8ac60917..cffa99f6fc 100644 --- a/internal/keys/datastore_doc.go +++ b/internal/keys/datastore_doc.go @@ -41,7 +41,7 @@ type DataStoreKey struct { FieldID string } -var _ Key = (*DataStoreKey)(nil) +var _ Walkable = (*DataStoreKey)(nil) // Creates a new DataStoreKey from a string as best as it can, // splitting the input using '/' as a field deliminator.
It assumes @@ -167,7 +167,7 @@ func (k DataStoreKey) ToPrimaryDataStoreKey() PrimaryDataStoreKey { // PrefixEnd determines the end key given key as a prefix, that is the key that sorts precisely // behind all keys starting with prefix: "1" is added to the final byte and the carry propagated. // The special cases of nil and KeyMin always returns KeyMax. -func (k DataStoreKey) PrefixEnd() DataStoreKey { +func (k DataStoreKey) PrefixEnd() Walkable { newKey := k if k.FieldID != "" { diff --git a/internal/keys/headstore_doc.go b/internal/keys/headstore_doc.go index 5d3ec2306e..55809ab236 100644 --- a/internal/keys/headstore_doc.go +++ b/internal/keys/headstore_doc.go @@ -23,7 +23,7 @@ type HeadStoreKey struct { Cid cid.Cid } -var _ Key = (*HeadStoreKey)(nil) +var _ Walkable = (*HeadStoreKey)(nil) // Creates a new HeadStoreKey from a string as best as it can, // splitting the input using '/' as a field deliminator. It assumes @@ -92,3 +92,22 @@ func (k HeadStoreKey) Bytes() []byte { func (k HeadStoreKey) ToDS() ds.Key { return ds.NewKey(k.ToString()) } + +func (k HeadStoreKey) PrefixEnd() Walkable { + newKey := k + + if k.FieldID != "" { + newKey.FieldID = string(bytesPrefixEnd([]byte(k.FieldID))) + return newKey + } + if k.DocID != "" { + newKey.DocID = string(bytesPrefixEnd([]byte(k.DocID))) + return newKey + } + if k.Cid.Defined() { + newKey.Cid = cid.MustParse(bytesPrefixEnd(k.Cid.Bytes())) + return newKey + } + + return newKey +} diff --git a/internal/keys/key.go b/internal/keys/key.go index 893b9790b4..42e5935a1f 100644 --- a/internal/keys/key.go +++ b/internal/keys/key.go @@ -20,3 +20,20 @@ type Key interface { Bytes() []byte ToDS() ds.Key } + +// Walkable represents a key in the database that can be 'walked along' +// by prefixing the end of the key. +type Walkable interface { + Key + PrefixEnd() Walkable +} + +// PrettyPrint returns the human readable version of the given key. +func PrettyPrint(k Key) string { + switch typed := k.(type) { + case DataStoreKey: + return typed.PrettyPrint() + default: + return typed.ToString() + } +} diff --git a/internal/lens/fetcher.go b/internal/lens/fetcher.go index db9e418afa..a441c357bd 100644 --- a/internal/lens/fetcher.go +++ b/internal/lens/fetcher.go @@ -127,8 +127,8 @@ historyLoop: ) } -func (f *lensedFetcher) Start(ctx context.Context, spans core.Spans) error { - return f.source.Start(ctx, spans) +func (f *lensedFetcher) Start(ctx context.Context, spans ...core.Span) error { + return f.source.Start(ctx, spans...) 
} func (f *lensedFetcher) FetchNext(ctx context.Context) (fetcher.EncodedDocument, fetcher.ExecInfo, error) { diff --git a/internal/planner/arbitrary_join.go b/internal/planner/arbitrary_join.go index 978015298b..e668287028 100644 --- a/internal/planner/arbitrary_join.go +++ b/internal/planner/arbitrary_join.go @@ -79,7 +79,7 @@ func (n *dataSource) Start() error { return nil } -func (n *dataSource) Spans(spans core.Spans) { +func (n *dataSource) Spans(spans []core.Span) { if n.parentSource != nil { n.parentSource.Spans(spans) } diff --git a/internal/planner/average.go b/internal/planner/average.go index 24ef567011..9fe8803bee 100644 --- a/internal/planner/average.go +++ b/internal/planner/average.go @@ -64,11 +64,11 @@ func (n *averageNode) Init() error { return n.plan.Init() } -func (n *averageNode) Kind() string { return "averageNode" } -func (n *averageNode) Start() error { return n.plan.Start() } -func (n *averageNode) Spans(spans core.Spans) { n.plan.Spans(spans) } -func (n *averageNode) Close() error { return n.plan.Close() } -func (n *averageNode) Source() planNode { return n.plan } +func (n *averageNode) Kind() string { return "averageNode" } +func (n *averageNode) Start() error { return n.plan.Start() } +func (n *averageNode) Spans(spans []core.Span) { n.plan.Spans(spans) } +func (n *averageNode) Close() error { return n.plan.Close() } +func (n *averageNode) Source() planNode { return n.plan } func (n *averageNode) Next() (bool, error) { n.execInfo.iterations++ diff --git a/internal/planner/commit.go b/internal/planner/commit.go index dc9a0ce3d7..ceecfc46cd 100644 --- a/internal/planner/commit.go +++ b/internal/planner/commit.go @@ -36,7 +36,7 @@ type dagScanNode struct { queuedCids []*cid.Cid fetcher fetcher.HeadFetcher - spans core.Spans + prefix keys.HeadStoreKey commitSelect *mapper.CommitSelect execInfo dagScanExecInfo @@ -67,20 +67,21 @@ func (n *dagScanNode) Kind() string { } func (n *dagScanNode) Init() error { - if len(n.spans.Value) == 0 { + undefined := keys.HeadStoreKey{} + if n.prefix == undefined { if n.commitSelect.DocID.HasValue() { - dsKey := keys.DataStoreKey{}.WithDocID(n.commitSelect.DocID.Value()) + key := keys.HeadStoreKey{}.WithDocID(n.commitSelect.DocID.Value()) if n.commitSelect.FieldID.HasValue() { field := n.commitSelect.FieldID.Value() - dsKey = dsKey.WithFieldID(field) + key = key.WithFieldID(field) } - n.spans = core.NewSpans(core.NewSpan(dsKey, dsKey.PrefixEnd())) + n.prefix = key } } - return n.fetcher.Start(n.planner.ctx, n.planner.txn, n.spans, n.commitSelect.FieldID) + return n.fetcher.Start(n.planner.ctx, n.planner.txn, n.prefix, n.commitSelect.FieldID) } func (n *dagScanNode) Start() error { @@ -92,18 +93,11 @@ func (n *dagScanNode) Start() error { // either a CID or a DocID. 
// If its a CID, set the node CID val // if its a DocID, set the node Key val (headset) -func (n *dagScanNode) Spans(spans core.Spans) { - if len(spans.Value) == 0 { +func (n *dagScanNode) Spans(spans []core.Span) { + if len(spans) == 0 { return } - // copy the input spans so that we may mutate freely - headSetSpans := core.Spans{ - HasValue: spans.HasValue, - Value: make([]core.Span, len(spans.Value)), - } - copy(headSetSpans.Value, spans.Value) - var fieldID string if n.commitSelect.FieldID.HasValue() { fieldID = n.commitSelect.FieldID.Value() @@ -111,13 +105,18 @@ func (n *dagScanNode) Spans(spans core.Spans) { fieldID = core.COMPOSITE_NAMESPACE } - for i, span := range headSetSpans.Value { - if span.Start().FieldID != fieldID { - headSetSpans.Value[i] = core.NewSpan(span.Start().WithFieldID(fieldID), keys.DataStoreKey{}) + for _, span := range spans { + var start keys.HeadStoreKey + switch s := span.Start.(type) { + case keys.DataStoreKey: + start = s.ToHeadStoreKey() + case keys.HeadStoreKey: + start = s } - } - n.spans = headSetSpans + n.prefix = start.WithFieldID(fieldID) + return + } } func (n *dagScanNode) Close() error { @@ -145,17 +144,16 @@ func (n *dagScanNode) simpleExplain() (map[string]any, error) { // Build the explanation of the spans attribute. spansExplainer := []map[string]any{} + undefinedHsKey := keys.HeadStoreKey{} // Note: n.headset is `nil` for single commit selection query, so must check for it. - if n.spans.HasValue { - for _, span := range n.spans.Value { - spansExplainer = append( - spansExplainer, - map[string]any{ - "start": span.Start().ToString(), - "end": span.End().ToString(), - }, - ) - } + if n.prefix != undefinedHsKey { + spansExplainer = append( + spansExplainer, + map[string]any{ + "start": n.prefix.ToString(), + "end": n.prefix.PrefixEnd().ToString(), + }, + ) } // Add the built spans attribute, if it was valid. simpleExplainMap[spansLabel] = spansExplainer diff --git a/internal/planner/count.go b/internal/planner/count.go index d0c0642cae..efc2a20c36 100644 --- a/internal/planner/count.go +++ b/internal/planner/count.go @@ -62,7 +62,7 @@ func (n *countNode) Init() error { func (n *countNode) Start() error { return n.plan.Start() } -func (n *countNode) Spans(spans core.Spans) { n.plan.Spans(spans) } +func (n *countNode) Spans(spans []core.Span) { n.plan.Spans(spans) } func (n *countNode) Close() error { return n.plan.Close() } diff --git a/internal/planner/create.go b/internal/planner/create.go index 18365f966d..1b03857a13 100644 --- a/internal/planner/create.go +++ b/internal/planner/create.go @@ -56,13 +56,13 @@ func (n *createNode) Kind() string { return "createNode" } func (n *createNode) Init() error { return nil } -func docIDsToSpans(ids []string, desc client.CollectionDescription) core.Spans { +func docIDsToSpans(ids []string, desc client.CollectionDescription) []core.Span { spans := make([]core.Span, len(ids)) for i, id := range ids { docID := base.MakeDataStoreKeyWithCollectionAndDocID(desc, id) spans[i] = core.NewSpan(docID, docID.PrefixEnd()) } - return core.NewSpans(spans...) 
+ return spans } func documentsToDocIDs(docs ...*client.Document) []string { @@ -115,7 +115,7 @@ func (n *createNode) Next() (bool, error) { return next, err } -func (n *createNode) Spans(spans core.Spans) { /* no-op */ } +func (n *createNode) Spans(spans []core.Span) { /* no-op */ } func (n *createNode) Close() error { return n.results.Close() diff --git a/internal/planner/delete.go b/internal/planner/delete.go index e470f45956..9142a76868 100644 --- a/internal/planner/delete.go +++ b/internal/planner/delete.go @@ -67,7 +67,7 @@ func (n *deleteNode) Next() (bool, error) { return true, nil } -func (n *deleteNode) Spans(spans core.Spans) { +func (n *deleteNode) Spans(spans []core.Span) { n.source.Spans(spans) } diff --git a/internal/planner/group.go b/internal/planner/group.go index 32a98c2330..2491740e81 100644 --- a/internal/planner/group.go +++ b/internal/planner/group.go @@ -127,7 +127,7 @@ func (n *groupNode) Start() error { return nil } -func (n *groupNode) Spans(spans core.Spans) { +func (n *groupNode) Spans(spans []core.Span) { for _, dataSource := range n.dataSources { dataSource.Spans(spans) } diff --git a/internal/planner/lens.go b/internal/planner/lens.go index 816e973b09..618642b5df 100644 --- a/internal/planner/lens.go +++ b/internal/planner/lens.go @@ -61,7 +61,7 @@ func (n *lensNode) Start() error { return n.source.Start() } -func (n *lensNode) Spans(spans core.Spans) { +func (n *lensNode) Spans(spans []core.Span) { n.source.Spans(spans) } diff --git a/internal/planner/limit.go b/internal/planner/limit.go index 0da7a8b249..5281a7e215 100644 --- a/internal/planner/limit.go +++ b/internal/planner/limit.go @@ -59,10 +59,10 @@ func (n *limitNode) Init() error { return n.plan.Init() } -func (n *limitNode) Start() error { return n.plan.Start() } -func (n *limitNode) Spans(spans core.Spans) { n.plan.Spans(spans) } -func (n *limitNode) Close() error { return n.plan.Close() } -func (n *limitNode) Value() core.Doc { return n.plan.Value() } +func (n *limitNode) Start() error { return n.plan.Start() } +func (n *limitNode) Spans(spans []core.Span) { n.plan.Spans(spans) } +func (n *limitNode) Close() error { return n.plan.Close() } +func (n *limitNode) Value() core.Doc { return n.plan.Value() } func (n *limitNode) Next() (bool, error) { n.execInfo.iterations++ diff --git a/internal/planner/max.go b/internal/planner/max.go index dbcc991268..e4db8fa526 100644 --- a/internal/planner/max.go +++ b/internal/planner/max.go @@ -54,13 +54,13 @@ func (p *Planner) Max( }, nil } -func (n *maxNode) Kind() string { return "maxNode" } -func (n *maxNode) Init() error { return n.plan.Init() } -func (n *maxNode) Start() error { return n.plan.Start() } -func (n *maxNode) Spans(spans core.Spans) { n.plan.Spans(spans) } -func (n *maxNode) Close() error { return n.plan.Close() } -func (n *maxNode) Source() planNode { return n.plan } -func (n *maxNode) SetPlan(p planNode) { n.plan = p } +func (n *maxNode) Kind() string { return "maxNode" } +func (n *maxNode) Init() error { return n.plan.Init() } +func (n *maxNode) Start() error { return n.plan.Start() } +func (n *maxNode) Spans(spans []core.Span) { n.plan.Spans(spans) } +func (n *maxNode) Close() error { return n.plan.Close() } +func (n *maxNode) Source() planNode { return n.plan } +func (n *maxNode) SetPlan(p planNode) { n.plan = p } func (n *maxNode) simpleExplain() (map[string]any, error) { sourceExplanations := make([]map[string]any, len(n.aggregateMapping)) diff --git a/internal/planner/min.go b/internal/planner/min.go index 9be8ecd30a..163ca2894d 100644 
--- a/internal/planner/min.go
+++ b/internal/planner/min.go
@@ -54,13 +54,13 @@ func (p *Planner) Min(
 	}, nil
 }
 
-func (n *minNode) Kind() string           { return "minNode" }
-func (n *minNode) Init() error            { return n.plan.Init() }
-func (n *minNode) Start() error           { return n.plan.Start() }
-func (n *minNode) Spans(spans core.Spans) { n.plan.Spans(spans) }
-func (n *minNode) Close() error           { return n.plan.Close() }
-func (n *minNode) Source() planNode       { return n.plan }
-func (n *minNode) SetPlan(p planNode)     { n.plan = p }
+func (n *minNode) Kind() string            { return "minNode" }
+func (n *minNode) Init() error             { return n.plan.Init() }
+func (n *minNode) Start() error            { return n.plan.Start() }
+func (n *minNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
+func (n *minNode) Close() error            { return n.plan.Close() }
+func (n *minNode) Source() planNode        { return n.plan }
+func (n *minNode) SetPlan(p planNode)      { n.plan = p }
 
 func (n *minNode) simpleExplain() (map[string]any, error) {
 	sourceExplanations := make([]map[string]any, len(n.aggregateMapping))
diff --git a/internal/planner/multi.go b/internal/planner/multi.go
index c4c3278480..579f169344 100644
--- a/internal/planner/multi.go
+++ b/internal/planner/multi.go
@@ -91,7 +91,7 @@ func (p *parallelNode) Start() error {
 	})
 }
 
-func (p *parallelNode) Spans(spans core.Spans) {
+func (p *parallelNode) Spans(spans []core.Span) {
 	_ = p.applyToPlans(func(n planNode) error {
 		n.Spans(spans)
 		return nil
@@ -157,7 +157,7 @@ func (p *parallelNode) nextAppend(index int, plan planNode) (bool, error) {
 	}
 
 	// pass the doc key as a reference through the spans interface
-	spans := core.NewSpans(core.NewSpan(keys.DataStoreKey{DocID: key}, keys.DataStoreKey{}))
+	spans := []core.Span{core.NewSpan(keys.DataStoreKey{DocID: key}, keys.DataStoreKey{})}
 	plan.Spans(spans)
 	err := plan.Init()
 	if err != nil {
diff --git a/internal/planner/operation.go b/internal/planner/operation.go
index 934fe2d4b4..6f351f92a1 100644
--- a/internal/planner/operation.go
+++ b/internal/planner/operation.go
@@ -28,7 +28,7 @@ type operationNode struct {
 	isDone bool
 }
 
-func (n *operationNode) Spans(spans core.Spans) {
+func (n *operationNode) Spans(spans []core.Span) {
 	for _, child := range n.children {
 		child.Spans(spans)
 	}
diff --git a/internal/planner/order.go b/internal/planner/order.go
index 7f30800f2e..0a69ba5453 100644
--- a/internal/planner/order.go
+++ b/internal/planner/order.go
@@ -98,7 +98,7 @@ func (n *orderNode) Init() error {
 }
 
 func (n *orderNode) Start() error { return n.plan.Start() }
-func (n *orderNode) Spans(spans core.Spans) { n.plan.Spans(spans) }
+func (n *orderNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
 
 func (n *orderNode) Value() core.Doc {
 	return n.valueIter.Value()
diff --git a/internal/planner/pipe.go b/internal/planner/pipe.go
index a14432bc12..b9331fce45 100644
--- a/internal/planner/pipe.go
+++ b/internal/planner/pipe.go
@@ -51,10 +51,10 @@ func (n *pipeNode) Init() error {
 	return n.source.Init()
 }
 
-func (n *pipeNode) Start() error           { return n.source.Start() }
-func (n *pipeNode) Spans(spans core.Spans) { n.source.Spans(spans) }
-func (n *pipeNode) Close() error           { return n.source.Close() }
-func (n *pipeNode) Source() planNode       { return n.source }
+func (n *pipeNode) Start() error            { return n.source.Start() }
+func (n *pipeNode) Spans(spans []core.Span) { n.source.Spans(spans) }
+func (n *pipeNode) Close() error            { return n.source.Close() }
+func (n *pipeNode) Source() planNode        { return n.source }
 
 func (n *pipeNode) Next() (bool, error) {
 	// we need to load all docs up until the requested point - this allows us to
diff --git a/internal/planner/planner.go b/internal/planner/planner.go
index fb5ce5812a..77dac1c7a2 100644
--- a/internal/planner/planner.go
+++ b/internal/planner/planner.go
@@ -36,7 +36,7 @@ type planNode interface {
 
 	// Spans sets the planNodes target spans. This is primarily only used for a scanNode,
 	// but based on the tree structure, may need to be propagated Eg. From a selectNode -> scanNode.
-	Spans(core.Spans)
+	Spans([]core.Span)
 
 	// Next processes the next result doc from the request. Can only be called *after* Start().
 	// Can't be called again if any previous call returns false.
diff --git a/internal/planner/scan.go b/internal/planner/scan.go
index a5fe4a32e9..c00cda401c 100644
--- a/internal/planner/scan.go
+++ b/internal/planner/scan.go
@@ -18,6 +18,7 @@ import (
 	"github.com/sourcenetwork/defradb/internal/core"
 	"github.com/sourcenetwork/defradb/internal/db/base"
 	"github.com/sourcenetwork/defradb/internal/db/fetcher"
+	"github.com/sourcenetwork/defradb/internal/keys"
 	"github.com/sourcenetwork/defradb/internal/lens"
 	"github.com/sourcenetwork/defradb/internal/planner/filter"
 	"github.com/sourcenetwork/defradb/internal/planner/mapper"
@@ -45,7 +46,7 @@ type scanNode struct {
 
 	showDeleted bool
 
-	spans   core.Spans
+	spans   []core.Span
 	reverse bool
 
 	filter *mapper.Filter
@@ -201,12 +202,12 @@ func (n *scanNode) Start() error {
 }
 
 func (n *scanNode) initScan() error {
-	if !n.spans.HasValue {
+	if len(n.spans) == 0 {
 		start := base.MakeDataStoreKeyWithCollectionDescription(n.col.Description())
-		n.spans = core.NewSpans(core.NewSpan(start, start.PrefixEnd()))
+		n.spans = []core.Span{core.NewSpan(start, start.PrefixEnd())}
 	}
 
-	err := n.fetcher.Start(n.p.ctx, n.spans)
+	err := n.fetcher.Start(n.p.ctx, n.spans...)
 	if err != nil {
 		return err
 	}
@@ -220,7 +221,7 @@ func (n *scanNode) initScan() error {
 func (n *scanNode) Next() (bool, error) {
 	n.execInfo.iterations++
 
-	if n.spans.HasValue && len(n.spans.Value) == 0 {
+	if len(n.spans) == 0 {
 		return false, nil
 	}
 
@@ -248,7 +249,7 @@ func (n *scanNode) Next() (bool, error) {
 	return true, nil
 }
 
-func (n *scanNode) Spans(spans core.Spans) {
+func (n *scanNode) Spans(spans []core.Span) {
 	n.spans = spans
 }
 
@@ -261,12 +262,10 @@ func (n *scanNode) Source() planNode { return nil }
 // explainSpans explains the spans attribute.
 func (n *scanNode) explainSpans() []map[string]any {
 	spansExplainer := []map[string]any{}
-	for _, span := range n.spans.Value {
+	for _, span := range n.spans {
 		spanExplainer := map[string]any{
-			// These must be pretty printed as the explain results need to be returnable
-			// as json via some clients (e.g. http and cli)
-			"start": span.Start().PrettyPrint(),
-			"end":   span.End().PrettyPrint(),
+			"start": keys.PrettyPrint(span.Start),
+			"end":   keys.PrettyPrint(span.End),
 		}
 
 		spansExplainer = append(spansExplainer, spanExplainer)
@@ -419,7 +418,7 @@ func (n *multiScanNode) Value() core.Doc {
 	return n.scanNode.documentIterator.Value()
 }
 
-func (n *multiScanNode) Spans(spans core.Spans) {
+func (n *multiScanNode) Spans(spans []core.Span) {
 	n.scanNode.Spans(spans)
 }
 
diff --git a/internal/planner/select.go b/internal/planner/select.go
index 9393103e40..e5b53cd997 100644
--- a/internal/planner/select.go
+++ b/internal/planner/select.go
@@ -18,7 +18,6 @@ import (
 	"github.com/sourcenetwork/defradb/client/request"
 	"github.com/sourcenetwork/defradb/internal/core"
 	"github.com/sourcenetwork/defradb/internal/db/base"
-	"github.com/sourcenetwork/defradb/internal/db/fetcher"
 	"github.com/sourcenetwork/defradb/internal/keys"
 	"github.com/sourcenetwork/defradb/internal/planner/mapper"
 )
@@ -70,7 +69,7 @@ func (n *selectTopNode) Start() error { return n.planNode.Start() }
 
 func (n *selectTopNode) Next() (bool, error) { return n.planNode.Next() }
 
-func (n *selectTopNode) Spans(spans core.Spans) { n.planNode.Spans(spans) }
+func (n *selectTopNode) Spans(spans []core.Span) { n.planNode.Spans(spans) }
 
 func (n *selectTopNode) Value() core.Doc { return n.planNode.Value() }
 
@@ -182,7 +181,7 @@ func (n *selectNode) Next() (bool, error) {
 	}
 }
 
-func (n *selectNode) Spans(spans core.Spans) {
+func (n *selectNode) Spans(spans []core.Span) {
 	n.source.Spans(spans)
 }
 
@@ -264,11 +263,17 @@ func (n *selectNode) initSource() ([]aggregateNode, error) {
 		if err != nil {
 			return nil, err
 		}
-		spans := fetcher.NewVersionedSpan(
-			keys.DataStoreKey{DocID: n.selectReq.DocIDs.Value()[0]},
-			c,
-		) // @todo check len
-		origScan.Spans(spans)
+		origScan.Spans(
+			[]core.Span{
+				core.NewSpan(
+					keys.HeadStoreKey{
+						DocID: n.selectReq.DocIDs.Value()[0],
+						Cid:   c,
+					},
+					keys.HeadStoreKey{},
+				),
+			},
+		)
 	} else if n.selectReq.DocIDs.HasValue() {
 		// If we *just* have a DocID(s), run a FindByDocID(s) optimization
 		// if we have a FindByDocID filter, create a span for it
@@ -281,7 +286,7 @@ func (n *selectNode) initSource() ([]aggregateNode, error) {
 			docIDIndexKey := base.MakeDataStoreKeyWithCollectionAndDocID(sourcePlan.collection.Description(), docID)
 			spans[i] = core.NewSpan(docIDIndexKey, docIDIndexKey.PrefixEnd())
 		}
-		origScan.Spans(core.NewSpans(spans...))
+		origScan.Spans(spans)
 	}
 }
 
diff --git a/internal/planner/sum.go b/internal/planner/sum.go
index 177dd72e3a..da6010704d 100644
--- a/internal/planner/sum.go
+++ b/internal/planner/sum.go
@@ -149,7 +149,7 @@ func (n *sumNode) Init() error {
 
 func (n *sumNode) Start() error { return n.plan.Start() }
 
-func (n *sumNode) Spans(spans core.Spans) { n.plan.Spans(spans) }
+func (n *sumNode) Spans(spans []core.Span) { n.plan.Spans(spans) }
 
 func (n *sumNode) Close() error { return n.plan.Close() }
 
diff --git a/internal/planner/top.go b/internal/planner/top.go
index ce2ce4e6dc..518a96af50 100644
--- a/internal/planner/top.go
+++ b/internal/planner/top.go
@@ -35,7 +35,7 @@ type topLevelNode struct {
 	isInRecurse bool
 }
 
-func (n *topLevelNode) Spans(spans core.Spans) {
+func (n *topLevelNode) Spans(spans []core.Span) {
 	if n.isInRecurse {
 		return
 	}
diff --git a/internal/planner/type_join.go b/internal/planner/type_join.go
index a6d726b801..a9063d07d6 100644
--- a/internal/planner/type_join.go
+++ b/internal/planner/type_join.go
@@ -114,7 +114,7 @@ func (n *typeIndexJoin) Start() error {
 	return n.joinPlan.Start()
 }
 
-func (n *typeIndexJoin) Spans(spans core.Spans) {
+func (n *typeIndexJoin) Spans(spans []core.Span) {
 	n.joinPlan.Spans(spans)
 }
 
@@ -444,7 +444,7 @@ func fetchDocWithID(node planNode, docID string) (bool, error) {
 	}
 
 	dsKey := base.MakeDataStoreKeyWithCollectionAndDocID(scan.col.Description(), docID)
-	spans := core.NewSpans(core.NewSpan(dsKey, dsKey.PrefixEnd()))
+	spans := []core.Span{core.NewSpan(dsKey, dsKey.PrefixEnd())}
 	node.Spans(spans)
 
@@ -502,7 +502,7 @@ func (join *invertibleTypeJoin) Close() error {
 	return join.childSide.plan.Close()
 }
 
-func (join *invertibleTypeJoin) Spans(spans core.Spans) {
+func (join *invertibleTypeJoin) Spans(spans []core.Span) {
 	join.parentSide.plan.Spans(spans)
 }
 
diff --git a/internal/planner/update.go b/internal/planner/update.go
index e707065022..4340625bf8 100644
--- a/internal/planner/update.go
+++ b/internal/planner/update.go
@@ -107,7 +107,7 @@ func (n *updateNode) Next() (bool, error) {
 
 func (n *updateNode) Kind() string { return "updateNode" }
 
-func (n *updateNode) Spans(spans core.Spans) { n.results.Spans(spans) }
+func (n *updateNode) Spans(spans []core.Span) { n.results.Spans(spans) }
 
 func (n *updateNode) Init() error { return n.results.Init() }
 
diff --git a/internal/planner/upsert.go b/internal/planner/upsert.go
index 4f12395284..331d1e4171 100644
--- a/internal/planner/upsert.go
+++ b/internal/planner/upsert.go
@@ -96,7 +96,7 @@ func (n *upsertNode) Kind() string {
 	return "upsertNode"
 }
 
-func (n *upsertNode) Spans(spans core.Spans) {
+func (n *upsertNode) Spans(spans []core.Span) {
 	n.source.Spans(spans)
 }
 
diff --git a/internal/planner/values.go b/internal/planner/values.go
index 8053d2df1e..4028f52594 100644
--- a/internal/planner/values.go
+++ b/internal/planner/values.go
@@ -46,9 +46,9 @@ func (p *Planner) newContainerValuesNode(ordering []mapper.OrderCondition) *valu
 	}
 }
 
-func (n *valuesNode) Init() error            { return nil }
-func (n *valuesNode) Start() error           { return nil }
-func (n *valuesNode) Spans(spans core.Spans) {}
+func (n *valuesNode) Init() error             { return nil }
+func (n *valuesNode) Start() error            { return nil }
+func (n *valuesNode) Spans(spans []core.Span) {}
 
 func (n *valuesNode) Kind() string {
 	return "valuesNode"
diff --git a/internal/planner/view.go b/internal/planner/view.go
index e5beef128b..b834d74323 100644
--- a/internal/planner/view.go
+++ b/internal/planner/view.go
@@ -74,7 +74,7 @@ func (n *viewNode) Start() error {
 	return n.source.Start()
 }
 
-func (n *viewNode) Spans(spans core.Spans) {
+func (n *viewNode) Spans(spans []core.Span) {
 	n.source.Spans(spans)
 }
 
@@ -217,7 +217,7 @@ func (n *cachedViewFetcher) Start() error {
 	return nil
 }
 
-func (n *cachedViewFetcher) Spans(spans core.Spans) {
+func (n *cachedViewFetcher) Spans(spans []core.Span) {
 	// no-op
 }
 
diff --git a/tests/integration/explain/default/delete_test.go b/tests/integration/explain/default/delete_test.go
index e220ae4d86..39b3b732d5 100644
--- a/tests/integration/explain/default/delete_test.go
+++ b/tests/integration/explain/default/delete_test.go
@@ -292,7 +292,12 @@ func TestDefaultExplainMutationRequestWithDeleteUsingNoIds(t *testing.T) {
 						"collectionID":   "3",
 						"collectionName": "Author",
 						"filter":         nil,
-						"spans":          []dataMap{},
+						"spans": []dataMap{
+							{
+								"end":   "/4",
+								"start": "/3",
+							},
+						},
 					},
 				},
 			},
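For reference, a minimal sketch (not part of the diff) of the call shape the hunks above converge on: callers now build a plain []core.Span and pass it to planNode.Spans, instead of wrapping the slice in the removed core.Spans struct via core.NewSpans. The scanNode, core.NewSpan, and base.MakeDataStoreKeyWithCollectionAndDocID names come from the hunks above; the helper function below and its arguments are hypothetical, added only for illustration.

	package planner

	import (
		"github.com/sourcenetwork/defradb/internal/core"
		"github.com/sourcenetwork/defradb/internal/db/base"
	)

	// targetDocIDSpan is a hypothetical helper showing the new API: a bare
	// []core.Span slice is handed directly to planNode.Spans.
	func targetDocIDSpan(scan *scanNode, docID string) {
		dsKey := base.MakeDataStoreKeyWithCollectionAndDocID(scan.col.Description(), docID)
		spans := []core.Span{core.NewSpan(dsKey, dsKey.PrefixEnd())}
		// scanNode.initScan now treats len(n.spans) == 0 as "no spans set".
		scan.Spans(spans)
	}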