diff --git a/CHANGELOG.md b/CHANGELOG.md index 234c479abe1..6dcc40d949e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ * [FEATURE] TraceQL support for link scope and link:traceID and link:spanID [#3741](https://github.com/grafana/tempo/pull/3741) (@stoewer) * [FEATURE] TraceQL support for event scope and event:name intrinsic [#3708](https://github.com/grafana/tempo/pull/3708) (@stoewer) * [FEATURE] Flush and query RF1 blocks for TraceQL metric queries [#3628](https://github.com/grafana/tempo/pull/3628) [#3691](https://github.com/grafana/tempo/pull/3691) [#3723](https://github.com/grafana/tempo/pull/3723) (@mapno) +* [FEATURE] Add new compare() metrics function [#3695](https://github.com/grafana/tempo/pull/3695) (@mdisibio) * [ENHANCEMENT] Tag value lookup use protobuf internally for improved latency [#3731](https://github.com/grafana/tempo/pull/3731) (@mdisibio) * [ENHANCEMENT] TraceQL metrics queries use protobuf internally for improved latency [#3745](https://github.com/grafana/tempo/pull/3745) (@mdisibio) * [ENHANCEMENT] Improve use of OTEL semantic conventions on the service graph [#3711](https://github.com/grafana/tempo/pull/3711) (@zalegrala) diff --git a/modules/frontend/metrics_query_range_sharder.go b/modules/frontend/metrics_query_range_sharder.go index 65eae768421..df32502b7db 100644 --- a/modules/frontend/metrics_query_range_sharder.go +++ b/modules/frontend/metrics_query_range_sharder.go @@ -458,6 +458,11 @@ func (s *queryRangeSharder) toUpstreamRequest(ctx context.Context, req tempopb.Q // Without alignment each refresh is shifted by seconds or even milliseconds and the time series // calculations are sublty different each time. It's not wrong, but less preferred behavior. func alignTimeRange(req *tempopb.QueryRangeRequest) { + if req.End-req.Start == req.Step { + // Instant query + return + } + // It doesn't really matter but the request fields are expected to be in nanoseconds. 
req.Start = req.Start / req.Step * req.Step req.End = req.End / req.Step * req.Step diff --git a/pkg/traceql/ast_test.go b/pkg/traceql/ast_test.go index f3b108677eb..3343bbd449c 100644 --- a/pkg/traceql/ast_test.go +++ b/pkg/traceql/ast_test.go @@ -407,6 +407,12 @@ func (m *mockSpan) AllAttributes() map[Attribute]Static { return m.attributes } +func (m *mockSpan) AllAttributesFunc(cb func(Attribute, Static)) { + for k, v := range m.attributes { + cb(k, v) + } +} + func (m *mockSpan) ID() []byte { return m.id } diff --git a/pkg/traceql/engine_metrics.go b/pkg/traceql/engine_metrics.go index 296ed0701a1..690a7954628 100644 --- a/pkg/traceql/engine_metrics.go +++ b/pkg/traceql/engine_metrics.go @@ -14,6 +14,7 @@ import ( "github.com/grafana/tempo/pkg/tempopb" commonv1proto "github.com/grafana/tempo/pkg/tempopb/common/v1" + v1 "github.com/grafana/tempo/pkg/tempopb/common/v1" "github.com/grafana/tempo/pkg/util" "github.com/prometheus/prometheus/model/labels" ) @@ -77,6 +78,14 @@ type Label struct { type Labels []Label +func LabelsFromProto(ls []v1.KeyValue) Labels { + out := make(Labels, 0, len(ls)) + for _, l := range ls { + out = append(out, Label{Name: l.Key, Value: StaticFromAnyValue(l.Value)}) + } + return out +} + // String returns the prometheus-formatted version of the labels. Which is downcasting // the typed TraceQL values to strings, with some special casing. func (ls Labels) String() string { @@ -98,7 +107,7 @@ func (ls Labels) String() string { } type TimeSeries struct { - Labels []Label + Labels Labels Values []float64 } @@ -578,7 +587,7 @@ func (e *Engine) CompileMetricsQueryRange(req *tempopb.QueryRangeRequest, dedupe // optimize numerous things within the request that is specific to metrics. 
func optimize(req *FetchSpansRequest) { - if !req.AllConditions { + if !req.AllConditions || req.SecondPassSelectAll { return } diff --git a/pkg/traceql/engine_metrics_compare.go b/pkg/traceql/engine_metrics_compare.go new file mode 100644 index 00000000000..554591025e3 --- /dev/null +++ b/pkg/traceql/engine_metrics_compare.go @@ -0,0 +1,446 @@ +package traceql + +import ( + "fmt" + "sort" + + "github.com/grafana/tempo/pkg/tempopb" +) + +const ( + internalLabelMetaType = "__meta_type" + internalMetaTypeBaseline = "baseline" + internalMetaTypeSelection = "selection" + internalMetaTypeBaselineTotal = "baseline_total" + internalMetaTypeSelectionTotal = "selection_total" + + // internalLabelBaseline = "__baseline" + internalLabelError = "__meta_error" + internalErrorTooManyValues = "__too_many_values__" +) + +var ( + internalLabelTypeBaseline = Label{Name: internalLabelMetaType, Value: NewStaticString(internalMetaTypeBaseline)} + internalLabelTypeBaselineTotal = Label{Name: internalLabelMetaType, Value: NewStaticString(internalMetaTypeBaselineTotal)} + internalLabelTypeSelection = Label{Name: internalLabelMetaType, Value: NewStaticString(internalMetaTypeSelection)} + internalLabelTypeSelectionTotal = Label{Name: internalLabelMetaType, Value: NewStaticString(internalMetaTypeSelectionTotal)} + internalLabelErrorTooManyValues = Label{Name: internalLabelError, Value: NewStaticString(internalErrorTooManyValues)} +) + +type MetricsCompare struct { + f *SpansetFilter + qstart, qend, qstep uint64 + len int + start, end int + topN int + baselines map[Attribute]map[Static][]float64 + selections map[Attribute]map[Static][]float64 + baselineTotals map[Attribute][]float64 + selectionTotals map[Attribute][]float64 + seriesAgg SeriesAggregator +} + +func newMetricsCompare(f *SpansetFilter, topN, start, end int) *MetricsCompare { + return &MetricsCompare{ + f: f, + topN: topN, + start: start, + end: end, + } +} + +func (m *MetricsCompare) extractConditions(request *FetchSpansRequest) 
{ + request.SecondPassSelectAll = true + if !request.HasAttribute(IntrinsicSpanStartTimeAttribute) { + request.SecondPassConditions = append(request.SecondPassConditions, Condition{Attribute: IntrinsicSpanStartTimeAttribute}) + } + // We don't need to extract conditions from the comparison expression + // because we're already selecting all. +} + +func (m *MetricsCompare) init(q *tempopb.QueryRangeRequest, mode AggregateMode) { + switch mode { + case AggregateModeRaw: + m.qstart = q.Start + m.qend = q.End + m.qstep = q.Step + m.len = IntervalCount(q.Start, q.End, q.Step) + m.baselines = make(map[Attribute]map[Static][]float64) + m.selections = make(map[Attribute]map[Static][]float64) + m.baselineTotals = make(map[Attribute][]float64) + m.selectionTotals = make(map[Attribute][]float64) + + case AggregateModeSum: + m.seriesAgg = NewSimpleAdditionCombiner(q) + return + + case AggregateModeFinal: + m.seriesAgg = NewBaselineAggregator(q, m.topN) + return + } +} + +func (m *MetricsCompare) observe(span Span) { + // For performance, MetricsCompare doesn't use the Range/StepAggregator abstractions. + // This lets us: + // * Include the same attribute value in multiple series. This doesn't fit within + // the existing by() grouping or even the potential byeach() (which was in this branch and then deleted) + // * Avoid reading the span start time twice, once for the selection window filter, and + // then again instead of StepAggregator. 
+ // TODO - It would be nice to use those abstractions, area for future improvement + st := span.StartTimeUnixNanos() + i := IntervalOf(st, m.qstart, m.qend, m.qstep) + + // Determine if this span is inside the selection + isSelection := StaticFalse + if m.start > 0 && m.end > 0 { + // Timestamp filtering + if st >= uint64(m.start) && st < uint64(m.end) { + isSelection, _ = m.f.Expression.execute(span) + } + } else { + // No timestamp filtering + isSelection, _ = m.f.Expression.execute(span) + } + + // Choose destination buffers + dest := m.baselines + destTotals := m.baselineTotals + if isSelection == StaticTrue { + dest = m.selections + destTotals = m.selectionTotals + } + + // Increment values for all attributes of this span + span.AllAttributesFunc(func(a Attribute, v Static) { + // We don't group by attributes of these types because the + // cardinality isn't useful. + switch v.Type { + case TypeDuration: + return + } + + // These attributes get pulled back by select all but we never + // group by them because the cardinality isn't useful. 
+ switch a { + case IntrinsicSpanStartTimeAttribute, + IntrinsicTraceIDAttribute: + return + } + + values, ok := dest[a] + if !ok { + values = make(map[Static][]float64, m.len) + dest[a] = values + } + + counts, ok := values[v] + if !ok { + counts = make([]float64, m.len) + values[v] = counts + } + counts[i]++ + + // TODO - It's probably faster to aggregate these at the end + // instead of incrementing in the hotpath twice + totals, ok := destTotals[a] + if !ok { + totals = make([]float64, m.len) + destTotals[a] = totals + } + totals[i]++ + }) +} + +func (m *MetricsCompare) observeSeries(ss []*tempopb.TimeSeries) { + m.seriesAgg.Combine(ss) +} + +func (m *MetricsCompare) result() SeriesSet { + // In the other modes return these results + if m.seriesAgg != nil { + return m.seriesAgg.Results() + } + + var ( + top = topN[Static]{} + ss = make(SeriesSet) + erred = make(map[Attribute]struct{}) + ) + + add := func(ls Labels, counts []float64) { + ss[ls.String()] = TimeSeries{ + Labels: ls, + Values: counts, + } + } + + addValues := func(prefix Label, data map[Attribute]map[Static][]float64) { + for a, values := range data { + // Compute topN values for this attribute + top.reset() + for v, counts := range values { + top.add(v, counts) + } + + top.get(m.topN, func(v Static) { + add(Labels{ + prefix, + {Name: a.String(), Value: v}, + }, values[v]) + }) + + if len(values) > m.topN { + erred[a] = struct{}{} + } + } + } + + addValues(internalLabelTypeBaseline, m.baselines) + addValues(internalLabelTypeSelection, m.selections) + + // Add errors for attributes that hit the limit in either area + for a := range erred { + add(Labels{ + internalLabelErrorTooManyValues, + {Name: a.String()}, + }, nil) + } + + addTotals := func(prefix Label, data map[Attribute][]float64) { + for a, counts := range data { + add(Labels{ + prefix, + {Name: a.String()}, + }, counts) + } + } + + addTotals(internalLabelTypeBaselineTotal, m.baselineTotals) + addTotals(internalLabelTypeSelectionTotal, 
m.selectionTotals) + + return ss +} + +func (m *MetricsCompare) validate() error { + err := m.f.validate() + if err != nil { + return err + } + + if m.topN <= 0 { + return fmt.Errorf("compare() top number of values must be integer greater than 0") + } + + if m.start == 0 && m.end == 0 { + return nil + } + + if m.start <= 0 || m.end <= 0 { + return fmt.Errorf("compare() timestamps must be positive integer unix nanoseconds") + } + if m.end <= m.start { + return fmt.Errorf("compare() end timestamp must be greater than start timestamp") + } + return nil +} + +func (m *MetricsCompare) String() string { + return "compare(" + m.f.String() + "}" +} + +var _ metricsFirstStageElement = (*MetricsCompare)(nil) + +// BaselineAggregator is a special series combiner for the compare() function. +// It resplits job-level results into baseline and selection buffers, and if +// an attribute reached max cardinality at the job-level, it will be marked +// as such at the query-level. +type BaselineAggregator struct { + topN int + len int + start, end, step uint64 + baseline map[string]map[Static]TimeSeries + selection map[string]map[Static]TimeSeries + baselineTotals map[string]map[Static]TimeSeries + selectionTotals map[string]map[Static]TimeSeries + maxed map[string]struct{} +} + +func NewBaselineAggregator(req *tempopb.QueryRangeRequest, topN int) *BaselineAggregator { + return &BaselineAggregator{ + baseline: make(map[string]map[Static]TimeSeries), + selection: make(map[string]map[Static]TimeSeries), + baselineTotals: make(map[string]map[Static]TimeSeries), + selectionTotals: make(map[string]map[Static]TimeSeries), + maxed: make(map[string]struct{}), + len: IntervalCount(req.Start, req.End, req.Step), + start: req.Start, + end: req.End, + step: req.Step, + topN: topN, + } +} + +func (b *BaselineAggregator) Combine(ss []*tempopb.TimeSeries) { + for _, s := range ss { + var metaType string + var err string + var a string + var v Static + + // Scan all labels + for _, l := range 
s.Labels { + switch l.Key { + case internalLabelMetaType: + metaType = l.Value.GetStringValue() + case internalLabelError: + err = l.Value.GetStringValue() + default: + a = l.Key + v = StaticFromAnyValue(l.Value) + } + } + + // Check for errors on this attribute + if err != "" { + if err == internalErrorTooManyValues { + // A sub-job reached max values for this attribute. + // Record the error + b.maxed[a] = struct{}{} + } + // Skip remaining processing regardless of error type + continue + } + + // Merge this time series into the destination buffer + // based on meta type + var dest map[string]map[Static]TimeSeries + switch metaType { + case internalMetaTypeBaseline: + dest = b.baseline + case internalMetaTypeSelection: + dest = b.selection + case internalMetaTypeBaselineTotal: + dest = b.baselineTotals + case internalMetaTypeSelectionTotal: + dest = b.selectionTotals + default: + // Unknown type, ignore + continue + } + + attr, ok := dest[a] + if !ok { + attr = make(map[Static]TimeSeries) + dest[a] = attr + } + + val, ok := attr[v] + if !ok { + val = TimeSeries{ + Values: make([]float64, b.len), + } + attr[v] = val + } + + if len(attr) > b.topN { + // This attribute just reached max cardinality overall (not within a sub-job) + // Record the error + b.maxed[a] = struct{}{} + } + + for _, sample := range s.Samples { + j := IntervalOfMs(sample.TimestampMs, b.start, b.end, b.step) + if j >= 0 && j < len(val.Values) { + val.Values[j] += sample.Value + } + } + } +} + +func (b *BaselineAggregator) Results() SeriesSet { + output := make(SeriesSet) + topN := &topN[Static]{} + + addSeries := func(prefix Label, name string, value Static, samples []float64) { + ls := Labels{ + prefix, + {Name: name, Value: value}, + } + output[ls.String()] = TimeSeries{ + Labels: ls, + Values: samples, + } + } + + do := func(buffer map[string]map[Static]TimeSeries, prefix Label) { + for a, m := range buffer { + + topN.reset() + for v, ts := range m { + topN.add(v, ts.Values) + } + + 
topN.get(b.topN, func(key Static) { + addSeries(prefix, a, key, m[key].Values) + }) + } + } + + do(b.baseline, internalLabelTypeBaseline) + do(b.selection, internalLabelTypeSelection) + do(b.baselineTotals, internalLabelTypeBaselineTotal) + do(b.selectionTotals, internalLabelTypeSelectionTotal) + + // Add series for every attribute that exceeded max value. + for a := range b.maxed { + addSeries(internalLabelErrorTooManyValues, a, NewStaticNil(), nil) + } + + return output +} + +var _ SeriesAggregator = (*BaselineAggregator)(nil) + +// topN is a helper struct that gets the topN keys based on total sum +type topN[T any] struct { + entries []struct { + key T + total float64 + } +} + +func (t *topN[T]) add(key T, values []float64) { + sum := 0.0 + for _, v := range values { + sum += v + } + t.entries = append(t.entries, struct { + key T + total float64 + }{key, sum}) +} + +// get the top N values. Given as a callback to avoid allocating. +// bool result indicates if there were more than N values +func (t *topN[T]) get(n int, cb func(key T)) { + if len(t.entries) <= n { + // <= N, no need to sort + for _, e := range t.entries { + cb(e.key) + } + return + } + + sort.Slice(t.entries, func(i, j int) bool { + return t.entries[i].total > t.entries[j].total // Sort descending + }) + + for i := 0; i < n; i++ { + cb(t.entries[i].key) + } +} + +func (t *topN[T]) reset() { + t.entries = t.entries[:0] +} diff --git a/pkg/traceql/enum_statics.go b/pkg/traceql/enum_statics.go index 43d04f77c1a..c4b5be4997f 100644 --- a/pkg/traceql/enum_statics.go +++ b/pkg/traceql/enum_statics.go @@ -95,3 +95,8 @@ func (k Kind) String() string { return fmt.Sprintf("kind(%d)", k) } + +var ( + StaticTrue = NewStaticBool(true) + StaticFalse = NewStaticBool(false) +) diff --git a/pkg/traceql/expr.y b/pkg/traceql/expr.y index 7169256647f..d9c39c63650 100644 --- a/pkg/traceql/expr.y +++ b/pkg/traceql/expr.y @@ -29,7 +29,7 @@ import ( wrappedScalarPipeline Pipeline scalarPipeline Pipeline aggregate 
Aggregate - metricsAggregation *MetricsAggregate + metricsAggregation metricsFirstStageElement fieldExpression FieldExpression static Static @@ -98,7 +98,7 @@ import ( COUNT AVG MAX MIN SUM BY COALESCE SELECT END_ATTRIBUTE - RATE COUNT_OVER_TIME QUANTILE_OVER_TIME HISTOGRAM_OVER_TIME + RATE COUNT_OVER_TIME QUANTILE_OVER_TIME HISTOGRAM_OVER_TIME COMPARE WITH // Operators are listed with increasing precedence. @@ -300,6 +300,9 @@ metricsAggregation: | QUANTILE_OVER_TIME OPEN_PARENS attribute COMMA numericList CLOSE_PARENS BY OPEN_PARENS attributeList CLOSE_PARENS { $$ = newMetricsAggregateQuantileOverTime($3, $5, $9) } | HISTOGRAM_OVER_TIME OPEN_PARENS attribute CLOSE_PARENS { $$ = newMetricsAggregateHistogramOverTime($3, nil) } | HISTOGRAM_OVER_TIME OPEN_PARENS attribute CLOSE_PARENS BY OPEN_PARENS attributeList CLOSE_PARENS { $$ = newMetricsAggregateHistogramOverTime($3, $7) } + | COMPARE OPEN_PARENS spansetFilter CLOSE_PARENS { $$ = newMetricsCompare($3, 10, 0, 0)} + | COMPARE OPEN_PARENS spansetFilter COMMA INTEGER CLOSE_PARENS { $$ = newMetricsCompare($3, $5, 0, 0)} + | COMPARE OPEN_PARENS spansetFilter COMMA INTEGER COMMA INTEGER COMMA INTEGER CLOSE_PARENS { $$ = newMetricsCompare($3, $5, $7, $9)} ; // ********************** diff --git a/pkg/traceql/expr.y.go b/pkg/traceql/expr.y.go index ce0ca54ebad..c2458817be1 100644 --- a/pkg/traceql/expr.y.go +++ b/pkg/traceql/expr.y.go @@ -34,7 +34,7 @@ type yySymType struct { wrappedScalarPipeline Pipeline scalarPipeline Pipeline aggregate Aggregate - metricsAggregation *MetricsAggregate + metricsAggregation metricsFirstStageElement fieldExpression FieldExpression static Static @@ -115,37 +115,38 @@ const RATE = 57402 const COUNT_OVER_TIME = 57403 const QUANTILE_OVER_TIME = 57404 const HISTOGRAM_OVER_TIME = 57405 -const WITH = 57406 -const PIPE = 57407 -const AND = 57408 -const OR = 57409 -const EQ = 57410 -const NEQ = 57411 -const LT = 57412 -const LTE = 57413 -const GT = 57414 -const GTE = 57415 -const NRE = 57416 
-const RE = 57417 -const DESC = 57418 -const ANCE = 57419 -const SIBL = 57420 -const NOT_CHILD = 57421 -const NOT_PARENT = 57422 -const NOT_DESC = 57423 -const NOT_ANCE = 57424 -const UNION_CHILD = 57425 -const UNION_PARENT = 57426 -const UNION_DESC = 57427 -const UNION_ANCE = 57428 -const UNION_SIBL = 57429 -const ADD = 57430 -const SUB = 57431 -const NOT = 57432 -const MUL = 57433 -const DIV = 57434 -const MOD = 57435 -const POW = 57436 +const COMPARE = 57406 +const WITH = 57407 +const PIPE = 57408 +const AND = 57409 +const OR = 57410 +const EQ = 57411 +const NEQ = 57412 +const LT = 57413 +const LTE = 57414 +const GT = 57415 +const GTE = 57416 +const NRE = 57417 +const RE = 57418 +const DESC = 57419 +const ANCE = 57420 +const SIBL = 57421 +const NOT_CHILD = 57422 +const NOT_PARENT = 57423 +const NOT_DESC = 57424 +const NOT_ANCE = 57425 +const UNION_CHILD = 57426 +const UNION_PARENT = 57427 +const UNION_DESC = 57428 +const UNION_ANCE = 57429 +const UNION_SIBL = 57430 +const ADD = 57431 +const SUB = 57432 +const NOT = 57433 +const MUL = 57434 +const DIV = 57435 +const MOD = 57436 +const POW = 57437 var yyToknames = [...]string{ "$end", @@ -211,6 +212,7 @@ var yyToknames = [...]string{ "COUNT_OVER_TIME", "QUANTILE_OVER_TIME", "HISTOGRAM_OVER_TIME", + "COMPARE", "WITH", "PIPE", "AND", @@ -254,158 +256,158 @@ var yyExca = [...]int{ -1, 1, 1, -1, -2, 0, - -1, 282, + -1, 284, 13, 86, -2, 94, } const yyPrivate = 57344 -const yyLast = 952 +const yyLast = 949 var yyAct = [...]int{ - 101, 5, 100, 6, 99, 8, 221, 7, 98, 18, - 202, 67, 240, 271, 13, 280, 2, 222, 94, 229, - 230, 231, 240, 90, 70, 66, 87, 88, 89, 90, - 77, 317, 149, 198, 150, 197, 153, 30, 151, 227, - 228, 29, 229, 230, 231, 240, 74, 75, 76, 77, - 178, 180, 181, 182, 183, 184, 185, 186, 187, 188, - 189, 190, 191, 192, 193, 194, 195, 314, 85, 86, - 326, 87, 88, 89, 90, 325, 308, 313, 72, 73, - 204, 74, 75, 76, 77, 85, 86, 323, 87, 88, - 89, 90, 197, 225, 307, 224, 306, 223, 305, 212, - 214, 215, 216, 217, 218, 
219, 348, 338, 330, 329, - 220, 202, 263, 264, 243, 244, 245, 262, 200, 332, - 241, 242, 232, 233, 234, 235, 236, 237, 239, 238, - 241, 242, 232, 233, 234, 235, 236, 237, 239, 238, - 331, 249, 227, 228, 198, 229, 230, 231, 240, 353, - 287, 322, 227, 228, 226, 229, 230, 231, 240, 319, - 277, 318, 266, 267, 268, 269, 78, 79, 80, 81, - 82, 83, 265, 278, 256, 201, 257, 259, 260, 350, - 258, 277, 250, 251, 351, 287, 85, 86, 261, 87, - 88, 89, 90, 72, 73, 343, 74, 75, 76, 77, - 149, 17, 150, 179, 153, 334, 151, 333, 282, 241, - 242, 232, 233, 234, 235, 236, 237, 239, 238, 345, - 287, 284, 344, 287, 342, 341, 315, 316, 278, 286, - 287, 227, 228, 279, 229, 230, 231, 240, 276, 275, - 274, 273, 205, 161, 147, 146, 288, 289, 290, 291, + 101, 5, 100, 6, 99, 8, 222, 7, 272, 98, + 12, 67, 18, 13, 282, 2, 241, 90, 223, 94, + 77, 319, 198, 70, 66, 228, 229, 198, 230, 231, + 232, 241, 149, 199, 150, 30, 153, 29, 151, 85, + 86, 329, 87, 88, 89, 90, 230, 231, 232, 241, + 179, 181, 182, 183, 184, 185, 186, 187, 188, 189, + 190, 191, 192, 193, 194, 195, 196, 316, 72, 73, + 328, 74, 75, 76, 77, 326, 310, 315, 309, 308, + 199, 205, 87, 88, 89, 90, 74, 75, 76, 77, + 307, 356, 343, 226, 333, 225, 332, 224, 213, 215, + 216, 217, 218, 219, 220, 264, 265, 263, 364, 289, + 250, 221, 360, 289, 361, 244, 245, 246, 334, 17, + 365, 242, 243, 233, 234, 235, 236, 237, 238, 240, + 239, 242, 243, 233, 234, 235, 236, 237, 238, 240, + 239, 68, 11, 228, 229, 335, 230, 231, 232, 241, + 325, 251, 252, 228, 229, 321, 230, 231, 232, 241, + 320, 279, 266, 267, 268, 269, 270, 353, 289, 352, + 289, 350, 351, 280, 233, 234, 235, 236, 237, 238, + 240, 239, 279, 202, 19, 20, 21, 17, 17, 180, + 159, 359, 363, 203, 228, 229, 349, 230, 231, 232, + 241, 149, 339, 150, 338, 153, 281, 151, 348, 347, + 336, 337, 284, 204, 207, 208, 209, 210, 211, 212, + 278, 286, 317, 318, 288, 289, 277, 276, 280, 23, + 26, 24, 25, 27, 14, 160, 15, 275, 154, 155, + 156, 157, 158, 274, 206, 162, 358, 200, 290, 291, 292, 293, 294, 295, 
296, 297, 298, 299, 300, 301, - 302, 303, 19, 20, 21, 145, 17, 144, 158, 143, - 142, 92, 91, 272, 84, 252, 225, 225, 224, 224, - 223, 223, 253, 67, 254, 67, 71, 310, 225, 255, - 224, 309, 223, 320, 321, 248, 70, 247, 70, 284, - 139, 140, 141, 347, 346, 324, 246, 23, 26, 24, - 25, 27, 14, 159, 15, 28, 154, 155, 156, 157, - 337, 336, 270, 335, 69, 149, 328, 150, 16, 153, - 327, 151, 4, 148, 225, 225, 224, 224, 223, 223, - 339, 340, 12, 10, 225, 22, 224, 152, 223, 1, - 349, 225, 0, 224, 0, 223, 0, 352, 102, 103, - 104, 108, 131, 0, 93, 95, 0, 0, 107, 105, - 106, 110, 109, 111, 112, 113, 114, 115, 116, 117, - 118, 119, 120, 121, 122, 124, 123, 125, 126, 0, - 127, 128, 129, 130, 0, 68, 11, 134, 132, 133, - 135, 136, 137, 138, 312, 102, 103, 104, 108, 131, + 302, 303, 304, 305, 147, 146, 145, 144, 22, 85, + 86, 143, 87, 88, 89, 90, 142, 226, 226, 225, + 225, 224, 224, 92, 253, 67, 91, 67, 344, 324, + 226, 254, 225, 255, 224, 322, 323, 70, 256, 70, + 286, 48, 53, 355, 354, 50, 273, 49, 327, 57, + 312, 51, 52, 54, 55, 56, 59, 58, 60, 61, + 64, 63, 62, 84, 139, 140, 141, 330, 149, 331, + 150, 311, 153, 249, 151, 71, 342, 341, 248, 226, + 226, 225, 225, 224, 224, 345, 346, 247, 28, 271, + 226, 340, 225, 69, 224, 16, 357, 4, 148, 10, + 226, 152, 225, 1, 224, 0, 362, 102, 103, 104, + 108, 131, 0, 93, 95, 0, 0, 107, 105, 106, + 110, 109, 111, 112, 113, 114, 115, 116, 117, 118, + 119, 120, 121, 122, 124, 123, 125, 126, 0, 127, + 128, 129, 130, 201, 0, 0, 134, 132, 133, 135, + 136, 137, 138, 314, 0, 102, 103, 104, 108, 131, 0, 0, 95, 0, 0, 107, 105, 106, 110, 109, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 124, 123, 125, 126, 0, 127, 128, 129, - 130, 311, 96, 97, 134, 132, 133, 135, 136, 137, - 138, 304, 0, 0, 0, 0, 0, 241, 242, 232, - 233, 234, 235, 236, 237, 239, 238, 203, 206, 207, - 208, 209, 210, 211, 0, 0, 0, 0, 0, 227, - 228, 285, 229, 230, 231, 240, 0, 0, 0, 96, - 97, 0, 0, 0, 241, 242, 232, 233, 234, 235, - 236, 237, 239, 238, 241, 242, 232, 233, 234, 
235, - 236, 237, 239, 238, 0, 0, 227, 228, 0, 229, - 230, 231, 240, 0, 0, 0, 227, 228, 0, 229, - 230, 231, 240, 0, 241, 242, 232, 233, 234, 235, - 236, 237, 239, 238, 232, 233, 234, 235, 236, 237, - 239, 238, 199, 0, 0, 0, 227, 228, 0, 229, - 230, 231, 240, 0, 227, 228, 0, 229, 230, 231, - 240, 78, 79, 80, 81, 82, 83, 196, 78, 79, - 80, 81, 82, 83, 0, 0, 0, 0, 0, 0, - 0, 85, 86, 0, 87, 88, 89, 90, 72, 73, - 0, 74, 75, 76, 77, 48, 53, 0, 0, 50, - 0, 49, 0, 57, 0, 51, 52, 54, 55, 56, - 59, 58, 60, 61, 64, 63, 62, 0, 0, 0, - 31, 36, 0, 0, 33, 0, 32, 0, 42, 0, - 34, 35, 37, 38, 39, 40, 41, 43, 44, 45, - 46, 47, 48, 53, 0, 0, 50, 0, 49, 0, - 57, 0, 51, 52, 54, 55, 56, 59, 58, 60, - 61, 64, 63, 62, 31, 36, 0, 0, 33, 0, - 32, 0, 42, 0, 34, 35, 37, 38, 39, 40, - 41, 43, 44, 45, 46, 47, 19, 20, 21, 0, - 17, 0, 158, 0, 19, 20, 21, 0, 17, 0, - 283, 0, 19, 20, 21, 50, 17, 49, 281, 57, - 0, 51, 52, 54, 55, 56, 59, 58, 60, 61, - 64, 63, 62, 0, 0, 0, 0, 0, 0, 0, - 0, 23, 26, 24, 25, 27, 14, 159, 15, 23, - 26, 24, 25, 27, 14, 0, 15, 23, 26, 24, - 25, 27, 14, 0, 15, 19, 20, 21, 0, 17, - 0, 9, 0, 0, 19, 20, 21, 0, 17, 22, - 158, 0, 19, 20, 21, 0, 0, 22, 213, 0, - 33, 0, 32, 0, 42, 22, 34, 35, 37, 38, - 39, 40, 41, 43, 44, 45, 46, 47, 0, 0, - 23, 26, 24, 25, 27, 14, 0, 15, 0, 23, - 26, 24, 25, 27, 0, 0, 0, 23, 26, 24, - 25, 27, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 22, 131, - 0, 0, 0, 0, 0, 0, 0, 22, 0, 65, - 3, 0, 0, 0, 0, 22, 0, 118, 119, 120, - 121, 122, 124, 123, 125, 126, 0, 127, 128, 129, - 130, 0, 0, 0, 134, 132, 133, 135, 136, 137, - 138, 160, 162, 163, 164, 165, 166, 167, 168, 169, - 170, 171, 172, 173, 174, 175, 176, 177, 102, 103, - 104, 108, 0, 0, 0, 205, 0, 0, 107, 105, - 106, 110, 109, 111, 112, 113, 114, 115, 116, 117, - 102, 103, 104, 108, 0, 0, 0, 0, 0, 0, - 107, 105, 106, 110, 109, 111, 112, 113, 114, 115, - 116, 117, + 130, 313, 96, 97, 134, 132, 133, 135, 136, 137, + 138, 306, 0, 0, 0, 0, 0, 242, 243, 233, + 234, 235, 236, 237, 238, 240, 239, 0, 0, 72, 
+ 73, 0, 74, 75, 76, 77, 0, 0, 0, 228, + 229, 287, 230, 231, 232, 241, 0, 0, 0, 227, + 96, 97, 0, 0, 0, 242, 243, 233, 234, 235, + 236, 237, 238, 240, 239, 242, 243, 233, 234, 235, + 236, 237, 238, 240, 239, 0, 0, 228, 229, 203, + 230, 231, 232, 241, 0, 0, 0, 228, 229, 0, + 230, 231, 232, 241, 0, 242, 243, 233, 234, 235, + 236, 237, 238, 240, 239, 242, 243, 233, 234, 235, + 236, 237, 238, 240, 239, 0, 0, 228, 229, 0, + 230, 231, 232, 241, 197, 0, 0, 228, 229, 0, + 230, 231, 232, 241, 0, 78, 79, 80, 81, 82, + 83, 0, 78, 79, 80, 81, 82, 83, 0, 78, + 79, 80, 81, 82, 83, 85, 86, 0, 87, 88, + 89, 90, 85, 86, 0, 87, 88, 89, 90, 72, + 73, 0, 74, 75, 76, 77, 0, 0, 31, 36, + 0, 0, 33, 0, 32, 0, 42, 0, 34, 35, + 37, 38, 39, 40, 41, 43, 44, 45, 46, 47, + 48, 53, 0, 0, 50, 0, 49, 0, 57, 0, + 51, 52, 54, 55, 56, 59, 58, 60, 61, 64, + 63, 62, 31, 36, 0, 0, 33, 0, 32, 0, + 42, 0, 34, 35, 37, 38, 39, 40, 41, 43, + 44, 45, 46, 47, 19, 20, 21, 0, 17, 0, + 159, 0, 19, 20, 21, 0, 17, 0, 285, 0, + 19, 20, 21, 50, 17, 49, 283, 57, 0, 51, + 52, 54, 55, 56, 59, 58, 60, 61, 64, 63, + 62, 0, 0, 0, 0, 0, 0, 0, 0, 23, + 26, 24, 25, 27, 14, 160, 15, 23, 26, 24, + 25, 27, 14, 0, 15, 23, 26, 24, 25, 27, + 14, 0, 15, 19, 20, 21, 0, 17, 0, 9, + 0, 19, 20, 21, 0, 17, 0, 159, 22, 19, + 20, 21, 0, 0, 0, 214, 22, 0, 0, 33, + 0, 32, 0, 42, 22, 34, 35, 37, 38, 39, + 40, 41, 43, 44, 45, 46, 47, 0, 23, 26, + 24, 25, 27, 14, 0, 15, 23, 26, 24, 25, + 27, 0, 0, 0, 23, 26, 24, 25, 27, 257, + 0, 258, 260, 261, 0, 259, 0, 0, 0, 0, + 0, 0, 0, 262, 0, 0, 131, 22, 0, 0, + 0, 0, 0, 0, 0, 22, 65, 3, 0, 0, + 0, 0, 0, 22, 118, 119, 120, 121, 122, 124, + 123, 125, 126, 0, 127, 128, 129, 130, 0, 0, + 0, 134, 132, 133, 135, 136, 137, 138, 161, 163, + 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, + 174, 175, 176, 177, 178, 102, 103, 104, 108, 0, + 0, 0, 206, 0, 0, 107, 105, 106, 110, 109, + 111, 112, 113, 114, 115, 116, 117, 102, 103, 104, + 108, 0, 0, 0, 0, 0, 0, 107, 105, 106, + 110, 109, 111, 112, 113, 114, 115, 
116, 117, } var yyPact = [...]int{ - 759, -23, -28, 608, -1000, 586, -1000, -1000, -1000, 759, - -1000, 510, -1000, 503, 260, 259, -1000, 353, -1000, -1000, - -1000, -1000, 294, 258, 257, 255, 253, 233, -1000, 232, - 256, 231, 231, 231, 231, 231, 231, 231, 231, 231, - 231, 231, 231, 231, 231, 231, 231, 231, 191, 191, - 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, - 191, 191, 191, 191, 191, 564, 79, 539, 105, 162, - 98, 903, 230, 230, 230, 230, 230, 230, -1000, -1000, - -1000, -1000, -1000, -1000, 776, 776, 776, 776, 776, 776, - 776, 400, 840, -1000, 143, 400, 400, 400, -1000, -1000, + 757, -28, -31, 605, -1000, 583, -1000, -1000, -1000, 757, + -1000, 530, -1000, 523, 274, 271, -1000, 362, -1000, -1000, + -1000, -1000, 318, 264, 259, 255, 254, 253, -1000, 252, + 178, 233, 233, 233, 233, 233, 233, 233, 233, 233, + 233, 233, 233, 233, 233, 233, 233, 233, 177, 177, + 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, + 177, 177, 177, 177, 177, 561, 14, 234, 390, 170, + 516, 900, 232, 232, 232, 232, 232, 232, -1000, -1000, + -1000, -1000, -1000, -1000, 773, 773, 773, 773, 773, 773, + 773, 410, 837, -1000, 488, 410, 410, 410, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, 302, 293, 291, 137, 248, 147, 88, 70, -1000, - -1000, -1000, 159, 400, 400, 400, 400, 269, -1000, 586, - -1000, -1000, -1000, -1000, 229, 228, 227, 226, 768, 221, - 720, 706, -1000, -1000, -1000, -1000, 720, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 645, 191, - -1000, -1000, -1000, -1000, 645, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 690, -1000, - -1000, -1000, -1000, -10, -1000, 698, -45, -45, -64, -64, - -64, -64, -20, 776, -65, -65, -71, -71, -71, -71, - 468, 216, -1000, -1000, -1000, -1000, -1000, 400, 400, 400, - 400, 400, 
400, 400, 400, 400, 400, 400, 400, 400, - 400, 400, 400, 438, -72, -72, 39, 37, 35, 17, - 287, 283, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, 428, 391, 64, 54, - 213, -1000, -37, 148, 146, 840, 840, 539, -3, 138, - 22, 706, -1000, 698, -32, -1000, -1000, 840, -72, -72, - -82, -82, -82, -49, -49, -49, -49, -49, -49, -49, - -49, -82, 476, 476, -1000, -1000, -1000, -1000, -1000, 16, - 11, -1000, -1000, -1000, -1000, -1000, 269, 925, 53, 52, - 126, 106, -1000, 690, -1000, -1000, -1000, -1000, -1000, 195, - 193, 314, 51, 840, 840, 211, -1000, -1000, 183, 209, - 206, 297, 50, 840, -1000, -1000, -1000, -1000, 167, 171, - 840, -1000, 136, -1000, + -1000, 343, 334, 329, 106, 257, 802, 78, 63, -1000, + -1000, -1000, 149, 410, 410, 410, 410, 302, -1000, 583, + -1000, -1000, -1000, -1000, 231, 225, 215, 214, 208, 765, + 194, 718, 704, -1000, -1000, -1000, -1000, 718, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 642, + 177, -1000, -1000, -1000, -1000, 642, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 688, + -1000, -1000, -1000, -1000, -21, -1000, 696, -6, -6, -75, + -75, -75, -75, -50, 773, -10, -10, -78, -78, -78, + -78, 478, 211, -1000, -1000, -1000, -1000, -1000, 410, 410, + 410, 410, 410, 410, 410, 410, 410, 410, 410, 410, + 410, 410, 410, 410, 448, -46, -46, 31, 20, 19, + 17, 327, 306, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, 438, 400, 64, + 54, 209, -1000, -48, 147, 142, 837, 837, 109, 234, + 180, 137, 9, 704, -1000, 696, -33, -1000, -1000, 837, + -46, -46, -79, -79, -79, -64, -64, -64, -64, -64, + -64, -64, -64, -79, 105, 105, -1000, -1000, -1000, -1000, + -1000, 11, -18, -1000, -1000, -1000, -1000, -1000, 302, 922, + 40, 38, 104, 132, 197, -1000, 688, -1000, -1000, -1000, + -1000, -1000, 192, 190, 330, 36, -1000, 282, 837, 837, + 195, -1000, -1000, 184, 158, 156, 154, 297, 
35, 837, + -1000, 240, -1000, -1000, -1000, -1000, 179, 99, 100, 837, + -1000, 186, 95, 107, -1000, -1000, } var yyPgo = [...]int{ - 0, 349, 7, 347, 5, 6, 1, 859, 343, 15, - 342, 3, 274, 333, 332, 395, 14, 328, 324, 9, - 18, 8, 4, 2, 0, 17, 323, 13, 322, 315, + 0, 363, 7, 361, 5, 6, 1, 856, 359, 14, + 10, 3, 323, 358, 357, 141, 13, 355, 353, 12, + 19, 9, 4, 2, 0, 18, 351, 8, 349, 348, } var yyR1 = [...]int{ @@ -420,14 +422,15 @@ var yyR1 = [...]int{ 15, 15, 15, 15, 15, 17, 18, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 19, 19, 19, 19, 19, 13, 13, 13, 13, - 13, 13, 13, 13, 27, 29, 28, 28, 20, 20, - 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, + 13, 13, 13, 13, 13, 13, 13, 27, 29, 28, + 28, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, - 20, 21, 21, 21, 21, 21, 21, 21, 21, 21, - 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, + 20, 20, 20, 20, 21, 21, 21, 21, 21, 21, + 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, - 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, - 24, 24, 24, 23, 23, 23, 23, 23, 23, + 22, 22, 22, 24, 24, 24, 24, 24, 24, 24, + 24, 24, 24, 24, 24, 24, 23, 23, 23, 23, + 23, 23, } var yyR2 = [...]int{ @@ -442,53 +445,55 @@ var yyR2 = [...]int{ 3, 3, 3, 3, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 2, 2, 2, 3, 4, 4, 4, 4, 3, 7, 3, 7, - 6, 10, 4, 8, 3, 4, 1, 3, 3, 3, + 6, 10, 4, 8, 4, 6, 10, 3, 4, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 2, 2, 1, 1, 1, + 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 3, 3, 3, 3, 4, 4, + 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, + 4, 4, } var yyChk = [...]int{ -1000, -1, -9, -7, -14, -6, -11, -2, -4, 12, -8, -15, -10, -16, 56, 58, -17, 10, -19, 6, - 7, 8, 89, 51, 53, 54, 52, 55, -29, 64, - 65, 66, 72, 70, 76, 77, 67, 78, 79, 80, - 81, 82, 74, 83, 84, 85, 86, 87, 66, 72, - 70, 76, 77, 67, 78, 79, 80, 74, 82, 81, - 83, 84, 87, 
86, 85, -7, -9, -6, -15, -18, - -16, -12, 88, 89, 91, 92, 93, 94, 68, 69, - 70, 71, 72, 73, -12, 88, 89, 91, 92, 93, - 94, 12, 12, 11, -20, 12, 89, 90, -21, -22, + 7, 8, 90, 51, 53, 54, 52, 55, -29, 65, + 66, 67, 73, 71, 77, 78, 68, 79, 80, 81, + 82, 83, 75, 84, 85, 86, 87, 88, 67, 73, + 71, 77, 78, 68, 79, 80, 81, 75, 83, 82, + 84, 85, 88, 87, 86, -7, -9, -6, -15, -18, + -16, -12, 89, 90, 92, 93, 94, 95, 69, 70, + 71, 72, 73, 74, -12, 89, 90, 92, 93, 94, + 95, 12, 12, 11, -20, 12, 90, 91, -21, -22, -23, -24, 5, 6, 7, 16, 17, 15, 8, 19, 18, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 33, 32, 34, 35, 37, 38, 39, 40, 9, 45, 46, 44, 47, 48, 49, 50, 6, 7, 8, 12, 12, 12, 12, 12, 12, -13, -6, - -11, -2, -3, -4, 60, 61, 62, 63, 12, 57, - -7, 12, -7, -7, -7, -7, -7, -7, -7, -7, - -7, -7, -7, -7, -7, -7, -7, -7, -6, 12, - -6, -6, -6, -6, -6, -6, -6, -6, -6, -6, - -6, -6, -6, -6, -6, -6, 13, 13, 65, 13, - 13, 13, 13, -15, -21, 12, -15, -15, -15, -15, - -15, -15, -16, 12, -16, -16, -16, -16, -16, -16, - -20, -5, -25, -22, -23, -24, 11, 88, 89, 91, - 92, 93, 68, 69, 70, 71, 72, 73, 75, 74, - 94, 66, 67, -20, -20, -20, 4, 4, 4, 4, - 45, 46, 27, 34, 36, 41, 27, 29, 33, 30, - 31, 41, 29, 42, 43, 13, -20, -20, -20, -20, - -28, -27, 4, 12, 12, 12, 12, -6, -16, 12, - -9, 12, -19, 12, -9, 13, 13, 14, -20, -20, + -11, -2, -3, -4, 60, 61, 62, 63, 64, 12, + 57, -7, 12, -7, -7, -7, -7, -7, -7, -7, + -7, -7, -7, -7, -7, -7, -7, -7, -7, -6, + 12, -6, -6, -6, -6, -6, -6, -6, -6, -6, + -6, -6, -6, -6, -6, -6, -6, 13, 13, 66, + 13, 13, 13, 13, -15, -21, 12, -15, -15, -15, + -15, -15, -15, -16, 12, -16, -16, -16, -16, -16, + -16, -20, -5, -25, -22, -23, -24, 11, 89, 90, + 92, 93, 94, 69, 70, 71, 72, 73, 74, 76, + 75, 95, 67, 68, -20, -20, -20, 4, 4, 4, + 4, 45, 46, 27, 34, 36, 41, 27, 29, 33, + 30, 31, 41, 29, 42, 43, 13, -20, -20, -20, + -20, -28, -27, 4, 12, 12, 12, 12, 12, -6, + -16, 12, -9, 12, -19, 12, -9, 13, 13, 14, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, - -20, -20, -20, 
-20, 13, 59, 59, 59, 59, 4, - 4, 13, 13, 13, 13, 13, 14, 68, 13, 13, - -25, -25, 13, 65, -25, 59, 59, -27, -21, 56, - 56, 14, 13, 12, 12, -26, 7, 6, 56, -5, - -5, 14, 13, 12, 13, 13, 7, 6, 56, -5, - 12, 13, -5, 13, + -20, -20, -20, -20, -20, -20, 13, 59, 59, 59, + 59, 4, 4, 13, 13, 13, 13, 13, 14, 69, + 13, 13, -25, -25, -10, 13, 66, -25, 59, 59, + -27, -21, 56, 56, 14, 13, 13, 14, 12, 12, + -26, 7, 6, 56, 6, -5, -5, 14, 13, 12, + 13, 14, 13, 13, 7, 6, 56, -5, 6, 12, + 13, 14, -5, 6, 13, 13, } var yyDef = [...]int{ @@ -501,33 +506,34 @@ var yyDef = [...]int{ 0, 0, 0, 0, 0, 0, 0, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 69, 70, 71, 72, 73, 74, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 66, 0, 0, 0, 0, 137, 138, - 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, - 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, - 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, - 169, 0, 0, 0, 0, 0, 0, 0, 0, 98, + 0, 0, 0, 66, 0, 0, 0, 0, 140, 141, + 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, + 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, + 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, + 172, 0, 0, 0, 0, 0, 0, 0, 0, 98, 99, 100, 0, 0, 0, 0, 0, 0, 4, 30, 31, 32, 33, 34, 0, 0, 0, 0, 0, 0, - 7, 0, 8, 9, 10, 11, 12, 13, 14, 15, - 16, 17, 18, 19, 20, 21, 22, 23, 48, 0, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, - 59, 60, 61, 62, 63, 64, 6, 25, 0, 47, - 77, 85, 87, 75, 76, 0, 78, 79, 80, 81, - 82, 83, 68, 0, 88, 89, 90, 91, 92, 93, - 0, 0, 41, 38, 39, 40, 67, 0, 0, 0, + 0, 7, 0, 8, 9, 10, 11, 12, 13, 14, + 15, 16, 17, 18, 19, 20, 21, 22, 23, 48, + 0, 49, 50, 51, 52, 53, 54, 55, 56, 57, + 58, 59, 60, 61, 62, 63, 64, 6, 25, 0, + 47, 77, 85, 87, 75, 76, 0, 78, 79, 80, + 81, 82, 83, 68, 0, 88, 89, 90, 91, 92, + 93, 0, 0, 41, 38, 39, 40, 67, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 135, 136, 0, 0, 0, 0, - 0, 0, 170, 171, 172, 173, 174, 175, 176, 177, - 178, 179, 180, 181, 182, 101, 0, 0, 0, 0, - 0, 116, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, -2, 0, 0, 35, 37, 0, 119, 120, - 121, 122, 123, 124, 
125, 126, 127, 128, 129, 130, - 131, 132, 133, 134, 118, 183, 184, 185, 186, 0, - 0, 102, 103, 104, 105, 115, 0, 0, 106, 108, - 0, 0, 36, 0, 42, 187, 188, 117, 114, 0, - 0, 0, 112, 0, 0, 0, 43, 44, 0, 0, - 0, 0, 110, 0, 107, 109, 45, 46, 0, 0, - 0, 113, 0, 111, + 0, 0, 0, 0, 0, 138, 139, 0, 0, 0, + 0, 0, 0, 173, 174, 175, 176, 177, 178, 179, + 180, 181, 182, 183, 184, 185, 101, 0, 0, 0, + 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, -2, 0, 0, 35, 37, 0, + 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, + 132, 133, 134, 135, 136, 137, 121, 186, 187, 188, + 189, 0, 0, 102, 103, 104, 105, 118, 0, 0, + 106, 108, 0, 0, 0, 36, 0, 42, 190, 191, + 120, 117, 0, 0, 0, 112, 114, 0, 0, 0, + 0, 43, 44, 0, 0, 0, 0, 0, 110, 0, + 115, 0, 107, 109, 45, 46, 0, 0, 0, 0, + 113, 0, 0, 0, 111, 116, } var yyTok1 = [...]int{ @@ -544,7 +550,7 @@ var yyTok2 = [...]int{ 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, - 92, 93, 94, + 92, 93, 94, 95, } var yyTok3 = [...]int{ 0, @@ -1566,452 +1572,470 @@ yydefault: yyVAL.metricsAggregation = newMetricsAggregateHistogramOverTime(yyDollar[3].attribute, yyDollar[7].attributeList) } case 114: - yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/traceql/expr.y:309 + yyDollar = yyS[yypt-4 : yypt+1] +//line pkg/traceql/expr.y:303 { - yyVAL.hint = newHint(yyDollar[1].staticStr, yyDollar[3].static) + yyVAL.metricsAggregation = newMetricsCompare(yyDollar[3].spansetFilter, 10, 0, 0) } case 115: - yyDollar = yyS[yypt-4 : yypt+1] -//line pkg/traceql/expr.y:313 + yyDollar = yyS[yypt-6 : yypt+1] +//line pkg/traceql/expr.y:304 { - yyVAL.hints = newHints(yyDollar[3].hintList) + yyVAL.metricsAggregation = newMetricsCompare(yyDollar[3].spansetFilter, yyDollar[5].staticInt, 0, 0) } case 116: - yyDollar = yyS[yypt-1 : yypt+1] -//line pkg/traceql/expr.y:317 + yyDollar = yyS[yypt-10 : yypt+1] +//line pkg/traceql/expr.y:305 { - yyVAL.hintList = []*Hint{yyDollar[1].hint} + yyVAL.metricsAggregation = 
newMetricsCompare(yyDollar[3].spansetFilter, yyDollar[5].staticInt, yyDollar[7].staticInt, yyDollar[9].staticInt) } case 117: yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/traceql/expr.y:318 +//line pkg/traceql/expr.y:312 { - yyVAL.hintList = append(yyDollar[1].hintList, yyDollar[3].hint) + yyVAL.hint = newHint(yyDollar[1].staticStr, yyDollar[3].static) } case 118: - yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/traceql/expr.y:326 + yyDollar = yyS[yypt-4 : yypt+1] +//line pkg/traceql/expr.y:316 { - yyVAL.fieldExpression = yyDollar[2].fieldExpression + yyVAL.hints = newHints(yyDollar[3].hintList) } case 119: - yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/traceql/expr.y:327 + yyDollar = yyS[yypt-1 : yypt+1] +//line pkg/traceql/expr.y:320 { - yyVAL.fieldExpression = newBinaryOperation(OpAdd, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.hintList = []*Hint{yyDollar[1].hint} } case 120: yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/traceql/expr.y:328 +//line pkg/traceql/expr.y:321 { - yyVAL.fieldExpression = newBinaryOperation(OpSub, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.hintList = append(yyDollar[1].hintList, yyDollar[3].hint) } case 121: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:329 { - yyVAL.fieldExpression = newBinaryOperation(OpMult, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.fieldExpression = yyDollar[2].fieldExpression } case 122: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:330 { - yyVAL.fieldExpression = newBinaryOperation(OpDiv, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpAdd, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 123: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:331 { - yyVAL.fieldExpression = newBinaryOperation(OpMod, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpSub, yyDollar[1].fieldExpression, 
yyDollar[3].fieldExpression) } case 124: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:332 { - yyVAL.fieldExpression = newBinaryOperation(OpEqual, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpMult, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 125: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:333 { - yyVAL.fieldExpression = newBinaryOperation(OpNotEqual, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpDiv, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 126: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:334 { - yyVAL.fieldExpression = newBinaryOperation(OpLess, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpMod, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 127: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:335 { - yyVAL.fieldExpression = newBinaryOperation(OpLessEqual, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpEqual, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 128: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:336 { - yyVAL.fieldExpression = newBinaryOperation(OpGreater, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpNotEqual, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 129: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:337 { - yyVAL.fieldExpression = newBinaryOperation(OpGreaterEqual, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpLess, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 130: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:338 { - yyVAL.fieldExpression = newBinaryOperation(OpRegex, yyDollar[1].fieldExpression, 
yyDollar[3].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpLessEqual, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 131: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:339 { - yyVAL.fieldExpression = newBinaryOperation(OpNotRegex, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpGreater, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 132: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:340 { - yyVAL.fieldExpression = newBinaryOperation(OpPower, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpGreaterEqual, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 133: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:341 { - yyVAL.fieldExpression = newBinaryOperation(OpAnd, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpRegex, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 134: yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:342 { - yyVAL.fieldExpression = newBinaryOperation(OpOr, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpNotRegex, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 135: - yyDollar = yyS[yypt-2 : yypt+1] + yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:343 { - yyVAL.fieldExpression = newUnaryOperation(OpSub, yyDollar[2].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpPower, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 136: - yyDollar = yyS[yypt-2 : yypt+1] + yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:344 { - yyVAL.fieldExpression = newUnaryOperation(OpNot, yyDollar[2].fieldExpression) + yyVAL.fieldExpression = newBinaryOperation(OpAnd, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 137: - yyDollar = yyS[yypt-1 : 
yypt+1] + yyDollar = yyS[yypt-3 : yypt+1] //line pkg/traceql/expr.y:345 { - yyVAL.fieldExpression = yyDollar[1].static + yyVAL.fieldExpression = newBinaryOperation(OpOr, yyDollar[1].fieldExpression, yyDollar[3].fieldExpression) } case 138: - yyDollar = yyS[yypt-1 : yypt+1] + yyDollar = yyS[yypt-2 : yypt+1] //line pkg/traceql/expr.y:346 { - yyVAL.fieldExpression = yyDollar[1].intrinsicField + yyVAL.fieldExpression = newUnaryOperation(OpSub, yyDollar[2].fieldExpression) } case 139: - yyDollar = yyS[yypt-1 : yypt+1] + yyDollar = yyS[yypt-2 : yypt+1] //line pkg/traceql/expr.y:347 { - yyVAL.fieldExpression = yyDollar[1].attributeField + yyVAL.fieldExpression = newUnaryOperation(OpNot, yyDollar[2].fieldExpression) } case 140: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:348 { - yyVAL.fieldExpression = yyDollar[1].scopedIntrinsicField + yyVAL.fieldExpression = yyDollar[1].static } case 141: yyDollar = yyS[yypt-1 : yypt+1] -//line pkg/traceql/expr.y:355 +//line pkg/traceql/expr.y:349 { - yyVAL.static = NewStaticString(yyDollar[1].staticStr) + yyVAL.fieldExpression = yyDollar[1].intrinsicField } case 142: yyDollar = yyS[yypt-1 : yypt+1] -//line pkg/traceql/expr.y:356 +//line pkg/traceql/expr.y:350 { - yyVAL.static = NewStaticInt(yyDollar[1].staticInt) + yyVAL.fieldExpression = yyDollar[1].attributeField } case 143: yyDollar = yyS[yypt-1 : yypt+1] -//line pkg/traceql/expr.y:357 +//line pkg/traceql/expr.y:351 { - yyVAL.static = NewStaticFloat(yyDollar[1].staticFloat) + yyVAL.fieldExpression = yyDollar[1].scopedIntrinsicField } case 144: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:358 { - yyVAL.static = NewStaticBool(true) + yyVAL.static = NewStaticString(yyDollar[1].staticStr) } case 145: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:359 { - yyVAL.static = NewStaticBool(false) + yyVAL.static = NewStaticInt(yyDollar[1].staticInt) } case 146: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:360 { - yyVAL.static = NewStaticNil() + 
yyVAL.static = NewStaticFloat(yyDollar[1].staticFloat) } case 147: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:361 { - yyVAL.static = NewStaticDuration(yyDollar[1].staticDuration) + yyVAL.static = NewStaticBool(true) } case 148: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:362 { - yyVAL.static = NewStaticStatus(StatusOk) + yyVAL.static = NewStaticBool(false) } case 149: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:363 { - yyVAL.static = NewStaticStatus(StatusError) + yyVAL.static = NewStaticNil() } case 150: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:364 { - yyVAL.static = NewStaticStatus(StatusUnset) + yyVAL.static = NewStaticDuration(yyDollar[1].staticDuration) } case 151: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:365 { - yyVAL.static = NewStaticKind(KindUnspecified) + yyVAL.static = NewStaticStatus(StatusOk) } case 152: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:366 { - yyVAL.static = NewStaticKind(KindInternal) + yyVAL.static = NewStaticStatus(StatusError) } case 153: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:367 { - yyVAL.static = NewStaticKind(KindServer) + yyVAL.static = NewStaticStatus(StatusUnset) } case 154: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:368 { - yyVAL.static = NewStaticKind(KindClient) + yyVAL.static = NewStaticKind(KindUnspecified) } case 155: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:369 { - yyVAL.static = NewStaticKind(KindProducer) + yyVAL.static = NewStaticKind(KindInternal) } case 156: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:370 { - yyVAL.static = NewStaticKind(KindConsumer) + yyVAL.static = NewStaticKind(KindServer) } case 157: yyDollar = yyS[yypt-1 : yypt+1] -//line pkg/traceql/expr.y:376 +//line pkg/traceql/expr.y:371 { - yyVAL.intrinsicField = NewIntrinsic(IntrinsicDuration) + yyVAL.static = NewStaticKind(KindClient) } case 158: yyDollar = yyS[yypt-1 : yypt+1] -//line 
pkg/traceql/expr.y:377 +//line pkg/traceql/expr.y:372 { - yyVAL.intrinsicField = NewIntrinsic(IntrinsicChildCount) + yyVAL.static = NewStaticKind(KindProducer) } case 159: yyDollar = yyS[yypt-1 : yypt+1] -//line pkg/traceql/expr.y:378 +//line pkg/traceql/expr.y:373 { - yyVAL.intrinsicField = NewIntrinsic(IntrinsicName) + yyVAL.static = NewStaticKind(KindConsumer) } case 160: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:379 { - yyVAL.intrinsicField = NewIntrinsic(IntrinsicStatus) + yyVAL.intrinsicField = NewIntrinsic(IntrinsicDuration) } case 161: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:380 { - yyVAL.intrinsicField = NewIntrinsic(IntrinsicStatusMessage) + yyVAL.intrinsicField = NewIntrinsic(IntrinsicChildCount) } case 162: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:381 { - yyVAL.intrinsicField = NewIntrinsic(IntrinsicKind) + yyVAL.intrinsicField = NewIntrinsic(IntrinsicName) } case 163: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:382 { - yyVAL.intrinsicField = NewIntrinsic(IntrinsicParent) + yyVAL.intrinsicField = NewIntrinsic(IntrinsicStatus) } case 164: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:383 { - yyVAL.intrinsicField = NewIntrinsic(IntrinsicTraceRootSpan) + yyVAL.intrinsicField = NewIntrinsic(IntrinsicStatusMessage) } case 165: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:384 { - yyVAL.intrinsicField = NewIntrinsic(IntrinsicTraceRootService) + yyVAL.intrinsicField = NewIntrinsic(IntrinsicKind) } case 166: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:385 { - yyVAL.intrinsicField = NewIntrinsic(IntrinsicTraceDuration) + yyVAL.intrinsicField = NewIntrinsic(IntrinsicParent) } case 167: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:386 { - yyVAL.intrinsicField = NewIntrinsic(IntrinsicNestedSetLeft) + yyVAL.intrinsicField = NewIntrinsic(IntrinsicTraceRootSpan) } case 168: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:387 { - 
yyVAL.intrinsicField = NewIntrinsic(IntrinsicNestedSetRight) + yyVAL.intrinsicField = NewIntrinsic(IntrinsicTraceRootService) } case 169: yyDollar = yyS[yypt-1 : yypt+1] //line pkg/traceql/expr.y:388 { - yyVAL.intrinsicField = NewIntrinsic(IntrinsicNestedSetParent) + yyVAL.intrinsicField = NewIntrinsic(IntrinsicTraceDuration) } case 170: - yyDollar = yyS[yypt-2 : yypt+1] -//line pkg/traceql/expr.y:393 + yyDollar = yyS[yypt-1 : yypt+1] +//line pkg/traceql/expr.y:389 { - yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicTraceDuration) + yyVAL.intrinsicField = NewIntrinsic(IntrinsicNestedSetLeft) } case 171: - yyDollar = yyS[yypt-2 : yypt+1] -//line pkg/traceql/expr.y:394 + yyDollar = yyS[yypt-1 : yypt+1] +//line pkg/traceql/expr.y:390 { - yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicTraceRootSpan) + yyVAL.intrinsicField = NewIntrinsic(IntrinsicNestedSetRight) } case 172: - yyDollar = yyS[yypt-2 : yypt+1] -//line pkg/traceql/expr.y:395 + yyDollar = yyS[yypt-1 : yypt+1] +//line pkg/traceql/expr.y:391 { - yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicTraceRootService) + yyVAL.intrinsicField = NewIntrinsic(IntrinsicNestedSetParent) } case 173: yyDollar = yyS[yypt-2 : yypt+1] //line pkg/traceql/expr.y:396 { - yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicTraceID) + yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicTraceDuration) } case 174: yyDollar = yyS[yypt-2 : yypt+1] -//line pkg/traceql/expr.y:398 +//line pkg/traceql/expr.y:397 { - yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicDuration) + yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicTraceRootSpan) } case 175: yyDollar = yyS[yypt-2 : yypt+1] -//line pkg/traceql/expr.y:399 +//line pkg/traceql/expr.y:398 { - yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicName) + yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicTraceRootService) } case 176: yyDollar = yyS[yypt-2 : yypt+1] -//line pkg/traceql/expr.y:400 +//line pkg/traceql/expr.y:399 { - yyVAL.scopedIntrinsicField = 
NewIntrinsic(IntrinsicKind) + yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicTraceID) } case 177: yyDollar = yyS[yypt-2 : yypt+1] //line pkg/traceql/expr.y:401 { - yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicStatus) + yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicDuration) } case 178: yyDollar = yyS[yypt-2 : yypt+1] //line pkg/traceql/expr.y:402 { - yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicStatusMessage) + yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicName) } case 179: yyDollar = yyS[yypt-2 : yypt+1] //line pkg/traceql/expr.y:403 { - yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicSpanID) + yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicKind) } case 180: yyDollar = yyS[yypt-2 : yypt+1] -//line pkg/traceql/expr.y:405 +//line pkg/traceql/expr.y:404 { - yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicEventName) + yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicStatus) } case 181: yyDollar = yyS[yypt-2 : yypt+1] -//line pkg/traceql/expr.y:407 +//line pkg/traceql/expr.y:405 { - yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicLinkTraceID) + yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicStatusMessage) } case 182: yyDollar = yyS[yypt-2 : yypt+1] +//line pkg/traceql/expr.y:406 + { + yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicSpanID) + } + case 183: + yyDollar = yyS[yypt-2 : yypt+1] //line pkg/traceql/expr.y:408 + { + yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicEventName) + } + case 184: + yyDollar = yyS[yypt-2 : yypt+1] +//line pkg/traceql/expr.y:410 + { + yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicLinkTraceID) + } + case 185: + yyDollar = yyS[yypt-2 : yypt+1] +//line pkg/traceql/expr.y:411 { yyVAL.scopedIntrinsicField = NewIntrinsic(IntrinsicLinkSpanID) } - case 183: + case 186: yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/traceql/expr.y:412 +//line pkg/traceql/expr.y:415 { yyVAL.attributeField = NewAttribute(yyDollar[2].staticStr) } - case 184: + case 187: yyDollar = yyS[yypt-3 : yypt+1] -//line 
pkg/traceql/expr.y:413 +//line pkg/traceql/expr.y:416 { yyVAL.attributeField = NewScopedAttribute(AttributeScopeResource, false, yyDollar[2].staticStr) } - case 185: + case 188: yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/traceql/expr.y:414 +//line pkg/traceql/expr.y:417 { yyVAL.attributeField = NewScopedAttribute(AttributeScopeSpan, false, yyDollar[2].staticStr) } - case 186: + case 189: yyDollar = yyS[yypt-3 : yypt+1] -//line pkg/traceql/expr.y:415 +//line pkg/traceql/expr.y:418 { yyVAL.attributeField = NewScopedAttribute(AttributeScopeNone, true, yyDollar[2].staticStr) } - case 187: + case 190: yyDollar = yyS[yypt-4 : yypt+1] -//line pkg/traceql/expr.y:416 +//line pkg/traceql/expr.y:419 { yyVAL.attributeField = NewScopedAttribute(AttributeScopeResource, true, yyDollar[3].staticStr) } - case 188: + case 191: yyDollar = yyS[yypt-4 : yypt+1] -//line pkg/traceql/expr.y:417 +//line pkg/traceql/expr.y:420 { yyVAL.attributeField = NewScopedAttribute(AttributeScopeSpan, true, yyDollar[3].staticStr) } diff --git a/pkg/traceql/lexer.go b/pkg/traceql/lexer.go index e93a78b9975..ff818b0f7cd 100644 --- a/pkg/traceql/lexer.go +++ b/pkg/traceql/lexer.go @@ -98,6 +98,7 @@ var tokens = map[string]int{ "count_over_time": COUNT_OVER_TIME, "quantile_over_time": QUANTILE_OVER_TIME, "histogram_over_time": HISTOGRAM_OVER_TIME, + "compare": COMPARE, "with": WITH, } diff --git a/pkg/traceql/storage.go b/pkg/traceql/storage.go index aa8520d9781..cbef7616aec 100644 --- a/pkg/traceql/storage.go +++ b/pkg/traceql/storage.go @@ -75,6 +75,7 @@ type FetchSpansRequest struct { // TODO: extend this to an arbitrary number of passes SecondPass SecondPassFn SecondPassConditions []Condition + SecondPassSelectAll bool // Ignore second pass conditions and select all attributes } func (f *FetchSpansRequest) appendCondition(c ...Condition) { @@ -103,6 +104,9 @@ type Span interface { // AllAttributes returns a map of all attributes for this span. 
AllAttributes should be used sparingly // and is expected to be significantly slower than AttributeFor. AllAttributes() map[Attribute]Static + // AllAttributesFunc is a way to access all attributes for this span, letting the span determine the + // optimal method. Avoids allocating a map like AllAttributes. + AllAttributesFunc(func(Attribute, Static)) ID() []byte StartTimeUnixNanos() uint64 diff --git a/pkg/traceqlmetrics/mocks.go b/pkg/traceqlmetrics/mocks.go index 0e77fa732c6..b7426a689c6 100644 --- a/pkg/traceqlmetrics/mocks.go +++ b/pkg/traceqlmetrics/mocks.go @@ -58,6 +58,12 @@ func (m *mockSpan) AttributeFor(a traceql.Attribute) (traceql.Static, bool) { return s, ok } +func (m *mockSpan) AllAttributesFunc(cb func(traceql.Attribute, traceql.Static)) { + for k, v := range m.attrs { + cb(k, v) + } +} + func (m *mockSpan) SiblingOf([]traceql.Span, []traceql.Span, bool, bool, []traceql.Span) []traceql.Span { return nil } diff --git a/tempodb/encoding/vparquet2/block_traceql.go b/tempodb/encoding/vparquet2/block_traceql.go index 1448f84c5ad..e042f384efd 100644 --- a/tempodb/encoding/vparquet2/block_traceql.go +++ b/tempodb/encoding/vparquet2/block_traceql.go @@ -43,6 +43,12 @@ func (s *span) AllAttributes() map[traceql.Attribute]traceql.Static { return s.attributes } +func (s *span) AllAttributesFunc(cb func(traceql.Attribute, traceql.Static)) { + for a, s := range s.attributes { + cb(a, s) + } +} + func (s *span) AttributeFor(a traceql.Attribute) (traceql.Static, bool) { atts := s.attributes static, ok := atts[a] @@ -743,6 +749,10 @@ func (b *backendBlock) Fetch(ctx context.Context, req traceql.FetchSpansRequest, return traceql.FetchSpansResponse{}, fmt.Errorf("conditions invalid: %w", err) } + if req.SecondPassSelectAll { + return traceql.FetchSpansResponse{}, common.ErrUnsupported + } + pf, rr, err := b.openForSearch(ctx, opts) if err != nil { return traceql.FetchSpansResponse{}, err diff --git a/tempodb/encoding/vparquet3/block_traceql.go 
b/tempodb/encoding/vparquet3/block_traceql.go index d108568d81d..ea1a837fc7b 100644 --- a/tempodb/encoding/vparquet3/block_traceql.go +++ b/tempodb/encoding/vparquet3/block_traceql.go @@ -80,6 +80,18 @@ func (s *span) AllAttributes() map[traceql.Attribute]traceql.Static { return atts } +func (s *span) AllAttributesFunc(cb func(traceql.Attribute, traceql.Static)) { + for _, a := range s.traceAttrs { + cb(a.a, a.s) + } + for _, a := range s.resourceAttrs { + cb(a.a, a.s) + } + for _, a := range s.spanAttrs { + cb(a.a, a.s) + } +} + func (s *span) AttributeFor(a traceql.Attribute) (traceql.Static, bool) { find := func(a traceql.Attribute, attrs []attrVal) *traceql.Static { if len(attrs) == 1 { @@ -818,7 +830,7 @@ var intrinsicColumnLookups = map[traceql.Intrinsic]struct { traceql.IntrinsicName: {intrinsicScopeSpan, traceql.TypeString, columnPathSpanName}, traceql.IntrinsicStatus: {intrinsicScopeSpan, traceql.TypeStatus, columnPathSpanStatusCode}, traceql.IntrinsicStatusMessage: {intrinsicScopeSpan, traceql.TypeString, columnPathSpanStatusMessage}, - traceql.IntrinsicDuration: {intrinsicScopeSpan, traceql.TypeDuration, columnPathDurationNanos}, + traceql.IntrinsicDuration: {intrinsicScopeSpan, traceql.TypeDuration, columnPathSpanDuration}, traceql.IntrinsicKind: {intrinsicScopeSpan, traceql.TypeKind, columnPathSpanKind}, traceql.IntrinsicSpanID: {intrinsicScopeSpan, traceql.TypeString, columnPathSpanID}, traceql.IntrinsicSpanStartTime: {intrinsicScopeSpan, traceql.TypeString, columnPathSpanStartTime}, @@ -831,7 +843,7 @@ var intrinsicColumnLookups = map[traceql.Intrinsic]struct { traceql.IntrinsicTraceRootService: {intrinsicScopeTrace, traceql.TypeString, columnPathRootServiceName}, traceql.IntrinsicTraceRootSpan: {intrinsicScopeTrace, traceql.TypeString, columnPathRootSpanName}, - traceql.IntrinsicTraceDuration: {intrinsicScopeTrace, traceql.TypeString, columnPathDurationNanos}, + traceql.IntrinsicTraceDuration: {intrinsicScopeTrace, traceql.TypeDuration, 
columnPathDurationNanos}, traceql.IntrinsicTraceID: {intrinsicScopeTrace, traceql.TypeString, columnPathTraceID}, traceql.IntrinsicTraceStartTime: {intrinsicScopeTrace, traceql.TypeDuration, columnPathStartTimeUnixNano}, @@ -1338,7 +1350,7 @@ func (i *mergeSpansetIterator) Close() { // V func fetch(ctx context.Context, req traceql.FetchSpansRequest, pf *parquet.File, rowGroups []parquet.RowGroup, dc backend.DedicatedColumns) (*spansetIterator, error) { - iter, err := createAllIterator(ctx, nil, req.Conditions, req.AllConditions, req.StartTimeUnixNanos, req.EndTimeUnixNanos, req.ShardID, req.ShardCount, rowGroups, pf, dc) + iter, err := createAllIterator(ctx, nil, req.Conditions, req.AllConditions, req.StartTimeUnixNanos, req.EndTimeUnixNanos, req.ShardID, req.ShardCount, rowGroups, pf, dc, false) if err != nil { return nil, fmt.Errorf("error creating iterator: %w", err) } @@ -1346,7 +1358,7 @@ func fetch(ctx context.Context, req traceql.FetchSpansRequest, pf *parquet.File, if req.SecondPass != nil { iter = newBridgeIterator(newRebatchIterator(iter), req.SecondPass) - iter, err = createAllIterator(ctx, iter, req.SecondPassConditions, false, 0, 0, req.ShardID, req.ShardCount, rowGroups, pf, dc) + iter, err = createAllIterator(ctx, iter, req.SecondPassConditions, false, 0, 0, req.ShardID, req.ShardCount, rowGroups, pf, dc, req.SecondPassSelectAll) if err != nil { return nil, fmt.Errorf("error creating second pass iterator: %w", err) } @@ -1394,7 +1406,7 @@ func categorizeConditions(conditions []traceql.Condition) (mingled bool, spanCon } func createAllIterator(ctx context.Context, primaryIter parquetquery.Iterator, conds []traceql.Condition, allConditions bool, start, end uint64, - shardID, shardCount uint32, rgs []parquet.RowGroup, pf *parquet.File, dc backend.DedicatedColumns, + shardID, shardCount uint32, rgs []parquet.RowGroup, pf *parquet.File, dc backend.DedicatedColumns, selectAll bool, ) (parquetquery.Iterator, error) { // categorizeConditions conditions into 
span-level or resource-level mingledConditions, spanConditions, resourceConditions, traceConditions, err := categorizeConditions(conds) @@ -1423,22 +1435,22 @@ func createAllIterator(ctx context.Context, primaryIter parquetquery.Iterator, c // one either resource or span. allConditions = allConditions && !mingledConditions - spanIter, err := createSpanIterator(makeIter, primaryIter, spanConditions, allConditions, dc) + spanIter, err := createSpanIterator(makeIter, primaryIter, spanConditions, allConditions, dc, selectAll) if err != nil { return nil, fmt.Errorf("creating span iterator: %w", err) } - resourceIter, err := createResourceIterator(makeIter, spanIter, resourceConditions, batchRequireAtLeastOneMatchOverall, allConditions, dc) + resourceIter, err := createResourceIterator(makeIter, spanIter, resourceConditions, batchRequireAtLeastOneMatchOverall, allConditions, dc, selectAll) if err != nil { return nil, fmt.Errorf("creating resource iterator: %w", err) } - return createTraceIterator(makeIter, resourceIter, traceConditions, start, end, shardID, shardCount, allConditions) + return createTraceIterator(makeIter, resourceIter, traceConditions, start, end, shardID, shardCount, allConditions, selectAll) } // createSpanIterator iterates through all span-level columns, groups them into rows representing // one span each. Spans are returned that match any of the given conditions. 
-func createSpanIterator(makeIter makeIterFn, primaryIter parquetquery.Iterator, conditions []traceql.Condition, allConditions bool, dedicatedColumns backend.DedicatedColumns) (parquetquery.Iterator, error) { +func createSpanIterator(makeIter makeIterFn, primaryIter parquetquery.Iterator, conditions []traceql.Condition, allConditions bool, dedicatedColumns backend.DedicatedColumns, selectAll bool) (parquetquery.Iterator, error) { var ( columnSelectAs = map[string]string{} columnPredicates = map[string][]parquetquery.Predicate{} @@ -1628,8 +1640,45 @@ func createSpanIterator(makeIter makeIterFn, primaryIter parquetquery.Iterator, genericConditions = append(genericConditions, cond) } + // SecondPass SelectAll + if selectAll { + for wellKnownAttr, entry := range wellKnownColumnLookups { + if entry.level != traceql.AttributeScopeSpan { + continue + } + + addPredicate(entry.columnPath, nil) + columnSelectAs[entry.columnPath] = wellKnownAttr + } + + for intrins, entry := range intrinsicColumnLookups { + if entry.scope != intrinsicScopeSpan { + continue + } + // These intrinsics aren't included in select all because they are not useful for filtering or grouping. 
+ switch intrins { + case traceql.IntrinsicSpanID, + traceql.IntrinsicSpanStartTime, + traceql.IntrinsicStructuralDescendant, + traceql.IntrinsicStructuralChild, + traceql.IntrinsicStructuralSibling, + traceql.IntrinsicNestedSetLeft, + traceql.IntrinsicNestedSetRight, + traceql.IntrinsicNestedSetParent: + continue + } + addPredicate(entry.columnPath, nil) + columnSelectAs[entry.columnPath] = entry.columnPath + } + + for k, v := range columnMapping.mapping { + addPredicate(v.ColumnPath, nil) + columnSelectAs[v.ColumnPath] = k + } + } + attrIter, err := createAttributeIterator(makeIter, genericConditions, DefinitionLevelResourceSpansILSSpanAttrs, - columnPathSpanAttrKey, columnPathSpanAttrString, columnPathSpanAttrInt, columnPathSpanAttrDouble, columnPathSpanAttrBool, allConditions) + columnPathSpanAttrKey, columnPathSpanAttrString, columnPathSpanAttrInt, columnPathSpanAttrDouble, columnPathSpanAttrBool, allConditions, selectAll) if err != nil { return nil, fmt.Errorf("creating span attribute iterator: %w", err) } @@ -1690,7 +1739,7 @@ func createSpanIterator(makeIter makeIterFn, primaryIter parquetquery.Iterator, // createResourceIterator iterates through all resourcespans-level (batch-level) columns, groups them into rows representing // one batch each. It builds on top of the span iterator, and turns the groups of spans and resource-level values into // spansets. Spansets are returned that match any of the given conditions. 
-func createResourceIterator(makeIter makeIterFn, spanIterator parquetquery.Iterator, conditions []traceql.Condition, requireAtLeastOneMatchOverall, allConditions bool, dedicatedColumns backend.DedicatedColumns) (parquetquery.Iterator, error) { +func createResourceIterator(makeIter makeIterFn, spanIterator parquetquery.Iterator, conditions []traceql.Condition, requireAtLeastOneMatchOverall, allConditions bool, dedicatedColumns backend.DedicatedColumns, selectAll bool) (parquetquery.Iterator, error) { var ( columnSelectAs = map[string]string{} columnPredicates = map[string][]parquetquery.Predicate{} @@ -1749,12 +1798,29 @@ func createResourceIterator(makeIter makeIterFn, spanIterator parquetquery.Itera genericConditions = append(genericConditions, cond) } + // SecondPass SelectAll + if selectAll { + for wellKnownAttr, entry := range wellKnownColumnLookups { + if entry.level != traceql.AttributeScopeResource { + continue + } + + addPredicate(entry.columnPath, nil) + columnSelectAs[entry.columnPath] = wellKnownAttr + } + + for k, v := range columnMapping.mapping { + addPredicate(v.ColumnPath, nil) + columnSelectAs[v.ColumnPath] = k + } + } + for columnPath, predicates := range columnPredicates { iters = append(iters, makeIter(columnPath, orIfNeeded(predicates), columnSelectAs[columnPath])) } attrIter, err := createAttributeIterator(makeIter, genericConditions, DefinitionLevelResourceAttrs, - columnPathResourceAttrKey, columnPathResourceAttrString, columnPathResourceAttrInt, columnPathResourceAttrDouble, columnPathResourceAttrBool, allConditions) + columnPathResourceAttrKey, columnPathResourceAttrString, columnPathResourceAttrInt, columnPathResourceAttrDouble, columnPathResourceAttrBool, allConditions, selectAll) if err != nil { return nil, fmt.Errorf("creating span attribute iterator: %w", err) } @@ -1792,7 +1858,7 @@ func createResourceIterator(makeIter makeIterFn, spanIterator parquetquery.Itera return parquetquery.NewLeftJoinIterator(DefinitionLevelResourceSpans, 
required, iters, batchCol, parquetquery.WithPool(pqSpansetPool)) } -func createTraceIterator(makeIter makeIterFn, resourceIter parquetquery.Iterator, conds []traceql.Condition, start, end uint64, shardID, shardCount uint32, allConditions bool) (parquetquery.Iterator, error) { +func createTraceIterator(makeIter makeIterFn, resourceIter parquetquery.Iterator, conds []traceql.Condition, start, end uint64, _, _ uint32, allConditions bool, selectAll bool) (parquetquery.Iterator, error) { traceIters := make([]parquetquery.Iterator, 0, 3) var err error @@ -1845,6 +1911,21 @@ func createTraceIterator(makeIter makeIterFn, resourceIter parquetquery.Iterator } } + if selectAll { + for intrins, entry := range intrinsicColumnLookups { + if entry.scope != intrinsicScopeTrace { + continue + } + // These intrinsics aren't included in select all because they are not useful for filtering or grouping. + switch intrins { + case traceql.IntrinsicTraceStartTime, + traceql.IntrinsicServiceStats: + continue + } + traceIters = append(traceIters, makeIter(entry.columnPath, nil, entry.columnPath)) + } + } + // order is interesting here. would it be more efficient to grab the span/resource conditions first // or the time range filtering first? 
traceIters = append(traceIters, resourceIter) @@ -2048,8 +2129,22 @@ func createBoolPredicate(op traceql.Operator, operands traceql.Operands) (parque func createAttributeIterator(makeIter makeIterFn, conditions []traceql.Condition, definitionLevel int, keyPath, strPath, intPath, floatPath, boolPath string, - allConditions bool, + allConditions bool, selectAll bool, ) (parquetquery.Iterator, error) { + if selectAll { + // Select all with no filtering + return parquetquery.NewLeftJoinIterator(definitionLevel, + []parquetquery.Iterator{makeIter(keyPath, nil, "key")}, + []parquetquery.Iterator{ + makeIter(strPath, nil, "string"), + makeIter(intPath, nil, "int"), + makeIter(floatPath, nil, "float"), + makeIter(boolPath, nil, "bool"), + }, + &attributeCollector{}, + parquetquery.WithPool(pqAttrPool)) + } + var ( attrKeys = []string{} attrStringPreds = []parquetquery.Predicate{} diff --git a/tempodb/encoding/vparquet4/block_traceql.go b/tempodb/encoding/vparquet4/block_traceql.go index fbff2f4312e..83c2f90446e 100644 --- a/tempodb/encoding/vparquet4/block_traceql.go +++ b/tempodb/encoding/vparquet4/block_traceql.go @@ -96,6 +96,18 @@ func (s *span) AllAttributes() map[traceql.Attribute]traceql.Static { return atts } +func (s *span) AllAttributesFunc(cb func(traceql.Attribute, traceql.Static)) { + for _, a := range s.traceAttrs { + cb(a.a, a.s) + } + for _, a := range s.resourceAttrs { + cb(a.a, a.s) + } + for _, a := range s.spanAttrs { + cb(a.a, a.s) + } +} + func (s *span) AttributeFor(a traceql.Attribute) (traceql.Static, bool) { find := func(a traceql.Attribute, attrs []attrVal) *traceql.Static { if len(attrs) == 1 { @@ -892,7 +904,7 @@ var intrinsicColumnLookups = map[traceql.Intrinsic]struct { traceql.IntrinsicName: {intrinsicScopeSpan, traceql.TypeString, columnPathSpanName}, traceql.IntrinsicStatus: {intrinsicScopeSpan, traceql.TypeStatus, columnPathSpanStatusCode}, traceql.IntrinsicStatusMessage: {intrinsicScopeSpan, traceql.TypeString, 
columnPathSpanStatusMessage}, - traceql.IntrinsicDuration: {intrinsicScopeSpan, traceql.TypeDuration, columnPathDurationNanos}, + traceql.IntrinsicDuration: {intrinsicScopeSpan, traceql.TypeDuration, columnPathSpanDuration}, traceql.IntrinsicKind: {intrinsicScopeSpan, traceql.TypeKind, columnPathSpanKind}, traceql.IntrinsicSpanID: {intrinsicScopeSpan, traceql.TypeString, columnPathSpanID}, traceql.IntrinsicSpanStartTime: {intrinsicScopeSpan, traceql.TypeString, columnPathSpanStartTime}, @@ -1414,7 +1426,7 @@ func (i *mergeSpansetIterator) Close() { // V func fetch(ctx context.Context, req traceql.FetchSpansRequest, pf *parquet.File, rowGroups []parquet.RowGroup, dc backend.DedicatedColumns) (*spansetIterator, error) { - iter, err := createAllIterator(ctx, nil, req.Conditions, req.AllConditions, req.StartTimeUnixNanos, req.EndTimeUnixNanos, req.ShardID, req.ShardCount, rowGroups, pf, dc) + iter, err := createAllIterator(ctx, nil, req.Conditions, req.AllConditions, req.StartTimeUnixNanos, req.EndTimeUnixNanos, req.ShardID, req.ShardCount, rowGroups, pf, dc, false) if err != nil { return nil, fmt.Errorf("error creating iterator: %w", err) } @@ -1422,7 +1434,7 @@ func fetch(ctx context.Context, req traceql.FetchSpansRequest, pf *parquet.File, if req.SecondPass != nil { iter = newBridgeIterator(newRebatchIterator(iter), req.SecondPass) - iter, err = createAllIterator(ctx, iter, req.SecondPassConditions, false, 0, 0, req.ShardID, req.ShardCount, rowGroups, pf, dc) + iter, err = createAllIterator(ctx, iter, req.SecondPassConditions, false, 0, 0, req.ShardID, req.ShardCount, rowGroups, pf, dc, req.SecondPassSelectAll) if err != nil { return nil, fmt.Errorf("error creating second pass iterator: %w", err) } @@ -1483,7 +1495,7 @@ func categorizeConditions(conditions []traceql.Condition) (*categorizedCondition } func createAllIterator(ctx context.Context, primaryIter parquetquery.Iterator, conditions []traceql.Condition, allConditions bool, start, end uint64, - shardID, 
shardCount uint32, rgs []parquet.RowGroup, pf *parquet.File, dc backend.DedicatedColumns, + shardID, shardCount uint32, rgs []parquet.RowGroup, pf *parquet.File, dc backend.DedicatedColumns, selectAll bool, ) (parquetquery.Iterator, error) { // categorize conditions by scope catConditions, mingledConditions, err := categorizeConditions(conditions) @@ -1533,17 +1545,17 @@ func createAllIterator(ctx context.Context, primaryIter parquetquery.Iterator, c innerIterators = append(innerIterators, linkIter) } - spanIter, err := createSpanIterator(makeIter, innerIterators, catConditions.span, allConditions, dc) + spanIter, err := createSpanIterator(makeIter, innerIterators, catConditions.span, allConditions, dc, selectAll) if err != nil { return nil, fmt.Errorf("creating span iterator: %w", err) } - resourceIter, err := createResourceIterator(makeIter, spanIter, catConditions.resource, batchRequireAtLeastOneMatchOverall, allConditions, dc) + resourceIter, err := createResourceIterator(makeIter, spanIter, catConditions.resource, batchRequireAtLeastOneMatchOverall, allConditions, dc, selectAll) if err != nil { return nil, fmt.Errorf("creating resource iterator: %w", err) } - return createTraceIterator(makeIter, resourceIter, catConditions.trace, start, end, shardID, shardCount, allConditions) + return createTraceIterator(makeIter, resourceIter, catConditions.trace, start, end, shardID, shardCount, allConditions, selectAll) } func createEventIterator(makeIter makeIterFn, conditions []traceql.Condition) (parquetquery.Iterator, error) { @@ -1605,7 +1617,7 @@ func createLinkIterator(makeIter makeIterFn, conditions []traceql.Condition) (pa // createSpanIterator iterates through all span-level columns, groups them into rows representing // one span each. Spans are returned that match any of the given conditions. 
-func createSpanIterator(makeIter makeIterFn, innerIterators []parquetquery.Iterator, conditions []traceql.Condition, allConditions bool, dedicatedColumns backend.DedicatedColumns) (parquetquery.Iterator, error) { +func createSpanIterator(makeIter makeIterFn, innerIterators []parquetquery.Iterator, conditions []traceql.Condition, allConditions bool, dedicatedColumns backend.DedicatedColumns, selectAll bool) (parquetquery.Iterator, error) { var ( columnSelectAs = map[string]string{} columnPredicates = map[string][]parquetquery.Predicate{} @@ -1795,8 +1807,45 @@ func createSpanIterator(makeIter makeIterFn, innerIterators []parquetquery.Itera genericConditions = append(genericConditions, cond) } + // SecondPass SelectAll + if selectAll { + for wellKnownAttr, entry := range wellKnownColumnLookups { + if entry.level != traceql.AttributeScopeSpan { + continue + } + + addPredicate(entry.columnPath, nil) + columnSelectAs[entry.columnPath] = wellKnownAttr + } + + for intrins, entry := range intrinsicColumnLookups { + if entry.scope != intrinsicScopeSpan { + continue + } + // These intrinsics aren't included in select all because they are not useful for filtering or grouping. 
+ switch intrins { + case traceql.IntrinsicSpanID, + traceql.IntrinsicSpanStartTime, + traceql.IntrinsicStructuralDescendant, + traceql.IntrinsicStructuralChild, + traceql.IntrinsicStructuralSibling, + traceql.IntrinsicNestedSetLeft, + traceql.IntrinsicNestedSetRight, + traceql.IntrinsicNestedSetParent: + continue + } + addPredicate(entry.columnPath, nil) + columnSelectAs[entry.columnPath] = entry.columnPath + } + + for k, v := range columnMapping.mapping { + addPredicate(v.ColumnPath, nil) + columnSelectAs[v.ColumnPath] = k + } + } + attrIter, err := createAttributeIterator(makeIter, genericConditions, DefinitionLevelResourceSpansILSSpanAttrs, - columnPathSpanAttrKey, columnPathSpanAttrString, columnPathSpanAttrInt, columnPathSpanAttrDouble, columnPathSpanAttrBool, allConditions) + columnPathSpanAttrKey, columnPathSpanAttrString, columnPathSpanAttrInt, columnPathSpanAttrDouble, columnPathSpanAttrBool, allConditions, selectAll) if err != nil { return nil, fmt.Errorf("creating span attribute iterator: %w", err) } @@ -1857,7 +1906,7 @@ func createSpanIterator(makeIter makeIterFn, innerIterators []parquetquery.Itera // createResourceIterator iterates through all resourcespans-level (batch-level) columns, groups them into rows representing // one batch each. It builds on top of the span iterator, and turns the groups of spans and resource-level values into // spansets. Spansets are returned that match any of the given conditions. 
-func createResourceIterator(makeIter makeIterFn, spanIterator parquetquery.Iterator, conditions []traceql.Condition, requireAtLeastOneMatchOverall, allConditions bool, dedicatedColumns backend.DedicatedColumns) (parquetquery.Iterator, error) { +func createResourceIterator(makeIter makeIterFn, spanIterator parquetquery.Iterator, conditions []traceql.Condition, requireAtLeastOneMatchOverall, allConditions bool, dedicatedColumns backend.DedicatedColumns, selectAll bool) (parquetquery.Iterator, error) { var ( columnSelectAs = map[string]string{} columnPredicates = map[string][]parquetquery.Predicate{} @@ -1916,12 +1965,29 @@ func createResourceIterator(makeIter makeIterFn, spanIterator parquetquery.Itera genericConditions = append(genericConditions, cond) } + // SecondPass SelectAll + if selectAll { + for wellKnownAttr, entry := range wellKnownColumnLookups { + if entry.level != traceql.AttributeScopeResource { + continue + } + + addPredicate(entry.columnPath, nil) + columnSelectAs[entry.columnPath] = wellKnownAttr + } + + for k, v := range columnMapping.mapping { + addPredicate(v.ColumnPath, nil) + columnSelectAs[v.ColumnPath] = k + } + } + for columnPath, predicates := range columnPredicates { iters = append(iters, makeIter(columnPath, orIfNeeded(predicates), columnSelectAs[columnPath])) } attrIter, err := createAttributeIterator(makeIter, genericConditions, DefinitionLevelResourceAttrs, - columnPathResourceAttrKey, columnPathResourceAttrString, columnPathResourceAttrInt, columnPathResourceAttrDouble, columnPathResourceAttrBool, allConditions) + columnPathResourceAttrKey, columnPathResourceAttrString, columnPathResourceAttrInt, columnPathResourceAttrDouble, columnPathResourceAttrBool, allConditions, selectAll) if err != nil { return nil, fmt.Errorf("creating span attribute iterator: %w", err) } @@ -1968,58 +2034,74 @@ func createServiceStatsIterator(makeIter makeIterFn) parquetquery.Iterator { return parquetquery.NewJoinIterator(DefinitionLevelServiceStats, 
serviceStatsIters, &serviceStatsCollector{}) } -func createTraceIterator(makeIter makeIterFn, resourceIter parquetquery.Iterator, conds []traceql.Condition, start, end uint64, shardID, shardCount uint32, allConditions bool) (parquetquery.Iterator, error) { +func createTraceIterator(makeIter makeIterFn, resourceIter parquetquery.Iterator, conds []traceql.Condition, start, end uint64, _, _ uint32, allConditions bool, selectAll bool) (parquetquery.Iterator, error) { traceIters := make([]parquetquery.Iterator, 0, 3) var err error - // add conditional iterators first. this way if someone searches for { traceDuration > 1s && span.foo = "bar"} the query will - // be sped up by searching for traceDuration first. note that we can only set the predicates if all conditions is true. - // otherwise we just pass the info up to the engine to make a choice - for _, cond := range conds { - switch cond.Attribute.Intrinsic { - case traceql.IntrinsicTraceID: - var pred parquetquery.Predicate - if allConditions { - pred, err = createBytesPredicate(cond.Op, cond.Operands, false) - if err != nil { - return nil, err - } - } - traceIters = append(traceIters, makeIter(columnPathTraceID, pred, columnPathTraceID)) - case traceql.IntrinsicTraceDuration: - var pred parquetquery.Predicate - if allConditions { - pred, err = createIntPredicate(cond.Op, cond.Operands) - if err != nil { - return nil, err - } + if selectAll { + for intrins, entry := range intrinsicColumnLookups { + if entry.scope != intrinsicScopeTrace { + continue } - traceIters = append(traceIters, makeIter(columnPathDurationNanos, pred, columnPathDurationNanos)) - case traceql.IntrinsicTraceStartTime: - if start == 0 && end == 0 { - traceIters = append(traceIters, makeIter(columnPathStartTimeUnixNano, nil, columnPathStartTimeUnixNano)) + // These intrinsics aren't included in select all because they are not + // useful for filtering or grouping. 
+ switch intrins { + case traceql.IntrinsicTraceStartTime, + traceql.IntrinsicServiceStats: + continue } - case traceql.IntrinsicTraceRootSpan: - var pred parquetquery.Predicate - if allConditions { - pred, err = createStringPredicate(cond.Op, cond.Operands) - if err != nil { - return nil, err + traceIters = append(traceIters, makeIter(entry.columnPath, nil, entry.columnPath)) + } + } else { + // add conditional iterators first. this way if someone searches for { traceDuration > 1s && span.foo = "bar"} the query will + // be sped up by searching for traceDuration first. note that we can only set the predicates if all conditions is true. + // otherwise we just pass the info up to the engine to make a choice + for _, cond := range conds { + switch cond.Attribute.Intrinsic { + case traceql.IntrinsicTraceID: + var pred parquetquery.Predicate + if allConditions { + pred, err = createBytesPredicate(cond.Op, cond.Operands, false) + if err != nil { + return nil, err + } } - } - traceIters = append(traceIters, makeIter(columnPathRootSpanName, pred, columnPathRootSpanName)) - case traceql.IntrinsicTraceRootService: - var pred parquetquery.Predicate - if allConditions { - pred, err = createStringPredicate(cond.Op, cond.Operands) - if err != nil { - return nil, err + traceIters = append(traceIters, makeIter(columnPathTraceID, pred, columnPathTraceID)) + case traceql.IntrinsicTraceDuration: + var pred parquetquery.Predicate + if allConditions { + pred, err = createIntPredicate(cond.Op, cond.Operands) + if err != nil { + return nil, err + } + } + traceIters = append(traceIters, makeIter(columnPathDurationNanos, pred, columnPathDurationNanos)) + case traceql.IntrinsicTraceStartTime: + if start == 0 && end == 0 { + traceIters = append(traceIters, makeIter(columnPathStartTimeUnixNano, nil, columnPathStartTimeUnixNano)) } + case traceql.IntrinsicTraceRootSpan: + var pred parquetquery.Predicate + if allConditions { + pred, err = createStringPredicate(cond.Op, cond.Operands) + if err 
!= nil { + return nil, err + } + } + traceIters = append(traceIters, makeIter(columnPathRootSpanName, pred, columnPathRootSpanName)) + case traceql.IntrinsicTraceRootService: + var pred parquetquery.Predicate + if allConditions { + pred, err = createStringPredicate(cond.Op, cond.Operands) + if err != nil { + return nil, err + } + } + traceIters = append(traceIters, makeIter(columnPathRootServiceName, pred, columnPathRootServiceName)) + case traceql.IntrinsicServiceStats: + traceIters = append(traceIters, createServiceStatsIterator(makeIter)) } - traceIters = append(traceIters, makeIter(columnPathRootServiceName, pred, columnPathRootServiceName)) - case traceql.IntrinsicServiceStats: - traceIters = append(traceIters, createServiceStatsIterator(makeIter)) } } @@ -2226,8 +2308,21 @@ func createBoolPredicate(op traceql.Operator, operands traceql.Operands) (parque func createAttributeIterator(makeIter makeIterFn, conditions []traceql.Condition, definitionLevel int, keyPath, strPath, intPath, floatPath, boolPath string, - allConditions bool, + allConditions bool, selectAll bool, ) (parquetquery.Iterator, error) { + if selectAll { + return parquetquery.NewLeftJoinIterator(definitionLevel, + []parquetquery.Iterator{makeIter(keyPath, nil, "key")}, + []parquetquery.Iterator{ + makeIter(strPath, nil, "string"), + makeIter(intPath, nil, "int"), + makeIter(floatPath, nil, "float"), + makeIter(boolPath, nil, "bool"), + }, + &attributeCollector{}, + parquetquery.WithPool(pqAttrPool)) + } + var ( attrKeys = []string{} attrStringPreds = []parquetquery.Predicate{} @@ -2380,41 +2475,11 @@ func (c *spanCollector) KeepGroup(res *parquetquery.IteratorResult) bool { case columnPathSpanName: sp.addSpanAttr(traceql.IntrinsicNameAttribute, traceql.NewStaticString(unsafeToString(kv.Value.Bytes()))) case columnPathSpanStatusCode: - // Map OTLP status code back to TraceQL enum. - // For other values, use the raw integer. 
- var status traceql.Status - switch kv.Value.Uint64() { - case uint64(v1.Status_STATUS_CODE_UNSET): - status = traceql.StatusUnset - case uint64(v1.Status_STATUS_CODE_OK): - status = traceql.StatusOk - case uint64(v1.Status_STATUS_CODE_ERROR): - status = traceql.StatusError - default: - status = traceql.Status(kv.Value.Uint64()) - } - sp.addSpanAttr(traceql.IntrinsicStatusAttribute, traceql.NewStaticStatus(status)) + sp.addSpanAttr(traceql.IntrinsicStatusAttribute, traceql.NewStaticStatus(otlpStatusToTraceqlStatus(kv.Value.Uint64()))) case columnPathSpanStatusMessage: sp.addSpanAttr(traceql.IntrinsicStatusMessageAttribute, traceql.NewStaticString(unsafeToString(kv.Value.Bytes()))) case columnPathSpanKind: - var kind traceql.Kind - switch kv.Value.Uint64() { - case uint64(v1.Span_SPAN_KIND_UNSPECIFIED): - kind = traceql.KindUnspecified - case uint64(v1.Span_SPAN_KIND_INTERNAL): - kind = traceql.KindInternal - case uint64(v1.Span_SPAN_KIND_SERVER): - kind = traceql.KindServer - case uint64(v1.Span_SPAN_KIND_CLIENT): - kind = traceql.KindClient - case uint64(v1.Span_SPAN_KIND_PRODUCER): - kind = traceql.KindProducer - case uint64(v1.Span_SPAN_KIND_CONSUMER): - kind = traceql.KindConsumer - default: - kind = traceql.Kind(kv.Value.Uint64()) - } - sp.addSpanAttr(traceql.IntrinsicKindAttribute, traceql.NewStaticKind(kind)) + sp.addSpanAttr(traceql.IntrinsicKindAttribute, traceql.NewStaticKind(otlpKindToTraceqlKind(kv.Value.Uint64()))) case columnPathSpanParentID: sp.nestedSetParent = kv.Value.Int32() if c.nestedSetParentExplicit { @@ -2942,3 +3007,37 @@ func (b *backendBlock) rowGroupsForShard(ctx context.Context, pf *parquet.File, return matches, nil } + +func otlpStatusToTraceqlStatus(v uint64) traceql.Status { + // Map OTLP status code back to TraceQL enum. + // For other values, use the raw integer. 
+ switch v { + case uint64(v1.Status_STATUS_CODE_UNSET): + return traceql.StatusUnset + case uint64(v1.Status_STATUS_CODE_OK): + return traceql.StatusOk + case uint64(v1.Status_STATUS_CODE_ERROR): + return traceql.StatusError + default: + return traceql.Status(v) + } +} + +func otlpKindToTraceqlKind(v uint64) traceql.Kind { + switch v { + case uint64(v1.Span_SPAN_KIND_UNSPECIFIED): + return traceql.KindUnspecified + case uint64(v1.Span_SPAN_KIND_INTERNAL): + return traceql.KindInternal + case uint64(v1.Span_SPAN_KIND_SERVER): + return traceql.KindServer + case uint64(v1.Span_SPAN_KIND_CLIENT): + return traceql.KindClient + case uint64(v1.Span_SPAN_KIND_PRODUCER): + return traceql.KindProducer + case uint64(v1.Span_SPAN_KIND_CONSUMER): + return traceql.KindConsumer + default: + return traceql.Kind(v) + } +} diff --git a/tempodb/encoding/vparquet4/block_traceql_test.go b/tempodb/encoding/vparquet4/block_traceql_test.go index 0ec4206f592..0c1cb4bdf27 100644 --- a/tempodb/encoding/vparquet4/block_traceql_test.go +++ b/tempodb/encoding/vparquet4/block_traceql_test.go @@ -8,13 +8,16 @@ import ( "math/rand" "os" "path" + "sort" "strconv" + "strings" "testing" "time" "github.com/google/uuid" "github.com/stretchr/testify/require" + "github.com/grafana/tempo/pkg/parquetquery" "github.com/grafana/tempo/pkg/tempopb" v1 "github.com/grafana/tempo/pkg/tempopb/trace/v1" "github.com/grafana/tempo/pkg/traceql" @@ -424,6 +427,7 @@ func fullyPopulatedTestTrace(id common.ID) *Trace { return &Trace{ TraceID: test.ValidTraceID(id), + TraceIDText: util.TraceIDToHexString(id), StartTimeUnixNano: uint64(1000 * time.Second), EndTimeUnixNano: uint64(2000 * time.Second), DurationNano: uint64((100 * time.Millisecond).Nanoseconds()), @@ -601,6 +605,216 @@ func fullyPopulatedTestTrace(id common.ID) *Trace { } } +func TestBackendBlockSelectAll(t *testing.T) { + var ( + ctx = context.Background() + numTraces = 250 + traces = make([]*Trace, 0, numTraces) + wantTraceIdx = rand.Intn(numTraces) + 
wantTraceID = test.ValidTraceID(nil) + wantTrace = fullyPopulatedTestTrace(wantTraceID) + dc = test.MakeDedicatedColumns() + dcm = dedicatedColumnsToColumnMapping(dc) + ) + + // TODO - This strips unsupported attributes types for now. Revisit when + // add support for arrays/kvlists in the fetch layer. + trimForSelectAll(wantTrace) + + for i := 0; i < numTraces; i++ { + if i == wantTraceIdx { + traces = append(traces, wantTrace) + continue + } + + id := test.ValidTraceID(nil) + tr, _ := traceToParquet(&backend.BlockMeta{}, id, test.MakeTrace(1, id), nil) + traces = append(traces, tr) + } + + b := makeBackendBlockWithTraces(t, traces) + + _, _, _, req, err := traceql.NewEngine().Compile("{}") + require.NoError(t, err) + req.SecondPass = func(inSS *traceql.Spanset) ([]*traceql.Spanset, error) { return []*traceql.Spanset{inSS}, nil } + req.SecondPassSelectAll = true + + resp, err := b.Fetch(ctx, *req, common.DefaultSearchOptions()) + require.NoError(t, err) + defer resp.Results.Close() + + // This is a dump of all spans in the fully-populated test trace + wantSS := flattenForSelectAll(wantTrace, dcm) + + for { + // Seek to our desired trace + ss, err := resp.Results.Next(ctx) + require.NoError(t, err) + if ss == nil { + break + } + if !bytes.Equal(ss.TraceID, wantTraceID) { + continue + } + + // Cleanup found data for comparison + // equal will fail on the rownum mismatches. this is an internal detail to the + // fetch layer. 
just wipe them out here + ss.ReleaseFn = nil + ss.ServiceStats = nil + for _, sp := range ss.Spans { + s := sp.(*span) + s.cbSpanset = nil + s.cbSpansetFinal = false + s.rowNum = parquetquery.RowNumber{} + s.startTimeUnixNanos = 0 // selectall doesn't imply start time + sortAttrs(s.traceAttrs) + sortAttrs(s.resourceAttrs) + sortAttrs(s.spanAttrs) + } + + require.Equal(t, wantSS, ss) + } +} + +func sortAttrs(attrs []attrVal) { + sort.SliceStable(attrs, func(i, j int) bool { + is := attrs[i].a.String() + js := attrs[j].a.String() + if is == js { + // Compare by value + return attrs[i].s.String() < attrs[j].s.String() + } + return is < js + }) +} + +func trimArrayAttrs(in []Attribute) []Attribute { + out := []Attribute{} + for _, a := range in { + if a.IsArray || a.ValueUnsupported != nil { + continue + } + out = append(out, a) + } + return out +} + +func trimForSelectAll(tr *Trace) { + for i, rs := range tr.ResourceSpans { + tr.ResourceSpans[i].Resource.Attrs = trimArrayAttrs(rs.Resource.Attrs) + for j, ss := range rs.ScopeSpans { + for k, s := range ss.Spans { + tr.ResourceSpans[i].ScopeSpans[j].Spans[k].Attrs = trimArrayAttrs(s.Attrs) + } + } + } +} + +func flattenForSelectAll(tr *Trace, dcm dedicatedColumnMapping) *traceql.Spanset { + var traceAttrs []attrVal + newSS := &traceql.Spanset{ + RootServiceName: tr.RootServiceName, + RootSpanName: tr.RootSpanName, + TraceID: tr.TraceID, + DurationNanos: tr.DurationNano, + } + traceAttrs = append(traceAttrs, attrVal{traceql.IntrinsicTraceIDAttribute, traceql.NewStaticString(tr.TraceIDText)}) + traceAttrs = append(traceAttrs, attrVal{traceql.IntrinsicTraceDurationAttribute, traceql.NewStaticDuration(time.Duration(tr.DurationNano))}) + traceAttrs = append(traceAttrs, attrVal{traceql.IntrinsicTraceRootServiceAttribute, traceql.NewStaticString(tr.RootServiceName)}) + traceAttrs = append(traceAttrs, attrVal{traceql.IntrinsicTraceRootSpanAttribute, traceql.NewStaticString(tr.RootSpanName)}) + sortAttrs(traceAttrs) + + for _, 
rs := range tr.ResourceSpans { + var rsAttrs []attrVal + rsAttrs = append(rsAttrs, attrVal{traceql.NewScopedAttribute(traceql.AttributeScopeResource, false, LabelServiceName), traceql.NewStaticString(rs.Resource.ServiceName)}) + rsAttrs = append(rsAttrs, attrVal{traceql.NewScopedAttribute(traceql.AttributeScopeResource, false, LabelCluster), traceql.NewStaticString(*rs.Resource.Cluster)}) + rsAttrs = append(rsAttrs, attrVal{traceql.NewScopedAttribute(traceql.AttributeScopeResource, false, LabelNamespace), traceql.NewStaticString(*rs.Resource.Namespace)}) + rsAttrs = append(rsAttrs, attrVal{traceql.NewScopedAttribute(traceql.AttributeScopeResource, false, LabelPod), traceql.NewStaticString(*rs.Resource.Pod)}) + rsAttrs = append(rsAttrs, attrVal{traceql.NewScopedAttribute(traceql.AttributeScopeResource, false, LabelContainer), traceql.NewStaticString(*rs.Resource.Container)}) + rsAttrs = append(rsAttrs, attrVal{traceql.NewScopedAttribute(traceql.AttributeScopeResource, false, LabelK8sClusterName), traceql.NewStaticString(*rs.Resource.K8sClusterName)}) + rsAttrs = append(rsAttrs, attrVal{traceql.NewScopedAttribute(traceql.AttributeScopeResource, false, LabelK8sNamespaceName), traceql.NewStaticString(*rs.Resource.K8sNamespaceName)}) + rsAttrs = append(rsAttrs, attrVal{traceql.NewScopedAttribute(traceql.AttributeScopeResource, false, LabelK8sPodName), traceql.NewStaticString(*rs.Resource.K8sPodName)}) + rsAttrs = append(rsAttrs, attrVal{traceql.NewScopedAttribute(traceql.AttributeScopeResource, false, LabelK8sContainerName), traceql.NewStaticString(*rs.Resource.K8sContainerName)}) + + for _, a := range parquetToProtoAttrs(rs.Resource.Attrs) { + if arr := a.Value.GetArrayValue(); arr != nil { + for _, v := range arr.Values { + rsAttrs = append(rsAttrs, attrVal{traceql.NewScopedAttribute(traceql.AttributeScopeResource, false, a.Key), traceql.StaticFromAnyValue(v)}) + } + continue + } + rsAttrs = append(rsAttrs, 
attrVal{traceql.NewScopedAttribute(traceql.AttributeScopeResource, false, a.Key), traceql.StaticFromAnyValue(a.Value)}) + } + + dcm.forEach(func(attr string, column dedicatedColumn) { + if strings.Contains(column.ColumnPath, "Resource") { + v := column.readValue(&rs.Resource.DedicatedAttributes) + if v == nil { + return + } + a := traceql.NewScopedAttribute(traceql.AttributeScopeResource, false, attr) + s := traceql.StaticFromAnyValue(v) + rsAttrs = append(rsAttrs, attrVal{a, s}) + } + }) + + sortAttrs(rsAttrs) + + for _, ss := range rs.ScopeSpans { + for _, s := range ss.Spans { + + newS := &span{} + // newS.id = s.SpanID SpanID isn't implied by SelectAll + // newS.startTimeUnixNanos = s.StartTimeUnixNano Span StartTime isn't implied by selectAll + newS.durationNanos = s.DurationNano + newS.setTraceAttrs(traceAttrs) + newS.setResourceAttrs(rsAttrs) + newS.addSpanAttr(traceql.IntrinsicDurationAttribute, traceql.NewStaticDuration(time.Duration(s.DurationNano))) + newS.addSpanAttr(traceql.IntrinsicKindAttribute, traceql.NewStaticKind(otlpKindToTraceqlKind(uint64(s.Kind)))) + newS.addSpanAttr(traceql.IntrinsicNameAttribute, traceql.NewStaticString(s.Name)) + newS.addSpanAttr(traceql.IntrinsicStatusAttribute, traceql.NewStaticStatus(otlpStatusToTraceqlStatus(uint64(s.StatusCode)))) + newS.addSpanAttr(traceql.IntrinsicStatusMessageAttribute, traceql.NewStaticString(s.StatusMessage)) + if s.HttpStatusCode != nil { + newS.addSpanAttr(traceql.NewScopedAttribute(traceql.AttributeScopeSpan, false, LabelHTTPStatusCode), traceql.NewStaticInt(int(*s.HttpStatusCode))) + } + if s.HttpMethod != nil { + newS.addSpanAttr(traceql.NewScopedAttribute(traceql.AttributeScopeSpan, false, LabelHTTPMethod), traceql.NewStaticString(*s.HttpMethod)) + } + if s.HttpUrl != nil { + newS.addSpanAttr(traceql.NewScopedAttribute(traceql.AttributeScopeSpan, false, LabelHTTPUrl), traceql.NewStaticString(*s.HttpUrl)) + } + + dcm.forEach(func(attr string, column dedicatedColumn) { + if 
strings.Contains(column.ColumnPath, "Span") { + v := column.readValue(&s.DedicatedAttributes) + if v == nil { + return + } + a := traceql.NewScopedAttribute(traceql.AttributeScopeSpan, false, attr) + s := traceql.StaticFromAnyValue(v) + newS.addSpanAttr(a, s) + } + }) + + for _, a := range parquetToProtoAttrs(s.Attrs) { + if arr := a.Value.GetArrayValue(); arr != nil { + for _, v := range arr.Values { + newS.addSpanAttr(traceql.NewScopedAttribute(traceql.AttributeScopeSpan, false, a.Key), traceql.StaticFromAnyValue(v)) + } + continue + } + newS.addSpanAttr(traceql.NewScopedAttribute(traceql.AttributeScopeSpan, false, a.Key), traceql.StaticFromAnyValue(a.Value)) + } + + sortAttrs(newS.spanAttrs) + newSS.Spans = append(newSS.Spans, newS) + } + } + } + return newSS +} + func BenchmarkBackendBlockTraceQL(b *testing.B) { testCases := []struct { name string