Skip to content

Commit

Permalink
Add allow sub-seconds time resolution with $timeSeriesMs and $timeFil…
Browse files Browse the repository at this point in the history
…terMs support, fix #354, fix #398
  • Loading branch information
Slach committed Jun 1, 2022
1 parent a49dace commit 8224556
Show file tree
Hide file tree
Showing 8 changed files with 396 additions and 23 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
* Add support for Logs visualization, fix https://github.com/Altinity/clickhouse-grafana/issues/331, thanks @Fiery-Fenix and @pixelsquared
* Add $conditionalTest to editor auto-complete
* Add support $__searchFilter to template variable queries, fix https://github.com/Altinity/clickhouse-grafana/issues/354
* Allow sub-second time resolution with $timeSeriesMs and $timeFilterMs support, fix https://github.com/Altinity/clickhouse-grafana/issues/354, fix https://github.com/Altinity/clickhouse-grafana/issues/398

## Fixes:
* allow Nullable types in alert label name in backend part, fix https://github.com/Altinity/clickhouse-grafana/issues/405
Expand Down
1 change: 1 addition & 0 deletions docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ services:
environment:
GF_INSTALL_PLUGINS: grafana-piechart-panel,grafana-worldmap-panel
GF_LOG_LEVEL: debug
# @todo wait grafana 9.0 implementation alerts provisioning https://github.com/grafana/grafana/issues/40983#issuecomment-1137770772
GF_UNIFIED_ALERTING_ENABLED: ${GF_UNIFIED_ALERTING_ENABLED:-false}
GF_ALERTING_ENABLED: ${GF_ALERTING_ENABLED:-true}
GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS: vertamedia-clickhouse-datasource
Expand Down
192 changes: 192 additions & 0 deletions docker/grafana/dashboards/test_timeFilterMs_and_timeSeriesMs.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,192 @@
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"links": [],
"panels": [
{
"alert": {
"alertRuleTags": {},
"conditions": [
{
"evaluator": {
"params": [
0
],
"type": "gt"
},
"operator": {
"type": "and"
},
"query": {
"params": [
"A",
"5m",
"now"
]
},
"reducer": {
"params": [],
"type": "max"
},
"type": "query"
}
],
"executionErrorState": "alerting",
"for": "5m",
"frequency": "1m",
"handler": 1,
"name": "$timeSeriesMs and $timeFilterMs alert",
"noDataState": "no_data",
"notifications": []
},
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "clickhouse",
"description": "fix https://github.com/Altinity/clickhouse-grafana/issues/354 and https://github.com/Altinity/clickhouse-grafana/issues/398",
"fieldConfig": {
"defaults": {},
"overrides": []
},
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 0,
"y": 0
},
"hiddenSeries": false,
"id": 2,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"nullPointMode": "null",
"options": {
"alertThreshold": true
},
"percentage": false,
"pluginVersion": "7.5.16",
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"database": "default",
"dateColDataType": "",
"dateLoading": false,
"dateTimeColDataType": "d",
"dateTimeType": "DATETIME64",
"datetimeLoading": false,
"extrapolate": true,
"format": "time_series",
"formattedQuery": "SELECT $timeSeries as t, count() FROM $table WHERE $timeFilter GROUP BY t ORDER BY t",
"interval": "",
"intervalFactor": 1,
"query": "SELECT\n $timeSeriesMs as t,\n count()\nFROM $table\n\nWHERE $timeFilterMs\n\nGROUP BY t\n\nORDER BY t\n",
"queryType": "randomWalk",
"rawQuery": "SELECT\n (intDiv(toFloat64(\"d\") * 1000, 20000) * 20000) as t,\n count()\nFROM default.test_datetime64\n\nWHERE \"d\" >= toDateTime64(1654061632511/1000, 3) AND \"d\" <= toDateTime64(1654083232511/1000, 3)\n\nGROUP BY t\n\nORDER BY t",
"refId": "A",
"round": "0s",
"skip_comments": true,
"table": "test_datetime64",
"tableLoading": false
}
],
"thresholds": [
{
"colorMode": "critical",
"fill": true,
"line": true,
"op": "gt",
"value": 0,
"visible": true
}
],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "$timeSeriesMs and $timeFilterMs",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"$$hashKey": "object:469",
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"$$hashKey": "object:470",
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
}
],
"refresh": false,
"schemaVersion": 27,
"style": "dark",
"tags": [],
"templating": {
"list": []
},
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "$timeFilterMs and $timeSeriesMs",
"uid": "iVjWIx97k",
"version": 2
}
55 changes: 50 additions & 5 deletions pkg/eval_query.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,10 @@ import (
/* var NumberOnlyRegexp = regexp.MustCompile(`^[+-]?\d+(\.\d+)?$`) */

var timeSeriesMacroRegexp = regexp.MustCompile(`\$timeSeries\b`)
var timeSeriesMsMacroRegexp = regexp.MustCompile(`\$timeSeriesMs\b`)
var naturalTimeSeriesMacroRegexp = regexp.MustCompile(`\$naturalTimeSeries\b`)
var timeFilterMacroRegexp = regexp.MustCompile(`\$timeFilter\b`)
var timeFilterMsMacroRegexp = regexp.MustCompile(`\$timeFilterMs\b`)
var tableMacroRegexp = regexp.MustCompile(`\$table\b`)
var fromMacroRegexp = regexp.MustCompile(`\$from\b`)
var toMacroRegexp = regexp.MustCompile(`\$to\b`)
Expand All @@ -25,6 +27,7 @@ var timeFilter64ByColumnMacroRegexp = regexp.MustCompile(`\$timeFilter64ByColumn

var fromMsMacroRegexp = regexp.MustCompile(`\$__from\b`)
var toMsMacroRegexp = regexp.MustCompile(`\$__to\b`)
var intervalMsMacroRegexp = regexp.MustCompile(`\$__interval_ms\b`)

type EvalQuery struct {
RefId string `json:"refId"`
Expand All @@ -40,6 +43,7 @@ type EvalQuery struct {
IntervalFactor int `json:"intervalFactor"`
Interval string `json:"interval"`
IntervalSec int
IntervalMs int
Database string `json:"database"`
Table string `json:"table"`
MaxDataPoints int64
Expand All @@ -66,23 +70,34 @@ func (q *EvalQuery) replace(query string) (string, error) {
q.IntervalFactor = 1
}
i := 1 * time.Second
ms := 1 * time.Millisecond
if q.Interval != "" {
duration, err := time.ParseDuration(q.Interval)
if err != nil {
return "", err
}
q.IntervalSec = int(math.Floor(duration.Seconds()))
q.IntervalMs = int(duration.Milliseconds())
}
if q.IntervalSec <= 0 {
if q.MaxDataPoints > 0 {
i = q.To.Sub(q.From) / time.Duration(q.MaxDataPoints)
} else {
i = q.To.Sub(q.From) / 100
}
if i > 1*time.Millisecond && q.IntervalMs <= 0 {
ms = i
}
if i < 1*time.Second {
i = 1 * time.Second
}
q.IntervalSec, err = q.convertInterval(fmt.Sprintf("%fs", math.Floor(i.Seconds())), q.IntervalFactor)
q.IntervalSec, err = q.convertInterval(fmt.Sprintf("%fs", math.Floor(i.Seconds())), q.IntervalFactor, false)
if err != nil {
return "", err
}
}
if q.IntervalMs <= 0 {
q.IntervalMs, err = q.convertInterval(fmt.Sprintf("%dms", ms.Milliseconds()), q.IntervalFactor, true)
if err != nil {
return "", err
}
Expand Down Expand Up @@ -111,16 +126,18 @@ func (q *EvalQuery) replace(query string) (string, error) {
}

timeFilter := q.getDateTimeFilter(q.DateTimeType)
timeFilterMs := q.getDateTimeFilterMs(q.DateTimeType)
if q.DateCol != "" {
timeFilter = q.getDateFilter() + " AND " + timeFilter
timeFilterMs = q.getDateFilter() + " AND " + timeFilterMs
}

table := q.escapeIdentifier(q.Table)
if q.Database != "" {
table = q.escapeIdentifier(q.Database) + "." + table
}

myRound, err := q.convertInterval(q.Round, q.IntervalFactor)
myRound, err := q.convertInterval(q.Round, q.IntervalFactor, false)
if err != nil {
return "", err
}
Expand All @@ -131,14 +148,19 @@ func (q *EvalQuery) replace(query string) (string, error) {
to := q.convertTimestamp(q.round(q.To, myRound))

query = timeSeriesMacroRegexp.ReplaceAllString(query, strings.Replace(q.getTimeSeries(q.DateTimeType), "$", "$$", -1))
query = timeSeriesMsMacroRegexp.ReplaceAllString(query, strings.Replace(q.getTimeSeriesMs(q.DateTimeType), "$", "$$", -1))
query = naturalTimeSeriesMacroRegexp.ReplaceAllString(query, strings.Replace(q.getNaturalTimeSeries(q.DateTimeType, from, to), "$", "$$", -1))
query = timeFilterMacroRegexp.ReplaceAllString(query, strings.Replace(timeFilter, "$", "$$", -1))
query = timeFilterMsMacroRegexp.ReplaceAllString(query, strings.Replace(timeFilterMs, "$", "$$", -1))
query = tableMacroRegexp.ReplaceAllString(query, table)
query = fromMacroRegexp.ReplaceAllString(query, fmt.Sprintf("%d", from))
query = toMacroRegexp.ReplaceAllString(query, fmt.Sprintf("%d", to))
query = fromMsMacroRegexp.ReplaceAllString(query, fmt.Sprintf("%d", q.From.UnixMilli()))
query = toMsMacroRegexp.ReplaceAllString(query, fmt.Sprintf("%d", q.To.UnixMilli()))
query = dateColMacroRegexp.ReplaceAllString(query, q.escapeIdentifier(q.DateCol))
query = dateTimeColMacroRegexp.ReplaceAllString(query, q.escapeIdentifier(q.DateTimeCol))
query = intervalMacroRegexp.ReplaceAllString(query, fmt.Sprintf("%d", q.IntervalSec))
query = intervalMsMacroRegexp.ReplaceAllString(query, fmt.Sprintf("%d", q.IntervalMs))

query = q.replaceTimeFilters(query, myRound)

Expand Down Expand Up @@ -541,6 +563,16 @@ func (q *EvalQuery) getTimeSeries(dateTimeType string) string {
return "(intDiv($dateTimeCol, $interval) * $interval) * 1000"
}

// getTimeSeriesMs returns the ClickHouse expression that buckets the time
// column into $__interval_ms-sized groups for the $timeSeriesMs macro.
// The result is still a template: $dateTimeCol and $__interval_ms are
// substituted later during macro expansion.
func (q *EvalQuery) getTimeSeriesMs(dateTimeType string) string {
	switch dateTimeType {
	case "DATETIME":
		// DateTime stores whole seconds; scale to ms before bucketing.
		return "(intDiv(toUInt32($dateTimeCol) * 1000, $__interval_ms) * $__interval_ms)"
	case "DATETIME64":
		// DateTime64 keeps sub-second precision; toFloat64 preserves it.
		return "(intDiv(toFloat64($dateTimeCol) * 1000, $__interval_ms) * $__interval_ms)"
	default:
		// Anything else is assumed to already be a millisecond value.
		return "(intDiv($dateTimeCol, $__interval_ms) * $__interval_ms)"
	}
}

// getDateFilter builds the SQL predicate restricting the coarse Date column
// to the dashboard range; $dateCol, $from and $to are substituted later.
func (q *EvalQuery) getDateFilter() string {
	const dateRangePredicate = "$dateCol >= toDate($from) AND $dateCol <= toDate($to)"
	return dateRangePredicate
}
Expand All @@ -558,6 +590,19 @@ func (q *EvalQuery) getDateTimeFilter(dateTimeType string) string {
return "$dateTimeCol >= " + convertFn("$from") + " AND $dateTimeCol <= " + convertFn("$to")
}

// getDateTimeFilterMs builds the millisecond-precision time-range predicate
// used by the $timeFilterMs macro. $__from/$__to are millisecond epoch
// values; the "/1000" happens in ClickHouse, where "/" is float division,
// so sub-second precision survives for DateTime64 (see the generated
// rawQuery form: toDateTime64(.../1000, 3)).
func (q *EvalQuery) getDateTimeFilterMs(dateTimeType string) string {
	wrap := func(ts string) string {
		switch dateTimeType {
		case "DATETIME":
			return "toDateTime(" + ts + ")"
		case "DATETIME64":
			return "toDateTime64(" + ts + ", 3)"
		}
		// Unknown column types are compared against the raw expression.
		return ts
	}
	lowerBound := wrap("$__from/1000")
	upperBound := wrap("$__to/1000")
	return "$dateTimeCol >= " + lowerBound + " AND $dateTimeCol <= " + upperBound
}

// convertTimestamp converts an instant into whole Unix seconds; the
// millisecond remainder is truncated toward zero.
func (q *EvalQuery) convertTimestamp(dt time.Time) int64 {
	millis := dt.UnixMilli()
	return millis / 1000
}
Expand All @@ -569,16 +614,16 @@ func (q *EvalQuery) round(dt time.Time, round int) time.Time {
return dt.Truncate(time.Duration(round) * time.Second)
}

func (q *EvalQuery) convertInterval(interval string, intervalFactor int) (int, error) {
// convertInterval parses a Grafana interval string (e.g. "20s", "500ms")
// via time.ParseDuration and scales it by intervalFactor; with ms=true the
// result is in milliseconds, otherwise in whole seconds (rounded up).
// An empty interval yields 0 with no error.
//
// NOTE(review): this span is a unified-diff rendering — the pre-commit
// one-second clamp (`if d < 1*time.Second { d = 1 * time.Second`) and the
// post-commit `ms` branch appear interleaved below, so the text here is not
// the exact final body; confirm against the repository before relying on it.
func (q *EvalQuery) convertInterval(interval string, intervalFactor int, ms bool) (int, error) {
if interval == "" {
return 0, nil
}
d, err := time.ParseDuration(interval)
if err != nil {
return 0, err
}
if d < 1*time.Second {
d = 1 * time.Second
if ms {
return int(math.Ceil(float64(d.Milliseconds()) * float64(intervalFactor))), nil
}
return int(math.Ceil(d.Seconds() * float64(intervalFactor))), nil
}
Expand Down
Loading

0 comments on commit 8224556

Please sign in to comment.