Merge pull request #816 from grafana/julienduchesne/upstream-pt2
Sync upstream to `9823a93c4238b3b549321ece051bbba6d3922741`
julienduchesne authored Jan 8, 2025
2 parents 2129bae + ec81cca commit 236cc08
Showing 22 changed files with 374 additions and 306 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -4,6 +4,7 @@

* [CHANGE] Notifier: Increment the prometheus_notifications_errors_total metric by the number of affected alerts rather than by one per batch of affected alerts. #15428
* [ENHANCEMENT] OTLP receiver: Convert also metric metadata. #15416
+ * [BUGFIX] OTLP receiver: Allow colons in non-standard units. #15710

## 3.0.1 / 2024-11-28

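The first [CHANGE] entry above alters how notifier send errors are counted. A minimal sketch of the difference (the metric semantics are from the changelog; the surrounding names and types are hypothetical, not taken from this commit):

package notifierexample

import "github.com/prometheus/client_golang/prometheus"

// Alert is a stand-in for the notifier's alert type (hypothetical).
type Alert struct{}

// recordSendError sketches the [CHANGE] above: on a failed send, increment
// prometheus_notifications_errors_total by the number of affected alerts
// rather than by one per failed batch.
func recordSendError(errs prometheus.Counter, alerts []*Alert) {
	errs.Add(float64(len(alerts))) // previously: errs.Inc()
}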
11 changes: 1 addition & 10 deletions cmd/prometheus/main.go
@@ -259,7 +259,7 @@ func (c *flagConfig) setFeatureListOptions(logger *slog.Logger) error {
logger.Info("Experimental out-of-order native histogram ingestion enabled. This will only take effect if OutOfOrderTimeWindow is > 0 and if EnableNativeHistograms = true")
case "created-timestamp-zero-ingestion":
c.scrape.EnableCreatedTimestampZeroIngestion = true
- c.web.EnableCreatedTimestampZeroIngestion = true
+ c.web.CTZeroIngestionEnabled = true
// Change relevant global variables. Hacky, but it's hard to pass a new option or default to unmarshallers.
config.DefaultConfig.GlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols
config.DefaultGlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols
@@ -989,18 +989,12 @@ func main() {
listeners, err := webHandler.Listeners()
if err != nil {
logger.Error("Unable to start web listener", "err", err)
- if err := queryEngine.Close(); err != nil {
- 	logger.Warn("Closing query engine failed", "err", err)
- }
os.Exit(1)
}

err = toolkit_web.Validate(*webConfig)
if err != nil {
logger.Error("Unable to validate web configuration file", "err", err)
- if err := queryEngine.Close(); err != nil {
- 	logger.Warn("Closing query engine failed", "err", err)
- }
os.Exit(1)
}

@@ -1022,9 +1016,6 @@ func main() {
case <-cancel:
reloadReady.Close()
}
- if err := queryEngine.Close(); err != nil {
- 	logger.Warn("Closing query engine failed", "err", err)
- }
return nil
},
func(err error) {
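The three hunks above drop the explicit queryEngine.Close() calls from main()'s error paths and from its run-group termination actor. For orientation, the actor fragment shown (`case <-cancel: ... return nil }, func(err error) {`) follows the github.com/oklog/run pattern that Prometheus's main() is built on; a minimal standalone sketch (hypothetical actor, not code from this commit):

package main

import (
	"os"

	"github.com/oklog/run"
)

func main() {
	var g run.Group
	cancel := make(chan struct{})
	g.Add(
		// Execute: block until the group is interrupted.
		func() error {
			<-cancel
			return nil
		},
		// Interrupt: unblock the execute function so the group can exit.
		func(error) {
			close(cancel)
		},
	)
	if err := g.Run(); err != nil {
		os.Exit(1)
	}
}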
4 changes: 3 additions & 1 deletion cmd/promtool/sd.go
@@ -144,7 +144,9 @@ func getSDCheckResult(targetGroups []*targetgroup.Group, scrapeConfig *config.Sc
}
}

- res, orig, err := scrape.PopulateLabels(lb, scrapeConfig)
+ scrape.PopulateDiscoveredLabels(lb, scrapeConfig, target, targetGroup.Labels)
+ orig := lb.Labels()
+ res, err := scrape.PopulateLabels(lb, scrapeConfig, target, targetGroup.Labels)
result := sdCheckResult{
DiscoveredLabels: orig,
Labels: res,
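The hunk above reflects the split of the old PopulateLabels (which returned both the final and the discovered labels) into two calls. A minimal sketch of the new sequence, using only the signatures visible in this diff; the function name and error handling are illustrative:

package sdexample

import (
	"fmt"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/scrape"
)

// checkTarget mirrors the new call sequence in sd.go above.
func checkTarget(cfg *config.ScrapeConfig, target, groupLabels model.LabelSet) error {
	lb := labels.NewBuilder(labels.EmptyLabels())

	// Step 1: materialize the pre-relabeling ("discovered") labels.
	scrape.PopulateDiscoveredLabels(lb, cfg, target, groupLabels)
	orig := lb.Labels()

	// Step 2: apply defaults and relabeling on the same builder.
	res, err := scrape.PopulateLabels(lb, cfg, target, groupLabels)
	if err != nil {
		return err
	}
	fmt.Printf("discovered=%v final=%v\n", orig, res)
	return nil
}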
3 changes: 0 additions & 3 deletions discovery/uyuni/uyuni.go
@@ -205,9 +205,6 @@ func getEndpointInfoForSystems(
err := rpcclient.Call(
"system.monitoring.listEndpoints",
[]interface{}{token, systemIDs}, &endpointInfos)
- if err != nil {
- 	return nil, err
- }
return endpointInfos, err
}

2 changes: 2 additions & 0 deletions promql/engine.go
@@ -436,6 +436,8 @@ func NewEngine(opts EngineOpts) *Engine {
}

// Close closes ng.
+ // Callers must ensure the engine is really no longer in use before calling this to avoid
+ // failures like the one in https://github.com/prometheus/prometheus/issues/15232
func (ng *Engine) Close() error {
if ng == nil {
return nil
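A minimal sketch of the contract the new doc comment describes: create an engine, shut down every query path first, then Close. The EngineOpts values here are illustrative, not from this commit:

package main

import (
	"log/slog"
	"os"
	"time"

	"github.com/prometheus/prometheus/promql"
)

func main() {
	logger := slog.New(slog.NewTextHandler(os.Stderr, nil))
	ng := promql.NewEngine(promql.EngineOpts{
		Logger:     logger,
		MaxSamples: 1e6,
		Timeout:    2 * time.Minute,
	})

	// ... serve queries; stop the web API, rule manager, etc. first ...

	// Close only once no query can still be executing, per the comment above.
	if err := ng.Close(); err != nil {
		logger.Warn("Closing query engine failed", "err", err)
	}
}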
70 changes: 36 additions & 34 deletions scrape/manager_test.go
@@ -18,6 +18,7 @@ import (
"context"
"errors"
"fmt"
"maps"
"net/http"
"net/http/httptest"
"net/url"
@@ -61,18 +62,18 @@

func TestPopulateLabels(t *testing.T) {
cases := []struct {
- in labels.Labels
+ in model.LabelSet
cfg *config.ScrapeConfig
res labels.Labels
resOrig labels.Labels
err string
}{
// Regular population of scrape config options.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
"custom": "value",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -103,14 +104,14 @@ func TestPopulateLabels(t *testing.T) {
// Pre-define/overwrite scrape config labels.
// Leave out port and expect it to be defaulted to scheme.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4",
model.SchemeLabel: "http",
model.MetricsPathLabel: "/custom",
model.JobLabel: "custom-job",
model.ScrapeIntervalLabel: "2s",
model.ScrapeTimeoutLabel: "2s",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -138,10 +139,10 @@
},
// Provide instance label. HTTPS port default for IPv6.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "[::1]",
model.InstanceLabel: "custom-instance",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -170,7 +171,7 @@
},
// Address label missing.
{
- in: labels.FromStrings("custom", "value"),
+ in: model.LabelSet{"custom": "value"},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -184,7 +185,7 @@
},
// Address label missing, but added in relabelling.
{
- in: labels.FromStrings("custom", "host:1234"),
+ in: model.LabelSet{"custom": "host:1234"},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -222,7 +223,7 @@
},
// Address label missing, but added in relabelling.
{
- in: labels.FromStrings("custom", "host:1234"),
+ in: model.LabelSet{"custom": "host:1234"},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -260,10 +261,10 @@
},
// Invalid UTF-8 in label.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
"custom": "\xbd",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -277,10 +278,10 @@
},
// Invalid duration in interval label.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "2notseconds",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -294,10 +295,10 @@
},
// Invalid duration in timeout label.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeTimeoutLabel: "2notseconds",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -311,10 +312,10 @@
},
// 0 interval in timeout label.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "0s",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -328,10 +329,10 @@
},
// 0 duration in timeout label.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeTimeoutLabel: "0s",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -345,11 +346,11 @@
},
// Timeout less than interval.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "1s",
model.ScrapeTimeoutLabel: "2s",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -363,9 +364,9 @@
},
// Don't attach default port.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -393,9 +394,9 @@
},
// verify that the default port is not removed (http).
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:80",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "http",
MetricsPath: "/metrics",
@@ -423,9 +424,9 @@
},
// verify that the default port is not removed (https).
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:443",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -453,17 +454,18 @@
},
}
for _, c := range cases {
- in := c.in.Copy()
-
- res, orig, err := PopulateLabels(labels.NewBuilder(c.in), c.cfg)
+ in := maps.Clone(c.in)
+ lb := labels.NewBuilder(labels.EmptyLabels())
+ res, err := PopulateLabels(lb, c.cfg, c.in, nil)
if c.err != "" {
require.EqualError(t, err, c.err)
} else {
require.NoError(t, err)
+ testutil.RequireEqual(t, c.res, res)
+ PopulateDiscoveredLabels(lb, c.cfg, c.in, nil)
+ testutil.RequireEqual(t, c.resOrig, lb.Labels())
}
- require.Equal(t, c.in, in)
- testutil.RequireEqual(t, c.res, res)
- testutil.RequireEqual(t, c.resOrig, orig)
+ require.Equal(t, c.in, in) // Check this wasn't altered by PopulateLabels().
}
}

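One behavioral detail the rewritten loop pins down: PopulateLabels must not mutate the caller's model.LabelSet, which the test checks by comparing against a maps.Clone taken up front. A condensed standalone version of that assertion, reusing the imports already present in this file (the config values are hypothetical):

func TestPopulateLabelsDoesNotMutateInput(t *testing.T) {
	in := model.LabelSet{model.AddressLabel: "1.2.3.4:1000"}
	before := maps.Clone(in)

	lb := labels.NewBuilder(labels.EmptyLabels())
	_, err := PopulateLabels(lb, &config.ScrapeConfig{
		JobName:        "job",
		Scheme:         "http",
		MetricsPath:    "/metrics",
		ScrapeInterval: model.Duration(time.Second),
		ScrapeTimeout:  model.Duration(time.Second),
	}, in, nil)
	require.NoError(t, err)
	require.Equal(t, before, in) // The input LabelSet must be left untouched.
}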
8 changes: 4 additions & 4 deletions scrape/scrape.go
@@ -450,7 +450,7 @@ func (sp *scrapePool) Sync(tgs []*targetgroup.Group) {
switch {
case nonEmpty:
all = append(all, t)
- case !t.discoveredLabels.IsEmpty():
+ default:
if sp.config.KeepDroppedTargets == 0 || uint(len(sp.droppedTargets)) < sp.config.KeepDroppedTargets {
sp.droppedTargets = append(sp.droppedTargets, t)
}
@@ -553,9 +553,9 @@ func (sp *scrapePool) sync(targets []*Target) {
if _, ok := uniqueLoops[hash]; !ok {
uniqueLoops[hash] = nil
}
- // Need to keep the most updated labels information
- // for displaying it in the Service Discovery web page.
- sp.activeTargets[hash].SetDiscoveredLabels(t.DiscoveredLabels())
+ // Need to keep the most updated ScrapeConfig for
+ // displaying labels in the Service Discovery web page.
+ sp.activeTargets[hash].SetScrapeConfig(sp.config, t.tLabels, t.tgLabels)
}
}

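The replacement comment and the SetScrapeConfig call suggest the Target now keeps the raw discovery inputs (scrape config, target labels, group labels) and rebuilds its discovered labels on demand instead of caching a materialized copy. A hypothetical sketch of what such an accessor could look like; the field names are assumed and do not appear in this diff:

// DiscoveredLabels rebuilds the pre-relabeling labels from the stored
// inputs each time it is called (hypothetical sketch).
func (t *Target) DiscoveredLabels(lb *labels.Builder) labels.Labels {
	t.mtx.Lock()
	cfg, tLabels, tgLabels := t.scrapeConfig, t.tLabels, t.tgLabels
	t.mtx.Unlock()

	PopulateDiscoveredLabels(lb, cfg, tLabels, tgLabels)
	return lb.Labels()
}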
(diff for the remaining 15 changed files not shown)
