Sync upstream to 9823a93c4238b3b549321ece051bbba6d3922741 #816

Merged: 20 commits, Jan 8, 2025

Commits
5b5fee0
Merge pull request #15702 from prometheus/beorn7/histogram
beorn7 Dec 20, 2024
11e4673
Support ingesting PRWv2's Created Timestamp as 0 samples
ArthurSens Aug 28, 2024
3b97a63
Put PRWv2 created timestamp ingestion behind feature-flag
ArthurSens Aug 28, 2024
6571d97
Handle histogram's created timestamp
ArthurSens Dec 4, 2024
3ffc3bf
handle histogram CT
ArthurSens Dec 4, 2024
3380809
fix linter
ArthurSens Dec 4, 2024
b7a5e28
Inline conditionals and CT handling
ArthurSens Dec 18, 2024
7b03796
Scraping: stop storing discovered labels (#15261)
bboreham Dec 21, 2024
5e7f804
otlptranslator: Remove unused function TrimPromSuffixes (#15709)
aknuds1 Dec 21, 2024
475b7ff
OTLP receiver: Allow colons in non-standard units
aknuds1 Dec 21, 2024
2ffaff8
Merge pull request #15710 from aknuds1/arve/otel-units
ArthurSens Dec 23, 2024
3096733
Merge pull request #14755 from prometheus/arthursens/appendct-prwv2
bwplotka Dec 27, 2024
43fd40c
chore(deps): bump github/codeql-action from 3.27.5 to 3.27.7 (#15582)
dependabot[bot] Dec 29, 2024
4d2c1c1
chore(deps): bump actions/cache from 4.1.2 to 4.2.0 (#15583)
dependabot[bot] Dec 29, 2024
2c5502c
chore(deps): bump actions/setup-go from 5.1.0 to 5.2.0 in /scripts (#…
dependabot[bot] Dec 29, 2024
061400e
tsdb: export CheckpointPrefix constant (#15636)
johncming Dec 29, 2024
6a61efc
discovery: use a more direct and less error-prone return value (#15347)
pinglanlu Dec 29, 2024
08c81b7
chore(deps): bump actions/setup-go from 5.1.0 to 5.2.0 (#15581)
dependabot[bot] Dec 29, 2024
9823a93
fix(main.go): avoid closing the query engine until it is guaranteed t…
machine424 Dec 30, 2024
ec81cca
Merge commit '9823a93c4238b3b549321ece051bbba6d3922741' into juliendu…
julienduchesne Jan 8, 2025
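
Several of these commits (11e4673, 3b97a63, 6571d97, 3ffc3bf) deal with created-timestamp (CT) zero ingestion: when an ingested counter-like series carries a created timestamp, Prometheus can synthesize a 0-valued sample at that timestamp so that rate() and increase() see the true start of the series rather than its first observed value. A minimal, self-contained sketch of the idea (the sample type and ingestWithCT helper are illustrative, not Prometheus' actual appender API):

```go
package main

import "fmt"

// sample is a simplified (timestamp, value) pair.
type sample struct {
	t int64 // milliseconds since epoch
	v float64
}

// ingestWithCT appends a counter sample, first synthesizing a 0-valued sample
// at the created timestamp (ct) when it predates the sample and nothing newer
// has been stored yet. The real logic lives in Prometheus' storage appenders.
func ingestWithCT(series []sample, ct int64, s sample) []sample {
	if ct < s.t && (len(series) == 0 || series[len(series)-1].t < ct) {
		series = append(series, sample{t: ct, v: 0}) // synthetic zero at creation time
	}
	return append(series, s)
}

func main() {
	var series []sample
	// Counter created at t=1000, first observed value 5 at t=2000.
	series = ingestWithCT(series, 1000, sample{t: 2000, v: 5})
	fmt.Println(series) // [{1000 0} {2000 5}]; rate() now sees the series start
}
```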
CHANGELOG.md (1 addition, 0 deletions)

@@ -4,6 +4,7 @@

* [CHANGE] Notifier: Increment the prometheus_notifications_errors_total metric by the number of affected alerts rather than by one per batch of affected alerts. #15428
* [ENHANCEMENT] OTLP receiver: Convert also metric metadata. #15416
+ * [BUGFIX] OTLP receiver: Allow colons in non-standard units. #15710

## 3.0.1 / 2024-11-28

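The [BUGFIX] entry corresponds to commit 475b7ff above. The OTLP receiver folds unit strings into metric names as suffixes, and colons are legal in Prometheus metric names, so the unit sanitizer should preserve them for non-standard units. A rough sketch of such a rule; the helper name and the exact character policy here are assumptions for illustration, not the receiver's actual code:

```go
package main

import (
	"fmt"
	"strings"
)

// sanitizeUnit keeps runes that are valid in a Prometheus metric name,
// including ':' for non-standard units; everything else becomes '_'.
// Hypothetical helper, not the real OTLP translator.
func sanitizeUnit(unit string) string {
	return strings.Map(func(r rune) rune {
		switch {
		case r >= 'a' && r <= 'z', r >= 'A' && r <= 'Z',
			r >= '0' && r <= '9', r == '_', r == ':':
			return r
		default:
			return '_'
		}
	}, unit)
}

func main() {
	fmt.Println(sanitizeUnit("m:s"))   // "m:s", colon preserved
	fmt.Println(sanitizeUnit("req/s")) // "req_s"
}
```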
cmd/prometheus/main.go (1 addition, 10 deletions)

@@ -259,7 +259,7 @@ func (c *flagConfig) setFeatureListOptions(logger *slog.Logger) error {
logger.Info("Experimental out-of-order native histogram ingestion enabled. This will only take effect if OutOfOrderTimeWindow is > 0 and if EnableNativeHistograms = true")
case "created-timestamp-zero-ingestion":
c.scrape.EnableCreatedTimestampZeroIngestion = true
- c.web.EnableCreatedTimestampZeroIngestion = true
+ c.web.CTZeroIngestionEnabled = true
// Change relevant global variables. Hacky, but it's hard to pass a new option or default to unmarshallers.
config.DefaultConfig.GlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols
config.DefaultGlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols
@@ -989,18 +989,12 @@ func main() {
listeners, err := webHandler.Listeners()
if err != nil {
logger.Error("Unable to start web listener", "err", err)
- if err := queryEngine.Close(); err != nil {
-     logger.Warn("Closing query engine failed", "err", err)
- }
os.Exit(1)
}

err = toolkit_web.Validate(*webConfig)
if err != nil {
logger.Error("Unable to validate web configuration file", "err", err)
- if err := queryEngine.Close(); err != nil {
-     logger.Warn("Closing query engine failed", "err", err)
- }
os.Exit(1)
}

@@ -1022,9 +1016,6 @@
case <-cancel:
reloadReady.Close()
}
- if err := queryEngine.Close(); err != nil {
-     logger.Warn("Closing query engine failed", "err", err)
- }
return nil
},
func(err error) {
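The deletions above remove the early queryEngine.Close() calls from main()'s error and shutdown paths: closing the engine there could run while other components still held it (prometheus/prometheus#15232), so upstream now closes it only once it is guaranteed to be unused. A simplified sketch of the hazard and the ordering fix, with a toy engine type standing in for promql.Engine:

```go
package main

import (
	"fmt"
	"sync"
)

// engine is a toy stand-in for promql.Engine; Close must not run while
// queries are still being served.
type engine struct {
	mu     sync.Mutex
	closed bool
}

func (e *engine) query() error {
	e.mu.Lock()
	defer e.mu.Unlock()
	if e.closed {
		return fmt.Errorf("query on closed engine")
	}
	return nil
}

func (e *engine) Close() {
	e.mu.Lock()
	defer e.mu.Unlock()
	e.closed = true
}

func main() {
	e := &engine{}
	var users sync.WaitGroup
	users.Add(1)
	go func() { // a component that still holds the engine
		defer users.Done()
		if err := e.query(); err != nil {
			fmt.Println("lost query:", err)
		}
	}()

	// Closing here, as the removed error paths effectively did, can race
	// with the goroutine above. The fix: wait until every user is done.
	users.Wait()
	e.Close()
}
```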
cmd/promtool/sd.go (3 additions, 1 deletion)

@@ -144,7 +144,9 @@ func getSDCheckResult(targetGroups []*targetgroup.Group, scrapeConfig *config.Sc
}
}

- res, orig, err := scrape.PopulateLabels(lb, scrapeConfig)
+ scrape.PopulateDiscoveredLabels(lb, scrapeConfig, target, targetGroup.Labels)
+ orig := lb.Labels()
+ res, err := scrape.PopulateLabels(lb, scrapeConfig, target, targetGroup.Labels)
result := sdCheckResult{
DiscoveredLabels: orig,
Labels: res,
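Upstream split label population into two steps: PopulateDiscoveredLabels fills a builder with the pre-relabeling (discovered) set, which the caller snapshots via lb.Labels(), and PopulateLabels then produces the final label set from the same inputs. A toy sketch of that builder-reuse pattern; the builder type is invented for illustration and is not the real labels.Builder:

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

// builder is a toy stand-in for labels.Builder.
type builder struct{ m map[string]string }

func newBuilder() *builder { return &builder{m: map[string]string{}} }

func (b *builder) set(k, v string) { b.m[k] = v }

// labels returns a stable snapshot, like labels.Builder.Labels().
func (b *builder) labels() string {
	keys := make([]string, 0, len(b.m))
	for k := range b.m {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	var sb strings.Builder
	for _, k := range keys {
		fmt.Fprintf(&sb, "%s=%q ", k, b.m[k])
	}
	return strings.TrimSpace(sb.String())
}

func main() {
	lb := newBuilder()

	// Phase 1: discovered labels (target, group, and config defaults).
	lb.set("__address__", "1.2.3.4:1000")
	lb.set("job", "myjob")
	orig := lb.labels() // snapshot before relabeling

	// Phase 2: relabeling adds or changes labels in the same builder.
	lb.set("instance", "1.2.3.4:1000")
	res := lb.labels()

	fmt.Println("discovered:", orig)
	fmt.Println("final:     ", res)
}
```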
discovery/uyuni/uyuni.go (0 additions, 3 deletions)

@@ -205,9 +205,6 @@ func getEndpointInfoForSystems(
err := rpcclient.Call(
"system.monitoring.listEndpoints",
[]interface{}{token, systemIDs}, &endpointInfos)
- if err != nil {
-     return nil, err
- }
return endpointInfos, err
}

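The deleted branch was redundant: when err is non-nil, the following `return endpointInfos, err` propagates the same error, with endpointInfos in whatever state the failed RPC call left it (normally its zero value). A distilled before/after, assuming the callee leaves the output untouched on error:

```go
package main

import (
	"errors"
	"fmt"
)

func call() ([]string, error) { return nil, errors.New("rpc failed") }

// before has an extra branch that returns the same values on error.
func before() ([]string, error) {
	out, err := call()
	if err != nil {
		return nil, err
	}
	return out, err
}

// after behaves identically as long as the callee returns a nil slice on error.
func after() ([]string, error) {
	out, err := call()
	return out, err
}

func main() {
	_, err1 := before()
	_, err2 := after()
	fmt.Println(err1, err2) // the same error either way
}
```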
promql/engine.go (2 additions, 0 deletions)

@@ -436,6 +436,8 @@ func NewEngine(opts EngineOpts) *Engine {
}

// Close closes ng.
+ // Callers must ensure the engine is no longer in use before calling this, to avoid
+ // failures like the one in https://github.com/prometheus/prometheus/issues/15232.
func (ng *Engine) Close() error {
if ng == nil {
return nil
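Note from the surrounding context that Close is also nil-receiver safe (the `if ng == nil` guard), so shutdown paths can call it unconditionally. A small illustration of that Go pattern with a toy Engine type:

```go
package main

import "fmt"

// Engine is a toy stand-in for promql.Engine.
type Engine struct{}

// Close mirrors the nil guard shown above: calling it on a nil *Engine is a
// no-op, so callers need not check whether the engine was ever created.
func (e *Engine) Close() error {
	if e == nil {
		return nil
	}
	fmt.Println("closing engine")
	return nil
}

func main() {
	var e *Engine // never initialized
	if err := e.Close(); err != nil {
		fmt.Println("close failed:", err)
	}
}
```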
scrape/manager_test.go (36 additions, 34 deletions)

@@ -18,6 +18,7 @@ import (
"context"
"errors"
"fmt"
"maps"
"net/http"
"net/http/httptest"
"net/url"
@@ -61,18 +62,18 @@ func init() {

func TestPopulateLabels(t *testing.T) {
cases := []struct {
- in      labels.Labels
+ in      model.LabelSet
cfg *config.ScrapeConfig
res labels.Labels
resOrig labels.Labels
err string
}{
// Regular population of scrape config options.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
"custom": "value",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -103,14 +104,14 @@ func TestPopulateLabels(t *testing.T) {
// Pre-define/overwrite scrape config labels.
// Leave out port and expect it to be defaulted to scheme.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4",
model.SchemeLabel: "http",
model.MetricsPathLabel: "/custom",
model.JobLabel: "custom-job",
model.ScrapeIntervalLabel: "2s",
model.ScrapeTimeoutLabel: "2s",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -138,10 +139,10 @@
},
// Provide instance label. HTTPS port default for IPv6.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "[::1]",
model.InstanceLabel: "custom-instance",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -170,7 +171,7 @@
},
// Address label missing.
{
- in: labels.FromStrings("custom", "value"),
+ in: model.LabelSet{"custom": "value"},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -184,7 +185,7 @@
},
// Address label missing, but added in relabelling.
{
- in: labels.FromStrings("custom", "host:1234"),
+ in: model.LabelSet{"custom": "host:1234"},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -222,7 +223,7 @@
},
// Address label missing, but added in relabelling.
{
- in: labels.FromStrings("custom", "host:1234"),
+ in: model.LabelSet{"custom": "host:1234"},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -260,10 +261,10 @@
},
// Invalid UTF-8 in label.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
"custom": "\xbd",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -277,10 +278,10 @@
},
// Invalid duration in interval label.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "2notseconds",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -294,10 +295,10 @@
},
// Invalid duration in timeout label.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeTimeoutLabel: "2notseconds",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -311,10 +312,10 @@
},
// 0 interval in timeout label.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "0s",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -328,10 +329,10 @@
},
// 0 duration in timeout label.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeTimeoutLabel: "0s",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -345,11 +346,11 @@
},
// Timeout less than interval.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "1s",
model.ScrapeTimeoutLabel: "2s",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -363,9 +364,9 @@
},
// Don't attach default port.
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -393,9 +394,9 @@
},
// verify that the default port is not removed (http).
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:80",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "http",
MetricsPath: "/metrics",
@@ -423,9 +424,9 @@
},
// verify that the default port is not removed (https).
{
- in: labels.FromMap(map[string]string{
+ in: model.LabelSet{
model.AddressLabel: "1.2.3.4:443",
- }),
+ },
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -453,17 +454,18 @@
},
}
for _, c := range cases {
- in := c.in.Copy()
-
- res, orig, err := PopulateLabels(labels.NewBuilder(c.in), c.cfg)
+ in := maps.Clone(c.in)
+ lb := labels.NewBuilder(labels.EmptyLabels())
+ res, err := PopulateLabels(lb, c.cfg, c.in, nil)
if c.err != "" {
require.EqualError(t, err, c.err)
} else {
require.NoError(t, err)
+ testutil.RequireEqual(t, c.res, res)
+ PopulateDiscoveredLabels(lb, c.cfg, c.in, nil)
+ testutil.RequireEqual(t, c.resOrig, lb.Labels())
}
- require.Equal(t, c.in, in)
- testutil.RequireEqual(t, c.res, res)
- testutil.RequireEqual(t, c.resOrig, orig)
+ require.Equal(t, c.in, in) // Check this wasn't altered by PopulateLabels().
}
}

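The test now feeds targets in as model.LabelSet, the unordered map form that service discovery produces, rather than pre-built labels.Labels, matching PopulateLabels' new signature. The two types convert straightforwardly; a short sketch using the real common/model and model/labels packages (Go module setup assumed):

```go
package main

import (
	"fmt"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	// What service discovery hands over: an unordered label map.
	ls := model.LabelSet{
		model.AddressLabel: "1.2.3.4:1000",
		"custom":           "value",
	}

	// What most of Prometheus works with: sorted, immutable labels.Labels.
	m := make(map[string]string, len(ls))
	for k, v := range ls {
		m[string(k)] = string(v)
	}
	lbls := labels.FromMap(m)

	fmt.Println(lbls) // {__address__="1.2.3.4:1000", custom="value"}
}
```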
scrape/scrape.go (4 additions, 4 deletions)

@@ -450,7 +450,7 @@ func (sp *scrapePool) Sync(tgs []*targetgroup.Group) {
switch {
case nonEmpty:
all = append(all, t)
- case !t.discoveredLabels.IsEmpty():
+ default:
if sp.config.KeepDroppedTargets == 0 || uint(len(sp.droppedTargets)) < sp.config.KeepDroppedTargets {
sp.droppedTargets = append(sp.droppedTargets, t)
}
@@ -553,9 +553,9 @@ func (sp *scrapePool) sync(targets []*Target) {
if _, ok := uniqueLoops[hash]; !ok {
uniqueLoops[hash] = nil
}
- // Need to keep the most updated labels information
- // for displaying it in the Service Discovery web page.
- sp.activeTargets[hash].SetDiscoveredLabels(t.DiscoveredLabels())
+ // Need to keep the most updated ScrapeConfig for
+ // displaying labels in the Service Discovery web page.
+ sp.activeTargets[hash].SetScrapeConfig(sp.config, t.tLabels, t.tgLabels)
}
}

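This hunk is part of 7b03796 ("Scraping: stop storing discovered labels"): instead of caching a materialized discovered-label set per target, each target keeps its raw service-discovery labels (t.tLabels, t.tgLabels) plus the current ScrapeConfig and rebuilds discovered labels only when the Service Discovery page asks for them, trading a little CPU at display time for memory savings on large fleets. A toy sketch of that build-on-demand idea; the types are invented for illustration:

```go
package main

import "fmt"

// target is a toy stand-in for scrape.Target.
type target struct {
	tLabels  map[string]string // labels of the discovered target
	tgLabels map[string]string // labels of its target group
}

// DiscoveredLabels rebuilds the merged label set on demand instead of
// keeping a cached copy per target.
func (t *target) DiscoveredLabels() map[string]string {
	out := make(map[string]string, len(t.tLabels)+len(t.tgLabels))
	for k, v := range t.tgLabels {
		out[k] = v
	}
	for k, v := range t.tLabels { // target labels win over group labels
		out[k] = v
	}
	return out
}

func main() {
	t := &target{
		tLabels:  map[string]string{"__address__": "1.2.3.4:1000"},
		tgLabels: map[string]string{"env": "prod"},
	}
	fmt.Println(t.DiscoveredLabels()) // built only when the UI needs it
}
```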