Skip to content

Commit 9d4ae52

Browse files
committed
Put feature behind a feature flag
Signed-off-by: Paschalis Tsilias <[email protected]>
1 parent e9ccc51 commit 9d4ae52

File tree

4 files changed

+72
-29
lines changed

4 files changed

+72
-29
lines changed

cmd/prometheus/main.go

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -184,6 +184,9 @@ func (c *flagConfig) setFeatureListOptions(logger log.Logger) error {
184184
case "extra-scrape-metrics":
185185
c.scrape.ExtraMetrics = true
186186
level.Info(logger).Log("msg", "Experimental additional scrape metrics")
187+
case "metadata-in-wal":
188+
c.scrape.AppendMetadataToWAL = true
189+
level.Info(logger).Log("msg", "Experimental appending of series metadata to the WAL")
187190
case "new-service-discovery-manager":
188191
c.enableNewSDManager = true
189192
level.Info(logger).Log("msg", "Experimental service discovery manager")

scrape/manager.go

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -128,6 +128,8 @@ type Options struct {
128128
// Option used by downstream scraper users like OpenTelemetry Collector
129129
// to help lookup metric metadata. Should be false for Prometheus.
130130
PassMetadataInContext bool
131+
// Option to experimentally append metadata to the WAL.
132+
AppendMetadataToWAL bool
131133
// Option to increase the interval used by scrape manager to throttle target groups updates.
132134
DiscoveryReloadInterval model.Duration
133135

@@ -207,7 +209,7 @@ func (m *Manager) reload() {
207209
level.Error(m.logger).Log("msg", "error reloading target set", "err", "invalid config id:"+setName)
208210
continue
209211
}
210-
sp, err := newScrapePool(scrapeConfig, m.append, m.jitterSeed, log.With(m.logger, "scrape_pool", setName), m.opts.ExtraMetrics, m.opts.PassMetadataInContext, m.opts.HTTPClientOptions)
212+
sp, err := newScrapePool(scrapeConfig, m.append, m.jitterSeed, log.With(m.logger, "scrape_pool", setName), m.opts.ExtraMetrics, m.opts.PassMetadataInContext, m.opts.AppendMetadataToWAL, m.opts.HTTPClientOptions)
211213
if err != nil {
212214
level.Error(m.logger).Log("msg", "error creating new scrape pool", "err", err, "scrape_pool", setName)
213215
continue

scrape/scrape.go

Lines changed: 27 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -265,7 +265,7 @@ const maxAheadTime = 10 * time.Minute
265265

266266
type labelsMutator func(labels.Labels) labels.Labels
267267

268-
func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed uint64, logger log.Logger, reportExtraMetrics, passMetadataInContext bool, httpOpts []config_util.HTTPClientOption) (*scrapePool, error) {
268+
func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed uint64, logger log.Logger, reportExtraMetrics, passMetadataInContext, appendMetadataToWAL bool, httpOpts []config_util.HTTPClientOption) (*scrapePool, error) {
269269
targetScrapePools.Inc()
270270
if logger == nil {
271271
logger = log.NewNopLogger()
@@ -316,6 +316,7 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed
316316
opts.interval,
317317
opts.timeout,
318318
reportExtraMetrics,
319+
appendMetadataToWAL,
319320
opts.target,
320321
cache,
321322
passMetadataInContext,
@@ -868,7 +869,8 @@ type scrapeLoop struct {
868869

869870
disabledEndOfRunStalenessMarkers bool
870871

871-
reportExtraMetrics bool
872+
reportExtraMetrics bool
873+
appendMetadataToWAL bool
872874
}
873875

874876
// scrapeCache tracks mappings of exposed metric strings to label sets and
@@ -1135,6 +1137,7 @@ func newScrapeLoop(ctx context.Context,
11351137
interval time.Duration,
11361138
timeout time.Duration,
11371139
reportExtraMetrics bool,
1140+
appendMetadataToWAL bool,
11381141
target *Target,
11391142
metricMetadataStore MetricMetadataStore,
11401143
passMetadataInContext bool,
@@ -1178,6 +1181,7 @@ func newScrapeLoop(ctx context.Context,
11781181
interval: interval,
11791182
timeout: timeout,
11801183
reportExtraMetrics: reportExtraMetrics,
1184+
appendMetadataToWAL: appendMetadataToWAL,
11811185
}
11821186
sl.ctx, sl.cancel = context.WithCancel(ctx)
11831187

@@ -1533,15 +1537,17 @@ loop:
15331537
lset = ce.lset
15341538

15351539
// Update metadata only if it changed in the current iteration.
1536-
sl.cache.metaMtx.Lock()
1537-
metaEntry, metaOk := sl.cache.metadata[yoloString([]byte(lset.Get(labels.MetricName)))]
1538-
if metaOk && metaEntry.lastIterChange == sl.cache.iter {
1539-
shouldAppendMetadata = true
1540-
meta.Type = metaEntry.Type
1541-
meta.Unit = metaEntry.Unit
1542-
meta.Help = metaEntry.Help
1540+
if sl.appendMetadataToWAL {
1541+
sl.cache.metaMtx.Lock()
1542+
metaEntry, metaOk := sl.cache.metadata[yoloString([]byte(lset.Get(labels.MetricName)))]
1543+
if metaOk && metaEntry.lastIterChange == sl.cache.iter {
1544+
shouldAppendMetadata = true
1545+
meta.Type = metaEntry.Type
1546+
meta.Unit = metaEntry.Unit
1547+
meta.Help = metaEntry.Help
1548+
}
1549+
sl.cache.metaMtx.Unlock()
15431550
}
1544-
sl.cache.metaMtx.Unlock()
15451551
} else {
15461552
mets = p.Metric(&lset)
15471553
hash = lset.Hash()
@@ -1567,15 +1573,17 @@ loop:
15671573
break loop
15681574
}
15691575

1570-
sl.cache.metaMtx.Lock()
1571-
metaEntry, metaOk := sl.cache.metadata[yoloString([]byte(lset.Get(labels.MetricName)))]
1572-
if metaOk {
1573-
shouldAppendMetadata = true
1574-
meta.Type = metaEntry.Type
1575-
meta.Unit = metaEntry.Unit
1576-
meta.Help = metaEntry.Help
1576+
if sl.appendMetadataToWAL {
1577+
sl.cache.metaMtx.Lock()
1578+
metaEntry, metaOk := sl.cache.metadata[yoloString([]byte(lset.Get(labels.MetricName)))]
1579+
if metaOk {
1580+
shouldAppendMetadata = true
1581+
meta.Type = metaEntry.Type
1582+
meta.Unit = metaEntry.Unit
1583+
meta.Help = metaEntry.Help
1584+
}
1585+
sl.cache.metaMtx.Unlock()
15771586
}
1578-
sl.cache.metaMtx.Unlock()
15791587
}
15801588

15811589
ref, err = app.Append(ref, lset, t, v)
@@ -1615,7 +1623,7 @@ loop:
16151623
e = exemplar.Exemplar{} // reset for next time round loop
16161624
}
16171625

1618-
if shouldAppendMetadata {
1626+
if sl.appendMetadataToWAL && shouldAppendMetadata {
16191627
if _, merr := app.AppendMetadata(ref, lset, meta); merr != nil {
16201628
// No need to fail the scrape on errors appending metadata.
16211629
level.Debug(sl.l).Log("msg", "Error when appending metadata in scrape loop", "ref", fmt.Sprintf("%d", ref), "metadata", fmt.Sprintf("%+v", meta), "err", merr)

0 commit comments

Comments (0)