Skip to content

Commit ea0c746

Browse files
authored
use configurable durations instead of hardcoded values (#4519)
* Use configurable durations instead of hardcoded values. Signed-off-by: Praful Khanduri <[email protected]> * Added chloggen entry. Signed-off-by: Praful Khanduri <[email protected]> --------- Signed-off-by: Praful Khanduri <[email protected]>
1 parent 4330016 commit ea0c746

File tree

4 files changed

+138
-19
lines changed

4 files changed

+138
-19
lines changed
Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
2+
change_type: enhancement
3+
4+
# The name of the component, or a single word describing the area of concern, (e.g. collector, target allocator, auto-instrumentation, opamp, github action)
5+
component: target allocator
6+
7+
# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
8+
note: make evaluation_interval configurable for Prometheus CR watcher
9+
10+
# One or more tracking issues related to the change
11+
issues: [4520]
12+
13+
# (Optional) One or more lines of additional information to render under the primary note.
14+
# These lines will be padded with 2 spaces and then inserted directly into the document.
15+
# Use pipe (|) for multiline entries.
16+
subtext: |

cmd/otel-allocator/internal/config/config.go

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -89,6 +89,7 @@ type PrometheusCRConfig struct {
8989
ProbeNamespaceSelector *metav1.LabelSelector `yaml:"probe_namespace_selector,omitempty"`
9090
ScrapeProtocols []monitoringv1.ScrapeProtocol `yaml:"scrape_protocols,omitempty"`
9191
ScrapeInterval model.Duration `yaml:"scrape_interval,omitempty"`
92+
EvaluationInterval model.Duration `yaml:"evaluation_interval,omitempty"`
9293
}
9394

9495
type HTTPSServerConfig struct {

cmd/otel-allocator/internal/watcher/promOperator.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,7 @@ func NewPrometheusCRWatcher(
6565
// we want to use endpointslices by default
6666
serviceDiscoveryRole := monitoringv1.ServiceDiscoveryRole("EndpointSlice")
6767

68-
// TODO: We should make these durations configurable
68+
//no need to hardcode durations, use default if not set
6969
prom := &monitoringv1.Prometheus{
7070
ObjectMeta: metav1.ObjectMeta{
7171
Namespace: cfg.CollectorNamespace,
@@ -84,7 +84,7 @@ func NewPrometheusCRWatcher(
8484
ServiceDiscoveryRole: &serviceDiscoveryRole,
8585
ScrapeProtocols: cfg.PrometheusCR.ScrapeProtocols,
8686
},
87-
EvaluationInterval: monitoringv1.Duration("30s"),
87+
EvaluationInterval: monitoringv1.Duration(cfg.PrometheusCR.EvaluationInterval.String()),
8888
},
8989
}
9090

cmd/otel-allocator/internal/watcher/promOperator_test.go

Lines changed: 119 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@ func TestLoadConfig(t *testing.T) {
9494
ScrapeConfigs: []*promconfig.ScrapeConfig{
9595
{
9696
JobName: "serviceMonitor/test/simple/0",
97-
ScrapeInterval: model.Duration(30 * time.Second),
97+
ScrapeInterval: model.Duration(60 * time.Second),
9898
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
9999
ScrapeTimeout: model.Duration(10 * time.Second),
100100
HonorTimestamps: true,
@@ -116,7 +116,7 @@ func TestLoadConfig(t *testing.T) {
116116
},
117117
{
118118
JobName: "podMonitor/test/simple/0",
119-
ScrapeInterval: model.Duration(30 * time.Second),
119+
ScrapeInterval: model.Duration(60 * time.Second),
120120
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
121121
ScrapeTimeout: model.Duration(10 * time.Second),
122122
HonorTimestamps: true,
@@ -187,7 +187,7 @@ func TestLoadConfig(t *testing.T) {
187187
ScrapeConfigs: []*promconfig.ScrapeConfig{
188188
{
189189
JobName: "serviceMonitor/test/auth/0",
190-
ScrapeInterval: model.Duration(30 * time.Second),
190+
ScrapeInterval: model.Duration(60 * time.Second),
191191
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
192192
ScrapeTimeout: model.Duration(10 * time.Second),
193193
HonorTimestamps: true,
@@ -255,7 +255,7 @@ func TestLoadConfig(t *testing.T) {
255255
ScrapeConfigs: []*promconfig.ScrapeConfig{
256256
{
257257
JobName: "podMonitor/test/bearer/0",
258-
ScrapeInterval: model.Duration(30 * time.Second),
258+
ScrapeInterval: model.Duration(60 * time.Second),
259259
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
260260
ScrapeTimeout: model.Duration(10 * time.Second),
261261
HonorTimestamps: true,
@@ -351,7 +351,7 @@ func TestLoadConfig(t *testing.T) {
351351
ScrapeConfigs: []*promconfig.ScrapeConfig{
352352
{
353353
JobName: "serviceMonitor/test/valid-sm/0",
354-
ScrapeInterval: model.Duration(30 * time.Second),
354+
ScrapeInterval: model.Duration(60 * time.Second),
355355
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
356356
ScrapeTimeout: model.Duration(10 * time.Second),
357357
HonorTimestamps: true,
@@ -373,7 +373,7 @@ func TestLoadConfig(t *testing.T) {
373373
},
374374
{
375375
JobName: "podMonitor/test/valid-pm/0",
376-
ScrapeInterval: model.Duration(30 * time.Second),
376+
ScrapeInterval: model.Duration(60 * time.Second),
377377
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
378378
ScrapeTimeout: model.Duration(10 * time.Second),
379379
HonorTimestamps: true,
@@ -462,7 +462,7 @@ func TestLoadConfig(t *testing.T) {
462462
ScrapeConfigs: []*promconfig.ScrapeConfig{
463463
{
464464
JobName: "serviceMonitor/test/valid-sm/0",
465-
ScrapeInterval: model.Duration(30 * time.Second),
465+
ScrapeInterval: model.Duration(60 * time.Second),
466466
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
467467
ScrapeTimeout: model.Duration(10 * time.Second),
468468
HonorTimestamps: true,
@@ -484,7 +484,7 @@ func TestLoadConfig(t *testing.T) {
484484
},
485485
{
486486
JobName: "podMonitor/test/valid-pm/0",
487-
ScrapeInterval: model.Duration(30 * time.Second),
487+
ScrapeInterval: model.Duration(60 * time.Second),
488488
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
489489
ScrapeTimeout: model.Duration(10 * time.Second),
490490
HonorTimestamps: true,
@@ -555,7 +555,7 @@ func TestLoadConfig(t *testing.T) {
555555
ScrapeConfigs: []*promconfig.ScrapeConfig{
556556
{
557557
JobName: "serviceMonitor/test/sm-1/0",
558-
ScrapeInterval: model.Duration(30 * time.Second),
558+
ScrapeInterval: model.Duration(60 * time.Second),
559559
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
560560
ScrapeTimeout: model.Duration(10 * time.Second),
561561
HonorTimestamps: true,
@@ -626,7 +626,7 @@ func TestLoadConfig(t *testing.T) {
626626
ScrapeConfigs: []*promconfig.ScrapeConfig{
627627
{
628628
JobName: "podMonitor/test/pm-1/0",
629-
ScrapeInterval: model.Duration(30 * time.Second),
629+
ScrapeInterval: model.Duration(60 * time.Second),
630630
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
631631
ScrapeTimeout: model.Duration(10 * time.Second),
632632
HonorTimestamps: true,
@@ -687,7 +687,7 @@ func TestLoadConfig(t *testing.T) {
687687
ScrapeConfigs: []*promconfig.ScrapeConfig{
688688
{
689689
JobName: "scrapeConfig/test/scrapeconfig-test-1",
690-
ScrapeInterval: model.Duration(30 * time.Second),
690+
ScrapeInterval: model.Duration(60 * time.Second),
691691
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
692692
ScrapeTimeout: model.Duration(10 * time.Second),
693693
HonorTimestamps: true,
@@ -751,7 +751,7 @@ func TestLoadConfig(t *testing.T) {
751751
ScrapeConfigs: []*promconfig.ScrapeConfig{
752752
{
753753
JobName: "probe/test/probe-test-1",
754-
ScrapeInterval: model.Duration(30 * time.Second),
754+
ScrapeInterval: model.Duration(60 * time.Second),
755755
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
756756
ScrapeTimeout: model.Duration(10 * time.Second),
757757
HonorTimestamps: true,
@@ -826,7 +826,7 @@ func TestLoadConfig(t *testing.T) {
826826
ScrapeConfigs: []*promconfig.ScrapeConfig{
827827
{
828828
JobName: "serviceMonitor/labellednamespace/sm-1/0",
829-
ScrapeInterval: model.Duration(30 * time.Second),
829+
ScrapeInterval: model.Duration(60 * time.Second),
830830
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
831831
ScrapeTimeout: model.Duration(10 * time.Second),
832832
HonorTimestamps: true,
@@ -896,7 +896,7 @@ func TestLoadConfig(t *testing.T) {
896896
ScrapeConfigs: []*promconfig.ScrapeConfig{
897897
{
898898
JobName: "podMonitor/labellednamespace/pm-1/0",
899-
ScrapeInterval: model.Duration(30 * time.Second),
899+
ScrapeInterval: model.Duration(60 * time.Second),
900900
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
901901
ScrapeTimeout: model.Duration(10 * time.Second),
902902
HonorTimestamps: true,
@@ -1002,7 +1002,7 @@ func TestNamespaceLabelUpdate(t *testing.T) {
10021002
ScrapeConfigs: []*promconfig.ScrapeConfig{
10031003
{
10041004
JobName: "podMonitor/labellednamespace/pm-1/0",
1005-
ScrapeInterval: model.Duration(30 * time.Second),
1005+
ScrapeInterval: model.Duration(60 * time.Second),
10061006
ScrapeProtocols: promconfig.DefaultScrapeProtocols,
10071007
ScrapeTimeout: model.Duration(10 * time.Second),
10081008
HonorTimestamps: true,
@@ -1152,6 +1152,108 @@ func TestRateLimit(t *testing.T) {
11521152
assert.Less(t, eventInterval, elapsedTime)
11531153
}
11541154

1155+
func TestDefaultDurations(t *testing.T) {
1156+
namespace := "test"
1157+
portName := "web"
1158+
tests := []struct {
1159+
name string
1160+
serviceMonitors []*monitoringv1.ServiceMonitor
1161+
cfg allocatorconfig.Config
1162+
expectedScrape model.Duration
1163+
expectedEval model.Duration
1164+
}{
1165+
{
1166+
name: "custom scrape and evaluation intervals",
1167+
serviceMonitors: []*monitoringv1.ServiceMonitor{
1168+
{
1169+
ObjectMeta: metav1.ObjectMeta{
1170+
Name: "test-sm",
1171+
Namespace: namespace,
1172+
},
1173+
Spec: monitoringv1.ServiceMonitorSpec{
1174+
JobLabel: "test",
1175+
Endpoints: []monitoringv1.Endpoint{
1176+
{
1177+
Port: portName,
1178+
},
1179+
},
1180+
},
1181+
},
1182+
},
1183+
cfg: allocatorconfig.Config{
1184+
PrometheusCR: allocatorconfig.PrometheusCRConfig{
1185+
ScrapeInterval: model.Duration(120 * time.Second),
1186+
EvaluationInterval: model.Duration(120 * time.Second),
1187+
ServiceMonitorSelector: &metav1.LabelSelector{},
1188+
},
1189+
},
1190+
expectedScrape: model.Duration(120 * time.Second),
1191+
expectedEval: model.Duration(120 * time.Second),
1192+
},
1193+
{
1194+
name: "prometheus operator applies defaults when intervals nil",
1195+
serviceMonitors: []*monitoringv1.ServiceMonitor{
1196+
{
1197+
ObjectMeta: metav1.ObjectMeta{
1198+
Name: "test-sm",
1199+
Namespace: namespace,
1200+
},
1201+
Spec: monitoringv1.ServiceMonitorSpec{
1202+
JobLabel: "test",
1203+
Endpoints: []monitoringv1.Endpoint{
1204+
{
1205+
Port: portName,
1206+
},
1207+
},
1208+
},
1209+
},
1210+
},
1211+
cfg: allocatorconfig.Config{
1212+
PrometheusCR: allocatorconfig.PrometheusCRConfig{
1213+
ServiceMonitorSelector: &metav1.LabelSelector{},
1214+
},
1215+
},
1216+
expectedScrape: model.Duration(60 * time.Second),
1217+
expectedEval: model.Duration(60 * time.Second),
1218+
},
1219+
}
1220+
1221+
for _, tt := range tests {
1222+
t.Run(tt.name, func(t *testing.T) {
1223+
w, _ := getTestPrometheusCRWatcher(t, namespace, tt.serviceMonitors, nil, nil, nil, tt.cfg)
1224+
defer w.Close()
1225+
1226+
events := make(chan Event, 1)
1227+
eventInterval := 5 * time.Millisecond
1228+
w.eventInterval = eventInterval
1229+
1230+
go func() {
1231+
watchErr := w.Watch(events, make(chan error))
1232+
require.NoError(t, watchErr)
1233+
}()
1234+
1235+
if success := cache.WaitForNamedCacheSync("namespace", w.stopChannel, w.nsInformer.HasSynced); !success {
1236+
require.True(t, success)
1237+
}
1238+
1239+
for _, informer := range w.informers {
1240+
success := cache.WaitForCacheSync(w.stopChannel, informer.HasSynced)
1241+
require.True(t, success)
1242+
}
1243+
1244+
got, err := w.LoadConfig(context.Background())
1245+
assert.NoError(t, err)
1246+
1247+
assert.NotEmpty(t, got.ScrapeConfigs)
1248+
1249+
for _, sc := range got.ScrapeConfigs {
1250+
assert.Equal(t, tt.expectedScrape, sc.ScrapeInterval)
1251+
}
1252+
assert.Equal(t, tt.expectedEval, got.GlobalConfig.EvaluationInterval)
1253+
})
1254+
}
1255+
}
1256+
11551257
// getTestPrometheusCRWatcher creates a test instance of PrometheusCRWatcher with fake clients
11561258
// and test secrets.
11571259
func getTestPrometheusCRWatcher(
@@ -1235,7 +1337,7 @@ func getTestPrometheusCRWatcher(
12351337
},
12361338
Spec: monitoringv1.PrometheusSpec{
12371339
CommonPrometheusFields: monitoringv1.CommonPrometheusFields{
1238-
ScrapeInterval: monitoringv1.Duration("30s"),
1340+
ScrapeInterval: monitoringv1.Duration(cfg.PrometheusCR.ScrapeInterval.String()),
12391341
ServiceMonitorSelector: cfg.PrometheusCR.ServiceMonitorSelector,
12401342
PodMonitorSelector: cfg.PrometheusCR.PodMonitorSelector,
12411343
ServiceMonitorNamespaceSelector: cfg.PrometheusCR.ServiceMonitorNamespaceSelector,
@@ -1246,7 +1348,7 @@ func getTestPrometheusCRWatcher(
12461348
ScrapeConfigNamespaceSelector: cfg.PrometheusCR.ScrapeConfigNamespaceSelector,
12471349
ServiceDiscoveryRole: &serviceDiscoveryRole,
12481350
},
1249-
EvaluationInterval: monitoringv1.Duration("30s"),
1351+
EvaluationInterval: monitoringv1.Duration(cfg.PrometheusCR.EvaluationInterval.String()),
12501352
},
12511353
}
12521354

0 commit comments

Comments
 (0)