Compare commits

...

4 Commits

Author SHA1 Message Date
sarthaktyagi-505 47b52942c9
Merge 94e83197ca into 76b74e25d5 2024-11-21 13:30:17 +08:00
Ivan Goncharov 76b74e25d5
fix: add very small delay between observations to make test more realistic (and faster) (#1691) 2024-11-20 14:47:38 +01:00
sarthak.tyagi 94e83197ca use const instead of recording and alerting string in api_test.go
Signed-off-by: sarthak.tyagi <sarthaktyagi100@gmail.com>
2024-07-10 11:30:56 +01:00
sarthak.tyagi c302ac5d8f add type to alertingRule and recordingRule struct
Signed-off-by: sarthak.tyagi <sarthaktyagi100@gmail.com>
2024-07-10 11:30:56 +01:00
3 changed files with 22 additions and 13 deletions

View File

@@ -601,6 +601,7 @@ type AlertingRule struct {
EvaluationTime float64 `json:"evaluationTime"`
LastEvaluation time.Time `json:"lastEvaluation"`
State string `json:"state"`
Type string `json:"type"`
}
// RecordingRule models a recording rule.
@@ -612,6 +613,7 @@ type RecordingRule struct {
LastError string `json:"lastError,omitempty"`
EvaluationTime float64 `json:"evaluationTime"`
LastEvaluation time.Time `json:"lastEvaluation"`
Type string `json:"type"`
}
// Alert models an active alert.
@@ -721,11 +723,13 @@ func (rg *RuleGroup) UnmarshalJSON(b []byte) error {
for _, rule := range v.Rules {
alertingRule := AlertingRule{}
alertingRule.Type = string(RuleTypeAlerting)
if err := json.Unmarshal(rule, &alertingRule); err == nil {
rg.Rules = append(rg.Rules, alertingRule)
continue
}
recordingRule := RecordingRule{}
recordingRule.Type = string(RuleTypeRecording)
if err := json.Unmarshal(rule, &recordingRule); err == nil {
rg.Rules = append(rg.Rules, recordingRule)
continue

View File

@@ -717,19 +717,21 @@ func TestAPIs(t *testing.T) {
"annotations": map[string]interface{}{
"summary": "High request latency",
},
"duration": 600,
"health": "ok",
"labels": map[string]interface{}{
"severity": "page",
},
"duration": 600,
"health": "ok",
"name": "HighRequestLatency",
"query": "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5",
"lastError": "",
"type": "alerting",
},
{
"health": "ok",
"name": "job:http_inprogress_requests:sum",
"query": "sum(http_inprogress_requests) by (job)",
"lastError": "",
"type": "recording",
},
},
@@ -769,12 +771,14 @@ func TestAPIs(t *testing.T) {
Name: "HighRequestLatency",
Query: "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5",
LastError: "",
Type: string(RuleTypeAlerting),
},
RecordingRule{
Health: RuleHealthGood,
Name: "job:http_inprogress_requests:sum",
Query: "sum(http_inprogress_requests) by (job)",
LastError: "",
Type: string(RuleTypeRecording),
},
},
},
@@ -861,14 +865,14 @@ func TestAPIs(t *testing.T) {
Annotations: model.LabelSet{
"summary": "High request latency",
},
Duration: 600,
Health: RuleHealthGood,
Labels: model.LabelSet{
"severity": "page",
},
Duration: 600,
Health: RuleHealthGood,
Name: "HighRequestLatency",
Query: "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5",
LastError: "",
Type: string(RuleTypeAlerting),
EvaluationTime: 0.5,
LastEvaluation: time.Date(2020, 5, 18, 15, 52, 53, 450311300, time.UTC),
State: "firing",
@@ -877,7 +881,7 @@ func TestAPIs(t *testing.T) {
Health: RuleHealthGood,
Name: "job:http_inprogress_requests:sum",
Query: "sum(http_inprogress_requests) by (job)",
LastError: "",
Type: string(RuleTypeRecording),
EvaluationTime: 0.3,
LastEvaluation: time.Date(2020, 5, 18, 15, 52, 53, 450311300, time.UTC),
},

View File

@@ -382,6 +382,7 @@ func TestHistogramAtomicObserve(t *testing.T) {
return
default:
his.Observe(1)
time.Sleep(time.Nanosecond)
}
}
}