Compare commits

...

4 Commits

Author SHA1 Message Date
sarthaktyagi-505 47b52942c9
Merge 94e83197ca into 76b74e25d5 2024-11-21 13:30:17 +08:00
Ivan Goncharov 76b74e25d5
fix: add very small delay between observations to make test more realistic (and faster) (#1691) 2024-11-20 14:47:38 +01:00
sarthak.tyagi 94e83197ca use const instead of recording and alerting string in api_test.go
Signed-off-by: sarthak.tyagi <sarthaktyagi100@gmail.com>
2024-07-10 11:30:56 +01:00
sarthak.tyagi c302ac5d8f add type to alertingRule and recordingRule struct
Signed-off-by: sarthak.tyagi <sarthaktyagi100@gmail.com>
2024-07-10 11:30:56 +01:00
3 changed files with 22 additions and 13 deletions

View File

@@ -601,6 +601,7 @@ type AlertingRule struct {
EvaluationTime float64 `json:"evaluationTime"` EvaluationTime float64 `json:"evaluationTime"`
LastEvaluation time.Time `json:"lastEvaluation"` LastEvaluation time.Time `json:"lastEvaluation"`
State string `json:"state"` State string `json:"state"`
Type string `json:"type"`
} }
// RecordingRule models a recording rule. // RecordingRule models a recording rule.
@@ -612,6 +613,7 @@ type RecordingRule struct {
LastError string `json:"lastError,omitempty"` LastError string `json:"lastError,omitempty"`
EvaluationTime float64 `json:"evaluationTime"` EvaluationTime float64 `json:"evaluationTime"`
LastEvaluation time.Time `json:"lastEvaluation"` LastEvaluation time.Time `json:"lastEvaluation"`
Type string `json:"type"`
} }
// Alert models an active alert. // Alert models an active alert.
@@ -721,11 +723,13 @@ func (rg *RuleGroup) UnmarshalJSON(b []byte) error {
for _, rule := range v.Rules { for _, rule := range v.Rules {
alertingRule := AlertingRule{} alertingRule := AlertingRule{}
alertingRule.Type = string(RuleTypeAlerting)
if err := json.Unmarshal(rule, &alertingRule); err == nil { if err := json.Unmarshal(rule, &alertingRule); err == nil {
rg.Rules = append(rg.Rules, alertingRule) rg.Rules = append(rg.Rules, alertingRule)
continue continue
} }
recordingRule := RecordingRule{} recordingRule := RecordingRule{}
recordingRule.Type = string(RuleTypeRecording)
if err := json.Unmarshal(rule, &recordingRule); err == nil { if err := json.Unmarshal(rule, &recordingRule); err == nil {
rg.Rules = append(rg.Rules, recordingRule) rg.Rules = append(rg.Rules, recordingRule)
continue continue

View File

@@ -717,20 +717,22 @@ func TestAPIs(t *testing.T) {
"annotations": map[string]interface{}{ "annotations": map[string]interface{}{
"summary": "High request latency", "summary": "High request latency",
}, },
"duration": 600,
"health": "ok",
"labels": map[string]interface{}{ "labels": map[string]interface{}{
"severity": "page", "severity": "page",
}, },
"name": "HighRequestLatency", "duration": 600,
"query": "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5", "health": "ok",
"type": "alerting", "name": "HighRequestLatency",
"query": "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5",
"lastError": "",
"type": "alerting",
}, },
{ {
"health": "ok", "health": "ok",
"name": "job:http_inprogress_requests:sum", "name": "job:http_inprogress_requests:sum",
"query": "sum(http_inprogress_requests) by (job)", "query": "sum(http_inprogress_requests) by (job)",
"type": "recording", "lastError": "",
"type": "recording",
}, },
}, },
}, },
@@ -769,12 +771,14 @@ func TestAPIs(t *testing.T) {
Name: "HighRequestLatency", Name: "HighRequestLatency",
Query: "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5", Query: "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5",
LastError: "", LastError: "",
Type: string(RuleTypeAlerting),
}, },
RecordingRule{ RecordingRule{
Health: RuleHealthGood, Health: RuleHealthGood,
Name: "job:http_inprogress_requests:sum", Name: "job:http_inprogress_requests:sum",
Query: "sum(http_inprogress_requests) by (job)", Query: "sum(http_inprogress_requests) by (job)",
LastError: "", LastError: "",
Type: string(RuleTypeRecording),
}, },
}, },
}, },
@@ -861,14 +865,14 @@ func TestAPIs(t *testing.T) {
Annotations: model.LabelSet{ Annotations: model.LabelSet{
"summary": "High request latency", "summary": "High request latency",
}, },
Duration: 600,
Health: RuleHealthGood,
Labels: model.LabelSet{ Labels: model.LabelSet{
"severity": "page", "severity": "page",
}, },
Duration: 600,
Health: RuleHealthGood,
Name: "HighRequestLatency", Name: "HighRequestLatency",
Query: "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5", Query: "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5",
LastError: "", Type: string(RuleTypeAlerting),
EvaluationTime: 0.5, EvaluationTime: 0.5,
LastEvaluation: time.Date(2020, 5, 18, 15, 52, 53, 450311300, time.UTC), LastEvaluation: time.Date(2020, 5, 18, 15, 52, 53, 450311300, time.UTC),
State: "firing", State: "firing",
@@ -877,7 +881,7 @@ func TestAPIs(t *testing.T) {
Health: RuleHealthGood, Health: RuleHealthGood,
Name: "job:http_inprogress_requests:sum", Name: "job:http_inprogress_requests:sum",
Query: "sum(http_inprogress_requests) by (job)", Query: "sum(http_inprogress_requests) by (job)",
LastError: "", Type: string(RuleTypeRecording),
EvaluationTime: 0.3, EvaluationTime: 0.3,
LastEvaluation: time.Date(2020, 5, 18, 15, 52, 53, 450311300, time.UTC), LastEvaluation: time.Date(2020, 5, 18, 15, 52, 53, 450311300, time.UTC),
}, },

View File

@@ -382,6 +382,7 @@ func TestHistogramAtomicObserve(t *testing.T) {
return return
default: default:
his.Observe(1) his.Observe(1)
time.Sleep(time.Nanosecond)
} }
} }
} }