Merge branch 'master' into beorn7/histogram
commit ce36ee3182

@@ -78,12 +78,12 @@ ifneq ($(shell which gotestsum),)
 endif
 endif
 
-PROMU_VERSION ?= 0.11.1
+PROMU_VERSION ?= 0.12.0
 PROMU_URL := https://github.com/prometheus/promu/releases/download/v$(PROMU_VERSION)/promu-$(PROMU_VERSION).$(GO_BUILD_PLATFORM).tar.gz
 
 GOLANGCI_LINT :=
 GOLANGCI_LINT_OPTS ?=
-GOLANGCI_LINT_VERSION ?= v1.36.0
+GOLANGCI_LINT_VERSION ?= v1.39.0
 # golangci-lint only supports linux, darwin and windows platforms on i386/amd64.
 # windows isn't included here because of the path separator being different.
 ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux darwin))
@@ -123,6 +123,7 @@ const (
 	epAlertManagers = apiPrefix + "/alertmanagers"
 	epQuery = apiPrefix + "/query"
 	epQueryRange = apiPrefix + "/query_range"
+	epQueryExemplars = apiPrefix + "/query_exemplars"
 	epLabels = apiPrefix + "/labels"
 	epLabelValues = apiPrefix + "/label/:name/values"
 	epSeries = apiPrefix + "/series"
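
For orientation, the new epQueryExemplars constant maps to the exemplars endpoint of the Prometheus HTTP API. Below is a minimal sketch of the GET URL the client ends up issuing, using only the standard library; the localhost address and the timestamp values are placeholder assumptions, not part of this change.

```go
package main

import (
	"fmt"
	"log"
	"net/url"
)

func main() {
	// Hypothetical base address; the real client derives it from api.Config.Address.
	u, err := url.Parse("http://localhost:9090/api/v1/query_exemplars")
	if err != nil {
		log.Fatal(err)
	}
	q := u.Query()
	q.Set("query", "tns_request_duration_seconds_bucket")
	q.Set("start", "1589815973.45") // Unix seconds with a fractional part
	q.Set("end", "1589816033.45")
	u.RawQuery = q.Encode()
	fmt.Println(u.String())
}
```
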
@@ -239,6 +240,8 @@ type API interface {
 	Query(ctx context.Context, query string, ts time.Time) (model.Value, Warnings, error)
 	// QueryRange performs a query for the given range.
 	QueryRange(ctx context.Context, query string, r Range) (model.Value, Warnings, error)
+	// QueryExemplars performs a query for exemplars by the given query and time range.
+	QueryExemplars(ctx context.Context, query string, startTime time.Time, endTime time.Time) ([]ExemplarQueryResult, error)
 	// Buildinfo returns various build information properties about the Prometheus server
 	Buildinfo(ctx context.Context) (BuildinfoResult, error)
 	// Runtimeinfo returns the various runtime information properties about the Prometheus server.
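
With the interface extended, callers reach exemplars through the usual client setup. A usage sketch follows; the server address and the query are assumptions for illustration, not part of this change.

```go
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/prometheus/client_golang/api"
	v1 "github.com/prometheus/client_golang/api/prometheus/v1"
)

func main() {
	// The address is an assumption; point it at your Prometheus server.
	client, err := api.NewClient(api.Config{Address: "http://localhost:9090"})
	if err != nil {
		log.Fatalf("creating client: %v", err)
	}
	promAPI := v1.NewAPI(client)

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// Fetch exemplars for the last five minutes of a (hypothetical) series.
	results, err := promAPI.QueryExemplars(ctx, "tns_request_duration_seconds_bucket",
		time.Now().Add(-5*time.Minute), time.Now())
	if err != nil {
		log.Fatalf("querying exemplars: %v", err)
	}
	for _, r := range results {
		fmt.Printf("%v: %d exemplar(s)\n", r.SeriesLabels, len(r.Exemplars))
	}
}
```
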
@@ -344,23 +347,28 @@ type Rules []interface{}
 
 // AlertingRule models a alerting rule.
 type AlertingRule struct {
-	Name        string         `json:"name"`
-	Query       string         `json:"query"`
-	Duration    float64        `json:"duration"`
-	Labels      model.LabelSet `json:"labels"`
-	Annotations model.LabelSet `json:"annotations"`
-	Alerts      []*Alert       `json:"alerts"`
-	Health      RuleHealth     `json:"health"`
-	LastError   string         `json:"lastError,omitempty"`
+	Name           string         `json:"name"`
+	Query          string         `json:"query"`
+	Duration       float64        `json:"duration"`
+	Labels         model.LabelSet `json:"labels"`
+	Annotations    model.LabelSet `json:"annotations"`
+	Alerts         []*Alert       `json:"alerts"`
+	Health         RuleHealth     `json:"health"`
+	LastError      string         `json:"lastError,omitempty"`
+	EvaluationTime float64        `json:"evaluationTime"`
+	LastEvaluation time.Time      `json:"lastEvaluation"`
+	State          string         `json:"state"`
 }
 
 // RecordingRule models a recording rule.
 type RecordingRule struct {
-	Name      string         `json:"name"`
-	Query     string         `json:"query"`
-	Labels    model.LabelSet `json:"labels,omitempty"`
-	Health    RuleHealth     `json:"health"`
-	LastError string         `json:"lastError,omitempty"`
+	Name           string         `json:"name"`
+	Query          string         `json:"query"`
+	Labels         model.LabelSet `json:"labels,omitempty"`
+	Health         RuleHealth     `json:"health"`
+	LastError      string         `json:"lastError,omitempty"`
+	EvaluationTime float64        `json:"evaluationTime"`
+	LastEvaluation time.Time      `json:"lastEvaluation"`
 }
 
 // Alert models an active alert.
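
The new EvaluationTime, LastEvaluation, and State fields surface through the existing Rules() call. A sketch of reading them, assuming a v1.API built as in the earlier example; Rules is an []interface{}, so a type switch tells alerting rules apart from recording rules.

```go
package example

import (
	"context"
	"fmt"

	v1 "github.com/prometheus/client_golang/api/prometheus/v1"
)

// printRules walks a RulesResult and prints the evaluation metadata added here.
func printRules(ctx context.Context, promAPI v1.API) error {
	result, err := promAPI.Rules(ctx)
	if err != nil {
		return err
	}
	for _, g := range result.Groups {
		for _, r := range g.Rules {
			// Concrete element types are AlertingRule or RecordingRule values.
			switch rule := r.(type) {
			case v1.AlertingRule:
				fmt.Printf("alerting  %s state=%s eval=%.3fs last=%s\n",
					rule.Name, rule.State, rule.EvaluationTime, rule.LastEvaluation)
			case v1.RecordingRule:
				fmt.Printf("recording %s eval=%.3fs last=%s\n",
					rule.Name, rule.EvaluationTime, rule.LastEvaluation)
			}
		}
	}
	return nil
}
```
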
@@ -380,12 +388,15 @@ type TargetsResult struct {
 
 // ActiveTarget models an active Prometheus scrape target.
 type ActiveTarget struct {
-	DiscoveredLabels map[string]string `json:"discoveredLabels"`
-	Labels           model.LabelSet    `json:"labels"`
-	ScrapeURL        string            `json:"scrapeUrl"`
-	LastError        string            `json:"lastError"`
-	LastScrape       time.Time         `json:"lastScrape"`
-	Health           HealthStatus      `json:"health"`
+	DiscoveredLabels   map[string]string `json:"discoveredLabels"`
+	Labels             model.LabelSet    `json:"labels"`
+	ScrapePool         string            `json:"scrapePool"`
+	ScrapeURL          string            `json:"scrapeUrl"`
+	GlobalURL          string            `json:"globalUrl"`
+	LastError          string            `json:"lastError"`
+	LastScrape         time.Time         `json:"lastScrape"`
+	LastScrapeDuration float64           `json:"lastScrapeDuration"`
+	Health             HealthStatus      `json:"health"`
 }
 
 // DroppedTarget models a dropped Prometheus scrape target.
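
The extra ActiveTarget fields show up in Targets() results. A sketch under the same assumptions as the earlier client setup.

```go
package example

import (
	"context"
	"fmt"

	v1 "github.com/prometheus/client_golang/api/prometheus/v1"
)

// printActiveTargets shows the fields added to ActiveTarget
// (ScrapePool, GlobalURL, LastScrapeDuration).
func printActiveTargets(ctx context.Context, promAPI v1.API) error {
	tr, err := promAPI.Targets(ctx)
	if err != nil {
		return err
	}
	for _, t := range tr.Active {
		fmt.Printf("pool=%s scrape=%s global=%s lastScrape=%s (%.3fs) health=%s\n",
			t.ScrapePool, t.ScrapeURL, t.GlobalURL, t.LastScrape, t.LastScrapeDuration, t.Health)
	}
	return nil
}
```
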
@@ -480,14 +491,17 @@ func (r *AlertingRule) UnmarshalJSON(b []byte) error {
 	}
 
 	rule := struct {
-		Name        string         `json:"name"`
-		Query       string         `json:"query"`
-		Duration    float64        `json:"duration"`
-		Labels      model.LabelSet `json:"labels"`
-		Annotations model.LabelSet `json:"annotations"`
-		Alerts      []*Alert       `json:"alerts"`
-		Health      RuleHealth     `json:"health"`
-		LastError   string         `json:"lastError,omitempty"`
+		Name           string         `json:"name"`
+		Query          string         `json:"query"`
+		Duration       float64        `json:"duration"`
+		Labels         model.LabelSet `json:"labels"`
+		Annotations    model.LabelSet `json:"annotations"`
+		Alerts         []*Alert       `json:"alerts"`
+		Health         RuleHealth     `json:"health"`
+		LastError      string         `json:"lastError,omitempty"`
+		EvaluationTime float64        `json:"evaluationTime"`
+		LastEvaluation time.Time      `json:"lastEvaluation"`
+		State          string         `json:"state"`
 	}{}
 	if err := json.Unmarshal(b, &rule); err != nil {
 		return err
@@ -500,6 +514,9 @@ func (r *AlertingRule) UnmarshalJSON(b []byte) error {
 	r.Duration = rule.Duration
 	r.Labels = rule.Labels
 	r.LastError = rule.LastError
+	r.EvaluationTime = rule.EvaluationTime
+	r.LastEvaluation = rule.LastEvaluation
+	r.State = rule.State
 
 	return nil
 }
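
Because AlertingRule has a custom UnmarshalJSON, a rule can also be decoded directly from a rules-payload fragment, provided the fragment carries the "type" discriminator the method checks for. A sketch with a hypothetical JSON snippet (field values invented for illustration).

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	v1 "github.com/prometheus/client_golang/api/prometheus/v1"
)

// A hypothetical /api/v1/rules fragment; "type":"alerting" is required so the
// custom UnmarshalJSON accepts it as an alerting rule.
const alertingJSON = `{
  "type": "alerting",
  "name": "HighRequestLatency",
  "query": "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5",
  "duration": 600,
  "health": "ok",
  "state": "firing",
  "evaluationTime": 0.5,
  "lastEvaluation": "2020-05-18T15:52:53.4503113Z"
}`

func main() {
	var rule v1.AlertingRule
	if err := json.Unmarshal([]byte(alertingJSON), &rule); err != nil {
		log.Fatal(err)
	}
	fmt.Println(rule.Name, rule.State, rule.EvaluationTime, rule.LastEvaluation)
}
```
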
@@ -519,11 +536,13 @@ func (r *RecordingRule) UnmarshalJSON(b []byte) error {
 	}
 
 	rule := struct {
-		Name      string         `json:"name"`
-		Query     string         `json:"query"`
-		Labels    model.LabelSet `json:"labels,omitempty"`
-		Health    RuleHealth     `json:"health"`
-		LastError string         `json:"lastError,omitempty"`
+		Name           string         `json:"name"`
+		Query          string         `json:"query"`
+		Labels         model.LabelSet `json:"labels,omitempty"`
+		Health         RuleHealth     `json:"health"`
+		LastError      string         `json:"lastError,omitempty"`
+		EvaluationTime float64        `json:"evaluationTime"`
+		LastEvaluation time.Time      `json:"lastEvaluation"`
 	}{}
 	if err := json.Unmarshal(b, &rule); err != nil {
 		return err
@@ -533,6 +552,8 @@ func (r *RecordingRule) UnmarshalJSON(b []byte) error {
 	r.Name = rule.Name
 	r.LastError = rule.LastError
 	r.Query = rule.Query
+	r.EvaluationTime = rule.EvaluationTime
+	r.LastEvaluation = rule.LastEvaluation
 
 	return nil
 }
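
Duration and EvaluationTime are plain float64 seconds in the JSON, mirroring the HTTP API. A small sketch of converting them to time.Duration for display; the helper names are hypothetical.

```go
package example

import (
	"fmt"
	"time"

	v1 "github.com/prometheus/client_golang/api/prometheus/v1"
)

// evalDuration converts a float64 seconds value (Duration, EvaluationTime)
// into a time.Duration for easier formatting and comparison.
func evalDuration(seconds float64) time.Duration {
	return time.Duration(seconds * float64(time.Second))
}

// describe renders a recording rule's evaluation metadata in one line.
func describe(r v1.RecordingRule) string {
	return fmt.Sprintf("%s evaluated in %s at %s",
		r.Name, evalDuration(r.EvaluationTime), r.LastEvaluation.Format(time.RFC3339))
}
```
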
@@ -570,6 +591,18 @@ func (qr *queryResult) UnmarshalJSON(b []byte) error {
 	return err
 }
 
+// Exemplar is additional information associated with a time series.
+type Exemplar struct {
+	Labels    model.LabelSet    `json:"labels"`
+	Value     model.SampleValue `json:"value"`
+	Timestamp model.Time        `json:"timestamp"`
+}
+
+type ExemplarQueryResult struct {
+	SeriesLabels model.LabelSet `json:"seriesLabels"`
+	Exemplars    []Exemplar     `json:"exemplars"`
+}
+
 // NewAPI returns a new API for the client.
 //
 // It is safe to use the returned API from multiple goroutines.
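
The new types decode the "data" portion of a query_exemplars response. A sketch with a hypothetical payload, shaped like the test fixtures further down; the values themselves are invented.

```go
package example

import (
	"encoding/json"
	"fmt"

	v1 "github.com/prometheus/client_golang/api/prometheus/v1"
)

// A hypothetical exemplars payload: values arrive as quoted strings and
// timestamps as Unix seconds, matching the Prometheus HTTP API encoding.
const exemplarsJSON = `[
  {
    "seriesLabels": {"__name__": "tns_request_duration_seconds_bucket", "instance": "app:80", "job": "tns/app"},
    "exemplars": [
      {"labels": {"traceID": "19fd8c8a33975a23"}, "value": "0.003863295", "timestamp": 1600096945.479}
    ]
  }
]`

// decodeExemplars unmarshals the payload and prints each exemplar's trace ID.
func decodeExemplars() ([]v1.ExemplarQueryResult, error) {
	var res []v1.ExemplarQueryResult
	if err := json.Unmarshal([]byte(exemplarsJSON), &res); err != nil {
		return nil, err
	}
	for _, r := range res {
		for _, e := range r.Exemplars {
			fmt.Println(r.SeriesLabels["job"], e.Labels["traceID"], e.Value, e.Timestamp.Time())
		}
	}
	return res, nil
}
```
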
@@ -949,7 +982,29 @@ func (h *httpAPI) TSDB(ctx context.Context) (TSDBResult, error) {
 
 	var res TSDBResult
 	return res, json.Unmarshal(body, &res)
 }
 
+func (h *httpAPI) QueryExemplars(ctx context.Context, query string, startTime time.Time, endTime time.Time) ([]ExemplarQueryResult, error) {
+	u := h.client.URL(epQueryExemplars, nil)
+	q := u.Query()
+
+	q.Set("query", query)
+	q.Set("start", formatTime(startTime))
+	q.Set("end", formatTime(endTime))
+	u.RawQuery = q.Encode()
+
+	req, err := http.NewRequest(http.MethodGet, u.String(), nil)
+	if err != nil {
+		return nil, err
+	}
+
+	_, body, _, err := h.client.Do(ctx, req)
+	if err != nil {
+		return nil, err
+	}
+
+	var res []ExemplarQueryResult
+	return res, json.Unmarshal(body, &res)
+}
 
 // Warnings is an array of non critical errors
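
The implementation reuses the package's existing formatTime helper, which is not part of this diff. It presumably renders the timestamp as Unix seconds with a fractional part, roughly as sketched below; treat the exact formatting as an assumption.

```go
package example

import (
	"strconv"
	"time"
)

// formatTime renders t the way the HTTP API expects timestamps:
// Unix seconds with a fractional part, e.g. "1589815973.450311".
// This is a sketch of the helper already used by Query/QueryRange,
// not code copied from the diff.
func formatTime(t time.Time) string {
	return strconv.FormatFloat(float64(t.Unix())+float64(t.Nanosecond())/1e9, 'f', -1, 64)
}
```
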
@@ -230,6 +230,13 @@ func TestAPIs(t *testing.T) {
 		}
 	}
 
+	doQueryExemplars := func(query string, startTime time.Time, endTime time.Time) func() (interface{}, Warnings, error) {
+		return func() (interface{}, Warnings, error) {
+			v, err := promAPI.QueryExemplars(context.Background(), query, startTime, endTime)
+			return v, nil, err
+		}
+	}
+
 	queryTests := []apiTest{
 		{
 			do: doQuery("2", testTime),
@@ -846,6 +853,111 @@ func TestAPIs(t *testing.T) {
 			},
 		},
 
+		// This has the newer API elements like lastEvaluation, evaluationTime, etc.
+		{
+			do: doRules(),
+			reqMethod: "GET",
+			reqPath: "/api/v1/rules",
+			inRes: map[string]interface{}{
+				"groups": []map[string]interface{}{
+					{
+						"file": "/rules.yaml",
+						"interval": 60,
+						"name": "example",
+						"rules": []map[string]interface{}{
+							{
+								"alerts": []map[string]interface{}{
+									{
+										"activeAt": testTime.UTC().Format(time.RFC3339Nano),
+										"annotations": map[string]interface{}{
+											"summary": "High request latency",
+										},
+										"labels": map[string]interface{}{
+											"alertname": "HighRequestLatency",
+											"severity": "page",
+										},
+										"state": "firing",
+										"value": "1e+00",
+									},
+								},
+								"annotations": map[string]interface{}{
+									"summary": "High request latency",
+								},
+								"duration": 600,
+								"health": "ok",
+								"labels": map[string]interface{}{
+									"severity": "page",
+								},
+								"name": "HighRequestLatency",
+								"query": "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5",
+								"type": "alerting",
+								"evaluationTime": 0.5,
+								"lastEvaluation": "2020-05-18T15:52:53.4503113Z",
+								"state": "firing",
+							},
+							{
+								"health": "ok",
+								"name": "job:http_inprogress_requests:sum",
+								"query": "sum(http_inprogress_requests) by (job)",
+								"type": "recording",
+								"evaluationTime": 0.3,
+								"lastEvaluation": "2020-05-18T15:52:53.4503113Z",
+							},
+						},
+					},
+				},
+			},
+			res: RulesResult{
+				Groups: []RuleGroup{
+					{
+						Name: "example",
+						File: "/rules.yaml",
+						Interval: 60,
+						Rules: []interface{}{
+							AlertingRule{
+								Alerts: []*Alert{
+									{
+										ActiveAt: testTime.UTC(),
+										Annotations: model.LabelSet{
+											"summary": "High request latency",
+										},
+										Labels: model.LabelSet{
+											"alertname": "HighRequestLatency",
+											"severity": "page",
+										},
+										State: AlertStateFiring,
+										Value: "1e+00",
+									},
+								},
+								Annotations: model.LabelSet{
+									"summary": "High request latency",
+								},
+								Labels: model.LabelSet{
+									"severity": "page",
+								},
+								Duration: 600,
+								Health: RuleHealthGood,
+								Name: "HighRequestLatency",
+								Query: "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5",
+								LastError: "",
+								EvaluationTime: 0.5,
+								LastEvaluation: time.Date(2020, 5, 18, 15, 52, 53, 450311300, time.UTC),
+								State: "firing",
+							},
+							RecordingRule{
+								Health: RuleHealthGood,
+								Name: "job:http_inprogress_requests:sum",
+								Query: "sum(http_inprogress_requests) by (job)",
+								LastError: "",
+								EvaluationTime: 0.3,
+								LastEvaluation: time.Date(2020, 5, 18, 15, 52, 53, 450311300, time.UTC),
+							},
+						},
+					},
+				},
+			},
+		},
+
 		{
 			do: doRules(),
 			reqMethod: "GET",
@@ -871,10 +983,13 @@ func TestAPIs(t *testing.T) {
 						"instance": "127.0.0.1:9090",
 						"job":      "prometheus",
 					},
-					"scrapeUrl":  "http://127.0.0.1:9090",
-					"lastError":  "error while scraping target",
-					"lastScrape": testTime.UTC().Format(time.RFC3339Nano),
-					"health":     "up",
+					"scrapePool":         "prometheus",
+					"scrapeUrl":          "http://127.0.0.1:9090",
+					"globalUrl":          "http://127.0.0.1:9090",
+					"lastError":          "error while scraping target",
+					"lastScrape":         testTime.UTC().Format(time.RFC3339Nano),
+					"lastScrapeDuration": 0.001146115,
+					"health":             "up",
 				},
 			},
 			"droppedTargets": []map[string]interface{}{
@@ -901,10 +1016,13 @@ func TestAPIs(t *testing.T) {
 						"instance": "127.0.0.1:9090",
 						"job":      "prometheus",
 					},
-					ScrapeURL:  "http://127.0.0.1:9090",
-					LastError:  "error while scraping target",
-					LastScrape: testTime.UTC(),
-					Health:     HealthGood,
+					ScrapePool:         "prometheus",
+					ScrapeURL:          "http://127.0.0.1:9090",
+					GlobalURL:          "http://127.0.0.1:9090",
+					LastError:          "error while scraping target",
+					LastScrape:         testTime.UTC(),
+					LastScrapeDuration: 0.001146115,
+					Health:             HealthGood,
 				},
 			},
 			Dropped: []DroppedTarget{
@@ -1079,6 +1197,66 @@ func TestAPIs(t *testing.T) {
 				},
 			},
 		},
+
+		{
+			do: doQueryExemplars("tns_request_duration_seconds_bucket", testTime.Add(-1*time.Minute), testTime),
+			reqMethod: "GET",
+			reqPath: "/api/v1/query_exemplars",
+			inErr: fmt.Errorf("some error"),
+			err: fmt.Errorf("some error"),
+		},
+
+		{
+			do: doQueryExemplars("tns_request_duration_seconds_bucket", testTime.Add(-1*time.Minute), testTime),
+			reqMethod: "GET",
+			reqPath: "/api/v1/query_exemplars",
+			inRes: []interface{}{
+				map[string]interface{}{
+					"seriesLabels": map[string]interface{}{
+						"__name__": "tns_request_duration_seconds_bucket",
+						"instance": "app:80",
+						"job": "tns/app",
+					},
+					"exemplars": []interface{}{
+						map[string]interface{}{
+							"labels": map[string]interface{}{
+								"traceID": "19fd8c8a33975a23",
+							},
+							"value": "0.003863295",
+							"timestamp": model.TimeFromUnixNano(testTime.UnixNano()),
+						},
+						map[string]interface{}{
+							"labels": map[string]interface{}{
+								"traceID": "67f743f07cc786b0",
+							},
+							"value": "0.001535405",
+							"timestamp": model.TimeFromUnixNano(testTime.UnixNano()),
+						},
+					},
+				},
+			},
+			res: []ExemplarQueryResult{
+				{
+					SeriesLabels: model.LabelSet{
+						"__name__": "tns_request_duration_seconds_bucket",
+						"instance": "app:80",
+						"job": "tns/app",
+					},
+					Exemplars: []Exemplar{
+						{
+							Labels: model.LabelSet{"traceID": "19fd8c8a33975a23"},
+							Value: 0.003863295,
+							Timestamp: model.TimeFromUnixNano(testTime.UnixNano()),
+						},
+						{
+							Labels: model.LabelSet{"traceID": "67f743f07cc786b0"},
+							Value: 0.001535405,
+							Timestamp: model.TimeFromUnixNano(testTime.UnixNano()),
+						},
+					},
+				},
+			},
+		},
 	}
 
 	var tests []apiTest