Merge pull request #20 from prometheus/update/stringers
Update LabelSet and Metric String() methods from server.
commit 024b00bcef

@@ -20,18 +20,6 @@ import (
 	"github.com/prometheus/client_golang/model"
 )
 
-const (
-	// The label name prefix to prepend if a synthetic label is already present
-	// in the exported metrics.
-	ExporterLabelPrefix model.LabelName = "exporter_"
-
-	// The label name indicating the metric name of a timeseries.
-	MetricNameLabel = "name"
-
-	// The label name indicating the job from which a timeseries was scraped.
-	JobLabel = "job"
-)
-
 // ProcessOptions dictates how the interpreted stream should be rendered for
 // consumption.
 type ProcessOptions struct {

@@ -82,7 +70,7 @@ func mergeTargetLabels(entityLabels, targetLabels model.LabelSet) model.LabelSet
 
 	for label, labelValue := range targetLabels {
 		if _, exists := result[label]; exists {
-			result[ExporterLabelPrefix+label] = labelValue
+			result[model.ExporterLabelPrefix+label] = labelValue
 		} else {
 			result[label] = labelValue
 		}

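For reference, a minimal stand-alone sketch of the collision handling above, written against plain string maps instead of model.LabelSet; the copying of entityLabels into result ahead of the loop is an assumption, since that part lies outside the hunk:

package main

import "fmt"

const exporterLabelPrefix = "exporter_" // stands in for model.ExporterLabelPrefix

func mergeTargetLabels(entityLabels, targetLabels map[string]string) map[string]string {
	result := map[string]string{}
	for label, value := range entityLabels {
		result[label] = value
	}
	for label, value := range targetLabels {
		if _, exists := result[label]; exists {
			// The exported metric already carries this label, so the target's
			// value is kept under a prefixed label instead of overwriting it.
			result[exporterLabelPrefix+label] = value
		} else {
			result[label] = value
		}
	}
	return result
}

func main() {
	merged := mergeTargetLabels(
		map[string]string{"job": "batch_job"},
		map[string]string{"job": "batch_exporter"},
	)
	fmt.Println(merged) // map[exporter_job:batch_exporter job:batch_job]
}
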
@@ -21,8 +21,8 @@ import (
 	"testing"
 	"time"
 
-	"github.com/prometheus/client_golang/test"
 	"github.com/prometheus/client_golang/model"
+	"github.com/prometheus/client_golang/test"
 )
 
 func testProcessor001Process(t test.Tester) {

@@ -39,84 +39,84 @@ func testProcessor001Process(t test.Tester) {
 		{
 			in: "test0_0_1-0_0_2.json",
 			baseLabels: model.LabelSet{
-				JobLabel: "batch_exporter",
+				model.JobLabel: "batch_exporter",
 			},
 			out: model.Samples{
 				&model.Sample{
-					Metric: model.Metric{"service": "zed", MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
+					Metric: model.Metric{"service": "zed", model.MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
 					Value: 25,
 				},
 				&model.Sample{
-					Metric: model.Metric{"service": "bar", MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
+					Metric: model.Metric{"service": "bar", model.MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
 					Value: 25,
 				},
 				&model.Sample{
-					Metric: model.Metric{"service": "foo", MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
+					Metric: model.Metric{"service": "foo", model.MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
 					Value: 25,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.010000", MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.010000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
 					Value: 0.0459814091918713,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.010000", MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.010000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
 					Value: 78.48563317257356,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.010000", MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.010000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
 					Value: 15.890724674774395,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.050000", MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.050000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
 					Value: 0.0459814091918713,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.050000", MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.050000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
 					Value: 78.48563317257356,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.050000", MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.050000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
 					Value: 15.890724674774395,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.500000", MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.500000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
 					Value: 0.6120456642749681,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.500000", MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.500000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
 					Value: 97.31798360385088,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.500000", MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.500000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
 					Value: 84.63044031436561,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.900000", MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.900000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
 					Value: 1.355915069887731,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.900000", MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.900000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
 					Value: 109.89202084295582,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.900000", MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.900000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
 					Value: 160.21100853053224,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.990000", MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.990000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
 					Value: 1.772733213161236,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.990000", MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.990000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
 					Value: 109.99626121011262,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.990000", MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.990000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
 					Value: 172.49828748957728,
 				},
 			},

@@ -22,8 +22,8 @@ import (
 	"testing"
 	"time"
 
-	"github.com/prometheus/client_golang/test"
 	"github.com/prometheus/client_golang/model"
+	"github.com/prometheus/client_golang/test"
 )
 
 func testProcessor002Process(t test.Tester) {

@@ -40,84 +40,84 @@ func testProcessor002Process(t test.Tester) {
 		{
 			in: "test0_0_1-0_0_2.json",
 			baseLabels: model.LabelSet{
-				JobLabel: "batch_exporter",
+				model.JobLabel: "batch_exporter",
 			},
 			out: model.Samples{
 				&model.Sample{
-					Metric: model.Metric{"service": "zed", MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
+					Metric: model.Metric{"service": "zed", model.MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
 					Value: 25,
 				},
 				&model.Sample{
-					Metric: model.Metric{"service": "bar", MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
+					Metric: model.Metric{"service": "bar", model.MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
 					Value: 25,
 				},
 				&model.Sample{
-					Metric: model.Metric{"service": "foo", MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
+					Metric: model.Metric{"service": "foo", model.MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
 					Value: 25,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.010000", MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.010000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
 					Value: 0.0459814091918713,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.010000", MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.010000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
 					Value: 78.48563317257356,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.010000", MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.010000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
 					Value: 15.890724674774395,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.050000", MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.050000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
 					Value: 0.0459814091918713,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.050000", MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.050000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
 					Value: 78.48563317257356,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.050000", MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.050000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
 					Value: 15.890724674774395,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.500000", MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.500000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
 					Value: 0.6120456642749681,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.500000", MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.500000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
 					Value: 97.31798360385088,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.500000", MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.500000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
 					Value: 84.63044031436561,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.900000", MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.900000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
 					Value: 1.355915069887731,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.900000", MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.900000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
 					Value: 109.89202084295582,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.900000", MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.900000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
 					Value: 160.21100853053224,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.990000", MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.990000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
 					Value: 1.772733213161236,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.990000", MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.990000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
 					Value: 109.99626121011262,
 				},
 				&model.Sample{
-					Metric: model.Metric{"percentile": "0.990000", MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
+					Metric: model.Metric{"percentile": "0.990000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
 					Value: 172.49828748957728,
 				},
 			},

@@ -17,6 +17,18 @@ import (
 	"strings"
 )
 
+const (
+	// The label name prefix to prepend if a synthetic label is already present
+	// in the exported metrics.
+	ExporterLabelPrefix LabelName = "exporter_"
+
+	// The label name indicating the metric name of a timeseries.
+	MetricNameLabel = "name"
+
+	// The label name indicating the job from which a timeseries was scraped.
+	JobLabel = "job"
+)
+
 // A LabelName is a key for a LabelSet or Metric. It has a value associated
 // therewith.
 type LabelName string

@@ -44,10 +44,14 @@ func (l LabelSet) Merge(other LabelSet) LabelSet {
 func (l LabelSet) String() string {
 	labelStrings := make([]string, 0, len(l))
 	for label, value := range l {
-		labelStrings = append(labelStrings, fmt.Sprintf("%s='%s'", label, value))
+		labelStrings = append(labelStrings, fmt.Sprintf("%s=%q", label, value))
 	}
 
-	sort.Strings(labelStrings)
-
-	return fmt.Sprintf("{%s}", strings.Join(labelStrings, ", "))
+	switch len(labelStrings) {
+	case 0:
+		return ""
+	default:
+		sort.Strings(labelStrings)
+		return fmt.Sprintf("{%s}", strings.Join(labelStrings, ", "))
+	}
 }

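A stand-alone sketch of what the reworked LabelSet String() method now renders, re-implemented here on a plain map type so it compiles on its own (the real method lives on model.LabelSet with LabelName/LabelValue keys):

package main

import (
	"fmt"
	"sort"
	"strings"
)

type labelSet map[string]string

func (l labelSet) String() string {
	labelStrings := make([]string, 0, len(l))
	for label, value := range l {
		// %q quotes and escapes the value, replacing the former %s='%s' form.
		labelStrings = append(labelStrings, fmt.Sprintf("%s=%q", label, value))
	}

	switch len(labelStrings) {
	case 0:
		// An empty set now renders as an empty string rather than "{}".
		return ""
	default:
		sort.Strings(labelStrings)
		return fmt.Sprintf("{%s}", strings.Join(labelStrings, ", "))
	}
}

func main() {
	fmt.Println(labelSet{"job": "batch_exporter", "instance": "a"}) // {instance="a", job="batch_exporter"}
	fmt.Printf("%q\n", labelSet{}.String())                         // ""
}
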
@@ -13,6 +13,12 @@
 
 package model
 
+import (
+	"fmt"
+	"sort"
+	"strings"
+)
+
 // A Metric is similar to a LabelSet, but the key difference is that a Metric is
 // a singleton and refers to one and only one stream of samples.
 type Metric map[LabelName]LabelValue

@@ -36,3 +42,24 @@ func (m Metric) Before(o Metric) bool {
 
 	return m.Before(o)
 }
+
+func (m Metric) String() string {
+	metricName, ok := m[MetricNameLabel]
+	if !ok {
+		panic("Tried to print metric without name")
+	}
+	labelStrings := make([]string, 0, len(m)-1)
+	for label, value := range m {
+		if label != MetricNameLabel {
+			labelStrings = append(labelStrings, fmt.Sprintf("%s=%q", label, value))
+		}
+	}
+
+	switch len(labelStrings) {
+	case 0:
+		return string(metricName)
+	default:
+		sort.Strings(labelStrings)
+		return fmt.Sprintf("%s{%s}", metricName, strings.Join(labelStrings, ", "))
+	}
+}

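A stand-alone sketch of the new Metric String() rendering, again re-implemented on a plain map type; the "name" constant stands in for MetricNameLabel:

package main

import (
	"fmt"
	"sort"
	"strings"
)

const metricNameLabel = "name" // stands in for model.MetricNameLabel

type metric map[string]string

func (m metric) String() string {
	metricName, ok := m[metricNameLabel]
	if !ok {
		panic("Tried to print metric without name")
	}
	labelStrings := make([]string, 0, len(m)-1)
	for label, value := range m {
		if label != metricNameLabel {
			labelStrings = append(labelStrings, fmt.Sprintf("%s=%q", label, value))
		}
	}

	switch len(labelStrings) {
	case 0:
		// A metric carrying only its name renders as the bare name.
		return metricName
	default:
		sort.Strings(labelStrings)
		return fmt.Sprintf("%s{%s}", metricName, strings.Join(labelStrings, ", "))
	}
}

func main() {
	m := metric{"name": "rpc_calls_total", "service": "zed", "job": "batch_job"}
	fmt.Println(m)                    // rpc_calls_total{job="batch_job", service="zed"}
	fmt.Println(metric{"name": "up"}) // up
}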