histograms: Move to new exposition protobuf format

Note that this is an incompatible change. To scrape this new format, the
Prometheus server needs to be updated at the same time. PR incoming.

Signed-off-by: beorn7 <beorn@grafana.com>

commit 8cbcd4076a
parent 6141a0784e
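For orientation, the format change is easiest to see in the exposition itself. Both strings below are protobuf text-format expectations copied verbatim from the tests changed in this commit (the "factor 1.2 results in schema 2" case); the surrounding Go is only a sketch to put old and new side by side.

// Sketch only: both strings are protobuf text-format expectations copied
// from the tests changed in this commit ("factor 1.2 results in schema 2").
package main

import "fmt"

const (
	// Old experimental sparse-bucket fields (dto.SparseBuckets):
	oldFormat = `sample_count:6 sample_sum:7.4 sb_schema:2 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:1 sb_positive:<span:<offset:0 length:5 > delta:1 delta:-1 delta:2 delta:-2 delta:2 > `
	// New native-histogram fields (flat schema/zero_* fields, dto.BucketSpan):
	newFormat = `sample_count:6 sample_sum:7.4 schema:2 zero_threshold:2.938735877055719e-39 zero_count:1 positive_span:<offset:0 length:5 > positive_delta:1 positive_delta:-1 positive_delta:2 positive_delta:-2 positive_delta:2 `
)

func main() {
	fmt.Println("old:", oldFormat)
	fmt.Println("new:", newFormat)
}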
go.mod (2 changed lines)

@@ -8,7 +8,7 @@ require (
 	github.com/davecgh/go-spew v1.1.1
 	github.com/golang/protobuf v1.5.2
 	github.com/json-iterator/go v1.1.12
-	github.com/prometheus/client_model v0.2.1-0.20210624201024-61b6c1aac064
+	github.com/prometheus/client_model v0.2.1-0.20220719122737-1f8dcad1221e
 	github.com/prometheus/common v0.35.0
 	github.com/prometheus/procfs v0.7.3
 	golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a
go.sum (4 changed lines)

@@ -135,8 +135,8 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
-github.com/prometheus/client_model v0.2.1-0.20210624201024-61b6c1aac064 h1:Kyx21CLOfWDA4e2TcOcupRl2g/Bmddu0AL0hR1BldEw=
-github.com/prometheus/client_model v0.2.1-0.20210624201024-61b6c1aac064/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w=
+github.com/prometheus/client_model v0.2.1-0.20220719122737-1f8dcad1221e h1:KjoQdMEQmNC8smQ731iHAXnbFbApg4uu60fNcWHs3Bk=
+github.com/prometheus/client_model v0.2.1-0.20220719122737-1f8dcad1221e/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w=
 github.com/prometheus/common v0.35.0 h1:Eyr+Pw2VymWejHqCugNaQXkAi6KayVNxaHeu6khmFBE=
 github.com/prometheus/common v0.35.0/go.mod h1:phzohg0JFMnBEFGxTDbfu3QyL5GI8gTQJFhYO5B3mfA=
 github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU=
@@ -382,19 +382,20 @@ type HistogramOpts struct {
 	Buckets []float64

 	// If SparseBucketsFactor is greater than one, sparse buckets are used
-	// (in addition to the regular buckets, if defined above). Sparse
-	// buckets are exponential buckets covering the whole float64 range
-	// (with the exception of the “zero” bucket, see
-	// SparseBucketsZeroThreshold below). From any one bucket to the next,
-	// the width of the bucket grows by a constant factor.
-	// SparseBucketsFactor provides an upper bound for this factor
-	// (exception see below). The smaller SparseBucketsFactor, the more
-	// buckets will be used and thus the more costly the histogram will
-	// become. A generally good trade-off between cost and accuracy is a
-	// value of 1.1 (each bucket is at most 10% wider than the previous
-	// one), which will result in each power of two divided into 8 buckets
-	// (e.g. there will be 8 buckets between 1 and 2, same as between 2 and
-	// 4, and 4 and 8, etc.).
+	// (in addition to the regular buckets, if defined above). A histogram
+	// with sparse buckets will be ingested as a native histogram by a
+	// Prometheus server with that feature enabled. Sparse buckets are
+	// exponential buckets covering the whole float64 range (with the
+	// exception of the “zero” bucket, see SparseBucketsZeroThreshold
+	// below). From any one bucket to the next, the width of the bucket
+	// grows by a constant factor. SparseBucketsFactor provides an upper
+	// bound for this factor (exception see below). The smaller
+	// SparseBucketsFactor, the more buckets will be used and thus the more
+	// costly the histogram will become. A generally good trade-off between
+	// cost and accuracy is a value of 1.1 (each bucket is at most 10% wider
+	// than the previous one), which will result in each power of two
+	// divided into 8 buckets (e.g. there will be 8 buckets between 1 and 2,
+	// same as between 2 and 4, and 4 and 8, etc.).
 	//
 	// Details about the actually used factor: The factor is calculated as
 	// 2^(2^n), where n is an integer number between (and including) -8 and
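The factor-to-schema relationship described in this doc comment can be sanity-checked against the test names changed further down in this commit (factor 1.1 → schema 3, 1.2 → 2, 4 → -1, 17 → -2). The helper below is only an illustrative sketch, not the library's pickSparseSchema implementation; it inverts the 2^(2^n) formula under the assumption that schema = -n and reproduces those pairs.

// Sketch: derive the schema implied by a bucket-growth factor, assuming the
// factor actually used is 2^(2^n) with schema = -n, as the HistogramOpts
// comment describes. Illustration only, not the actual pickSparseSchema code
// (which also clamps the result to the supported schema range).
package main

import (
	"fmt"
	"math"
)

func schemaForFactor(factor float64) int32 {
	// n = floor(log2(log2(factor))) picks the largest 2^(2^n) <= factor.
	return -int32(math.Floor(math.Log2(math.Log2(factor))))
}

func main() {
	for _, f := range []float64{1.1, 1.2, 4, 17} {
		s := schemaForFactor(f)
		fmt.Printf("factor %g -> schema %d (%g buckets per power of two)\n",
			f, s, math.Exp2(float64(s)))
	}
	// Output:
	// factor 1.1 -> schema 3 (8 buckets per power of two)
	// factor 1.2 -> schema 2 (4 buckets per power of two)
	// factor 4 -> schema -1 (0.5 buckets per power of two)
	// factor 17 -> schema -2 (0.25 buckets per power of two)
}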
@@ -723,8 +724,8 @@ func (h *histogram) Write(out *dto.Metric) error {
 		his.Bucket = append(his.Bucket, b)
 	}
 	if h.sparseSchema > math.MinInt32 {
-		his.SbZeroThreshold = proto.Float64(math.Float64frombits(atomic.LoadUint64(&coldCounts.sparseZeroThresholdBits)))
-		his.SbSchema = proto.Int32(atomic.LoadInt32(&coldCounts.sparseSchema))
+		his.ZeroThreshold = proto.Float64(math.Float64frombits(atomic.LoadUint64(&coldCounts.sparseZeroThresholdBits)))
+		his.Schema = proto.Int32(atomic.LoadInt32(&coldCounts.sparseSchema))
 		zeroBucket := atomic.LoadUint64(&coldCounts.sparseZeroBucket)

 		defer func() {
@@ -732,9 +733,9 @@ func (h *histogram) Write(out *dto.Metric) error {
 			coldCounts.sparseBucketsNegative.Range(addAndReset(&hotCounts.sparseBucketsNegative, &hotCounts.sparseBucketsNumber))
 		}()

-		his.SbZeroCount = proto.Uint64(zeroBucket)
-		his.SbNegative = makeSparseBuckets(&coldCounts.sparseBucketsNegative)
-		his.SbPositive = makeSparseBuckets(&coldCounts.sparseBucketsPositive)
+		his.ZeroCount = proto.Uint64(zeroBucket)
+		his.NegativeSpan, his.NegativeDelta = makeSparseBuckets(&coldCounts.sparseBucketsNegative)
+		his.PositiveSpan, his.PositiveDelta = makeSparseBuckets(&coldCounts.sparseBucketsPositive)
 	}
 	addAndResetCounts(hotCounts, coldCounts)
 	return nil
@@ -1235,7 +1236,7 @@ func pickSparseSchema(bucketFactor float64) int32 {
 	}
 }

-func makeSparseBuckets(buckets *sync.Map) *dto.SparseBuckets {
+func makeSparseBuckets(buckets *sync.Map) ([]*dto.BucketSpan, []int64) {
 	var ii []int
 	buckets.Range(func(k, v interface{}) bool {
 		ii = append(ii, k.(int))
@@ -1244,16 +1245,19 @@ func makeSparseBuckets(buckets *sync.Map) *dto.SparseBuckets {
 	sort.Ints(ii)

 	if len(ii) == 0 {
-		return nil
+		return nil, nil
 	}

-	sbs := dto.SparseBuckets{}
-	var prevCount int64
-	var nextI int
+	var (
+		spans     []*dto.BucketSpan
+		deltas    []int64
+		prevCount int64
+		nextI     int
+	)

 	appendDelta := func(count int64) {
-		*sbs.Span[len(sbs.Span)-1].Length++
-		sbs.Delta = append(sbs.Delta, count-prevCount)
+		*spans[len(spans)-1].Length++
+		deltas = append(deltas, count-prevCount)
 		prevCount = count
 	}

@@ -1270,7 +1274,7 @@ func makeSparseBuckets(buckets *sync.Map) *dto.SparseBuckets {
 			// We have to create a new span, either because we are
 			// at the very beginning, or because we have found a gap
 			// of more than two buckets.
-			sbs.Span = append(sbs.Span, &dto.SparseBuckets_Span{
+			spans = append(spans, &dto.BucketSpan{
 				Offset: proto.Int32(iDelta),
 				Length: proto.Uint32(0),
 			})
@@ -1284,7 +1288,7 @@ func makeSparseBuckets(buckets *sync.Map) *dto.SparseBuckets {
 		appendDelta(count)
 		nextI = i + 1
 	}
-	return &sbs
+	return spans, deltas
 }

 // addToSparseBucket increments the sparse bucket at key by the provided
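makeSparseBuckets now returns the span/delta encoding used by the new format instead of a *dto.SparseBuckets message: spans describe runs of consecutive populated buckets, and deltas are differences between consecutive bucket counts. The decoder below is a minimal sketch (plain ints instead of the dto types) under the span semantics exercised by the tests in this commit: the first span's offset is the starting bucket index, and each later offset is the gap to the end of the previous span.

// Sketch: expand the span/delta representation produced by the new
// makeSparseBuckets signature back into absolute bucket indexes and counts.
// Plain ints stand in for the dto.BucketSpan fields (Offset, Length).
package main

import "fmt"

type span struct {
	offset int32  // first span: start index; later spans: gap to previous span
	length uint32 // number of consecutive buckets in this span
}

func decode(spans []span, deltas []int64) map[int]int64 {
	counts := map[int]int64{}
	var (
		idx   int   // current absolute bucket index
		count int64 // running count (deltas are count differences)
		d     int   // position in the deltas slice
	)
	for i, s := range spans {
		if i == 0 {
			idx = int(s.offset)
		} else {
			idx += int(s.offset)
		}
		for j := uint32(0); j < s.length; j++ {
			count += deltas[d]
			counts[idx] = count
			idx++
			d++
		}
	}
	return counts
}

func main() {
	// "factor 1.1 results in schema 3" from the tests in this commit:
	// positive_span:<offset:0 length:1 > <offset:7 length:1 > <offset:4 length:1 >
	// positive_delta:1 positive_delta:0 positive_delta:0
	spans := []span{{0, 1}, {7, 1}, {4, 1}}
	deltas := []int64{1, 0, 0}
	fmt.Println(decode(spans, deltas)) // map[0:1 8:1 13:1]
}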
@@ -490,13 +490,13 @@ func TestSparseHistogram(t *testing.T) {
 			name: "factor 1.1 results in schema 3",
 			observations: []float64{0, 1, 2, 3},
 			factor: 1.1,
-			want: `sample_count:4 sample_sum:6 sb_schema:3 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:1 sb_positive:<span:<offset:0 length:1 > span:<offset:7 length:1 > span:<offset:4 length:1 > delta:1 delta:0 delta:0 > `,
+			want: `sample_count:4 sample_sum:6 schema:3 zero_threshold:2.938735877055719e-39 zero_count:1 positive_span:<offset:0 length:1 > positive_span:<offset:7 length:1 > positive_span:<offset:4 length:1 > positive_delta:1 positive_delta:0 positive_delta:0 `,
 		},
 		{
 			name: "factor 1.2 results in schema 2",
 			observations: []float64{0, 1, 1.2, 1.4, 1.8, 2},
 			factor: 1.2,
-			want: `sample_count:6 sample_sum:7.4 sb_schema:2 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:1 sb_positive:<span:<offset:0 length:5 > delta:1 delta:-1 delta:2 delta:-2 delta:2 > `,
+			want: `sample_count:6 sample_sum:7.4 schema:2 zero_threshold:2.938735877055719e-39 zero_count:1 positive_span:<offset:0 length:5 > positive_delta:1 positive_delta:-1 positive_delta:2 positive_delta:-2 positive_delta:2 `,
 		},
 		{
 			name: "factor 4 results in schema -1",
@@ -507,7 +507,7 @@ func TestSparseHistogram(t *testing.T) {
 				33.33, // Bucket 3: (16, 64]
 			},
 			factor: 4,
-			want: `sample_count:10 sample_sum:62.83 sb_schema:-1 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:0 sb_positive:<span:<offset:0 length:4 > delta:2 delta:2 delta:-1 delta:-2 > `,
+			want: `sample_count:10 sample_sum:62.83 schema:-1 zero_threshold:2.938735877055719e-39 zero_count:0 positive_span:<offset:0 length:4 > positive_delta:2 positive_delta:2 positive_delta:-1 positive_delta:-2 `,
 		},
 		{
 			name: "factor 17 results in schema -2",
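As a cross-check of the expectations above: at schema 2 each power of two is split into 2^2 = 4 buckets, so bucket i has upper bound 2^(i/4) and an observation v > 0 lands in bucket ceil(4*log2(v)). The snippet below is an illustration (not the library's bucket-key code) that reproduces the positive bucket counts 1, 0, 2, 0, 2 at indexes 0 through 4 asserted for the "factor 1.2 results in schema 2" case.

// Sketch: map the observations of the "factor 1.2 results in schema 2" test
// case to bucket indexes at schema 2 (upper bound of bucket i is 2^(i/4)).
// Observation 0 falls into the zero bucket and is skipped here.
package main

import (
	"fmt"
	"math"
)

func main() {
	const schema = 2
	counts := map[int]int{}
	for _, v := range []float64{1, 1.2, 1.4, 1.8, 2} {
		idx := int(math.Ceil(math.Log2(v) * math.Exp2(schema)))
		counts[idx]++
	}
	fmt.Println(counts)
	// map[0:1 2:2 4:2] -> positive_span:<offset:0 length:5 >
	// positive_delta:1 positive_delta:-1 positive_delta:2
	// positive_delta:-2 positive_delta:2
}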
@@ -517,58 +517,58 @@ func TestSparseHistogram(t *testing.T) {
 				33.33, // Bucket 2: (16, 256]
 			},
 			factor: 17,
-			want: `sample_count:10 sample_sum:62.83 sb_schema:-2 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:0 sb_positive:<span:<offset:0 length:3 > delta:2 delta:5 delta:-6 > `,
+			want: `sample_count:10 sample_sum:62.83 schema:-2 zero_threshold:2.938735877055719e-39 zero_count:0 positive_span:<offset:0 length:3 > positive_delta:2 positive_delta:5 positive_delta:-6 `,
 		},
 		{
 			name: "negative buckets",
 			observations: []float64{0, -1, -1.2, -1.4, -1.8, -2},
 			factor: 1.2,
-			want: `sample_count:6 sample_sum:-7.4 sb_schema:2 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:1 sb_negative:<span:<offset:0 length:5 > delta:1 delta:-1 delta:2 delta:-2 delta:2 > `,
+			want: `sample_count:6 sample_sum:-7.4 schema:2 zero_threshold:2.938735877055719e-39 zero_count:1 negative_span:<offset:0 length:5 > negative_delta:1 negative_delta:-1 negative_delta:2 negative_delta:-2 negative_delta:2 `,
 		},
 		{
 			name: "negative and positive buckets",
 			observations: []float64{0, -1, -1.2, -1.4, -1.8, -2, 1, 1.2, 1.4, 1.8, 2},
 			factor: 1.2,
-			want: `sample_count:11 sample_sum:0 sb_schema:2 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:1 sb_negative:<span:<offset:0 length:5 > delta:1 delta:-1 delta:2 delta:-2 delta:2 > sb_positive:<span:<offset:0 length:5 > delta:1 delta:-1 delta:2 delta:-2 delta:2 > `,
+			want: `sample_count:11 sample_sum:0 schema:2 zero_threshold:2.938735877055719e-39 zero_count:1 negative_span:<offset:0 length:5 > negative_delta:1 negative_delta:-1 negative_delta:2 negative_delta:-2 negative_delta:2 positive_span:<offset:0 length:5 > positive_delta:1 positive_delta:-1 positive_delta:2 positive_delta:-2 positive_delta:2 `,
 		},
 		{
 			name: "wide zero bucket",
 			observations: []float64{0, -1, -1.2, -1.4, -1.8, -2, 1, 1.2, 1.4, 1.8, 2},
 			factor: 1.2,
 			zeroThreshold: 1.4,
-			want: `sample_count:11 sample_sum:0 sb_schema:2 sb_zero_threshold:1.4 sb_zero_count:7 sb_negative:<span:<offset:4 length:1 > delta:2 > sb_positive:<span:<offset:4 length:1 > delta:2 > `,
+			want: `sample_count:11 sample_sum:0 schema:2 zero_threshold:1.4 zero_count:7 negative_span:<offset:4 length:1 > negative_delta:2 positive_span:<offset:4 length:1 > positive_delta:2 `,
 		},
 		{
 			name: "NaN observation",
 			observations: []float64{0, 1, 1.2, 1.4, 1.8, 2, math.NaN()},
 			factor: 1.2,
-			want: `sample_count:7 sample_sum:nan sb_schema:2 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:1 sb_positive:<span:<offset:0 length:5 > delta:1 delta:-1 delta:2 delta:-2 delta:2 > `,
+			want: `sample_count:7 sample_sum:nan schema:2 zero_threshold:2.938735877055719e-39 zero_count:1 positive_span:<offset:0 length:5 > positive_delta:1 positive_delta:-1 positive_delta:2 positive_delta:-2 positive_delta:2 `,
 		},
 		{
 			name: "+Inf observation",
 			observations: []float64{0, 1, 1.2, 1.4, 1.8, 2, math.Inf(+1)},
 			factor: 1.2,
-			want: `sample_count:7 sample_sum:inf sb_schema:2 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:1 sb_positive:<span:<offset:0 length:5 > span:<offset:2147483642 length:1 > delta:1 delta:-1 delta:2 delta:-2 delta:2 delta:-1 > `,
+			want: `sample_count:7 sample_sum:inf schema:2 zero_threshold:2.938735877055719e-39 zero_count:1 positive_span:<offset:0 length:5 > positive_span:<offset:2147483642 length:1 > positive_delta:1 positive_delta:-1 positive_delta:2 positive_delta:-2 positive_delta:2 positive_delta:-1 `,
 		},
 		{
 			name: "-Inf observation",
 			observations: []float64{0, 1, 1.2, 1.4, 1.8, 2, math.Inf(-1)},
 			factor: 1.2,
-			want: `sample_count:7 sample_sum:-inf sb_schema:2 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:1 sb_negative:<span:<offset:2147483647 length:1 > delta:1 > sb_positive:<span:<offset:0 length:5 > delta:1 delta:-1 delta:2 delta:-2 delta:2 > `,
+			want: `sample_count:7 sample_sum:-inf schema:2 zero_threshold:2.938735877055719e-39 zero_count:1 negative_span:<offset:2147483647 length:1 > negative_delta:1 positive_span:<offset:0 length:5 > positive_delta:1 positive_delta:-1 positive_delta:2 positive_delta:-2 positive_delta:2 `,
 		},
 		{
 			name: "limited buckets but nothing triggered",
 			observations: []float64{0, 1, 1.2, 1.4, 1.8, 2},
 			factor: 1.2,
 			maxBuckets: 4,
-			want: `sample_count:6 sample_sum:7.4 sb_schema:2 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:1 sb_positive:<span:<offset:0 length:5 > delta:1 delta:-1 delta:2 delta:-2 delta:2 > `,
+			want: `sample_count:6 sample_sum:7.4 schema:2 zero_threshold:2.938735877055719e-39 zero_count:1 positive_span:<offset:0 length:5 > positive_delta:1 positive_delta:-1 positive_delta:2 positive_delta:-2 positive_delta:2 `,
 		},
 		{
 			name: "buckets limited by halving resolution",
 			observations: []float64{0, 1, 1.1, 1.2, 1.4, 1.8, 2, 3},
 			factor: 1.2,
 			maxBuckets: 4,
-			want: `sample_count:8 sample_sum:11.5 sb_schema:1 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:1 sb_positive:<span:<offset:0 length:5 > delta:1 delta:2 delta:-1 delta:-2 delta:1 > `,
+			want: `sample_count:8 sample_sum:11.5 schema:1 zero_threshold:2.938735877055719e-39 zero_count:1 positive_span:<offset:0 length:5 > positive_delta:1 positive_delta:2 positive_delta:-1 positive_delta:-2 positive_delta:1 `,
 		},
 		{
 			name: "buckets limited by widening the zero bucket",
@@ -576,7 +576,7 @@ func TestSparseHistogram(t *testing.T) {
 			factor: 1.2,
 			maxBuckets: 4,
 			maxZeroThreshold: 1.2,
-			want: `sample_count:8 sample_sum:11.5 sb_schema:2 sb_zero_threshold:1 sb_zero_count:2 sb_positive:<span:<offset:1 length:7 > delta:1 delta:1 delta:-2 delta:2 delta:-2 delta:0 delta:1 > `,
+			want: `sample_count:8 sample_sum:11.5 schema:2 zero_threshold:1 zero_count:2 positive_span:<offset:1 length:7 > positive_delta:1 positive_delta:1 positive_delta:-2 positive_delta:2 positive_delta:-2 positive_delta:0 positive_delta:1 `,
 		},
 		{
 			name: "buckets limited by widening the zero bucket twice",
@@ -584,7 +584,7 @@ func TestSparseHistogram(t *testing.T) {
 			factor: 1.2,
 			maxBuckets: 4,
 			maxZeroThreshold: 1.2,
-			want: `sample_count:9 sample_sum:15.5 sb_schema:2 sb_zero_threshold:1.189207115002721 sb_zero_count:3 sb_positive:<span:<offset:2 length:7 > delta:2 delta:-2 delta:2 delta:-2 delta:0 delta:1 delta:0 > `,
+			want: `sample_count:9 sample_sum:15.5 schema:2 zero_threshold:1.189207115002721 zero_count:3 positive_span:<offset:2 length:7 > positive_delta:2 positive_delta:-2 positive_delta:2 positive_delta:-2 positive_delta:0 positive_delta:1 positive_delta:0 `,
 		},
 		{
 			name: "buckets limited by reset",
@@ -593,21 +593,21 @@ func TestSparseHistogram(t *testing.T) {
 			maxBuckets: 4,
 			maxZeroThreshold: 1.2,
 			minResetDuration: 5 * time.Minute,
-			want: `sample_count:2 sample_sum:7 sb_schema:2 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:0 sb_positive:<span:<offset:7 length:2 > delta:1 delta:0 > `,
+			want: `sample_count:2 sample_sum:7 schema:2 zero_threshold:2.938735877055719e-39 zero_count:0 positive_span:<offset:7 length:2 > positive_delta:1 positive_delta:0 `,
 		},
 		{
 			name: "limited buckets but nothing triggered, negative observations",
 			observations: []float64{0, -1, -1.2, -1.4, -1.8, -2},
 			factor: 1.2,
 			maxBuckets: 4,
-			want: `sample_count:6 sample_sum:-7.4 sb_schema:2 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:1 sb_negative:<span:<offset:0 length:5 > delta:1 delta:-1 delta:2 delta:-2 delta:2 > `,
+			want: `sample_count:6 sample_sum:-7.4 schema:2 zero_threshold:2.938735877055719e-39 zero_count:1 negative_span:<offset:0 length:5 > negative_delta:1 negative_delta:-1 negative_delta:2 negative_delta:-2 negative_delta:2 `,
 		},
 		{
 			name: "buckets limited by halving resolution, negative observations",
 			observations: []float64{0, -1, -1.1, -1.2, -1.4, -1.8, -2, -3},
 			factor: 1.2,
 			maxBuckets: 4,
-			want: `sample_count:8 sample_sum:-11.5 sb_schema:1 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:1 sb_negative:<span:<offset:0 length:5 > delta:1 delta:2 delta:-1 delta:-2 delta:1 > `,
+			want: `sample_count:8 sample_sum:-11.5 schema:1 zero_threshold:2.938735877055719e-39 zero_count:1 negative_span:<offset:0 length:5 > negative_delta:1 negative_delta:2 negative_delta:-1 negative_delta:-2 negative_delta:1 `,
 		},
 		{
 			name: "buckets limited by widening the zero bucket, negative observations",
@@ -615,7 +615,7 @@ func TestSparseHistogram(t *testing.T) {
 			factor: 1.2,
 			maxBuckets: 4,
 			maxZeroThreshold: 1.2,
-			want: `sample_count:8 sample_sum:-11.5 sb_schema:2 sb_zero_threshold:1 sb_zero_count:2 sb_negative:<span:<offset:1 length:7 > delta:1 delta:1 delta:-2 delta:2 delta:-2 delta:0 delta:1 > `,
+			want: `sample_count:8 sample_sum:-11.5 schema:2 zero_threshold:1 zero_count:2 negative_span:<offset:1 length:7 > negative_delta:1 negative_delta:1 negative_delta:-2 negative_delta:2 negative_delta:-2 negative_delta:0 negative_delta:1 `,
 		},
 		{
 			name: "buckets limited by widening the zero bucket twice, negative observations",
@@ -623,7 +623,7 @@ func TestSparseHistogram(t *testing.T) {
 			factor: 1.2,
 			maxBuckets: 4,
 			maxZeroThreshold: 1.2,
-			want: `sample_count:9 sample_sum:-15.5 sb_schema:2 sb_zero_threshold:1.189207115002721 sb_zero_count:3 sb_negative:<span:<offset:2 length:7 > delta:2 delta:-2 delta:2 delta:-2 delta:0 delta:1 delta:0 > `,
+			want: `sample_count:9 sample_sum:-15.5 schema:2 zero_threshold:1.189207115002721 zero_count:3 negative_span:<offset:2 length:7 > negative_delta:2 negative_delta:-2 negative_delta:2 negative_delta:-2 negative_delta:0 negative_delta:1 negative_delta:0 `,
 		},
 		{
 			name: "buckets limited by reset, negative observations",
@@ -632,7 +632,7 @@ func TestSparseHistogram(t *testing.T) {
 			maxBuckets: 4,
 			maxZeroThreshold: 1.2,
 			minResetDuration: 5 * time.Minute,
-			want: `sample_count:2 sample_sum:-7 sb_schema:2 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:0 sb_negative:<span:<offset:7 length:2 > delta:1 delta:0 > `,
+			want: `sample_count:2 sample_sum:-7 schema:2 zero_threshold:2.938735877055719e-39 zero_count:0 negative_span:<offset:7 length:2 > negative_delta:1 negative_delta:0 `,
 		},
 		{
 			name: "buckets limited by halving resolution, then reset",
@@ -640,7 +640,7 @@ func TestSparseHistogram(t *testing.T) {
 			factor: 1.2,
 			maxBuckets: 4,
 			minResetDuration: 9 * time.Minute,
-			want: `sample_count:2 sample_sum:7 sb_schema:2 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:0 sb_positive:<span:<offset:7 length:2 > delta:1 delta:0 > `,
+			want: `sample_count:2 sample_sum:7 schema:2 zero_threshold:2.938735877055719e-39 zero_count:0 positive_span:<offset:7 length:2 > positive_delta:1 positive_delta:0 `,
 		},
 		{
 			name: "buckets limited by widening the zero bucket, then reset",
@@ -649,7 +649,7 @@ func TestSparseHistogram(t *testing.T) {
 			maxBuckets: 4,
 			maxZeroThreshold: 1.2,
 			minResetDuration: 9 * time.Minute,
-			want: `sample_count:2 sample_sum:7 sb_schema:2 sb_zero_threshold:2.938735877055719e-39 sb_zero_count:0 sb_positive:<span:<offset:7 length:2 > delta:1 delta:0 > `,
+			want: `sample_count:2 sample_sum:7 schema:2 zero_threshold:2.938735877055719e-39 zero_count:0 positive_span:<offset:7 length:2 > positive_delta:1 positive_delta:0 `,
 		},
 	}

@@ -754,9 +754,9 @@ func TestSparseHistogramConcurrency(t *testing.T) {
 		// 	t.Errorf("got sample sum %f, want %f", got, want)
 		// }

-		sumBuckets := int(m.Histogram.GetSbZeroCount())
+		sumBuckets := int(m.Histogram.GetZeroCount())
 		current := 0
-		for _, delta := range m.Histogram.GetSbNegative().GetDelta() {
+		for _, delta := range m.Histogram.GetNegativeDelta() {
 			current += int(delta)
 			if current < 0 {
 				t.Fatalf("negative bucket population negative: %d", current)
@@ -764,7 +764,7 @@ func TestSparseHistogramConcurrency(t *testing.T) {
 			sumBuckets += current
 		}
 		current = 0
-		for _, delta := range m.Histogram.GetSbPositive().GetDelta() {
+		for _, delta := range m.Histogram.GetPositiveDelta() {
 			current += int(delta)
 			if current < 0 {
 				t.Fatalf("positive bucket population negative: %d", current)
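This check works because each delta is the difference to the previous bucket's count: a running sum over the deltas recovers the individual bucket populations, and those plus the zero bucket must add up to the total observation count. A quick hedged verification against the "negative and positive buckets" expectation from the test table above:

// Sketch: verify that zero_count plus the bucket populations recovered from
// the deltas add up to sample_count, as the concurrency test asserts. Data
// is the "negative and positive buckets" expectation from this commit.
package main

import "fmt"

func sumFromDeltas(deltas []int64) int64 {
	var current, total int64
	for _, d := range deltas {
		current += d // running sum = population of the next bucket
		total += current
	}
	return total
}

func main() {
	zeroCount := int64(1)
	negative := []int64{1, -1, 2, -2, 2} // negative_delta values
	positive := []int64{1, -1, 2, -2, 2} // positive_delta values
	total := zeroCount + sumFromDeltas(negative) + sumFromDeltas(positive)
	fmt.Println(total) // 11, matching sample_count:11
}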