Merge pull request #35 from prometheus/u-c-l/maintenance

Just a couple of minor maintenance things.
This commit is contained in:
Björn Rabenstein 2014-12-08 17:50:40 +01:00
commit 27edbd47b3
9 changed files with 50 additions and 109 deletions

View File

@ -1 +1 @@
Imported at da3e0acc8525a74a0ac8651ac5e7a68891291fdf from https://github.com/u-c-l/perks/tree/opt/pool-for-sample . Imported at 5d903d2c5dc7f55829e36c62ae6c5f5f6d75e70a from https://github.com/u-c-l/perks .

View File

@ -5,6 +5,8 @@ import (
) )
func BenchmarkInsertTargeted(b *testing.B) { func BenchmarkInsertTargeted(b *testing.B) {
b.ReportAllocs()
s := NewTargeted(0.01, 0.5, 0.9, 0.99) s := NewTargeted(0.01, 0.5, 0.9, 0.99)
b.ResetTimer() b.ResetTimer()
for i := float64(0); i < float64(b.N); i++ { for i := float64(0); i < float64(b.N); i++ {

View File

@ -1,28 +0,0 @@
//+build !go1.3
package quantile
type samplePool struct {
pool chan *Sample
}
func newSamplePool(capacity int) *samplePool {
return &samplePool{pool: make(chan *Sample, capacity)}
}
func (sp *samplePool) Get(value, width, delta float64) *Sample {
select {
case sample := <-sp.pool:
sample.Value, sample.Width, sample.Delta = value, width, delta
return sample
default:
return &Sample{value, width, delta}
}
}
func (sp *samplePool) Put(sample *Sample) {
select {
case sp.pool <- sample:
default:
}
}

View File

@ -1,26 +0,0 @@
//+build go1.3
package quantile
import "sync"
// With the Go1.3 sync Pool, there is no max capacity, and a globally shared
// pool is more efficient.
var globalSamplePool = sync.Pool{New: func() interface{} { return &Sample{} }}
type samplePool struct{}
func newSamplePool(capacity int) *samplePool {
// capacity ignored for Go1.3 sync.Pool.
return &samplePool{}
}
func (_ samplePool) Get(value, width, delta float64) *Sample {
sample := globalSamplePool.Get().(*Sample)
sample.Value, sample.Width, sample.Delta = value, width, delta
return sample
}
func (_ samplePool) Put(sample *Sample) {
globalSamplePool.Put(sample)
}

View File

@ -80,11 +80,7 @@ type Stream struct {
func newStream(ƒ invariant) *Stream { func newStream(ƒ invariant) *Stream {
const defaultEpsilon = 0.01 const defaultEpsilon = 0.01
x := &stream{ x := &stream{epsilon: defaultEpsilon, ƒ: ƒ}
epsilon: defaultEpsilon,
ƒ: ƒ,
pool: newSamplePool(1024),
}
return &Stream{x, make(Samples, 0, 500), true} return &Stream{x, make(Samples, 0, 500), true}
} }
@ -173,9 +169,8 @@ func (s *Stream) flushed() bool {
type stream struct { type stream struct {
epsilon float64 epsilon float64
n float64 n float64
l []*Sample l []Sample
ƒ invariant ƒ invariant
pool *samplePool
} }
// SetEpsilon sets the error epsilon for the Stream. The default epsilon is // SetEpsilon sets the error epsilon for the Stream. The default epsilon is
@ -187,9 +182,6 @@ func (s *stream) SetEpsilon(epsilon float64) {
} }
func (s *stream) reset() { func (s *stream) reset() {
for _, sample := range s.l {
s.pool.Put(sample)
}
s.l = s.l[:0] s.l = s.l[:0]
s.n = 0 s.n = 0
} }
@ -206,15 +198,15 @@ func (s *stream) merge(samples Samples) {
c := s.l[i] c := s.l[i]
if c.Value > sample.Value { if c.Value > sample.Value {
// Insert at position i. // Insert at position i.
s.l = append(s.l, nil) s.l = append(s.l, Sample{})
copy(s.l[i+1:], s.l[i:]) copy(s.l[i+1:], s.l[i:])
s.l[i] = s.pool.Get(sample.Value, sample.Width, math.Floor(s.ƒ(s, r))-1) s.l[i] = Sample{sample.Value, sample.Width, math.Floor(s.ƒ(s, r)) - 1}
i++ i++
goto inserted goto inserted
} }
r += c.Width r += c.Width
} }
s.l = append(s.l, s.pool.Get(sample.Value, sample.Width, 0)) s.l = append(s.l, Sample{sample.Value, sample.Width, 0})
i++ i++
inserted: inserted:
s.n += sample.Width s.n += sample.Width
@ -245,19 +237,21 @@ func (s *stream) compress() {
return return
} }
x := s.l[len(s.l)-1] x := s.l[len(s.l)-1]
xi := len(s.l) - 1
r := s.n - 1 - x.Width r := s.n - 1 - x.Width
for i := len(s.l) - 2; i >= 0; i-- { for i := len(s.l) - 2; i >= 0; i-- {
c := s.l[i] c := s.l[i]
if c.Width+x.Width+x.Delta <= s.ƒ(s, r) { if c.Width+x.Width+x.Delta <= s.ƒ(s, r) {
x.Width += c.Width x.Width += c.Width
s.l[xi] = x
// Remove element at i. // Remove element at i.
copy(s.l[i:], s.l[i+1:]) copy(s.l[i:], s.l[i+1:])
s.l[len(s.l)-1] = nil
s.l = s.l[:len(s.l)-1] s.l = s.l[:len(s.l)-1]
s.pool.Put(c) xi -= 1
} else { } else {
x = c x = c
xi = i
} }
r -= c.Width r -= c.Width
} }
@ -265,8 +259,6 @@ func (s *stream) compress() {
func (s *stream) samples() Samples { func (s *stream) samples() Samples {
samples := make(Samples, len(s.l)) samples := make(Samples, len(s.l))
for i, c := range s.l { copy(samples, s.l)
samples[i] = *c
}
return samples return samples
} }

View File

@ -1,7 +1,6 @@
package quantile package quantile
import ( import (
"math"
"math/rand" "math/rand"
"sort" "sort"
"testing" "testing"
@ -12,26 +11,23 @@ func TestQuantRandQuery(t *testing.T) {
a := make([]float64, 0, 1e5) a := make([]float64, 0, 1e5)
rand.Seed(42) rand.Seed(42)
for i := 0; i < cap(a); i++ { for i := 0; i < cap(a); i++ {
v := float64(rand.Int63()) v := rand.NormFloat64()
s.Insert(v) s.Insert(v)
a = append(a, v) a = append(a, v)
} }
t.Logf("len: %d", s.Count()) t.Logf("len: %d", s.Count())
sort.Float64s(a) sort.Float64s(a)
w := getPerc(a, 0.50) w, min, max := getPerc(a, 0.50)
if g := s.Query(0.50); math.Abs(w-g)/w > 0.03 { if g := s.Query(0.50); g < min || g > max {
t.Errorf("perc50: want %v, got %v", w, g) t.Errorf("perc50: want %v [%f,%f], got %v", w, min, max, g)
t.Logf("e: %f", math.Abs(w-g)/w)
} }
w = getPerc(a, 0.90) w, min, max = getPerc(a, 0.90)
if g := s.Query(0.90); math.Abs(w-g)/w > 0.03 { if g := s.Query(0.90); g < min || g > max {
t.Errorf("perc90: want %v, got %v", w, g) t.Errorf("perc90: want %v [%f,%f], got %v", w, min, max, g)
t.Logf("e: %f", math.Abs(w-g)/w)
} }
w = getPerc(a, 0.99) w, min, max = getPerc(a, 0.99)
if g := s.Query(0.99); math.Abs(w-g)/w > 0.03 { if g := s.Query(0.99); g < min || g > max {
t.Errorf("perc99: want %v, got %v", w, g) t.Errorf("perc99: want %v [%f,%f], got %v", w, min, max, g)
t.Logf("e: %f", math.Abs(w-g)/w)
} }
} }
@ -51,7 +47,7 @@ func TestQuantRandMergeQuery(t *testing.T) {
rand.Seed(42) rand.Seed(42)
a := make([]float64, 0, 1e6) a := make([]float64, 0, 1e6)
for i := 0; i < cap(a); i++ { for i := 0; i < cap(a); i++ {
v := float64(rand.Int63()) v := rand.NormFloat64()
a = append(a, v) a = append(a, v)
ch <- v ch <- v
} }
@ -63,20 +59,17 @@ func TestQuantRandMergeQuery(t *testing.T) {
t.Logf("len: %d", s.Count()) t.Logf("len: %d", s.Count())
sort.Float64s(a) sort.Float64s(a)
w := getPerc(a, 0.50) w, min, max := getPerc(a, 0.50)
if g := s.Query(0.50); math.Abs(w-g)/w > 0.03 { if g := s.Query(0.50); g < min || g > max {
t.Errorf("perc50: want %v, got %v", w, g) t.Errorf("perc50: want %v [%f,%f], got %v", w, min, max, g)
t.Logf("e: %f", math.Abs(w-g)/w)
} }
w = getPerc(a, 0.90) w, min, max = getPerc(a, 0.90)
if g := s.Query(0.90); math.Abs(w-g)/w > 0.03 { if g := s.Query(0.90); g < min || g > max {
t.Errorf("perc90: want %v, got %v", w, g) t.Errorf("perc90: want %v [%f,%f], got %v", w, min, max, g)
t.Logf("e: %f", math.Abs(w-g)/w)
} }
w = getPerc(a, 0.99) w, min, max = getPerc(a, 0.99)
if g := s.Query(0.99); math.Abs(w-g)/w > 0.03 { if g := s.Query(0.99); g < min || g > max {
t.Errorf("perc99: want %v, got %v", w, g) t.Errorf("perc99: want %v [%f,%f], got %v", w, min, max, g)
t.Logf("e: %f", math.Abs(w-g)/w)
} }
} }
@ -122,7 +115,15 @@ func TestDefaults(t *testing.T) {
} }
} }
func getPerc(x []float64, p float64) float64 { func getPerc(x []float64, p float64) (want, min, max float64) {
k := int(float64(len(x)) * p) k := int(float64(len(x)) * p)
return x[k] lower := int(float64(len(x)) * (p - 0.04))
if lower < 0 {
lower = 0
}
upper := int(float64(len(x))*(p+0.04)) + 1
if upper >= len(x) {
upper = len(x) - 1
}
return x[k], x[lower], x[upper]
} }

View File

@ -85,7 +85,7 @@ func extractCounter(out Ingester, o *ProcessOptions, f *dto.MetricFamily) error
samples = append(samples, sample) samples = append(samples, sample)
if m.TimestampMs != nil { if m.TimestampMs != nil {
sample.Timestamp = model.TimestampFromUnix(*m.TimestampMs / 1000) sample.Timestamp = model.TimestampFromUnixNano(*m.TimestampMs * 1000000)
} else { } else {
sample.Timestamp = o.Timestamp sample.Timestamp = o.Timestamp
} }
@ -116,7 +116,7 @@ func extractGauge(out Ingester, o *ProcessOptions, f *dto.MetricFamily) error {
samples = append(samples, sample) samples = append(samples, sample)
if m.TimestampMs != nil { if m.TimestampMs != nil {
sample.Timestamp = model.TimestampFromUnix(*m.TimestampMs / 1000) sample.Timestamp = model.TimestampFromUnixNano(*m.TimestampMs * 1000000)
} else { } else {
sample.Timestamp = o.Timestamp sample.Timestamp = o.Timestamp
} }
@ -145,7 +145,7 @@ func extractSummary(out Ingester, o *ProcessOptions, f *dto.MetricFamily) error
timestamp := o.Timestamp timestamp := o.Timestamp
if m.TimestampMs != nil { if m.TimestampMs != nil {
timestamp = model.TimestampFromUnix(*m.TimestampMs / 1000) timestamp = model.TimestampFromUnixNano(*m.TimestampMs * 1000000)
} }
for _, q := range m.Summary.Quantile { for _, q := range m.Summary.Quantile {
@ -209,7 +209,7 @@ func extractUntyped(out Ingester, o *ProcessOptions, f *dto.MetricFamily) error
samples = append(samples, sample) samples = append(samples, sample)
if m.TimestampMs != nil { if m.TimestampMs != nil {
sample.Timestamp = model.TimestampFromUnix(*m.TimestampMs / 1000) sample.Timestamp = model.TimestampFromUnixNano(*m.TimestampMs * 1000000)
} else { } else {
sample.Timestamp = o.Timestamp sample.Timestamp = o.Timestamp
} }

View File

@ -38,7 +38,7 @@ mf2 4
&model.Sample{ &model.Sample{
Metric: model.Metric{model.MetricNameLabel: "mf1", "label": "value1"}, Metric: model.Metric{model.MetricNameLabel: "mf1", "label": "value1"},
Value: -3.14, Value: -3.14,
Timestamp: 123, Timestamp: 123456,
}, },
&model.Sample{ &model.Sample{
Metric: model.Metric{model.MetricNameLabel: "mf1", "label": "value2"}, Metric: model.Metric{model.MetricNameLabel: "mf1", "label": "value2"},

View File

@ -95,7 +95,7 @@ name {labelname="val2",basename="base\"v\\al\nue"} 0.23 1234567890
# HELP name2 doc str"ing 2 # HELP name2 doc str"ing 2
# TYPE name2 gauge # TYPE name2 gauge
name2{labelname="val2" ,basename = "basevalue2" } +Inf 54321 name2{labelname="val2" ,basename = "basevalue2" } +Inf 54321
name2{ labelname = "val1" }-Inf name2{ labelname = "val1" , }-Inf
`, `,
out: []*dto.MetricFamily{ out: []*dto.MetricFamily{
&dto.MetricFamily{ &dto.MetricFamily{
@ -189,7 +189,7 @@ my_summary_count{n2="val2",n1="val1"} 5 5
another_summary{n1="val1",n2="val2",quantile=".3"} -1.2 another_summary{n1="val1",n2="val2",quantile=".3"} -1.2
my_summary_sum{n1="val2"} 08 15 my_summary_sum{n1="val2"} 08 15
my_summary{n1="val3", quantile="0.2"} 4711 my_summary{n1="val3", quantile="0.2"} 4711
my_summary{n1="val1",n2="val2",quantile="-12.34"} NaN my_summary{n1="val1",n2="val2",quantile="-12.34",} NaN
# some # some
# funny comments # funny comments
# HELP # HELP