commit f1323f902c
@@ -89,7 +89,7 @@ func TestClientURL(t *testing.T) {
             address: "http://localhost:9090",
             endpoint: "/test/:param",
             args: map[string]string{
-                "nonexistant": "content",
+                "nonexistent": "content",
             },
             expected: "http://localhost:9090/test/:param",
         },
@@ -78,7 +78,7 @@ func ExampleNewExpvarCollector() {
         close(metricChan)
     }()
     for m := range metricChan {
-        if strings.Index(m.Desc().String(), "expvar_memstats") == -1 {
+        if !strings.Contains(m.Desc().String(), "expvar_memstats") {
             metric.Reset()
             m.Write(&metric)
             metricStrings = append(metricStrings, metric.String())
@@ -265,7 +265,7 @@ func (c *goCollector) Collect(ch chan<- Metric) {
         quantiles[float64(idx+1)/float64(len(stats.PauseQuantiles)-1)] = pq.Seconds()
     }
     quantiles[0.0] = stats.PauseQuantiles[0].Seconds()
-    ch <- MustNewConstSummary(c.gcDesc, uint64(stats.NumGC), float64(stats.PauseTotal.Seconds()), quantiles)
+    ch <- MustNewConstSummary(c.gcDesc, uint64(stats.NumGC), stats.PauseTotal.Seconds(), quantiles)

     ch <- MustNewConstMetric(c.goInfoDesc, GaugeValue, 1)

@@ -115,7 +115,7 @@ func decorateWriter(request *http.Request, writer io.Writer) (io.Writer, string)
     header := request.Header.Get(acceptEncodingHeader)
     parts := strings.Split(header, ",")
     for _, part := range parts {
-        part := strings.TrimSpace(part)
+        part = strings.TrimSpace(part)
         if part == "gzip" || strings.HasPrefix(part, "gzip;") {
             return gzip.NewWriter(writer), "gzip"
         }
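Aside: the change above drops a `:=` that shadowed the loop variable in decorateWriter's Accept-Encoding handling. A minimal, self-contained sketch of that negotiation logic (not the library's exact code; the caller would still need to Close the gzip writer):

```go
package main

import (
	"compress/gzip"
	"io"
	"net/http"
	"strings"
)

// decorate wraps writer in a gzip.Writer when the client's Accept-Encoding
// header offers "gzip" (possibly with parameters, e.g. "gzip;q=1.0"),
// and reports which encoding was chosen.
func decorate(r *http.Request, w io.Writer) (io.Writer, string) {
	header := r.Header.Get("Accept-Encoding")
	for _, part := range strings.Split(header, ",") {
		// Assign rather than shadow the loop variable, as in the fix above.
		part = strings.TrimSpace(part)
		if part == "gzip" || strings.HasPrefix(part, "gzip;") {
			return gzip.NewWriter(w), "gzip"
		}
	}
	return w, ""
}

func main() {} // compile-only sketch
```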
@@ -139,16 +139,6 @@ var now nower = nowFunc(func() time.Time {
     return time.Now()
 })

-func nowSeries(t ...time.Time) nower {
-    return nowFunc(func() time.Time {
-        defer func() {
-            t = t[1:]
-        }()
-
-        return t[0]
-    })
-}
-
 // InstrumentHandler wraps the given HTTP handler for instrumentation. It
 // registers four metric collectors (if not already done) and reports HTTP
 // metrics to the (newly or already) registered collectors: http_requests_total
@@ -352,10 +342,9 @@ func computeApproximateRequestSize(r *http.Request) <-chan int {
 type responseWriterDelegator struct {
     http.ResponseWriter

-    handler, method string
-    status int
-    written int64
-    wroteHeader bool
+    status int
+    written int64
+    wroteHeader bool
 }

 func (r *responseWriterDelegator) WriteHeader(code int) {
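Aside: the struct above is the usual ResponseWriter-delegation pattern: embed http.ResponseWriter and record the status code and bytes written so the instrumenting wrapper can report them. A hedged sketch of the pattern (the Write method here is an assumption and does not appear in this diff):

```go
package main

import "net/http"

// responseWriterDelegator embeds the real ResponseWriter and records
// what the wrapped handler did with it.
type responseWriterDelegator struct {
	http.ResponseWriter

	status      int
	written     int64
	wroteHeader bool
}

func (r *responseWriterDelegator) WriteHeader(code int) {
	r.status = code
	r.wroteHeader = true
	r.ResponseWriter.WriteHeader(code)
}

func (r *responseWriterDelegator) Write(b []byte) (int, error) {
	// Writing without an explicit WriteHeader implies a 200 response.
	if !r.wroteHeader {
		r.WriteHeader(http.StatusOK)
	}
	n, err := r.ResponseWriter.Write(b)
	r.written += int64(n)
	return n, err
}

func main() {} // compile-only sketch
```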
@@ -29,6 +29,16 @@ func (b respBody) ServeHTTP(w http.ResponseWriter, r *http.Request) {
     w.Write([]byte(b))
 }

+func nowSeries(t ...time.Time) nower {
+    return nowFunc(func() time.Time {
+        defer func() {
+            t = t[1:]
+        }()
+
+        return t[0]
+    })
+}
+
 func TestInstrumentHandler(t *testing.T) {
     defer func(n nower) {
         now = n.(nower)
@@ -37,9 +47,9 @@ func TestInstrumentHandler(t *testing.T) {
     instant := time.Now()
     end := instant.Add(30 * time.Second)
     now = nowSeries(instant, end)
-    respBody := respBody("Howdy there!")
+    body := respBody("Howdy there!")

-    hndlr := InstrumentHandler("test-handler", respBody)
+    hndlr := InstrumentHandler("test-handler", body)

     opts := SummaryOpts{
         Subsystem: "http",
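Aside: the nowSeries helper moved into the test above builds on a small clock abstraction, a nower interface with a nowFunc adapter, so tests can feed deterministic times. A self-contained sketch (the nower/nowFunc definitions are assumed from context, as they are not part of this diff):

```go
package main

import (
	"fmt"
	"time"
)

// nower abstracts the clock so tests can substitute deterministic times.
type nower interface {
	Now() time.Time
}

// nowFunc adapts an ordinary function to the nower interface.
type nowFunc func() time.Time

func (n nowFunc) Now() time.Time { return n() }

// nowSeries returns a nower that hands out the given times in order,
// one per call, as in the test helper above.
func nowSeries(t ...time.Time) nower {
	return nowFunc(func() time.Time {
		defer func() {
			t = t[1:]
		}()

		return t[0]
	})
}

func main() {
	instant := time.Now()
	end := instant.Add(30 * time.Second)

	clock := nowSeries(instant, end)
	fmt.Println(clock.Now().Equal(instant)) // true: first call yields the first time
	fmt.Println(clock.Now().Equal(end))     // true: second call yields the second time
}
```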
@@ -114,8 +124,8 @@ func TestInstrumentHandler(t *testing.T) {
     if resp.Code != http.StatusTeapot {
         t.Fatalf("expected status %d, got %d", http.StatusTeapot, resp.Code)
     }
-    if string(resp.Body.Bytes()) != "Howdy there!" {
-        t.Fatalf("expected body %s, got %s", "Howdy there!", string(resp.Body.Bytes()))
+    if resp.Body.String() != "Howdy there!" {
+        t.Fatalf("expected body %s, got %s", "Howdy there!", resp.Body.String())
     }

     out := &dto.Metric{}
@@ -127,20 +127,6 @@ func (s LabelPairSorter) Less(i, j int) bool {
     return s[i].GetName() < s[j].GetName()
 }

-type hashSorter []uint64
-
-func (s hashSorter) Len() int {
-    return len(s)
-}
-
-func (s hashSorter) Swap(i, j int) {
-    s[i], s[j] = s[j], s[i]
-}
-
-func (s hashSorter) Less(i, j int) bool {
-    return s[i] < s[j]
-}
-
 type invalidMetric struct {
     desc *Desc
     err error
@@ -16,7 +16,6 @@ package prometheus
 import "github.com/prometheus/procfs"

 type processCollector struct {
-    pid int
     collectFn func(chan<- Metric)
     pidFn func() (int, error)
     cpuTotal *Desc
@@ -1,3 +1,5 @@
+// +build linux
+
 package prometheus

 import (
@@ -302,7 +302,7 @@ func decorateWriter(request *http.Request, writer io.Writer, compressionDisabled
     header := request.Header.Get(acceptEncodingHeader)
     parts := strings.Split(header, ",")
     for _, part := range parts {
-        part := strings.TrimSpace(part)
+        part = strings.TrimSpace(part)
         if part == "gzip" || strings.HasPrefix(part, "gzip;") {
             return gzip.NewWriter(writer), "gzip"
         }
@@ -121,7 +121,7 @@ the_count 0
         t.Errorf("got HTTP status code %d, want %d", got, want)
     }
     if got := logBuf.String(); got != wantMsg {
-        t.Errorf("got log message:\n%s\nwant log mesage:\n%s\n", got, wantMsg)
+        t.Errorf("got log message:\n%s\nwant log message:\n%s\n", got, wantMsg)
     }
     if got := writer.Body.String(); got != wantErrorBody {
         t.Errorf("got body:\n%s\nwant body:\n%s\n", got, wantErrorBody)
@@ -66,7 +66,7 @@ type Pusher struct {
     username, password string
 }

-// New creates a new Pusher to push to the provided URL withe the provided job
+// New creates a new Pusher to push to the provided URL with the provided job
 // name. You can use just host:port or ip:port as url, in which case “http://”
 // is added automatically. Alternatively, include the schema in the
 // URL. However, do not include the “/metrics/jobs/…” part.
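Aside: for orientation, typical use of the Pusher described above looks roughly like this sketch (assuming the push package's New/Collector/Push methods and a Pushgateway reachable at localhost:9091):

```go
package main

import (
	"log"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/push"
)

func main() {
	completionTime := prometheus.NewGauge(prometheus.GaugeOpts{
		Name: "db_backup_last_completion_timestamp_seconds",
		Help: "The timestamp of the last successful completion of a DB backup.",
	})
	completionTime.SetToCurrentTime()

	// "localhost:9091" would work as well; the "http://" scheme is then
	// added automatically, as the doc comment above describes.
	if err := push.New("http://localhost:9091", "db_backup").
		Collector(completionTime).
		Push(); err != nil {
		log.Fatal("Could not push completion time to Pushgateway: ", err)
	}
}
```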
@@ -437,7 +437,7 @@ collectLoop:
             ))
         default:
             if goroutineBudget <= 0 || len(collectors) == 0 {
-                // All collectors are aleady being worked on or
+                // All collectors are already being worked on or
                 // we have already as many goroutines started as
                 // there are collectors. Just process metrics
                 // from now on.
@@ -152,9 +152,7 @@ func makeLabelPairs(desc *Desc, labelValues []string) []*dto.LabelPair {
             Value: proto.String(labelValues[i]),
         })
     }
-    for _, lp := range desc.constLabelPairs {
-        labelPairs = append(labelPairs, lp)
-    }
+    labelPairs = append(labelPairs, desc.constLabelPairs...)
     sort.Sort(LabelPairSorter(labelPairs))
     return labelPairs
 }