forked from mirror/client_golang
Pull up HTTP changes into deprecated functions
So that users of those functions benefit as well. Obviously, we will stop updating the deprecated functions one day (at the latest once v0.10 is out).

Signed-off-by: beorn7 <beorn@soundcloud.com>
This commit is contained in:
parent 62361fc0fb
commit fb0f7fe8c2
@@ -15,9 +15,7 @@ package prometheus

 import (
 	"bufio"
-	"bytes"
 	"compress/gzip"
-	"fmt"
 	"io"
 	"net"
 	"net/http"
@@ -41,19 +39,10 @@ const (
 	acceptEncodingHeader = "Accept-Encoding"
 )

-var bufPool sync.Pool
-
-func getBuf() *bytes.Buffer {
-	buf := bufPool.Get()
-	if buf == nil {
-		return &bytes.Buffer{}
-	}
-	return buf.(*bytes.Buffer)
-}
-
-func giveBuf(buf *bytes.Buffer) {
-	buf.Reset()
-	bufPool.Put(buf)
-}
+var gzipPool = sync.Pool{
+	New: func() interface{} {
+		return gzip.NewWriter(nil)
+	},
+}

 // Handler returns an HTTP handler for the DefaultGatherer. It is
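For readers unfamiliar with the pattern, the hunk above swaps a pool of output buffers for a pool of reusable gzip.Writer values. The following standalone sketch is not part of client_golang; the compressTo helper and the package main wrapper are illustrative only. It shows the same Get / Reset / Close / Put cycle the new handler code relies on.

// Sketch of pooling gzip.Writer values with sync.Pool, as the commit does.
package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io"
	"sync"
)

var gzipPool = sync.Pool{
	New: func() interface{} {
		return gzip.NewWriter(nil)
	},
}

// compressTo gzips src into dst, borrowing a writer from the pool.
func compressTo(dst io.Writer, src []byte) error {
	gz := gzipPool.Get().(*gzip.Writer)
	defer gzipPool.Put(gz)

	gz.Reset(dst) // point the reused writer at the new destination
	if _, err := gz.Write(src); err != nil {
		return err
	}
	return gz.Close() // flush the gzip stream before the writer goes back to the pool
}

func main() {
	var buf bytes.Buffer
	if err := compressTo(&buf, []byte("hello metrics")); err != nil {
		panic(err)
	}
	fmt.Println("compressed bytes:", buf.Len())
}

Reusing the gzip.Writer avoids re-allocating its internal compression state on every scrape, which is the point of switching away from pooling whole output buffers.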
@@ -71,56 +60,38 @@ func Handler() http.Handler {
 // Deprecated: Use promhttp.HandlerFor(DefaultGatherer, promhttp.HandlerOpts{})
 // instead. See there for further documentation.
 func UninstrumentedHandler() http.Handler {
-	return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
+	return http.HandlerFunc(func(rsp http.ResponseWriter, req *http.Request) {
 		mfs, err := DefaultGatherer.Gather()
 		if err != nil {
-			http.Error(w, "An error has occurred during metrics collection:\n\n"+err.Error(), http.StatusInternalServerError)
+			httpError(rsp, err)
 			return
 		}

 		contentType := expfmt.Negotiate(req.Header)
-		buf := getBuf()
-		defer giveBuf(buf)
-		writer, encoding := decorateWriter(req, buf)
-		enc := expfmt.NewEncoder(writer, contentType)
-		var lastErr error
-		for _, mf := range mfs {
-			if err := enc.Encode(mf); err != nil {
-				lastErr = err
-				http.Error(w, "An error has occurred during metrics encoding:\n\n"+err.Error(), http.StatusInternalServerError)
-				return
-			}
-		}
-		if closer, ok := writer.(io.Closer); ok {
-			closer.Close()
-		}
-		if lastErr != nil && buf.Len() == 0 {
-			http.Error(w, "No metrics encoded, last error:\n\n"+lastErr.Error(), http.StatusInternalServerError)
-			return
-		}
-		header := w.Header()
+		header := rsp.Header()
 		header.Set(contentTypeHeader, string(contentType))
-		header.Set(contentLengthHeader, fmt.Sprint(buf.Len()))
-		if encoding != "" {
-			header.Set(contentEncodingHeader, encoding)
-		}
-		w.Write(buf.Bytes())
-	})
-}
-
-// decorateWriter wraps a writer to handle gzip compression if requested. It
-// returns the decorated writer and the appropriate "Content-Encoding" header
-// (which is empty if no compression is enabled).
-func decorateWriter(request *http.Request, writer io.Writer) (io.Writer, string) {
-	header := request.Header.Get(acceptEncodingHeader)
-	parts := strings.Split(header, ",")
-	for _, part := range parts {
-		part = strings.TrimSpace(part)
-		if part == "gzip" || strings.HasPrefix(part, "gzip;") {
-			return gzip.NewWriter(writer), "gzip"
-		}
-	}
-	return writer, ""
-}
+
+		w := io.Writer(rsp)
+		if gzipAccepted(req.Header) {
+			header.Set(contentEncodingHeader, "gzip")
+			gz := gzipPool.Get().(*gzip.Writer)
+			defer gzipPool.Put(gz)
+
+			gz.Reset(w)
+			defer gz.Close()
+
+			w = gz
+		}
+
+		enc := expfmt.NewEncoder(w, contentType)
+
+		for _, mf := range mfs {
+			if err := enc.Encode(mf); err != nil {
+				httpError(rsp, err)
+				return
+			}
+		}
+	})
+}

 var instLabels = []string{"method", "code"}
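As a usage sketch, assuming the client_golang import paths current at the time of this commit, the deprecated handler and its recommended promhttp replacement (named in the Deprecated comment above) can be wired up as follows; the /metrics-old route and port 8080 are arbitrary choices for illustration.

// Serving both the deprecated handler and the recommended replacement.
package main

import (
	"log"
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

func main() {
	// Deprecated path: still works, and after this commit it also streams
	// gzip-compressed output when the client accepts it.
	http.Handle("/metrics-old", prometheus.UninstrumentedHandler())

	// Recommended path, as pointed to by the Deprecated comment.
	http.Handle("/metrics", promhttp.HandlerFor(prometheus.DefaultGatherer, promhttp.HandlerOpts{}))

	log.Fatal(http.ListenAndServe(":8080", nil))
}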
@@ -503,3 +474,31 @@ func sanitizeCode(s int) string {
 		return strconv.Itoa(s)
 	}
 }
+
+// gzipAccepted returns whether the client will accept gzip-encoded content.
+func gzipAccepted(header http.Header) bool {
+	a := header.Get(acceptEncodingHeader)
+	parts := strings.Split(a, ",")
+	for _, part := range parts {
+		part = strings.TrimSpace(part)
+		if part == "gzip" || strings.HasPrefix(part, "gzip;") {
+			return true
+		}
+	}
+	return false
+}
+
+// httpError removes any content-encoding header and then calls http.Error with
+// the provided error and http.StatusInternalServerError. Error contents are
+// supposed to be uncompressed plain text. However, as with a plain http.Error,
+// any header settings will be void if the header has already been sent. The
+// error message will still be written to the writer, but it will probably be
+// of limited use.
+func httpError(rsp http.ResponseWriter, err error) {
+	rsp.Header().Del(contentEncodingHeader)
+	http.Error(
+		rsp,
+		"An error has occurred while serving metrics:\n\n"+err.Error(),
+		http.StatusInternalServerError,
+	)
+}
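Since gzipAccepted and httpError are unexported, they cannot be called from outside the package. The following self-contained sketch re-implements the same Accept-Encoding check under the hypothetical name acceptsGzip, purely to illustrate which header values the new helper treats as gzip-capable.

// Standalone illustration of the Accept-Encoding check added above.
package main

import (
	"fmt"
	"net/http"
	"strings"
)

func acceptsGzip(header http.Header) bool {
	a := header.Get("Accept-Encoding")
	for _, part := range strings.Split(a, ",") {
		part = strings.TrimSpace(part)
		// Matches bare "gzip" as well as parameterized forms like "gzip;q=0.8".
		if part == "gzip" || strings.HasPrefix(part, "gzip;") {
			return true
		}
	}
	return false
}

func main() {
	h := http.Header{}
	h.Set("Accept-Encoding", "deflate, gzip;q=1.0, *;q=0.5")
	fmt.Println(acceptsGzip(h)) // true

	h.Set("Accept-Encoding", "identity")
	fmt.Println(acceptsGzip(h)) // false
}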