Skip to content

Commit

Permalink
adds split func to statsd reporter
Browse files Browse the repository at this point in the history
  • Loading branch information
matlockx committed May 11, 2020
1 parent ff500a4 commit d4013d9
Show file tree
Hide file tree
Showing 2 changed files with 39 additions and 26 deletions.
2 changes: 2 additions & 0 deletions go.mod
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
module github.com/flachnetz/go-datadog

go 1.14

require (
github.com/DataDog/datadog-go v2.2.0+incompatible
github.com/go-check/check v0.0.0-20180628173108-788fd7840127
Expand Down
63 changes: 37 additions & 26 deletions metrics_statsd_reporter.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import (
"log"
"runtime"
"strconv"
"strings"
"time"

"github.com/DataDog/datadog-go/statsd"
Expand Down Expand Up @@ -90,70 +91,80 @@ func (r *StatsDReporter) Flush() {
}
}

// splitNameAndTags extracts an inline tag list embedded in a metric name.
// When the name matches tagPattern (base name plus a comma-separated tag
// suffix), it returns the bare name together with those inline tags merged
// with the reporter's static tags. Names without an inline tag suffix are
// returned unchanged, paired with only the static tags.
func (r *StatsDReporter) splitNameAndTags(metric string) (string, []string) {
	match := tagPattern.FindStringSubmatch(metric)
	if len(match) != 3 {
		// no embedded tags — fall back to the reporter-wide tag set
		return metric, r.tags
	}
	// strings.Split allocates a fresh slice, so appending r.tags here
	// cannot mutate the reporter's own tag slice.
	inline := strings.Split(match[2], ",")
	return match[1], append(inline, r.tags...)
}

// FlushOnce submits a snapshot submission of the registry to DataDog. This can
// be used in a loop similarly to FlushWithInterval for custom error handling or
// data submission variations.
func (r *StatsDReporter) FlushOnce() error {
r.Registry.Each(func(name string, i interface{}) {

name, tags := r.splitNameAndTags(name)

switch metric := i.(type) {
case metrics.Counter:
v := metric.Count()
l := r.ss[name]
r.Client.Count(name+".count", v-l, r.tags, 1)
r.Client.Count(name+".count", v-l, tags, 1)
r.ss[name] = v

case metrics.Gauge:
r.Client.Gauge(name+".value", float64(metric.Value()), r.tags, 1)
r.Client.Gauge(name+".value", float64(metric.Value()), tags, 1)

case metrics.GaugeFloat64:
r.Client.Gauge(name+".value", metric.Value(), r.tags, 1)
r.Client.Gauge(name+".value", metric.Value(), tags, 1)

case metrics.Histogram:
ms := metric.Snapshot()

r.Client.Gauge(name+".count", float64(ms.Count()), r.tags, 1)
r.Client.Gauge(name+".max", float64(ms.Max()), r.tags, 1)
r.Client.Gauge(name+".min", float64(ms.Min()), r.tags, 1)
r.Client.Gauge(name+".mean", ms.Mean(), r.tags, 1)
r.Client.Gauge(name+".stddev", ms.StdDev(), r.tags, 1)
r.Client.Gauge(name+".median", time.Duration(ms.Percentile(0.5)).Seconds()*1000, r.tags, 1)
r.Client.Gauge(name+".count", float64(ms.Count()), tags, 1)
r.Client.Gauge(name+".max", float64(ms.Max()), tags, 1)
r.Client.Gauge(name+".min", float64(ms.Min()), tags, 1)
r.Client.Gauge(name+".mean", ms.Mean(), tags, 1)
r.Client.Gauge(name+".stddev", ms.StdDev(), tags, 1)
r.Client.Gauge(name+".median", time.Duration(ms.Percentile(0.5)).Seconds()*1000, tags, 1)

if len(r.percentiles) > 0 {
values := ms.Percentiles(r.percentiles)
for i, p := range r.p {
r.Client.Gauge(name+p, values[i], r.tags, 1)
r.Client.Gauge(name+p, values[i], tags, 1)
}
}

case metrics.Meter:
ms := metric.Snapshot()

r.Client.Gauge(name+".count", float64(ms.Count()), r.tags, 1)
r.Client.Gauge(name+".rate.1min", ms.Rate1(), r.tags, 1)
r.Client.Gauge(name+".rate.5min", ms.Rate5(), r.tags, 1)
r.Client.Gauge(name+".rate.15min", ms.Rate15(), r.tags, 1)
r.Client.Gauge(name+".rate.mean", ms.RateMean(), r.tags, 1)
r.Client.Gauge(name+".count", float64(ms.Count()), tags, 1)
r.Client.Gauge(name+".rate.1min", ms.Rate1(), tags, 1)
r.Client.Gauge(name+".rate.5min", ms.Rate5(), tags, 1)
r.Client.Gauge(name+".rate.15min", ms.Rate15(), tags, 1)
r.Client.Gauge(name+".rate.mean", ms.RateMean(), tags, 1)

case metrics.Timer:
ms := metric.Snapshot()

r.Client.Gauge(name+".count", float64(ms.Count()), r.tags, 1)
r.Client.Gauge(name+".max", time.Duration(ms.Max()).Seconds()*1000, r.tags, 1)
r.Client.Gauge(name+".min", time.Duration(ms.Min()).Seconds()*1000, r.tags, 1)
r.Client.Gauge(name+".mean", time.Duration(ms.Mean()).Seconds()*1000, r.tags, 1)
r.Client.Gauge(name+".stddev", time.Duration(ms.StdDev()).Seconds()*1000, r.tags, 1)
r.Client.Gauge(name+".count", float64(ms.Count()), tags, 1)
r.Client.Gauge(name+".max", time.Duration(ms.Max()).Seconds()*1000, tags, 1)
r.Client.Gauge(name+".min", time.Duration(ms.Min()).Seconds()*1000, tags, 1)
r.Client.Gauge(name+".mean", time.Duration(ms.Mean()).Seconds()*1000, tags, 1)
r.Client.Gauge(name+".stddev", time.Duration(ms.StdDev()).Seconds()*1000, tags, 1)

r.Client.Gauge(name+".median", time.Duration(ms.Percentile(0.5)).Seconds()*1000, r.tags, 1)
r.Client.Gauge(name+".median", time.Duration(ms.Percentile(0.5)).Seconds()*1000, tags, 1)

r.Client.Gauge(name+".rate.1min", ms.Rate1(), r.tags, 1)
r.Client.Gauge(name+".rate.5min", ms.Rate5(), r.tags, 1)
r.Client.Gauge(name+".rate.15min", ms.Rate15(), r.tags, 1)
r.Client.Gauge(name+".rate.mean", ms.RateMean(), r.tags, 1)
r.Client.Gauge(name+".rate.1min", ms.Rate1(), tags, 1)
r.Client.Gauge(name+".rate.5min", ms.Rate5(), tags, 1)
r.Client.Gauge(name+".rate.15min", ms.Rate15(), tags, 1)
r.Client.Gauge(name+".rate.mean", ms.RateMean(), tags, 1)

if len(r.percentiles) > 0 {
values := ms.Percentiles(r.percentiles)
for i, p := range r.p {
r.Client.Gauge(name+p, time.Duration(values[i]).Seconds()*1000, r.tags, 1)
r.Client.Gauge(name+p, time.Duration(values[i]).Seconds()*1000, tags, 1)
}
}
}
Expand Down

0 comments on commit d4013d9

Please sign in to comment.