Fix binary reader download duration histogram #8017

Open · wants to merge 3 commits into base: main
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -19,6 +19,7 @@ We use *breaking :warning:* to mark changes that are not backward compatible (re
- [#7907](https://github.com/thanos-io/thanos/pull/7907) Receive: Add `--receive.grpc-service-config` flag to configure gRPC service config for the receivers.
- [#7961](https://github.com/thanos-io/thanos/pull/7961) Store Gateway: Add `--store.posting-group-max-keys` flag to mark posting group as lazy if it exceeds number of keys limit. Added `thanos_bucket_store_lazy_expanded_posting_groups_total` for total number of lazy posting groups and corresponding reasons.
- [#8000](https://github.com/thanos-io/thanos/pull/8000) Query: Bump promql-engine, pass partial response through options
- [#8017](https://github.com/thanos-io/thanos/pull/8017) Store Gateway: Use native histograms for the binary reader load and download duration metrics and fix the download duration metric.

### Changed

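For context, here is a minimal standalone sketch, not part of the diff, of a client_golang histogram configured the way this PR configures `indexheader_download_duration_seconds`: the classic buckets stay for scrapers without native-histogram support, while `NativeHistogramBucketFactor` and `NativeHistogramMaxBucketNumber` enable the sparse native representation. The sleep is only a stand-in for the objstore download.

```go
package main

import (
	"fmt"
	"time"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// Classic buckets plus native-histogram options, mirroring what the PR
	// sets on the download-duration metric.
	downloadDuration := prometheus.NewHistogram(prometheus.HistogramOpts{
		Name:                           "indexheader_download_duration_seconds",
		Help:                           "Duration of the index-header download from objstore in seconds.",
		Buckets:                        []float64{0.1, 0.2, 0.5, 1, 2, 5, 15, 30, 60, 90, 120, 300},
		NativeHistogramBucketFactor:    1.1,
		NativeHistogramMaxBucketNumber: 256,
	})

	start := time.Now()
	time.Sleep(120 * time.Millisecond) // stand-in for the objstore download
	downloadDuration.Observe(time.Since(start).Seconds())

	fmt.Println("recorded one download duration sample")
}
```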
30 changes: 19 additions & 11 deletions pkg/block/indexheader/binary_reader.go
@@ -66,7 +66,7 @@ func newCRC32() hash.Hash32 {
return crc32.New(castagnoliTable)
}

// LazyBinaryReaderMetrics holds metrics tracked by LazyBinaryReader.
// BinaryReaderMetrics holds metrics tracked by BinaryReader.
type BinaryReaderMetrics struct {
downloadDuration prometheus.Histogram
loadDuration prometheus.Histogram
@@ -76,14 +76,18 @@ type BinaryReaderMetrics struct {
func NewBinaryReaderMetrics(reg prometheus.Registerer) *BinaryReaderMetrics {
return &BinaryReaderMetrics{
downloadDuration: promauto.With(reg).NewHistogram(prometheus.HistogramOpts{
Name: "indexheader_download_duration_seconds",
Help: "Duration of the index-header download from objstore in seconds.",
Buckets: []float64{0.1, 0.2, 0.5, 1, 2, 5, 15, 30, 60, 90, 120, 300},
Name: "indexheader_download_duration_seconds",
Help: "Duration of the index-header download from objstore in seconds.",
Buckets: []float64{0.1, 0.2, 0.5, 1, 2, 5, 15, 30, 60, 90, 120, 300},
NativeHistogramMaxBucketNumber: 256,
NativeHistogramBucketFactor: 1.1,
}),
loadDuration: promauto.With(reg).NewHistogram(prometheus.HistogramOpts{
Name: "indexheader_load_duration_seconds",
Help: "Duration of the index-header loading in seconds.",
Buckets: []float64{0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 15, 30, 60, 90, 120, 300},
Name: "indexheader_load_duration_seconds",
Help: "Duration of the index-header loading in seconds.",
Buckets: []float64{0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 15, 30, 60, 90, 120, 300},
NativeHistogramMaxBucketNumber: 256,
NativeHistogramBucketFactor: 1.1,
}),
}
}
@@ -97,7 +101,12 @@ type BinaryTOC struct {
}

// WriteBinary builds the index header from the pieces of the index in object storage, and caches it in a file if necessary.
func WriteBinary(ctx context.Context, bkt objstore.BucketReader, id ulid.ULID, filename string) ([]byte, error) {
func WriteBinary(ctx context.Context, bkt objstore.BucketReader, id ulid.ULID, filename string, downloadDuration prometheus.Histogram) ([]byte, error) {
start := time.Now()

defer func() {
downloadDuration.Observe(time.Since(start).Seconds())
}()
var tmpDir = ""
if filename != "" {
tmpDir = filepath.Dir(filename)
@@ -569,15 +578,14 @@ func NewBinaryReader(ctx context.Context, logger log.Logger, bkt objstore.Bucket
level.Debug(logger).Log("msg", "failed to read index-header from disk; recreating", "path", binfn, "err", err)

start := time.Now()
if _, err := WriteBinary(ctx, bkt, id, binfn); err != nil {
if _, err := WriteBinary(ctx, bkt, id, binfn, metrics.downloadDuration); err != nil {
return nil, errors.Wrap(err, "write index header")
}
metrics.loadDuration.Observe(time.Since(start).Seconds())

level.Debug(logger).Log("msg", "built index-header file", "path", binfn, "elapsed", time.Since(start))
return newFileBinaryReader(binfn, postingOffsetsInMemSampling, metrics)
} else {
buf, err := WriteBinary(ctx, bkt, id, "")
buf, err := WriteBinary(ctx, bkt, id, "", metrics.downloadDuration)
if err != nil {
return nil, errors.Wrap(err, "generate index header")
}
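With this change WriteBinary records the download duration itself via a deferred Observe, so every caller, including the in-memory path that passes an empty filename, contributes to the metric. Below is a self-contained sketch of the same record-inside-the-function pattern and a quick way to confirm through `reg.Gather()` that a sample landed on the histogram; the helper `downloadIndexHeader` is hypothetical, not Thanos code, and `prometheus.NewTimer` is just an equivalent way to express the deferred `time.Since` observation in the diff.

```go
package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

// downloadIndexHeader is a hypothetical stand-in for WriteBinary: it records
// its own duration via a deferred observation, the pattern the PR moves into
// WriteBinary itself.
func downloadIndexHeader(downloadDuration prometheus.Histogram) error {
	timer := prometheus.NewTimer(downloadDuration)
	defer timer.ObserveDuration()
	// ... download pieces of the index and build the index header here ...
	return nil
}

func main() {
	reg := prometheus.NewRegistry()
	h := prometheus.NewHistogram(prometheus.HistogramOpts{
		Name: "indexheader_download_duration_seconds",
		Help: "Duration of the index-header download from objstore in seconds.",
	})
	reg.MustRegister(h)

	if err := downloadIndexHeader(h); err != nil {
		panic(err)
	}

	// Gather and confirm exactly one sample was recorded on the histogram.
	families, err := reg.Gather()
	if err != nil {
		panic(err)
	}
	for _, mf := range families {
		if mf.GetName() == "indexheader_download_duration_seconds" {
			fmt.Println("sample count:", mf.GetMetric()[0].GetHistogram().GetSampleCount())
		}
	}
}
```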
17 changes: 12 additions & 5 deletions pkg/block/indexheader/header_test.go
@@ -17,6 +17,7 @@ import (
"github.com/go-kit/log"
"github.com/oklog/ulid"
"github.com/pkg/errors"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/tsdb/encoding"
"github.com/prometheus/prometheus/tsdb/fileutil"
@@ -31,6 +32,12 @@ import (
"github.com/thanos-io/thanos/pkg/testutil/e2eutil"
)

var (
dummyHistogram = prometheus.NewHistogram(prometheus.HistogramOpts{
Name: "duration_seconds",
})
)

func TestReaders(t *testing.T) {
ctx := context.Background()

@@ -104,7 +111,7 @@ func TestReaders(t *testing.T) {

t.Run("binary reader", func(t *testing.T) {
fn := filepath.Join(tmpDir, id.String(), block.IndexHeaderFilename)
_, err := WriteBinary(ctx, bkt, id, fn)
_, err := WriteBinary(ctx, bkt, id, fn, dummyHistogram)
testutil.Ok(t, err)

br, err := NewBinaryReader(ctx, log.NewNopLogger(), nil, tmpDir, id, 3, NewBinaryReaderMetrics(nil))
@@ -228,7 +235,7 @@ func TestReaders(t *testing.T) {

t.Run("lazy binary reader", func(t *testing.T) {
fn := filepath.Join(tmpDir, id.String(), block.IndexHeaderFilename)
_, err := WriteBinary(ctx, bkt, id, fn)
_, err := WriteBinary(ctx, bkt, id, fn, dummyHistogram)
testutil.Ok(t, err)

br, err := NewLazyBinaryReader(ctx, log.NewNopLogger(), nil, tmpDir, id, 3, NewLazyBinaryReaderMetrics(nil), NewBinaryReaderMetrics(nil), nil, false)
@@ -405,7 +412,7 @@ func BenchmarkBinaryWrite(t *testing.B) {

t.ResetTimer()
for i := 0; i < t.N; i++ {
_, err := WriteBinary(ctx, bkt, m.ULID, fn)
_, err := WriteBinary(ctx, bkt, m.ULID, fn, dummyHistogram)
testutil.Ok(t, err)
}
}
@@ -419,7 +426,7 @@ func BenchmarkBinaryReader(t *testing.B) {

m := prepareIndexV2Block(t, tmpDir, bkt)
fn := filepath.Join(tmpDir, m.ULID.String(), block.IndexHeaderFilename)
_, err = WriteBinary(ctx, bkt, m.ULID, fn)
_, err = WriteBinary(ctx, bkt, m.ULID, fn, dummyHistogram)
testutil.Ok(t, err)

t.ResetTimer()
@@ -597,7 +604,7 @@ func TestReaderPostingsOffsets(t *testing.T) {
testutil.Ok(t, block.Upload(ctx, log.NewNopLogger(), bkt, filepath.Join(tmpDir, id.String()), metadata.NoneFunc))

fn := filepath.Join(tmpDir, id.String(), block.IndexHeaderFilename)
_, err = WriteBinary(ctx, bkt, id, fn)
_, err = WriteBinary(ctx, bkt, id, fn, dummyHistogram)
testutil.Ok(t, err)

br, err := NewBinaryReader(ctx, log.NewNopLogger(), nil, tmpDir, id, 3, NewBinaryReaderMetrics(nil))
3 changes: 1 addition & 2 deletions pkg/block/indexheader/lazy_binary_reader.go
@@ -115,12 +115,11 @@ func NewLazyBinaryReader(
level.Debug(logger).Log("msg", "the index-header doesn't exist on disk; recreating", "path", indexHeaderFile)

start := time.Now()
if _, err := WriteBinary(ctx, bkt, id, indexHeaderFile); err != nil {
if _, err := WriteBinary(ctx, bkt, id, indexHeaderFile, binaryReaderMetrics.downloadDuration); err != nil {
return nil, errors.Wrap(err, "write index header")
}

level.Debug(logger).Log("msg", "built index-header file", "path", indexHeaderFile, "elapsed", time.Since(start))
binaryReaderMetrics.downloadDuration.Observe(time.Since(start).Seconds())
}
}
