Block-builder-scheduler initial structure (#9650)
Adds initial structure for block-builder-scheduler living at pkg/blockbuilder/scheduler. Adds a new block-builder-scheduler target to the binary. This target currently just connects to Kafka and records per-partition start and end offset gauge metrics. In the future it will compute jobs and assign them to block-builder workers.
1 parent b27a999 · commit 9a55c6f
Showing 6 changed files with 208 additions and 19 deletions.
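As a usage note: since the commit adds a block-builder-scheduler target to the binary, the component can presumably be run via Mimir's standard -target module-selection flag together with the flags registered in the config below, e.g. `mimir -target=block-builder-scheduler -block-builder-scheduler.scheduling-interval=20s`. This is a sketch; the target wiring itself lives in files whose diffs are not shown here.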
@@ -0,0 +1,36 @@
// SPDX-License-Identifier: AGPL-3.0-only

package scheduler

import (
	"flag"
	"fmt"
	"time"

	"github.com/grafana/mimir/pkg/storage/ingest"
)

type Config struct {
	BuilderConsumerGroup   string        `yaml:"builder_consumer_group"`
	SchedulerConsumerGroup string        `yaml:"scheduler_consumer_group"`
	SchedulingInterval     time.Duration `yaml:"kafka_monitor_interval"`

	// Config parameters that are defined outside the block-builder-scheduler config and are injected dynamically.
	Kafka ingest.KafkaConfig `yaml:"-"`
}

func (cfg *Config) RegisterFlags(f *flag.FlagSet) {
	f.StringVar(&cfg.BuilderConsumerGroup, "block-builder-scheduler.builder-consumer-group", "block-builder", "The Kafka consumer group used by block-builders.")
	f.StringVar(&cfg.SchedulerConsumerGroup, "block-builder-scheduler.scheduler-consumer-group", "block-builder-scheduler", "The Kafka consumer group used by block-builder-scheduler.")
	f.DurationVar(&cfg.SchedulingInterval, "block-builder-scheduler.scheduling-interval", 20*time.Second, "How frequently to recompute the schedule.")
}

func (cfg *Config) Validate() error {
	if err := cfg.Kafka.Validate(); err != nil {
		return err
	}
	if cfg.SchedulingInterval <= 0 {
		return fmt.Errorf("scheduling interval (%d) must be positive", cfg.SchedulingInterval)
	}
	return nil
}
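To make the config flow concrete, here is a minimal sketch of driving RegisterFlags and Validate by hand. The loadSchedulerConfig helper is hypothetical (Mimir wires this Config through its main config struct instead), and the explicit Kafka assignment just mirrors the `yaml:"-"` injection comment above.

package main

import (
	"flag"

	"github.com/grafana/mimir/pkg/blockbuilder/scheduler"
	"github.com/grafana/mimir/pkg/storage/ingest"
)

// loadSchedulerConfig is a hypothetical helper, not part of the commit:
// it registers the scheduler flags on a fresh FlagSet, parses the given
// args, injects the externally-defined Kafka config, and validates.
func loadSchedulerConfig(kafka ingest.KafkaConfig, args []string) (scheduler.Config, error) {
	var cfg scheduler.Config
	fs := flag.NewFlagSet("block-builder-scheduler", flag.ContinueOnError)
	cfg.RegisterFlags(fs)
	// e.g. args = []string{"-block-builder-scheduler.scheduling-interval=30s"}
	if err := fs.Parse(args); err != nil {
		return scheduler.Config{}, err
	}
	// The Kafka config is defined outside this block and injected dynamically.
	cfg.Kafka = kafka
	if err := cfg.Validate(); err != nil {
		return scheduler.Config{}, err
	}
	return cfg, nil
}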
@@ -0,0 +1,33 @@
// SPDX-License-Identifier: AGPL-3.0-only

package scheduler

import (
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
)

type schedulerMetrics struct {
	updateScheduleDuration prometheus.Histogram
	partitionStartOffset   *prometheus.GaugeVec
	partitionEndOffset     *prometheus.GaugeVec
}

func newSchedulerMetrics(reg prometheus.Registerer) schedulerMetrics {
	return schedulerMetrics{
		updateScheduleDuration: promauto.With(reg).NewHistogram(prometheus.HistogramOpts{
			Name: "cortex_blockbuilder_scheduler_schedule_update_seconds",
			Help: "Time spent updating the schedule.",

			NativeHistogramBucketFactor: 1.1,
		}),
		partitionStartOffset: promauto.With(reg).NewGaugeVec(prometheus.GaugeOpts{
			Name: "cortex_blockbuilder_scheduler_partition_start_offset",
			Help: "The observed start offset of each partition.",
		}, []string{"partition"}),
		partitionEndOffset: promauto.With(reg).NewGaugeVec(prometheus.GaugeOpts{
			Name: "cortex_blockbuilder_scheduler_partition_end_offset",
			Help: "The observed end offset of each partition.",
		}, []string{"partition"}),
	}
}
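A quick way to see these metrics in action is an in-package test. The following is a hypothetical sketch, not part of the commit: it registers the metrics on a fresh registry, records one sample per metric, and checks that all three metric families gather cleanly.

package scheduler

import (
	"testing"

	"github.com/prometheus/client_golang/prometheus"
)

// TestSchedulerMetricsSketch is a hypothetical test illustrating the
// metrics above in isolation.
func TestSchedulerMetricsSketch(t *testing.T) {
	reg := prometheus.NewPedanticRegistry()
	m := newSchedulerMetrics(reg)

	m.updateScheduleDuration.Observe(0.25) // one schedule update took 250ms
	m.partitionStartOffset.WithLabelValues("0").Set(100)
	m.partitionEndOffset.WithLabelValues("0").Set(250)

	families, err := reg.Gather()
	if err != nil {
		t.Fatal(err)
	}
	// One family each: the histogram and the two gauge vectors.
	if len(families) != 3 {
		t.Fatalf("expected 3 metric families, got %d", len(families))
	}
}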
@@ -0,0 +1,101 @@
// SPDX-License-Identifier: AGPL-3.0-only

package scheduler

import (
	"context"
	"fmt"
	"time"

	"github.com/go-kit/log"
	"github.com/go-kit/log/level"
	"github.com/grafana/dskit/services"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/twmb/franz-go/pkg/kadm"
	"github.com/twmb/franz-go/pkg/kgo"

	"github.com/grafana/mimir/pkg/storage/ingest"
)

type BlockBuilderScheduler struct {
	services.Service

	kafkaClient *kgo.Client
	cfg         Config
	logger      log.Logger
	register    prometheus.Registerer
	metrics     schedulerMetrics
}

func New(
	cfg Config,
	logger log.Logger,
	reg prometheus.Registerer,
) (*BlockBuilderScheduler, error) {
	s := &BlockBuilderScheduler{
		cfg:      cfg,
		logger:   logger,
		register: reg,
		metrics:  newSchedulerMetrics(reg),
	}
	s.Service = services.NewBasicService(s.starting, s.running, s.stopping)
	return s, nil
}

func (s *BlockBuilderScheduler) starting(context.Context) error {
	kc, err := ingest.NewKafkaReaderClient(
		s.cfg.Kafka,
		ingest.NewKafkaReaderClientMetrics("block-builder-scheduler", s.register),
		s.logger,
		kgo.ConsumerGroup(s.cfg.SchedulerConsumerGroup),
		// The scheduler simply monitors partitions. We don't want it committing offsets.
		kgo.DisableAutoCommit(),
	)
	if err != nil {
		return fmt.Errorf("creating kafka reader: %w", err)
	}
	s.kafkaClient = kc
	return nil
}

func (s *BlockBuilderScheduler) stopping(_ error) error {
	s.kafkaClient.Close()
	return nil
}

func (s *BlockBuilderScheduler) running(ctx context.Context) error {
	updateTick := time.NewTicker(s.cfg.SchedulingInterval)
	defer updateTick.Stop()
	for {
		select {
		case <-updateTick.C:
			s.updateSchedule(ctx)
		case <-ctx.Done():
			return nil
		}
	}
}

func (s *BlockBuilderScheduler) updateSchedule(ctx context.Context) {
	startTime := time.Now()
	// Eventually this will also include job computation. But for now, collect partition data.
	admin := kadm.NewClient(s.kafkaClient)

	startOffsets, err := admin.ListStartOffsets(ctx, s.cfg.Kafka.Topic)
	if err != nil {
		level.Warn(s.logger).Log("msg", "failed to list start offsets", "err", err)
	}
	endOffsets, err := admin.ListEndOffsets(ctx, s.cfg.Kafka.Topic)
	if err != nil {
		level.Warn(s.logger).Log("msg", "failed to list end offsets", "err", err)
	}

	s.metrics.updateScheduleDuration.Observe(time.Since(startTime).Seconds())

	startOffsets.Each(func(o kadm.ListedOffset) {
		s.metrics.partitionStartOffset.WithLabelValues(fmt.Sprint(o.Partition)).Set(float64(o.Offset))
	})
	endOffsets.Each(func(o kadm.ListedOffset) {
		s.metrics.partitionEndOffset.WithLabelValues(fmt.Sprint(o.Partition)).Set(float64(o.Offset))
	})
}
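Since BlockBuilderScheduler embeds a dskit services.Service built from the three functions above, it follows the usual starting/running/stopping lifecycle: starting() connects to Kafka, running() polls offsets every SchedulingInterval, and stopping() closes the client. A hypothetical harness (not part of the commit, and assuming cfg is populated with valid Kafka settings) could drive it like this:

package main

import (
	"context"
	"os"

	"github.com/go-kit/log"
	"github.com/grafana/dskit/services"
	"github.com/prometheus/client_golang/prometheus"

	"github.com/grafana/mimir/pkg/blockbuilder/scheduler"
)

func main() {
	logger := log.NewLogfmtLogger(os.Stderr)

	// Hypothetical: cfg.Kafka and the consumer groups would be populated
	// from real configuration before this point.
	var cfg scheduler.Config

	s, err := scheduler.New(cfg, logger, prometheus.DefaultRegisterer)
	if err != nil {
		logger.Log("msg", "creating scheduler", "err", err)
		os.Exit(1)
	}

	ctx := context.Background()
	// Runs starting(), then leaves running() ticking in the background.
	if err := services.StartAndAwaitRunning(ctx, s); err != nil {
		logger.Log("msg", "starting scheduler", "err", err)
		os.Exit(1)
	}

	// ... later: cancels running() and invokes stopping().
	if err := services.StopAndAwaitTerminated(ctx, s); err != nil {
		logger.Log("msg", "stopping scheduler", "err", err)
		os.Exit(1)
	}
}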
(Diffs for the remaining three changed files are not shown here.)