diff --git a/docs/data-sources/recipients.md b/docs/data-sources/recipients.md index 96402fb1..0dedde84 100644 --- a/docs/data-sources/recipients.md +++ b/docs/data-sources/recipients.md @@ -26,7 +26,7 @@ data "honeycombio_recipients" "example-dot-com" { The following arguments are supported: * `type` - (Optional) The type of recipient, allowed types are `email`, `pagerduty`, `slack` and `webhook`. -* `detail_filter` - (Optional) a block to further filter recipients as described below. `type` must be set when providing a filter. +* `detail_filter` - (Optional) a block to further filter recipients as described below. `name` must be set when providing a filter. To further filter the recipient results, a `detail_filter` block can be provided which accepts the following arguments: diff --git a/docs/data-sources/slo.md b/docs/data-sources/slo.md new file mode 100644 index 00000000..7135cc20 --- /dev/null +++ b/docs/data-sources/slo.md @@ -0,0 +1,37 @@ +# Data Source: honeycombio_slo + +The `honeycombio_slo` data source retrieves the details of a single SLO. + +-> **Note** Terraform will fail unless an SLO is returned by the search. Ensure that your search is specific enough to return an SLO. +If you want to match multiple SLOs, use the `honeycombio_slos` data source instead. + +## Example Usage + +```hcl +variable "dataset" { + type = string +} + +# Retrieve the details of a single SLO +data "honeycombio_slo" "myslo" { + dataset = var.dataset + id = "fS4WfA82ACt" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `dataset` - (Required) The dataset this SLO is associated with +* `id` - (Required) The ID of the SLO + +## Attribute Reference + +In addition to all arguments above, the following attributes are exported: + +* `name` - the name of the SLO. +* `description` - the SLO's description. +* `sli` - the alias of the Derived Column used as the SLO's SLI. 
+* `target_percentage` - the percentage of qualified events expected to succeed during the `time_period`. +* `time_period` - The time period, in days, over which the SLO is evaluated. diff --git a/docs/data-sources/slos.md b/docs/data-sources/slos.md new file mode 100644 index 00000000..e1a8f6ec --- /dev/null +++ b/docs/data-sources/slos.md @@ -0,0 +1,47 @@ +# Data Source: honeycombio_slos + +The SLOs data source allows the SLOs of a dataset to be retrieved. + +## Example Usage + +```hcl +variable "dataset" { + type = string +} + +# returns all SLOs +data "honeycombio_slos" "all" { + dataset = var.dataset +} + +# only returns the SLOs starting with 'foo_' +data "honeycombio_slos" "foo" { + dataset = var.dataset + + detail_filter { + name = "name" + value_regex = "^foo_" + } +} +``` + +## Argument Reference + +The following arguments are supported: + +* `dataset` - (Required) The dataset to retrieve the SLOs list from +* `detail_filter` - (Optional) a block to further filter SLOs as described below. `name` must be set when providing a filter. + +To further filter the SLO results, a `detail_filter` block can be provided which accepts the following arguments: + +* `name` - (Required) The name of the detail field to filter by. Currently only `name` is supported. +* `value` - (Optional) The value of the detail field to match on. +* `value_regex` - (Optional) A regular expression string to apply to the value of the detail field to match on. + +~> **Note** one of `value` or `value_regex` is required. 
+ +## Attribute Reference + +In addition to all arguments above, the following attributes are exported: + +* `ids` - a list of all the SLO IDs found in the dataset diff --git a/honeycombio/data_source_recipients.go b/honeycombio/data_source_recipients.go index 7f310088..b9225f5d 100644 --- a/honeycombio/data_source_recipients.go +++ b/honeycombio/data_source_recipients.go @@ -32,7 +32,7 @@ func dataSourceHoneycombioRecipients() *schema.Resource { Optional: true, MinItems: 1, MaxItems: 1, - Description: "Attributes to filter the recipients with. `type` must be set when providing a filter.", + Description: "Attributes to filter the recipients with. `name` must be set when providing a filter.", RequiredWith: []string{"type"}, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ diff --git a/internal/helper/filter/slo_filter.go b/internal/helper/filter/slo_filter.go new file mode 100644 index 00000000..d7bf88df --- /dev/null +++ b/internal/helper/filter/slo_filter.go @@ -0,0 +1,56 @@ +package filter + +import ( + "fmt" + "regexp" + + "github.com/honeycombio/terraform-provider-honeycombio/client" + "github.com/honeycombio/terraform-provider-honeycombio/internal/helper" +) + +type SLODetailFilter struct { + Type string + Value *string + ValueRegex *regexp.Regexp +} + +func NewDetailSLOFilter(filterType, v, r string) (*SLODetailFilter, error) { + if filterType != "name" { + return nil, fmt.Errorf("only name is supported as a filter type") + } + if v != "" && r != "" { + return nil, fmt.Errorf("only one of value or value_regex may be provided") + } + if v == "" && r == "" { + return nil, fmt.Errorf("one of value or value_regex must be provided") + } + + var value *string + var valRegexp *regexp.Regexp + if v != "" { + value = helper.ToPtr(v) + } + if r != "" { + valRegexp = regexp.MustCompile(r) + } + + return &SLODetailFilter{ + Type: filterType, + Value: value, + ValueRegex: valRegexp, + }, nil +} + +func (f *SLODetailFilter) Match(s client.SLO) bool { + // nil 
filter fails open + if f == nil { + return true + } + if f.Value != nil { + return s.Name == *f.Value + } + if f.ValueRegex != nil { + return f.ValueRegex.MatchString(s.Name) + } + return true +} diff --git a/internal/helper/hashcode/hashcode.go b/internal/helper/hashcode/hashcode.go index 6ccc5231..addc7afe 100644 --- a/internal/helper/hashcode/hashcode.go +++ b/internal/helper/hashcode/hashcode.go @@ -4,6 +4,8 @@ import ( "bytes" "fmt" "hash/crc32" + + "github.com/hashicorp/terraform-plugin-framework/types" ) // String hashes a string to a unique hashcode. @@ -23,7 +25,7 @@ func String(s string) int { return 0 } -// Strings hashes a list of strings to a unique hashcode. +// Strings hashes a slice of strings to a unique hashcode. func Strings(strings []string) string { var buf bytes.Buffer @@ -33,3 +35,14 @@ func Strings(strings []string) string { return fmt.Sprintf("%d", String(buf.String())) } + +// StringValues hashes a slice of tfsdk Strings to a unique hashcode. +func StringValues(strings []types.String) string { + var buf bytes.Buffer + + for _, s := range strings { + buf.WriteString(fmt.Sprintf("%s-", s.String())) + } + + return fmt.Sprintf("%d", String(buf.String())) +} diff --git a/internal/helper/ptr.go b/internal/helper/ptr.go new file mode 100644 index 00000000..fff9585f --- /dev/null +++ b/internal/helper/ptr.go @@ -0,0 +1,6 @@ +package helper + +// Returns a pointer to the given value +func ToPtr[T any](v T) *T { + return &v +} diff --git a/internal/helper/validation/valid_regex.go b/internal/helper/validation/valid_regex.go new file mode 100644 index 00000000..2e4eb117 --- /dev/null +++ b/internal/helper/validation/valid_regex.go @@ -0,0 +1,44 @@ +package validation + +import ( + "context" + "fmt" + "regexp" + + "github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +var _ validator.String = isValidRegExpValidator{} + +type isValidRegExpValidator 
struct{} + +func (v isValidRegExpValidator) Description(_ context.Context) string { + return "value must be a valid regular expression" +} + +func (v isValidRegExpValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +func (v isValidRegExpValidator) ValidateString(ctx context.Context, request validator.StringRequest, response *validator.StringResponse) { + if request.ConfigValue.IsNull() || request.ConfigValue.IsUnknown() { + return + } + + if _, err := regexp.Compile(request.ConfigValue.ValueString()); err != nil { + response.Diagnostics.Append(validatordiag.InvalidAttributeValueDiagnostic( + request.Path, + v.Description(ctx), + fmt.Sprintf("%q: %s", request.ConfigValue.ValueString(), err.Error()), + )) + } +} + +// IsValidRegExp returns an AttributeValidator which ensures that any configured +// attribute value is a valid regular expression. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func IsValidRegExp() validator.String { + return isValidRegExpValidator{} +} diff --git a/internal/helper/validation/valid_regex_test.go b/internal/helper/validation/valid_regex_test.go new file mode 100644 index 00000000..ae64c490 --- /dev/null +++ b/internal/helper/validation/valid_regex_test.go @@ -0,0 +1,57 @@ +package validation_test + +import ( + "context" + "testing" + + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/honeycombio/terraform-provider-honeycombio/internal/helper/validation" +) + +func Test_IsValidRegexValidator(t *testing.T) { + t.Parallel() + + type testCase struct { + val types.String + expectError bool + } + tests := map[string]testCase{ + "unknown": { + val: types.StringUnknown(), + }, + "null": { + val: types.StringNull(), + }, + "valid regex": { + val: types.StringValue("^[a-z]+$"), + }, + "invalid regex": { + val: 
types.StringValue("^[a-z+$"), + expectError: true, + }, + } + + for name, test := range tests { + name, test := name, test + t.Run(name, func(t *testing.T) { + t.Parallel() + request := validator.StringRequest{ + Path: path.Root("test"), + PathExpression: path.MatchRoot("test"), + ConfigValue: test.val, + } + response := validator.StringResponse{} + validation.IsValidRegExp().ValidateString(context.TODO(), request, &response) + + if !response.Diagnostics.HasError() && test.expectError { + t.Fatal("expected error, got no error") + } + + if response.Diagnostics.HasError() && !test.expectError { + t.Fatalf("got unexpected error: %s", response.Diagnostics) + } + }) + } +} diff --git a/internal/provider/derived_column_data_source.go b/internal/provider/derived_column_data_source.go index bd3aeac9..1b67398c 100644 --- a/internal/provider/derived_column_data_source.go +++ b/internal/provider/derived_column_data_source.go @@ -40,13 +40,13 @@ func (d *derivedColumnDataSource) Metadata(_ context.Context, req datasource.Met func (d *derivedColumnDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ - Description: "Fetches the Derived Columns in a dataset", + Description: "Fetches a Derived Column in a dataset", Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{ Computed: true, }, "dataset": schema.StringAttribute{ - Description: "The dataset to fetch the derived columns from. Use '__all__' to fetch Environment-wide derived columns.", + Description: "The dataset to fetch the derived column from. 
Use '__all__' to fetch an Environment-wide derived column.", Required: true, }, "alias": schema.StringAttribute{ diff --git a/internal/provider/derived_columns_data_source.go b/internal/provider/derived_columns_data_source.go index acfc3e2a..62aea802 100644 --- a/internal/provider/derived_columns_data_source.go +++ b/internal/provider/derived_columns_data_source.go @@ -82,21 +82,14 @@ func (d *derivedColumnsDataSource) Read(ctx context.Context, req datasource.Read return } - if !data.StartsWith.IsNull() { - startsWith := data.StartsWith.ValueString() - - for i := len(columns) - 1; i >= 0; i-- { - if !strings.HasPrefix(columns[i].Alias, startsWith) { - columns = append(columns[:i], columns[i+1:]...) - } + startsWith := data.StartsWith.ValueString() + for _, s := range columns { + if startsWith != "" && !strings.HasPrefix(s.Alias, startsWith) { + continue } + data.Columns = append(data.Columns, types.StringValue(s.ID)) } - - ids := make([]string, len(columns)) - for _, dc := range columns { - data.Columns = append(data.Columns, types.StringValue(dc.Alias)) - } - data.ID = types.StringValue(hashcode.Strings(ids)) + data.ID = types.StringValue(hashcode.StringValues(data.Columns)) diags := resp.State.Set(ctx, &data) resp.Diagnostics.Append(diags...) 
diff --git a/internal/provider/provider.go b/internal/provider/provider.go index b551e7ec..ed6cd22d 100644 --- a/internal/provider/provider.go +++ b/internal/provider/provider.go @@ -69,6 +69,8 @@ func (p *HoneycombioProvider) DataSources(ctx context.Context) []func() datasour return []func() datasource.DataSource{ NewDerivedColumnDataSource, NewDerivedColumnsDataSource, + NewSLODataSource, + NewSLOsDataSource, } } diff --git a/internal/provider/slo_data_source.go b/internal/provider/slo_data_source.go new file mode 100644 index 00000000..278c6fbe --- /dev/null +++ b/internal/provider/slo_data_source.go @@ -0,0 +1,118 @@ +package provider + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + + "github.com/honeycombio/terraform-provider-honeycombio/client" +) + +// Ensure the implementation satisfies the expected interfaces. +var ( + _ datasource.DataSource = &sloDataSource{} + _ datasource.DataSourceWithConfigure = &sloDataSource{} +) + +func NewSLODataSource() datasource.DataSource { + return &sloDataSource{} +} + +// sloDataSource is the data source implementation. 
+type sloDataSource struct { + client *client.Client +} + +type sloDataSourceModel struct { + ID types.String `tfsdk:"id"` + Dataset types.String `tfsdk:"dataset"` + Name types.String `tfsdk:"name"` + Description types.String `tfsdk:"description"` + SLI types.String `tfsdk:"sli"` + TargetPercentage types.Float64 `tfsdk:"target_percentage"` + TimePeriod types.Int64 `tfsdk:"time_period"` +} + +func (d *sloDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_slo" +} + +func (d *sloDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + Description: "Fetches an SLO from a dataset", + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Description: "The ID of the SLO to fetch.", + Required: true, + }, + "dataset": schema.StringAttribute{ + Description: "The dataset to fetch the SLO from.", + Required: true, + }, + "name": schema.StringAttribute{ + Description: "The name of the SLO.", + Computed: true, + Optional: false, + Required: false, + }, + "description": schema.StringAttribute{ + Description: "The SLO's description.", + Computed: true, + Optional: false, + Required: false, + }, + "sli": schema.StringAttribute{ + Description: "The alias of the Derived Column used as the SLO's SLI.", + Computed: true, + Optional: false, + Required: false, + }, + "target_percentage": schema.Float64Attribute{ + Description: "The percentage of qualified events expected to succeed during the `time_period`.", + Computed: true, + Optional: false, + Required: false, + }, + "time_period": schema.Int64Attribute{ + Description: "The time period, in days, over which the SLO is evaluated.", + Computed: true, + Optional: false, + Required: false, + }, + }, + } +} + +func (d *sloDataSource) Configure(_ context.Context, req datasource.ConfigureRequest, _ *datasource.ConfigureResponse) { + d.client = 
getClientFromDatasourceRequest(&req) +} + +func (d *sloDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data sloDataSourceModel + + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + slo, err := d.client.SLOs.Get(ctx, data.Dataset.ValueString(), data.ID.ValueString()) + if err != nil { + resp.Diagnostics.AddError( + fmt.Sprintf("Unable to lookup SLO \"%s\"", data.ID.ValueString()), + err.Error()) + return + } + + data.ID = types.StringValue(slo.ID) + data.Name = types.StringValue(slo.Name) + data.Description = types.StringValue(slo.Description) + data.SLI = types.StringValue(slo.SLI.Alias) + data.TargetPercentage = types.Float64Value(float64(slo.TargetPerMillion) / 10000) + data.TimePeriod = types.Int64Value(int64(slo.TimePeriodDays)) + + diags := resp.State.Set(ctx, &data) + resp.Diagnostics.Append(diags...) +} diff --git a/internal/provider/slo_data_source_test.go b/internal/provider/slo_data_source_test.go new file mode 100644 index 00000000..97d8aca2 --- /dev/null +++ b/internal/provider/slo_data_source_test.go @@ -0,0 +1,69 @@ +package provider + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/honeycombio/terraform-provider-honeycombio/client" +) + +func TestAcc_SLODataSource(t *testing.T) { + ctx := context.Background() + c := testAccClient(t) + dataset := testAccDataset() + + sli, err := c.DerivedColumns.Create(ctx, dataset, &client.DerivedColumn{ + Alias: acctest.RandString(4) + "_sli", + Description: "test SLI", + Expression: "BOOL(1)", + }) + if err != nil { + t.Error(err) + } + slo, err := c.SLOs.Create(ctx, dataset, &client.SLO{ + Name: acctest.RandString(4) + "_slo", + Description: "test SLO", + TimePeriodDays: 30, + TargetPerMillion: 995000, + SLI: client.SLIRef{Alias: sli.Alias}, + }) + if err != nil 
{ + t.Error(err) + } + + //nolint:errcheck + t.Cleanup(func() { + c.SLOs.Delete(ctx, dataset, slo.ID) + c.DerivedColumns.Delete(ctx, dataset, sli.ID) + }) + + resource.Test(t, resource.TestCase{ + PreCheck: testAccPreCheck(t), + ProtoV5ProviderFactories: testAccProtoV5ProviderFactory, + Steps: []resource.TestStep{ + { + Config: testAccSLODataSourceConfig(dataset, slo.ID), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("data.honeycombio_slo.test", "name", slo.Name), + resource.TestCheckResourceAttr("data.honeycombio_slo.test", "description", slo.Description), + resource.TestCheckResourceAttr("data.honeycombio_slo.test", "sli", slo.SLI.Alias), + resource.TestCheckResourceAttr("data.honeycombio_slo.test", "target_percentage", "99.5"), + resource.TestCheckResourceAttr("data.honeycombio_slo.test", "time_period", "30"), + ), + }, + }, + }) +} + +func testAccSLODataSourceConfig(dataset, id string) string { + return fmt.Sprintf(` +data "honeycombio_slo" "test" { + id = "%s" + dataset = "%s" +} +`, id, dataset) +} diff --git a/internal/provider/slos_data_source.go b/internal/provider/slos_data_source.go new file mode 100644 index 00000000..520014c7 --- /dev/null +++ b/internal/provider/slos_data_source.go @@ -0,0 +1,147 @@ +package provider + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + + "github.com/honeycombio/terraform-provider-honeycombio/client" + "github.com/honeycombio/terraform-provider-honeycombio/internal/helper/filter" + 
"github.com/honeycombio/terraform-provider-honeycombio/internal/helper/hashcode" + "github.com/honeycombio/terraform-provider-honeycombio/internal/helper/validation" +) + +// Ensure the implementation satisfies the expected interfaces. +var ( + _ datasource.DataSource = &slosDataSource{} + _ datasource.DataSourceWithConfigure = &slosDataSource{} +) + +func NewSLOsDataSource() datasource.DataSource { + return &slosDataSource{} +} + +// slosDataSource is the data source implementation. +type slosDataSource struct { + client *client.Client +} + +type slosDataSourceModel struct { + ID types.String `tfsdk:"id"` + Dataset types.String `tfsdk:"dataset"` + DetailFilter []slosDetailFilter `tfsdk:"detail_filter"` + SLOs []types.String `tfsdk:"ids"` +} + +type slosDetailFilter struct { + Name types.String `tfsdk:"name"` + Value types.String `tfsdk:"value"` + ValueRegex types.String `tfsdk:"value_regex"` +} + +func (f *slosDetailFilter) SLOFilter() (*filter.SLODetailFilter, error) { + if f == nil { + return nil, nil + } + return filter.NewDetailSLOFilter(f.Name.ValueString(), f.Value.ValueString(), f.ValueRegex.ValueString()) +} + +func (d *slosDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_slos" +} + +func (d *slosDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + Description: "Fetches the SLOs in a dataset.", + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Computed: true, + }, + "dataset": schema.StringAttribute{ + Description: "The dataset to fetch the SLOs from.", + Required: true, + }, + "ids": schema.ListAttribute{ + Description: "The list of SLO IDs.", + Computed: true, + Optional: false, + Required: false, + ElementType: types.StringType, + }, + }, + Blocks: map[string]schema.Block{ + "detail_filter": schema.ListNestedBlock{ + Description: "Attributes to 
filter the SLOs with. `name` must be set when providing a filter.", + Validators: []validator.List{listvalidator.SizeAtMost(1)}, + NestedObject: schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + "name": schema.StringAttribute{ + Required: true, + Description: "The name of the detail field to filter by.", + Validators: []validator.String{stringvalidator.OneOf("name")}, + }, + "value": schema.StringAttribute{ + Optional: true, + Description: "The value of the detail field to match on.", + Validators: []validator.String{ + stringvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("value_regex")), + }, + }, + "value_regex": schema.StringAttribute{ + Optional: true, + Description: "A regular expression string to apply to the value of the detail field to match on.", + Validators: []validator.String{ + stringvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("value")), + validation.IsValidRegExp(), + }, + }, + }, + }, + }, + }, + } +} + +func (d *slosDataSource) Configure(_ context.Context, req datasource.ConfigureRequest, _ *datasource.ConfigureResponse) { + d.client = getClientFromDatasourceRequest(&req) +} + +func (d *slosDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data slosDataSourceModel + + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) 
+ if resp.Diagnostics.HasError() { + return + } + + slos, err := d.client.SLOs.List(ctx, data.Dataset.ValueString()) + if err != nil { + resp.Diagnostics.AddError("Unable to list SLOs", err.Error()) + return + } + + var sloFilter *filter.SLODetailFilter + if len(data.DetailFilter) > 0 { + sloFilter, err = data.DetailFilter[0].SLOFilter() + if err != nil { + resp.Diagnostics.AddError("Unable to create SLO filter", err.Error()) + return + } + } + for _, s := range slos { + if sloFilter != nil && !sloFilter.Match(s) { + continue + } + data.SLOs = append(data.SLOs, types.StringValue(s.ID)) + } + data.ID = types.StringValue(hashcode.StringValues(data.SLOs)) + + diags := resp.State.Set(ctx, &data) + resp.Diagnostics.Append(diags...) +} diff --git a/internal/provider/slos_data_source_test.go b/internal/provider/slos_data_source_test.go new file mode 100644 index 00000000..d92261e5 --- /dev/null +++ b/internal/provider/slos_data_source_test.go @@ -0,0 +1,124 @@ +package provider + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/honeycombio/terraform-provider-honeycombio/client" +) + +func TestAcc_SLOsDataSource(t *testing.T) { + ctx := context.Background() + c := testAccClient(t) + dataset := testAccDataset() + testPrefix := acctest.RandString(8) + + testData := []struct { + SLI client.DerivedColumn + SLO client.SLO + }{ + { + SLI: client.DerivedColumn{ + Alias: testPrefix + "_sli1", + Expression: "BOOL(1)", + }, + SLO: client.SLO{ + Name: testPrefix + "_slo1", + SLI: client.SLIRef{Alias: testPrefix + "_sli1"}, + TimePeriodDays: 30, + TargetPerMillion: 995000, + }, + }, + { + SLI: client.DerivedColumn{ + Alias: testPrefix + "_sli2", + Expression: "BOOL(1)", + }, + SLO: client.SLO{ + Name: testPrefix + "_slo2", + SLI: client.SLIRef{Alias: testPrefix + "_sli2"}, + TimePeriodDays: 30, + TargetPerMillion: 995000, + }, + }, + { + SLI: 
client.DerivedColumn{ + Alias: testPrefix + "_sli3", + Expression: "BOOL(1)", + }, + SLO: client.SLO{ + // different prefix for all vs filtered testing + Name: acctest.RandString(8) + "_slo", + SLI: client.SLIRef{Alias: testPrefix + "_sli3"}, + TimePeriodDays: 30, + TargetPerMillion: 995000, + }, + }, + } + + for i, tc := range testData { + sli, err := c.DerivedColumns.Create(ctx, dataset, &tc.SLI) + if err != nil { + t.Error(err) + } + + slo, err := c.SLOs.Create(ctx, dataset, &tc.SLO) + if err != nil { + t.Error(err) + } + + // update IDs for removal later + testData[i].SLI.ID = sli.ID + testData[i].SLO.ID = slo.ID + } + + //nolint:errcheck + t.Cleanup(func() { + // remove SLOs at the end of the test run + for _, tc := range testData { + c.SLOs.Delete(ctx, dataset, tc.SLO.ID) + c.DerivedColumns.Delete(ctx, dataset, tc.SLI.ID) + } + }) + + resource.Test(t, resource.TestCase{ + PreCheck: testAccPreCheck(t), + ProtoV5ProviderFactories: testAccProtoV5ProviderFactory, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(` +data "honeycombio_slos" "all" { + dataset = "%[1]s" +} + +data "honeycombio_slos" "regex" { + dataset = "%[1]s" + + detail_filter { + name = "name" + value_regex = "%[2]s*" + } +} + +data "honeycombio_slos" "exact" { + dataset = "%[1]s" + + detail_filter { + name = "name" + value = "%[2]s_slo1" + } +} +`, dataset, testPrefix), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("data.honeycombio_slos.all", "ids.#", "3"), + resource.TestCheckResourceAttr("data.honeycombio_slos.regex", "ids.#", "2"), + resource.TestCheckResourceAttr("data.honeycombio_slos.exact", "ids.#", "1"), + ), + }, + }, + }) +}