From 6e18265a65e3a78736cb7cfdd17cd1c09d978468 Mon Sep 17 00:00:00 2001 From: dobarx Date: Fri, 3 Jan 2025 11:55:46 +0200 Subject: [PATCH] Async data & content evaluation --- .../builtin/content-providers/sleep.md | 42 +++ docs/plugins/builtin/data-sources/sleep.md | 40 +++ docs/plugins/plugins.json | 14 + engine/options.go | 12 +- eval/document.go | 74 +---- eval/evaluator.go | 253 ++++++++++++++++++ eval/plugin_content_action.go | 2 + examples/templates/async/example.fabric | 61 +++++ internal/builtin/content_frontmatter_test.go | 4 +- internal/builtin/content_sleep.go | 59 ++++ internal/builtin/content_sleep_test.go | 73 +++++ internal/builtin/data_sleep.go | 62 +++++ internal/builtin/data_sleep_test.go | 84 ++++++ internal/builtin/plugin.go | 14 +- internal/builtin/plugin_test.go | 4 +- internal/plugin_validity_test.go | 5 +- parser/definitions/definitions.go | 1 + parser/definitions/parsed_plugin.go | 1 + parser/parse_plugin.go | 13 + plugin/content.go | 3 + tools/docgen/main.go | 3 +- 21 files changed, 747 insertions(+), 77 deletions(-) create mode 100644 docs/plugins/builtin/content-providers/sleep.md create mode 100644 docs/plugins/builtin/data-sources/sleep.md create mode 100644 eval/evaluator.go create mode 100644 examples/templates/async/example.fabric create mode 100644 internal/builtin/content_sleep.go create mode 100644 internal/builtin/content_sleep_test.go create mode 100644 internal/builtin/data_sleep.go create mode 100644 internal/builtin/data_sleep_test.go diff --git a/docs/plugins/builtin/content-providers/sleep.md b/docs/plugins/builtin/content-providers/sleep.md new file mode 100644 index 00000000..d6489d1c --- /dev/null +++ b/docs/plugins/builtin/content-providers/sleep.md @@ -0,0 +1,42 @@ +--- +title: "`sleep` content provider" +plugin: + name: blackstork/builtin + description: "Sleeps for the specified duration. Useful for testing and debugging" + tags: ["debug"] + version: "v0.4.2" + source_github: "https://github.com/blackstork-io/fabric/tree/main/internal/builtin/" +resource: + type: content-provider +type: docs +--- + +{{< breadcrumbs 2 >}} + +{{< plugin-resource-header "blackstork/builtin" "builtin" "v0.4.2" "sleep" "content provider" >}} + +## Description +Sleeps for the specified duration. Useful for testing and debugging. + +The content provider is built-in, which means it's a part of `fabric` binary. It's available out-of-the-box, no installation required. + + +#### Configuration + +The content provider doesn't support any configuration arguments. + +#### Usage + +The content provider supports the following execution arguments: + +```hcl +content sleep { + # Duration to sleep + # + # Optional string. + # Must be non-empty + # Default value: + duration = "1s" +} +``` + diff --git a/docs/plugins/builtin/data-sources/sleep.md b/docs/plugins/builtin/data-sources/sleep.md new file mode 100644 index 00000000..1cc25eb8 --- /dev/null +++ b/docs/plugins/builtin/data-sources/sleep.md @@ -0,0 +1,40 @@ +--- +title: "`sleep` data source" +plugin: + name: blackstork/builtin + description: "Sleeps for the specified duration. Useful for testing and debugging" + tags: ["debug"] + version: "v0.4.2" + source_github: "https://github.com/blackstork-io/fabric/tree/main/internal/builtin/" +resource: + type: data-source +type: docs +--- + +{{< breadcrumbs 2 >}} + +{{< plugin-resource-header "blackstork/builtin" "builtin" "v0.4.2" "sleep" "data source" >}} + +## Description +Sleeps for the specified duration. Useful for testing and debugging. 
+ +The data source is built-in, which means it's a part of `fabric` binary. It's available out-of-the-box, no installation required. + +## Configuration + +The data source doesn't support any configuration arguments. + +## Usage + +The data source supports the following execution arguments: + +```hcl +data sleep { + # Duration to sleep + # + # Optional string. + # Must be non-empty + # Default value: + duration = "1s" +} +``` \ No newline at end of file diff --git a/docs/plugins/plugins.json b/docs/plugins/plugins.json index 68dd5aab..86d2b18b 100644 --- a/docs/plugins/plugins.json +++ b/docs/plugins/plugins.json @@ -123,6 +123,20 @@ "url" ] }, + { + "name": "sleep", + "type": "data-source", + "arguments": [ + "duration" + ] + }, + { + "name": "sleep", + "type": "content-provider", + "arguments": [ + "duration" + ] + }, { "name": "table", "type": "content-provider", diff --git a/engine/options.go b/engine/options.go index 753c5e19..f0e7986b 100644 --- a/engine/options.go +++ b/engine/options.go @@ -26,14 +26,16 @@ type Options struct { tracer trace.Tracer } +var defaultLogger = slog.New(slog.NewTextHandler(io.Discard, &slog.HandlerOptions{ + Level: slog.LevelError, +})) + var defaultOptions = Options{ registryBaseURL: defaultRegistryBaseURL, cacheDir: defaultCacheDir, - logger: slog.New(slog.NewTextHandler(io.Discard, &slog.HandlerOptions{ - Level: slog.LevelError, - })), - tracer: nooptrace.Tracer{}, - builtin: builtin.Plugin("v0.0.0", nil, nil), + logger: defaultLogger, + tracer: nooptrace.Tracer{}, + builtin: builtin.Plugin("v0.0.0", defaultLogger, nil), } type Option func(*Options) diff --git a/eval/document.go b/eval/document.go index 20fa634c..119edae8 100644 --- a/eval/document.go +++ b/eval/document.go @@ -3,7 +3,6 @@ package eval import ( "context" "log/slog" - "maps" "slices" "github.com/hashicorp/hcl/v2" @@ -25,33 +24,8 @@ type Document struct { } func (doc *Document) FetchData(ctx context.Context) (plugindata.Data, diagnostics.Diag) { - logger := *slog.Default() - logger.DebugContext(ctx, "Fetching data for the document template") - result := make(plugindata.Map) - diags := diagnostics.Diag{} - for _, block := range doc.DataBlocks { - var dsMap plugindata.Map - found, ok := result[block.PluginName] - if ok { - dsMap = found.(plugindata.Map) - } else { - dsMap = make(plugindata.Map) - result[block.PluginName] = dsMap - } - if _, found := dsMap[block.BlockName]; found { - diags.Append(&hcl.Diagnostic{ - Severity: hcl.DiagWarning, - Summary: "Data conflict", - Detail: "Result of this block overwrites results from the previous invocation.", - Subject: &block.SrcRange, - }) - } - dsMap[block.BlockName], diags = block.FetchData(ctx) - if diags.HasErrors() { - return nil, diags - } - } - return result, diags + evaluator := makeAsyncDataEvaluator(ctx, doc, slog.Default()) + return evaluator.Execute() } func filterChildrenByTags(children []*Content, requiredTags []string) []*Content { @@ -71,8 +45,8 @@ func filterChildrenByTags(children []*Content, requiredTags []string) []*Content } func (doc *Document) RenderContent(ctx context.Context, docDataCtx plugindata.Map, requiredTags []string) (*plugin.ContentSection, plugindata.Data, diagnostics.Diag) { - logger := *slog.Default() - logger.DebugContext(ctx, "Fetching data for the document template") + logger := slog.Default() + logger.WarnContext(ctx, "Render content for the document template", "document", doc.Source.Name) data, diags := doc.FetchData(ctx) if diags.HasErrors() { return nil, nil, diags @@ -110,40 +84,16 @@ func (doc 
*Document) RenderContent(ctx context.Context, docDataCtx plugindata.Ma
 		children = filterChildrenByTags(children, requiredTags)
 	}
-	result := plugin.NewSection(0)
-	// create a position map for content blocks
-	posMap := make(map[int]uint32, len(children))
-	for i := range children {
-		empty := new(plugin.ContentEmpty)
-		result.Add(empty, nil)
-		posMap[i] = empty.ID()
-	}
-	// sort content blocks by invocation order
-	invokeList := make([]int, 0, len(children))
-	for i := range children {
-		invokeList = append(invokeList, i)
+	evaluator, diag := makeAsyncContentEvaluator(ctx, children)
+	if diags.Extend(diag) {
+		return nil, nil, diags
 	}
-	slices.SortStableFunc(invokeList, func(a, b int) int {
-		ao := children[a].InvocationOrder()
-		bo := children[b].InvocationOrder()
-		return ao.Weight() - bo.Weight()
-	})
-	// execute content blocks based on the invocation order
-	for _, idx := range invokeList {
-		// clone the data context for each content block
-		dataCtx := maps.Clone(docDataCtx)
-		// set the current content to the data context
-		dataCtx[definitions.BlockKindDocument].(plugindata.Map)[definitions.BlockKindContent] = result.AsData()
-		// TODO: if section, set section
-
-		// execute the content block
-		diag := children[idx].RenderContent(ctx, dataCtx, result, result, posMap[idx])
-		if diags.Extend(diag) {
-			return nil, nil, diags
-		}
+
+	result, diag := evaluator.Execute(docDataCtx)
+	if diags.Extend(diag) {
+		return nil, nil, diags
 	}
-	// compact the content tree to remove empty content nodes
-	result.Compact()
+
 	return result, docDataCtx, diags
 }
diff --git a/eval/evaluator.go b/eval/evaluator.go
new file mode 100644
index 00000000..bd9f1246
--- /dev/null
+++ b/eval/evaluator.go
@@ -0,0 +1,253 @@
+package eval
+
+import (
+	"context"
+	"fmt"
+	"log/slog"
+	"sync"
+
+	"github.com/hashicorp/hcl/v2"
+
+	"github.com/blackstork-io/fabric/pkg/diagnostics"
+	"github.com/blackstork-io/fabric/plugin"
+	"github.com/blackstork-io/fabric/plugin/plugindata"
+)
+
+type asyncDataEvaluator struct {
+	ctx    context.Context
+	blocks []*PluginDataAction
+	logger *slog.Logger
+}
+
+func makeAsyncDataEvaluator(ctx context.Context, doc *Document, logger *slog.Logger) *asyncDataEvaluator {
+	return &asyncDataEvaluator{
+		ctx:    ctx,
+		blocks: doc.DataBlocks,
+		logger: logger,
+	}
+}
+
+type asyncDataEvalResult struct {
+	pluginName string
+	blockName  string
+	data       plugindata.Data
+	diags      diagnostics.Diag
+}
+
+func (doc *asyncDataEvaluator) Execute() (plugindata.Data, diagnostics.Diag) {
+	doc.logger.DebugContext(doc.ctx, "Fetching data for the document template")
+	resultch := make(chan *asyncDataEvalResult, len(doc.blocks))
+	for _, block := range doc.blocks {
+		go func(block *PluginDataAction, resultch chan<- *asyncDataEvalResult) {
+			doc.logger.DebugContext(doc.ctx, "Fetching data for block", "plugin", block.PluginName, "block", block.BlockName)
+			data, diags := block.FetchData(doc.ctx)
+			resultch <- &asyncDataEvalResult{
+				pluginName: block.PluginName,
+				blockName:  block.BlockName,
+				data:       data,
+				diags:      diags,
+			}
+		}(block, resultch)
+	}
+
+	result := make(plugindata.Map)
+	diags := diagnostics.Diag{}
+
+	for i := 0; i < len(doc.blocks); i++ {
+		res := <-resultch
+		for _, diag := range res.diags {
+			diags.Append(diag)
+		}
+
+		if diags.HasErrors() {
+			return nil, diags
+		}
+
+		var dsMap plugindata.Map
+
+		if found, ok := result[res.pluginName]; ok {
+			dsMap, ok = found.(plugindata.Map)
+			if !ok {
+				return nil, diagnostics.Diag{{
+					Severity: hcl.DiagError,
+					Summary:  "Conflicting data",
+					Detail:   fmt.Sprintf("Data of a different type is already stored for plugin '%s', block '%s'", res.pluginName, res.blockName),
+				}}
+			}
+		} else {
+			dsMap = make(plugindata.Map)
+			result[res.pluginName] = dsMap
+		}
+
+		dsMap[res.blockName] = res.data
+	}
+
+	return result, diags
+}
+
+type asyncContent struct {
+	ctx       context.Context
+	doc       *plugin.ContentSection
+	parent    *plugin.ContentSection
+	contentID uint32
+	content   *Content
+	wg        *sync.WaitGroup
+	dependsOn []*asyncContent
+}
+
+func (ac *asyncContent) Wait() {
+	ac.wg.Wait()
+}
+
+func (ac *asyncContent) Execute(dataCtx plugindata.Map) diagnostics.Diag {
+	for _, dep := range ac.dependsOn {
+		dep.Wait()
+	}
+	diags := ac.content.RenderContent(ac.ctx, dataCtx, ac.doc, ac.parent, ac.contentID)
+	ac.wg.Done()
+	return diags
+}
+
+func makeAsyncContent(
+	ctx context.Context,
+	doc *plugin.ContentSection,
+	parent *plugin.ContentSection,
+	content *Content,
+	dependsOn []*asyncContent,
+) *asyncContent {
+	tmp := new(plugin.ContentEmpty)
+	parent.Add(tmp, nil)
+
+	contentID := tmp.ID()
+	wg := new(sync.WaitGroup)
+	wg.Add(1)
+	return &asyncContent{
+		ctx:       ctx,
+		doc:       doc,
+		parent:    parent,
+		contentID: contentID,
+		content:   content,
+		wg:        wg,
+		dependsOn: dependsOn,
+	}
+}
+
+type asyncContentEvaluator struct {
+	invokeMap map[plugin.InvocationOrder][]*asyncContent
+	namedMap  map[string]*asyncContent
+	rootNode  *plugin.ContentSection
+}
+
+func (ace *asyncContentEvaluator) executeGroup(dataCtx plugindata.Map, invokeOrder plugin.InvocationOrder) diagnostics.Diag {
+	list, ok := ace.invokeMap[invokeOrder]
+	if !ok || len(list) == 0 {
+		return nil
+	}
+
+	diagch := make(chan diagnostics.Diag, len(list))
+
+	for _, ac := range list {
+		go func(ac *asyncContent, dataCtx plugindata.Map, diagch chan<- diagnostics.Diag) {
+			diagch <- ac.Execute(dataCtx)
+		}(ac, dataCtx, diagch)
+	}
+
+	diags := diagnostics.Diag{}
+
+	for i := 0; i < len(list); i++ {
+		diags.Extend(<-diagch)
+	}
+
+	return diags
+}
+
+func (ace *asyncContentEvaluator) Execute(dataCtx plugindata.Map) (*plugin.ContentSection, diagnostics.Diag) {
+	order := []plugin.InvocationOrder{
+		plugin.InvocationOrderBegin,
+		plugin.InvocationOrderUnspecified,
+		plugin.InvocationOrderEnd,
+	}
+
+	diags := diagnostics.Diag{}
+
+	for _, o := range order {
+		diags.Extend(ace.executeGroup(dataCtx, o))
+		if diags.HasErrors() {
+			return nil, diags
+		}
+	}
+
+	ace.rootNode.Compact()
+
+	return ace.rootNode, diags
+}
+
+func makeAsyncContentEvaluator(ctx context.Context, content []*Content) (*asyncContentEvaluator, diagnostics.Diag) {
+	namedMap := make(map[string]*asyncContent)
+	invokeMap := make(map[plugin.InvocationOrder][]*asyncContent)
+	rootNode := plugin.NewSection(0)

+	diags := diagnostics.Diag{}
+	for _, c := range content {
+		diag := assignAsyncContent(ctx, c, rootNode, rootNode, namedMap, invokeMap)
+		if diags.Extend(diag) {
+			return nil, diags
+		}
+	}
+
+	return &asyncContentEvaluator{
+		invokeMap: invokeMap,
+		namedMap:  namedMap,
+		rootNode:  rootNode,
+	}, diags
+}
+
+func assignAsyncContent(
+	ctx context.Context,
+	c *Content,
+	rootNode *plugin.ContentSection,
+	parent *plugin.ContentSection,
+	namedMap map[string]*asyncContent,
+	invokeMap map[plugin.InvocationOrder][]*asyncContent,
+) diagnostics.Diag {
+	diags := diagnostics.Diag{}
+	switch {
+	case c.Plugin != nil:
+		var dependsOn []*asyncContent
+		if len(c.Plugin.DependsOn) > 0 {
+			for _, depName := range c.Plugin.DependsOn {
+				dep, ok := namedMap[depName]
+				if !ok {
+					return diagnostics.Diag{{
+						Severity: hcl.DiagError,
+						Summary:  "Dependency not found",
+						Detail:   fmt.Sprintf("Content block '%s' depends on '%s', which is not defined earlier in the document", c.Plugin.BlockName, depName),
+					}}
+				}
+
+				dependsOn = append(dependsOn, dep)
+			}
+		}
+
+		ac := makeAsyncContent(ctx, rootNode, parent, c, dependsOn)
+		namedMap[c.Plugin.BlockName] = ac
+		invokeMap[c.Plugin.Provider.InvocationOrder] = append(invokeMap[c.Plugin.Provider.InvocationOrder], ac)
+	case c.Section != nil:
+		tmp := plugin.NewSection(0)
+		parent.Add(tmp, nil)
+		for _, child := range c.Section.children {
+			diag := assignAsyncContent(ctx, child, rootNode, tmp, namedMap, invokeMap)
+			if diags.Extend(diag) {
+				return diags
+			}
+		}
+	default:
+		return diagnostics.Diag{{
+			Severity: hcl.DiagError,
+			Summary:  "Invalid content",
+			Detail:   "Content block must be either a plugin or a section",
+		}}
+	}
+
+	return diags
+}
diff --git a/eval/plugin_content_action.go b/eval/plugin_content_action.go
index 75aae4ca..a3d602d5 100644
--- a/eval/plugin_content_action.go
+++ b/eval/plugin_content_action.go
@@ -21,6 +21,7 @@ type PluginContentAction struct {
 	Provider     *plugin.ContentProvider
 	Vars         *definitions.ParsedVars
 	RequiredVars []string
+	DependsOn    []string
 }
 
 func (action *PluginContentAction) RenderContent(ctx context.Context, dataCtx plugindata.Map, doc, parent *plugin.ContentSection, contentID uint32) (diags diagnostics.Diag) {
@@ -135,6 +136,7 @@ func LoadPluginContentAction(ctx context.Context, providers ContentProviders, no
 		Provider:     cp,
 		Vars:         node.Vars,
 		RequiredVars: node.RequiredVars,
+		DependsOn:    node.DependsOn,
 	}, diags
 }
diff --git a/examples/templates/async/example.fabric b/examples/templates/async/example.fabric
new file mode 100644
index 00000000..f55df8d7
--- /dev/null
+++ b/examples/templates/async/example.fabric
@@ -0,0 +1,61 @@
+document "example" {
+  title = "Document title"
+
+  data sleep data_a {
+    duration = "2s"
+  }
+  data sleep data_b {
+    duration = "1s"
+  }
+
+  data sleep data_c {
+    duration = "1s"
+  }
+
+  data sleep data_d {
+    duration = "3s"
+  }
+
+  content title {
+    value = "A"
+  }
+
+  content sleep content_a {
+    duration = "2s"
+  }
+
+  content title {
+    value = "B"
+  }
+
+  content sleep content_b {
+    duration = "1.5s"
+  }
+
+  content title {
+    value = "C"
+  }
+
+  content sleep content_c {
+    duration = "3.5s"
+    depends_on = ["content_a", "content_b"]
+  }
+
+  content title {
+    value = "D"
+  }
+
+  content sleep content_d {
+    duration = "3s"
+  }
+
+  section {
+    title = "Section 1"
+
+    content sleep content_e {
+      depends_on = ["content_c"]
+      duration = "0.5s"
+    }
+  }
+}
+
diff --git a/internal/builtin/content_frontmatter_test.go b/internal/builtin/content_frontmatter_test.go
index 69781291..e58169bb 100644
--- a/internal/builtin/content_frontmatter_test.go
+++ b/internal/builtin/content_frontmatter_test.go
@@ -4,6 +4,8 @@ import (
 	"bytes"
 	"context"
 	"encoding/json"
+	"io"
+	"log/slog"
 	"testing"
 
 	"github.com/hashicorp/hcl/v2/hclwrite"
@@ -84,7 +86,7 @@ func (s *FrontMatterGeneratorTestSuite) parseFrontmatter(contentStr string) (for
 }
 
 func (s *FrontMatterGeneratorTestSuite) SetupSuite() {
-	s.schema = Plugin("", nil, nil)
+	s.schema = Plugin("", slog.New(slog.NewTextHandler(io.Discard, nil)), nil)
 }
 
 func (s *FrontMatterGeneratorTestSuite) TestSchema() {
diff --git a/internal/builtin/content_sleep.go b/internal/builtin/content_sleep.go
new file mode 100644
index 00000000..74fed1c0
--- /dev/null
+++ b/internal/builtin/content_sleep.go
@@ -0,0 +1,59 @@
+package builtin
+
+import (
+	"context"
+	"fmt"
+	"log/slog"
+	"time"
+
"github.com/hashicorp/hcl/v2" + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/pkg/diagnostics" + "github.com/blackstork-io/fabric/plugin" + "github.com/blackstork-io/fabric/plugin/dataspec" + "github.com/blackstork-io/fabric/plugin/dataspec/constraint" +) + +func makeSleepContentProvider(logger *slog.Logger) *plugin.ContentProvider { + logger = logger.With("content_provider", "sleep") + + return &plugin.ContentProvider{ + Doc: ` + Sleeps for the specified duration. Useful for testing and debugging. + `, + Tags: []string{"debug"}, + Args: &dataspec.RootSpec{ + Attrs: []*dataspec.AttrSpec{ + { + Name: "duration", + Type: cty.String, + Doc: "Duration to sleep", + Constraints: constraint.Meaningful, + DefaultVal: cty.StringVal("1s"), + }, + }, + }, + ContentFunc: func(ctx context.Context, params *plugin.ProvideContentParams) (*plugin.ContentResult, diagnostics.Diag) { + duration, err := time.ParseDuration(params.Args.GetAttrVal("duration").AsString()) + if err != nil { + return nil, diagnostics.Diag{ + { + Severity: hcl.DiagError, + Summary: "Invalid duration", + Detail: err.Error(), + }, + } + } + + logger.WarnContext(ctx, "Sleeping", "duration", duration) + time.Sleep(duration) + + return &plugin.ContentResult{ + Content: plugin.NewElementFromMarkdown( + fmt.Sprintf("Slept for %s.", duration), + ), + }, nil + }, + } +} diff --git a/internal/builtin/content_sleep_test.go b/internal/builtin/content_sleep_test.go new file mode 100644 index 00000000..53d365e1 --- /dev/null +++ b/internal/builtin/content_sleep_test.go @@ -0,0 +1,73 @@ +package builtin + +import ( + "context" + "io" + "log/slog" + "testing" + + "github.com/hashicorp/hcl/v2" + "github.com/stretchr/testify/suite" + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/plugin" + "github.com/blackstork-io/fabric/plugin/plugintest" + "github.com/blackstork-io/fabric/print/mdprint" +) + +type SleepContentTestSuite struct { + suite.Suite + schema *plugin.ContentProvider +} + +func TestSleepContentSuite(t *testing.T) { + suite.Run(t, &SleepContentTestSuite{}) +} + +func (s *SleepContentTestSuite) SetupSuite() { + s.schema = makeSleepContentProvider(slog.New(slog.NewTextHandler(io.Discard, nil))) +} + +func (s *SleepContentTestSuite) TestSchema() { + s.Nil(s.schema.Config) + s.NotNil(s.schema.Args) + s.NotNil(s.schema.ContentFunc) +} + +func (s *SleepContentTestSuite) TestMissingDuration() { + plugintest.NewTestDecoder(s.T(), s.schema.Args).Decode() +} + +func (s *SleepContentTestSuite) TestCustom() { + ctx := context.Background() + result, diags := s.schema.ContentFunc(ctx, &plugin.ProvideContentParams{ + Args: plugintest.NewTestDecoder(s.T(), s.schema.Args). + SetAttr("duration", cty.StringVal("123ms")). + Decode(), + }) + s.Require().Empty(diags) + s.Equal("Slept for 123ms.", mdprint.PrintString(result.Content)) +} + +func (s *SleepContentTestSuite) TestDefault() { + ctx := context.Background() + result, diags := s.schema.ContentFunc(ctx, &plugin.ProvideContentParams{ + Args: plugintest.NewTestDecoder(s.T(), s.schema.Args).Decode(), + DataContext: nil, + }) + s.Require().Empty(diags) + s.Equal("Slept for 1s.", mdprint.PrintString(result.Content)) +} + +func (s *SleepContentTestSuite) TestCallInvalidDuration() { + ctx := context.Background() + result, diags := s.schema.ContentFunc(ctx, &plugin.ProvideContentParams{ + Args: plugintest.NewTestDecoder(s.T(), s.schema.Args). + SetAttr("duration", cty.StringVal("invalid")). 
+ Decode(), + }) + s.Require().Nil(result) + s.Require().Len(diags, 1) + s.Equal(hcl.DiagError, diags[0].Severity) + s.Equal("Invalid duration", diags[0].Summary) +} diff --git a/internal/builtin/data_sleep.go b/internal/builtin/data_sleep.go new file mode 100644 index 00000000..acdca9aa --- /dev/null +++ b/internal/builtin/data_sleep.go @@ -0,0 +1,62 @@ +package builtin + +import ( + "context" + "log/slog" + "time" + + "github.com/hashicorp/hcl/v2" + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/pkg/diagnostics" + "github.com/blackstork-io/fabric/plugin" + "github.com/blackstork-io/fabric/plugin/dataspec" + "github.com/blackstork-io/fabric/plugin/dataspec/constraint" + "github.com/blackstork-io/fabric/plugin/plugindata" +) + +func makeSleepDataSource(logger *slog.Logger) *plugin.DataSource { + logger = logger.With("data_source", "sleep") + + return &plugin.DataSource{ + Doc: ` + Sleeps for the specified duration. Useful for testing and debugging. + `, + Tags: []string{"debug"}, + Args: &dataspec.RootSpec{ + Attrs: []*dataspec.AttrSpec{ + { + Name: "duration", + Type: cty.String, + Doc: "Duration to sleep", + Constraints: constraint.Meaningful, + DefaultVal: cty.StringVal("1s"), + }, + }, + }, + DataFunc: func(ctx context.Context, params *plugin.RetrieveDataParams) (plugindata.Data, diagnostics.Diag) { + duration, err := time.ParseDuration(params.Args.GetAttrVal("duration").AsString()) + if err != nil { + return nil, diagnostics.Diag{ + { + Severity: hcl.DiagError, + Summary: "Invalid duration", + Detail: err.Error(), + }, + } + } + + logger.WarnContext(ctx, "Sleeping", "duration", duration) + + startTime := time.Now() + time.Sleep(duration) + endTime := time.Now() + + return plugindata.Map{ + "start_time": plugindata.String(startTime.Format(time.RFC3339)), + "took": plugindata.String(duration.String()), + "end_time": plugindata.String(endTime.Format(time.RFC3339)), + }, nil + }, + } +} diff --git a/internal/builtin/data_sleep_test.go b/internal/builtin/data_sleep_test.go new file mode 100644 index 00000000..bdf0783d --- /dev/null +++ b/internal/builtin/data_sleep_test.go @@ -0,0 +1,84 @@ +package builtin + +import ( + "context" + "io" + "log/slog" + "testing" + + "github.com/hashicorp/hcl/v2" + "github.com/stretchr/testify/suite" + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/plugin" + "github.com/blackstork-io/fabric/plugin/plugindata" + "github.com/blackstork-io/fabric/plugin/plugintest" +) + +type SleepDataTestSuite struct { + suite.Suite + schema *plugin.DataSource +} + +func TestSleepDataSuite(t *testing.T) { + suite.Run(t, &SleepDataTestSuite{}) +} + +func (s *SleepDataTestSuite) SetupSuite() { + s.schema = makeSleepDataSource(slog.New(slog.NewTextHandler(io.Discard, nil))) +} + +func (s *SleepDataTestSuite) TestSchema() { + s.Nil(s.schema.Config) + s.NotNil(s.schema.Args) + s.NotNil(s.schema.DataFunc) +} + +func (s *SleepDataTestSuite) TestMissingDuration() { + plugintest.NewTestDecoder(s.T(), s.schema.Args).Decode() +} + +func (s *SleepDataTestSuite) TestCustom() { + ctx := context.Background() + result, diags := s.schema.DataFunc(ctx, &plugin.RetrieveDataParams{ + Args: plugintest.NewTestDecoder(s.T(), s.schema.Args). + SetAttr("duration", cty.StringVal("123ms")). 
+ Decode(), + }) + s.Empty(diags) + + s.Require().IsType(plugindata.Map{}, result) + + resultMap := result.(plugindata.Map) + s.Require().Equal(plugindata.String("123ms"), resultMap["took"]) + s.NotEmpty(resultMap["start_time"]) + s.NotEmpty(resultMap["end_time"]) +} + +func (s *SleepDataTestSuite) TestDefault() { + ctx := context.Background() + result, diags := s.schema.DataFunc(ctx, &plugin.RetrieveDataParams{ + Args: plugintest.NewTestDecoder(s.T(), s.schema.Args).Decode(), + }) + s.Require().Empty(diags) + + s.Require().IsType(plugindata.Map{}, result) + + resultMap := result.(plugindata.Map) + s.Require().Equal(plugindata.String("1s"), resultMap["took"]) + s.NotEmpty(resultMap["start_time"]) + s.NotEmpty(resultMap["end_time"]) +} + +func (s *SleepDataTestSuite) TestCallInvalidDuration() { + ctx := context.Background() + result, diags := s.schema.DataFunc(ctx, &plugin.RetrieveDataParams{ + Args: plugintest.NewTestDecoder(s.T(), s.schema.Args). + SetAttr("duration", cty.StringVal("invalid")). + Decode(), + }) + s.Require().Nil(result) + s.Require().Len(diags, 1) + s.Equal(hcl.DiagError, diags[0].Severity) + s.Equal("Invalid duration", diags[0].Summary) +} diff --git a/internal/builtin/plugin.go b/internal/builtin/plugin.go index b401bc48..05f77e5d 100644 --- a/internal/builtin/plugin.go +++ b/internal/builtin/plugin.go @@ -15,12 +15,13 @@ func Plugin(version string, logger *slog.Logger, tracer trace.Tracer) *plugin.Sc Name: Name, Version: version, DataSources: plugin.DataSources{ - "csv": makeCSVDataSource(), - "txt": makeTXTDataSource(), - "rss": makeRSSDataSource(), - "json": makeJSONDataSource(), - "yaml": makeYAMLDataSource(), - "http": makeHTTPDataSource(version), + "csv": makeCSVDataSource(), + "txt": makeTXTDataSource(), + "rss": makeRSSDataSource(), + "json": makeJSONDataSource(), + "yaml": makeYAMLDataSource(), + "http": makeHTTPDataSource(version), + "sleep": makeSleepDataSource(logger), }, ContentProviders: plugin.ContentProviders{ "toc": makeTOCContentProvider(), @@ -32,6 +33,7 @@ func Plugin(version string, logger *slog.Logger, tracer trace.Tracer) *plugin.Sc "list": makeListContentProvider(), "table": makeTableContentProvider(), "frontmatter": makeFrontMatterContentProvider(), + "sleep": makeSleepContentProvider(logger), }, Publishers: plugin.Publishers{ "local_file": makeLocalFilePublisher(logger, tracer), diff --git a/internal/builtin/plugin_test.go b/internal/builtin/plugin_test.go index 50d0fb4d..f4cf1f45 100644 --- a/internal/builtin/plugin_test.go +++ b/internal/builtin/plugin_test.go @@ -1,13 +1,15 @@ package builtin import ( + "io" + "log/slog" "testing" "github.com/stretchr/testify/assert" ) func TestPluginSchema(t *testing.T) { - schema := Plugin("1.2.3", nil, nil) + schema := Plugin("1.2.3", slog.New(slog.NewTextHandler(io.Discard, nil)), nil) assert.Equal(t, "blackstork/builtin", schema.Name) assert.Equal(t, "1.2.3", schema.Version) assert.NotNil(t, schema.DataSources["csv"]) diff --git a/internal/plugin_validity_test.go b/internal/plugin_validity_test.go index efc5feda..5f63078e 100644 --- a/internal/plugin_validity_test.go +++ b/internal/plugin_validity_test.go @@ -1,6 +1,8 @@ package internal import ( + "io" + "log/slog" "strings" "testing" @@ -31,8 +33,9 @@ import ( // TestAllPluginSchemaValidity tests that all plugin schemas are valid func TestAllPluginSchemaValidity(t *testing.T) { ver := "1.2.3" + logger := slog.New(slog.NewTextHandler(io.Discard, nil)) plugins := []*plugin.Schema{ - builtin.Plugin(ver, nil, nil), + builtin.Plugin(ver, logger, nil), 
elastic.Plugin(ver, nil), github.Plugin(ver, nil), graphql.Plugin(ver), diff --git a/parser/definitions/definitions.go b/parser/definitions/definitions.go index 589487fc..31bc519d 100644 --- a/parser/definitions/definitions.go +++ b/parser/definitions/definitions.go @@ -20,6 +20,7 @@ const ( PluginTypeRef = "ref" AttrRefBase = "base" AttrTitle = "title" + AttrDependsOn = "depends_on" AttrLocalVar = "local_var" AttrRequiredVars = "required_vars" AttrIsIncluded = "is_included" diff --git a/parser/definitions/parsed_plugin.go b/parser/definitions/parsed_plugin.go index 11637fbd..d45648e2 100644 --- a/parser/definitions/parsed_plugin.go +++ b/parser/definitions/parsed_plugin.go @@ -15,6 +15,7 @@ type ParsedPlugin struct { Invocation *evaluation.BlockInvocation Vars *ParsedVars RequiredVars []string + DependsOn []string IsIncluded *hclsyntax.Attribute } diff --git a/parser/parse_plugin.go b/parser/parse_plugin.go index c8248ed0..3760f0ac 100644 --- a/parser/parse_plugin.go +++ b/parser/parse_plugin.go @@ -140,6 +140,18 @@ func (db *DefinedBlocks) parsePlugin(ctx context.Context, plugin *definitions.Pl diags.Extend(diag) } + depAttrs, depAttrsFound := utils.Pop(body.Attributes, definitions.AttrDependsOn) + if depAttrsFound { + diag := gohcl.DecodeExpression(depAttrs.Expr, nil, &res.DependsOn) + diags.Extend(diag) + } + + // if title := d.Block.Body.Attributes[definitions.AttrTitle]; title != nil { + // titleContent, diag := db.ParseTitle(ctx, title) + // if !diag.Extend(diags) { + // doc.Content = append(doc.Content, titleContent) + // } + // } plugin.Block.Body = body invocation := &evaluation.BlockInvocation{ Block: plugin.Block, @@ -168,6 +180,7 @@ func (db *DefinedBlocks) parsePlugin(ctx context.Context, plugin *definitions.Pl res.Vars = res.Vars.MergeWithBaseVars(baseEval.Vars) res.RequiredVars = append(res.RequiredVars, baseEval.RequiredVars...) + res.DependsOn = append(res.DependsOn, baseEval.DependsOn...) if res.IsIncluded == nil { res.IsIncluded = baseEval.IsIncluded } diff --git a/plugin/content.go b/plugin/content.go index e0db5cc1..0f95c982 100644 --- a/plugin/content.go +++ b/plugin/content.go @@ -97,6 +97,7 @@ type ContentSection struct { id uint32 Children []Content meta *nodes.ContentMeta + mtx sync.Mutex } func NewSection(contentID uint32) *ContentSection { @@ -110,6 +111,8 @@ func NewSection(contentID uint32) *ContentSection { // Add content to the content tree. func (c *ContentSection) Add(content Content, loc *Location) error { + c.mtx.Lock() + defer c.mtx.Unlock() return addContent(c, content, loc) } diff --git a/tools/docgen/main.go b/tools/docgen/main.go index b304414a..8282d675 100644 --- a/tools/docgen/main.go +++ b/tools/docgen/main.go @@ -264,9 +264,10 @@ func main() { flags.StringVar(&version, "version", "v0.0.0-dev", "version of the build") flags.StringVar(&outputDir, "output", "./dist/docs", "output directory") flags.Parse(os.Args[1:]) + logger := slog.New(slog.NewTextHandler(os.Stdout, nil)) // load all plugins plugins := []*plugin.Schema{ - builtin.Plugin(version, nil, nil), + builtin.Plugin(version, logger, nil), elastic.Plugin(version, nil), github.Plugin(version, nil), graphql.Plugin(version),