From c89f50e259f012a36a223d037a167a628ea20305 Mon Sep 17 00:00:00 2001 From: dobarx Date: Tue, 20 Feb 2024 10:24:18 +0200 Subject: [PATCH 1/8] internal/builtin: add toc content provider --- docs/plugins/builtin.md | 19 +++ examples/templates/toc/example.fabric | 48 ++++++ internal/builtin/content_toc.go | 227 ++++++++++++++++++++++++++ internal/builtin/content_toc_test.go | 99 +++++++++++ internal/builtin/plugin.go | 1 + internal/builtin/plugin_test.go | 1 + 6 files changed, 395 insertions(+) create mode 100644 examples/templates/toc/example.fabric create mode 100644 internal/builtin/content_toc.go create mode 100644 internal/builtin/content_toc_test.go diff --git a/docs/plugins/builtin.md b/docs/plugins/builtin.md index 5c3317e2..10bdd836 100644 --- a/docs/plugins/builtin.md +++ b/docs/plugins/builtin.md @@ -161,3 +161,22 @@ content text { text = # required } ``` + +### `toc` + +#### Configuration + +The content provider doesn't support configuration. + +#### Usage + +The content provider supports the following parameters in the content blocks: + +```hcl +content toc { + end_level = # optional + ordered = # optional + scope = # optional + start_level = # optional +} +``` diff --git a/examples/templates/toc/example.fabric b/examples/templates/toc/example.fabric new file mode 100644 index 00000000..2a297a91 --- /dev/null +++ b/examples/templates/toc/example.fabric @@ -0,0 +1,48 @@ + +document "example" { + title = "Using TOC content provider" + + content text { + text = "Sub title 1" + format_as = "title" + absolute_title_size = 2 + } + + content text { + text = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor dolore magna." + } + + content text { + text = "Sub title 2" + format_as = "title" + absolute_title_size = 2 + } + content text { + text = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor dolore magna." + } + content text { + text = "Sub title 3" + format_as = "title" + absolute_title_size = 3 + } + + content text { + text = "Sub title 4.1" + format_as = "title" + absolute_title_size = 4 + } + content text { + text = "Sub title 4.2" + format_as = "title" + absolute_title_size = 4 + } + + content text { + text = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor dolore magna."
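+ # The toc block at the end of this document builds a nested, ordered table of contents from the headings above.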
+ } + content toc { + start_level = 1 + end_level = 4 + ordered = true + } +} \ No newline at end of file diff --git a/internal/builtin/content_toc.go b/internal/builtin/content_toc.go new file mode 100644 index 00000000..4dbe69d3 --- /dev/null +++ b/internal/builtin/content_toc.go @@ -0,0 +1,227 @@ +package builtin + +import ( + "context" + "fmt" + "slices" + "strconv" + "strings" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hcldec" + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/plugin" +) + +const ( + minTOCLevel = 1 + maxTOCLevel = 6 + defaultTOCStartLevel = 1 + defaultTOCEndLevel = 3 + defaultTOCOrdered = false + defaultTOCScope = "document" +) + +var availableTOCScopes = []string{"document", "section"} + +func makeTOCContentProvider() *plugin.ContentProvider { + return &plugin.ContentProvider{ + Args: hcldec.ObjectSpec{ + "start_level": &hcldec.AttrSpec{ + Name: "start_level", + Type: cty.Number, + Required: false, + }, + "end_level": &hcldec.AttrSpec{ + Name: "end_level", + Type: cty.Number, + Required: false, + }, + "ordered": &hcldec.AttrSpec{ + Name: "ordered", + Type: cty.Bool, + Required: false, + }, + "scope": &hcldec.AttrSpec{ + Name: "scope", + Type: cty.String, + Required: false, + }, + }, + ContentFunc: genTOC, + } +} + +type tocArgs struct { + startLevel int + endLevel int + ordered bool + scope string +} + +func parseTOCArgs(args cty.Value) (*tocArgs, error) { + if args.IsNull() { + return nil, fmt.Errorf("arguments are null") + } + startLevel := args.GetAttr("start_level") + if startLevel.IsNull() { + startLevel = cty.NumberIntVal(defaultTOCStartLevel) + } else { + n, _ := startLevel.AsBigFloat().Int64() + if n < minTOCLevel || n > maxTOCLevel { + return nil, fmt.Errorf("start_level should be between %d and %d", minTOCLevel, maxTOCLevel) + } + } + endLevel := args.GetAttr("end_level") + if endLevel.IsNull() { + endLevel = cty.NumberIntVal(defaultTOCEndLevel) + } else { + n, _ := endLevel.AsBigFloat().Int64() + if n < minTOCLevel || n > maxTOCLevel { + return nil, fmt.Errorf("end_level should be between %d and %d", minTOCLevel, maxTOCLevel) + } + } + ordered := args.GetAttr("ordered") + if ordered.IsNull() { + ordered = cty.BoolVal(defaultTOCOrdered) + } + scope := args.GetAttr("scope") + if scope.IsNull() { + scope = cty.StringVal(defaultTOCScope) + } else if !slices.Contains(availableTOCScopes, scope.AsString()) { + return nil, fmt.Errorf("scope should be one of %s", strings.Join(availableTOCScopes, ", ")) + } + startLevelI64, _ := startLevel.AsBigFloat().Int64() + endLevelI64, _ := endLevel.AsBigFloat().Int64() + return &tocArgs{ + startLevel: int(startLevelI64), + endLevel: int(endLevelI64), + ordered: ordered.True(), + scope: scope.AsString(), + }, nil +} + +func genTOC(ctx context.Context, params *plugin.ProvideContentParams) (*plugin.Content, hcl.Diagnostics) { + args, err := parseTOCArgs(params.Args) + if err != nil { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to parse arguments", + Detail: err.Error(), + }} + } + var scopedCtx plugin.Data + if args.scope == "section" { + section, ok := params.DataContext["section"] + if !ok { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "No section context", + Detail: "No section context found", + }} + } + scopedCtx = section + } else { + doc, ok := params.DataContext["document"] + if !ok { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "No document context", + Detail: "No document context found", + }} + 
} + scopedCtx = doc + } + content, ok := scopedCtx.(plugin.MapData)["content"] + if !ok { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "No content context", + Detail: "No content context found", + }} + } + titles, err := parseContentTitles(content, args.startLevel, args.endLevel) + if err != nil { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to parse content titles", + Detail: err.Error(), + }} + } + + return &plugin.Content{ + Markdown: titles.render(0, args.ordered), + }, nil +} + +type tocNode struct { + level int + title string + children tocNodeList +} + +func (n tocNode) render(pos, depth int, ordered bool) string { + format := "%s- [%s](#%s)\n" + if ordered { + format = "%s" + strconv.Itoa(pos+1) + ". [%s](#%s)\n" + } + dst := []string{ + fmt.Sprintf(format, strings.Repeat(" ", depth), n.title, anchorize(n.title)), + n.children.render(depth+1, ordered), + } + return strings.Join(dst, "") +} + +type tocNodeList []tocNode + +func (l tocNodeList) render(depth int, ordered bool) string { + dst := []string{} + for i, node := range l { + dst = append(dst, node.render(i, depth, ordered)) + } + return strings.Join(dst, "") +} + +func (l tocNodeList) add(node tocNode) tocNodeList { + if len(l) == 0 { + return append(l, node) + } + + last := l[len(l)-1] + if last.level < node.level { + last.children = last.children.add(node) + l[len(l)-1] = last + } else { + l = append(l, node) + } + return l +} + +func anchorize(s string) string { + return strings.ToLower(strings.ReplaceAll(s, " ", "-")) +} + +func parseContentTitles(data plugin.Data, startLvl, endLvl int) (tocNodeList, error) { + list, ok := data.(plugin.ListData) + if !ok { + return nil, fmt.Errorf("expected a list of content titles") + } + var result tocNodeList + for _, item := range list { + line, ok := item.(plugin.StringData) + if !ok { + return nil, fmt.Errorf("expected a string") + } + if strings.HasPrefix(string(line), "#") { + level := strings.Count(string(line), "#") + if level < startLvl || level > endLvl { + continue + } + title := strings.TrimSpace(string(line)[level:]) + result = result.add(tocNode{level: level, title: title}) + } + } + + return result, nil +} diff --git a/internal/builtin/content_toc_test.go b/internal/builtin/content_toc_test.go new file mode 100644 index 00000000..f70a0009 --- /dev/null +++ b/internal/builtin/content_toc_test.go @@ -0,0 +1,99 @@ +package builtin + +import ( + "context" + "strings" + "testing" + + "github.com/stretchr/testify/suite" + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/plugin" +) + +type TOCContentTestSuite struct { + suite.Suite + schema *plugin.ContentProvider +} + +func TestTOCContentTestSuite(t *testing.T) { + suite.Run(t, new(TOCContentTestSuite)) +} + +func (s *TOCContentTestSuite) SetupSuite() { + s.schema = makeTOCContentProvider() +} + +func (s *TOCContentTestSuite) TestSchema() { + s.Require().NotNil(s.schema) + s.Nil(s.schema.Config) + s.NotNil(s.schema.Args) + s.NotNil(s.schema.ContentFunc) +} + +func (s *TOCContentTestSuite) TestSimple() { + schema := makeTOCContentProvider() + ctx := context.Background() + res, diags := schema.ContentFunc(ctx, &plugin.ProvideContentParams{ + Args: cty.ObjectVal(map[string]cty.Value{ + "start_level": cty.NullVal(cty.Number), + "end_level": cty.NullVal(cty.Number), + "ordered": cty.NullVal(cty.Bool), + "scope": cty.NullVal(cty.String), + }), + DataContext: plugin.MapData{ + "document": plugin.MapData{ + "content": plugin.ListData{ + plugin.StringData("# 
Header 1"), + plugin.StringData("Lorem ipsum dolor sit amet, consectetur adipiscing elit."), + plugin.StringData("## Header 2"), + plugin.StringData("Vestibulum nec odio."), + plugin.StringData("### Header 3"), + plugin.StringData("Integer sit amet."), + }, + }, + }, + }) + s.Len(diags, 0, "no errors") + s.Equal(strings.Join([]string{ + "- [Header 1](#header-1)", + " - [Header 2](#header-2)", + " - [Header 3](#header-3)", + }, "\n")+"\n", res.Markdown) +} + +func (s *TOCContentTestSuite) TestAdvanced() { + schema := makeTOCContentProvider() + ctx := context.Background() + res, diags := schema.ContentFunc(ctx, &plugin.ProvideContentParams{ + Args: cty.ObjectVal(map[string]cty.Value{ + "start_level": cty.NumberIntVal(2), + "end_level": cty.NumberIntVal(3), + "ordered": cty.True, + "scope": cty.StringVal("document"), + }), + DataContext: plugin.MapData{ + "document": plugin.MapData{ + "content": plugin.ListData{ + plugin.StringData("# Header 1"), + plugin.StringData("Lorem ipsum dolor sit amet, consectetur adipiscing elit."), + plugin.StringData("## Header 2"), + plugin.StringData("Vestibulum nec odio."), + plugin.StringData("### Header 3"), + plugin.StringData("Integer sit amet."), + plugin.StringData("## Header 4"), + plugin.StringData("Vestibulum nec odio."), + plugin.StringData("## Header 5"), + plugin.StringData("Vestibulum nec odio."), + }, + }, + }, + }) + s.Len(diags, 0, "no errors") + s.Equal(strings.Join([]string{ + "1. [Header 2](#header-2)", + " 1. [Header 3](#header-3)", + "2. [Header 4](#header-4)", + "3. [Header 5](#header-5)", + }, "\n")+"\n", res.Markdown) +} diff --git a/internal/builtin/plugin.go b/internal/builtin/plugin.go index 593e4e57..f095f89b 100644 --- a/internal/builtin/plugin.go +++ b/internal/builtin/plugin.go @@ -15,6 +15,7 @@ func Plugin(version string) *plugin.Schema { "inline": makeInlineDataSource(), }, ContentProviders: plugin.ContentProviders{ + "toc": makeTOCContentProvider(), "text": makeTextContentProvider(), "image": makeImageContentProvider(), "list": makeListContentProvider(), diff --git a/internal/builtin/plugin_test.go b/internal/builtin/plugin_test.go index f58423ac..ed15a3f4 100644 --- a/internal/builtin/plugin_test.go +++ b/internal/builtin/plugin_test.go @@ -14,6 +14,7 @@ func TestPluginSchema(t *testing.T) { assert.NotNil(t, schema.DataSources["txt"]) assert.NotNil(t, schema.DataSources["json"]) assert.NotNil(t, schema.DataSources["inline"]) + assert.NotNil(t, schema.ContentProviders["toc"]) assert.NotNil(t, schema.ContentProviders["text"]) assert.NotNil(t, schema.ContentProviders["image"]) assert.NotNil(t, schema.ContentProviders["list"]) From 3febeb9f21eb85a82d0f4f68efb5b04575f56ec8 Mon Sep 17 00:00:00 2001 From: dobarx Date: Tue, 20 Feb 2024 10:27:12 +0200 Subject: [PATCH 2/8] parser: fix result type for data sources --- cmd/data.go | 2 +- examples/templates/openai/example.fabric | 4 ++-- parser/caller.go | 4 ++-- parser/evaluation/plugincaller.go | 2 +- test/e2e/data_test.go | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/cmd/data.go b/cmd/data.go index 6641cdf5..845c1970 100644 --- a/cmd/data.go +++ b/cmd/data.go @@ -19,7 +19,7 @@ import ( var dataTgtRe = regexp.MustCompile(`(?:document\.([^.]+)\.data\.([^.]+)\.([^.\n]+))|(?:data\.([^.]+)\.([^.]+))`) -func Data(ctx context.Context, blocks *parser.DefinedBlocks, caller *parser.Caller, target string) (result plugin.MapData, diags diagnostics.Diag) { +func Data(ctx context.Context, blocks *parser.DefinedBlocks, caller *parser.Caller, target string) (result plugin.Data, 
diags diagnostics.Diag) { // docName, pluginName, blockName // target: document.<doc-name>.data.<plugin-name>.<block-name> tgt := dataTgtRe.FindStringSubmatch(target) diff --git a/examples/templates/openai/example.fabric b/examples/templates/openai/example.fabric index 203e6ea9..45a3a79e 100644 --- a/examples/templates/openai/example.fabric +++ b/examples/templates/openai/example.fabric @@ -22,7 +22,7 @@ document "example" { absolute_title_size = 2 } content table { - query = ".data.csv.csv_file.result" + query = ".data.csv.csv_file" columns = [ { "header" = "ID" @@ -50,7 +50,7 @@ document "example" { config { api_key = "" } - query = ".data.csv.csv_file.result" + query = ".data.csv.csv_file" model = "gpt-3.5-turbo" prompt = "Describe each user in a sentence" } diff --git a/parser/caller.go b/parser/caller.go index 3647a185..37034ed8 100644 --- a/parser/caller.go +++ b/parser/caller.go @@ -161,14 +161,14 @@ func (c *Caller) CallContent(ctx context.Context, name string, config evaluation return } -func (c *Caller) CallData(ctx context.Context, name string, config evaluation.Configuration, invocation evaluation.Invocation) (result plugin.MapData, diag diagnostics.Diag) { +func (c *Caller) CallData(ctx context.Context, name string, config evaluation.Configuration, invocation evaluation.Invocation) (result plugin.Data, diag diagnostics.Diag) { var ok bool var res any res, diag = c.callPlugin(ctx, definitions.BlockKindData, name, config, invocation, nil) if diag.HasErrors() { return } - result, ok = res.(plugin.MapData) + result, ok = res.(plugin.Data) if !ok { panic("Incorrect plugin result type") } diff --git a/parser/evaluation/plugincaller.go b/parser/evaluation/plugincaller.go index 2de5c053..5bd73e08 100644 --- a/parser/evaluation/plugincaller.go +++ b/parser/evaluation/plugincaller.go @@ -8,7 +8,7 @@ import ( ) type DataCaller interface { - CallData(ctx context.Context, name string, config Configuration, invocation Invocation) (result plugin.MapData, diag diagnostics.Diag) + CallData(ctx context.Context, name string, config Configuration, invocation Invocation) (result plugin.Data, diag diagnostics.Diag) } type ContentCaller interface { diff --git a/test/e2e/data_test.go b/test/e2e/data_test.go index a4abdc33..75338309 100644 --- a/test/e2e/data_test.go +++ b/test/e2e/data_test.go @@ -34,7 +34,7 @@ func dataTest(t *testing.T, testName string, files []string, target string, expe eval.Cleanup(nil) }() - var res plugin.MapData + var res plugin.Data diags := eval.ParseFabricFiles(sourceDir) if !diags.HasErrors() { if !diags.Extend(eval.LoadRunner()) { From f5613263491985ec0fb372e2167987361d7c4b9c Mon Sep 17 00:00:00 2001 From: dobarx Date: Tue, 20 Feb 2024 10:28:18 +0200 Subject: [PATCH 3/8] internal/hackerone: add hackerone_reports data source --- examples/templates/hackerone/example.fabric | 31 + internal/hackerone/client/client.go | 132 ++++ internal/hackerone/client/client_test.go | 197 ++++++ internal/hackerone/cmd/main.go | 14 + internal/hackerone/data_hackerone_reports.go | 614 ++++++++++++++++++ .../hackerone/data_hackerone_reports_test.go | 415 ++++++++++++ internal/hackerone/plugin.go | 47 ++ internal/hackerone/plugin_test.go | 14 + 8 files changed, 1464 insertions(+) create mode 100644 examples/templates/hackerone/example.fabric create mode 100644 internal/hackerone/client/client.go create mode 100644 internal/hackerone/client/client_test.go create mode 100644 internal/hackerone/cmd/main.go create mode 100644 internal/hackerone/data_hackerone_reports.go create mode 100644
internal/hackerone/data_hackerone_reports_test.go create mode 100644 internal/hackerone/plugin.go create mode 100644 internal/hackerone/plugin_test.go diff --git a/examples/templates/hackerone/example.fabric b/examples/templates/hackerone/example.fabric new file mode 100644 index 00000000..e7663c9b --- /dev/null +++ b/examples/templates/hackerone/example.fabric @@ -0,0 +1,31 @@ +fabric { + cache_dir = "./.fabric" + plugin_registry { + mirror_dir = "dist/plugins" + } + plugin_versions = { + "blackstork/hackerone" = "0.0.0-dev" + } +} + +config data hackerone_reports { + api_username = "" + api_token = "" +} + +document "example" { + title = "Using hackerone plugin" + + data hackerone_reports "my_reports" { + program = [""] + } + content text { + format_as = "title" + text = "My HackerOne Reports" + absolute_title_size = 2 + } + content list { + query = "[.data.hackerone_reports.my_reports[].attributes.title]" + item_template = "{{.}}" + } +} \ No newline at end of file diff --git a/internal/hackerone/client/client.go b/internal/hackerone/client/client.go new file mode 100644 index 00000000..8066235b --- /dev/null +++ b/internal/hackerone/client/client.go @@ -0,0 +1,132 @@ +package client + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + "time" + + "github.com/google/go-querystring/query" +) + +func String(s string) *string { + return &s +} + +func Bool(b bool) *bool { + return &b +} + +func Int(i int) *int { + return &i +} + +type GetAllReportsReq struct { + PageSize *int `url:"page[size],omitempty"` + PageNumber *int `url:"page[number],omitempty"` + Sort *string `url:"sort,omitempty"` + FilterProgram []string `url:"filter[program][],omitempty"` + FilterInboxIDs []int `url:"filter[inbox_ids][],omitempty"` + FilterReporter []string `url:"filter[reporter][],omitempty"` + FilterAssignee []string `url:"filter[assignee][],omitempty"` + FilterState []string `url:"filter[state][],omitempty"` + FilterID []int `url:"filter[id][],omitempty"` + FilterWeaknessID []int `url:"filter[weakness_id][],omitempty"` + FilterSeverity []string `url:"filter[severity][],omitempty"` + FilterHackerPublished *bool `url:"filter[hacker_published],omitempty"` + FilterCreatedAtGT *time.Time `url:"filter[created_at__gt],omitempty"` + FilterCreatedAtLT *time.Time `url:"filter[created_at__lt],omitempty"` + FilterSubmittedAtGT *time.Time `url:"filter[submitted_at__gt],omitempty"` + FilterSubmittedAtLT *time.Time `url:"filter[submitted_at__lt],omitempty"` + FilterTriagedAtGT *time.Time `url:"filter[triaged_at__gt],omitempty"` + FilterTriagedAtLT *time.Time `url:"filter[triaged_at__lt],omitempty"` + FilterTriagedAtNull *bool `url:"filter[triaged_at__null],omitempty"` + FilterClosedAtGT *time.Time `url:"filter[closed_at__gt],omitempty"` + FilterClosedAtLT *time.Time `url:"filter[closed_at__lt],omitempty"` + FilterClosedAtNull *bool `url:"filter[closed_at__null],omitempty"` + FilterDisclosedAtGT *time.Time `url:"filter[disclosed_at__gt],omitempty"` + FilterDisclosedAtLT *time.Time `url:"filter[disclosed_at__lt],omitempty"` + FilterDisclosedAtNull *bool `url:"filter[disclosed_at__null],omitempty"` + FilterReporterAgreedOnGoingPublic *bool `url:"filter[reporter_agreed_on_going_public],omitempty"` + FilterBountyAwardedAtGT *time.Time `url:"filter[bounty_awarded_at__gt],omitempty"` + FilterBountyAwardedAtLT *time.Time `url:"filter[bounty_awarded_at__lt],omitempty"` + FilterBountyAwardedAtNull *bool `url:"filter[bounty_awarded_at__null],omitempty"` + FilterSwagAwardedAtGT *time.Time 
`url:"filter[swag_awarded_at__gt],omitempty"` + FilterSwagAwardedAtLT *time.Time `url:"filter[swag_awarded_at__lt],omitempty"` + FilterSwagAwardedAtNull *bool `url:"filter[swag_awarded_at__null],omitempty"` + FilterLastReportActivityAtGT *time.Time `url:"filter[last_report_activity_at__gt],omitempty"` + FilterLastReportActivityAtLT *time.Time `url:"filter[last_report_activity_at__lt],omitempty"` + FilterFirstProgramActivityAtGT *time.Time `url:"filter[first_program_activity_at__gt],omitempty"` + FilterFirstProgramActivityAtLT *time.Time `url:"filter[first_program_activity_at__lt],omitempty"` + FilterFirstProgramActivityAtNull *bool `url:"filter[first_program_activity_at__null],omitempty"` + FilterLastProgramActivityAtGT *time.Time `url:"filter[last_program_activity_at__gt],omitempty"` + FilterLastProgramActivityAtLT *time.Time `url:"filter[last_program_activity_at__lt],omitempty"` + FilterLastProgramActivityAtNull *bool `url:"filter[last_program_activity_at__null],omitempty"` + FilterLastActivityAtGT *time.Time `url:"filter[last_activity_at__gt],omitempty"` + FilterLastActivityAtLT *time.Time `url:"filter[last_activity_at__lt],omitempty"` + FilterLastPublicActivityAtGT *time.Time `url:"filter[last_public_activity_at__gt],omitempty"` + FilterLastPublicActivityAtLT *time.Time `url:"filter[last_public_activity_at__lt],omitempty"` + FilterKeyword *string `url:"filter[keyword],omitempty"` + FilterCustomFields map[string]string `url:"filter[custom_fields][],omitempty"` +} + +type GetAllReportsRes struct { + Data []any `json:"data"` +} + +type client struct { + url string + usr string + tkn string +} + +type Client interface { + GetAllReports(ctx context.Context, req *GetAllReportsReq) (*GetAllReportsRes, error) +} + +func New(user, token string) Client { + return &client{ + url: "https://api.hackerone.com", + usr: user, + tkn: token, + } +} + +func (c *client) auth(r *http.Request) { + r.SetBasicAuth(c.usr, c.tkn) +} + +func (c *client) GetAllReports(ctx context.Context, req *GetAllReportsReq) (*GetAllReportsRes, error) { + u, err := url.Parse(c.url + "/v1/reports") + if err != nil { + return nil, err + } + q, err := query.Values(req) + if err != nil { + return nil, err + } + u.RawQuery = q.Encode() + r, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) + if err != nil { + return nil, err + } + c.auth(r) + client := http.Client{ + Timeout: 15 * time.Second, + } + res, err := client.Do(r) + if err != nil { + return nil, err + } + if res.StatusCode != http.StatusOK { + return nil, fmt.Errorf("hackerone client returned status code: %d", res.StatusCode) + } + defer res.Body.Close() + var data GetAllReportsRes + if err := json.NewDecoder(res.Body).Decode(&data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/internal/hackerone/client/client_test.go b/internal/hackerone/client/client_test.go new file mode 100644 index 00000000..0b68e660 --- /dev/null +++ b/internal/hackerone/client/client_test.go @@ -0,0 +1,197 @@ +package client + +import ( + "context" + "net/http" + "net/http/httptest" + "net/url" + "testing" + "time" + + "github.com/stretchr/testify/suite" +) + +type ClientTestSuite struct { + suite.Suite + ctx context.Context + cancel context.CancelFunc +} + +func (s *ClientTestSuite) SetupTest() { + s.ctx, s.cancel = context.WithCancel(context.Background()) +} + +func (s *ClientTestSuite) TearDownTest() { + s.cancel() +} + +func TestClientTestSuite(t *testing.T) { + suite.Run(t, new(ClientTestSuite)) +} + +func (s *ClientTestSuite) mock(fn 
http.HandlerFunc, usr, tkn string) (Client, *httptest.Server) { + srv := httptest.NewServer(fn) + cli := &client{ + url: srv.URL, + usr: usr, + tkn: tkn, + } + return cli, srv +} + +func (s *ClientTestSuite) TestAuth() { + client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) { + usr, tkn, ok := r.BasicAuth() + s.True(ok) + s.Equal("test_user", usr) + s.Equal("test_token", tkn) + }, "test_user", "test_token") + defer srv.Close() + client.GetAllReports(s.ctx, &GetAllReportsReq{}) +} + +func (s *ClientTestSuite) queryList(q url.Values, key string) []string { + list, ok := q[key] + s.Require().True(ok) + return list +} + +func (s *ClientTestSuite) TestGetAllReports() { + ts := time.Unix(123, 0).UTC() + client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) { + s.Equal("/v1/reports", r.URL.Path) + s.Equal(http.MethodGet, r.Method) + usr, tkn, ok := r.BasicAuth() + s.True(ok) + s.Equal("test_user", usr) + s.Equal("test_token", tkn) + s.Equal("10", r.URL.Query().Get("page[size]")) + s.Equal("1", r.URL.Query().Get("page[number]")) + s.Equal("test_h1b", r.URL.Query().Get("filter[program][]")) + s.Equal("72049", r.URL.Query().Get("filter[inbox_ids][]")) + s.Equal("created_at", r.URL.Query().Get("sort")) + s.Equal("test_reporter", r.URL.Query().Get("filter[reporter][]")) + s.Equal("test_assignee", r.URL.Query().Get("filter[assignee][]")) + s.Equal("test_state", r.URL.Query().Get("filter[state][]")) + s.Equal([]string{"1", "2", "3"}, s.queryList(r.URL.Query(), "filter[id][]")) + s.Equal([]string{"1", "2", "3"}, s.queryList(r.URL.Query(), "filter[weakness_id][]")) + s.Equal("test_severity", r.URL.Query().Get("filter[severity][]")) + s.Equal("true", r.URL.Query().Get("filter[hacker_published]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[created_at__gt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[created_at__lt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[submitted_at__gt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[submitted_at__lt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[triaged_at__gt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[triaged_at__lt]")) + s.Equal("true", r.URL.Query().Get("filter[triaged_at__null]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[closed_at__gt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[closed_at__lt]")) + s.Equal("true", r.URL.Query().Get("filter[closed_at__null]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[disclosed_at__gt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[disclosed_at__lt]")) + s.Equal("true", r.URL.Query().Get("filter[disclosed_at__null]")) + s.Equal("true", r.URL.Query().Get("filter[reporter_agreed_on_going_public]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[bounty_awarded_at__gt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[bounty_awarded_at__lt]")) + s.Equal("true", r.URL.Query().Get("filter[bounty_awarded_at__null]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[swag_awarded_at__gt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[swag_awarded_at__lt]")) + s.Equal("true", r.URL.Query().Get("filter[swag_awarded_at__null]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[last_report_activity_at__gt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[last_report_activity_at__lt]")) + s.Equal("1970-01-01T00:02:03Z", 
r.URL.Query().Get("filter[first_program_activity_at__gt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[first_program_activity_at__lt]")) + s.Equal("true", r.URL.Query().Get("filter[first_program_activity_at__null]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[last_program_activity_at__gt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[last_program_activity_at__lt]")) + s.Equal("true", r.URL.Query().Get("filter[last_program_activity_at__null]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[last_activity_at__gt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[last_activity_at__lt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[last_public_activity_at__gt]")) + s.Equal("1970-01-01T00:02:03Z", r.URL.Query().Get("filter[last_public_activity_at__lt]")) + s.Equal("test_keyword", r.URL.Query().Get("filter[keyword]")) + s.Equal("map[test_key:test_value]", r.URL.Query().Get("filter[custom_fields][]")) + w.Write([]byte(`{ + "data": [ + { + "any": "data" + } + ] + }`)) + }, "test_user", "test_token") + defer srv.Close() + req := GetAllReportsReq{ + PageSize: Int(10), + PageNumber: Int(1), + FilterProgram: []string{"test_h1b"}, + FilterInboxIDs: []int{72049}, + Sort: String("created_at"), + FilterReporter: []string{"test_reporter"}, + FilterAssignee: []string{"test_assignee"}, + FilterState: []string{"test_state"}, + FilterID: []int{1, 2, 3}, + FilterWeaknessID: []int{1, 2, 3}, + FilterSeverity: []string{"test_severity"}, + FilterHackerPublished: Bool(true), + FilterCreatedAtGT: &ts, + FilterCreatedAtLT: &ts, + FilterSubmittedAtGT: &ts, + FilterSubmittedAtLT: &ts, + FilterTriagedAtGT: &ts, + FilterTriagedAtLT: &ts, + FilterTriagedAtNull: Bool(true), + FilterClosedAtGT: &ts, + FilterClosedAtLT: &ts, + FilterClosedAtNull: Bool(true), + FilterDisclosedAtGT: &ts, + FilterDisclosedAtLT: &ts, + FilterDisclosedAtNull: Bool(true), + FilterReporterAgreedOnGoingPublic: Bool(true), + FilterBountyAwardedAtGT: &ts, + FilterBountyAwardedAtLT: &ts, + FilterBountyAwardedAtNull: Bool(true), + FilterSwagAwardedAtGT: &ts, + FilterSwagAwardedAtLT: &ts, + FilterSwagAwardedAtNull: Bool(true), + FilterLastReportActivityAtGT: &ts, + FilterLastReportActivityAtLT: &ts, + FilterFirstProgramActivityAtGT: &ts, + FilterFirstProgramActivityAtLT: &ts, + FilterFirstProgramActivityAtNull: Bool(true), + FilterLastProgramActivityAtGT: &ts, + FilterLastProgramActivityAtLT: &ts, + FilterLastProgramActivityAtNull: Bool(true), + FilterLastActivityAtGT: &ts, + FilterLastActivityAtLT: &ts, + FilterLastPublicActivityAtGT: &ts, + FilterLastPublicActivityAtLT: &ts, + FilterKeyword: String("test_keyword"), + FilterCustomFields: map[string]string{ + "test_key": "test_value", + }, + } + result, err := client.GetAllReports(s.ctx, &req) + s.NoError(err) + s.Equal(&GetAllReportsRes{ + Data: []any{ + map[string]any{ + "any": "data", + }, + }, + }, result) +} + +func (s *ClientTestSuite) TestGetAllReportsError() { + client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusUnauthorized) + }, "test_user", "test_token") + defer srv.Close() + req := GetAllReportsReq{} + _, err := client.GetAllReports(s.ctx, &req) + s.Error(err) +} + +func (s *ClientTestSuite) TestDefaultClientURL() { + cli := New("test_user", "test_token") + s.Equal("https://api.hackerone.com", cli.(*client).url) +} diff --git a/internal/hackerone/cmd/main.go b/internal/hackerone/cmd/main.go new file mode 100644 index 00000000..ee070062 --- /dev/null +++ 
b/internal/hackerone/cmd/main.go @@ -0,0 +1,14 @@ +package main + +import ( + "github.com/blackstork-io/fabric/internal/hackerone" + pluginapiv1 "github.com/blackstork-io/fabric/plugin/pluginapi/v1" +) + +var version string + +func main() { + pluginapiv1.Serve( + hackerone.Plugin(version, hackerone.DefaultClientLoader), + ) +} diff --git a/internal/hackerone/data_hackerone_reports.go b/internal/hackerone/data_hackerone_reports.go new file mode 100644 index 00000000..f4ca3a06 --- /dev/null +++ b/internal/hackerone/data_hackerone_reports.go @@ -0,0 +1,614 @@ +package hackerone + +import ( + "context" + "fmt" + "time" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hcldec" + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/internal/hackerone/client" + "github.com/blackstork-io/fabric/plugin" +) + +func makeHackerOneReportsDataSchema(loader ClientLoadFn) *plugin.DataSource { + return &plugin.DataSource{ + Config: hcldec.ObjectSpec{ + "api_username": &hcldec.AttrSpec{ + Name: "api_username", + Type: cty.String, + Required: true, + }, + "api_token": &hcldec.AttrSpec{ + Name: "api_token", + Type: cty.String, + Required: true, + }, + }, + Args: hcldec.ObjectSpec{ + "size": &hcldec.AttrSpec{ + Name: "size", + Type: cty.Number, + }, + "page_number": &hcldec.AttrSpec{ + Name: "page_number", + Type: cty.Number, + }, + "sort": &hcldec.AttrSpec{ + Name: "sort", + Type: cty.String, + }, + "program": &hcldec.AttrSpec{ + Name: "program", + Type: cty.List(cty.String), + }, + "inbox_ids": &hcldec.AttrSpec{ + Name: "inbox_ids", + Type: cty.List(cty.Number), + }, + "reporter": &hcldec.AttrSpec{ + Name: "reporter", + Type: cty.List(cty.String), + }, + "assignee": &hcldec.AttrSpec{ + Name: "assignee", + Type: cty.List(cty.String), + }, + "state": &hcldec.AttrSpec{ + Name: "state", + Type: cty.List(cty.String), + }, + "id": &hcldec.AttrSpec{ + Name: "id", + Type: cty.List(cty.Number), + }, + "weakness_id": &hcldec.AttrSpec{ + Name: "weakness_id", + Type: cty.List(cty.Number), + }, + "severity": &hcldec.AttrSpec{ + Name: "severity", + Type: cty.List(cty.String), + }, + "hacker_published": &hcldec.AttrSpec{ + Name: "hacker_published", + Type: cty.Bool, + }, + "created_at__gt": &hcldec.AttrSpec{ + Name: "created_at__gt", + Type: cty.String, + }, + "created_at__lt": &hcldec.AttrSpec{ + Name: "created_at__lt", + Type: cty.String, + }, + "submitted_at__gt": &hcldec.AttrSpec{ + Name: "submitted_at__gt", + Type: cty.String, + }, + "submitted_at__lt": &hcldec.AttrSpec{ + Name: "submitted_at__lt", + Type: cty.String, + }, + "triaged_at__gt": &hcldec.AttrSpec{ + Name: "triaged_at__gt", + Type: cty.String, + }, + "triaged_at__lt": &hcldec.AttrSpec{ + Name: "triaged_at__lt", + Type: cty.String, + }, + "triaged_at__null": &hcldec.AttrSpec{ + Name: "triaged_at__null", + Type: cty.Bool, + }, + "closed_at__gt": &hcldec.AttrSpec{ + Name: "closed_at__gt", + Type: cty.String, + }, + "closed_at__lt": &hcldec.AttrSpec{ + Name: "closed_at__lt", + Type: cty.String, + }, + "closed_at__null": &hcldec.AttrSpec{ + Name: "closed_at__null", + Type: cty.Bool, + }, + "disclosed_at__gt": &hcldec.AttrSpec{ + Name: "disclosed_at__gt", + Type: cty.String, + }, + "disclosed_at__lt": &hcldec.AttrSpec{ + Name: "disclosed_at__lt", + Type: cty.String, + }, + "disclosed_at__null": &hcldec.AttrSpec{ + Name: "disclosed_at__null", + Type: cty.Bool, + }, + "reporter_agreed_on_going_public": &hcldec.AttrSpec{ + Name: "reporter_agreed_on_going_public", + Type: cty.Bool, + }, + "bounty_awarded_at__gt": &hcldec.AttrSpec{ + 
Name: "bounty_awarded_at__gt", + Type: cty.String, + }, + "bounty_awarded_at__lt": &hcldec.AttrSpec{ + Name: "bounty_awarded_at__lt", + Type: cty.String, + }, + "bounty_awarded_at__null": &hcldec.AttrSpec{ + Name: "bounty_awarded_at__null", + Type: cty.Bool, + }, + "swag_awarded_at__gt": &hcldec.AttrSpec{ + Name: "swag_awarded_at__gt", + Type: cty.String, + }, + "swag_awarded_at__lt": &hcldec.AttrSpec{ + Name: "swag_awarded_at__lt", + Type: cty.String, + }, + "swag_awarded_at__null": &hcldec.AttrSpec{ + Name: "swag_awarded_at__null", + Type: cty.Bool, + }, + "last_report_activity_at__gt": &hcldec.AttrSpec{ + Name: "last_report_activity_at__gt", + Type: cty.String, + }, + "last_report_activity_at__lt": &hcldec.AttrSpec{ + Name: "last_report_activity_at__lt", + Type: cty.String, + }, + "first_program_activity_at__gt": &hcldec.AttrSpec{ + Name: "first_program_activity_at__gt", + Type: cty.String, + }, + "first_program_activity_at__lt": &hcldec.AttrSpec{ + Name: "first_program_activity_at__lt", + Type: cty.String, + }, + "first_program_activity_at__null": &hcldec.AttrSpec{ + Name: "first_program_activity_at__null", + Type: cty.Bool, + }, + "last_program_activity_at__gt": &hcldec.AttrSpec{ + Name: "last_program_activity_at__gt", + Type: cty.String, + }, + "last_program_activity_at__lt": &hcldec.AttrSpec{ + Name: "last_program_activity_at__lt", + Type: cty.String, + }, + "last_program_activity_at__null": &hcldec.AttrSpec{ + Name: "last_program_activity_at__null", + Type: cty.Bool, + }, + "last_activity_at__gt": &hcldec.AttrSpec{ + Name: "last_activity_at__gt", + Type: cty.String, + }, + "last_activity_at__lt": &hcldec.AttrSpec{ + Name: "last_activity_at__lt", + Type: cty.String, + }, + "last_public_activity_at__gt": &hcldec.AttrSpec{ + Name: "last_public_activity_at__gt", + Type: cty.String, + }, + "last_public_activity_at__lt": &hcldec.AttrSpec{ + Name: "last_public_activity_at__lt", + Type: cty.String, + }, + "keyword": &hcldec.AttrSpec{ + Name: "keyword", + Type: cty.String, + }, + "custom_fields": &hcldec.AttrSpec{ + Name: "custom_fields", + Type: cty.Map(cty.String), + }, + }, + DataFunc: fetchHackerOneReports(loader), + } +} + +func fetchHackerOneReports(loader ClientLoadFn) plugin.RetrieveDataFunc { + return func(ctx context.Context, params *plugin.RetrieveDataParams) (plugin.Data, hcl.Diagnostics) { + cli, err := makeClient(loader, params.Config) + if err != nil { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to create client", + Detail: err.Error(), + }} + } + req, err := parseHackerOneReportsArgs(params.Args) + if err != nil { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to parse arguments", + Detail: err.Error(), + }} + } + + data := make([]any, 0) + if req.PageNumber != nil { + res, err := cli.GetAllReports(ctx, req) + if err != nil { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to fetch reports", + Detail: err.Error(), + }} + } + data = append(data, res.Data...) + } else { + limit := -1 + if req.PageSize != nil { + limit = *req.PageSize + } + for page := minPage; ; page++ { + req.PageNumber = client.Int(page) + res, err := cli.GetAllReports(ctx, req) + if err != nil { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to fetch reports", + Detail: err.Error(), + }} + } + if res.Data == nil { + res.Data = make([]any, 0) + } + data = append(data, res.Data...) 
+ if len(res.Data) == 0 || (limit > 0 && len(data) >= limit) { + break + } + } + } + dst, err := plugin.ParseDataAny(data) + if err != nil { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to parse data", + Detail: err.Error(), + }} + } + return dst, nil + } +} + +func parseHackerOneReportsArgs(args cty.Value) (*client.GetAllReportsReq, error) { + if args.IsNull() { + return nil, fmt.Errorf("args are required") + } + var req client.GetAllReportsReq + size := args.GetAttr("size") + if !size.IsNull() { + n, _ := size.AsBigFloat().Int64() + if n <= 0 { + return nil, fmt.Errorf("size must be greater than 0") + } + req.PageSize = client.Int(int(n)) + } + pageNumber := args.GetAttr("page_number") + if !pageNumber.IsNull() { + n, _ := pageNumber.AsBigFloat().Int64() + if n <= 0 { + return nil, fmt.Errorf("page_number must be greater than 0") + } + req.PageNumber = client.Int(int(n)) + } + sort := args.GetAttr("sort") + if !sort.IsNull() && sort.AsString() != "" { + req.Sort = client.String(sort.AsString()) + } + program := args.GetAttr("program") + if !program.IsNull() { + programs := program.AsValueSlice() + for _, p := range programs { + req.FilterProgram = append(req.FilterProgram, p.AsString()) + } + } + inboxIDs := args.GetAttr("inbox_ids") + if !inboxIDs.IsNull() { + ids := inboxIDs.AsValueSlice() + for _, id := range ids { + n, _ := id.AsBigFloat().Int64() + req.FilterInboxIDs = append(req.FilterInboxIDs, int(n)) + } + } + if len(req.FilterProgram)+len(req.FilterInboxIDs) == 0 { + return nil, fmt.Errorf("at least one of program or inbox_ids must be provided") + } + reporter := args.GetAttr("reporter") + if !reporter.IsNull() { + reporters := reporter.AsValueSlice() + for _, r := range reporters { + req.FilterReporter = append(req.FilterReporter, r.AsString()) + } + } + assignee := args.GetAttr("assignee") + if !assignee.IsNull() { + assignees := assignee.AsValueSlice() + for _, a := range assignees { + req.FilterAssignee = append(req.FilterAssignee, a.AsString()) + } + } + state := args.GetAttr("state") + if !state.IsNull() { + states := state.AsValueSlice() + for _, s := range states { + req.FilterState = append(req.FilterState, s.AsString()) + } + } + id := args.GetAttr("id") + if !id.IsNull() { + ids := id.AsValueSlice() + for _, i := range ids { + n, _ := i.AsBigFloat().Int64() + req.FilterID = append(req.FilterID, int(n)) + } + } + weaknessID := args.GetAttr("weakness_id") + if !weaknessID.IsNull() { + ids := weaknessID.AsValueSlice() + for _, i := range ids { + n, _ := i.AsBigFloat().Int64() + req.FilterWeaknessID = append(req.FilterWeaknessID, int(n)) + } + } + severity := args.GetAttr("severity") + if !severity.IsNull() { + severities := severity.AsValueSlice() + for _, s := range severities { + req.FilterSeverity = append(req.FilterSeverity, s.AsString()) + } + } + hackerPublished := args.GetAttr("hacker_published") + if !hackerPublished.IsNull() { + req.FilterHackerPublished = client.Bool(hackerPublished.True()) + } + createdAtGT := args.GetAttr("created_at__gt") + if !createdAtGT.IsNull() { + t, err := time.Parse(time.RFC3339, createdAtGT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse created_at__gt: %w", err) + } + req.FilterCreatedAtGT = &t + } + createdAtLT := args.GetAttr("created_at__lt") + if !createdAtLT.IsNull() { + t, err := time.Parse(time.RFC3339, createdAtLT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse created_at__lt: %w", err) + } + req.FilterCreatedAtLT = &t + } + submittedAtGT := 
args.GetAttr("submitted_at__gt") + if !submittedAtGT.IsNull() { + t, err := time.Parse(time.RFC3339, submittedAtGT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse submitted_at__gt: %w", err) + } + req.FilterSubmittedAtGT = &t + } + submittedAtLT := args.GetAttr("submitted_at__lt") + if !submittedAtLT.IsNull() { + t, err := time.Parse(time.RFC3339, submittedAtLT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse submitted_at__lt: %w", err) + } + req.FilterSubmittedAtLT = &t + } + triagedAtGT := args.GetAttr("triaged_at__gt") + if !triagedAtGT.IsNull() { + t, err := time.Parse(time.RFC3339, triagedAtGT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse triaged_at__gt: %w", err) + } + req.FilterTriagedAtGT = &t + } + triagedAtLT := args.GetAttr("triaged_at__lt") + if !triagedAtLT.IsNull() { + t, err := time.Parse(time.RFC3339, triagedAtLT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse triaged_at__lt: %w", err) + } + req.FilterTriagedAtLT = &t + } + triagedAtNull := args.GetAttr("triaged_at__null") + if !triagedAtNull.IsNull() { + req.FilterTriagedAtNull = client.Bool(triagedAtNull.True()) + } + closedAtGT := args.GetAttr("closed_at__gt") + if !closedAtGT.IsNull() { + t, err := time.Parse(time.RFC3339, closedAtGT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse closed_at__gt: %w", err) + } + req.FilterClosedAtGT = &t + } + closedAtLT := args.GetAttr("closed_at__lt") + if !closedAtLT.IsNull() { + t, err := time.Parse(time.RFC3339, closedAtLT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse closed_at__lt: %w", err) + } + req.FilterClosedAtLT = &t + } + closedAtNull := args.GetAttr("closed_at__null") + if !closedAtNull.IsNull() { + req.FilterClosedAtNull = client.Bool(closedAtNull.True()) + } + disclosedAtGT := args.GetAttr("disclosed_at__gt") + if !disclosedAtGT.IsNull() { + t, err := time.Parse(time.RFC3339, disclosedAtGT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse disclosed_at__gt: %w", err) + } + req.FilterDisclosedAtGT = &t + } + disclosedAtLT := args.GetAttr("disclosed_at__lt") + if !disclosedAtLT.IsNull() { + t, err := time.Parse(time.RFC3339, disclosedAtLT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse disclosed_at__lt: %w", err) + } + req.FilterDisclosedAtLT = &t + } + disclosedAtNull := args.GetAttr("disclosed_at__null") + if !disclosedAtNull.IsNull() { + req.FilterDisclosedAtNull = client.Bool(disclosedAtNull.True()) + } + reporterAgreedOnGoingPublic := args.GetAttr("reporter_agreed_on_going_public") + if !reporterAgreedOnGoingPublic.IsNull() { + req.FilterReporterAgreedOnGoingPublic = client.Bool(reporterAgreedOnGoingPublic.True()) + } + bountyAwardedAtGT := args.GetAttr("bounty_awarded_at__gt") + if !bountyAwardedAtGT.IsNull() { + t, err := time.Parse(time.RFC3339, bountyAwardedAtGT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse bounty_awarded_at__gt: %w", err) + } + req.FilterBountyAwardedAtGT = &t + } + bountyAwardedAtLT := args.GetAttr("bounty_awarded_at__lt") + if !bountyAwardedAtLT.IsNull() { + t, err := time.Parse(time.RFC3339, bountyAwardedAtLT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse bounty_awarded_at__lt: %w", err) + } + req.FilterBountyAwardedAtLT = &t + } + bountyAwardedAtNull := args.GetAttr("bounty_awarded_at__null") + if !bountyAwardedAtNull.IsNull() { + req.FilterBountyAwardedAtNull = client.Bool(bountyAwardedAtNull.True()) + } 
+ swagAwardedAtGT := args.GetAttr("swag_awarded_at__gt") + if !swagAwardedAtGT.IsNull() { + t, err := time.Parse(time.RFC3339, swagAwardedAtGT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse swag_awarded_at__gt: %w", err) + } + req.FilterSwagAwardedAtGT = &t + } + swagAwardedAtLT := args.GetAttr("swag_awarded_at__lt") + if !swagAwardedAtLT.IsNull() { + t, err := time.Parse(time.RFC3339, swagAwardedAtLT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse swag_awarded_at__lt: %w", err) + } + req.FilterSwagAwardedAtLT = &t + } + swagAwardedAtNull := args.GetAttr("swag_awarded_at__null") + if !swagAwardedAtNull.IsNull() { + req.FilterSwagAwardedAtNull = client.Bool(swagAwardedAtNull.True()) + } + lastReportActivityAtGT := args.GetAttr("last_report_activity_at__gt") + if !lastReportActivityAtGT.IsNull() { + t, err := time.Parse(time.RFC3339, lastReportActivityAtGT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse last_report_activity_at__gt: %w", err) + } + req.FilterLastReportActivityAtGT = &t + } + lastReportActivityAtLT := args.GetAttr("last_report_activity_at__lt") + if !lastReportActivityAtLT.IsNull() { + t, err := time.Parse(time.RFC3339, lastReportActivityAtLT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse last_report_activity_at__lt: %w", err) + } + req.FilterLastReportActivityAtLT = &t + } + firstProgramActivityAtGT := args.GetAttr("first_program_activity_at__gt") + if !firstProgramActivityAtGT.IsNull() { + t, err := time.Parse(time.RFC3339, firstProgramActivityAtGT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse first_program_activity_at__gt: %w", err) + } + req.FilterFirstProgramActivityAtGT = &t + } + firstProgramActivityAtLT := args.GetAttr("first_program_activity_at__lt") + if !firstProgramActivityAtLT.IsNull() { + t, err := time.Parse(time.RFC3339, firstProgramActivityAtLT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse first_program_activity_at__lt: %w", err) + } + req.FilterFirstProgramActivityAtLT = &t + } + firstProgramActivityAtNull := args.GetAttr("first_program_activity_at__null") + if !firstProgramActivityAtNull.IsNull() { + req.FilterFirstProgramActivityAtNull = client.Bool(firstProgramActivityAtNull.True()) + } + lastProgramActivityAtGT := args.GetAttr("last_program_activity_at__gt") + if !lastProgramActivityAtGT.IsNull() { + t, err := time.Parse(time.RFC3339, lastProgramActivityAtGT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse last_program_activity_at__gt: %w", err) + } + req.FilterLastProgramActivityAtGT = &t + } + lastProgramActivityAtLT := args.GetAttr("last_program_activity_at__lt") + if !lastProgramActivityAtLT.IsNull() { + t, err := time.Parse(time.RFC3339, lastProgramActivityAtLT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse last_program_activity_at__lt: %w", err) + } + req.FilterLastProgramActivityAtLT = &t + } + lastProgramActivityAtNull := args.GetAttr("last_program_activity_at__null") + if !lastProgramActivityAtNull.IsNull() { + req.FilterLastProgramActivityAtNull = client.Bool(lastProgramActivityAtNull.True()) + } + lastActivityAtGT := args.GetAttr("last_activity_at__gt") + if !lastActivityAtGT.IsNull() { + t, err := time.Parse(time.RFC3339, lastActivityAtGT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse last_activity_at__gt: %w", err) + } + req.FilterLastActivityAtGT = &t + } + lastActivityAtLT := args.GetAttr("last_activity_at__lt") + if 
!lastActivityAtLT.IsNull() { + t, err := time.Parse(time.RFC3339, lastActivityAtLT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse last_activity_at__lt: %w", err) + } + req.FilterLastActivityAtLT = &t + } + lastPublicActivityAtGT := args.GetAttr("last_public_activity_at__gt") + if !lastPublicActivityAtGT.IsNull() { + t, err := time.Parse(time.RFC3339, lastPublicActivityAtGT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse last_public_activity_at__gt: %w", err) + } + req.FilterLastPublicActivityAtGT = &t + } + lastPublicActivityAtLT := args.GetAttr("last_public_activity_at__lt") + if !lastPublicActivityAtLT.IsNull() { + t, err := time.Parse(time.RFC3339, lastPublicActivityAtLT.AsString()) + if err != nil { + return nil, fmt.Errorf("failed to parse last_public_activity_at__lt: %w", err) + } + req.FilterLastPublicActivityAtLT = &t + } + keyword := args.GetAttr("keyword") + if !keyword.IsNull() { + req.FilterKeyword = client.String(keyword.AsString()) + } + customFields := args.GetAttr("custom_fields") + if !customFields.IsNull() { + fields := customFields.AsValueMap() + req.FilterCustomFields = make(map[string]string, len(fields)) + for k, v := range fields { + req.FilterCustomFields[k] = v.AsString() + } + } + return &req, nil +} diff --git a/internal/hackerone/data_hackerone_reports_test.go b/internal/hackerone/data_hackerone_reports_test.go new file mode 100644 index 00000000..983555d3 --- /dev/null +++ b/internal/hackerone/data_hackerone_reports_test.go @@ -0,0 +1,415 @@ +package hackerone + +import ( + "context" + "testing" + + "github.com/hashicorp/hcl/v2" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/internal/hackerone/client" + client_mocks "github.com/blackstork-io/fabric/mocks/internalpkg/hackerone/client" + "github.com/blackstork-io/fabric/plugin" +) + +type ReportsDataSourceTestSuite struct { + suite.Suite + schema *plugin.DataSource + ctx context.Context + cli *client_mocks.Client + storedUsr string + storedTkn string +} + +func TestReportsDataSourceTestSuite(t *testing.T) { + suite.Run(t, new(ReportsDataSourceTestSuite)) +} + +func (s *ReportsDataSourceTestSuite) SetupSuite() { + s.schema = makeHackerOneReportsDataSchema(func(user, token string) client.Client { + s.storedUsr = user + s.storedTkn = token + return s.cli + }) + s.ctx = context.Background() +} + +func (s *ReportsDataSourceTestSuite) SetupTest() { + s.cli = &client_mocks.Client{} +} + +func (s *ReportsDataSourceTestSuite) TearDownTest() { + s.cli.AssertExpectations(s.T()) +} + +func (s *ReportsDataSourceTestSuite) TestSchema() { + s.Require().NotNil(s.schema) + s.NotNil(s.schema.Config) + s.NotNil(s.schema.Args) + s.NotNil(s.schema.DataFunc) +} + +func (s *ReportsDataSourceTestSuite) TestPageNumber() { + s.cli.On("GetAllReports", mock.Anything, &client.GetAllReportsReq{ + PageNumber: client.Int(123), + FilterProgram: []string{"test_program"}, + }).Return(&client.GetAllReportsRes{ + Data: []any{ + map[string]any{ + "id": "1", + }, + }, + }, nil) + res, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "api_username": cty.StringVal("test_user"), + "api_token": cty.StringVal("test_token"), + }), + Args: cty.ObjectVal(map[string]cty.Value{ + "size": cty.NullVal(cty.Number), + "page_number": cty.NumberIntVal(123), + "sort": cty.NullVal(cty.String), + "program": cty.ListVal([]cty.Value{cty.StringVal("test_program")}), + 
"inbox_ids": cty.NullVal(cty.List(cty.Number)), + "reporter": cty.NullVal(cty.List(cty.String)), + "assignee": cty.NullVal(cty.List(cty.String)), + "state": cty.NullVal(cty.List(cty.String)), + "id": cty.NullVal(cty.List(cty.Number)), + "weakness_id": cty.NullVal(cty.List(cty.Number)), + "severity": cty.NullVal(cty.List(cty.String)), + "hacker_published": cty.NullVal(cty.Bool), + "created_at__gt": cty.NullVal(cty.String), + "created_at__lt": cty.NullVal(cty.String), + "submitted_at__gt": cty.NullVal(cty.String), + "submitted_at__lt": cty.NullVal(cty.String), + "triaged_at__gt": cty.NullVal(cty.String), + "triaged_at__lt": cty.NullVal(cty.String), + "triaged_at__null": cty.NullVal(cty.Bool), + "closed_at__gt": cty.NullVal(cty.String), + "closed_at__lt": cty.NullVal(cty.String), + "closed_at__null": cty.NullVal(cty.Bool), + "disclosed_at__gt": cty.NullVal(cty.String), + "disclosed_at__lt": cty.NullVal(cty.String), + "disclosed_at__null": cty.NullVal(cty.Bool), + "reporter_agreed_on_going_public": cty.NullVal(cty.Bool), + "bounty_awarded_at__gt": cty.NullVal(cty.String), + "bounty_awarded_at__lt": cty.NullVal(cty.String), + "bounty_awarded_at__null": cty.NullVal(cty.Bool), + "swag_awarded_at__gt": cty.NullVal(cty.String), + "swag_awarded_at__lt": cty.NullVal(cty.String), + "swag_awarded_at__null": cty.NullVal(cty.Bool), + "last_report_activity_at__gt": cty.NullVal(cty.String), + "last_report_activity_at__lt": cty.NullVal(cty.String), + "first_program_activity_at__gt": cty.NullVal(cty.String), + "first_program_activity_at__lt": cty.NullVal(cty.String), + "first_program_activity_at__null": cty.NullVal(cty.Bool), + "last_program_activity_at__gt": cty.NullVal(cty.String), + "last_program_activity_at__lt": cty.NullVal(cty.String), + "last_program_activity_at__null": cty.NullVal(cty.Bool), + "last_activity_at__gt": cty.NullVal(cty.String), + "last_activity_at__lt": cty.NullVal(cty.String), + "last_public_activity_at__gt": cty.NullVal(cty.String), + "last_public_activity_at__lt": cty.NullVal(cty.String), + "keyword": cty.NullVal(cty.String), + "custom_fields": cty.NullVal(cty.Map(cty.String)), + }), + }) + s.Equal("test_user", s.storedUsr) + s.Equal("test_token", s.storedTkn) + s.Len(diags, 0) + s.Equal(plugin.ListData{ + plugin.MapData{ + "id": plugin.StringData("1"), + }, + }, res) +} + +func (s *ReportsDataSourceTestSuite) TestProgram() { + s.cli.On("GetAllReports", mock.Anything, &client.GetAllReportsReq{ + PageNumber: client.Int(1), + FilterProgram: []string{"test_program"}, + }).Return(&client.GetAllReportsRes{ + Data: []any{ + map[string]any{ + "id": "1", + }, + }, + }, nil) + s.cli.On("GetAllReports", mock.Anything, &client.GetAllReportsReq{ + PageNumber: client.Int(2), + FilterProgram: []string{"test_program"}, + }).Return(&client.GetAllReportsRes{ + Data: []any{}, + }, nil) + res, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "api_username": cty.StringVal("test_user"), + "api_token": cty.StringVal("test_token"), + }), + Args: cty.ObjectVal(map[string]cty.Value{ + "size": cty.NullVal(cty.Number), + "page_number": cty.NullVal(cty.Number), + "sort": cty.NullVal(cty.String), + "program": cty.ListVal([]cty.Value{cty.StringVal("test_program")}), + "inbox_ids": cty.NullVal(cty.List(cty.Number)), + "reporter": cty.NullVal(cty.List(cty.String)), + "assignee": cty.NullVal(cty.List(cty.String)), + "state": cty.NullVal(cty.List(cty.String)), + "id": cty.NullVal(cty.List(cty.Number)), + "weakness_id": cty.NullVal(cty.List(cty.Number)), + 
"severity": cty.NullVal(cty.List(cty.String)), + "hacker_published": cty.NullVal(cty.Bool), + "created_at__gt": cty.NullVal(cty.String), + "created_at__lt": cty.NullVal(cty.String), + "submitted_at__gt": cty.NullVal(cty.String), + "submitted_at__lt": cty.NullVal(cty.String), + "triaged_at__gt": cty.NullVal(cty.String), + "triaged_at__lt": cty.NullVal(cty.String), + "triaged_at__null": cty.NullVal(cty.Bool), + "closed_at__gt": cty.NullVal(cty.String), + "closed_at__lt": cty.NullVal(cty.String), + "closed_at__null": cty.NullVal(cty.Bool), + "disclosed_at__gt": cty.NullVal(cty.String), + "disclosed_at__lt": cty.NullVal(cty.String), + "disclosed_at__null": cty.NullVal(cty.Bool), + "reporter_agreed_on_going_public": cty.NullVal(cty.Bool), + "bounty_awarded_at__gt": cty.NullVal(cty.String), + "bounty_awarded_at__lt": cty.NullVal(cty.String), + "bounty_awarded_at__null": cty.NullVal(cty.Bool), + "swag_awarded_at__gt": cty.NullVal(cty.String), + "swag_awarded_at__lt": cty.NullVal(cty.String), + "swag_awarded_at__null": cty.NullVal(cty.Bool), + "last_report_activity_at__gt": cty.NullVal(cty.String), + "last_report_activity_at__lt": cty.NullVal(cty.String), + "first_program_activity_at__gt": cty.NullVal(cty.String), + "first_program_activity_at__lt": cty.NullVal(cty.String), + "first_program_activity_at__null": cty.NullVal(cty.Bool), + "last_program_activity_at__gt": cty.NullVal(cty.String), + "last_program_activity_at__lt": cty.NullVal(cty.String), + "last_program_activity_at__null": cty.NullVal(cty.Bool), + "last_activity_at__gt": cty.NullVal(cty.String), + "last_activity_at__lt": cty.NullVal(cty.String), + "last_public_activity_at__gt": cty.NullVal(cty.String), + "last_public_activity_at__lt": cty.NullVal(cty.String), + "keyword": cty.NullVal(cty.String), + "custom_fields": cty.NullVal(cty.Map(cty.String)), + }), + }) + s.Equal("test_user", s.storedUsr) + s.Equal("test_token", s.storedTkn) + s.Len(diags, 0) + s.Equal(plugin.ListData{ + plugin.MapData{ + "id": plugin.StringData("1"), + }, + }, res) +} + +func (s *ReportsDataSourceTestSuite) TestInboxIDs() { + s.cli.On("GetAllReports", mock.Anything, &client.GetAllReportsReq{ + PageNumber: client.Int(1), + FilterInboxIDs: []int{1, 2, 3}, + }).Return(&client.GetAllReportsRes{ + Data: []any{ + map[string]any{ + "id": "1", + }, + }, + }, nil) + s.cli.On("GetAllReports", mock.Anything, &client.GetAllReportsReq{ + PageNumber: client.Int(2), + FilterInboxIDs: []int{1, 2, 3}, + }).Return(&client.GetAllReportsRes{ + Data: []any{}, + }, nil) + res, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "api_username": cty.StringVal("test_user"), + "api_token": cty.StringVal("test_token"), + }), + Args: cty.ObjectVal(map[string]cty.Value{ + "size": cty.NullVal(cty.Number), + "page_number": cty.NullVal(cty.Number), + "sort": cty.NullVal(cty.String), + "program": cty.NullVal(cty.List(cty.String)), + "inbox_ids": cty.ListVal([]cty.Value{cty.NumberIntVal(1), cty.NumberIntVal(2), cty.NumberIntVal(3)}), + "reporter": cty.NullVal(cty.List(cty.String)), + "assignee": cty.NullVal(cty.List(cty.String)), + "state": cty.NullVal(cty.List(cty.String)), + "id": cty.NullVal(cty.List(cty.Number)), + "weakness_id": cty.NullVal(cty.List(cty.Number)), + "severity": cty.NullVal(cty.List(cty.String)), + "hacker_published": cty.NullVal(cty.Bool), + "created_at__gt": cty.NullVal(cty.String), + "created_at__lt": cty.NullVal(cty.String), + "submitted_at__gt": cty.NullVal(cty.String), + "submitted_at__lt": cty.NullVal(cty.String), 
+ "triaged_at__gt": cty.NullVal(cty.String), + "triaged_at__lt": cty.NullVal(cty.String), + "triaged_at__null": cty.NullVal(cty.Bool), + "closed_at__gt": cty.NullVal(cty.String), + "closed_at__lt": cty.NullVal(cty.String), + "closed_at__null": cty.NullVal(cty.Bool), + "disclosed_at__gt": cty.NullVal(cty.String), + "disclosed_at__lt": cty.NullVal(cty.String), + "disclosed_at__null": cty.NullVal(cty.Bool), + "reporter_agreed_on_going_public": cty.NullVal(cty.Bool), + "bounty_awarded_at__gt": cty.NullVal(cty.String), + "bounty_awarded_at__lt": cty.NullVal(cty.String), + "bounty_awarded_at__null": cty.NullVal(cty.Bool), + "swag_awarded_at__gt": cty.NullVal(cty.String), + "swag_awarded_at__lt": cty.NullVal(cty.String), + "swag_awarded_at__null": cty.NullVal(cty.Bool), + "last_report_activity_at__gt": cty.NullVal(cty.String), + "last_report_activity_at__lt": cty.NullVal(cty.String), + "first_program_activity_at__gt": cty.NullVal(cty.String), + "first_program_activity_at__lt": cty.NullVal(cty.String), + "first_program_activity_at__null": cty.NullVal(cty.Bool), + "last_program_activity_at__gt": cty.NullVal(cty.String), + "last_program_activity_at__lt": cty.NullVal(cty.String), + "last_program_activity_at__null": cty.NullVal(cty.Bool), + "last_activity_at__gt": cty.NullVal(cty.String), + "last_activity_at__lt": cty.NullVal(cty.String), + "last_public_activity_at__gt": cty.NullVal(cty.String), + "last_public_activity_at__lt": cty.NullVal(cty.String), + "keyword": cty.NullVal(cty.String), + "custom_fields": cty.NullVal(cty.Map(cty.String)), + }), + }) + s.Equal("test_user", s.storedUsr) + s.Equal("test_token", s.storedTkn) + s.Len(diags, 0) + s.Equal(plugin.ListData{ + plugin.MapData{ + "id": plugin.StringData("1"), + }, + }, res) +} + +func (s *ReportsDataSourceTestSuite) TestInvalid() { + res, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "api_username": cty.StringVal("test_user"), + "api_token": cty.StringVal("test_token"), + }), + Args: cty.ObjectVal(map[string]cty.Value{ + "size": cty.NullVal(cty.Number), + "page_number": cty.NullVal(cty.Number), + "sort": cty.NullVal(cty.String), + "program": cty.NullVal(cty.List(cty.String)), + "inbox_ids": cty.NullVal(cty.List(cty.Number)), + "reporter": cty.NullVal(cty.List(cty.String)), + "assignee": cty.NullVal(cty.List(cty.String)), + "state": cty.NullVal(cty.List(cty.String)), + "id": cty.NullVal(cty.List(cty.Number)), + "weakness_id": cty.NullVal(cty.List(cty.Number)), + "severity": cty.NullVal(cty.List(cty.String)), + "hacker_published": cty.NullVal(cty.Bool), + "created_at__gt": cty.NullVal(cty.String), + "created_at__lt": cty.NullVal(cty.String), + "submitted_at__gt": cty.NullVal(cty.String), + "submitted_at__lt": cty.NullVal(cty.String), + "triaged_at__gt": cty.NullVal(cty.String), + "triaged_at__lt": cty.NullVal(cty.String), + "triaged_at__null": cty.NullVal(cty.Bool), + "closed_at__gt": cty.NullVal(cty.String), + "closed_at__lt": cty.NullVal(cty.String), + "closed_at__null": cty.NullVal(cty.Bool), + "disclosed_at__gt": cty.NullVal(cty.String), + "disclosed_at__lt": cty.NullVal(cty.String), + "disclosed_at__null": cty.NullVal(cty.Bool), + "reporter_agreed_on_going_public": cty.NullVal(cty.Bool), + "bounty_awarded_at__gt": cty.NullVal(cty.String), + "bounty_awarded_at__lt": cty.NullVal(cty.String), + "bounty_awarded_at__null": cty.NullVal(cty.Bool), + "swag_awarded_at__gt": cty.NullVal(cty.String), + "swag_awarded_at__lt": cty.NullVal(cty.String), + "swag_awarded_at__null": 
cty.NullVal(cty.Bool), + "last_report_activity_at__gt": cty.NullVal(cty.String), + "last_report_activity_at__lt": cty.NullVal(cty.String), + "first_program_activity_at__gt": cty.NullVal(cty.String), + "first_program_activity_at__lt": cty.NullVal(cty.String), + "first_program_activity_at__null": cty.NullVal(cty.Bool), + "last_program_activity_at__gt": cty.NullVal(cty.String), + "last_program_activity_at__lt": cty.NullVal(cty.String), + "last_program_activity_at__null": cty.NullVal(cty.Bool), + "last_activity_at__gt": cty.NullVal(cty.String), + "last_activity_at__lt": cty.NullVal(cty.String), + "last_public_activity_at__gt": cty.NullVal(cty.String), + "last_public_activity_at__lt": cty.NullVal(cty.String), + "keyword": cty.NullVal(cty.String), + "custom_fields": cty.NullVal(cty.Map(cty.String)), + }), + }) + s.Nil(res) + s.Equal(hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to parse arguments", + Detail: "at least one of program or inbox_ids must be provided", + }}, diags) +} + +func (s *ReportsDataSourceTestSuite) TestInvalidConfig() { + res, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "api_username": cty.NullVal(cty.String), + "api_token": cty.NullVal(cty.String), + }), + Args: cty.ObjectVal(map[string]cty.Value{ + "size": cty.NullVal(cty.Number), + "page_number": cty.NullVal(cty.Number), + "sort": cty.NullVal(cty.String), + "program": cty.NullVal(cty.List(cty.String)), + "inbox_ids": cty.NullVal(cty.List(cty.Number)), + "reporter": cty.NullVal(cty.List(cty.String)), + "assignee": cty.NullVal(cty.List(cty.String)), + "state": cty.NullVal(cty.List(cty.String)), + "id": cty.NullVal(cty.List(cty.Number)), + "weakness_id": cty.NullVal(cty.List(cty.Number)), + "severity": cty.NullVal(cty.List(cty.String)), + "hacker_published": cty.NullVal(cty.Bool), + "created_at__gt": cty.NullVal(cty.String), + "created_at__lt": cty.NullVal(cty.String), + "submitted_at__gt": cty.NullVal(cty.String), + "submitted_at__lt": cty.NullVal(cty.String), + "triaged_at__gt": cty.NullVal(cty.String), + "triaged_at__lt": cty.NullVal(cty.String), + "triaged_at__null": cty.NullVal(cty.Bool), + "closed_at__gt": cty.NullVal(cty.String), + "closed_at__lt": cty.NullVal(cty.String), + "closed_at__null": cty.NullVal(cty.Bool), + "disclosed_at__gt": cty.NullVal(cty.String), + "disclosed_at__lt": cty.NullVal(cty.String), + "disclosed_at__null": cty.NullVal(cty.Bool), + "reporter_agreed_on_going_public": cty.NullVal(cty.Bool), + "bounty_awarded_at__gt": cty.NullVal(cty.String), + "bounty_awarded_at__lt": cty.NullVal(cty.String), + "bounty_awarded_at__null": cty.NullVal(cty.Bool), + "swag_awarded_at__gt": cty.NullVal(cty.String), + "swag_awarded_at__lt": cty.NullVal(cty.String), + "swag_awarded_at__null": cty.NullVal(cty.Bool), + "last_report_activity_at__gt": cty.NullVal(cty.String), + "last_report_activity_at__lt": cty.NullVal(cty.String), + "first_program_activity_at__gt": cty.NullVal(cty.String), + "first_program_activity_at__lt": cty.NullVal(cty.String), + "first_program_activity_at__null": cty.NullVal(cty.Bool), + "last_program_activity_at__gt": cty.NullVal(cty.String), + "last_program_activity_at__lt": cty.NullVal(cty.String), + "last_program_activity_at__null": cty.NullVal(cty.Bool), + "last_activity_at__gt": cty.NullVal(cty.String), + "last_activity_at__lt": cty.NullVal(cty.String), + "last_public_activity_at__gt": cty.NullVal(cty.String), + "last_public_activity_at__lt": cty.NullVal(cty.String), + "keyword": cty.NullVal(cty.String), + 
"custom_fields": cty.NullVal(cty.Map(cty.String)), + }), + }) + s.Nil(res) + s.Equal(hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to create client", + Detail: "api_username is required in configuration", + }}, diags) +} diff --git a/internal/hackerone/plugin.go b/internal/hackerone/plugin.go new file mode 100644 index 00000000..ea153b34 --- /dev/null +++ b/internal/hackerone/plugin.go @@ -0,0 +1,47 @@ +package hackerone + +import ( + "fmt" + + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/internal/hackerone/client" + "github.com/blackstork-io/fabric/plugin" +) + +const ( + minPage = 1 + pageSize = 25 +) + +type ClientLoadFn func(user, token string) client.Client + +var DefaultClientLoader ClientLoadFn = client.New + +func Plugin(version string, loader ClientLoadFn) *plugin.Schema { + if loader == nil { + loader = DefaultClientLoader + } + return &plugin.Schema{ + Name: "blackstork/hackerone", + Version: version, + DataSources: plugin.DataSources{ + "hackerone_reports": makeHackerOneReportsDataSchema(loader), + }, + } +} + +func makeClient(loader ClientLoadFn, cfg cty.Value) (client.Client, error) { + if cfg.IsNull() { + return nil, fmt.Errorf("configuration is required") + } + user := cfg.GetAttr("api_username") + if user.IsNull() || user.AsString() == "" { + return nil, fmt.Errorf("api_username is required in configuration") + } + token := cfg.GetAttr("api_token") + if token.IsNull() || token.AsString() == "" { + return nil, fmt.Errorf("api_token is required in configuration") + } + return loader(user.AsString(), token.AsString()), nil +} diff --git a/internal/hackerone/plugin_test.go b/internal/hackerone/plugin_test.go new file mode 100644 index 00000000..24f55de5 --- /dev/null +++ b/internal/hackerone/plugin_test.go @@ -0,0 +1,14 @@ +package hackerone + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestPlugin_Schema(t *testing.T) { + schema := Plugin("1.2.3", nil) + assert.Equal(t, "blackstork/hackerone", schema.Name) + assert.Equal(t, "1.2.3", schema.Version) + assert.NotNil(t, schema.DataSources["hackerone_reports"]) +} From 59ea6e39ea044404c453f87f3157deb052cdef4c Mon Sep 17 00:00:00 2001 From: dobarx Date: Tue, 20 Feb 2024 10:29:42 +0200 Subject: [PATCH 4/8] internal/splunk: add splunk_search data source --- internal/splunk/client/client.go | 117 +++++++++++ internal/splunk/client/client_test.go | 179 ++++++++++++++++ internal/splunk/client/dto.go | 76 +++++++ internal/splunk/cmd/main.go | 14 ++ internal/splunk/data_splunk_search.go | 175 ++++++++++++++++ internal/splunk/data_splunk_search_test.go | 233 +++++++++++++++++++++ internal/splunk/plugin.go | 50 +++++ internal/splunk/plugin_test.go | 14 ++ 8 files changed, 858 insertions(+) create mode 100644 internal/splunk/client/client.go create mode 100644 internal/splunk/client/client_test.go create mode 100644 internal/splunk/client/dto.go create mode 100644 internal/splunk/cmd/main.go create mode 100644 internal/splunk/data_splunk_search.go create mode 100644 internal/splunk/data_splunk_search_test.go create mode 100644 internal/splunk/plugin.go create mode 100644 internal/splunk/plugin_test.go diff --git a/internal/splunk/client/client.go b/internal/splunk/client/client.go new file mode 100644 index 00000000..9708698f --- /dev/null +++ b/internal/splunk/client/client.go @@ -0,0 +1,117 @@ +package client + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "strings" + + "github.com/google/go-querystring/query" +) + +type Client interface { + 
CreateSearchJob(ctx context.Context, req *CreateSearchJobReq) (*CreateSearchJobRes, error) + GetSearchJobByID(ctx context.Context, req *GetSearchJobByIDReq) (*GetSearchJobByIDRes, error) + GetSearchJobResults(ctx context.Context, req *GetSearchJobResultsReq) (*GetSearchJobResultsRes, error) +} + +type client struct { + token string + url string +} + +func New(token, host, deployment string) Client { + url := "https://" + host + ":8089" + if deployment != "" { + url = "https://" + deployment + ".splunkcloud.com:8089" + } + return &client{ + token: token, + url: url, + } +} + +func (c *client) auth(r *http.Request) { + r.Header.Add("Authorization", "Bearer "+c.token) +} + +func (c *client) CreateSearchJob(ctx context.Context, req *CreateSearchJobReq) (*CreateSearchJobRes, error) { + v, err := query.Values(req) + if err != nil { + return nil, err + } + r, err := http.NewRequestWithContext(ctx, http.MethodPost, c.url+"/services/search/jobs", strings.NewReader(v.Encode())) + if err != nil { + return nil, err + } + c.auth(r) + r.Header.Add("Accept", "application/json") + r.Header.Add("Content-Type", "application/x-www-form-urlencoded") + client := http.Client{} + res, err := client.Do(r) + if err != nil { + return nil, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return nil, fmt.Errorf("splunk client returned status code: %d", res.StatusCode) + } + + var data CreateSearchJobRes + if err := json.NewDecoder(res.Body).Decode(&data); err != nil { + return nil, err + } + return &data, nil +} + +func (c *client) GetSearchJobByID(ctx context.Context, req *GetSearchJobByIDReq) (*GetSearchJobByIDRes, error) { + r, err := http.NewRequestWithContext(ctx, http.MethodGet, c.url+"/services/search/jobs/"+req.ID, nil) + if err != nil { + return nil, err + } + c.auth(r) + r.Header.Add("Accept", "application/json") + client := http.Client{} + res, err := client.Do(r) + if err != nil { + return nil, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return nil, fmt.Errorf("splunk client returned status code: %d", res.StatusCode) + } + var data GetSearchJobByIDRes + if err := json.NewDecoder(res.Body).Decode(&data); err != nil { + return nil, err + } + return &data, nil +} + +func (c *client) GetSearchJobResults(ctx context.Context, req *GetSearchJobResultsReq) (*GetSearchJobResultsRes, error) { + r, err := http.NewRequestWithContext(ctx, http.MethodGet, c.url+"/services/search/v2/jobs/"+req.ID+"/results", nil) + if err != nil { + return nil, err + } + q, err := query.Values(req) + if err != nil { + return nil, err + } + r.URL.RawQuery = q.Encode() + c.auth(r) + r.Header.Add("Accept", "application/json") + client := http.Client{} + res, err := client.Do(r) + if err != nil { + return nil, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return nil, fmt.Errorf("splunk client returned status code: %d", res.StatusCode) + } + var data GetSearchJobResultsRes + if err := json.NewDecoder(res.Body).Decode(&data); err != nil { + return nil, err + } + return &data, nil +} diff --git a/internal/splunk/client/client_test.go b/internal/splunk/client/client_test.go new file mode 100644 index 00000000..347f6172 --- /dev/null +++ b/internal/splunk/client/client_test.go @@ -0,0 +1,179 @@ +package client + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/suite" +) + +type ClientTestSuite struct { + suite.Suite + ctx context.Context + cancel context.CancelFunc +} + +func (s *ClientTestSuite) SetupTest() { + 
s.ctx, s.cancel = context.WithCancel(context.Background()) +} + +func (s *ClientTestSuite) TearDownTest() { + s.cancel() +} + +func TestClientTestSuite(t *testing.T) { + suite.Run(t, new(ClientTestSuite)) +} + +func (s *ClientTestSuite) mock(fn http.HandlerFunc, tkn string) (Client, *httptest.Server) { + srv := httptest.NewServer(fn) + cli := &client{ + url: srv.URL, + token: tkn, + } + return cli, srv +} + +func (s *ClientTestSuite) TestAuth() { + client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) { + s.Equal("Bearer test_token", r.Header.Get("Authorization")) + }, "test_token") + defer srv.Close() + client.GetSearchJobByID(s.ctx, &GetSearchJobByIDReq{}) +} + +func (s *ClientTestSuite) TestCreateSearchJob() { + client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) { + s.Equal(http.MethodPost, r.Method) + s.Equal("/services/search/jobs", r.URL.Path) + s.Equal("Bearer test_token", r.Header.Get("Authorization")) + err := r.ParseForm() + s.Require().NoError(err) + s.Equal("test_id", r.FormValue("id")) + s.Equal("test_exec_mode", r.FormValue("exec_mode")) + s.Equal("test_search", r.FormValue("search")) + s.Equal("1", r.FormValue("status_buckets")) + s.Equal("2", r.FormValue("max_count")) + s.Equal("test_rf", r.Form["rf"][0]) + s.Equal("test_rf", r.Form["rf"][1]) + s.Equal("test_earliest_time", r.FormValue("earliest_time")) + s.Equal("test_latest_time", r.FormValue("latest_time")) + w.Write([]byte(`{"sid":"test_sid"}`)) + }, "test_token") + defer srv.Close() + res, err := client.CreateSearchJob(s.ctx, &CreateSearchJobReq{ + ID: "test_id", + ExecMode: "test_exec_mode", + Search: "test_search", + StatusBuckets: Int(1), + MaxCount: Int(2), + RF: []string{"test_rf", "test_rf"}, + EarliestTime: String("test_earliest_time"), + LatestTime: String("test_latest_time"), + }) + s.Require().NoError(err) + s.Equal("test_sid", res.Sid) +} + +func (s *ClientTestSuite) TestCreateSearchJobError() { + client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + }, "test_token") + defer srv.Close() + _, err := client.CreateSearchJob(s.ctx, &CreateSearchJobReq{}) + s.Require().Error(err) +} + +func (s *ClientTestSuite) TestGetSearchJobByID() { + client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) { + s.Equal(http.MethodGet, r.Method) + s.Equal("/services/search/jobs/test_id", r.URL.Path) + s.Equal("Bearer test_token", r.Header.Get("Authorization")) + w.Write([]byte(`{"dispatchState":"QUEUED"}`)) + }, "test_token") + defer srv.Close() + res, err := client.GetSearchJobByID(s.ctx, &GetSearchJobByIDReq{ID: "test_id"}) + s.Require().NoError(err) + s.Equal(DispatchStateQueued, res.DispatchState) +} + +func (s *ClientTestSuite) TestGetSearchJobByIDError() { + client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + }, "test_token") + defer srv.Close() + _, err := client.GetSearchJobByID(s.ctx, &GetSearchJobByIDReq{ID: "test_id"}) + s.Require().Error(err) +} + +func (s *ClientTestSuite) TestDispatchStateWait() { + s.True(DispatchStateQueued.Wait()) + s.True(DispatchStateParsing.Wait()) + s.True(DispatchStateRunning.Wait()) + s.True(DispatchStateFinalizing.Wait()) + s.False(DispatchStateDone.Wait()) + s.False(DispatchStatePause.Wait()) + s.False(DispatchStateInternalCancel.Wait()) + s.False(DispatchStateUserCancel.Wait()) + s.False(DispatchStateBadInputCancel.Wait()) + s.False(DispatchStateQuit.Wait()) + s.False(DispatchStateFailed.Wait()) +} + +func (s 
*ClientTestSuite) TestDispatchStateDone() { + s.True(DispatchStateDone.Done()) + s.False(DispatchStateQueued.Done()) + s.False(DispatchStateParsing.Done()) + s.False(DispatchStateRunning.Done()) + s.False(DispatchStateFinalizing.Done()) + s.False(DispatchStatePause.Done()) + s.False(DispatchStateInternalCancel.Done()) + s.False(DispatchStateUserCancel.Done()) + s.False(DispatchStateBadInputCancel.Done()) + s.False(DispatchStateQuit.Done()) + s.False(DispatchStateFailed.Done()) +} + +func (s *ClientTestSuite) TestDispatchStateFailed() { + s.False(DispatchStateQueued.Failed()) + s.False(DispatchStateParsing.Failed()) + s.False(DispatchStateRunning.Failed()) + s.False(DispatchStateFinalizing.Failed()) + s.False(DispatchStateDone.Failed()) + s.True(DispatchStatePause.Failed()) + s.True(DispatchStateFailed.Failed()) + s.True(DispatchStateInternalCancel.Failed()) + s.True(DispatchStateUserCancel.Failed()) + s.True(DispatchStateBadInputCancel.Failed()) + s.True(DispatchStateQuit.Failed()) +} + +func (s *ClientTestSuite) TestGetSearchJobResults() { + client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) { + s.Equal(http.MethodGet, r.Method) + s.Equal("/services/search/v2/jobs/test_id/results", r.URL.Path) + s.Equal("output_mode=json", r.URL.RawQuery) + s.Equal("Bearer test_token", r.Header.Get("Authorization")) + w.Write([]byte(`{"results":[{"test_key":"test_value"}]}`)) + }, "test_token") + defer srv.Close() + res, err := client.GetSearchJobResults(s.ctx, &GetSearchJobResultsReq{ + ID: "test_id", + OutputMode: "json", + }) + s.Require().NoError(err) + s.Equal([]any{ + map[string]any{"test_key": "test_value"}, + }, res.Results) +} + +func (s *ClientTestSuite) TestGetSearchJobResultsError() { + client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + }, "test_token") + defer srv.Close() + _, err := client.GetSearchJobResults(s.ctx, &GetSearchJobResultsReq{ID: "test_id"}) + s.Require().Error(err) +} diff --git a/internal/splunk/client/dto.go b/internal/splunk/client/dto.go new file mode 100644 index 00000000..4ece6e20 --- /dev/null +++ b/internal/splunk/client/dto.go @@ -0,0 +1,76 @@ +package client + +import "slices" + +type CreateSearchJobReq struct { + ID string `url:"id"` + ExecMode string `url:"exec_mode"` + Search string `url:"search"` + StatusBuckets *int `url:"status_buckets,omitempty"` + MaxCount *int `url:"max_count,omitempty"` + RF []string `url:"rf,omitempty"` + EarliestTime *string `url:"earliest_time,omitempty"` + LatestTime *string `url:"latest_time,omitempty"` +} + +func String(s string) *string { + return &s +} + +func Int(i int) *int { + return &i +} + +type CreateSearchJobRes struct { + Sid string `json:"sid"` +} + +type GetSearchJobByIDReq struct { + ID string +} + +type DispatchState string + +const ( + DispatchStateQueued DispatchState = "QUEUED" + DispatchStateParsing DispatchState = "PARSING" + DispatchStateRunning DispatchState = "RUNNING" + DispatchStateFinalizing DispatchState = "FINALIZING" + DispatchStateDone DispatchState = "DONE" + DispatchStatePause DispatchState = "PAUSE" + DispatchStateInternalCancel DispatchState = "INTERNAL_CANCEL" + DispatchStateUserCancel DispatchState = "USER_CANCEL" + DispatchStateBadInputCancel DispatchState = "BAD_INPUT_CANCEL" + DispatchStateQuit DispatchState = "QUIT" + DispatchStateFailed DispatchState = "FAILED" +) + +func (d DispatchState) Wait() bool { + return slices.Contains([]DispatchState{ + DispatchStateQueued, + DispatchStateParsing, + 
DispatchStateRunning, + DispatchStateFinalizing, + }, d) +} + +func (d DispatchState) Done() bool { + return DispatchStateDone == d +} + +func (d DispatchState) Failed() bool { + return !d.Wait() && !d.Done() +} + +type GetSearchJobByIDRes struct { + DispatchState DispatchState `json:"dispatchState"` +} + +type GetSearchJobResultsReq struct { + ID string `url:"-"` + OutputMode string `url:"output_mode"` +} + +type GetSearchJobResultsRes struct { + Results []any `json:"results"` +} diff --git a/internal/splunk/cmd/main.go b/internal/splunk/cmd/main.go new file mode 100644 index 00000000..b18f5421 --- /dev/null +++ b/internal/splunk/cmd/main.go @@ -0,0 +1,14 @@ +package main + +import ( + "github.com/blackstork-io/fabric/internal/splunk" + pluginapiv1 "github.com/blackstork-io/fabric/plugin/pluginapi/v1" +) + +var version string + +func main() { + pluginapiv1.Serve( + splunk.Plugin(version, splunk.DefaultClientLoader), + ) +} diff --git a/internal/splunk/data_splunk_search.go b/internal/splunk/data_splunk_search.go new file mode 100644 index 00000000..5706284b --- /dev/null +++ b/internal/splunk/data_splunk_search.go @@ -0,0 +1,175 @@ +package splunk + +import ( + "context" + "crypto/rand" + "encoding/base32" + "fmt" + "time" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hcldec" + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/internal/splunk/client" + "github.com/blackstork-io/fabric/plugin" +) + +func makeSplunkSearchDataSchema(loader ClientLoadFn) *plugin.DataSource { + return &plugin.DataSource{ + Config: hcldec.ObjectSpec{ + "auth_token": &hcldec.AttrSpec{ + Name: "auth_token", + Type: cty.String, + Required: true, + }, + "host": &hcldec.AttrSpec{ + Name: "host", + Type: cty.String, + Required: false, + }, + "deployment_name": &hcldec.AttrSpec{ + Name: "deployment_name", + Type: cty.String, + Required: false, + }, + }, + Args: hcldec.ObjectSpec{ + "search_query": &hcldec.AttrSpec{ + Name: "search_query", + Type: cty.String, + Required: true, + }, + "max_count": &hcldec.AttrSpec{ + Name: "max_count", + Type: cty.Number, + Required: false, + }, + "status_buckets": &hcldec.AttrSpec{ + Name: "status_buckets", + Type: cty.Number, + Required: false, + }, + "rf": &hcldec.AttrSpec{ + Name: "rf", + Type: cty.List(cty.String), + Required: false, + }, + "earliest_time": &hcldec.AttrSpec{ + Name: "earliest_time", + Type: cty.String, + Required: false, + }, + "latest_time": &hcldec.AttrSpec{ + Name: "latest_time", + Type: cty.String, + Required: false, + }, + }, + DataFunc: fetchSplunkSearchData(loader), + } +} + +func fetchSplunkSearchData(loader ClientLoadFn) plugin.RetrieveDataFunc { + return func(ctx context.Context, params *plugin.RetrieveDataParams) (plugin.Data, hcl.Diagnostics) { + cli, err := makeClient(loader, params.Config) + if err != nil { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to create client", + Detail: err.Error(), + }} + } + + result, err := search(cli, ctx, params.Args) + if err != nil { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to search", + Detail: err.Error(), + }} + } + return result, nil + } +} + +func search(cli client.Client, ctx context.Context, args cty.Value) (plugin.Data, error) { + id, err := randID() + if err != nil { + return nil, err + } + req := &client.CreateSearchJobReq{ + ID: id, + ExecMode: "blocking", + } + if attr := args.GetAttr("search_query"); attr.IsNull() || attr.AsString() == "" { + return nil, fmt.Errorf("search_query is required") + } 
else { + req.Search = attr.AsString() + } + if attr := args.GetAttr("max_count"); !attr.IsNull() { + n, _ := attr.AsBigFloat().Int64() + req.MaxCount = client.Int(int(n)) + } + if attr := args.GetAttr("status_buckets"); !attr.IsNull() { + n, _ := attr.AsBigFloat().Int64() + req.StatusBuckets = client.Int(int(n)) + } + if attr := args.GetAttr("rf"); !attr.IsNull() { + req.RF = make([]string, attr.LengthInt()) + for i, v := range attr.AsValueSlice() { + req.RF[i] = v.AsString() + } + } + if attr := args.GetAttr("earliest_time"); !attr.IsNull() { + req.EarliestTime = client.String(attr.AsString()) + } + if attr := args.GetAttr("latest_time"); !attr.IsNull() { + req.LatestTime = client.String(attr.AsString()) + } + res, err := cli.CreateSearchJob(ctx, req) + if err != nil { + return nil, err + } + if res.Sid != id { + return nil, fmt.Errorf("unexpected search job id: %s", res.Sid) + } + for { + select { + case <-ctx.Done(): + return nil, ctx.Err() + case <-time.After(1 * time.Second): + res, err := cli.GetSearchJobByID(ctx, &client.GetSearchJobByIDReq{ID: id}) + if err != nil { + return nil, err + } + if res.DispatchState.Failed() { + return nil, fmt.Errorf("search job failed: %s", res.DispatchState) + } + if res.DispatchState.Done() { + res, err := cli.GetSearchJobResults(ctx, &client.GetSearchJobResultsReq{ + ID: id, + OutputMode: "json", + }) + if err != nil { + return nil, err + } + result, err := plugin.ParseDataAny(res.Results) + if err != nil { + return nil, err + } + return result, nil + } + } + } +} + +func randID() (string, error) { + var b [16]byte + _, err := rand.Read(b[:]) + if err != nil { + return "", err + } + rndStr := base32.StdEncoding.WithPadding(base32.NoPadding).EncodeToString(b[:]) + return fmt.Sprintf("fabric_%s", rndStr), nil +} diff --git a/internal/splunk/data_splunk_search_test.go b/internal/splunk/data_splunk_search_test.go new file mode 100644 index 00000000..2017787e --- /dev/null +++ b/internal/splunk/data_splunk_search_test.go @@ -0,0 +1,233 @@ +package splunk + +import ( + "context" + "fmt" + "strings" + "testing" + + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/internal/splunk/client" + client_mocks "github.com/blackstork-io/fabric/mocks/internalpkg/splunk/client" + "github.com/blackstork-io/fabric/plugin" +) + +type SearchDataSourceTestSuite struct { + suite.Suite + schema *plugin.DataSource + ctx context.Context + cli *client_mocks.Client + storedHost string + storedToken string + storedDeployment string +} + +func TestSearchDataSourceTestSuite(t *testing.T) { + suite.Run(t, new(SearchDataSourceTestSuite)) +} + +func (s *SearchDataSourceTestSuite) SetupSuite() { + s.schema = makeSplunkSearchDataSchema(func(token, host, deployment string) client.Client { + s.storedHost = host + s.storedToken = token + s.storedDeployment = deployment + return s.cli + }) + s.ctx = context.Background() +} + +func (s *SearchDataSourceTestSuite) SetupTest() { + s.cli = &client_mocks.Client{} +} + +func (s *SearchDataSourceTestSuite) TearDownTest() { + s.cli.AssertExpectations(s.T()) +} + +func (s *SearchDataSourceTestSuite) TestSchema() { + s.Require().NotNil(s.schema) + s.NotNil(s.schema.Config) + s.NotNil(s.schema.Args) + s.NotNil(s.schema.DataFunc) +} + +func (s *SearchDataSourceTestSuite) TestSearch() { + createJobRes := new(client.CreateSearchJobRes) + getJobReq := &client.GetSearchJobByIDReq{} + getJobResultsReq := &client.GetSearchJobResultsReq{ + OutputMode: "json", + } + 
s.cli.On("CreateSearchJob", mock.Anything, mock.MatchedBy(func(req *client.CreateSearchJobReq) bool { + createJobRes.Sid = req.ID + getJobReq.ID = req.ID + getJobResultsReq.ID = req.ID + s.True(strings.HasPrefix(req.ID, "fabric_"), "ID should be prefixed with 'fabric_'") + s.Equal("test_query", req.Search) + s.Equal("blocking", req.ExecMode) + s.Equal(client.Int(1), req.StatusBuckets) + s.Equal(client.Int(2), req.MaxCount) + s.Equal([]string{"test_rf_1", "test_rf_2"}, req.RF) + s.Equal(client.String("test_earliest_time"), req.EarliestTime) + s.Equal(client.String("test_latest_time"), req.LatestTime) + return true + })). + Return(createJobRes, nil) + + s.cli.On("GetSearchJobByID", mock.Anything, getJobReq). + Return(&client.GetSearchJobByIDRes{ + DispatchState: client.DispatchStateDone, + }, nil) + + s.cli.On("GetSearchJobResults", mock.Anything, getJobResultsReq). + Return(&client.GetSearchJobResultsRes{ + Results: []any{ + map[string]any{"key": "value"}, + }, + }, nil) + + data, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "auth_token": cty.StringVal("test_token"), + "host": cty.StringVal("test_host"), + "deployment_name": cty.NullVal(cty.String), + }), + Args: cty.ObjectVal(map[string]cty.Value{ + "search_query": cty.StringVal("test_query"), + "status_buckets": cty.NumberIntVal(1), + "max_count": cty.NumberIntVal(2), + "rf": cty.ListVal([]cty.Value{cty.StringVal("test_rf_1"), cty.StringVal("test_rf_2")}), + "earliest_time": cty.StringVal("test_earliest_time"), + "latest_time": cty.StringVal("test_latest_time"), + }), + }) + s.Require().Len(diags, 0) + s.Equal(plugin.ListData{ + plugin.MapData{ + "key": plugin.StringData("value"), + }, + }, data) + + s.Equal("test_token", s.storedToken) + s.Equal("test_host", s.storedHost) + s.Empty(s.storedDeployment) +} + +func (s *SearchDataSourceTestSuite) TestSearchError() { + resErr := fmt.Errorf("test error") + s.cli.On("CreateSearchJob", mock.Anything, mock.Anything). + Return(nil, resErr) + + _, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "auth_token": cty.StringVal("test_token"), + "host": cty.StringVal("test_host"), + "deployment_name": cty.NullVal(cty.String), + }), + Args: cty.ObjectVal(map[string]cty.Value{ + "search_query": cty.StringVal("test_query"), + "status_buckets": cty.NumberIntVal(1), + "max_count": cty.NumberIntVal(2), + "rf": cty.ListVal([]cty.Value{cty.StringVal("test_rf_1"), cty.StringVal("test_rf_2")}), + "earliest_time": cty.StringVal("test_earliest_time"), + "latest_time": cty.StringVal("test_latest_time"), + }), + }) + s.Require().Len(diags, 1) + s.Equal("Failed to search", diags[0].Summary) +} + +func (s *SearchDataSourceTestSuite) TestSearchJobError() { + createJobRes := new(client.CreateSearchJobRes) + resErr := fmt.Errorf("test error") + s.cli.On("CreateSearchJob", mock.Anything, mock.MatchedBy(func(req *client.CreateSearchJobReq) bool { + createJobRes.Sid = req.ID + return true + })). + Return(createJobRes, nil) + + s.cli.On("GetSearchJobByID", mock.Anything, mock.Anything). 
+ Return(nil, resErr) + + _, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "auth_token": cty.StringVal("test_token"), + "host": cty.StringVal("test_host"), + "deployment_name": cty.NullVal(cty.String), + }), + Args: cty.ObjectVal(map[string]cty.Value{ + "search_query": cty.StringVal("test_query"), + "status_buckets": cty.NumberIntVal(1), + "max_count": cty.NumberIntVal(2), + "rf": cty.ListVal([]cty.Value{cty.StringVal("test_rf_1"), cty.StringVal("test_rf_2")}), + "earliest_time": cty.StringVal("test_earliest_time"), + "latest_time": cty.StringVal("test_latest_time"), + }), + }) + s.Require().Len(diags, 1) + s.Equal("Failed to search", diags[0].Summary) +} + +func (s *SearchDataSourceTestSuite) TestSearchJobResultsError() { + createJobRes := new(client.CreateSearchJobRes) + getJobReq := &client.GetSearchJobByIDReq{} + getJobResultsReq := &client.GetSearchJobResultsReq{ + OutputMode: "json", + } + s.cli.On("CreateSearchJob", mock.Anything, mock.MatchedBy(func(req *client.CreateSearchJobReq) bool { + createJobRes.Sid = req.ID + getJobReq.ID = req.ID + getJobResultsReq.ID = req.ID + return true + })). + Return(createJobRes, nil) + + s.cli.On("GetSearchJobByID", mock.Anything, getJobReq). + Return(&client.GetSearchJobByIDRes{ + DispatchState: client.DispatchStateDone, + }, nil) + + resErr := fmt.Errorf("test error") + s.cli.On("GetSearchJobResults", mock.Anything, getJobResultsReq). + Return(nil, resErr) + + _, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "auth_token": cty.StringVal("test_token"), + "host": cty.StringVal("test_host"), + "deployment_name": cty.NullVal(cty.String), + }), + Args: cty.ObjectVal(map[string]cty.Value{ + "search_query": cty.StringVal("test_query"), + "status_buckets": cty.NumberIntVal(1), + "max_count": cty.NumberIntVal(2), + "rf": cty.ListVal([]cty.Value{cty.StringVal("test_rf_1"), cty.StringVal("test_rf_2")}), + "earliest_time": cty.StringVal("test_earliest_time"), + "latest_time": cty.StringVal("test_latest_time"), + }), + }) + s.Require().Len(diags, 1) + s.Equal("Failed to search", diags[0].Summary) +} + +func (s *SearchDataSourceTestSuite) TestSearchEmptyQuery() { + _, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "auth_token": cty.StringVal("test_token"), + "host": cty.StringVal("test_host"), + "deployment_name": cty.NullVal(cty.String), + }), + Args: cty.ObjectVal(map[string]cty.Value{ + "search_query": cty.StringVal(""), + "status_buckets": cty.NumberIntVal(1), + "max_count": cty.NumberIntVal(2), + "rf": cty.ListVal([]cty.Value{cty.StringVal("test_rf_1"), cty.StringVal("test_rf_2")}), + "earliest_time": cty.StringVal("test_earliest_time"), + "latest_time": cty.StringVal("test_latest_time"), + }), + }) + s.Require().Len(diags, 1) + s.Equal("search_query is required", diags[0].Detail) +} diff --git a/internal/splunk/plugin.go b/internal/splunk/plugin.go new file mode 100644 index 00000000..9426b006 --- /dev/null +++ b/internal/splunk/plugin.go @@ -0,0 +1,50 @@ +package splunk + +import ( + "fmt" + + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/internal/splunk/client" + "github.com/blackstork-io/fabric/plugin" +) + +type ClientLoadFn func(token, host, deployment string) client.Client + +var DefaultClientLoader ClientLoadFn = client.New + +func Plugin(version string, loader ClientLoadFn) *plugin.Schema { + if loader == nil { + loader = 
DefaultClientLoader + } + return &plugin.Schema{ + Name: "blackstork/splunk", + Version: version, + DataSources: plugin.DataSources{ + "splunk_search": makeSplunkSearchDataSchema(loader), + }, + } +} + +func makeClient(loader ClientLoadFn, cfg cty.Value) (client.Client, error) { + if cfg.IsNull() { + return nil, fmt.Errorf("configuration is required") + } + + token := cfg.GetAttr("auth_token") + if token.IsNull() || token.AsString() == "" { + return nil, fmt.Errorf("auth_token is required in configuration") + } + host := cfg.GetAttr("host") + if host.IsNull() { + host = cty.StringVal("") + } + deployment := cfg.GetAttr("deployment_name") + if deployment.IsNull() { + deployment = cty.StringVal("") + } + if host.AsString() == "" && deployment.AsString() == "" { + return nil, fmt.Errorf("host or deployment_name is required in configuration") + } + return loader(token.AsString(), host.AsString(), deployment.AsString()), nil +} diff --git a/internal/splunk/plugin_test.go b/internal/splunk/plugin_test.go new file mode 100644 index 00000000..3a96f794 --- /dev/null +++ b/internal/splunk/plugin_test.go @@ -0,0 +1,14 @@ +package splunk + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestPlugin_Schema(t *testing.T) { + schema := Plugin("1.2.3", nil) + assert.Equal(t, "blackstork/splunk", schema.Name) + assert.Equal(t, "1.2.3", schema.Version) + assert.NotNil(t, schema.DataSources["splunk_search"]) +} From cb7ddd9d0931aee516d53b008689ef1951cb05fd Mon Sep 17 00:00:00 2001 From: dobarx Date: Tue, 20 Feb 2024 10:30:54 +0200 Subject: [PATCH 5/8] internal/stixview: add stixview content provider --- internal/stixview/cmd/main.go | 14 ++ internal/stixview/content_stixview.go | 237 +++++++++++++++++++++ internal/stixview/content_stixview_test.go | 137 ++++++++++++ internal/stixview/plugin.go | 15 ++ internal/stixview/plugin_test.go | 14 ++ internal/stixview/stixview.gohtml | 39 ++++ 6 files changed, 456 insertions(+) create mode 100644 internal/stixview/cmd/main.go create mode 100644 internal/stixview/content_stixview.go create mode 100644 internal/stixview/content_stixview_test.go create mode 100644 internal/stixview/plugin.go create mode 100644 internal/stixview/plugin_test.go create mode 100644 internal/stixview/stixview.gohtml diff --git a/internal/stixview/cmd/main.go b/internal/stixview/cmd/main.go new file mode 100644 index 00000000..ce2712b2 --- /dev/null +++ b/internal/stixview/cmd/main.go @@ -0,0 +1,14 @@ +package main + +import ( + "github.com/blackstork-io/fabric/internal/stixview" + pluginapiv1 "github.com/blackstork-io/fabric/plugin/pluginapi/v1" +) + +var version string + +func main() { + pluginapiv1.Serve( + stixview.Plugin(version), + ) +} diff --git a/internal/stixview/content_stixview.go b/internal/stixview/content_stixview.go new file mode 100644 index 00000000..a4c1c6a8 --- /dev/null +++ b/internal/stixview/content_stixview.go @@ -0,0 +1,237 @@ +package stixview + +import ( + "bytes" + "context" + "crypto/rand" + _ "embed" + "encoding/base32" + "encoding/json" + "fmt" + "text/template" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hcldec" + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/plugin" +) + +//go:embed stixview.gohtml +var stixViewTmplStr string + +var stixViewTmpl *template.Template + +func init() { + stixViewTmpl = template.Must(template.New("stixview").Funcs(template.FuncMap{ + "json": func(v interface{}) string { + data, err := json.Marshal(v) + if err != nil { + return fmt.Sprintf("error: %s", err) + } + 
return string(data)
+		},
+	}).Parse(stixViewTmplStr))
+}
+
+func makeStixViewContentProvider() *plugin.ContentProvider {
+	return &plugin.ContentProvider{
+		Args: hcldec.ObjectSpec{
+			"gist_id": &hcldec.AttrSpec{
+				Name:     "gist_id",
+				Type:     cty.String,
+				Required: false,
+			},
+			"stix_url": &hcldec.AttrSpec{
+				Name:     "stix_url",
+				Type:     cty.String,
+				Required: false,
+			},
+			"caption": &hcldec.AttrSpec{
+				Name:     "caption",
+				Type:     cty.String,
+				Required: false,
+			},
+			"show_footer": &hcldec.AttrSpec{
+				Name:     "show_footer",
+				Type:     cty.Bool,
+				Required: false,
+			},
+			"show_sidebar": &hcldec.AttrSpec{
+				Name:     "show_sidebar",
+				Type:     cty.Bool,
+				Required: false,
+			},
+			"show_tlp_as_tags": &hcldec.AttrSpec{
+				Name:     "show_tlp_as_tags",
+				Type:     cty.Bool,
+				Required: false,
+			},
+			"show_marking_nodes": &hcldec.AttrSpec{
+				Name:     "show_marking_nodes",
+				Type:     cty.Bool,
+				Required: false,
+			},
+			"show_labels": &hcldec.AttrSpec{
+				Name:     "show_labels",
+				Type:     cty.Bool,
+				Required: false,
+			},
+			"show_idrefs": &hcldec.AttrSpec{
+				Name:     "show_idrefs",
+				Type:     cty.Bool,
+				Required: false,
+			},
+			"width": &hcldec.AttrSpec{
+				Name:     "width",
+				Type:     cty.Number,
+				Required: false,
+			},
+			"height": &hcldec.AttrSpec{
+				Name:     "height",
+				Type:     cty.Number,
+				Required: false,
+			},
+		},
+		ContentFunc: renderStixView,
+	}
+}
+
+func renderStixView(ctx context.Context, params *plugin.ProvideContentParams) (*plugin.Content, hcl.Diagnostics) {
+	args, err := parseStixViewArgs(params.Args)
+	if err != nil {
+		return nil, hcl.Diagnostics{{
+			Severity: hcl.DiagError,
+			Summary:  "Failed to parse arguments",
+			Detail:   err.Error(),
+		}}
+	}
+	var uid [16]byte
+	_, err = rand.Read(uid[:])
+	if err != nil {
+		return nil, hcl.Diagnostics{{
+			Severity: hcl.DiagError,
+			Summary:  "Failed to generate UID",
+			Detail:   err.Error(),
+		}}
+	}
+	rctx := &renderContext{
+		Args: args,
+		UID:  base32.StdEncoding.WithPadding(base32.NoPadding).EncodeToString(uid[:]),
+	}
+	if queryResult, ok := params.DataContext["query_result"]; ok {
+		rctx.QueryResult, ok = queryResult.(plugin.ListData)
+		if !ok {
+			return nil, hcl.Diagnostics{{
+				Severity: hcl.DiagError,
+				Summary:  "Invalid query result",
+				Detail:   "Query result is not a list",
+			}}
+		}
+	} else if args.StixURL == nil && args.GistID == nil {
+		return nil, hcl.Diagnostics{{
+			Severity: hcl.DiagError,
+			Summary:  "Missing arguments",
+			Detail:   "Must provide one of stix_url, gist_id, or query_result",
+		}}
+	}
+	buf := bytes.NewBufferString("")
+	err = stixViewTmpl.Execute(buf, rctx)
+	if err != nil {
+		return nil, hcl.Diagnostics{{
+			Severity: hcl.DiagError,
+			Summary:  "Failed to render template",
+			Detail:   err.Error(),
+		}}
+	}
+
+	return &plugin.Content{
+		Markdown: buf.String(),
+	}, nil
+}
+
+type renderContext struct {
+	Args        *stixViewArgs
+	UID         string
+	QueryResult plugin.ListData
+}
+
+type stixViewArgs struct {
+	GistID           *string
+	StixURL          *string
+	Caption          *string
+	ShowFooter       *bool
+	ShowSidebar      *bool
+	ShowTLPAsTags    *bool
+	ShowMarkingNodes *bool
+	ShowLabels       *bool
+	ShowIDRefs       *bool
+	Width            *int
+	Height           *int
+}
+
+func stringPtr(s string) *string {
+	return &s
+}
+
+func boolPtr(b bool) *bool {
+	return &b
+}
+
+func intPtr(i int) *int {
+	return &i
+}
+
+func parseStixViewArgs(args cty.Value) (*stixViewArgs, error) {
+	if args.IsNull() {
+		return nil, fmt.Errorf("arguments are null")
+	}
+	var dst stixViewArgs
+	gistID := args.GetAttr("gist_id")
+	if !gistID.IsNull() && gistID.AsString() != "" {
+		dst.GistID = stringPtr(gistID.AsString())
+	}
+	stixURL := args.GetAttr("stix_url")
+	
if !stixURL.IsNull() && stixURL.AsString() != "" { + dst.StixURL = stringPtr(stixURL.AsString()) + } + caption := args.GetAttr("caption") + if !caption.IsNull() && caption.AsString() != "" { + dst.Caption = stringPtr(caption.AsString()) + } + showFooter := args.GetAttr("show_footer") + if !showFooter.IsNull() { + dst.ShowFooter = boolPtr(showFooter.True()) + } + showSidebar := args.GetAttr("show_sidebar") + if !showSidebar.IsNull() { + dst.ShowSidebar = boolPtr(showSidebar.True()) + } + showTLPAsTags := args.GetAttr("show_tlp_as_tags") + if !showTLPAsTags.IsNull() { + dst.ShowTLPAsTags = boolPtr(showTLPAsTags.True()) + } + showMarkingNodes := args.GetAttr("show_marking_nodes") + if !showMarkingNodes.IsNull() { + dst.ShowMarkingNodes = boolPtr(showMarkingNodes.True()) + } + showLabels := args.GetAttr("show_labels") + if !showLabels.IsNull() { + dst.ShowLabels = boolPtr(showLabels.True()) + } + showIDRefs := args.GetAttr("show_idrefs") + if !showIDRefs.IsNull() { + dst.ShowIDRefs = boolPtr(showIDRefs.True()) + } + width := args.GetAttr("width") + if !width.IsNull() { + n, _ := width.AsBigFloat().Int64() + dst.Width = intPtr(int(n)) + } + height := args.GetAttr("height") + if !height.IsNull() { + n, _ := height.AsBigFloat().Int64() + dst.Height = intPtr(int(n)) + } + return &dst, nil +} diff --git a/internal/stixview/content_stixview_test.go b/internal/stixview/content_stixview_test.go new file mode 100644 index 00000000..1b89edaf --- /dev/null +++ b/internal/stixview/content_stixview_test.go @@ -0,0 +1,137 @@ +package stixview + +import ( + "context" + "strings" + "testing" + + "github.com/stretchr/testify/suite" + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/plugin" +) + +type StixViewTestSuite struct { + suite.Suite + schema *plugin.ContentProvider +} + +func TestStixViewTestSuite(t *testing.T) { + suite.Run(t, new(StixViewTestSuite)) +} + +func (s *StixViewTestSuite) SetupTest() { + s.schema = makeStixViewContentProvider() +} + +func (s *StixViewTestSuite) TestSchema() { + s.Require().NotNil(s.schema) + s.Nil(s.schema.Config) + s.NotNil(s.schema.Args) + s.NotNil(s.schema.ContentFunc) +} + +func (s *StixViewTestSuite) TestGistID() { + res, diags := s.schema.ContentFunc(context.Background(), &plugin.ProvideContentParams{ + Args: cty.ObjectVal(map[string]cty.Value{ + "gist_id": cty.StringVal("123"), + "stix_url": cty.NullVal(cty.String), + "caption": cty.NullVal(cty.String), + "show_footer": cty.NullVal(cty.Bool), + "show_sidebar": cty.NullVal(cty.Bool), + "show_tlp_as_tags": cty.NullVal(cty.Bool), + "show_marking_nodes": cty.NullVal(cty.Bool), + "show_labels": cty.NullVal(cty.Bool), + "show_idrefs": cty.NullVal(cty.Bool), + "width": cty.NullVal(cty.Number), + "height": cty.NullVal(cty.Number), + }), + DataContext: plugin.MapData{}, + }) + s.Len(diags, 0) + s.Equal(strings.Join([]string{ + ``, + `
`, + `
`, + }, "\n"), res.Markdown) +} + +func (s *StixViewTestSuite) TestStixURL() { + res, diags := s.schema.ContentFunc(context.Background(), &plugin.ProvideContentParams{ + Args: cty.ObjectVal(map[string]cty.Value{ + "gist_id": cty.NullVal(cty.String), + "stix_url": cty.StringVal("https://example.com/stix.json"), + "caption": cty.NullVal(cty.String), + "show_footer": cty.NullVal(cty.Bool), + "show_sidebar": cty.NullVal(cty.Bool), + "show_tlp_as_tags": cty.NullVal(cty.Bool), + "show_marking_nodes": cty.NullVal(cty.Bool), + "show_labels": cty.NullVal(cty.Bool), + "show_idrefs": cty.NullVal(cty.Bool), + "width": cty.NullVal(cty.Number), + "height": cty.NullVal(cty.Number), + }), + DataContext: plugin.MapData{}, + }) + s.Len(diags, 0) + s.Equal(strings.Join([]string{ + ``, + `
`, + `
`, + }, "\n"), res.Markdown) +} + +func (s *StixViewTestSuite) TestAllArgs() { + res, diags := s.schema.ContentFunc(context.Background(), &plugin.ProvideContentParams{ + Args: cty.ObjectVal(map[string]cty.Value{ + "gist_id": cty.StringVal("123"), + "stix_url": cty.NullVal(cty.String), + "caption": cty.StringVal("test caption"), + "show_footer": cty.BoolVal(true), + "show_sidebar": cty.BoolVal(true), + "show_tlp_as_tags": cty.BoolVal(true), + "show_marking_nodes": cty.BoolVal(true), + "show_labels": cty.BoolVal(true), + "show_idrefs": cty.BoolVal(true), + "width": cty.NumberIntVal(400), + "height": cty.NumberIntVal(300), + }), + DataContext: plugin.MapData{}, + }) + s.Len(diags, 0) + s.Equal(strings.Join([]string{ + ``, + `
`, + `
`, + }, "\n"), res.Markdown) +} + +func (s *StixViewTestSuite) TestQueryResult() { + res, diags := s.schema.ContentFunc(context.Background(), &plugin.ProvideContentParams{ + Args: cty.ObjectVal(map[string]cty.Value{ + "gist_id": cty.NullVal(cty.String), + "stix_url": cty.NullVal(cty.String), + "caption": cty.NullVal(cty.String), + "show_footer": cty.NullVal(cty.Bool), + "show_sidebar": cty.NullVal(cty.Bool), + "show_tlp_as_tags": cty.NullVal(cty.Bool), + "show_marking_nodes": cty.NullVal(cty.Bool), + "show_labels": cty.NullVal(cty.Bool), + "show_idrefs": cty.NullVal(cty.Bool), + "width": cty.NullVal(cty.Number), + "height": cty.NullVal(cty.Number), + }), + DataContext: plugin.MapData{ + "query_result": plugin.ListData{ + plugin.MapData{ + "key": plugin.StringData("value"), + }, + }, + }, + }) + s.Len(diags, 0) + s.Contains(res.Markdown, ``) + s.Contains(res.Markdown, `
+{{if .QueryResult -}} +
+
+{{else -}} +
+
{{end -}}
\ No newline at end of file
From 875dcc6fdab0f1dad5794a9ff9c0a5afb72d1ea7 Mon Sep 17 00:00:00 2001
From: dobarx
Date: Tue, 20 Feb 2024 10:31:44 +0200
Subject: [PATCH 6/8] internal/virustotal: add virustotal_api_usage data source

---
 internal/virustotal/client/client.go          | 108 ++++++++++
 internal/virustotal/client/client_test.go     | 204 ++++++++++++++++++
 internal/virustotal/client/dto.go             |  49 +++++
 internal/virustotal/cmd/main.go               |  14 ++
 .../virustotal/data_virustotal_api_usage.go   | 165 ++++++++++++++
 .../data_virustotal_api_usage_test.go         | 185 ++++++++++++++++
 internal/virustotal/plugin.go                 |  38 ++++
 internal/virustotal/plugin_test.go            |  14 ++
 8 files changed, 777 insertions(+)
 create mode 100644 internal/virustotal/client/client.go
 create mode 100644 internal/virustotal/client/client_test.go
 create mode 100644 internal/virustotal/client/dto.go
 create mode 100644 internal/virustotal/cmd/main.go
 create mode 100644 internal/virustotal/data_virustotal_api_usage.go
 create mode 100644 internal/virustotal/data_virustotal_api_usage_test.go
 create mode 100644 internal/virustotal/plugin.go
 create mode 100644 internal/virustotal/plugin_test.go

diff --git a/internal/virustotal/client/client.go b/internal/virustotal/client/client.go
new file mode 100644
index 00000000..7fdab37f
--- /dev/null
+++ b/internal/virustotal/client/client.go
@@ -0,0 +1,108 @@
+package client
+
+import (
+	"context"
+	"encoding/json"
+	"net/http"
+	"net/url"
+	"time"
+
+	"github.com/google/go-querystring/query"
+)
+
+var defaultAPIBaseURL = "https://www.virustotal.com/api/v3"
+
+type Client interface {
+	GetUserAPIUsage(ctx context.Context, req *GetUserAPIUsageReq) (*GetUserAPIUsageRes, error)
+	GetGroupAPIUsage(ctx context.Context, req *GetGroupAPIUsageReq) (*GetGroupAPIUsageRes, error)
+}
+
+type client struct {
+	url string
+	key string
+}
+
+func New(key string) Client {
+	return &client{
+		url: defaultAPIBaseURL,
+		key: key,
+	}
+}
+
+func (c *client) auth(r *http.Request) {
+	r.Header.Set("x-apikey", c.key)
+}
+
+func (c *client) GetUserAPIUsage(ctx context.Context, req *GetUserAPIUsageReq) (*GetUserAPIUsageRes, error) {
+	u, err := url.Parse(c.url + "/users/" + req.User + "/api_usage")
+	if err != nil {
+		return nil, err
+	}
+	q, err := query.Values(req)
+	if err != nil {
+		return nil, err
+	}
+	u.RawQuery = q.Encode()
+	r, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil)
+	if err != nil {
+		return nil, err
+	}
+	c.auth(r)
+	client := http.Client{
+		Timeout: 15 * time.Second,
+	}
+	res, err := client.Do(r)
+	if err != nil {
+		return nil, err
+	}
+	defer res.Body.Close()
+	if res.StatusCode != http.StatusOK {
+		var data ErrorRes
+		if err := json.NewDecoder(res.Body).Decode(&data); err != nil {
+			return nil, err
+		}
+		return nil, data.Error
+	}
+	var data GetUserAPIUsageRes
+	if err := json.NewDecoder(res.Body).Decode(&data); err != nil {
+		return nil, err
+	}
+	return &data, nil
+}
+
+func (c *client) GetGroupAPIUsage(ctx context.Context, req *GetGroupAPIUsageReq) (*GetGroupAPIUsageRes, error) {
+	u, err := url.Parse(c.url + "/groups/" + req.Group + "/api_usage")
+	if err != nil {
+		return nil, err
+	}
+	q, err := query.Values(req)
+	if err != nil {
+		return nil, err
+	}
+	u.RawQuery = q.Encode()
+	r, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil)
+	if err != nil {
+		return nil, err
+	}
+	c.auth(r)
+	client := http.Client{
+		Timeout: 15 * time.Second,
+	}
+	res, err := client.Do(r)
+	if err != nil {
+		return nil, err
+	}
+	defer res.Body.Close()
+	if res.StatusCode != http.StatusOK {
+		var data ErrorRes
+		if err := json.NewDecoder(res.Body).Decode(&data); err != nil {
+			return nil, err
+		}
+		return nil, data.Error
+	}
+	var data GetGroupAPIUsageRes
+	if err := json.NewDecoder(res.Body).Decode(&data); err != nil {
+		return nil, err
+	}
+	return &data, nil
+}
diff --git a/internal/virustotal/client/client_test.go b/internal/virustotal/client/client_test.go
new file mode 100644
index 00000000..47b8ff84
--- /dev/null
+++ b/internal/virustotal/client/client_test.go
@@ -0,0 +1,204 @@
+package client
+
+import (
+	"context"
+	"net/http"
+	"net/http/httptest"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/suite"
+)
+
+type ClientTestSuite struct {
+	suite.Suite
+	ctx    context.Context
+	cancel context.CancelFunc
+}
+
+func (s *ClientTestSuite) SetupTest() {
+	s.ctx, s.cancel = context.WithCancel(context.Background())
+}
+
+func (s *ClientTestSuite) TearDownTest() {
+	s.cancel()
+}
+
+func TestClientTestSuite(t *testing.T) {
+	suite.Run(t, new(ClientTestSuite))
+}
+
+func (s *ClientTestSuite) mock(fn http.HandlerFunc, tkn string) (*client, *httptest.Server) {
+	srv := httptest.NewServer(fn)
+	cli := &client{
+		url: srv.URL,
+		key: tkn,
+	}
+	return cli, srv
+}
+
+func (s *ClientTestSuite) TestAuth() {
+	client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) {
+		s.Equal("test_token", r.Header.Get("x-apikey"))
+	}, "test_token")
+	defer srv.Close()
+	client.GetUserAPIUsage(s.ctx, &GetUserAPIUsageReq{User: "test_user"})
+}
+
+func (s *ClientTestSuite) TestGetUserAPIUsageWithQuery() {
+	client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) {
+		s.Equal("test_token", r.Header.Get("x-apikey"))
+		s.Equal("GET", r.Method)
+		s.Equal("/users/test_user/api_usage", r.URL.Path)
+		s.Equal("20240101", r.URL.Query().Get("start_date"))
+		s.Equal("20240103", r.URL.Query().Get("end_date"))
+		w.Write([]byte(`{"data": {
+			"daily": {
+				"2024-01-01": {},
+				"2024-01-02": {},
+				"2024-01-03": {}
+			}}}`))
+	}, "test_token")
+	defer srv.Close()
+	start, err := time.Parse("20060102", "20240101")
+	s.Require().NoError(err)
+	end, err := time.Parse("20060102", "20240103")
+	s.Require().NoError(err)
+	res, err := client.GetUserAPIUsage(s.ctx, &GetUserAPIUsageReq{
+		User:      "test_user",
+		StartDate: &Date{start},
+		EndDate:   &Date{end},
+	})
+	s.Require().NoError(err)
+	s.Equal(map[string]any{
+		"daily": map[string]any{
+			"2024-01-01": map[string]any{},
+			"2024-01-02": map[string]any{},
+			"2024-01-03": map[string]any{},
+		},
+	}, res.Data)
+}
+
+func (s *ClientTestSuite) TestGetUserAPIUsage() {
+	client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) {
+		s.Equal("test_token", r.Header.Get("x-apikey"))
+		s.Equal("GET", r.Method)
+		s.Equal("/users/test_user/api_usage", r.URL.Path)
+		w.Write([]byte(`{"data": {
+			"daily": {
+				"2024-01-01": {},
+				"2024-01-02": {},
+				"2024-01-03": {}
+			}}}`))
+	}, "test_token")
+	defer srv.Close()
+	res, err := client.GetUserAPIUsage(s.ctx, &GetUserAPIUsageReq{
+		User: "test_user",
+	})
+	s.Require().NoError(err)
+	s.Equal(map[string]any{
+		"daily": map[string]any{
+			"2024-01-01": map[string]any{},
+			"2024-01-02": map[string]any{},
+			"2024-01-03": map[string]any{},
+		},
+	}, res.Data)
+}
+
+func (s *ClientTestSuite) TestGetGroupAPIUsageWithQuery() {
+	client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) {
+		s.Equal("test_token", r.Header.Get("x-apikey"))
+		s.Equal("GET", r.Method)
+		s.Equal("/groups/test_group/api_usage", r.URL.Path)
+		s.Equal("20240101", r.URL.Query().Get("start_date"))
+		s.Equal("20240103", 
r.URL.Query().Get("end_date")) + w.Write([]byte(`{"data": { + "daily": { + "2024-01-01": {}, + "2024-01-02": {}, + "2024-01-03": {} + }}}`)) + }, "test_token") + defer srv.Close() + start, err := time.Parse("20060102", "20240101") + s.Require().NoError(err) + end, err := time.Parse("20060102", "20240103") + s.Require().NoError(err) + res, err := client.GetGroupAPIUsage(s.ctx, &GetGroupAPIUsageReq{ + Group: "test_group", + StartDate: &Date{start}, + EndDate: &Date{end}, + }) + s.Require().NoError(err) + s.Equal(map[string]any{ + "daily": map[string]any{ + "2024-01-01": map[string]any{}, + "2024-01-02": map[string]any{}, + "2024-01-03": map[string]any{}, + }, + }, res.Data) +} + +func (s *ClientTestSuite) TestGetGroupAPIUsage() { + client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) { + s.Equal("test_token", r.Header.Get("x-apikey")) + s.Equal("GET", r.Method) + s.Equal("/groups/test_group/api_usage", r.URL.Path) + w.Write([]byte(`{"data": { + "daily": { + "2024-01-01": {}, + "2024-01-02": {}, + "2024-01-03": {} + }}}`)) + }, "test_token") + defer srv.Close() + res, err := client.GetGroupAPIUsage(s.ctx, &GetGroupAPIUsageReq{ + Group: "test_group", + }) + s.Require().NoError(err) + s.Equal(map[string]any{ + "daily": map[string]any{ + "2024-01-01": map[string]any{}, + "2024-01-02": map[string]any{}, + "2024-01-03": map[string]any{}, + }, + }, res.Data) +} + +func (s *ClientTestSuite) TestGetUserAPIUsageError() { + client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) { + s.Equal("test_token", r.Header.Get("x-apikey")) + s.Equal("GET", r.Method) + s.Equal("/users/test_user/api_usage", r.URL.Path) + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(`{"error": { + "code": "test_code", + "message": "test_message" + }}`)) + }, "test_token") + defer srv.Close() + _, err := client.GetUserAPIUsage(s.ctx, &GetUserAPIUsageReq{ + User: "test_user", + }) + s.Require().Error(err) + s.Equal("test_code: test_message", err.Error()) +} + +func (s *ClientTestSuite) TestGetGroupAPIUsageError() { + client, srv := s.mock(func(w http.ResponseWriter, r *http.Request) { + s.Equal("test_token", r.Header.Get("x-apikey")) + s.Equal("GET", r.Method) + s.Equal("/groups/test_group/api_usage", r.URL.Path) + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(`{"error": { + "code": "test_code", + "message": "test_message" + }}`)) + }, "test_token") + defer srv.Close() + _, err := client.GetGroupAPIUsage(s.ctx, &GetGroupAPIUsageReq{ + Group: "test_group", + }) + s.Require().Error(err) + s.Equal("test_code: test_message", err.Error()) +} diff --git a/internal/virustotal/client/dto.go b/internal/virustotal/client/dto.go new file mode 100644 index 00000000..f2a37e32 --- /dev/null +++ b/internal/virustotal/client/dto.go @@ -0,0 +1,49 @@ +package client + +import ( + "fmt" + "net/url" + "time" +) + +type GetUserAPIUsageReq struct { + User string `url:"-"` + StartDate *Date `url:"start_date,omitempty"` + EndDate *Date `url:"end_date,omitempty"` +} + +type GetGroupAPIUsageReq struct { + Group string `url:"-"` + StartDate *Date `url:"start_date,omitempty"` + EndDate *Date `url:"end_date,omitempty"` +} + +type Error struct { + Code string `json:"code"` + Message string `json:"message"` +} + +func (e Error) Error() string { + return fmt.Sprintf("%s: %s", e.Code, e.Message) +} + +type ErrorRes struct { + Error Error `json:"error"` +} + +type GetUserAPIUsageRes struct { + Data map[string]any `json:"data"` +} + +type GetGroupAPIUsageRes struct { + Data map[string]any `json:"data"` +} + 
+type Date struct { + time.Time +} + +func (d Date) EncodeValues(key string, v *url.Values) error { + v.Add(key, d.Time.Format("20060102")) + return nil +} diff --git a/internal/virustotal/cmd/main.go b/internal/virustotal/cmd/main.go new file mode 100644 index 00000000..b59e6f2c --- /dev/null +++ b/internal/virustotal/cmd/main.go @@ -0,0 +1,14 @@ +package main + +import ( + "github.com/blackstork-io/fabric/internal/virustotal" + pluginapiv1 "github.com/blackstork-io/fabric/plugin/pluginapi/v1" +) + +var version string + +func main() { + pluginapiv1.Serve( + virustotal.Plugin(version, virustotal.DefaultClientLoader), + ) +} diff --git a/internal/virustotal/data_virustotal_api_usage.go b/internal/virustotal/data_virustotal_api_usage.go new file mode 100644 index 00000000..431e634d --- /dev/null +++ b/internal/virustotal/data_virustotal_api_usage.go @@ -0,0 +1,165 @@ +package virustotal + +import ( + "context" + "fmt" + "time" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hcldec" + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/internal/virustotal/client" + "github.com/blackstork-io/fabric/plugin" +) + +func makeVirusTotalAPIUsageDataSchema(loader ClientLoadFn) *plugin.DataSource { + return &plugin.DataSource{ + DataFunc: fetchVirusTotalAPIUsageData(loader), + Config: hcldec.ObjectSpec{ + "api_key": &hcldec.AttrSpec{ + Name: "api_key", + Type: cty.String, + Required: true, + }, + }, + Args: hcldec.ObjectSpec{ + "user_id": &hcldec.AttrSpec{ + Name: "user_id", + Type: cty.String, + Required: false, + }, + "group_id": &hcldec.AttrSpec{ + Name: "group_id", + Type: cty.String, + Required: false, + }, + "start_date": &hcldec.AttrSpec{ + Name: "start_date", + Type: cty.String, + Required: false, + }, + "end_date": &hcldec.AttrSpec{ + Name: "end_date", + Type: cty.String, + Required: false, + }, + }, + } +} + +func fetchVirusTotalAPIUsageData(loader ClientLoadFn) plugin.RetrieveDataFunc { + return func(ctx context.Context, params *plugin.RetrieveDataParams) (plugin.Data, hcl.Diagnostics) { + cli, err := makeClient(loader, params.Config) + if err != nil { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to create client", + Detail: err.Error(), + }} + } + args, err := parseAPIUsageArgs(params.Args) + if err != nil { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to parse arguments", + Detail: err.Error(), + }} + } + var data map[string]any + if args.User != nil { + req := &client.GetUserAPIUsageReq{ + User: *args.User, + } + if args.StartDate != nil { + req.StartDate = &client.Date{Time: *args.StartDate} + } + if args.EndDate != nil { + req.EndDate = &client.Date{Time: *args.EndDate} + } + + res, err := cli.GetUserAPIUsage(ctx, req) + if err != nil { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to fetch data", + Detail: err.Error(), + }} + } + data = res.Data + } else { + req := &client.GetGroupAPIUsageReq{ + Group: *args.Group, + } + if args.StartDate != nil { + req.StartDate = &client.Date{Time: *args.StartDate} + } + if args.EndDate != nil { + req.EndDate = &client.Date{Time: *args.EndDate} + } + + res, err := cli.GetGroupAPIUsage(ctx, req) + if err != nil { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to fetch data", + Detail: err.Error(), + }} + } + data = res.Data + } + result, err := plugin.ParseDataMapAny(data) + if err != nil { + return nil, hcl.Diagnostics{{ + Severity: hcl.DiagError, + Summary: "Failed to parse data", + 
Detail: err.Error(), + }} + } + return result, nil + } +} + +type apiUsageArgs struct { + User *string + Group *string + StartDate *time.Time + EndDate *time.Time +} + +func parseAPIUsageArgs(args cty.Value) (*apiUsageArgs, error) { + dst := apiUsageArgs{} + + if args.IsNull() { + return nil, fmt.Errorf("arguments are null") + } + + if userID := args.GetAttr("user_id"); !userID.IsNull() { + userIDStr := userID.AsString() + dst.User = &userIDStr + } + if groupID := args.GetAttr("group_id"); !groupID.IsNull() { + groupIDStr := groupID.AsString() + dst.Group = &groupIDStr + } + if dst.User == nil && dst.Group == nil { + return nil, fmt.Errorf("either user_id or group_id must be set") + } + if startDate := args.GetAttr("start_date"); !startDate.IsNull() { + startDateStr := startDate.AsString() + startDate, err := time.Parse("20060102", startDateStr) + if err != nil { + return nil, fmt.Errorf("failed to parse start_date: %w", err) + } + dst.StartDate = &startDate + } + if endDate := args.GetAttr("end_date"); !endDate.IsNull() { + endDateStr := endDate.AsString() + endDate, err := time.Parse("20060102", endDateStr) + if err != nil { + return nil, fmt.Errorf("failed to parse end_date: %w", err) + } + dst.EndDate = &endDate + } + return &dst, nil +} diff --git a/internal/virustotal/data_virustotal_api_usage_test.go b/internal/virustotal/data_virustotal_api_usage_test.go new file mode 100644 index 00000000..19f209db --- /dev/null +++ b/internal/virustotal/data_virustotal_api_usage_test.go @@ -0,0 +1,185 @@ +package virustotal + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/internal/virustotal/client" + client_mocks "github.com/blackstork-io/fabric/mocks/internalpkg/virustotal/client" + "github.com/blackstork-io/fabric/plugin" +) + +type APIUsageTestSuite struct { + suite.Suite + schema *plugin.DataSource + ctx context.Context + cli *client_mocks.Client + storedTkn string +} + +func TestAPIUsageTestSuite(t *testing.T) { + suite.Run(t, new(APIUsageTestSuite)) +} + +func (s *APIUsageTestSuite) SetupSuite() { + s.schema = makeVirusTotalAPIUsageDataSchema(func(token string) client.Client { + s.storedTkn = token + return s.cli + }) + s.ctx = context.Background() +} + +func (s *APIUsageTestSuite) SetupTest() { + s.cli = &client_mocks.Client{} +} + +func (s *APIUsageTestSuite) TearDownTest() { + s.cli.AssertExpectations(s.T()) +} + +func (s *APIUsageTestSuite) TestSchema() { + s.Require().NotNil(s.schema) + s.NotNil(s.schema.Config) + s.NotNil(s.schema.Args) + s.NotNil(s.schema.DataFunc) +} + +func (s *APIUsageTestSuite) TestUser() { + start, err := time.Parse("20060102", "20240101") + s.Require().NoError(err) + end, err := time.Parse("20060102", "20240103") + s.Require().NoError(err) + s.cli.On("GetUserAPIUsage", mock.Anything, &client.GetUserAPIUsageReq{ + User: "test_user", + StartDate: &client.Date{Time: start}, + EndDate: &client.Date{Time: end}, + }).Return(&client.GetUserAPIUsageRes{ + Data: map[string]any{ + "daily": map[string]any{ + "2024-01-01": map[string]any{}, + "2024-01-02": map[string]any{}, + "2024-01-03": map[string]any{}, + }, + }, + }, nil) + data, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "api_key": cty.StringVal("test_token"), + }), + Args: cty.ObjectVal(map[string]cty.Value{ + "user_id": cty.StringVal("test_user"), + "group_id": cty.NullVal(cty.String), + 
"start_date": cty.StringVal("20240101"), + "end_date": cty.StringVal("20240103"), + }), + }) + s.Require().Len(diags, 0) + s.Equal(plugin.MapData{ + "daily": plugin.MapData{ + "2024-01-01": plugin.MapData{}, + "2024-01-02": plugin.MapData{}, + "2024-01-03": plugin.MapData{}, + }, + }, data) +} + +func (s *APIUsageTestSuite) TestGroup() { + start, err := time.Parse("20060102", "20240101") + s.Require().NoError(err) + end, err := time.Parse("20060102", "20240103") + s.Require().NoError(err) + s.cli.On("GetGroupAPIUsage", mock.Anything, &client.GetGroupAPIUsageReq{ + Group: "test_group", + StartDate: &client.Date{Time: start}, + EndDate: &client.Date{Time: end}, + }).Return(&client.GetGroupAPIUsageRes{ + Data: map[string]any{ + "daily": map[string]any{ + "2024-01-01": map[string]any{}, + "2024-01-02": map[string]any{}, + "2024-01-03": map[string]any{}, + }, + }, + }, nil) + data, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "api_key": cty.StringVal("test_token"), + }), + Args: cty.ObjectVal(map[string]cty.Value{ + "user_id": cty.NullVal(cty.String), + "group_id": cty.StringVal("test_group"), + "start_date": cty.StringVal("20240101"), + "end_date": cty.StringVal("20240103"), + }), + }) + s.Require().Len(diags, 0) + s.Equal(plugin.MapData{ + "daily": plugin.MapData{ + "2024-01-01": plugin.MapData{}, + "2024-01-02": plugin.MapData{}, + "2024-01-03": plugin.MapData{}, + }, + }, data) +} + +func (s *APIUsageTestSuite) TestMissingConfig() { + data, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.NullVal(cty.DynamicPseudoType), + Args: cty.ObjectVal(map[string]cty.Value{}), + }) + s.Require().Len(diags, 1) + s.Nil(data) +} + +func (s *APIUsageTestSuite) TestMissingAPIKey() { + data, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "api_key": cty.NullVal(cty.String), + }), + Args: cty.ObjectVal(map[string]cty.Value{}), + }) + s.Require().Len(diags, 1) + s.Nil(data) +} + +func (s *APIUsageTestSuite) TestMissingUserIDAndGroupID() { + data, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "api_key": cty.StringVal("test_token"), + }), + Args: cty.ObjectVal(map[string]cty.Value{ + "user_id": cty.NullVal(cty.String), + "group_id": cty.NullVal(cty.String), + "start_date": cty.StringVal("20240101"), + "end_date": cty.StringVal("20240103"), + }), + }) + s.Require().Len(diags, 1) + s.Nil(data) +} + +func (s *APIUsageTestSuite) TestError() { + err := errors.New("test error") + s.cli.On("GetUserAPIUsage", mock.Anything, &client.GetUserAPIUsageReq{ + User: "test_user", + }).Return(nil, err) + data, diags := s.schema.DataFunc(s.ctx, &plugin.RetrieveDataParams{ + Config: cty.ObjectVal(map[string]cty.Value{ + "api_key": cty.StringVal("test_token"), + }), + Args: cty.ObjectVal(map[string]cty.Value{ + "user_id": cty.StringVal("test_user"), + "group_id": cty.NullVal(cty.String), + "start_date": cty.NullVal(cty.String), + "end_date": cty.NullVal(cty.String), + }), + }) + s.Require().Len(diags, 1) + s.Nil(data) +} diff --git a/internal/virustotal/plugin.go b/internal/virustotal/plugin.go new file mode 100644 index 00000000..ec2b42ca --- /dev/null +++ b/internal/virustotal/plugin.go @@ -0,0 +1,38 @@ +package virustotal + +import ( + "fmt" + + "github.com/zclconf/go-cty/cty" + + "github.com/blackstork-io/fabric/internal/virustotal/client" + "github.com/blackstork-io/fabric/plugin" +) + +type ClientLoadFn 
func(key string) client.Client
+
+var DefaultClientLoader ClientLoadFn = client.New
+
+func Plugin(version string, loader ClientLoadFn) *plugin.Schema {
+ if loader == nil {
+ loader = DefaultClientLoader
+ }
+ return &plugin.Schema{
+ Name: "blackstork/virustotal",
+ Version: version,
+ DataSources: plugin.DataSources{
+ "virustotal_api_usage": makeVirusTotalAPIUsageDataSchema(loader),
+ },
+ }
+}
+
+func makeClient(loader ClientLoadFn, cfg cty.Value) (client.Client, error) {
+ if cfg.IsNull() {
+ return nil, fmt.Errorf("configuration is required")
+ }
+ key := cfg.GetAttr("api_key")
+ if key.IsNull() || key.AsString() == "" {
+ return nil, fmt.Errorf("api_key is required in configuration")
+ }
+ return loader(key.AsString()), nil
+}
diff --git a/internal/virustotal/plugin_test.go b/internal/virustotal/plugin_test.go
new file mode 100644
index 00000000..70e9b944
--- /dev/null
+++ b/internal/virustotal/plugin_test.go
@@ -0,0 +1,14 @@
+package virustotal
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestPlugin_Schema(t *testing.T) {
+ schema := Plugin("1.2.3", nil)
+ assert.Equal(t, "blackstork/virustotal", schema.Name)
+ assert.Equal(t, "1.2.3", schema.Version)
+ assert.NotNil(t, schema.DataSources["virustotal_api_usage"])
+}

From 98ba7665a6a7bd1332b415050ef18d4592007232 Mon Sep 17 00:00:00 2001
From: dobarx
Date: Tue, 20 Feb 2024 10:32:24 +0200
Subject: [PATCH 7/8] update mocks, examples & docs

---
 .goreleaser-dev.yaml | 24 ++
 .goreleaser.yaml | 40 ++++
 .mockery.yaml | 12 +
 docs/plugins/hackerone.md | 91 ++++++++
 docs/plugins/splunk.md | 52 +++++
 docs/plugins/stixview.md | 50 ++++
 docs/plugins/virustotal.md | 48 ++++
 examples/plugins/basic/content_greeting.go | 2 +-
 examples/plugins/basic/data_random_numbers.go | 4 +-
 examples/templates/stixview/data.csv | 4 +
 examples/templates/stixview/example.fabric | 18 ++
 examples/templates/virustotal/example.fabric | 26 +++
 gen.go | 2 +-
 go.mod | 2 +-
 mocks/internalpkg/github/client.go | 2 +-
 mocks/internalpkg/hackerone/client/client.go | 97 ++++++++
 mocks/internalpkg/openai/client/client.go | 2 +-
 mocks/internalpkg/splunk/client/client.go | 215 ++++++++++++++++++
 mocks/internalpkg/virustotal/client/client.go | 156 +++++++++++++
 mocks/parser/definitions/fabric_block.go | 2 +-
 tools/docgen/main.go | 8 +
 21 files changed, 849 insertions(+), 8 deletions(-)
 create mode 100644 docs/plugins/hackerone.md
 create mode 100644 docs/plugins/splunk.md
 create mode 100644 docs/plugins/stixview.md
 create mode 100644 docs/plugins/virustotal.md
 create mode 100644 examples/templates/stixview/data.csv
 create mode 100644 examples/templates/stixview/example.fabric
 create mode 100644 examples/templates/virustotal/example.fabric
 create mode 100644 mocks/internalpkg/hackerone/client/client.go
 create mode 100644 mocks/internalpkg/splunk/client/client.go
 create mode 100644 mocks/internalpkg/virustotal/client/client.go

diff --git a/.goreleaser-dev.yaml b/.goreleaser-dev.yaml
index 98772780..af24e57a 100644
--- a/.goreleaser-dev.yaml
+++ b/.goreleaser-dev.yaml
@@ -68,3 +68,27 @@ builds:
 binary: "plugins/blackstork/terraform@{{ .Version }}"
 ldflags: "-X main.version={{.Version}}"
 no_unique_dist_dir: true
+
+ - id: hackerone
+ main: ./internal/hackerone/cmd
+ binary: "plugins/blackstork/hackerone@{{ .Version }}"
+ ldflags: "-X main.version={{.Version}}"
+ no_unique_dist_dir: true
+
+ - id: virustotal
+ main: ./internal/virustotal/cmd
+ binary: "plugins/blackstork/virustotal@{{ .Version }}"
+ ldflags: "-X main.version={{.Version}}"
+
no_unique_dist_dir: true
+
+ - id: splunk
+ main: ./internal/splunk/cmd
+ binary: "plugins/blackstork/splunk@{{ .Version }}"
+ ldflags: "-X main.version={{.Version}}"
+ no_unique_dist_dir: true
+
+ - id: stixview
+ main: ./internal/stixview/cmd
+ binary: "plugins/blackstork/stixview@{{ .Version }}"
+ ldflags: "-X main.version={{.Version}}"
+ no_unique_dist_dir: true
diff --git a/.goreleaser.yaml b/.goreleaser.yaml
index bcd19b62..0b67100a 100644
--- a/.goreleaser.yaml
+++ b/.goreleaser.yaml
@@ -95,6 +95,42 @@ builds:
 - windows
 - darwin

+ - id: hackerone
+ main: ./internal/hackerone/cmd
+ binary: "plugins/blackstork/hackerone@{{ .Version }}"
+ flags: "-trimpath"
+ goos:
+ - linux
+ - windows
+ - darwin
+
+ - id: virustotal
+ main: ./internal/virustotal/cmd
+ binary: "plugins/blackstork/virustotal@{{ .Version }}"
+ flags: "-trimpath"
+ goos:
+ - linux
+ - windows
+ - darwin
+
+ - id: splunk
+ main: ./internal/splunk/cmd
+ binary: "plugins/blackstork/splunk@{{ .Version }}"
+ flags: "-trimpath"
+ goos:
+ - linux
+ - windows
+ - darwin
+
+ - id: stixview
+ main: ./internal/stixview/cmd
+ binary: "plugins/blackstork/stixview@{{ .Version }}"
+ flags: "-trimpath"
+ goos:
+ - linux
+ - windows
+ - darwin
+
 archives:
 - id: fabric
 format: tar.gz
@@ -122,6 +158,10 @@ archives:
 - postgresql
 - sqlite
 - terraform
+ - hackerone
+ - virustotal
+ - splunk
+ - stixview
 name_template: >-
 plugins_
 {{- .Os }}_
diff --git a/.mockery.yaml b/.mockery.yaml
index f5abb328..51c97f6d 100644
--- a/.mockery.yaml
+++ b/.mockery.yaml
@@ -14,6 +14,18 @@ packages:
 interfaces:
 Client:
 github.com/blackstork-io/fabric/internal/openai/client:
+ config:
+ interfaces:
+ Client:
+ github.com/blackstork-io/fabric/internal/hackerone/client:
+ config:
+ interfaces:
+ Client:
+ github.com/blackstork-io/fabric/internal/virustotal/client:
+ config:
+ interfaces:
+ Client:
+ github.com/blackstork-io/fabric/internal/splunk/client:
 config:
 interfaces:
 Client:
\ No newline at end of file
diff --git a/docs/plugins/hackerone.md b/docs/plugins/hackerone.md
new file mode 100644
index 00000000..307c4516
--- /dev/null
+++ b/docs/plugins/hackerone.md
@@ -0,0 +1,91 @@
+---
+title: blackstork/hackerone
+weight: 20
+type: docs
+---
+
+# `blackstork/hackerone` plugin
+
+## Installation
+
+To install the plugin, add it to the `plugin_versions` map in the Fabric global configuration block (see [Global configuration]({{< ref "configs.md#global-configuration" >}}) for more details), with a version constraint restricting which available versions of the plugin the codebase is compatible with:
+
+```hcl
+fabric {
+ plugin_versions = {
+ "blackstork/hackerone" = ">= v0.0.0-dev"
+ }
+}
+```
+
+## Data sources
+
+The plugin has the following data sources available:
+
+### `hackerone_reports`
+
+#### Configuration
+
+The data source supports the following configuration parameters:
+
+```hcl
+config data hackerone_reports {
+ api_token = # required
+ api_username = # required
+}
+```
+
+#### Usage
+
+The data source supports the following parameters in the data blocks:
+
+```hcl
+data hackerone_reports {
+ assignee = # optional
+ bounty_awarded_at__gt = # optional
+ bounty_awarded_at__lt = # optional
+ bounty_awarded_at__null = # optional
+ closed_at__gt = # optional
+ closed_at__lt = # optional
+ closed_at__null = # optional
+ created_at__gt = # optional
+ created_at__lt = # optional
+ custom_fields = # optional
+ disclosed_at__gt = # optional
+ disclosed_at__lt = # optional
+ disclosed_at__null = # optional
+ first_program_activity_at__gt = # optional
+
first_program_activity_at__lt = # optional
+ first_program_activity_at__null = # optional
+ hacker_published = # optional
+ id = # optional
+ inbox_ids = # optional
+ keyword = # optional
+ last_activity_at__gt = # optional
+ last_activity_at__lt = # optional
+ last_program_activity_at__gt = # optional
+ last_program_activity_at__lt = # optional
+ last_program_activity_at__null = # optional
+ last_public_activity_at__gt = # optional
+ last_public_activity_at__lt = # optional
+ last_report_activity_at__gt = # optional
+ last_report_activity_at__lt = # optional
+ page_number = # optional
+ program = # optional
+ reporter = # optional
+ reporter_agreed_on_going_public = # optional
+ severity = # optional
+ size = # optional
+ sort = # optional
+ state = # optional
+ submitted_at__gt = # optional
+ submitted_at__lt = # optional
+ swag_awarded_at__gt = # optional
+ swag_awarded_at__lt = # optional
+ swag_awarded_at__null = # optional
+ triaged_at__gt = # optional
+ triaged_at__lt = # optional
+ triaged_at__null = # optional
+ weakness_id = # optional
+}
+```
\ No newline at end of file
diff --git a/docs/plugins/splunk.md b/docs/plugins/splunk.md
new file mode 100644
index 00000000..28ee1b41
--- /dev/null
+++ b/docs/plugins/splunk.md
@@ -0,0 +1,52 @@
+---
+title: blackstork/splunk
+weight: 20
+type: docs
+---
+
+# `blackstork/splunk` plugin
+
+## Installation
+
+To install the plugin, add it to the `plugin_versions` map in the Fabric global configuration block (see [Global configuration]({{< ref "configs.md#global-configuration" >}}) for more details), with a version constraint restricting which available versions of the plugin the codebase is compatible with:
+
+```hcl
+fabric {
+ plugin_versions = {
+ "blackstork/splunk" = ">= v0.0.0-dev"
+ }
+}
+```
+
+## Data sources
+
+The plugin has the following data sources available:
+
+### `splunk_search`
+
+#### Configuration
+
+The data source supports the following configuration parameters:
+
+```hcl
+config data splunk_search {
+ auth_token = # required
+ deployment_name = # optional
+ host = # optional
+}
+```
+
+#### Usage
+
+The data source supports the following parameters in the data blocks:
+
+```hcl
+data splunk_search {
+ earliest_time = # optional
+ latest_time = # optional
+ max_count = # optional
+ rf = # optional
+ search_query = # required
+ status_buckets = # optional
+}
+```
\ No newline at end of file
diff --git a/docs/plugins/stixview.md b/docs/plugins/stixview.md
new file mode 100644
index 00000000..3df909b9
--- /dev/null
+++ b/docs/plugins/stixview.md
@@ -0,0 +1,50 @@
+---
+title: blackstork/stixview
+weight: 20
+type: docs
+---
+
+# `blackstork/stixview` plugin
+
+## Installation
+
+To install the plugin, add it to the `plugin_versions` map in the Fabric global configuration block (see [Global configuration]({{< ref "configs.md#global-configuration" >}}) for more details), with a version constraint restricting which available versions of the plugin the codebase is compatible with:
+
+```hcl
+fabric {
+ plugin_versions = {
+ "blackstork/stixview" = ">= v0.0.0-dev"
+ }
+}
+```
+
+
+## Content providers
+
+The plugin has the following content providers available:
+
+### `stixview`
+
+#### Configuration
+
+The content provider doesn't support configuration.
+
+#### Usage
+
+The content source supports the following parameters in the content blocks:
+
+```hcl
+content stixview {
+ caption = # optional
+ gist_id = # optional
+ height = # optional
+ show_footer = # optional
+ show_idrefs = # optional
+ show_labels = # optional
+ show_marking_nodes = # optional
+ show_sidebar = # optional
+ show_tlp_as_tags = # optional
+ stix_url = # optional
+ width = # optional
+}
+```
diff --git a/docs/plugins/virustotal.md b/docs/plugins/virustotal.md
new file mode 100644
index 00000000..fe4930ca
--- /dev/null
+++ b/docs/plugins/virustotal.md
@@ -0,0 +1,48 @@
+---
+title: blackstork/virustotal
+weight: 20
+type: docs
+---
+
+# `blackstork/virustotal` plugin
+
+## Installation
+
+To install the plugin, add it to the `plugin_versions` map in the Fabric global configuration block (see [Global configuration]({{< ref "configs.md#global-configuration" >}}) for more details), with a version constraint restricting which available versions of the plugin the codebase is compatible with:
+
+```hcl
+fabric {
+ plugin_versions = {
+ "blackstork/virustotal" = ">= v0.0.0-dev"
+ }
+}
+```
+
+## Data sources
+
+The plugin has the following data sources available:
+
+### `virustotal_api_usage`
+
+#### Configuration
+
+The data source supports the following configuration parameters:
+
+```hcl
+config data virustotal_api_usage {
+ api_key = # required
+}
+```
+
+#### Usage
+
+The data source supports the following parameters in the data blocks:
+
+```hcl
+data virustotal_api_usage {
+ end_date = # optional
+ group_id = # optional
+ start_date = # optional
+ user_id = # optional
+}
+```
\ No newline at end of file
diff --git a/examples/plugins/basic/content_greeting.go b/examples/plugins/basic/content_greeting.go
index a23a2a0d..eb6e381e 100644
--- a/examples/plugins/basic/content_greeting.go
+++ b/examples/plugins/basic/content_greeting.go
@@ -16,7 +16,7 @@ func makeGreetingContentProvider() *plugin.ContentProvider {
 return &plugin.ContentProvider{
 // Config is optional, in this case we don't need it
 // We only define the schema for the arguments
- Args: &hcldec.ObjectSpec{
+ Args: hcldec.ObjectSpec{
 "name": &hcldec.AttrSpec{
 Name: "name",
 Required: true,
diff --git a/examples/plugins/basic/data_random_numbers.go b/examples/plugins/basic/data_random_numbers.go
index 718ea869..b71ba47e 100644
--- a/examples/plugins/basic/data_random_numbers.go
+++ b/examples/plugins/basic/data_random_numbers.go
@@ -22,7 +22,7 @@ const (
 func makeRandomNumbersDataSource() *plugin.DataSource {
 return &plugin.DataSource{
 // Config is optional, we can define the schema for the config that is reusable for this data source
- Config: &hcldec.ObjectSpec{
+ Config: hcldec.ObjectSpec{
 "min": &hcldec.AttrSpec{
 Name: "min",
 Required: false,
@@ -35,7 +35,7 @@ func makeRandomNumbersDataSource() *plugin.DataSource {
 },
 },
 // We define the schema for the arguments
- Args: &hcldec.ObjectSpec{
+ Args: hcldec.ObjectSpec{
 "length": &hcldec.AttrSpec{
 Name: "length",
 Required: true,
diff --git a/examples/templates/stixview/data.csv b/examples/templates/stixview/data.csv
new file mode 100644
index 00000000..94ecff74
--- /dev/null
+++ b/examples/templates/stixview/data.csv
@@ -0,0 +1,4 @@
+id,active,name,age,height
+b8fa4bb0-6dd4-45ba-96e0-9a182b2b932e,true,Stacey,26,1.98
+b0086c49-bcd8-4aae-9f88-4f46b128e709,false,Myriam,33,1.81
+a12d2a8c-eebc-42b3-be52-1ab0a2969a81,true,Oralee,31,2.23
\ No newline at end of file
diff --git a/examples/templates/stixview/example.fabric b/examples/templates/stixview/example.fabric
new file
mode 100644 index 00000000..4815c695 --- /dev/null +++ b/examples/templates/stixview/example.fabric @@ -0,0 +1,18 @@ +fabric { + cache_dir = "./.fabric" + plugin_registry { + mirror_dir = "dist/plugins" + } + plugin_versions = { + "blackstork/stixview" = "0.0.0-dev" + } +} + +document "example" { + title = "Using stixview plugin" + content stixview { + gist_id = "6a0fbb0f6e7faf063c748b23f9c7dc62" + height = 500 + width = 500 + } +} \ No newline at end of file diff --git a/examples/templates/virustotal/example.fabric b/examples/templates/virustotal/example.fabric new file mode 100644 index 00000000..1326ca2e --- /dev/null +++ b/examples/templates/virustotal/example.fabric @@ -0,0 +1,26 @@ +fabric { + cache_dir = "./.fabric" + plugin_registry { + mirror_dir = "dist/plugins" + } + plugin_versions = { + "blackstork/virustotal" = "0.0.0-dev" + } +} + +config data virustotal_api_usage { + api_key = "" +} + +document "example" { + title = "Using virustotal plugin" + + data virustotal_api_usage "my_usage" { + user_id = "" + start_date = "20240201" + end_date = "20240203" + } + content text { + text = "{{.data.virustotal_api_usage.my_usage.daily}}" + } +} \ No newline at end of file diff --git a/gen.go b/gen.go index 2c3806a4..7b070f0f 100644 --- a/gen.go +++ b/gen.go @@ -1,4 +1,4 @@ package main -//go:generate go run github.com/vektra/mockery/v2@v2.40.1 +//go:generate go run github.com/vektra/mockery/v2@v2.42.0 //go:generate go run github.com/bufbuild/buf/cmd/buf@v1.29.0 generate diff --git a/go.mod b/go.mod index 909c86ad..ce65d656 100644 --- a/go.mod +++ b/go.mod @@ -10,6 +10,7 @@ require ( github.com/elastic/go-elasticsearch/v8 v8.11.1 github.com/golang-cz/devslog v0.0.8 github.com/google/go-github/v58 v58.0.0 + github.com/google/go-querystring v1.1.0 github.com/hashicorp/go-hclog v0.14.1 github.com/hashicorp/go-plugin v1.6.0 github.com/hashicorp/hcl/v2 v2.19.1 @@ -59,7 +60,6 @@ require ( github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/protobuf v1.5.3 // indirect github.com/google/go-cmp v0.6.0 // indirect - github.com/google/go-querystring v1.1.0 // indirect github.com/google/uuid v1.4.0 // indirect github.com/hashicorp/yamux v0.1.1 // indirect github.com/hokaccha/go-prettyjson v0.0.0-20211117102719-0474bc63780f // indirect diff --git a/mocks/internalpkg/github/client.go b/mocks/internalpkg/github/client.go index f1c3ffa4..1906e71c 100644 --- a/mocks/internalpkg/github/client.go +++ b/mocks/internalpkg/github/client.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.40.1. DO NOT EDIT. +// Code generated by mockery v2.42.0. DO NOT EDIT. package github_mocks diff --git a/mocks/internalpkg/hackerone/client/client.go b/mocks/internalpkg/hackerone/client/client.go new file mode 100644 index 00000000..ba8120c9 --- /dev/null +++ b/mocks/internalpkg/hackerone/client/client.go @@ -0,0 +1,97 @@ +// Code generated by mockery v2.42.0. DO NOT EDIT. 
+ +package client_mocks + +import ( + context "context" + + client "github.com/blackstork-io/fabric/internal/hackerone/client" + + mock "github.com/stretchr/testify/mock" +) + +// Client is an autogenerated mock type for the Client type +type Client struct { + mock.Mock +} + +type Client_Expecter struct { + mock *mock.Mock +} + +func (_m *Client) EXPECT() *Client_Expecter { + return &Client_Expecter{mock: &_m.Mock} +} + +// GetAllReports provides a mock function with given fields: ctx, req +func (_m *Client) GetAllReports(ctx context.Context, req *client.GetAllReportsReq) (*client.GetAllReportsRes, error) { + ret := _m.Called(ctx, req) + + if len(ret) == 0 { + panic("no return value specified for GetAllReports") + } + + var r0 *client.GetAllReportsRes + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *client.GetAllReportsReq) (*client.GetAllReportsRes, error)); ok { + return rf(ctx, req) + } + if rf, ok := ret.Get(0).(func(context.Context, *client.GetAllReportsReq) *client.GetAllReportsRes); ok { + r0 = rf(ctx, req) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*client.GetAllReportsRes) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *client.GetAllReportsReq) error); ok { + r1 = rf(ctx, req) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Client_GetAllReports_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAllReports' +type Client_GetAllReports_Call struct { + *mock.Call +} + +// GetAllReports is a helper method to define mock.On call +// - ctx context.Context +// - req *client.GetAllReportsReq +func (_e *Client_Expecter) GetAllReports(ctx interface{}, req interface{}) *Client_GetAllReports_Call { + return &Client_GetAllReports_Call{Call: _e.mock.On("GetAllReports", ctx, req)} +} + +func (_c *Client_GetAllReports_Call) Run(run func(ctx context.Context, req *client.GetAllReportsReq)) *Client_GetAllReports_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*client.GetAllReportsReq)) + }) + return _c +} + +func (_c *Client_GetAllReports_Call) Return(_a0 *client.GetAllReportsRes, _a1 error) *Client_GetAllReports_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Client_GetAllReports_Call) RunAndReturn(run func(context.Context, *client.GetAllReportsReq) (*client.GetAllReportsRes, error)) *Client_GetAllReports_Call { + _c.Call.Return(run) + return _c +} + +// NewClient creates a new instance of Client. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewClient(t interface { + mock.TestingT + Cleanup(func()) +}) *Client { + mock := &Client{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/mocks/internalpkg/openai/client/client.go b/mocks/internalpkg/openai/client/client.go index 4e250032..e6fea8a5 100644 --- a/mocks/internalpkg/openai/client/client.go +++ b/mocks/internalpkg/openai/client/client.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.40.1. DO NOT EDIT. +// Code generated by mockery v2.42.0. DO NOT EDIT. package client_mocks diff --git a/mocks/internalpkg/splunk/client/client.go b/mocks/internalpkg/splunk/client/client.go new file mode 100644 index 00000000..46b34180 --- /dev/null +++ b/mocks/internalpkg/splunk/client/client.go @@ -0,0 +1,215 @@ +// Code generated by mockery v2.42.0. DO NOT EDIT. 
+ +package client_mocks + +import ( + context "context" + + client "github.com/blackstork-io/fabric/internal/splunk/client" + + mock "github.com/stretchr/testify/mock" +) + +// Client is an autogenerated mock type for the Client type +type Client struct { + mock.Mock +} + +type Client_Expecter struct { + mock *mock.Mock +} + +func (_m *Client) EXPECT() *Client_Expecter { + return &Client_Expecter{mock: &_m.Mock} +} + +// CreateSearchJob provides a mock function with given fields: ctx, req +func (_m *Client) CreateSearchJob(ctx context.Context, req *client.CreateSearchJobReq) (*client.CreateSearchJobRes, error) { + ret := _m.Called(ctx, req) + + if len(ret) == 0 { + panic("no return value specified for CreateSearchJob") + } + + var r0 *client.CreateSearchJobRes + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *client.CreateSearchJobReq) (*client.CreateSearchJobRes, error)); ok { + return rf(ctx, req) + } + if rf, ok := ret.Get(0).(func(context.Context, *client.CreateSearchJobReq) *client.CreateSearchJobRes); ok { + r0 = rf(ctx, req) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*client.CreateSearchJobRes) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *client.CreateSearchJobReq) error); ok { + r1 = rf(ctx, req) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Client_CreateSearchJob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateSearchJob' +type Client_CreateSearchJob_Call struct { + *mock.Call +} + +// CreateSearchJob is a helper method to define mock.On call +// - ctx context.Context +// - req *client.CreateSearchJobReq +func (_e *Client_Expecter) CreateSearchJob(ctx interface{}, req interface{}) *Client_CreateSearchJob_Call { + return &Client_CreateSearchJob_Call{Call: _e.mock.On("CreateSearchJob", ctx, req)} +} + +func (_c *Client_CreateSearchJob_Call) Run(run func(ctx context.Context, req *client.CreateSearchJobReq)) *Client_CreateSearchJob_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*client.CreateSearchJobReq)) + }) + return _c +} + +func (_c *Client_CreateSearchJob_Call) Return(_a0 *client.CreateSearchJobRes, _a1 error) *Client_CreateSearchJob_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Client_CreateSearchJob_Call) RunAndReturn(run func(context.Context, *client.CreateSearchJobReq) (*client.CreateSearchJobRes, error)) *Client_CreateSearchJob_Call { + _c.Call.Return(run) + return _c +} + +// GetSearchJobByID provides a mock function with given fields: ctx, req +func (_m *Client) GetSearchJobByID(ctx context.Context, req *client.GetSearchJobByIDReq) (*client.GetSearchJobByIDRes, error) { + ret := _m.Called(ctx, req) + + if len(ret) == 0 { + panic("no return value specified for GetSearchJobByID") + } + + var r0 *client.GetSearchJobByIDRes + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *client.GetSearchJobByIDReq) (*client.GetSearchJobByIDRes, error)); ok { + return rf(ctx, req) + } + if rf, ok := ret.Get(0).(func(context.Context, *client.GetSearchJobByIDReq) *client.GetSearchJobByIDRes); ok { + r0 = rf(ctx, req) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*client.GetSearchJobByIDRes) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *client.GetSearchJobByIDReq) error); ok { + r1 = rf(ctx, req) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Client_GetSearchJobByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSearchJobByID' 
+type Client_GetSearchJobByID_Call struct { + *mock.Call +} + +// GetSearchJobByID is a helper method to define mock.On call +// - ctx context.Context +// - req *client.GetSearchJobByIDReq +func (_e *Client_Expecter) GetSearchJobByID(ctx interface{}, req interface{}) *Client_GetSearchJobByID_Call { + return &Client_GetSearchJobByID_Call{Call: _e.mock.On("GetSearchJobByID", ctx, req)} +} + +func (_c *Client_GetSearchJobByID_Call) Run(run func(ctx context.Context, req *client.GetSearchJobByIDReq)) *Client_GetSearchJobByID_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*client.GetSearchJobByIDReq)) + }) + return _c +} + +func (_c *Client_GetSearchJobByID_Call) Return(_a0 *client.GetSearchJobByIDRes, _a1 error) *Client_GetSearchJobByID_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Client_GetSearchJobByID_Call) RunAndReturn(run func(context.Context, *client.GetSearchJobByIDReq) (*client.GetSearchJobByIDRes, error)) *Client_GetSearchJobByID_Call { + _c.Call.Return(run) + return _c +} + +// GetSearchJobResults provides a mock function with given fields: ctx, req +func (_m *Client) GetSearchJobResults(ctx context.Context, req *client.GetSearchJobResultsReq) (*client.GetSearchJobResultsRes, error) { + ret := _m.Called(ctx, req) + + if len(ret) == 0 { + panic("no return value specified for GetSearchJobResults") + } + + var r0 *client.GetSearchJobResultsRes + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *client.GetSearchJobResultsReq) (*client.GetSearchJobResultsRes, error)); ok { + return rf(ctx, req) + } + if rf, ok := ret.Get(0).(func(context.Context, *client.GetSearchJobResultsReq) *client.GetSearchJobResultsRes); ok { + r0 = rf(ctx, req) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*client.GetSearchJobResultsRes) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *client.GetSearchJobResultsReq) error); ok { + r1 = rf(ctx, req) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Client_GetSearchJobResults_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSearchJobResults' +type Client_GetSearchJobResults_Call struct { + *mock.Call +} + +// GetSearchJobResults is a helper method to define mock.On call +// - ctx context.Context +// - req *client.GetSearchJobResultsReq +func (_e *Client_Expecter) GetSearchJobResults(ctx interface{}, req interface{}) *Client_GetSearchJobResults_Call { + return &Client_GetSearchJobResults_Call{Call: _e.mock.On("GetSearchJobResults", ctx, req)} +} + +func (_c *Client_GetSearchJobResults_Call) Run(run func(ctx context.Context, req *client.GetSearchJobResultsReq)) *Client_GetSearchJobResults_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*client.GetSearchJobResultsReq)) + }) + return _c +} + +func (_c *Client_GetSearchJobResults_Call) Return(_a0 *client.GetSearchJobResultsRes, _a1 error) *Client_GetSearchJobResults_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Client_GetSearchJobResults_Call) RunAndReturn(run func(context.Context, *client.GetSearchJobResultsReq) (*client.GetSearchJobResultsRes, error)) *Client_GetSearchJobResults_Call { + _c.Call.Return(run) + return _c +} + +// NewClient creates a new instance of Client. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewClient(t interface { + mock.TestingT + Cleanup(func()) +}) *Client { + mock := &Client{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/mocks/internalpkg/virustotal/client/client.go b/mocks/internalpkg/virustotal/client/client.go new file mode 100644 index 00000000..d3ee7a54 --- /dev/null +++ b/mocks/internalpkg/virustotal/client/client.go @@ -0,0 +1,156 @@ +// Code generated by mockery v2.42.0. DO NOT EDIT. + +package client_mocks + +import ( + context "context" + + client "github.com/blackstork-io/fabric/internal/virustotal/client" + + mock "github.com/stretchr/testify/mock" +) + +// Client is an autogenerated mock type for the Client type +type Client struct { + mock.Mock +} + +type Client_Expecter struct { + mock *mock.Mock +} + +func (_m *Client) EXPECT() *Client_Expecter { + return &Client_Expecter{mock: &_m.Mock} +} + +// GetGroupAPIUsage provides a mock function with given fields: ctx, req +func (_m *Client) GetGroupAPIUsage(ctx context.Context, req *client.GetGroupAPIUsageReq) (*client.GetGroupAPIUsageRes, error) { + ret := _m.Called(ctx, req) + + if len(ret) == 0 { + panic("no return value specified for GetGroupAPIUsage") + } + + var r0 *client.GetGroupAPIUsageRes + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *client.GetGroupAPIUsageReq) (*client.GetGroupAPIUsageRes, error)); ok { + return rf(ctx, req) + } + if rf, ok := ret.Get(0).(func(context.Context, *client.GetGroupAPIUsageReq) *client.GetGroupAPIUsageRes); ok { + r0 = rf(ctx, req) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*client.GetGroupAPIUsageRes) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *client.GetGroupAPIUsageReq) error); ok { + r1 = rf(ctx, req) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Client_GetGroupAPIUsage_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetGroupAPIUsage' +type Client_GetGroupAPIUsage_Call struct { + *mock.Call +} + +// GetGroupAPIUsage is a helper method to define mock.On call +// - ctx context.Context +// - req *client.GetGroupAPIUsageReq +func (_e *Client_Expecter) GetGroupAPIUsage(ctx interface{}, req interface{}) *Client_GetGroupAPIUsage_Call { + return &Client_GetGroupAPIUsage_Call{Call: _e.mock.On("GetGroupAPIUsage", ctx, req)} +} + +func (_c *Client_GetGroupAPIUsage_Call) Run(run func(ctx context.Context, req *client.GetGroupAPIUsageReq)) *Client_GetGroupAPIUsage_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*client.GetGroupAPIUsageReq)) + }) + return _c +} + +func (_c *Client_GetGroupAPIUsage_Call) Return(_a0 *client.GetGroupAPIUsageRes, _a1 error) *Client_GetGroupAPIUsage_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Client_GetGroupAPIUsage_Call) RunAndReturn(run func(context.Context, *client.GetGroupAPIUsageReq) (*client.GetGroupAPIUsageRes, error)) *Client_GetGroupAPIUsage_Call { + _c.Call.Return(run) + return _c +} + +// GetUserAPIUsage provides a mock function with given fields: ctx, req +func (_m *Client) GetUserAPIUsage(ctx context.Context, req *client.GetUserAPIUsageReq) (*client.GetUserAPIUsageRes, error) { + ret := _m.Called(ctx, req) + + if len(ret) == 0 { + panic("no return value specified for GetUserAPIUsage") + } + + var r0 *client.GetUserAPIUsageRes + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *client.GetUserAPIUsageReq) (*client.GetUserAPIUsageRes, error)); ok { + return rf(ctx, req) + } + if rf, ok := 
ret.Get(0).(func(context.Context, *client.GetUserAPIUsageReq) *client.GetUserAPIUsageRes); ok { + r0 = rf(ctx, req) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*client.GetUserAPIUsageRes) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *client.GetUserAPIUsageReq) error); ok { + r1 = rf(ctx, req) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Client_GetUserAPIUsage_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetUserAPIUsage' +type Client_GetUserAPIUsage_Call struct { + *mock.Call +} + +// GetUserAPIUsage is a helper method to define mock.On call +// - ctx context.Context +// - req *client.GetUserAPIUsageReq +func (_e *Client_Expecter) GetUserAPIUsage(ctx interface{}, req interface{}) *Client_GetUserAPIUsage_Call { + return &Client_GetUserAPIUsage_Call{Call: _e.mock.On("GetUserAPIUsage", ctx, req)} +} + +func (_c *Client_GetUserAPIUsage_Call) Run(run func(ctx context.Context, req *client.GetUserAPIUsageReq)) *Client_GetUserAPIUsage_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*client.GetUserAPIUsageReq)) + }) + return _c +} + +func (_c *Client_GetUserAPIUsage_Call) Return(_a0 *client.GetUserAPIUsageRes, _a1 error) *Client_GetUserAPIUsage_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Client_GetUserAPIUsage_Call) RunAndReturn(run func(context.Context, *client.GetUserAPIUsageReq) (*client.GetUserAPIUsageRes, error)) *Client_GetUserAPIUsage_Call { + _c.Call.Return(run) + return _c +} + +// NewClient creates a new instance of Client. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewClient(t interface { + mock.TestingT + Cleanup(func()) +}) *Client { + mock := &Client{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/mocks/parser/definitions/fabric_block.go b/mocks/parser/definitions/fabric_block.go index dfe386b9..aa6abfaa 100644 --- a/mocks/parser/definitions/fabric_block.go +++ b/mocks/parser/definitions/fabric_block.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.40.3. DO NOT EDIT. +// Code generated by mockery v2.42.0. DO NOT EDIT. 
package definitions_mocks diff --git a/tools/docgen/main.go b/tools/docgen/main.go index fafdddbf..97286374 100644 --- a/tools/docgen/main.go +++ b/tools/docgen/main.go @@ -15,10 +15,14 @@ import ( "github.com/blackstork-io/fabric/internal/elasticsearch" "github.com/blackstork-io/fabric/internal/github" "github.com/blackstork-io/fabric/internal/graphql" + "github.com/blackstork-io/fabric/internal/hackerone" "github.com/blackstork-io/fabric/internal/openai" "github.com/blackstork-io/fabric/internal/postgresql" + "github.com/blackstork-io/fabric/internal/splunk" "github.com/blackstork-io/fabric/internal/sqlite" + "github.com/blackstork-io/fabric/internal/stixview" "github.com/blackstork-io/fabric/internal/terraform" + "github.com/blackstork-io/fabric/internal/virustotal" "github.com/blackstork-io/fabric/plugin" ) @@ -52,6 +56,10 @@ func main() { postgresql.Plugin(version), sqlite.Plugin(version), terraform.Plugin(version), + hackerone.Plugin(version, nil), + virustotal.Plugin(version, nil), + splunk.Plugin(version, nil), + stixview.Plugin(version), } // generate markdown for each plugin for _, p := range plugins { From b0b0f585f42478785d5e99a38ba1e4ad9f4ea933 Mon Sep 17 00:00:00 2001 From: dobarx Date: Wed, 21 Feb 2024 13:10:32 +0200 Subject: [PATCH 8/8] internal: add v0.3 plugins to schema tests --- internal/plugin_validity_test.go | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/internal/plugin_validity_test.go b/internal/plugin_validity_test.go index da076de0..b93b0863 100644 --- a/internal/plugin_validity_test.go +++ b/internal/plugin_validity_test.go @@ -12,11 +12,15 @@ import ( "github.com/blackstork-io/fabric/internal/elasticsearch" "github.com/blackstork-io/fabric/internal/github" "github.com/blackstork-io/fabric/internal/graphql" + "github.com/blackstork-io/fabric/internal/hackerone" "github.com/blackstork-io/fabric/internal/openai" "github.com/blackstork-io/fabric/internal/opencti" "github.com/blackstork-io/fabric/internal/postgresql" + "github.com/blackstork-io/fabric/internal/splunk" "github.com/blackstork-io/fabric/internal/sqlite" + "github.com/blackstork-io/fabric/internal/stixview" "github.com/blackstork-io/fabric/internal/terraform" + "github.com/blackstork-io/fabric/internal/virustotal" "github.com/blackstork-io/fabric/plugin" ) @@ -33,6 +37,10 @@ func TestAllPluginSchemaValidity(t *testing.T) { postgresql.Plugin(ver), sqlite.Plugin(ver), terraform.Plugin(ver), + hackerone.Plugin(ver, nil), + virustotal.Plugin(ver, nil), + stixview.Plugin(ver), + splunk.Plugin(ver, nil), } for _, p := range plugins { p := p