diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml index 0620dcb1..accb7c67 100644 --- a/.github/workflows/cd.yml +++ b/.github/workflows/cd.yml @@ -10,7 +10,7 @@ jobs: name: Release strategy: matrix: - go-version: [1.17.0] + go-version: [1.18] os: [ubuntu-latest] runs-on: ${{ matrix.os }} env: @@ -18,21 +18,15 @@ jobs: DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} steps: + - name: Checkout code + uses: actions/checkout@v3 + - name: Install Go - uses: actions/setup-go@v2 + uses: actions/setup-go@v3 with: go-version: ${{ matrix.go-version }} - - - name: Checkout code - uses: actions/checkout@v2 - - - name: Cache go modules - uses: actions/cache@v2 - with: - path: ~/go/pkg/mod - key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} - restore-keys: | - ${{ runner.os }}-go- + check-latest: true + cache: true - name: Run linter run: make lint @@ -49,6 +43,14 @@ jobs: echo "VERSION file ${{steps.version.outputs.VERSION_FILE}} does not match tagged version ${{ github.ref }}" exit 1 + - name: Snyk Setup + uses: snyk/actions/setup@master + + - name: Run Snyk to check for vulnerabilities + run: snyk test --project-name=stream-replicator --severity-threshold=high + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + - name: Compile run: make all diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ee02cc62..9fe33695 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,28 +12,30 @@ jobs: name: Compile & Test strategy: matrix: - go-version: [1.17.0] + go-version: [1.18] os: [ubuntu-latest] runs-on: ${{ matrix.os }} env: NGROK_TOKEN: ${{ secrets.NGROK_TOKEN }} steps: + - name: Checkout code + uses: actions/checkout@v3 + - name: Install Go - uses: actions/setup-go@v2 + uses: actions/setup-go@v3 with: go-version: ${{ matrix.go-version }} + check-latest: true + cache: true - - name: Checkout code - uses: actions/checkout@v2 + - name: Snyk Setup + uses: snyk/actions/setup@master - - name: Cache go modules - uses: actions/cache@v2 
- with: - path: ~/go/pkg/mod - key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} - restore-keys: | - ${{ runner.os }}-go- + - name: Run Snyk to check for vulnerabilities + run: snyk test --project-name=stream-replicator --severity-threshold=high + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} - name: Block Concurrent Executions of Integration Tests if: ${{ env.NGROK_TOKEN != '' }} @@ -60,4 +62,4 @@ jobs: run: make lint - name: Compile all targets - run: make all + run: make all \ No newline at end of file diff --git a/.github/workflows/snyk.yml b/.github/workflows/snyk.yml index f42d728c..dea58637 100644 --- a/.github/workflows/snyk.yml +++ b/.github/workflows/snyk.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Run Snyk to check for vulnerabilities uses: snyk/actions/golang@master diff --git a/.gitignore b/.gitignore index 21bf590a..c414211c 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,6 @@ vendor/ build/ dist/ .localstack/ + +#temporary directory created by tests +tmp_replicator/ \ No newline at end of file diff --git a/CHANGELOG b/CHANGELOG index 57a13a88..d46d7cd8 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,3 +1,30 @@ +Version 1.0.0 (2022-08-15) +-------------------------- +Refactor v1 release filters (#192) +Remove support for GCP auth via env vars (#181) +Bump dependencies (#120) +Fix import order (#175) +Allow env var configuration of updated transformation config (#174) +Fix typo in statsd reporting (#158) +Cleanup Makefile (#112) +Make setting of EventHub Partition Key configurable (#148) +Fix latency reporting with no transformations (#108) +Rationalise transformations and transformation config (#169) +Resolve CI caching errors (#164) +Bump to Go 1.18 (#163) +Bump analytics SDK version to v0.3.0 (#131) +Fix bug in makefile that prevents integration-down from completing (#162) +Revamp unit testing project-wide (#129) +Make anything that doesn't need to be exported 
private (#111) +Add custom transformation layer (#146) +Fail tests on NewConfig error (#145) +Remove AWS Lambda and GCP Cloudfunctions builds (#140) +Add telemetry (#124) +Extend filtering to use custom data (#176) +Use Snyk test to block release if there are vulnerabilities (#119) +Clean up tls configuration (#177) +Allow configuration from a file (#105) + Version 0.8.1 (2022-06-07) -------------------------- Update Sarama package to 1.34 for kafka v3 (#133) diff --git a/Makefile b/Makefile index 6c132d60..e4f9c6ff 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: all gox aws-lambda gcp-cloudfunctions cli cli-linux cli-darwin cli-windows container format lint tidy test-setup test integration-reset integration-up integration-down integration-test container-release clean +.PHONY: all gox cli cli-linux cli-darwin cli-windows container format lint tidy test-setup test integration-reset integration-up integration-down integration-test container-release clean # ----------------------------------------------------------------------------- # CONSTANTS @@ -11,8 +11,6 @@ go_dirs = `go list ./... 
| grep -v /build/ | grep -v /vendor/` build_dir = build vendor_dir = vendor integration_dir = integration -cert_dir = $(integration_dir)/http -abs_cert_dir = $$(pwd)/$(cert_dir) ngrok_path = ${NGROK_DIR}ngrok # Set NGROK_DIR to `/path/to/directory/` for local setup coverage_dir = $(build_dir)/coverage @@ -34,57 +32,12 @@ gcp_container_name = snowplow/stream-replicator-gcp # BUILDING # ----------------------------------------------------------------------------- -all: aws-lambda gcp-cloudfunctions cli container +all: cli container gox: - GO111MODULE=on go install github.com/mitchellh/gox@latest + go install github.com/mitchellh/gox@latest mkdir -p $(compiled_dir) -aws-lambda: gox - # WARNING: Binary must be called 'main' to work in Lambda - GO111MODULE=on CGO_ENABLED=0 gox -osarch=linux/amd64 -output=$(linux_out_dir)/aws/lambda/main ./cmd/aws/lambda/ - - # Create ZIP file for upload to Lambda - (cd $(linux_out_dir)/aws/lambda/ && zip -r staging.zip main) - mv $(linux_out_dir)/aws/lambda/staging.zip $(compiled_dir)/aws_lambda_stream_replicator_$(version)_linux_amd64.zip - -gcp-cloudfunctions: gox - mkdir -p $(staging_dir)/gcp/cloudfunctions - - # Copy dependencies into staging area - cp ./cmd/gcp/cloudfunctions/function.go $(staging_dir)/gcp/cloudfunctions/function.go - - # Get module dependencies in a vendor directory - GO111MODULE=on go mod vendor - cp -R ./$(vendor_dir)/ $(staging_dir)/gcp/cloudfunctions/vendor/ - - # Copy local packages into staging area - mkdir -p $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/cmd/ - cp ./cmd/constants.go $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/cmd/constants.go - cp ./cmd/init.go $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/cmd/init.go - cp ./cmd/serverless.go $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/cmd/serverless.go - - mkdir -p 
$(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/config/ - cp ./config/config.go $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/config/config.go - - mkdir -p $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/ - cp -R ./pkg/ $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/pkg/ - - mkdir -p $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/third_party/snowplow/ - cp -R ./third_party/snowplow/badrows/ $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/third_party/snowplow/badrows - cp -R ./third_party/snowplow/iglu/ $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/third_party/snowplow/iglu - - echo "# github.com/snowplow-devops/stream-replicator v$(version)" >> $(staging_dir)/gcp/cloudfunctions/vendor/modules.txt - echo "github.com/snowplow-devops/stream-replicator/config" >> $(staging_dir)/gcp/cloudfunctions/vendor/modules.txt - echo "github.com/snowplow-devops/stream-replicator/cmd" >> $(staging_dir)/gcp/cloudfunctions/vendor/modules.txt - echo "github.com/snowplow-devops/stream-replicator/pkg" >> $(staging_dir)/gcp/cloudfunctions/vendor/modules.txt - echo "github.com/snowplow-devops/stream-replicator/third_party/snowplow/badrows" >> $(staging_dir)/gcp/cloudfunctions/vendor/modules.txt - echo "github.com/snowplow-devops/stream-replicator/third_party/snowplow/iglu" >> $(staging_dir)/gcp/cloudfunctions/vendor/modules.txt - - # Create ZIP file for upload to CloudFunctions - (cd $(staging_dir)/gcp/cloudfunctions/ && zip -r staging.zip .) 
- mv $(staging_dir)/gcp/cloudfunctions/staging.zip $(compiled_dir)/gcp_cloudfunctions_stream_replicator_$(version)_linux_amd64.zip - cli: gox cli-linux cli-darwin cli-windows (cd $(linux_out_dir)/aws/cli/ && zip -r staging.zip stream-replicator) mv $(linux_out_dir)/aws/cli/staging.zip $(compiled_dir)/aws_cli_stream_replicator_$(version)_linux_amd64.zip @@ -100,16 +53,16 @@ cli: gox cli-linux cli-darwin cli-windows mv $(windows_out_dir)/gcp/cli/staging.zip $(compiled_dir)/gcp_cli_stream_replicator_$(version)_windows_amd64.zip cli-linux: gox - GO111MODULE=on CGO_ENABLED=0 gox -osarch=linux/amd64 -output=$(linux_out_dir)/aws/cli/stream-replicator ./cmd/aws/cli/ - GO111MODULE=on CGO_ENABLED=0 gox -osarch=linux/amd64 -output=$(linux_out_dir)/gcp/cli/stream-replicator ./cmd/gcp/cli/ + CGO_ENABLED=0 gox -osarch=linux/amd64 -output=$(linux_out_dir)/aws/cli/stream-replicator ./cmd/aws/cli/ + CGO_ENABLED=0 gox -osarch=linux/amd64 -output=$(linux_out_dir)/gcp/cli/stream-replicator ./cmd/gcp/cli/ cli-darwin: gox - GO111MODULE=on CGO_ENABLED=0 gox -osarch=darwin/amd64 -output=$(darwin_out_dir)/aws/cli/stream-replicator ./cmd/aws/cli/ - GO111MODULE=on CGO_ENABLED=0 gox -osarch=darwin/amd64 -output=$(darwin_out_dir)/gcp/cli/stream-replicator ./cmd/gcp/cli/ + CGO_ENABLED=0 gox -osarch=darwin/amd64 -output=$(darwin_out_dir)/aws/cli/stream-replicator ./cmd/aws/cli/ + CGO_ENABLED=0 gox -osarch=darwin/amd64 -output=$(darwin_out_dir)/gcp/cli/stream-replicator ./cmd/gcp/cli/ cli-windows: gox - GO111MODULE=on CGO_ENABLED=0 gox -osarch=windows/amd64 -output=$(windows_out_dir)/aws/cli/stream-replicator ./cmd/aws/cli/ - GO111MODULE=on CGO_ENABLED=0 gox -osarch=windows/amd64 -output=$(windows_out_dir)/gcp/cli/stream-replicator ./cmd/gcp/cli/ + CGO_ENABLED=0 gox -osarch=windows/amd64 -output=$(windows_out_dir)/aws/cli/stream-replicator ./cmd/aws/cli/ + CGO_ENABLED=0 gox -osarch=windows/amd64 -output=$(windows_out_dir)/gcp/cli/stream-replicator ./cmd/gcp/cli/ container: cli-linux docker build 
-t $(aws_container_name):$(version) -f Dockerfile.aws . @@ -120,15 +73,15 @@ container: cli-linux # ----------------------------------------------------------------------------- format: - GO111MODULE=on go fmt $(go_dirs) - GO111MODULE=on gofmt -s -w . + go fmt $(go_dirs) + gofmt -s -w . lint: - GO111MODULE=on go install golang.org/x/lint/golint@latest + go install golang.org/x/lint/golint@latest LINTRESULT=$$(golint $(go_dirs)); echo "$$LINTRESULT"; [ -z "$$LINTRESULT" ]; tidy: - GO111MODULE=on go mod tidy + go mod tidy # ----------------------------------------------------------------------------- # TESTING @@ -136,18 +89,17 @@ tidy: test-setup: mkdir -p $(coverage_dir) - GO111MODULE=on go install golang.org/x/tools/cmd/cover@latest + go install golang.org/x/tools/cmd/cover@latest test: test-setup - GO111MODULE=on go test $(go_dirs) -v -short -covermode=count -coverprofile=$(coverage_out) - GO111MODULE=on go tool cover -html=$(coverage_out) -o $(coverage_html) - GO111MODULE=on go tool cover -func=$(coverage_out) + go test $(go_dirs) -v -short -covermode=count -coverprofile=$(coverage_out) + go tool cover -html=$(coverage_out) -o $(coverage_html) + go tool cover -func=$(coverage_out) integration-test: test-setup - export CERT_DIR=$(abs_cert_dir); \ - GO111MODULE=on go test $(go_dirs) -v -covermode=count -coverprofile=$(coverage_out) - GO111MODULE=on go tool cover -html=$(coverage_out) -o $(coverage_html) - GO111MODULE=on go tool cover -func=$(coverage_out) + go test $(go_dirs) -v -covermode=count -coverprofile=$(coverage_out) + go tool cover -html=$(coverage_out) -o $(coverage_html) + go tool cover -func=$(coverage_out) integration-reset: integration-down integration-up @@ -159,6 +111,7 @@ integration-down: http-down (cd $(integration_dir) && docker-compose -f ./docker-compose.yml down) rm -rf $(integration_dir)/.localstack +# ngrok needs to be installed and auth token must be configured for this if running locally http-up: (cd "$(integration_dir)/http/server" && 
go run server.go &) sleep 5 @@ -166,7 +119,7 @@ http-up: http-down: (cd "$(integration_dir)/http/shutdown" && go run shutdownRequest.go) - killall ngrok + killall ngrok || true # ----------------------------------------------------------------------------- # RELEASE diff --git a/README.md b/README.md index 44f982f9..135125d7 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ ## Overview -Easily replicate data streams wherever you need them to be! This application is available in three different runtimes to facilitate different needs - AWS Lambda, GCP CloudFunctions and as a standalone application. +Easily replicate data streams wherever you need them to be! This application is available as a standalone application. See the [wiki documention](https://github.com/snowplow-devops/stream-replicator/wiki) for details on how to configure and run the application. @@ -60,5 +60,5 @@ Unauthorized copying of this project via any medium is strictly prohibited. Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. -[release-image]: http://img.shields.io/badge/golang-0.8.1-6ad7e5.svg?style=flat +[release-image]: http://img.shields.io/badge/golang-1.0.0-6ad7e5.svg?style=flat [releases]: https://github.com/snowplow-devops/stream-replicator/releases/ diff --git a/VERSION b/VERSION index c18d72be..afaf360d 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.8.1 \ No newline at end of file +1.0.0 \ No newline at end of file diff --git a/cmd/aws/lambda/main.go b/cmd/aws/lambda/main.go deleted file mode 100644 index 640fa9c2..00000000 --- a/cmd/aws/lambda/main.go +++ /dev/null @@ -1,35 +0,0 @@ -// PROPRIETARY AND CONFIDENTIAL -// -// Unauthorized copying of this file via any medium is strictly prohibited. -// -// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
- -package main - -import ( - "context" - - "github.com/aws/aws-lambda-go/events" - "github.com/aws/aws-lambda-go/lambda" - - "github.com/snowplow-devops/stream-replicator/cmd" - "github.com/snowplow-devops/stream-replicator/pkg/models" -) - -func main() { - lambda.Start(HandleRequest) -} - -// HandleRequest processes the Kinesis event and forwards it onto another stream -func HandleRequest(ctx context.Context, event events.KinesisEvent) error { - messages := make([]*models.Message, len(event.Records)) - for i := 0; i < len(messages); i++ { - record := event.Records[i] - messages[i] = &models.Message{ - Data: record.Kinesis.Data, - PartitionKey: record.Kinesis.PartitionKey, - } - } - - return cmd.ServerlessRequestHandler(messages) -} diff --git a/cmd/cli/cli.go b/cmd/cli/cli.go index 5608195f..a216c2f6 100644 --- a/cmd/cli/cli.go +++ b/cmd/cli/cli.go @@ -22,13 +22,16 @@ import ( _ "net/http/pprof" "github.com/snowplow-devops/stream-replicator/cmd" + "github.com/snowplow-devops/stream-replicator/pkg/common" "github.com/snowplow-devops/stream-replicator/pkg/failure/failureiface" "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow-devops/stream-replicator/pkg/observer" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceconfig" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" "github.com/snowplow-devops/stream-replicator/pkg/target/targetiface" + "github.com/snowplow-devops/stream-replicator/pkg/telemetry" "github.com/snowplow-devops/stream-replicator/pkg/transform" + "github.com/snowplow-devops/stream-replicator/pkg/transform/transformconfig" ) const ( @@ -78,7 +81,7 @@ func RunCli(supportedSourceConfigPairs []sourceconfig.ConfigPair) { return err } - tr, err := cfg.GetTransformations() + tr, err := transformconfig.GetTransformations(cfg) if err != nil { return err } @@ -105,6 +108,8 @@ func RunCli(supportedSourceConfigPairs []sourceconfig.ConfigPair) { } o.Start() + stopTelemetry := 
telemetry.InitTelemetryWithCollector(cfg) + // Handle SIGTERM sig := make(chan os.Signal) signal.Notify(sig, os.Interrupt, syscall.SIGTERM, os.Kill) @@ -121,12 +126,24 @@ func RunCli(supportedSourceConfigPairs []sourceconfig.ConfigPair) { select { case <-stop: log.Debug("source.Stop() finished successfully!") + + stopTelemetry() + err := common.DeleteTemporaryDir() + if err != nil { + log.Debugf(`error deleting tmp directory: %v`, err) + } case <-time.After(5 * time.Second): log.Error("source.Stop() took more than 5 seconds, forcing shutdown ...") t.Close() ft.Close() o.Stop() + stopTelemetry() + + err := common.DeleteTemporaryDir() + if err != nil { + log.Debugf(`error deleting tmp directory: %v`, err) + } os.Exit(1) } diff --git a/cmd/constants.go b/cmd/constants.go index fa33693d..c2989eee 100644 --- a/cmd/constants.go +++ b/cmd/constants.go @@ -8,7 +8,7 @@ package cmd const ( // AppVersion is the current version of the replicator - AppVersion = "0.8.1" + AppVersion = "1.0.0" // AppName is the name of the application to use in logging / places that require the artifact AppName = "stream-replicator" diff --git a/cmd/gcp/cloudfunctions/function.go b/cmd/gcp/cloudfunctions/function.go deleted file mode 100644 index e9795594..00000000 --- a/cmd/gcp/cloudfunctions/function.go +++ /dev/null @@ -1,33 +0,0 @@ -// PROPRIETARY AND CONFIDENTIAL -// -// Unauthorized copying of this file via any medium is strictly prohibited. -// -// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
- -package cloudfunctions - -import ( - "context" - - "github.com/twinj/uuid" - - "github.com/snowplow-devops/stream-replicator/cmd" - "github.com/snowplow-devops/stream-replicator/pkg/models" -) - -// PubSubMessage is the payload of a Pub/Sub message -type PubSubMessage struct { - Data []byte `json:"data"` -} - -// HandleRequest consumes a Pub/Sub message -func HandleRequest(ctx context.Context, m PubSubMessage) error { - messages := []*models.Message{ - { - Data: m.Data, - PartitionKey: uuid.NewV4().String(), - }, - } - - return cmd.ServerlessRequestHandler(messages) -} diff --git a/cmd/init.go b/cmd/init.go index 2045a265..a73a671a 100644 --- a/cmd/init.go +++ b/cmd/init.go @@ -9,7 +9,6 @@ package cmd import ( "encoding/json" "fmt" - "os" "github.com/getsentry/sentry-go" "github.com/pkg/errors" @@ -17,7 +16,6 @@ import ( sentryhook "github.com/snowplow-devops/go-sentryhook" config "github.com/snowplow-devops/stream-replicator/config" - "github.com/snowplow-devops/stream-replicator/pkg/common" ) var ( @@ -43,21 +41,12 @@ func Init() (*config.Config, bool, error) { return nil, false, errors.Wrap(err, "Failed to build config") } - // Configure GCP Access (if set) - if cfg.GoogleServiceAccountB64 != "" { - targetFile, err := common.GetGCPServiceAccountFromBase64(cfg.GoogleServiceAccountB64) - if err != nil { - return nil, false, errors.Wrap(err, "Failed to store GCP Service Account JSON file") - } - os.Setenv("GOOGLE_APPLICATION_CREDENTIALS", targetFile) - } - // Configure Sentry - sentryEnabled := cfg.Sentry.Dsn != "" + sentryEnabled := cfg.Data.Sentry.Dsn != "" if sentryEnabled { err := sentry.Init(sentry.ClientOptions{ - Dsn: cfg.Sentry.Dsn, - Debug: cfg.Sentry.Debug, + Dsn: cfg.Data.Sentry.Dsn, + Debug: cfg.Data.Sentry.Debug, AttachStacktrace: true, }) if err != nil { @@ -65,7 +54,7 @@ func Init() (*config.Config, bool, error) { } sentryTagsMap := map[string]string{} - err = json.Unmarshal([]byte(cfg.Sentry.Tags), &sentryTagsMap) + err = 
json.Unmarshal([]byte(cfg.Data.Sentry.Tags), &sentryTagsMap) if err != nil { return nil, false, errors.Wrap(err, "Failed to unmarshall SENTRY_TAGS to map") } @@ -79,10 +68,10 @@ func Init() (*config.Config, bool, error) { } // Configure logging level - if level, ok := logLevelsMap[cfg.LogLevel]; ok { + if level, ok := logLevelsMap[cfg.Data.LogLevel]; ok { log.SetLevel(level) } else { - return nil, sentryEnabled, fmt.Errorf("Supported log levels are 'debug, info, warning, error, fatal, panic'; provided %s", cfg.LogLevel) + return nil, sentryEnabled, fmt.Errorf("Supported log levels are 'debug, info, warning, error, fatal, panic'; provided %s", cfg.Data.LogLevel) } log.Debugf("Config: %+v", cfg) diff --git a/cmd/init_test.go b/cmd/init_test.go index 359e004b..f735ddf2 100644 --- a/cmd/init_test.go +++ b/cmd/init_test.go @@ -13,6 +13,12 @@ import ( "github.com/stretchr/testify/assert" ) +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} + func TestInit_Success(t *testing.T) { assert := assert.New(t) @@ -24,23 +30,21 @@ func TestInit_Success(t *testing.T) { func TestInit_Failure(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("STATS_RECEIVER_TIMEOUT_SEC") - - os.Setenv("STATS_RECEIVER_TIMEOUT_SEC", "debug") + t.Setenv("STATS_RECEIVER_TIMEOUT_SEC", "debug") cfg, _, err := Init() assert.Nil(cfg) assert.NotNil(err) + if err != nil { + assert.Equal("Failed to build config: env: parse error on field \"TimeoutSec\" of type \"int\": strconv.ParseInt: parsing \"debug\": invalid syntax", err.Error()) + } } func TestInit_Success_Sentry(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("SENTRY_DSN") - defer os.Unsetenv("SENTRY_TAGS") - - os.Setenv("SENTRY_DSN", "https://1111111111111111111111111111111d@sentry.snplow.net/28") - os.Setenv("SENTRY_TAGS", "{\"client_name\":\"com.acme\"}") + t.Setenv("SENTRY_DSN", "https://1111111111111111111111111111111d@sentry.snplow.net/28") + t.Setenv("SENTRY_TAGS", 
"{\"client_name\":\"com.acme\"}") cfg, _, err := Init() assert.NotNil(cfg) @@ -50,43 +54,39 @@ func TestInit_Success_Sentry(t *testing.T) { func TestInit_Failure_LogLevel(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("LOG_LEVEL") - - os.Setenv("LOG_LEVEL", "DEBUG") + t.Setenv("LOG_LEVEL", "DEBUG") cfg, _, err := Init() assert.Nil(cfg) assert.NotNil(err) - - assert.Equal("Supported log levels are 'debug, info, warning, error, fatal, panic'; provided DEBUG", err.Error()) + if err != nil { + assert.Equal("Supported log levels are 'debug, info, warning, error, fatal, panic'; provided DEBUG", err.Error()) + } } func TestInit_Failure_SentryDSN(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("SENTRY_DSN") - - os.Setenv("SENTRY_DSN", "blahblah") + t.Setenv("SENTRY_DSN", "blahblah") cfg, _, err := Init() assert.Nil(cfg) assert.NotNil(err) - - assert.Equal("Failed to build Sentry: [Sentry] DsnParseError: invalid scheme", err.Error()) + if err != nil { + assert.Equal("Failed to build Sentry: [Sentry] DsnParseError: invalid scheme", err.Error()) + } } func TestInit_Failure_SentryTags(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("SENTRY_DSN") - defer os.Unsetenv("SENTRY_TAGS") - - os.Setenv("SENTRY_DSN", "https://1111111111111111111111111111111d@sentry.snplow.net/28") - os.Setenv("SENTRY_TAGS", "asdasdasd") + t.Setenv("SENTRY_DSN", "https://1111111111111111111111111111111d@sentry.snplow.net/28") + t.Setenv("SENTRY_TAGS", "asdasdasd") cfg, _, err := Init() assert.Nil(cfg) assert.NotNil(err) - - assert.Equal("Failed to unmarshall SENTRY_TAGS to map: invalid character 'a' looking for beginning of value", err.Error()) + if err != nil { + assert.Equal("Failed to unmarshall SENTRY_TAGS to map: invalid character 'a' looking for beginning of value", err.Error()) + } } diff --git a/cmd/serverless.go b/cmd/serverless.go deleted file mode 100644 index 06b19776..00000000 --- a/cmd/serverless.go +++ /dev/null @@ -1,91 +0,0 @@ -// PROPRIETARY AND 
CONFIDENTIAL -// -// Unauthorized copying of this file via any medium is strictly prohibited. -// -// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. - -package cmd - -import ( - "time" - - "github.com/getsentry/sentry-go" - log "github.com/sirupsen/logrus" - - "github.com/snowplow-devops/stream-replicator/pkg/models" -) - -// ServerlessRequestHandler is a common function for all -// serverless implementations to leverage -func ServerlessRequestHandler(messages []*models.Message) error { - cfg, sentryEnabled, err := Init() - if err != nil { - return err - } - if sentryEnabled { - defer sentry.Flush(2 * time.Second) - } - - // --- Setup structs - - t, err := cfg.GetTarget() - if err != nil { - return err - } - t.Open() - - tr, err := cfg.GetTransformations() - if err != nil { - return err - } - - ft, err := cfg.GetFailureTarget(AppName, AppVersion) - if err != nil { - return err - } - ft.Open() - - // --- Process events - - transformed := tr(messages) - // no error as errors should be returned in the failures array of TransformationResult - - // Ack filtered messages with no further action - messagesToFilter := transformed.Filtered - for _, msg := range messagesToFilter { - if msg.AckFunc != nil { - msg.AckFunc() - } - } - - res, err := t.Write(transformed.Result) - if err != nil { - log.WithFields(log.Fields{"error": err}).Error(err) - } - - if len(res.Oversized) > 0 { - res2, err := ft.WriteOversized(t.MaximumAllowedMessageSizeBytes(), res.Oversized) - if len(res2.Oversized) != 0 || len(res2.Invalid) != 0 { - log.Fatal("Oversized message transformation resulted in new oversized / invalid messages") - } - if err != nil { - log.WithFields(log.Fields{"error": err}).Error(err) - } - } - - invalid := append(res.Invalid, transformed.Invalid...) 
- - if len(invalid) > 0 { - res3, err := ft.WriteInvalid(invalid) - if len(res3.Oversized) != 0 || len(res3.Invalid) != 0 { - log.Fatal("Invalid message transformation resulted in new invalid / oversized messages") - } - if err != nil { - log.WithFields(log.Fields{"error": err}).Error(err) - } - } - - t.Close() - ft.Close() - return err -} diff --git a/config/component.go b/config/component.go new file mode 100644 index 00000000..828897d6 --- /dev/null +++ b/config/component.go @@ -0,0 +1,54 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package config + +// ComponentConfigurable is the interface that wraps the ProvideDefault method. +type ComponentConfigurable interface { + // ProvideDefault returns a pointer to a structure that will be + // written with the decoded configuration. + ProvideDefault() (interface{}, error) +} + +// ComponentCreator is the interface that wraps the Create method. +type ComponentCreator interface { + // Create returns a pointer to an output structure given a pointer + // to an input structure. This interface is expected to be implemented + // by components that are creatable through a configuration. + Create(i interface{}) (interface{}, error) +} + +// Pluggable is the interface that groups +// ComponentConfigurable and ComponentCreator. +type Pluggable interface { + ComponentConfigurable + ComponentCreator +} + +// decodingHandler is the type of any function that, given a ComponentConfigurable +// and a Decoder, returns a pointer to a structure that was decoded. +type decodingHandler func(c ComponentConfigurable, d Decoder) (interface{}, error) + +// withDecoderOptions returns a decodingHandler closed over some DecoderOptions. 
+func withDecoderOptions(opts *DecoderOptions) decodingHandler { + return func(c ComponentConfigurable, d Decoder) (interface{}, error) { + return configure(c, d, opts) + } +} + +// Configure returns the decoded target. +func configure(c ComponentConfigurable, d Decoder, opts *DecoderOptions) (interface{}, error) { + target, err := c.ProvideDefault() // target is ptr + if err != nil { + return nil, err + } + + if err = d.Decode(opts, target); err != nil { + return nil, err + } + + return target, nil +} diff --git a/config/component_test.go b/config/component_test.go new file mode 100644 index 00000000..d70548f7 --- /dev/null +++ b/config/component_test.go @@ -0,0 +1,459 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package config + +import ( + "errors" + "path/filepath" + "reflect" + "testing" + + "github.com/davecgh/go-spew/spew" + "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/pkg/statsreceiver" + "github.com/snowplow-devops/stream-replicator/pkg/target" +) + +func TestCreateTargetComponentHCL(t *testing.T) { + testCases := []struct { + File string + Plug Pluggable + Expected interface{} + }{ + { + File: "target-sqs.hcl", + Plug: testSQSTargetAdapter(testSQSTargetFunc), + Expected: &target.SQSTargetConfig{ + QueueName: "testQueue", + Region: "eu-test-1", + RoleARN: "xxx-test-role-arn", + }, + }, + { + File: "target-eventhub-simple.hcl", + Plug: testEventHubTargetAdapter(testEventHubTargetFunc), + Expected: &target.EventHubConfig{ + EventHubNamespace: "testNamespace", + EventHubName: "testName", + MaxAutoRetries: 1, + MessageByteLimit: 1048576, + ChunkByteLimit: 1048576, + ChunkMessageLimit: 500, + ContextTimeoutInSeconds: 20, + BatchByteLimit: 1048576, + SetEHPartitionKey: true, + }, + }, + { + File: "target-eventhub-extended.hcl", + Plug: 
testEventHubTargetAdapter(testEventHubTargetFunc), + Expected: &target.EventHubConfig{ + EventHubNamespace: "testNamespace", + EventHubName: "testName", + MaxAutoRetries: 2, + MessageByteLimit: 1000000, + ChunkByteLimit: 1000000, + ChunkMessageLimit: 501, + ContextTimeoutInSeconds: 21, + BatchByteLimit: 1000000, + SetEHPartitionKey: false, + }, + }, + { + File: "target-http-simple.hcl", + Plug: testHTTPTargetAdapter(testHTTPTargetFunc), + Expected: &target.HTTPTargetConfig{ + HTTPURL: "testUrl", + ByteLimit: 1048576, + RequestTimeoutInSeconds: 5, + ContentType: "application/json", + Headers: "", + BasicAuthUsername: "", + BasicAuthPassword: "", + CertFile: "", + KeyFile: "", + CaFile: "", + SkipVerifyTLS: false, + }, + }, + { + File: "target-http-extended.hcl", + Plug: testHTTPTargetAdapter(testHTTPTargetFunc), + Expected: &target.HTTPTargetConfig{ + HTTPURL: "testUrl", + ByteLimit: 1000000, + RequestTimeoutInSeconds: 2, + ContentType: "test/test", + Headers: "{\"Accept-Language\":\"en-US\"}", + BasicAuthUsername: "testUsername", + BasicAuthPassword: "testPass", + CertFile: "myLocalhost.crt", + KeyFile: "MyLocalhost.key", + CaFile: "myRootCA.crt", + SkipVerifyTLS: true, + }, + }, + { + File: "target-kafka-simple.hcl", + Plug: testKafkaTargetAdapter(testKafkaTargetFunc), + Expected: &target.KafkaConfig{ + Brokers: "testBrokers", + TopicName: "testTopic", + TargetVersion: "", + MaxRetries: 10, + ByteLimit: 1048576, + Compress: false, + WaitForAll: false, + Idempotent: false, + EnableSASL: false, + SASLUsername: "", + SASLPassword: "", + SASLAlgorithm: "sha512", + CertFile: "", + KeyFile: "", + CaFile: "", + SkipVerifyTLS: false, + ForceSync: false, + FlushFrequency: 0, + FlushMessages: 0, + FlushBytes: 0, + }, + }, + { + File: "target-kafka-extended.hcl", + Plug: testKafkaTargetAdapter(testKafkaTargetFunc), + Expected: &target.KafkaConfig{ + Brokers: "testBrokers", + TopicName: "testTopic", + TargetVersion: "1.2.3", + MaxRetries: 11, + ByteLimit: 1000000, + Compress: 
true, + WaitForAll: true, + Idempotent: true, + EnableSASL: true, + SASLUsername: "testUsername", + SASLPassword: "testPass", + SASLAlgorithm: "sha256", + CertFile: "myLocalhost.crt", + KeyFile: "MyLocalhost.key", + CaFile: "myRootCA.crt", + SkipVerifyTLS: true, + ForceSync: true, + FlushFrequency: 2, + FlushMessages: 2, + FlushBytes: 2, + }, + }, + { + File: "target-kinesis.hcl", + Plug: testKinesisTargetAdapter(testKinesisTargetFunc), + Expected: &target.KinesisTargetConfig{ + StreamName: "testStream", + Region: "eu-test-1", + RoleARN: "xxx-test-role-arn", + }, + }, + { + File: "target-pubsub.hcl", + Plug: testPubSubTargetAdapter(testPubSubTargetFunc), + Expected: &target.PubSubTargetConfig{ + ProjectID: "testId", + TopicName: "testTopic", + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.File, func(t *testing.T) { + assert := assert.New(t) + + filename := filepath.Join("test-fixtures", tt.File) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) + + c, err := NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + use := c.Data.Target.Use + decoderOpts := &DecoderOptions{ + Input: use.Body, + } + + result, err := c.CreateComponent(tt.Plug, decoderOpts) + assert.NotNil(result) + assert.Nil(err) + + if !reflect.DeepEqual(result, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(result), + spew.Sdump(tt.Expected)) + } + }) + } +} + +func TestCreateFailureTargetComponentENV(t *testing.T) { + testCase := struct { + Name string + Plug Pluggable + Expected interface{} + }{ + Name: "test_failure_target_kafka_extended_env", + Plug: testKafkaTargetAdapter(testKafkaTargetFunc), + Expected: &target.KafkaConfig{ + Brokers: "testBrokers", + TopicName: "testTopic", + TargetVersion: "1.2.3", + MaxRetries: 11, + ByteLimit: 1000000, + Compress: true, + WaitForAll: true, + Idempotent: true, + EnableSASL: true, + SASLUsername: "testUsername", + SASLPassword: "testPass", + SASLAlgorithm: 
"sha256", + CertFile: "test/certfile.crt", + KeyFile: "test/keyfile.key", + CaFile: "test/cafile.crt", + SkipVerifyTLS: true, + ForceSync: true, + FlushFrequency: 2, + FlushMessages: 2, + FlushBytes: 2, + }, + } + + t.Run(testCase.Name, func(t *testing.T) { + assert := assert.New(t) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", "") + t.Setenv("FAILURE_TARGET_NAME", "kafka") + t.Setenv("FAILURE_TARGET_KAFKA_BROKERS", "testBrokers") + t.Setenv("FAILURE_TARGET_KAFKA_TOPIC_NAME", "testTopic") + t.Setenv("FAILURE_TARGET_KAFKA_TARGET_VERSION", "1.2.3") + t.Setenv("FAILURE_TARGET_KAFKA_MAX_RETRIES", "11") + t.Setenv("FAILURE_TARGET_KAFKA_BYTE_LIMIT", "1000000") + t.Setenv("FAILURE_TARGET_KAFKA_COMPRESS", "true") + t.Setenv("FAILURE_TARGET_KAFKA_WAIT_FOR_ALL", "true") + t.Setenv("FAILURE_TARGET_KAFKA_IDEMPOTENT", "true") + t.Setenv("FAILURE_TARGET_KAFKA_ENABLE_SASL", "true") + t.Setenv("FAILURE_TARGET_KAFKA_SASL_USERNAME", "testUsername") + t.Setenv("FAILURE_TARGET_KAFKA_SASL_PASSWORD", "testPass") + t.Setenv("FAILURE_TARGET_KAFKA_SASL_ALGORITHM", "sha256") + t.Setenv("FAILURE_TARGET_KAFKA_TLS_CERT_FILE", "test/certfile.crt") + t.Setenv("FAILURE_TARGET_KAFKA_TLS_KEY_FILE", "test/keyfile.key") + t.Setenv("FAILURE_TARGET_KAFKA_TLS_CA_FILE", "test/cafile.crt") + t.Setenv("FAILURE_TARGET_KAFKA_TLS_SKIP_VERIFY_TLS", "true") + t.Setenv("FAILURE_TARGET_KAFKA_FORCE_SYNC_PRODUCER", "true") + t.Setenv("FAILURE_TARGET_KAFKA_FLUSH_FREQUENCY", "2") + t.Setenv("FAILURE_TARGET_KAFKA_FLUSH_MESSAGES", "2") + t.Setenv("FAILURE_TARGET_KAFKA_FLUSH_BYTES", "2") + + c, err := NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + assert.Equal("kafka", c.Data.FailureTarget.Target.Name) + decoderOpts := &DecoderOptions{ + Prefix: "FAILURE_", + } + + result, err := c.CreateComponent(testCase.Plug, decoderOpts) + assert.NotNil(result) + assert.Nil(err) + + if !reflect.DeepEqual(result, testCase.Expected) { + 
t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(result), + spew.Sdump(testCase.Expected)) + } + }) +} + +func TestCreateObserverComponentHCL(t *testing.T) { + testCases := []struct { + File string + Plug Pluggable + Expected interface{} + }{ + { + File: "observer.hcl", + Plug: testStatsDAdapter(testStatsDFunc), + Expected: &statsreceiver.StatsDStatsReceiverConfig{ + Address: "test.localhost", + Prefix: "snowplow.test", + Tags: "{\"testKey\": \"testValue\"}", + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.File, func(t *testing.T) { + assert := assert.New(t) + + filename := filepath.Join("test-fixtures", tt.File) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) + + c, err := NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + assert.Equal(2, c.Data.StatsReceiver.TimeoutSec) + assert.Equal(20, c.Data.StatsReceiver.BufferSec) + + use := c.Data.StatsReceiver.Receiver + decoderOpts := &DecoderOptions{ + Input: use.Body, + } + + result, err := c.CreateComponent(tt.Plug, decoderOpts) + assert.NotNil(result) + assert.Nil(err) + + if !reflect.DeepEqual(result, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(result), + spew.Sdump(tt.Expected)) + } + }) + } +} + +// Test Helpers +// SQS +func testSQSTargetAdapter(f func(c *target.SQSTargetConfig) (*target.SQSTargetConfig, error)) target.SQSTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*target.SQSTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected SQSTargetConfig") + } + + return f(cfg) + } + +} + +func testSQSTargetFunc(c *target.SQSTargetConfig) (*target.SQSTargetConfig, error) { + + return c, nil +} + +// EventHub +func testEventHubTargetAdapter(f func(c *target.EventHubConfig) (*target.EventHubConfig, error)) target.EventHubTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*target.EventHubConfig) + if !ok { + return nil, 
errors.New("invalid input, expected EventHubTargetConfig") + } + + return f(cfg) + } + +} + +func testEventHubTargetFunc(c *target.EventHubConfig) (*target.EventHubConfig, error) { + + return c, nil +} + +// HTTP +func testHTTPTargetAdapter(f func(c *target.HTTPTargetConfig) (*target.HTTPTargetConfig, error)) target.HTTPTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*target.HTTPTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected HTTPTargetConfig") + } + + return f(cfg) + } + +} + +func testHTTPTargetFunc(c *target.HTTPTargetConfig) (*target.HTTPTargetConfig, error) { + + return c, nil +} + +// Kafka +func testKafkaTargetAdapter(f func(c *target.KafkaConfig) (*target.KafkaConfig, error)) target.KafkaTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*target.KafkaConfig) + if !ok { + return nil, errors.New("invalid input, expected KafkaTargetConfig") + } + + return f(cfg) + } + +} + +func testKafkaTargetFunc(c *target.KafkaConfig) (*target.KafkaConfig, error) { + + return c, nil +} + +// Kinesis +func testKinesisTargetAdapter(f func(c *target.KinesisTargetConfig) (*target.KinesisTargetConfig, error)) target.KinesisTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*target.KinesisTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected KinesisTargetConfig") + } + + return f(cfg) + } + +} + +func testKinesisTargetFunc(c *target.KinesisTargetConfig) (*target.KinesisTargetConfig, error) { + + return c, nil +} + +// PubSub +func testPubSubTargetAdapter(f func(c *target.PubSubTargetConfig) (*target.PubSubTargetConfig, error)) target.PubSubTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*target.PubSubTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected PubSubTargetConfig") + } + + return f(cfg) + } + +} + +func testPubSubTargetFunc(c *target.PubSubTargetConfig) 
(*target.PubSubTargetConfig, error) { + + return c, nil +} + +// StatsD +func testStatsDAdapter(f func(c *statsreceiver.StatsDStatsReceiverConfig) (*statsreceiver.StatsDStatsReceiverConfig, error)) statsreceiver.StatsDStatsReceiverAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*statsreceiver.StatsDStatsReceiverConfig) + if !ok { + return nil, errors.New("invalid input, expected StatsDStatsReceiverConfig") + } + + return f(cfg) + } + +} + +func testStatsDFunc(c *statsreceiver.StatsDStatsReceiverConfig) (*statsreceiver.StatsDStatsReceiverConfig, error) { + + return c, nil +} diff --git a/config/config.go b/config/config.go index 2a94aeb8..5076b4b2 100644 --- a/config/config.go +++ b/config/config.go @@ -9,13 +9,15 @@ package config import ( "fmt" "os" + "path/filepath" "strconv" "strings" "time" - "github.com/caarlos0/env/v6" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclparse" "github.com/pkg/errors" - + "github.com/snowplow-devops/stream-replicator/pkg/common" "github.com/snowplow-devops/stream-replicator/pkg/failure" "github.com/snowplow-devops/stream-replicator/pkg/failure/failureiface" "github.com/snowplow-devops/stream-replicator/pkg/observer" @@ -23,459 +25,294 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/statsreceiver/statsreceiveriface" "github.com/snowplow-devops/stream-replicator/pkg/target" "github.com/snowplow-devops/stream-replicator/pkg/target/targetiface" - "github.com/snowplow-devops/stream-replicator/pkg/transform" ) -// ---------- [ TARGETS ] ---------- - -// KinesisTargetConfig configures the destination for records consumed -type KinesisTargetConfig struct { - StreamName string `env:"TARGET_KINESIS_STREAM_NAME"` - Region string `env:"TARGET_KINESIS_REGION"` - RoleARN string `env:"TARGET_KINESIS_ROLE_ARN"` +// Config holds the configuration data along with the Decoder to Decode them +type Config struct { + Data *configurationData + Decoder Decoder } -// PubSubTargetConfig 
configures the destination for records consumed -type PubSubTargetConfig struct { - ProjectID string `env:"TARGET_PUBSUB_PROJECT_ID"` - TopicName string `env:"TARGET_PUBSUB_TOPIC_NAME"` +// configurationData for holding all configuration options +type configurationData struct { + Source *component `hcl:"source,block" envPrefix:"SOURCE_"` + Target *component `hcl:"target,block" envPrefix:"TARGET_"` + FailureTarget *failureConfig `hcl:"failure_target,block"` + Sentry *sentryConfig `hcl:"sentry,block"` + StatsReceiver *statsConfig `hcl:"stats_receiver,block"` + Transformations []*component `hcl:"transform,block"` + LogLevel string `hcl:"log_level,optional" env:"LOG_LEVEL"` + UserProvidedID string `hcl:"user_provided_id,optional" env:"USER_PROVIDED_ID"` + DisableTelemetry bool `hcl:"disable_telemetry,optional" env:"DISABLE_TELEMETRY"` } -// SQSTargetConfig configures the destination for records consumed -type SQSTargetConfig struct { - QueueName string `env:"TARGET_SQS_QUEUE_NAME"` - Region string `env:"TARGET_SQS_REGION"` - RoleARN string `env:"TARGET_SQS_ROLE_ARN"` +// component is a type to abstract over configuration blocks. +type component struct { + Use *use `hcl:"use,block"` } -// KafkaTargetConfig configures the destination for records consumed -type KafkaTargetConfig struct { - Brokers string `env:"TARGET_KAFKA_BROKERS"` // REQUIRED - TopicName string `env:"TARGET_KAFKA_TOPIC_NAME"` // REQUIRED - TargetVersion string `env:"TARGET_KAFKA_TARGET_VERSION"` // The Kafka version we should target e.g. 
2.7.0 or 0.11.0.2 - MaxRetries int `env:"TARGET_KAFKA_MAX_RETRIES" envDefault:"10"` // Max retries - ByteLimit int `env:"TARGET_KAFKA_BYTE_LIMIT" envDefault:"1048576"` // Kafka Default is 1MiB - Compress bool `env:"TARGET_KAFKA_COMPRESS"` // Reduces Network usage & Increases latency by compressing data - WaitForAll bool `env:"TARGET_KAFKA_WAIT_FOR_ALL"` // Sets RequireAcks = WaitForAll which waits for min.insync.replicas to Ack - Idempotent bool `env:"TARGET_KAFKA_IDEMPOTENT"` // Exactly once writes - Also sets RequiredAcks = WaitForAll - EnableSASL bool `env:"TARGET_KAFKA_ENABLE_SASL"` // Enables SASL Support - SASLUsername string `env:"TARGET_KAFKA_SASL_USERNAME"` // SASL auth - SASLPassword string `env:"TARGET_KAFKA_SASL_PASSWORD"` // SASL auth - SASLAlgorithm string `env:"TARGET_KAFKA_SASL_ALGORITHM" envDefault:"sha512"` // sha256 or sha512 - CertFile string `env:"TARGET_KAFKA_TLS_CERT_FILE"` // The optional certificate file for client authentication - KeyFile string `env:"TARGET_KAFKA_TLS_KEY_FILE"` // The optional key file for client authentication - CaFile string `env:"TARGET_KAFKA_TLS_CA_FILE"` // The optional certificate authority file for TLS client authentication - SkipVerifyTLS bool `env:"TARGET_KAFKA_TLS_SKIP_VERIFY_TLS"` // Optional skip verifying ssl certificates chain - ForceSyncProducer bool `env:"TARGET_KAFKA_FORCE_SYNC_PRODUCER"` // Forces the use of the Sync Producer, emits as fast as possible, may limit performance - FlushFrequency int `env:"TARGET_KAFKA_FLUSH_FREQUENCY" envDefault:"0"` // Milliseconds between flushes of events - 0 = as fast as possible - FlushMessages int `env:"TARGET_KAFKA_FLUSH_MESSAGES" envDefault:"0"` // Best effort for how many messages are sent in each batch - 0 = as fast as possible - FlushBytes int `env:"TARGET_KAFKA_FLUSH_BYTES" envDefault:"0"` // Best effort for how many bytes will trigger a flush - 0 = as fast as possible +// use is a type to denote what a component will be configured to use. 
+type use struct { + Name string `hcl:",label" env:"NAME"` + Body hcl.Body `hcl:",remain"` } -// EventHubTargetConfig configures the destination for records consumed -type EventHubTargetConfig struct { - EventHubNamespace string `env:"TARGET_EVENTHUB_NAMESPACE"` // REQUIRED - namespace housing Eventhub - EventHubName string `env:"TARGET_EVENTHUB_NAME"` // REQUIRED - name of Eventhub - MaxAutoRetries int `env:"TARGET_EVENTHUB_MAX_AUTO_RETRY" envDefault:"1"` // Number of retries handled automatically by the EH library - all retries should be completed before context timeout - MessageByteLimit int `env:"TARGET_EVENTHUB_MESSAGE_BYTE_LIMIT" envDefault:"1048576"` // Default presumes paid tier limit is 1MB - ChunkByteLimit int `env:"TARGET_EVENTHUB_CHUNK_BYTE_LIMIT" envDefault:"1048576"` // Default chunk size of 1MB is arbitrary - ChunkMessageLimit int `env:"TARGET_EVENTHUB_CHUNK_MESSAGE_LIMIT" envDefault:"500"` // Default of 500 is arbitrary - ContextTimeoutInSeconds int `env:"TARGET_EVENTHUB_CONTEXT_TIMEOUT_SECONDS" envDefault:"20"` // Default of 20 is arbitrary - BatchByteLimit int `env:"TARGET_EVENTHUB_BATCH_BYTE_LIMIT" envDefault:"1048576"` // Default batch size of 1MB is the limit for EH's high tier +// failureConfig holds configuration for the failure target. +// It includes the target component to use. 
+type failureConfig struct { + Target *use `hcl:"use,block" envPrefix:"FAILURE_TARGET_"` + Format string `hcl:"format,optional" env:"FAILURE_TARGETS_FORMAT"` } -// HTTPTargetConfig configures the destination for records consumed -type HTTPTargetConfig struct { - HTTPURL string `env:"TARGET_HTTP_URL"` // REQUIRED - url endpoint - ByteLimit int `env:"TARGET_HTTP_BYTE_LIMIT" envDefault:"1048576"` // Byte limit for requests - RequestTimeoutInSeconds int `env:"TARGET_HTTP_TIMEOUT_IN_SECONDS" envDefault:"5"` // Request timeout in seconds - ContentType string `env:"TARGET_HTTP_CONTENT_TYPE" envDefault:"application/json"` // Content type for POST request - Headers string `env:"TARGET_HTTP_HEADERS"` // Optional headers to add to the request, provided as a JSON of string key-value pairs. eg: `{"Max Forwards": "10", "Accept-Language": "en-US,en-IE", "Accept-Datetime": "Thu, 31 May 2007 20:35:00 GMT"}` - BasicAuthUsername string `env:"TARGET_HTTP_BASICAUTH_USERNAME"` // Optional basicauth username - BasicAuthPassword string `env:"TARGET_HTTP_BASICAUTH_PASSWORD"` // Optional basicauth password - CertFile string `env:"TARGET_HTTP_TLS_CERT_FILE"` // The optional certificate file for client authentication - KeyFile string `env:"TARGET_HTTP_TLS_KEY_FILE"` // The optional key file for client authentication - CaFile string `env:"TARGET_HTTP_TLS_CA_FILE"` // The optional certificate authority file for TLS client authentication - SkipVerifyTLS bool `env:"TARGET_HTTP_TLS_SKIP_VERIFY_TLS" envDefault:"false"` // Optional skip verifying ssl certificates chain - if certfile and keyfile are not provided, this setting is not applied. +// sentryConfig configures the Sentry error tracker. 
+type sentryConfig struct { + Dsn string `hcl:"dsn" env:"SENTRY_DSN"` + Tags string `hcl:"tags,optional" env:"SENTRY_TAGS"` + Debug bool `hcl:"debug,optional" env:"SENTRY_DEBUG"` } -// TargetsConfig holds configuration for the available targets -type TargetsConfig struct { - Kinesis KinesisTargetConfig - PubSub PubSubTargetConfig - SQS SQSTargetConfig - Kafka KafkaTargetConfig - EventHub EventHubTargetConfig - HTTP HTTPTargetConfig +// statsConfig holds configuration for stats receivers. +// It includes a receiver component to use. +type statsConfig struct { + Receiver *use `hcl:"use,block" envPrefix:"STATS_RECEIVER_"` + TimeoutSec int `hcl:"timeout_sec,optional" env:"STATS_RECEIVER_TIMEOUT_SEC"` + BufferSec int `hcl:"buffer_sec,optional" env:"STATS_RECEIVER_BUFFER_SEC"` } -// ---------- [ FAILURE MESSAGE TARGETS ] ---------- - -// FailureKinesisTargetConfig configures the destination for records consumed -type FailureKinesisTargetConfig struct { - StreamName string `env:"FAILURE_TARGET_KINESIS_STREAM_NAME"` - Region string `env:"FAILURE_TARGET_KINESIS_REGION"` - RoleARN string `env:"FAILURE_TARGET_KINESIS_ROLE_ARN"` -} - -// FailurePubSubTargetConfig configures the destination for records consumed -type FailurePubSubTargetConfig struct { - ProjectID string `env:"FAILURE_TARGET_PUBSUB_PROJECT_ID"` - TopicName string `env:"FAILURE_TARGET_PUBSUB_TOPIC_NAME"` +// defaultConfigData returns the initial main configuration target. 
+func defaultConfigData() *configurationData { + return &configurationData{ + Source: &component{&use{Name: "stdin"}}, + Target: &component{&use{Name: "stdout"}}, + + FailureTarget: &failureConfig{ + Target: &use{Name: "stdout"}, + Format: "snowplow", + }, + Sentry: &sentryConfig{ + Tags: "{}", + }, + StatsReceiver: &statsConfig{ + Receiver: &use{}, + TimeoutSec: 1, + BufferSec: 15, + }, + Transformations: nil, + LogLevel: "info", + DisableTelemetry: false, + } } -// FailureSQSTargetConfig configures the destination for records consumed -type FailureSQSTargetConfig struct { - QueueName string `env:"FAILURE_TARGET_SQS_QUEUE_NAME"` - Region string `env:"FAILURE_TARGET_SQS_REGION"` - RoleARN string `env:"FAILURE_TARGET_SQS_ROLE_ARN"` -} +// NewConfig returns a configuration +func NewConfig() (*Config, error) { + filename := os.Getenv("STREAM_REPLICATOR_CONFIG_FILE") + if filename == "" { + return newEnvConfig() + } -// FailureKafkaTargetConfig configures the destination for records consumed -type FailureKafkaTargetConfig struct { - Brokers string `env:"FAILURE_TARGET_KAFKA_BROKERS"` // REQUIRED - TopicName string `env:"FAILURE_TARGET_KAFKA_TOPIC_NAME"` // REQUIRED - TargetVersion string `env:"FAILURE_TARGET_KAFKA_TARGET_VERSION"` // The Kafka version we should target e.g. 
2.7.0 or 0.11.0.2 - MaxRetries int `env:"FAILURE_TARGET_KAFKA_MAX_RETRIES" envDefault:"10"` // Max retries - ByteLimit int `env:"FAILURE_TARGET_KAFKA_BYTE_LIMIT" envDefault:"1048576"` // Kafka Default is 1MiB - Compress bool `env:"FAILURE_TARGET_KAFKA_COMPRESS"` // Reduces Network usage & Increases latency by compressing data - WaitForAll bool `env:"FAILURE_TARGET_KAFKA_WAIT_FOR_ALL"` // Sets RequireAcks = WaitForAll which waits for min.insync.replicas to Ack - Idempotent bool `env:"FAILURE_TARGET_KAFKA_IDEMPOTENT"` // Exactly once writes - EnableSASL bool `env:"FAILURE_TARGET_KAFKA_ENABLE_SASL"` // Enables SASL Support - SASLUsername string `env:"FAILURE_TARGET_KAFKA_SASL_USERNAME"` // SASL auth - SASLPassword string `env:"FAILURE_TARGET_KAFKA_SASL_PASSWORD"` // SASL auth - SASLAlgorithm string `env:"FAILURE_TARGET_KAFKA_SASL_ALGORITHM" envDefault:"sha512"` // sha256 or sha512 - CertFile string `env:"FAILURE_TARGET_KAFKA_TLS_CERT_FILE"` // The optional certificate file for client authentication - KeyFile string `env:"FAILURE_TARGET_KAFKA_TLS_KEY_FILE"` // The optional key file for client authentication - CaFile string `env:"FAILURE_TARGET_KAFKA_TLS_CA_FILE"` // The optional certificate authority file for TLS client authentication - SkipVerifyTLS bool `env:"FAILURE_TARGET_KAFKA_TLS_SKIP_VERIFY_TLS"` // Optional skip verifying ssl certificates chain - ForceSyncProducer bool `env:"FAILURE_TARGET_KAFKA_FORCE_SYNC_PRODUCER"` // Forces the use of the Sync Producer, emits as fast as possible, may limit performance - FlushFrequency int `env:"FAILURE_TARGET_KAFKA_FLUSH_FREQUENCY" envDefault:"0"` // Milliseconds between flushes of events - 0 = as fast as possible - FlushMessages int `env:"FAILURE_TARGET_KAFKA_FLUSH_MESSAGES" envDefault:"0"` // Best effort for how many messages are sent in each batch - 0 = as fast as possible - FlushBytes int `env:"FAILURE_TARGET_KAFKA_FLUSH_BYTES" envDefault:"0"` // Best effort for how many bytes will trigger a flush - 0 = as fast as 
possible + switch suffix := strings.ToLower(filepath.Ext(filename)); suffix { + case ".hcl": + return newHclConfig(filename) + default: + return nil, errors.New("invalid extension for the configuration file") + } } -// FailureEventHubTargetConfig configures the destination for records consumed -type FailureEventHubTargetConfig struct { - EventHubNamespace string `env:"FAILURE_TARGET_EVENTHUB_NAMESPACE"` // REQUIRED - namespace housing Eventhub - EventHubName string `env:"FAILURE_TARGET_EVENTHUB_NAME"` // REQUIRED - name of Eventhub - MaxAutoRetries int `env:"FAILURE_TARGET_EVENTHUB_MAX_AUTO_RETRY" envDefault:"1"` // Number of retries handled automatically by the EH library - all retries should be completed before context timeout - MessageByteLimit int `env:"FAILURE_TARGET_EVENTHUB_MESSAGE_BYTE_LIMIT" envDefault:"1048576"` // Default presumes paid tier limit is 1MB - ChunkByteLimit int `env:"FAILURE_TARGET_EVENTHUB_CHUNK_BYTE_LIMIT" envDefault:"1048576"` // Default chunk size of 1MB is arbitrary - ChunkMessageLimit int `env:"FAILURE_TARGET_EVENTHUB_CHUNK_MESSAGE_LIMIT" envDefault:"500"` // Default of 500 is arbitrary - ContextTimeoutInSeconds int `env:"FAILURE_TARGET_EVENTHUB_CONTEXT_TIMEOUT_SECONDS" envDefault:"20"` // Default of 20 is arbitrary - BatchByteLimit int `env:"FAILURE_TARGET_EVENTHUB_BATCH_BYTE_LIMIT" envDefault:"1048576"` // Default batch size of 1MB is the limit for EH's high tier -} +func newEnvConfig() (*Config, error) { + var err error -// FailureHTTPTargetConfig configures the destination for records consumed -type FailureHTTPTargetConfig struct { - HTTPURL string `env:"FAILURE_TARGET_HTTP_URL"` // REQUIRED - url endpoint - byteLimit int `env:"FAILURE_TARGET_HTTP_BYTE_LIMIT" envDefault:"1048576"` // Byte limit for requests - requestTimeoutInSeconds int `env:"FAILURE_TARGET_HTTP_TIMEOUT_IN_SECONDS" envDefault:"5"` // Request timeout in seconds - ContentType string `env:"FAILURE_TARGET_HTTP_CONTENT_TYPE" envDefault:"application/json"` // Content 
type for POST request - Headers string `env:"FAILURE_TARGET_HTTP_HEADERS"` // Optional headers to add to the request, provided as a JSON of string key-value pairs. eg: `{"Max Forwards": "10", "Accept-Language": "en-US,en-IE", "Accept-Datetime": "Thu, 31 May 2007 20:35:00 GMT"}` - BasicAuthUsername string `env:"FAILURE_TARGET_HTTP_BASICAUTH_USERNAME"` // Optional basicauth username - BasicAuthPassword string `env:"FAILURE_TARGET_HTTP_BASICAUTH_PASSWORD"` // Optional basicauth password - CertFile string `env:"FAILURE_TARGET_HTTP_TLS_CERT_FILE"` // The optional certificate file for client authentication - KeyFile string `env:"FAILURE_TARGET_HTTP_TLS_KEY_FILE"` // The optional key file for client authentication - CaFile string `env:"FAILURE_TARGET_HTTP_TLS_CA_FILE"` // The optional certificate authority file for TLS client authentication - SkipVerifyTLS bool `env:"FAILURE_TARGET_HTTP_TLS_SKIP_VERIFY_TLS" envDefault:"false"` // Optional skip verifying ssl certificates chain - if certfile and keyfile are not provided, this setting is not applied. 
-} + decoderOpts := &DecoderOptions{} + envDecoder := &envDecoder{} -// FailureTargetsConfig holds configuration for the available targets -type FailureTargetsConfig struct { - Kinesis FailureKinesisTargetConfig - PubSub FailurePubSubTargetConfig - SQS FailureSQSTargetConfig - Kafka FailureKafkaTargetConfig - EventHub FailureEventHubTargetConfig - HTTP FailureHTTPTargetConfig - - // Format defines how the message will be transformed before - // being sent to the target - Format string `env:"FAILURE_TARGETS_FORMAT" envDefault:"snowplow"` -} + configData := defaultConfigData() -// ---------- [ SOURCES ] ---------- + err = envDecoder.Decode(decoderOpts, configData) + if err != nil { + return nil, err + } -// KinesisSourceConfig configures the source for records pulled -type KinesisSourceConfig struct { - StreamName string `env:"SOURCE_KINESIS_STREAM_NAME"` - Region string `env:"SOURCE_KINESIS_REGION"` - RoleARN string `env:"SOURCE_KINESIS_ROLE_ARN"` - AppName string `env:"SOURCE_KINESIS_APP_NAME"` - StartTimestamp string `env:"SOURCE_KINESIS_START_TIMESTAMP"` // Timestamp for the kinesis shard iterator to begin processing. Format YYYY-MM-DD HH:MM:SS.MS (miliseconds optional) -} + mainConfig := Config{ + Data: configData, + Decoder: envDecoder, + } -// PubSubSourceConfig configures the source for records pulled -type PubSubSourceConfig struct { - ProjectID string `env:"SOURCE_PUBSUB_PROJECT_ID"` - SubscriptionID string `env:"SOURCE_PUBSUB_SUBSCRIPTION_ID"` -} + // If the TRANSFORM_CONFIG_B64 env var is set, parse it, and use the Transformations in our mainConfig. 
+ b64Transformations := os.Getenv("TRANSFORM_CONFIG_B64") + if b64Transformations != "" { + err := common.DecodeB64ToFile(b64Transformations, "tmp_replicator/transform.hcl") + if err != nil { + return nil, errors.Wrap(err, "Error decoding transformation config base64 from env") + } -// SQSSourceConfig configures the source for records pulled -type SQSSourceConfig struct { - QueueName string `env:"SOURCE_SQS_QUEUE_NAME"` - Region string `env:"SOURCE_SQS_REGION"` - RoleARN string `env:"SOURCE_SQS_ROLE_ARN"` -} + confFromFile, err := newHclConfig("tmp_replicator/transform.hcl") + if err != nil { + return nil, errors.Wrap(err, "Error parsing transformation config from env") + } -// SourcesConfig holds configuration for the available sources -type SourcesConfig struct { - Kinesis KinesisSourceConfig - PubSub PubSubSourceConfig - SQS SQSSourceConfig + mainConfig.Data.Transformations = confFromFile.Data.Transformations + } - // ConcurrentWrites is how many go-routines a source can leverage to parallelise processing - ConcurrentWrites int `env:"SOURCE_CONCURRENT_WRITES" envDefault:"50"` + return &mainConfig, nil } -// ---------- [ OBSERVABILITY ] ---------- +func newHclConfig(filename string) (*Config, error) { + src, err := os.ReadFile(filename) + if err != nil { + return nil, err + } -// SentryConfig configures the Sentry error tracker -type SentryConfig struct { - Dsn string `env:"SENTRY_DSN"` - Tags string `env:"SENTRY_TAGS" envDefault:"{}"` - Debug bool `env:"SENTRY_DEBUG" envDefault:"false"` -} + // Parsing + parser := hclparse.NewParser() + fileHCL, diags := parser.ParseHCL(src, filename) + if diags.HasErrors() { + return nil, diags + } -// StatsDStatsReceiverConfig configures the stats metrics receiver -type StatsDStatsReceiverConfig struct { - Address string `env:"STATS_RECEIVER_STATSD_ADDRESS"` - Prefix string `env:"STATS_RECEIVER_STATSD_PREFIX" envDefault:"snowplow.stream-replicator"` - Tags string `env:"STATS_RECEIVER_STATSD_TAGS" envDefault:"{}"` -} + // 
Creating EvalContext + evalContext := createHclContext() // ptr + + // Decoding + configData := defaultConfigData() + decoderOpts := &DecoderOptions{Input: fileHCL.Body} + hclDecoder := &hclDecoder{EvalContext: evalContext} -// StatsReceiversConfig holds configuration for different stats receivers -type StatsReceiversConfig struct { - StatsD StatsDStatsReceiverConfig + err = hclDecoder.Decode(decoderOpts, configData) + if err != nil { + return nil, err + } - // TimeoutSec is how long the observer will wait for a new result before looping - TimeoutSec int `env:"STATS_RECEIVER_TIMEOUT_SEC" envDefault:"1"` + mainConfig := Config{ + Data: configData, + Decoder: hclDecoder, + } - // BufferSec is how long the observer buffers results before pushing results out and resetting - BufferSec int `env:"STATS_RECEIVER_BUFFER_SEC" envDefault:"15"` + return &mainConfig, nil } -// Config for holding all configuration details -type Config struct { - Source string `env:"SOURCE" envDefault:"stdin"` - Sources SourcesConfig - Target string `env:"TARGET" envDefault:"stdout"` - Targets TargetsConfig - FailureTarget string `env:"FAILURE_TARGET" envDefault:"stdout"` - FailureTargets FailureTargetsConfig - Transformation string `env:"MESSAGE_TRANSFORMATION" envDefault:"none"` - LogLevel string `env:"LOG_LEVEL" envDefault:"info"` - Sentry SentryConfig - StatsReceiver string `env:"STATS_RECEIVER"` - StatsReceivers StatsReceiversConfig - - // Provides the ability to provide a GCP service account to the application directly - GoogleServiceAccountB64 string `env:"GOOGLE_APPLICATION_CREDENTIALS_B64"` -} +// CreateComponent creates a pluggable component given the Decoder options. 
+func (c *Config) CreateComponent(p Pluggable, opts *DecoderOptions) (interface{}, error) { + componentConfigure := withDecoderOptions(opts) -// NewConfig resolves the config from the environment -func NewConfig() (*Config, error) { - cfg := Config{} - err := env.Parse(&cfg) + decodedConfig, err := componentConfigure(p, c.Decoder) if err != nil { return nil, err } - return &cfg, nil + + return p.Create(decodedConfig) } // GetTarget builds and returns the target that is configured func (c *Config) GetTarget() (targetiface.Target, error) { - switch c.Target { + var plug Pluggable + useTarget := c.Data.Target.Use + decoderOpts := &DecoderOptions{ + Input: useTarget.Body, + } + + switch useTarget.Name { case "stdout": - return target.NewStdoutTarget() + plug = target.AdaptStdoutTargetFunc( + target.StdoutTargetConfigFunction, + ) case "kinesis": - return target.NewKinesisTarget( - c.Targets.Kinesis.Region, - c.Targets.Kinesis.StreamName, - c.Targets.Kinesis.RoleARN, + plug = target.AdaptKinesisTargetFunc( + target.KinesisTargetConfigFunction, ) case "pubsub": - return target.NewPubSubTarget( - c.Targets.PubSub.ProjectID, - c.Targets.PubSub.TopicName, + plug = target.AdaptPubSubTargetFunc( + target.PubSubTargetConfigFunction, ) case "sqs": - return target.NewSQSTarget( - c.Targets.SQS.Region, - c.Targets.SQS.QueueName, - c.Targets.SQS.RoleARN, + plug = target.AdaptSQSTargetFunc( + target.SQSTargetConfigFunction, ) case "kafka": - return target.NewKafkaTarget(&target.KafkaConfig{ - Brokers: c.Targets.Kafka.Brokers, - TopicName: c.Targets.Kafka.TopicName, - TargetVersion: c.Targets.Kafka.TargetVersion, - MaxRetries: c.Targets.Kafka.MaxRetries, - ByteLimit: c.Targets.Kafka.ByteLimit, - Compress: c.Targets.Kafka.Compress, - WaitForAll: c.Targets.Kafka.WaitForAll, - Idempotent: c.Targets.Kafka.Idempotent, - EnableSASL: c.Targets.Kafka.EnableSASL, - SASLUsername: c.Targets.Kafka.SASLUsername, - SASLPassword: c.Targets.Kafka.SASLPassword, - SASLAlgorithm: 
c.Targets.Kafka.SASLAlgorithm, - CertFile: c.Targets.Kafka.CertFile, - KeyFile: c.Targets.Kafka.KeyFile, - CaFile: c.Targets.Kafka.CaFile, - SkipVerifyTLS: c.Targets.Kafka.SkipVerifyTLS, - ForceSync: c.Targets.Kafka.ForceSyncProducer, - FlushFrequency: c.Targets.Kafka.FlushFrequency, - FlushMessages: c.Targets.Kafka.FlushMessages, - FlushBytes: c.Targets.Kafka.FlushBytes, - }) + plug = target.AdaptKafkaTargetFunc( + target.NewKafkaTarget, + ) case "eventhub": - return target.NewEventHubTarget(&target.EventHubConfig{ - EventHubNamespace: c.Targets.EventHub.EventHubNamespace, - EventHubName: c.Targets.EventHub.EventHubName, - MaxAutoRetries: c.Targets.EventHub.MaxAutoRetries, - MessageByteLimit: c.Targets.EventHub.MessageByteLimit, - ChunkByteLimit: c.Targets.EventHub.ChunkByteLimit, - ChunkMessageLimit: c.Targets.EventHub.ChunkMessageLimit, - ContextTimeoutInSeconds: c.Targets.EventHub.ContextTimeoutInSeconds, - BatchByteLimit: c.Targets.EventHub.BatchByteLimit, - }) + plug = target.AdaptEventHubTargetFunc( + target.EventHubTargetConfigFunction, + ) case "http": - return target.NewHTTPTarget( - c.Targets.HTTP.HTTPURL, - c.Targets.HTTP.RequestTimeoutInSeconds, - c.Targets.HTTP.ByteLimit, - c.Targets.HTTP.ContentType, - c.Targets.HTTP.Headers, - c.Targets.HTTP.BasicAuthUsername, - c.Targets.HTTP.BasicAuthPassword, - c.Targets.HTTP.CertFile, - c.Targets.HTTP.KeyFile, - c.Targets.HTTP.CaFile, - c.Targets.HTTP.SkipVerifyTLS, + plug = target.AdaptHTTPTargetFunc( + target.HTTPTargetConfigFunction, ) default: - return nil, errors.New(fmt.Sprintf("Invalid target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got '%s'", c.Target)) + return nil, errors.New(fmt.Sprintf("Invalid target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got '%s'", useTarget.Name)) } + + component, err := c.CreateComponent(plug, decoderOpts) + if err != nil { + return nil, err + } + + if t, ok := component.(targetiface.Target); ok { + 
return t, nil + } + + return nil, fmt.Errorf("could not interpret target configuration for %q", useTarget.Name) } // GetFailureTarget builds and returns the target that is configured func (c *Config) GetFailureTarget(AppName string, AppVersion string) (failureiface.Failure, error) { - var t targetiface.Target + var plug Pluggable var err error - switch c.FailureTarget { + useFailureTarget := c.Data.FailureTarget.Target + decoderOpts := &DecoderOptions{ + Prefix: "FAILURE_", + Input: useFailureTarget.Body, + } + + switch useFailureTarget.Name { case "stdout": - t, err = target.NewStdoutTarget() + plug = target.AdaptStdoutTargetFunc( + target.StdoutTargetConfigFunction, + ) case "kinesis": - t, err = target.NewKinesisTarget( - c.FailureTargets.Kinesis.Region, - c.FailureTargets.Kinesis.StreamName, - c.FailureTargets.Kinesis.RoleARN, + plug = target.AdaptKinesisTargetFunc( + target.KinesisTargetConfigFunction, ) case "pubsub": - t, err = target.NewPubSubTarget( - c.FailureTargets.PubSub.ProjectID, - c.FailureTargets.PubSub.TopicName, + plug = target.AdaptPubSubTargetFunc( + target.PubSubTargetConfigFunction, ) case "sqs": - t, err = target.NewSQSTarget( - c.FailureTargets.SQS.Region, - c.FailureTargets.SQS.QueueName, - c.FailureTargets.SQS.RoleARN, + plug = target.AdaptSQSTargetFunc( + target.SQSTargetConfigFunction, ) case "kafka": - t, err = target.NewKafkaTarget(&target.KafkaConfig{ - Brokers: c.FailureTargets.Kafka.Brokers, - TopicName: c.FailureTargets.Kafka.TopicName, - TargetVersion: c.FailureTargets.Kafka.TargetVersion, - MaxRetries: c.FailureTargets.Kafka.MaxRetries, - ByteLimit: c.FailureTargets.Kafka.ByteLimit, - Compress: c.FailureTargets.Kafka.Compress, - WaitForAll: c.FailureTargets.Kafka.WaitForAll, - Idempotent: c.FailureTargets.Kafka.Idempotent, - EnableSASL: c.FailureTargets.Kafka.EnableSASL, - SASLUsername: c.FailureTargets.Kafka.SASLUsername, - SASLPassword: c.FailureTargets.Kafka.SASLPassword, - SASLAlgorithm: c.FailureTargets.Kafka.SASLAlgorithm, 
- CertFile: c.FailureTargets.Kafka.CertFile, - KeyFile: c.FailureTargets.Kafka.KeyFile, - CaFile: c.FailureTargets.Kafka.CaFile, - SkipVerifyTLS: c.FailureTargets.Kafka.SkipVerifyTLS, - ForceSync: c.FailureTargets.Kafka.ForceSyncProducer, - FlushFrequency: c.FailureTargets.Kafka.FlushFrequency, - FlushMessages: c.FailureTargets.Kafka.FlushMessages, - FlushBytes: c.FailureTargets.Kafka.FlushBytes, - }) + plug = target.AdaptKafkaTargetFunc( + target.NewKafkaTarget, + ) case "eventhub": - t, err = target.NewEventHubTarget(&target.EventHubConfig{ - EventHubNamespace: c.FailureTargets.EventHub.EventHubNamespace, - EventHubName: c.FailureTargets.EventHub.EventHubName, - MaxAutoRetries: c.FailureTargets.EventHub.MaxAutoRetries, - MessageByteLimit: c.FailureTargets.EventHub.MessageByteLimit, - ChunkByteLimit: c.FailureTargets.EventHub.ChunkByteLimit, - ChunkMessageLimit: c.FailureTargets.EventHub.ChunkMessageLimit, - ContextTimeoutInSeconds: c.FailureTargets.EventHub.ContextTimeoutInSeconds, - BatchByteLimit: c.FailureTargets.EventHub.BatchByteLimit, - }) + plug = target.AdaptEventHubTargetFunc( + target.EventHubTargetConfigFunction, + ) case "http": - t, err = target.NewHTTPTarget( - c.FailureTargets.HTTP.HTTPURL, - c.FailureTargets.HTTP.requestTimeoutInSeconds, - c.FailureTargets.HTTP.byteLimit, - c.FailureTargets.HTTP.ContentType, - c.FailureTargets.HTTP.Headers, - c.FailureTargets.HTTP.BasicAuthUsername, - c.FailureTargets.HTTP.BasicAuthPassword, - c.FailureTargets.HTTP.CertFile, - c.FailureTargets.HTTP.KeyFile, - c.FailureTargets.HTTP.CaFile, - c.FailureTargets.HTTP.SkipVerifyTLS, + plug = target.AdaptHTTPTargetFunc( + target.HTTPTargetConfigFunction, ) default: - err = errors.New(fmt.Sprintf("Invalid failure target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got '%s'", c.FailureTarget)) + return nil, errors.New(fmt.Sprintf("Invalid failure target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 
'%s'", useFailureTarget.Name)) } + + component, err := c.CreateComponent(plug, decoderOpts) if err != nil { return nil, err } - switch c.FailureTargets.Format { - case "snowplow": - return failure.NewSnowplowFailure(t, AppName, AppVersion) - default: - return nil, errors.New(fmt.Sprintf("Invalid failure format found; expected one of 'snowplow' and got '%s'", c.FailureTargets.Format)) - } -} - -// GetTransformations builds and returns transformationApplyFunction from the transformations configured -func (c *Config) GetTransformations() (transform.TransformationApplyFunction, error) { - funcs := make([]transform.TransformationFunction, 0, 0) - - // Parse list of transformations - transformations := strings.Split(c.Transformation, ",") - - for _, transformation := range transformations { - // Parse function name-option sets - funcOpts := strings.Split(transformation, ":") - - switch funcOpts[0] { - case "spEnrichedToJson": - funcs = append(funcs, transform.SpEnrichedToJSON) - case "spEnrichedSetPk": - funcs = append(funcs, transform.NewSpEnrichedSetPkFunction(funcOpts[1])) - case "spEnrichedFilter": - filterFunc, err := transform.NewSpEnrichedFilterFunction(funcOpts[1]) - if err != nil { - return nil, err - } - funcs = append(funcs, filterFunc) - case "none": + if t, ok := component.(targetiface.Target); ok { + switch c.Data.FailureTarget.Format { + case "snowplow": + return failure.NewSnowplowFailure(t, AppName, AppVersion) default: - return nil, errors.New(fmt.Sprintf("Invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk:{option}', spEnrichedFilter:{option} and got '%s'", c.Transformation)) + return nil, errors.New(fmt.Sprintf("Invalid failure format found; expected one of 'snowplow' and got '%s'", c.Data.FailureTarget.Format)) } } - return transform.NewTransformation(funcs...), nil + + return nil, fmt.Errorf("could not interpret failure target configuration for %q", useFailureTarget.Name) } // GetTags returns a list of tags to use in 
identifying this instance of stream-replicator with enough @@ -500,26 +337,38 @@ func (c *Config) GetTags() (map[string]string, error) { // GetObserver builds and returns the observer with the embedded // optional stats receiver func (c *Config) GetObserver(tags map[string]string) (*observer.Observer, error) { - sr, err := c.GetStatsReceiver(tags) + sr, err := c.getStatsReceiver(tags) if err != nil { return nil, err } - return observer.New(sr, time.Duration(c.StatsReceivers.TimeoutSec)*time.Second, time.Duration(c.StatsReceivers.BufferSec)*time.Second), nil + return observer.New(sr, time.Duration(c.Data.StatsReceiver.TimeoutSec)*time.Second, time.Duration(c.Data.StatsReceiver.BufferSec)*time.Second), nil } -// GetStatsReceiver builds and returns the stats receiver -func (c *Config) GetStatsReceiver(tags map[string]string) (statsreceiveriface.StatsReceiver, error) { - switch c.StatsReceiver { +// getStatsReceiver builds and returns the stats receiver +func (c *Config) getStatsReceiver(tags map[string]string) (statsreceiveriface.StatsReceiver, error) { + useReceiver := c.Data.StatsReceiver.Receiver + decoderOpts := &DecoderOptions{ + Input: useReceiver.Body, + } + + switch useReceiver.Name { case "statsd": - return statsreceiver.NewStatsDStatsReceiver( - c.StatsReceivers.StatsD.Address, - c.StatsReceivers.StatsD.Prefix, - c.StatsReceivers.StatsD.Tags, - tags, + plug := statsreceiver.AdaptStatsDStatsReceiverFunc( + statsreceiver.NewStatsDReceiverWithTags(tags), ) + component, err := c.CreateComponent(plug, decoderOpts) + if err != nil { + return nil, err + } + + if r, ok := component.(statsreceiveriface.StatsReceiver); ok { + return r, nil + } + + return nil, fmt.Errorf("could not interpret stats receiver configuration for %q", useReceiver.Name) case "": return nil, nil default: - return nil, errors.New(fmt.Sprintf("Invalid stats receiver found; expected one of 'statsd' and got '%s'", c.StatsReceiver)) + return nil, errors.New(fmt.Sprintf("Invalid stats receiver 
found; expected one of 'statsd' and got '%s'", useReceiver.Name)) } } diff --git a/config/config_test.go b/config/config_test.go index dfe39de7..b653507b 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -8,6 +8,7 @@ package config import ( "os" + "path/filepath" "testing" "github.com/stretchr/testify/assert" @@ -19,12 +20,13 @@ func TestNewConfig(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } - assert.Equal("info", c.LogLevel) - assert.Equal("stdout", c.Target) - assert.Equal("none", c.Transformation) - assert.Equal("stdin", c.Source) + assert.Equal("info", c.Data.LogLevel) + assert.Equal("stdout", c.Data.Target.Use.Name) + assert.Equal("stdin", c.Data.Source.Use.Name) // Tests on sources moved to the source package. @@ -32,10 +34,6 @@ func TestNewConfig(t *testing.T) { assert.NotNil(target) assert.Nil(err) - transformation, err := c.GetTransformations() - assert.NotNil(transformation) - assert.Nil(err) - failureTarget, err := c.GetFailureTarget("testAppName", "0.0.0") assert.NotNil(failureTarget) assert.Nil(err) @@ -43,140 +41,147 @@ func TestNewConfig(t *testing.T) { observer, err := c.GetObserver(map[string]string{}) assert.NotNil(observer) assert.Nil(err) + os.RemoveAll(`tmp_replicator`) } func TestNewConfig_FromEnv(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("LOG_LEVEL") - defer os.Unsetenv("TARGET") - defer os.Unsetenv("SOURCE") - - os.Setenv("LOG_LEVEL", "debug") - os.Setenv("TARGET", "kinesis") - os.Setenv("SOURCE", "kinesis") + t.Setenv("LOG_LEVEL", "debug") + t.Setenv("TARGET_NAME", "kinesis") + t.Setenv("SOURCE_NAME", "kinesis") + t.Setenv("TRANSFORM_CONFIG_B64", 
`dHJhbnNmb3JtIHsKICB1c2UgImpzIiB7CiAgICAvLyBjaGFuZ2VzIGFwcF9pZCB0byAiMSIKICAgIHNvdXJjZV9iNjQgPSAiWm5WdVkzUnBiMjRnYldGcGJpaDRLU0I3Q2lBZ0lDQjJZWElnYW5OdmJrOWlhaUE5SUVwVFQwNHVjR0Z5YzJVb2VDNUVZWFJoS1RzS0lDQWdJR3B6YjI1UFltcGJJbUZ3Y0Y5cFpDSmRJRDBnSWpFaU93b2dJQ0FnY21WMGRYSnVJSHNLSUNBZ0lDQWdJQ0JFWVhSaE9pQktVMDlPTG5OMGNtbHVaMmxtZVNocWMyOXVUMkpxS1FvZ0lDQWdmVHNLZlE9PSIKICB9Cn0KCnRyYW5zZm9ybSB7CiAgdXNlICJqcyIgewogICAgLy8gaWYgYXBwX2lkID09ICIxIiBpdCBpcyBjaGFuZ2VkIHRvICIyIgogICAgc291cmNlX2I2NCA9ICJablZ1WTNScGIyNGdiV0ZwYmloNEtTQjdDaUFnSUNCMllYSWdhbk52Yms5aWFpQTlJRXBUVDA0dWNHRnljMlVvZUM1RVlYUmhLVHNLSUNBZ0lHbG1JQ2hxYzI5dVQySnFXeUpoY0hCZmFXUWlYU0E5UFNBaU1TSXBJSHNLSUNBZ0lDQWdJQ0JxYzI5dVQySnFXeUpoY0hCZmFXUWlYU0E5SUNJeUlnb2dJQ0FnZlFvZ0lDQWdjbVYwZFhKdUlIc0tJQ0FnSUNBZ0lDQkVZWFJoT2lCS1UwOU9Mbk4wY21sdVoybG1lU2hxYzI5dVQySnFLUW9nSUNBZ2ZUc0tmUT09IgogIH0KfQoKdHJhbnNmb3JtIHsKICB1c2UgImpzIiB7CiAgICAvLyBpZiBhcHBfaWQgPT0gIjIiIGl0IGlzIGNoYW5nZWQgdG8gIjMiCiAgICBzb3VyY2VfYjY0ID0gIlpuVnVZM1JwYjI0Z2JXRnBiaWg0S1NCN0NpQWdJQ0IyWVhJZ2FuTnZiazlpYWlBOUlFcFRUMDR1Y0dGeWMyVW9lQzVFWVhSaEtUc0tJQ0FnSUdsbUlDaHFjMjl1VDJKcVd5SmhjSEJmYVdRaVhTQTlQU0FpTWlJcElIc0tJQ0FnSUNBZ0lDQnFjMjl1VDJKcVd5SmhjSEJmYVdRaVhTQTlJQ0l6SWdvZ0lDQWdmUW9nSUNBZ2NtVjBkWEp1SUhzS0lDQWdJQ0FnSUNCRVlYUmhPaUJLVTA5T0xuTjBjbWx1WjJsbWVTaHFjMjl1VDJKcUtRb2dJQ0FnZlRzS2ZRPT0iCiAgfQp9`) c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) - - assert.Equal("debug", c.LogLevel) - assert.Equal("kinesis", c.Target) - assert.Equal("kinesis", c.Source) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + assert.Equal("debug", c.Data.LogLevel) + assert.Equal("kinesis", c.Data.Target.Use.Name) + assert.Equal("kinesis", c.Data.Source.Use.Name) + assert.Equal(3, len(c.Data.Transformations)) + for _, transf := range c.Data.Transformations { + assert.Equal("js", transf.Use.Name) + + } } func TestNewConfig_FromEnvInvalid(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("STATS_RECEIVER_TIMEOUT_SEC") - - 
os.Setenv("STATS_RECEIVER_TIMEOUT_SEC", "debug") + t.Setenv("STATS_RECEIVER_TIMEOUT_SEC", "debug") c, err := NewConfig() assert.Nil(c) assert.NotNil(err) + if err != nil { + assert.Equal("env: parse error on field \"TimeoutSec\" of type \"int\": strconv.ParseInt: parsing \"debug\": invalid syntax", err.Error()) + } } -func TestNewConfig_InvalidTransformation(t *testing.T) { +func TestNewConfig_InvalidTarget(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("MESSAGE_TRANSFORMATION") - - os.Setenv("MESSAGE_TRANSFORMATION", "fake") + t.Setenv("TARGET_NAME", "fake") c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } - transformation, err := c.GetTransformations() - assert.Nil(transformation) + source, err := c.GetTarget() + assert.Nil(source) assert.NotNil(err) - assert.Equal("Invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk:{option}', spEnrichedFilter:{option} and got 'fake'", err.Error()) + if err != nil { + assert.Equal("Invalid target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fake'", err.Error()) + } } -func TestNewConfig_FilterFailure(t *testing.T) { +func TestNewConfig_InvalidFailureTarget(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("MESSAGE_TRANSFORMATION") - - os.Setenv("MESSAGE_TRANSFORMATION", "spEnrichedFilter:incompatibleArg") + t.Setenv("FAILURE_TARGET_NAME", "fake") c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } - transformation, err := c.GetTransformations() - assert.Nil(transformation) + source, err := c.GetFailureTarget("testAppName", "0.0.0") + assert.Nil(source) assert.NotNil(err) - assert.Equal(`Invalid filter function config, must be of the format {field name}=={value}[|{value}|...] 
or {field name}!={value}[|{value}|...]`, err.Error()) + if err != nil { + assert.Equal("Invalid failure target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fake'", err.Error()) + } } -func TestNewConfig_InvalidTarget(t *testing.T) { +func TestNewConfig_InvalidFailureFormat(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("TARGET") - - os.Setenv("TARGET", "fake") + t.Setenv("FAILURE_TARGETS_FORMAT", "fake") c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } - source, err := c.GetTarget() + source, err := c.GetFailureTarget("testAppName", "0.0.0") assert.Nil(source) assert.NotNil(err) - assert.Equal("Invalid target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fake'", err.Error()) + if err != nil { + assert.Equal("Invalid failure format found; expected one of 'snowplow' and got 'fake'", err.Error()) + } } -func TestNewConfig_InvalidFailureTarget(t *testing.T) { +func TestNewConfig_InvalidStatsReceiver(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("FAILURE_TARGET") - - os.Setenv("FAILURE_TARGET", "fake") + t.Setenv("STATS_RECEIVER_NAME", "fake") c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } - source, err := c.GetFailureTarget("testAppName", "0.0.0") + source, err := c.GetObserver(map[string]string{}) assert.Nil(source) assert.NotNil(err) - assert.Equal("Invalid failure target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fake'", err.Error()) + if err != nil { + assert.Equal("Invalid stats receiver found; expected one of 'statsd' and got 'fake'", err.Error()) + } } -func TestNewConfig_InvalidFailureFormat(t *testing.T) { +func TestNewConfig_InvalidTransformationB64(t *testing.T) { assert := assert.New(t) - defer 
os.Unsetenv("FAILURE_TARGETS_FORMAT") - - os.Setenv("FAILURE_TARGETS_FORMAT", "fake") + t.Setenv("TRANSFORM_CONFIG_B64", `fdssdnpfdspnm`) c, err := NewConfig() - assert.NotNil(c) - assert.Nil(err) - - source, err := c.GetFailureTarget("testAppName", "0.0.0") - assert.Nil(source) + assert.Nil(c) assert.NotNil(err) - assert.Equal("Invalid failure format found; expected one of 'snowplow' and got 'fake'", err.Error()) + if err != nil { + assert.Equal("Error decoding transformation config base64 from env: Failed to Base64 decode for creating file tmp_replicator/transform.hcl: illegal base64 data at input byte 12", err.Error()) + } + } -func TestNewConfig_InvalidStatsReceiver(t *testing.T) { +func TestNewConfig_UnparseableTransformationB64(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("STATS_RECEIVER") - - os.Setenv("STATS_RECEIVER", "fake") + t.Setenv("TRANSFORM_CONFIG_B64", `dHJhbnNmb3JtIHsKICB1c2UgImpzIiB7CiAgICAvLyBjaGFuZ2VzIGFwcF9pZCB0byAiMSIKICAgIHNvdXJjZV9iNjQgPSAiWm5WdVkzUnBiMjRnYldGcGJpaDRLU0I3Q2lBZ0lDQjJZWElnYW5OdmJrOWlhaUE5SUVwVFQwNHVjR0Z5YzJVb2VDNUVZWFJoS1RzS0lDQWdJR3B6YjI1UFltcGJJbUZ3Y0Y5cFpDSmRJRDBnSWpFaU93b2dJQ0FnY21WMGRYSnVJSHNLSUNBZ0lDQWdJQ0JFWVhSaE9pQktVMDlPTG5OMGNtbHVaMmxtZVNocWMyOXVUMkpxS1FvZ0lDQWdmVHNLZlE9PSIKICB9Cn0KCnRyYW5zZm9ybSB7CiAgdXNlICJqcyIgewogICAgLy8gaWYgYXBwX2lkID09ICIxIiBpdCBpcyBjaGFuZ2VkIHRvICIyIgogICAgc291cmNlX2I2NCA9ICJablZ1WTNScGIyNGdiV0ZwYmloNEtTQjdDaUFnSUNCMllYSWdhbk52Yms5aWFpQTlJRXBUVDA0dWNHRnljMlVvZUM1RVlYUmhLVHNLSUNBZ0lHbG1JQ2hxYzI5dVQySnFXeUpoY0hCZmFXUWlYU0E5UFNBaU1TSXBJSHNLSUNBZ0lDQWdJQ0JxYzI5dVQySnFXeUpoY0hCZmFXUWlYU0E5SUNJeUlnb2dJQ0FnZlFvZ0lDQWdjbVYwZFhKdUlIc0tJQ0FnSUNBZ0lDQkVZWFJoT2lCS1UwOU9Mbk4wY21sdVoybG1lU2hxYzI5dVQySnFLUW9nSUNBZ2ZUc0tmUT09IgoKfQoKdHJhbnNmb3JtIHsKICB1c2UgImpzIiB7CiAgICAvLyBpZiBhcHBfaWQgPT0gIjIiIGl0IGlzIGNoYW5nZWQgdG8gIjMiCiAgICBzb3VyY2VfYjY0ID0gIlpuVnVZM1JwYjI0Z2JXRnBiaWg0S1NCN0NpQWdJQ0IyWVhJZ2FuTnZiazlpYWlBOUlFcFRUMDR1Y0dGeWMyVW9lQzVFWVhSaEtUc0tJQ0FnSUdsbUlDaHFjMjl1VDJKcVd5SmhjSEJmYVdRaVhTQT
lQU0FpTWlJcElIc0tJQ0FnSUNBZ0lDQnFjMjl1VDJKcVd5SmhjSEJmYVdRaVhTQTlJQ0l6SWdvZ0lDQWdmUW9nSUNBZ2NtVjBkWEp1SUhzS0lDQWdJQ0FnSUNCRVlYUmhPaUJLVTA5T0xuTjBjbWx1WjJsbWVTaHFjMjl1VDJKcUtRb2dJQ0FnZlRzS2ZRPT0iCiAgfQp9`) c, err := NewConfig() - assert.NotNil(c) - assert.Nil(err) - - source, err := c.GetObserver(map[string]string{}) - assert.Nil(source) + assert.Nil(c) assert.NotNil(err) - assert.Equal("Invalid stats receiver found; expected one of 'statsd' and got 'fake'", err.Error()) + if err != nil { + assert.Equal("Error parsing transformation config from env: tmp_replicator/transform.hcl:8,11-12: Unclosed configuration block; There is no closing brace for this block before the end of the file. This may be caused by incorrect brace nesting elsewhere in this file.", err.Error()) + } } func TestNewConfig_GetTags(t *testing.T) { @@ -184,7 +189,9 @@ func TestNewConfig_GetTags(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } tags, err := c.GetTags() assert.NotNil(tags) @@ -198,104 +205,112 @@ func TestNewConfig_GetTags(t *testing.T) { assert.True(ok) } -func TestNewConfig_KafkaTargetDefaults(t *testing.T) { +func TestNewConfig_Hcl_invalids(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("TARGET") - - os.Setenv("TARGET", "kafka") + filename := filepath.Join("test-fixtures", "invalids.hcl") + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + t.Run("invalid_target", func(t *testing.T) { + target, err := c.GetTarget() + assert.Nil(target) + assert.NotNil(err) + if err != nil { + assert.Equal("Invalid target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fakeHCL'", err.Error()) + } + }) + + t.Run("invalid_failure_target", func(t *testing.T) { + ftarget, err := 
c.GetFailureTarget("testAppName", "0.0.0") + assert.Nil(ftarget) + assert.NotNil(err) + if err != nil { + assert.Equal("Invalid failure target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fakeHCL'", err.Error()) + } + }) - target := c.Targets.Kafka - assert.NotNil(target) - assert.Equal(target.MaxRetries, 10) - assert.Equal(target.ByteLimit, 1048576) - assert.Equal(target.Compress, false) - assert.Equal(target.WaitForAll, false) - assert.Equal(target.Idempotent, false) - assert.Equal(target.EnableSASL, false) - assert.Equal(target.ForceSyncProducer, false) - assert.Equal(target.FlushFrequency, 0) - assert.Equal(target.FlushMessages, 0) - assert.Equal(target.FlushBytes, 0) } -func TestNewConfig_KafkaFailureTargetDefaults(t *testing.T) { +func TestNewConfig_Hcl_defaults(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("FAILURE_TARGET") - - os.Setenv("FAILURE_TARGET", "kafka") + filename := filepath.Join("test-fixtures", "empty.hcl") + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) - - target := c.FailureTargets.Kafka - assert.NotNil(target) - assert.Equal(target.MaxRetries, 10) - assert.Equal(target.ByteLimit, 1048576) - assert.Equal(target.Compress, false) - assert.Equal(target.WaitForAll, false) - assert.Equal(target.Idempotent, false) - assert.Equal(target.EnableSASL, false) - assert.Equal(target.ForceSyncProducer, false) - assert.Equal(target.FlushFrequency, 0) - assert.Equal(target.FlushMessages, 0) - assert.Equal(target.FlushBytes, 0) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + assert.Equal("stdin", c.Data.Source.Use.Name) + assert.Equal("stdout", c.Data.Target.Use.Name) + assert.Equal("stdout", c.Data.FailureTarget.Target.Name) + assert.Equal("snowplow", c.Data.FailureTarget.Format) + assert.Equal("{}", c.Data.Sentry.Tags) + assert.Equal(false, c.Data.Sentry.Debug) + assert.Equal(1, 
c.Data.StatsReceiver.TimeoutSec) + assert.Equal(15, c.Data.StatsReceiver.BufferSec) + assert.Equal("info", c.Data.LogLevel) } -func TestNewConfig_EventhubTargetDefaults(t *testing.T) { +func TestNewConfig_Hcl_sentry(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("TARGET") - defer os.Unsetenv("TARGET_EVENTHUB_NAMESPACE") - defer os.Unsetenv("TARGET_EVENTHUB_NAME") - - os.Setenv("TARGET", "eventhub") - os.Setenv("TARGET_EVENTHUB_NAMESPACE", "fake") - os.Setenv("TARGET_EVENTHUB_NAME", "fake") + filename := filepath.Join("test-fixtures", "sentry.hcl") + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } - target := c.Targets.EventHub - assert.NotNil(target) - assert.Equal(target.EventHubName, "fake") - assert.Equal(target.EventHubNamespace, "fake") - assert.Equal(target.MessageByteLimit, 1048576) - assert.Equal(target.ChunkByteLimit, 1048576) - assert.Equal(target.ChunkMessageLimit, 500) - assert.Equal(target.ContextTimeoutInSeconds, 20) - assert.Equal(target.BatchByteLimit, 1048576) + assert.Equal(true, c.Data.Sentry.Debug) + assert.Equal("{\"testKey\":\"testValue\"}", c.Data.Sentry.Tags) + assert.Equal("testDsn", c.Data.Sentry.Dsn) } -func TestNewConfig_EventhubFailureTargetDefaults(t *testing.T) { +func TestNewConfig_HclTransformationOrder(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("FAILURE_TARGET") - defer os.Unsetenv("FAILURE_TARGET_EVENTHUB_NAMESPACE") - defer os.Unsetenv("FAILURE_TARGET_EVENTHUB_NAME") - - os.Setenv("FAILURE_TARGET", "eventhub") - os.Setenv("FAILURE_TARGET_EVENTHUB_NAMESPACE", "fake") - os.Setenv("FAILURE_TARGET_EVENTHUB_NAME", "fake") + filename := filepath.Join("test-fixtures", "transform-mocked-order.hcl") + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig 
failed with error: %q", err.Error()) + } + + assert.Equal(5, len(c.Data.Transformations)) + assert.Equal("one", c.Data.Transformations[0].Use.Name) + assert.Equal("two", c.Data.Transformations[1].Use.Name) + assert.Equal("three", c.Data.Transformations[2].Use.Name) + assert.Equal("four", c.Data.Transformations[3].Use.Name) + assert.Equal("five", c.Data.Transformations[4].Use.Name) +} - target := c.FailureTargets.EventHub - assert.NotNil(target) - assert.Equal(target.EventHubName, "fake") - assert.Equal(target.EventHubNamespace, "fake") - assert.Equal(target.MessageByteLimit, 1048576) - assert.Equal(target.ChunkByteLimit, 1048576) - assert.Equal(target.ChunkMessageLimit, 500) - assert.Equal(target.ContextTimeoutInSeconds, 20) - assert.Equal(target.BatchByteLimit, 1048576) +func TestNewConfig_B64TransformationOrder(t *testing.T) { + assert := assert.New(t) + + t.Setenv("TRANSFORM_CONFIG_B64", `dHJhbnNmb3JtIHsKICB1c2UgIm9uZSIgewogIH0KfQoKdHJhbnNmb3JtIHsKICB1c2UgInR3byIgewogIH0KfQoKdHJhbnNmb3JtIHsKICB1c2UgInRocmVlIiB7CiAgfQp9Cgp0cmFuc2Zvcm0gewogIHVzZSAiZm91ciIgewogIH0KfQoKdHJhbnNmb3JtIHsKICB1c2UgImZpdmUiIHsKICB9Cn0=`) + + c, err := NewConfig() + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + assert.Equal(5, len(c.Data.Transformations)) + assert.Equal("one", c.Data.Transformations[0].Use.Name) + assert.Equal("two", c.Data.Transformations[1].Use.Name) + assert.Equal("three", c.Data.Transformations[2].Use.Name) + assert.Equal("four", c.Data.Transformations[3].Use.Name) + assert.Equal("five", c.Data.Transformations[4].Use.Name) } diff --git a/config/decode.go b/config/decode.go new file mode 100644 index 00000000..4491d597 --- /dev/null +++ b/config/decode.go @@ -0,0 +1,169 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package config + +import ( + "errors" + "os" + + "github.com/caarlos0/env/v6" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/gohcl" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" +) + +// Decoder is the interface that wraps the Decode method. +type Decoder interface { + // Decode decodes onto target given DecoderOptions. + // The target argument must be a pointer to an allocated structure. + Decode(opts *DecoderOptions, target interface{}) error +} + +// DecoderOptions represent the options for a Decoder. +// The purpose of this type is to unify the input to the different available +// Decoders. The zero value of DecoderOptions means no-prefix/nil-input, +// which should be usable by the Decoders. +type DecoderOptions struct { + Prefix string + Input hcl.Body +} + +// envDecoder implements Decoder. +type envDecoder struct{} + +// Decode populates target from the environment. +// The target argument must be a pointer to a struct type value. +func (e *envDecoder) Decode(opts *DecoderOptions, target interface{}) error { + // Decoder Options cannot be missing + if opts == nil { + return errors.New("missing DecoderOptions for envDecoder") + } + + // If target is nil then we assume that target is not decodable. + if target == nil { + return nil + } + + envOpts := env.Options{ + Prefix: opts.Prefix, // zero value ok + } + + return env.Parse(target, envOpts) +} + +// hclDecoder implements Decoder. +type hclDecoder struct { + EvalContext *hcl.EvalContext +} + +// Decode populates target given HCL input through DecoderOptions. +// The target argument must be a pointer to an allocated structure. +// If the HCL input is nil, we assume there is nothing to do and the target +// stays unaffected. If the target is nil, we assume is not decodable. 
+func (h *hclDecoder) Decode(opts *DecoderOptions, target interface{}) error { + // Decoder Options cannot be missing + if opts == nil { + return errors.New("missing DecoderOptions for hclDecoder") + } + + src := opts.Input + if src == nil { + return nil // zero value ok + } + + // If target is nil then we assume that target is not decodable. + if target == nil { + return nil + } + + // Decode + diag := gohcl.DecodeBody(src, h.EvalContext, target) + if len(diag) > 0 { + return diag + } + + return nil +} + +// createHclContext creates an *hcl.EvalContext that is used in decoding HCL. +// Here we can add the evaluation features available for the HCL configuration +// users. +// For now, below is an example of 2 different ways users can reference +// environment variables in their HCL configuration file. +func createHclContext() *hcl.EvalContext { + evalCtx := &hcl.EvalContext{ + Functions: hclCtxFunctions(), + Variables: hclCtxVariables(), + } + + return evalCtx +} + +// hclCtxFunctions constructs the Functions map of the hcl.EvalContext +// Here, for example, we add the `env` as function. +// Users can reference any env var as `env("MY_ENV_VAR")` e.g. +// ``` +// listen_addr = env("LISTEN_ADDR") +// ``` +func hclCtxFunctions() map[string]function.Function { + funcs := map[string]function.Function{ + "env": envFunc(), + } + + return funcs +} + +// hclCtxVariables constructs the Variables map of the hcl.EvalContext +// Here, for example, we add the `env` as variable. +// Users can reference any env var as `env.MY_ENV_VAR` e.g. +// ``` +// listen_addr = env.LISTEN_ADDR +// ``` +func hclCtxVariables() map[string]cty.Value { + vars := map[string]cty.Value{ + "env": cty.ObjectVal(envVarsMap(os.Environ())), + } + + return vars +} + +// envFunc constructs a cty.Function that takes a key as string argument and +// returns a string representation of the environment variable behind it.
+func envFunc() function.Function { + return function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "key", + Type: cty.String, + AllowNull: false, + AllowUnknown: false, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + key := args[0].AsString() + value := os.Getenv(key) + return cty.StringVal(value), nil + }, + }) +} + +// envVarsMap constructs a map of the environment variables to be used in +// hcl.EvalContext +func envVarsMap(environ []string) map[string]cty.Value { + envMap := make(map[string]cty.Value) + for _, s := range environ { + for j := 1; j < len(s); j++ { + if s[j] == '=' { + envMap[s[0:j]] = cty.StringVal(s[j+1:]) + } + } + } + + return envMap +} diff --git a/config/decode_test.go b/config/decode_test.go new file mode 100644 index 00000000..4a6cb060 --- /dev/null +++ b/config/decode_test.go @@ -0,0 +1,199 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package config + +import ( + "reflect" + "testing" + + "github.com/davecgh/go-spew/spew" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclparse" + "github.com/stretchr/testify/assert" +) + +type testStruct struct { + Test string `hcl:"test_string" env:"TEST_STRING"` +} + +func TestEnvDecode(t *testing.T) { + envDecoder := envDecoder{} + + testCases := []struct { + TestName string + DecoderOpts *DecoderOptions + Target interface{} + Expected interface{} + }{ + { + "nil_target", + &DecoderOptions{}, + nil, + nil, + }, + { + "decoder_opts", + &DecoderOptions{}, + &testStruct{}, + &testStruct{ + Test: "ateststring", + }, + }, + { + "decoder_opts_with_prefix", + &DecoderOptions{ + Prefix: "PREFIX_", + }, + &testStruct{}, + &testStruct{ + Test: "ateststringprefixed", + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.TestName, func(t *testing.T) { + assert := assert.New(t) + t.Setenv("TEST_STRING", "ateststring") + t.Setenv("PREFIX_TEST_STRING", "ateststringprefixed") + + err := envDecoder.Decode(tt.DecoderOpts, tt.Target) + assert.Nil(err) + + if !reflect.DeepEqual(tt.Target, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(tt.Target), + spew.Sdump(tt.Expected)) + } + + }) + } +} + +func TestHclDecode(t *testing.T) { + evalCtx := &hcl.EvalContext{} + hclDecoder := hclDecoder{evalCtx} + hclSrc := ` +test_string = "ateststring" +` + p := hclparse.NewParser() + hclFile, diags := p.ParseHCL([]byte(hclSrc), "placeholder.hcl") + if diags.HasErrors() { + t.Errorf("Failed parsing HCL test source") + } + testInput := hclFile.Body + + testCases := []struct { + TestName string + DecoderOpts *DecoderOptions + Target interface{} + Expected interface{} + }{ + { + "nil_target", + &DecoderOptions{}, + nil, + nil, + }, + { + "decoder_opts_no_input", + &DecoderOptions{}, + &testStruct{ + Test: "noChange", + }, + &testStruct{ + Test: "noChange", + }, + }, + { + "decoder_opts_with_input", + &DecoderOptions{ + Input: testInput, + }, + 
&testStruct{}, + &testStruct{ + Test: "ateststring", + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.TestName, func(t *testing.T) { + assert := assert.New(t) + err := hclDecoder.Decode(tt.DecoderOpts, tt.Target) + if err != nil { + t.Errorf("decoding failed") + } + assert.Nil(err) + + if !reflect.DeepEqual(tt.Target, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(tt.Target), + spew.Sdump(tt.Expected)) + } + }) + } +} + +func TestCreateHclContext(t *testing.T) { + t.Setenv("TEST_STRING", "ateststring") + t.Setenv("TEST_INT", "2") + type testHclStruct struct { + TestStr string `hcl:"test_string"` + TestInt int `hcl:"test_int"` + } + + evalCtx := createHclContext() + hclDecoder := hclDecoder{evalCtx} + hclSrc := ` +test_string = env.TEST_STRING +test_int = env("TEST_INT") +` + p := hclparse.NewParser() + hclFile, diags := p.ParseHCL([]byte(hclSrc), "placeholder.hcl") + if diags.HasErrors() { + t.Errorf("Failed parsing HCL test source") + } + testInput := hclFile.Body + + testCases := []struct { + TestName string + DecoderOpts *DecoderOptions + Target interface{} + Expected interface{} + }{ + { + "Hcl_eval_context_with_env_fun_and_var", + &DecoderOptions{ + Input: testInput, + }, + &testHclStruct{}, + &testHclStruct{ + TestStr: "ateststring", + TestInt: 2, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.TestName, func(t *testing.T) { + assert := assert.New(t) + + err := hclDecoder.Decode(tt.DecoderOpts, tt.Target) + if err != nil { + t.Errorf(err.Error()) + } + assert.Nil(err) + + if !reflect.DeepEqual(tt.Target, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(tt.Target), + spew.Sdump(tt.Expected)) + } + }) + } +} diff --git a/config/examples/README.md b/config/examples/README.md new file mode 100644 index 00000000..066e56b0 --- /dev/null +++ b/config/examples/README.md @@ -0,0 +1,147 @@ +# Configuring stream-replicator from a file + +Another option to configuring stream-replicator purely from environment 
variables as described in the [wiki](https://github.com/snowplow-devops/stream-replicator/wiki), is through an HCL file. + +This directory contains particular examples of configuration options. + +## General structure + +The general structure of the configuration file is composed of: + +1. Source configuration +2. Target configuration +3. Failure target configuration +4. Observability configuration +5. Transformation configuration +6. Additional configuration options + +As a vague example: + +```hcl +// block for configuring the source +source { + use "sqs" { + // block for configuring SQS as source + } +} + +// block for configuring the target +target { + use "kafka" { + // block for configuring Kafka as target + } +} + +// block for configuring the failure target +failure_target { + use "pubsub" { + // block for configuring PubSub as failure target + } +} + +// block for configuring stats receiver +stats_receiver { + use "statsd" { + // block for configuring StatsD as a receiver + } +} + +// block for configuring sentry +sentry {} + +// block for configuring transformations +transform { + // string to configure message transformation (default: "none") + message_transformation = "none" +} + +// log level configuration (default: "info") +log_level = "info" + +// Ability to provide a GCP service account (b64) to the application directly +google_application_credentials = "" + +// Optional parameter that helps us categorise telemetry events +user_provided_id = "" +``` + +So, a complete example could be: + +```hcl +// example.hcl + +source { + use "sqs" { + queue_name = "mySqsQueue" + region = "eu-west-1" + } +} + +target { + use "kafka" { + brokers = "my-kafka-broker-connectinon-string" + topic_name = "snowplow-enriched-good" + } +} + +failure_target { + use "kinesis" { + stream_name = "some-acme-stream" + region = "us-east-1" + } +} + +stats_receiver { + use "statsd" { + address = "127.0.0.1:8125" + } +} + +sentry { + dsn = "https://acme.com/1" + debug = true +} + 
+transform { + message_transformation = "spEnrichedFilter:app_id==myApp,js:customFunction" + + use "js" { + source_b64 = "CmZ1bmN0aW9uIGN1c3RvbUZ1bmN0aW9uKGlucHV0KSB7CiAgICByZXR1cm4gaW5wdXQ7Cn0K" + timeout_sec = 2 + disable_source_maps = false + snowplow_mode = true + } +} + +log_level = "debug" + +user_provided_id = "my-example-id" +``` + +In the example files in this directory, there is a simple and extended version for configuring each: + + - source + - target + - failure_target + - sentry + - stats-receiver + +## Referencing environment variables in the configuration file + +There are 2 ways to reference environment variables in the HCL file: + +1. As `env("MY_ENV_VAR")` + + For example: + + ```txt + sasl_password = env("SASL_PASSWORD") + ``` + +2. As `env.MY_ENV_VAR` + + For example: + + ```txt + sasl_password = env.SASL_PASSWORD + ``` diff --git a/config/examples/failure-targets/eventhub-extended.hcl b/config/examples/failure-targets/eventhub-extended.hcl new file mode 100644 index 00000000..fc701c33 --- /dev/null +++ b/config/examples/failure-targets/eventhub-extended.hcl @@ -0,0 +1,30 @@ +# Extended configuration for Eventhub as a failure target (all options) + +failure_target { + use "eventhub" { + # Namespace housing Eventhub + namespace = "testNamespace" + + # Name of Eventhub + name = "testName" + + # Number of retries handled automatically by the EventHubs library. + # All retries should be completed before context timeout (default: 1). 
+ max_auto_retries = 2 + + # Default presumes paid tier byte limit is 1MB (default: 1048576) + message_byte_limit = 1048576 + + # Chunk byte limit (default: 1048576) + chunk_byte_limit = 1048576 + + # Chunk message limit (default: 500) + chunk_message_limit = 500 + + # The time (seconds) before context timeout (default: 20) + context_timeout_in_seconds = 20 + + # Default batch size of 1MB is the limit for Eventhub's high tier + batch_byte_limit = 1048576 + } +} diff --git a/config/examples/failure-targets/eventhub-simple.hcl b/config/examples/failure-targets/eventhub-simple.hcl new file mode 100644 index 00000000..258fddbc --- /dev/null +++ b/config/examples/failure-targets/eventhub-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration for Eventhub as a failure target (only required options) + +failure_target { + use "eventhub" { + # Namespace housing Eventhub + namespace = "testNamespace" + + # Name of Eventhub + name = "testName" + } +} diff --git a/config/examples/failure-targets/http-extended.hcl b/config/examples/failure-targets/http-extended.hcl new file mode 100644 index 00000000..ce3b5c95 --- /dev/null +++ b/config/examples/failure-targets/http-extended.hcl @@ -0,0 +1,42 @@ +# Extended configuration for HTTP as a failure target (all options) + +failure_target { + use "http" { + # URL endpoint + url = "https://acme.com/x" + + # Byte limit for requests (default: 1048576) + byte_limit = 1048576 + + # Request timeout in seconds (default: 5) + request_timeout_in_seconds = 5 + + # Content type for POST request (default: "application/json") + content_type = "application/json" + + # Optional headers to add to the request. + # It is provided as a JSON string of key-value pairs (default: ""). + headers = "{\"Accept-Language\":\"en-US\"}" + + # Optional basicauth username + basic_auth_username = "myUsername" + + # Optional basicauth password + # Even though you could just reference the password directly as a string, + # you could also reference an environment variable. 
+ basic_auth_password = env.MY_AUTH_PASSWORD + + # The optional certificate file for client authentication + cert_file = "myLocalhost.crt" + + # The optional key file for client authentication + key_file = "MyLocalhost.key" + + # The optional certificate authority file for TLS client authentication + ca_file = "myRootCA.crt" + + # Whether to skip verifying ssl certificates chain (default: false) + # If tls_cert and tls_key are not provided, this setting is not applied. + skip_verify_tls = true + } +} diff --git a/config/examples/failure-targets/http-simple.hcl b/config/examples/failure-targets/http-simple.hcl new file mode 100644 index 00000000..af506b92 --- /dev/null +++ b/config/examples/failure-targets/http-simple.hcl @@ -0,0 +1,8 @@ +# Simple configuration for HTTP as a failure target (only required options) + +failure_target { + use "http" { + # URL endpoint + url = "https://acme.com/x" + } +} diff --git a/config/examples/failure-targets/kafka-extended.hcl b/config/examples/failure-targets/kafka-extended.hcl new file mode 100644 index 00000000..4ef2ba60 --- /dev/null +++ b/config/examples/failure-targets/kafka-extended.hcl @@ -0,0 +1,69 @@ +# Extended configuration for Kafka as a failure target (all options) + +failure_target { + use "kafka" { + # Kafka broker connectinon string + brokers = "my-kafka-connection-string" + + # Kafka topic name + topic_name = "snowplow-enriched-good" + + # The Kafka version + target_version = "2.7.0" + + # Max retries (default: 10) + max_retries = 10 + + # Kafka default byte limit is 1MB (default: 1048576) + byte_limit = 1048576 + + # Whether to compress data (default: false). + # Reduces network usage and increases latency. 
+ compress = true + + # Sets RequireAck s= WaitForAll, which waits for min.insync.replicas + # to Ack (default: false) + wait_for_all = true + + # Exactly once writes - Also sets RequiredAcks = WaitForAll (default: false) + idempotent = true + + # Whether to enable SASL support (defailt: false) + enable_sasl = true + + # SASL AUTH + sasl_username = "mySaslUsername" + sasl_password = env.SASL_PASSWORD + + # The SASL Algorithm to use: "sha512" or "sha256" (default: "sha512") + sasl_algorithm = "sha256" + + # The optional certificate file for client authentication + cert_file = "myLocalhost.crt" + + # The optional key file for client authentication + key_file = "MyLocalhost.key" + + # The optional certificate authority file for TLS client authentication + ca_file = "myRootCA.crt" + + # Whether to skip verifying ssl certificates chain (default: false) + skip_verify_tls = true + + # Forces the use of the Sync Producer (default: false). + # Emits as fast as possible but may limit performance. + force_sync_producer = true + + # Milliseconds between flushes of events (default: 0) + # Setting to 0 means as fast as possible. + flush_frequency = 2 + + # Best effort for how many messages are sent in each batch (default: 0) + # Setting to 0 means as fast as possible. + flush_messages = 2 + + # Best effort for how many bytes will trigger a flush (default: 0) + # Setting to 0 means as fast as possible. 
+ flush_bytes = 2 + } +} diff --git a/config/examples/failure-targets/kafka-simple.hcl b/config/examples/failure-targets/kafka-simple.hcl new file mode 100644 index 00000000..a5c594fc --- /dev/null +++ b/config/examples/failure-targets/kafka-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration for Kafka as a failure target (only required options) + +failure_target { + use "kafka" { + # Kafka broker connectinon string + brokers = "my-kafka-connection-string" + + # Kafka topic name + topic_name = "snowplow-enriched-good" + } +} diff --git a/config/examples/failure-targets/kinesis-extended.hcl b/config/examples/failure-targets/kinesis-extended.hcl new file mode 100644 index 00000000..3d99cbba --- /dev/null +++ b/config/examples/failure-targets/kinesis-extended.hcl @@ -0,0 +1,14 @@ +# Extended configuration of Kinesis as a failure target (all options) + +failure_target { + use "kinesis" { + # Kinesis stream name to send data to + stream_name = "my-stream" + + # AWS region of Kinesis stream + region = "us-west-1" + + # Optional ARN to use on the stream (default: "") + role_arn = "arn:aws:iam::123456789012:role/myrole" + } +} diff --git a/config/examples/failure-targets/kinesis-simple.hcl b/config/examples/failure-targets/kinesis-simple.hcl new file mode 100644 index 00000000..2d619fc0 --- /dev/null +++ b/config/examples/failure-targets/kinesis-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration of Kinesis as a failure target (only required options) + +failure_target { + use "kinesis" { + # Kinesis stream name to send data to + stream_name = "my-stream" + + # AWS region of Kinesis stream + region = "us-west-1" + } +} diff --git a/config/examples/failure-targets/pubsub.hcl b/config/examples/failure-targets/pubsub.hcl new file mode 100644 index 00000000..f86858f0 --- /dev/null +++ b/config/examples/failure-targets/pubsub.hcl @@ -0,0 +1,11 @@ +# Configuration of PubSub as a failure target. 
+ +failure_target { + use "pubsub" { + # ID of the GCP Project + project_id = "acme-project" + + # Name of the topic to send data into + topic_name = "some-acme-topic" + } +} diff --git a/config/examples/failure-targets/sqs-extended.hcl b/config/examples/failure-targets/sqs-extended.hcl new file mode 100644 index 00000000..4c5b4c68 --- /dev/null +++ b/config/examples/failure-targets/sqs-extended.hcl @@ -0,0 +1,14 @@ +# Extended configuration of SQS as a failure target (all options) + +failure_target { + use "sqs" { + # SQS queue name + queue_name = "mySqsQueue" + + # AWS region of SQS queue + region = "us-west-1" + + # Role ARN to use on SQS queue + role_arn = "arn:aws:iam::123456789012:role/myrole" + } +} diff --git a/config/examples/failure-targets/sqs-simple.hcl b/config/examples/failure-targets/sqs-simple.hcl new file mode 100644 index 00000000..f44acdd1 --- /dev/null +++ b/config/examples/failure-targets/sqs-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration of SQS as a failure target (only required options) + +failure_target { + use "sqs" { + # SQS queue name + queue_name = "mySqsQueue" + + # AWS region of SQS queue + region = "us-west-1" + } +} diff --git a/config/examples/failure-targets/stdout.hcl b/config/examples/failure-targets/stdout.hcl new file mode 100644 index 00000000..373fb61d --- /dev/null +++ b/config/examples/failure-targets/stdout.hcl @@ -0,0 +1,6 @@ +# Stdout has no required configuration options as a failure target. +# Since it is the default failure target, the failure_target block can be omitted. 
+ +failure_target { + use "stdout" {} +} diff --git a/config/examples/sentry/sentry-extended.hcl b/config/examples/sentry/sentry-extended.hcl new file mode 100644 index 00000000..97ea0236 --- /dev/null +++ b/config/examples/sentry/sentry-extended.hcl @@ -0,0 +1,12 @@ +# Extended sentry configuration (all options) + +sentry { + # The DSN to send Sentry alerts to + dsn = "https://acme.com/1" + + # Whether to put Sentry into debug mode (default: false) + debug = true + + # Escaped JSON string with tags to send to Sentry (default: "{}") + tags = "{\"aKey\":\"aValue\"}" +} diff --git a/config/examples/sentry/sentry-simple.hcl b/config/examples/sentry/sentry-simple.hcl new file mode 100644 index 00000000..56b1edc3 --- /dev/null +++ b/config/examples/sentry/sentry-simple.hcl @@ -0,0 +1,6 @@ +# Simple sentry configuration (only required options) + +sentry { + # The DSN to send Sentry alerts to + dsn = "https://acme.com/1" +} diff --git a/config/examples/sources/kinesis-extended.hcl b/config/examples/sources/kinesis-extended.hcl new file mode 100644 index 00000000..aaacacfd --- /dev/null +++ b/config/examples/sources/kinesis-extended.hcl @@ -0,0 +1,25 @@ +# Extended configuration for Kinesis as a source (all options) + +source { + use "kinesis" { + # Kinesis stream name to read from (required) + stream_name = "my-stream" + + # AWS region of Kinesis stream (required) + region = "us-west-1" + + # App name for Stream Replicator (required) + app_name = "StreamReplicatorProd1" + + # Optional ARN to use on source stream (default: "") + role_arn = "arn:aws:iam::123456789012:role/myrole" + + # Timestamp for the kinesis shard iterator to begin processing. 
+ # Format YYYY-MM-DD HH:MM:SS.MS (miliseconds optional) + # (default: TRIM_HORIZON) + start_timestamp = "2020-01-01 10:00:00" + + # Number of events to process concurrently (default: 50) + concurrent_writes = 15 + } +} diff --git a/config/examples/sources/kinesis-simple.hcl b/config/examples/sources/kinesis-simple.hcl new file mode 100644 index 00000000..dde9c6a2 --- /dev/null +++ b/config/examples/sources/kinesis-simple.hcl @@ -0,0 +1,14 @@ +# Simple configuration for Kinesis as a source (only required options) + +source { + use "kinesis" { + # Kinesis stream name to read from + stream_name = "my-stream" + + # AWS region of Kinesis stream + region = "us-west-1" + + # App name for Stream Replicator + app_name = "StreamReplicatorProd1" + } +} diff --git a/config/examples/sources/pubsub-extended.hcl b/config/examples/sources/pubsub-extended.hcl new file mode 100644 index 00000000..6805478a --- /dev/null +++ b/config/examples/sources/pubsub-extended.hcl @@ -0,0 +1,14 @@ +# Extended configuration for PubSub as a source (all options) + +source { + use "pubsub" { + # GCP Project ID + project_id = "project-id" + + # subscription ID for the pubsub subscription + subscription_id = "subscription-id" + + # Number of events to process concurrently (default: 50) + concurrent_writes = 20 + } +} diff --git a/config/examples/sources/pubsub-simple.hcl b/config/examples/sources/pubsub-simple.hcl new file mode 100644 index 00000000..04827e3a --- /dev/null +++ b/config/examples/sources/pubsub-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration for PubSub as a source (only required options) + +source { + use "pubsub" { + # GCP Project ID + project_id = "project-id" + + # subscription ID for the pubsub subscription + subscription_id = "subscription-id" + } +} diff --git a/config/examples/sources/sqs-extended.hcl b/config/examples/sources/sqs-extended.hcl new file mode 100644 index 00000000..82f3544e --- /dev/null +++ b/config/examples/sources/sqs-extended.hcl @@ -0,0 +1,17 @@ +# 
Extended configuration for SQS as a source (all options) + +source { + use "sqs" { + # SQS queue name + queue_name = "mySqsQueue" + + # AWS region of SQS queue + region = "us-west-1" + + # Role ARN to use on source queue + role_arn = "arn:aws:iam::123456789012:role/myrole" + + # Number of events to process concurrently (default: 50) + concurrent_writes = 20 + } +} diff --git a/config/examples/sources/sqs-simple.hcl b/config/examples/sources/sqs-simple.hcl new file mode 100644 index 00000000..06b71413 --- /dev/null +++ b/config/examples/sources/sqs-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration for SQS as a source (only required options) + +source { + use "sqs" { + # SQS queue name + queue_name = "mySqsQueue" + + # AWS region of SQS queue + region = "us-west-1" + } +} diff --git a/config/examples/sources/stdin-extended.hcl b/config/examples/sources/stdin-extended.hcl new file mode 100644 index 00000000..294a725c --- /dev/null +++ b/config/examples/sources/stdin-extended.hcl @@ -0,0 +1,8 @@ +# Extended configuration for Stdin as a source (all options) + +source { + use "stdin" { + # Number of events to process concurrently (default: 50) + concurrent_writes = 20 + } +} diff --git a/config/examples/sources/stdin-simple.hcl b/config/examples/sources/stdin-simple.hcl new file mode 100644 index 00000000..3723a16e --- /dev/null +++ b/config/examples/sources/stdin-simple.hcl @@ -0,0 +1,7 @@ +# Simple configuration for Stdin as a source +# Stdin has no required configuration options. +# Since it is the default source, the source block can also be omitted. 
+ +source { + use "stdin" {} +} diff --git a/config/examples/stats-receivers/stats-receiver-statsd-extended.hcl b/config/examples/stats-receivers/stats-receiver-statsd-extended.hcl new file mode 100644 index 00000000..a516d12d --- /dev/null +++ b/config/examples/stats-receivers/stats-receiver-statsd-extended.hcl @@ -0,0 +1,20 @@ +# Extended configuration for StatsD stats receiver (all options) + +stats_receiver { + use "statsd" { + # StatsD server address + address = "127.0.0.1:8125" + + # StatsD metric prefix (default: "snowplow.stream-replicator") + prefix = "snowplow.stream-replicator" + + # Escaped JSON string with tags to send to StatsD (default: "{}") + tags = "{\"aKey\": \"aValue\"}" + } + + # Time (seconds) the observer waits for new results (default: 1) + timeout_sec = 2 + + # Aggregation time window (seconds) for metrics being collected (default: 15) + buffer_sec = 20 +} diff --git a/config/examples/stats-receivers/stats-receiver-statsd-simple.hcl b/config/examples/stats-receivers/stats-receiver-statsd-simple.hcl new file mode 100644 index 00000000..19cc8110 --- /dev/null +++ b/config/examples/stats-receivers/stats-receiver-statsd-simple.hcl @@ -0,0 +1,5 @@ +# Simple configuration for StatsD stats receiver + +stats_receiver { + use "statsd" {} +} diff --git a/config/examples/targets/eventhub-extended.hcl b/config/examples/targets/eventhub-extended.hcl new file mode 100644 index 00000000..042e1f4e --- /dev/null +++ b/config/examples/targets/eventhub-extended.hcl @@ -0,0 +1,30 @@ +# Extended configuration for Eventhub as a target (all options) + +target { + use "eventhub" { + # Namespace housing Eventhub + namespace = "testNamespace" + + # Name of Eventhub + name = "testName" + + # Number of retries handled automatically by the EventHubs library. + # All retries should be completed before context timeout (default: 1). 
+ max_auto_retries = 2 + + # Default presumes paid tier byte limit is 1MB (default: 1048576) + message_byte_limit = 1048576 + + # Chunk byte limit (default: 1048576) + chunk_byte_limit = 1048576 + + # Chunk message limit (default: 500) + chunk_message_limit = 500 + + # The time (seconds) before context timeout (default: 20) + context_timeout_in_seconds = 20 + + # Default batch size of 1MB is the limit for Eventhub's high tier + batch_byte_limit = 1048576 + } +} diff --git a/config/examples/targets/eventhub-simple.hcl b/config/examples/targets/eventhub-simple.hcl new file mode 100644 index 00000000..144b4683 --- /dev/null +++ b/config/examples/targets/eventhub-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration for Eventhub as a target (only required options) + +target { + use "eventhub" { + # Namespace housing Eventhub + namespace = "testNamespace" + + # Name of Eventhub + name = "testName" + } +} diff --git a/config/examples/targets/http-extended.hcl b/config/examples/targets/http-extended.hcl new file mode 100644 index 00000000..d1223e14 --- /dev/null +++ b/config/examples/targets/http-extended.hcl @@ -0,0 +1,42 @@ +# Extended configuration for HTTP target (all options) + +target { + use "http" { + # URL endpoint + url = "https://acme.com/x" + + # Byte limit for requests (default: 1048576) + byte_limit = 1048576 + + # Request timeout in seconds (default: 5) + request_timeout_in_seconds = 5 + + # Content type for POST request (default: "application/json") + content_type = "application/json" + + # Optional headers to add to the request. + # It is provided as a JSON string of key-value pairs (default: ""). + headers = "{\"Accept-Language\":\"en-US\"}" + + # Optional basicauth username + basic_auth_username = "myUsername" + + # Optional basicauth password + # Even though you could just reference the password directly as a string, + # you could also reference an environment variable. 
+ basic_auth_password = env.MY_AUTH_PASSWORD + + # The optional certificate file for client authentication + cert_file = "myLocalhost.crt" + + # The optional key file for client authentication + key_file = "MyLocalhost.key" + + # The optional certificate authority file for TLS client authentication + ca_file = "myRootCA.crt" + + # Whether to skip verifying ssl certificates chain (default: false) + # If tls_cert and tls_key are not provided, this setting is not applied. + skip_verify_tls = true + } +} diff --git a/config/examples/targets/http-simple.hcl b/config/examples/targets/http-simple.hcl new file mode 100644 index 00000000..20079f88 --- /dev/null +++ b/config/examples/targets/http-simple.hcl @@ -0,0 +1,8 @@ +# Simple configuration for HTTP target (only required options) + +target { + use "http" { + # URL endpoint + url = "https://acme.com/x" + } +} diff --git a/config/examples/targets/kafka-extended.hcl b/config/examples/targets/kafka-extended.hcl new file mode 100644 index 00000000..19e28ca8 --- /dev/null +++ b/config/examples/targets/kafka-extended.hcl @@ -0,0 +1,69 @@ +# Extended configuration for Kafka as a target (all options) + +target { + use "kafka" { + # Kafka broker connectinon string + brokers = "my-kafka-connection-string" + + # Kafka topic name + topic_name = "snowplow-enriched-good" + + # The Kafka version + target_version = "2.7.0" + + # Max retries (default: 10) + max_retries = 10 + + # Kafka default byte limit is 1MB (default: 1048576) + byte_limit = 1048576 + + # Whether to compress data (default: false). + # Reduces network usage and increases latency. 
+ compress = true + + # Sets RequireAck s= WaitForAll, which waits for min.insync.replicas + # to Ack (default: false) + wait_for_all = true + + # Exactly once writes - Also sets RequiredAcks = WaitForAll (default: false) + idempotent = true + + # Whether to enable SASL support (defailt: false) + enable_sasl = true + + # SASL AUTH + sasl_username = "mySaslUsername" + sasl_password = env.SASL_PASSWORD + + # The SASL Algorithm to use: "sha512" or "sha256" (default: "sha512") + sasl_algorithm = "sha256" + + # The optional certificate file for client authentication + cert_file = "myLocalhost.crt" + + # The optional key file for client authentication + key_file = "MyLocalhost.key" + + # The optional certificate authority file for TLS client authentication + ca_file = "myRootCA.crt" + + # Whether to skip verifying ssl certificates chain (default: false) + skip_verify_tls = true + + # Forces the use of the Sync Producer (default: false). + # Emits as fast as possible but may limit performance. + force_sync_producer = true + + # Milliseconds between flushes of events (default: 0) + # Setting to 0 means as fast as possible. + flush_frequency = 2 + + # Best effort for how many messages are sent in each batch (default: 0) + # Setting to 0 means as fast as possible. + flush_messages = 2 + + # Best effort for how many bytes will trigger a flush (default: 0) + # Setting to 0 means as fast as possible. 
+ flush_bytes = 2 + } +} diff --git a/config/examples/targets/kafka-simple.hcl b/config/examples/targets/kafka-simple.hcl new file mode 100644 index 00000000..b3fa6b18 --- /dev/null +++ b/config/examples/targets/kafka-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration for Kafka as a target (only required options) + +target { + use "kafka" { + # Kafka broker connectinon string + brokers = "my-kafka-connection-string" + + # Kafka topic name + topic_name = "snowplow-enriched-good" + } +} diff --git a/config/examples/targets/kinesis-extended.hcl b/config/examples/targets/kinesis-extended.hcl new file mode 100644 index 00000000..12fd8118 --- /dev/null +++ b/config/examples/targets/kinesis-extended.hcl @@ -0,0 +1,14 @@ +# Extended configuration of Kinesis as a target (all options) + +target { + use "kinesis" { + # Kinesis stream name to send data to + stream_name = "my-stream" + + # AWS region of Kinesis stream + region = "us-west-1" + + # Optional ARN to use on the stream (default: "") + role_arn = "arn:aws:iam::123456789012:role/myrole" + } +} diff --git a/config/examples/targets/kinesis-simple.hcl b/config/examples/targets/kinesis-simple.hcl new file mode 100644 index 00000000..17952616 --- /dev/null +++ b/config/examples/targets/kinesis-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration of Kinesis as a target (only required options) + +target { + use "kinesis" { + # Kinesis stream name to send data to + stream_name = "my-stream" + + # AWS region of Kinesis stream + region = "us-west-1" + } +} diff --git a/config/examples/targets/pubsub.hcl b/config/examples/targets/pubsub.hcl new file mode 100644 index 00000000..40e281e6 --- /dev/null +++ b/config/examples/targets/pubsub.hcl @@ -0,0 +1,11 @@ +# Configuration of PubSub as a target. 
+ +target { + use "pubsub" { + # ID of the GCP Project + project_id = "acme-project" + + # Name of the topic to send data into + topic_name = "some-acme-topic" + } +} diff --git a/config/examples/targets/sqs-extended.hcl b/config/examples/targets/sqs-extended.hcl new file mode 100644 index 00000000..fefaacec --- /dev/null +++ b/config/examples/targets/sqs-extended.hcl @@ -0,0 +1,14 @@ +# Extended configuration of SQS as a target (all options) + +target { + use "sqs" { + # SQS queue name + queue_name = "mySqsQueue" + + # AWS region of SQS queue + region = "us-west-1" + + # Role ARN to use on SQS queue + role_arn = "arn:aws:iam::123456789012:role/myrole" + } +} diff --git a/config/examples/targets/sqs-simple.hcl b/config/examples/targets/sqs-simple.hcl new file mode 100644 index 00000000..dc0ae07c --- /dev/null +++ b/config/examples/targets/sqs-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration of SQS as a target (only required options) + +target { + use "sqs" { + # SQS queue name + queue_name = "mySqsQueue" + + # AWS region of SQS queue + region = "us-west-1" + } +} diff --git a/config/examples/targets/stdout.hcl b/config/examples/targets/stdout.hcl new file mode 100644 index 00000000..e5580753 --- /dev/null +++ b/config/examples/targets/stdout.hcl @@ -0,0 +1,6 @@ +# Stdout has no required configuration options as a target. +# Since it is the default target, the target block can also be omitted. + +target { + use "stdout" {} +} diff --git a/config/setup_test.go b/config/setup_test.go new file mode 100644 index 00000000..4119babe --- /dev/null +++ b/config/setup_test.go @@ -0,0 +1,18 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package config + +import ( + "os" + "testing" +) + +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} diff --git a/config/test-fixtures/empty.hcl b/config/test-fixtures/empty.hcl new file mode 100644 index 00000000..46a47ea7 --- /dev/null +++ b/config/test-fixtures/empty.hcl @@ -0,0 +1 @@ +# empty HCL file to test defaults diff --git a/config/test-fixtures/invalids.hcl b/config/test-fixtures/invalids.hcl new file mode 100644 index 00000000..5f308e8a --- /dev/null +++ b/config/test-fixtures/invalids.hcl @@ -0,0 +1,9 @@ +# configuration with various invalid options + +target { + use "fakeHCL" {} +} + +failure_target { + use "fakeHCL" {} +} diff --git a/config/test-fixtures/observer.hcl b/config/test-fixtures/observer.hcl new file mode 100644 index 00000000..1ada6421 --- /dev/null +++ b/config/test-fixtures/observer.hcl @@ -0,0 +1,11 @@ +# stats receiver extended configuration + +stats_receiver { + use "statsd" { + address = "test.localhost" + prefix = "snowplow.test" + tags = "{\"testKey\": \"testValue\"}" + } + timeout_sec = 2 + buffer_sec = 20 +} diff --git a/config/test-fixtures/sentry.hcl b/config/test-fixtures/sentry.hcl new file mode 100644 index 00000000..55e0e117 --- /dev/null +++ b/config/test-fixtures/sentry.hcl @@ -0,0 +1,7 @@ +# just sentry configuration + +sentry { + dsn = "testDsn" + debug = true + tags = "{\"testKey\":\"testValue\"}" +} diff --git a/config/test-fixtures/source-kinesis-extended.hcl b/config/test-fixtures/source-kinesis-extended.hcl new file mode 100644 index 00000000..cbb85ada --- /dev/null +++ b/config/test-fixtures/source-kinesis-extended.hcl @@ -0,0 +1,12 @@ +# kinesis source extended configuration + +source { + use "kinesis" { + stream_name = "testStream" + region = "us-test-1" + role_arn = "xxx-test-role-arn" + app_name = "testApp" + start_timestamp = "2022-03-15 07:52:53" + concurrent_writes = 51 + } +} diff --git a/config/test-fixtures/source-kinesis-simple.hcl 
b/config/test-fixtures/source-kinesis-simple.hcl new file mode 100644 index 00000000..0030e252 --- /dev/null +++ b/config/test-fixtures/source-kinesis-simple.hcl @@ -0,0 +1,9 @@ +# kinesis source required configuration + +source { + use "kinesis" { + stream_name = "testStream" + region = "us-test-1" + app_name = "testApp" + } +} diff --git a/config/test-fixtures/source-sqs.hcl b/config/test-fixtures/source-sqs.hcl new file mode 100644 index 00000000..ca226ba7 --- /dev/null +++ b/config/test-fixtures/source-sqs.hcl @@ -0,0 +1,9 @@ +# sqs source configuration + +source { + use "sqs" { + queue_name = "testQueue" + region = "us-test-1" + role_arn = "xxx-test-role-arn" + } +} diff --git a/config/test-fixtures/target-eventhub-extended.hcl b/config/test-fixtures/target-eventhub-extended.hcl new file mode 100644 index 00000000..d8b2bcfe --- /dev/null +++ b/config/test-fixtures/target-eventhub-extended.hcl @@ -0,0 +1,15 @@ +# eventhub target extended config + +target { + use "eventhub" { + namespace = "testNamespace" + name = "testName" + max_auto_retries = 2 + message_byte_limit = 1000000 + chunk_byte_limit = 1000000 + chunk_message_limit = 501 + context_timeout_in_seconds = 21 + batch_byte_limit = 1000000 + set_eh_partition_key = false + } +} diff --git a/config/test-fixtures/target-eventhub-simple.hcl b/config/test-fixtures/target-eventhub-simple.hcl new file mode 100644 index 00000000..205ac45a --- /dev/null +++ b/config/test-fixtures/target-eventhub-simple.hcl @@ -0,0 +1,8 @@ +# eventhub target config + +target { + use "eventhub" { + namespace = "testNamespace" + name = "testName" + } +} diff --git a/config/test-fixtures/target-http-extended.hcl b/config/test-fixtures/target-http-extended.hcl new file mode 100644 index 00000000..d29f4d8f --- /dev/null +++ b/config/test-fixtures/target-http-extended.hcl @@ -0,0 +1,17 @@ +# http target extended configuration + +target { + use "http" { + url = "testUrl" + byte_limit = 1000000 + request_timeout_in_seconds = 2 + 
content_type = "test/test" + headers = "{\"Accept-Language\":\"en-US\"}" + basic_auth_username = "testUsername" + basic_auth_password = "testPass" + cert_file = "myLocalhost.crt" + key_file = "MyLocalhost.key" + ca_file = "myRootCA.crt" + skip_verify_tls = true + } +} diff --git a/config/test-fixtures/target-http-simple.hcl b/config/test-fixtures/target-http-simple.hcl new file mode 100644 index 00000000..f7617dd9 --- /dev/null +++ b/config/test-fixtures/target-http-simple.hcl @@ -0,0 +1,7 @@ +# http target required configuration + +target { + use "http" { + url = "testUrl" + } +} diff --git a/config/test-fixtures/target-kafka-extended.hcl b/config/test-fixtures/target-kafka-extended.hcl new file mode 100644 index 00000000..b325fcd7 --- /dev/null +++ b/config/test-fixtures/target-kafka-extended.hcl @@ -0,0 +1,26 @@ +# kafka target extended configuration + +target { + use "kafka" { + brokers = "testBrokers" + topic_name = "testTopic" + target_version = "1.2.3" + max_retries = 11 + byte_limit = 1000000 + compress = true + wait_for_all = true + idempotent = true + enable_sasl = true + sasl_username = "testUsername" + sasl_password = "testPass" + sasl_algorithm = "sha256" + cert_file = "myLocalhost.crt" + key_file = "MyLocalhost.key" + ca_file = "myRootCA.crt" + skip_verify_tls = true + force_sync_producer = true + flush_frequency = 2 + flush_messages = 2 + flush_bytes = 2 + } +} diff --git a/config/test-fixtures/target-kafka-simple.hcl b/config/test-fixtures/target-kafka-simple.hcl new file mode 100644 index 00000000..29fa4780 --- /dev/null +++ b/config/test-fixtures/target-kafka-simple.hcl @@ -0,0 +1,8 @@ +# kafka target required configuration + +target { + use "kafka" { + brokers = "testBrokers" + topic_name = "testTopic" + } +} diff --git a/config/test-fixtures/target-kinesis.hcl b/config/test-fixtures/target-kinesis.hcl new file mode 100644 index 00000000..763ca44b --- /dev/null +++ b/config/test-fixtures/target-kinesis.hcl @@ -0,0 +1,9 @@ +# kinesis target 
configuration + +target { + use "kinesis" { + stream_name = "testStream" + region = "eu-test-1" + role_arn = "xxx-test-role-arn" + } +} diff --git a/config/test-fixtures/target-pubsub.hcl b/config/test-fixtures/target-pubsub.hcl new file mode 100644 index 00000000..e71b2a30 --- /dev/null +++ b/config/test-fixtures/target-pubsub.hcl @@ -0,0 +1,8 @@ +# pubsub target configuration + +target { + use "pubsub" { + project_id = "testId" + topic_name = "testTopic" + } +} diff --git a/config/test-fixtures/target-sqs.hcl b/config/test-fixtures/target-sqs.hcl new file mode 100644 index 00000000..81dceb89 --- /dev/null +++ b/config/test-fixtures/target-sqs.hcl @@ -0,0 +1,9 @@ +# config for sqs target + +target { + use "sqs" { + queue_name = "testQueue" + region = "eu-test-1" + role_arn = "xxx-test-role-arn" + } +} diff --git a/config/test-fixtures/transform-js-error.hcl b/config/test-fixtures/transform-js-error.hcl new file mode 100644 index 00000000..f39d5764 --- /dev/null +++ b/config/test-fixtures/transform-js-error.hcl @@ -0,0 +1,7 @@ +# transform configuration - js - compile error + +transform { + use "js" { + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB3aG9vcHMgd2hhdCBpcyB0aGlzCiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGpzb25PYmpbIndyb25nX2tleSJdID0gIndoeSBhcmUgeW91IGRlY29kaW5nIHRoaXMiOwoKICAgIHJldHVybiB7CiAgICAgICAgRGF0YTogSlNPTi5zdHJpbmdpZnkoanNvbk9iaikKICAgIH07Cn0=" + } +} diff --git a/config/test-fixtures/transform-js-extended.hcl b/config/test-fixtures/transform-js-extended.hcl new file mode 100644 index 00000000..e6d2ea6c --- /dev/null +++ b/config/test-fixtures/transform-js-extended.hcl @@ -0,0 +1,10 @@ +# transform configuration - js - extended + +transform { + use "js" { + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGpzb25PYmpbImFwcF9pZCJdID0gImNoYW5nZWQiOwogICAgcmV0dXJuIHsKICAgICAgICBEYXRhOiBKU09OLnN0cmluZ2lmeShqc29uT2JqKQogICAgfTsKfQ==" + timeout_sec = 20 + disable_source_maps = true + 
snowplow_mode = false + } +} \ No newline at end of file diff --git a/config/test-fixtures/transform-js-simple.hcl b/config/test-fixtures/transform-js-simple.hcl new file mode 100644 index 00000000..3aeac3ec --- /dev/null +++ b/config/test-fixtures/transform-js-simple.hcl @@ -0,0 +1,7 @@ +# transform configuration - js - simple + +transform { + use "js" { + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICByZXR1cm4geDsKfQkgICAKCQ==" + } +} diff --git a/config/test-fixtures/transform-lua-extended.hcl b/config/test-fixtures/transform-lua-extended.hcl new file mode 100644 index 00000000..0fcc0db7 --- /dev/null +++ b/config/test-fixtures/transform-lua-extended.hcl @@ -0,0 +1,10 @@ +# transform configuration - lua - extended + +transform { + use "lua" { + source_b64 = "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ" + timeout_sec = 10 + snowplow_mode = false + sandbox = false + } +} \ No newline at end of file diff --git a/config/test-fixtures/transform-lua-simple.hcl b/config/test-fixtures/transform-lua-simple.hcl new file mode 100644 index 00000000..8f3d33a9 --- /dev/null +++ b/config/test-fixtures/transform-lua-simple.hcl @@ -0,0 +1,7 @@ +# transform configuration - lua - simple + +transform { + use "lua" { + source_b64 = "CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ" + } +} diff --git a/config/test-fixtures/transform-mixed-error.hcl b/config/test-fixtures/transform-mixed-error.hcl new file mode 100644 index 00000000..761e7e26 --- /dev/null +++ b/config/test-fixtures/transform-mixed-error.hcl @@ -0,0 +1,20 @@ +transform { + use "js" { + timeout_sec = 15 + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGpzb25PYmpbImFwcF9pZCJdID0gImNoYW5nZWQiOwogICAgcmV0dXJuIHsKICAgICAgICBEYXRhOiBKU09OLnN0cmluZ2lmeShqc29uT2JqKQogICAgfTsKfQ==" + } +} + +transform { + use "js" { + timeout_sec = 15 + source_b64 = 
"ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB3aG9vcHMgd2hhdCBpcyB0aGlzCiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGpzb25PYmpbIndyb25nX2tleSJdID0gIndoeSBhcmUgeW91IGRlY29kaW5nIHRoaXMiOwoKICAgIHJldHVybiB7CiAgICAgICAgRGF0YTogSlNPTi5zdHJpbmdpZnkoanNvbk9iaikKICAgIH07Cn0=" + } +} + +transform { + use "lua" { + timeout_sec = 15 + source_b64 = "ZnVuY3Rpb24gbWFpbih4KQogICB4LkRhdGEgPSAiSGVsbG86IiAuLiB4LkRhdGEKICAgcmV0dXJuIHgKZW5k" + } +} \ No newline at end of file diff --git a/config/test-fixtures/transform-mixed-filtered.hcl b/config/test-fixtures/transform-mixed-filtered.hcl new file mode 100644 index 00000000..1d6711db --- /dev/null +++ b/config/test-fixtures/transform-mixed-filtered.hcl @@ -0,0 +1,23 @@ +transform { + use "js" { + timeout_sec = 15 + // return x; + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICByZXR1cm4geDsKfQ==" + } +} + +transform { + use "spEnrichedFilter" { + atomic_field = "app_id" + regex = "wrong" + regex_timeout = 10 + } +} + +transform { + use "js" { + timeout_sec = 15 + // return x; + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICByZXR1cm4geDsKfQ==" + } +} \ No newline at end of file diff --git a/config/test-fixtures/transform-mixed-order.hcl b/config/test-fixtures/transform-mixed-order.hcl new file mode 100644 index 00000000..b64fdf05 --- /dev/null +++ b/config/test-fixtures/transform-mixed-order.hcl @@ -0,0 +1,20 @@ +transform { + use "js" { + // changes app_id to "1" + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGpzb25PYmpbImFwcF9pZCJdID0gIjEiOwogICAgcmV0dXJuIHsKICAgICAgICBEYXRhOiBKU09OLnN0cmluZ2lmeShqc29uT2JqKQogICAgfTsKfQ==" + } +} + +transform { + use "js" { + // if app_id == "1" it is changed to "2" + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGlmIChqc29uT2JqWyJhcHBfaWQiXSA9PSAiMSIpIHsKICAgICAgICBqc29uT2JqWyJhcHBfaWQiXSA9ICIyIgogICAgfQogICAgcmV0dXJuIHsKICAgICAgICBEYXRhOiBKU09OLnN0cmluZ2lmeShqc29uT2JqKQogICAgfTsKfQ==" + } +} + +transform { + 
use "js" { + // if app_id == "2" it is changed to "3" + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGlmIChqc29uT2JqWyJhcHBfaWQiXSA9PSAiMiIpIHsKICAgICAgICBqc29uT2JqWyJhcHBfaWQiXSA9ICIzIgogICAgfQogICAgcmV0dXJuIHsKICAgICAgICBEYXRhOiBKU09OLnN0cmluZ2lmeShqc29uT2JqKQogICAgfTsKfQ==" + } +} \ No newline at end of file diff --git a/config/test-fixtures/transform-mixed.hcl b/config/test-fixtures/transform-mixed.hcl new file mode 100644 index 00000000..aadf0393 --- /dev/null +++ b/config/test-fixtures/transform-mixed.hcl @@ -0,0 +1,17 @@ +transform { + use "js" { + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGpzb25PYmpbImFwcF9pZCJdID0gImNoYW5nZWQiOwogICAgcmV0dXJuIHsKICAgICAgICBEYXRhOiBKU09OLnN0cmluZ2lmeShqc29uT2JqKQogICAgfTsKfQ==" + } +} + +transform { + use "js" { + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGpzb25PYmpbImFwcF9pZCJdID0gImFnYWluIjsKICAgIHJldHVybiB7CiAgICAgICAgRGF0YTogSlNPTi5zdHJpbmdpZnkoanNvbk9iaikKICAgIH07Cn0=" + } +} + +transform { + use "lua" { + source_b64 = "ZnVuY3Rpb24gbWFpbih4KQogIHguRGF0YSA9ICJIZWxsbzoiIC4uIHguRGF0YQogIHJldHVybiB4CmVuZA==" + } +} \ No newline at end of file diff --git a/config/test-fixtures/transform-mocked-order.hcl b/config/test-fixtures/transform-mocked-order.hcl new file mode 100644 index 00000000..74ebc8fa --- /dev/null +++ b/config/test-fixtures/transform-mocked-order.hcl @@ -0,0 +1,24 @@ +transform { + use "one" { + } +} + +transform { + use "two" { + } +} + +transform { + use "three" { + } +} + +transform { + use "four" { + } +} + +transform { + use "five" { + } +} \ No newline at end of file diff --git a/go.mod b/go.mod index 16376d89..e421fbd4 100644 --- a/go.mod +++ b/go.mod @@ -3,92 +3,108 @@ module github.com/snowplow-devops/stream-replicator go 1.17 require ( - cloud.google.com/go v0.91.1 // indirect - cloud.google.com/go/pubsub v1.14.0 - 
github.com/Azure/azure-amqp-common-go/v3 v3.1.0 // indirect - github.com/Azure/azure-event-hubs-go/v3 v3.3.12 - github.com/Azure/azure-sdk-for-go v56.2.0+incompatible // indirect - github.com/Azure/go-amqp v0.13.11 // indirect - github.com/Azure/go-autorest/autorest v0.11.19 // indirect - github.com/Azure/go-autorest/autorest/adal v0.9.14 // indirect - github.com/Shopify/sarama v1.34.0 - github.com/aws/aws-lambda-go v1.26.0 - github.com/aws/aws-sdk-go v1.40.22 - github.com/caarlos0/env/v6 v6.6.2 - github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect - github.com/form3tech-oss/jwt-go v3.2.5+incompatible // indirect - github.com/getsentry/sentry-go v0.11.0 + cloud.google.com/go v0.103.0 // indirect + cloud.google.com/go/pubsub v1.24.0 + github.com/Azure/azure-amqp-common-go/v3 v3.2.3 // indirect + github.com/Azure/azure-event-hubs-go/v3 v3.3.18 + github.com/Azure/azure-sdk-for-go v66.0.0+incompatible // indirect + github.com/Azure/go-amqp v0.17.5 // indirect + github.com/Azure/go-autorest/autorest v0.11.27 // indirect + github.com/Azure/go-autorest/autorest/adal v0.9.20 // indirect + github.com/Shopify/sarama v1.34.1 + github.com/aws/aws-sdk-go v1.44.60 + github.com/caarlos0/env/v6 v6.9.3 + github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect + github.com/getsentry/sentry-go v0.13.0 github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/uuid v1.3.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-multierror v1.1.1 github.com/jpillora/backoff v1.0.0 // indirect - github.com/klauspost/compress v1.15.0 // indirect - github.com/mitchellh/mapstructure v1.4.1 // indirect + github.com/klauspost/compress v1.15.9 // indirect + github.com/mitchellh/mapstructure v1.5.0 github.com/myesui/uuid v1.0.0 // indirect - github.com/pierrec/lz4 v2.6.1+incompatible // indirect github.com/pkg/errors v0.9.1 - github.com/sirupsen/logrus v1.8.1 + github.com/sirupsen/logrus 
v1.9.0 github.com/smira/go-statsd v1.3.2 github.com/snowplow-devops/go-retry v0.0.0-20210106090855-8989bbdbae1c github.com/snowplow-devops/go-sentryhook v0.0.0-20210106082031-21bf7f9dac2a - github.com/snowplow/snowplow-golang-analytics-sdk v0.1.0 - github.com/stretchr/testify v1.7.0 + github.com/snowplow/snowplow-golang-analytics-sdk v0.3.0 + github.com/stretchr/testify v1.7.1 github.com/twinj/uuid v1.0.0 github.com/twitchscience/kinsumer v0.0.0-20210611163023-da24975e2c91 - github.com/urfave/cli v1.22.5 - github.com/xdg/scram v1.0.3 - golang.org/x/crypto v0.0.0-20220214200702-86341886e292 // indirect - golang.org/x/mod v0.5.0 // indirect - golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2 // indirect - golang.org/x/oauth2 v0.0.0-20210810183815-faf39c7919d5 // indirect - golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e // indirect + github.com/urfave/cli v1.22.9 + github.com/xdg/scram v1.0.5 + golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa // indirect + golang.org/x/net v0.0.0-20220722155237-a158d28d115b // indirect + golang.org/x/oauth2 v0.0.0-20220722155238-128564f6959c // indirect + golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f // indirect golang.org/x/text v0.3.7 // indirect - google.golang.org/api v0.54.0 // indirect - google.golang.org/genproto v0.0.0-20210813162853-db860fec028c // indirect - google.golang.org/grpc v1.40.0 // indirect + google.golang.org/api v0.88.0 // indirect + google.golang.org/genproto v0.0.0-20220720214146-176da50484ac + google.golang.org/grpc v1.48.0 gopkg.in/stretchr/testify.v1 v1.2.2 // indirect ) require ( + github.com/davecgh/go-spew v1.1.1 + github.com/dlclark/regexp2 v1.7.0 + github.com/dop251/goja v0.0.0-20220722151623-4765a9872229 + github.com/goccy/go-json v0.9.10 + github.com/hashicorp/hcl/v2 v2.13.0 + github.com/snowplow/snowplow-golang-tracker/v2 v2.4.1 + github.com/yuin/gluamapper v0.0.0-20150323120927-d836955830e7 + github.com/yuin/gopher-lua v0.0.0-20220504180219-658193537a64 + github.com/zclconf/go-cty 
v1.10.0 + layeh.com/gopher-json v0.0.0-20201124131017-552bb3c4c3bf +) + +require ( + cloud.google.com/go/compute v1.7.0 // indirect + cloud.google.com/go/iam v0.3.0 // indirect github.com/Azure/go-autorest v14.2.0+incompatible // indirect github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect github.com/Azure/go-autorest/autorest/to v0.4.0 // indirect github.com/Azure/go-autorest/autorest/validation v0.3.1 // indirect github.com/Azure/go-autorest/logger v0.2.1 // indirect github.com/Azure/go-autorest/tracing v0.6.0 // indirect - github.com/davecgh/go-spew v1.1.1 // indirect + github.com/agext/levenshtein v1.2.3 // indirect + github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/devigned/tab v0.1.1 // indirect - github.com/eapache/go-resiliency v1.2.0 // indirect + github.com/eapache/go-resiliency v1.3.0 // indirect github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 // indirect github.com/eapache/queue v1.1.0 // indirect + github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect + github.com/golang-jwt/jwt/v4 v4.4.2 // indirect github.com/golang/protobuf v1.5.2 // indirect - github.com/google/go-cmp v0.5.6 // indirect - github.com/googleapis/gax-go/v2 v2.0.5 // indirect - github.com/hashicorp/go-uuid v1.0.2 // indirect + github.com/google/go-cmp v0.5.8 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.1.0 // indirect + github.com/googleapis/gax-go/v2 v2.4.0 // indirect + github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + github.com/hashicorp/go-memdb v1.3.3 // indirect + github.com/hashicorp/go-uuid v1.0.3 // indirect + github.com/hashicorp/golang-lru v0.5.4 // indirect github.com/jcmturner/aescts/v2 v2.0.0 // indirect github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect github.com/jcmturner/gofork v1.0.0 // indirect github.com/jcmturner/gokrb5/v8 v8.4.2 // indirect github.com/jcmturner/rpc/v2 v2.0.3 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect - 
github.com/json-iterator/go v1.1.11 // indirect - github.com/jstemmer/go-junit-report v0.9.1 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/mattn/go-sqlite3 v2.0.3+incompatible // indirect + github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect - github.com/modern-go/reflect2 v1.0.1 // indirect - github.com/pierrec/lz4/v4 v4.1.14 // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/pierrec/lz4/v4 v4.1.15 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/xdg/stringprep v1.0.3 // indirect go.opencensus.io v0.23.0 // indirect - golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 // indirect - golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect - golang.org/x/tools v0.1.5 // indirect - golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/protobuf v1.27.1 // indirect + google.golang.org/protobuf v1.28.0 // indirect gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) diff --git a/go.sum b/go.sum index f46adff2..17e927ba 100644 --- a/go.sum +++ b/go.sum @@ -21,61 +21,79 @@ cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= -cloud.google.com/go v0.88.0/go.mod h1:dnKwfYbP9hQhefiUvpbcAyoGSHUrOxR20JVElLiUvEY= cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= -cloud.google.com/go v0.91.1 
h1:w+u8ttN/QtYrpvgXNUd2G6kwqrqCIQbkINlXQjHP1ek= -cloud.google.com/go v0.91.1/go.mod h1:V358WZfbFQkmC3gv5XCxzZq2e3h7OGvQR0IXtj77ylI= +cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= +cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= +cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= +cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= +cloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U= +cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= +cloud.google.com/go v0.102.0/go.mod h1:oWcCzKlqJ5zgHQt9YsaeTY9KzIvjyy0ArmiBUgpQ+nc= +cloud.google.com/go v0.102.1/go.mod h1:XZ77E9qnTEnrgEOvr4xzfdX5TRo7fB4T2F4O6+34hIU= +cloud.google.com/go v0.103.0 h1:YXtxp9ymmZjlGzxV7VrYQ8aaQuAgcqxSy6YhDX4I458= +cloud.google.com/go v0.103.0/go.mod h1:vwLx1nqLrzLX/fpwSMOXmFIqBOyHsvHbnAdbGSJ+mKk= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= +cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= +cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= +cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s= +cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU= 
+cloud.google.com/go/compute v1.7.0 h1:v/k9Eueb8aAJ0vZuxKMrgm6kPhCLZU9HxFU+AFDs9Uk= +cloud.google.com/go/compute v1.7.0/go.mod h1:435lt8av5oL9P3fv1OEzSbSUe+ybHXGMPQHHZWZxy9U= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/iam v0.1.0/go.mod h1:vcUNEa0pEm0qRVpmWepWaFMIAI8/hjB9mO8rNCJtF6c= +cloud.google.com/go/iam v0.3.0 h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc= +cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= +cloud.google.com/go/kms v1.4.0 h1:iElbfoE61VeLhnZcGOltqL8HIly8Nhbe5t6JlH9GXjo= +cloud.google.com/go/kms v1.4.0/go.mod h1:fajBHndQ+6ubNw6Ss2sSd+SWvjL26RNo/dr7uxsnnOA= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/pubsub v1.14.0 h1:l58eo7mixlotSL9sQo/+h12Nm+3d30IMccgT2AssMME= -cloud.google.com/go/pubsub v1.14.0/go.mod h1:boZDy/YGYWAyVd11q7KN4HFOJcadGhUfEBfzweSQ7Ww= +cloud.google.com/go/pubsub v1.24.0 h1:aCS6wSMzrc602OeXUMA66KGlyXxpdkHdwN+FSBv/sUg= +cloud.google.com/go/pubsub v1.24.0/go.mod h1:rWv09Te1SsRpRGPiWOMDKraMQTJyJps4MkUCoMGUgqw= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.22.1/go.mod 
h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= +cloud.google.com/go/storage v1.23.0/go.mod h1:vOEEDNFnciUMhBeT6hsJIn3ieU5cFRmzeLgDvXzfIXc= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= -github.com/Azure/azure-amqp-common-go/v3 v3.0.1/go.mod h1:PBIGdzcO1teYoufTKMcGibdKaYZv4avS+O6LNIp8bq0= -github.com/Azure/azure-amqp-common-go/v3 v3.1.0 h1:1N4YSkWYWffOpQHromYdOucBSQXhNRKzqtgICy6To8Q= -github.com/Azure/azure-amqp-common-go/v3 v3.1.0/go.mod h1:PBIGdzcO1teYoufTKMcGibdKaYZv4avS+O6LNIp8bq0= -github.com/Azure/azure-event-hubs-go/v3 v3.3.12 h1:jaZxZtDdOKSMxg1bJb6Yv2R4pUEKvEhok6BoHpcHvr4= -github.com/Azure/azure-event-hubs-go/v3 v3.3.12/go.mod h1:vWHatYv3Y8J9rY4GGKECEs6fF3fSUHuFS/m+ErhP0gw= +github.com/Azure/azure-amqp-common-go/v3 v3.2.3 h1:uDF62mbd9bypXWi19V1bN5NZEO84JqgmI5G73ibAmrk= +github.com/Azure/azure-amqp-common-go/v3 v3.2.3/go.mod h1:7rPmbSfszeovxGfc5fSAXE4ehlXQZHpMja2OtxC2Tas= +github.com/Azure/azure-event-hubs-go/v3 v3.3.18 h1:jgWDk2qmknA0UsfyzjHiW5yciOw3aBY0Oq9p/M9lz2Q= +github.com/Azure/azure-event-hubs-go/v3 v3.3.18/go.mod h1:R5H325+EzgxcBDkUerEwtor7ZQg77G7HiOTwpcuIVXY= github.com/Azure/azure-pipeline-go v0.1.8/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9achrP7OxIzeTn1Yg= github.com/Azure/azure-pipeline-go v0.1.9/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9achrP7OxIzeTn1Yg= github.com/Azure/azure-sdk-for-go v51.1.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= -github.com/Azure/azure-sdk-for-go v56.2.0+incompatible h1:2GrG1JkTSMqLquy1pqVsjeRJhNtZLjss2+rx8ogZXx4= -github.com/Azure/azure-sdk-for-go v56.2.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go v66.0.0+incompatible h1:bmmC38SlE8/E81nNADlgmVGurPWMHDX2YNXVQMrBpEE= +github.com/Azure/azure-sdk-for-go v66.0.0+incompatible/go.mod 
h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-storage-blob-go v0.6.0/go.mod h1:oGfmITT1V6x//CswqY2gtAHND+xIP64/qL7a5QJix0Y= -github.com/Azure/go-amqp v0.13.0/go.mod h1:qj+o8xPCz9tMSbQ83Vp8boHahuRDl5mkNHyt1xlxUTs= -github.com/Azure/go-amqp v0.13.10/go.mod h1:D5ZrjQqB1dyp1A+G73xeL/kNn7D5qHJIIsNNps7YNmk= -github.com/Azure/go-amqp v0.13.11 h1:E28zKoWuzO4+D80iUD88BUorI5PqvIZ/S/77md3hIvA= -github.com/Azure/go-amqp v0.13.11/go.mod h1:D5ZrjQqB1dyp1A+G73xeL/kNn7D5qHJIIsNNps7YNmk= +github.com/Azure/go-amqp v0.17.0/go.mod h1:9YJ3RhxRT1gquYnzpZO1vcYMMpAdJT+QEg6fwmw9Zlg= +github.com/Azure/go-amqp v0.17.5 h1:7Lsi9H9ijCAfqOaMiNmQ4c+GL9bdrpCjebNKhV/eQ+c= +github.com/Azure/go-amqp v0.17.5/go.mod h1:9YJ3RhxRT1gquYnzpZO1vcYMMpAdJT+QEg6fwmw9Zlg= github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs= github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= github.com/Azure/go-autorest/autorest v0.9.3/go.mod h1:GsRuLYvwzLjjjRoWEIyMUaYq8GNUx2nRB378IPt/1p0= -github.com/Azure/go-autorest/autorest v0.11.3/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw= github.com/Azure/go-autorest/autorest v0.11.18/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA= -github.com/Azure/go-autorest/autorest v0.11.19 h1:7/IqD2fEYVha1EPeaiytVKhzmPV223pfkRIQUGOK2IE= -github.com/Azure/go-autorest/autorest v0.11.19/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA= +github.com/Azure/go-autorest/autorest v0.11.27 h1:F3R3q42aWytozkV8ihzcgMO4OA4cuqr3bNlsEuF6//A= +github.com/Azure/go-autorest/autorest v0.11.27/go.mod h1:7l8ybrIdUmGqZMTD0sRtAr8NvbHjfofbf8RSP2q7w7U= github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= github.com/Azure/go-autorest/autorest/adal v0.8.0/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc= 
github.com/Azure/go-autorest/autorest/adal v0.8.1/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q= -github.com/Azure/go-autorest/autorest/adal v0.9.0/go.mod h1:/c022QCutn2P7uY+/oQWWNcK9YU+MH96NgK+jErpbcg= github.com/Azure/go-autorest/autorest/adal v0.9.13/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M= -github.com/Azure/go-autorest/autorest/adal v0.9.14 h1:G8hexQdV5D4khOXrWG2YuLCFKhWYmWD8bHYaXN5ophk= -github.com/Azure/go-autorest/autorest/adal v0.9.14/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M= +github.com/Azure/go-autorest/autorest/adal v0.9.18/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= +github.com/Azure/go-autorest/autorest/adal v0.9.20 h1:gJ3E98kMpFB1MFqQCvA1yFab8vthOeD4VlFRQULxahg= +github.com/Azure/go-autorest/autorest/adal v0.9.20/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= github.com/Azure/go-autorest/autorest/azure/auth v0.4.2 h1:iM6UAvjR97ZIeR93qTcwpKNMpV+/FTWjwEbuPD495Tk= github.com/Azure/go-autorest/autorest/azure/auth v0.4.2/go.mod h1:90gmfKdlmKgfjUpnCEpOJzsUEjrWDSLwHIG73tSXddM= github.com/Azure/go-autorest/autorest/azure/cli v0.3.1 h1:LXl088ZQlP0SBppGFsRZonW6hSvwgL5gRByMbvUbx8U= @@ -87,15 +105,14 @@ github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSY github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM= -github.com/Azure/go-autorest/autorest/mocks v0.4.0/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= -github.com/Azure/go-autorest/autorest/mocks v0.4.1 h1:K0laFcLE6VLTOwNgSxaGbUcLPuGXlNkbVvq4cW4nIHk= github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= +github.com/Azure/go-autorest/autorest/mocks v0.4.2 h1:PGN4EDXnuQbojHbU0UWoNvmu9AGVwYHG9/fkDYhtAfw= 
+github.com/Azure/go-autorest/autorest/mocks v0.4.2/go.mod h1:Vy7OitM9Kei0i1Oj+LvyAWMXJHeKH1MVlzFugfVrmyU= github.com/Azure/go-autorest/autorest/to v0.4.0 h1:oXVqrxakqqV1UZdSazDOPOLvOIz+XA683u8EctwboHk= github.com/Azure/go-autorest/autorest/to v0.4.0/go.mod h1:fE8iZBn7LQR7zH/9XU2NcPR4o9jEImooCeWJcYV/zLE= github.com/Azure/go-autorest/autorest/validation v0.3.1 h1:AgyqjAd94fwNAoTjl/WQXg4VvFeRFpO+UhNyRXqF1ac= github.com/Azure/go-autorest/autorest/validation v0.3.1/go.mod h1:yhLgjC0Wda5DYXl6JAsWyUe4KVNffhoDhG0zVzUMo3E= github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= -github.com/Azure/go-autorest/logger v0.2.0/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= github.com/Azure/go-autorest/logger v0.2.1 h1:IG7i4p/mDa2Ce4TRyAO8IHnVhAVF3RFU+ZtXWSmf4Tg= github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= @@ -108,27 +125,40 @@ github.com/CloudyKit/jet/v3 v3.0.0/go.mod h1:HKQPgSJmdK8hdoAbKUUWajkHyHo4RaU5rMd github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/Shopify/goreferrer v0.0.0-20181106222321-ec9c9a553398/go.mod h1:a1uqRtAwp2Xwc6WNPJEufxJ7fx3npB4UV/JOLmbu5I0= -github.com/Shopify/sarama v1.29.1 h1:wBAacXbYVLmWieEA/0X/JagDdCZ8NVFOfS6l6+2u5S0= -github.com/Shopify/sarama v1.29.1/go.mod h1:mdtqvCSg8JOxk8PmpTNGyo6wzd4BMm4QXSfDnTXmgkE= -github.com/Shopify/sarama v1.34.0 h1:j4zTaFHFnfvuV2fdLZyXqIg0Tu4Mzl9f064Z5/H+o4o= -github.com/Shopify/sarama v1.34.0/go.mod h1:V2ceE9UupUf4/oP1Z38SI49fAnD0/MtkqDDHvolIeeQ= -github.com/Shopify/toxiproxy v2.1.4+incompatible h1:TKdv8HiTLgE5wdJuEML90aBgNWsokNbMijUGhmcoBJc= -github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= -github.com/Shopify/toxiproxy/v2 v2.3.0/go.mod 
h1:KvQTtB6RjCJY4zqNJn7C7JDFgsG5uoHYDirfUfpIm0c= +github.com/Shopify/sarama v1.34.1 h1:pVCQO7BMAK3s1jWhgi5v1W6lwZ6Veiekfc2vsgRS06Y= +github.com/Shopify/sarama v1.34.1/go.mod h1:NZSNswsnStpq8TUdFaqnpXm2Do6KRzTIjdBdVlL1YRM= +github.com/Shopify/toxiproxy/v2 v2.4.0 h1:O1e4Jfvr/hefNTNu+8VtdEG5lSeamJRo4aKhMOKNM64= +github.com/Shopify/toxiproxy/v2 v2.4.0/go.mod h1:3ilnjng821bkozDRxNoo64oI/DKqM+rOyJzb564+bvg= +github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= +github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= +github.com/apparentlymart/go-textseg v1.0.0 h1:rRmlIsPEEhUTIKQb7T++Nz/A5Q6C9IuX2wFoYVvnCs0= +github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= +github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= +github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/armon/consul-api 
v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= -github.com/aws/aws-lambda-go v1.26.0 h1:6ujqBpYF7tdZcBvPIccs98SpeGfrt/UOVEiexfNIdHA= -github.com/aws/aws-lambda-go v1.26.0/go.mod h1:jJmlefzPfGnckuHdXX7/80O3BvUUi12XOkbv4w9SGLU= github.com/aws/aws-sdk-go v1.25.19/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= -github.com/aws/aws-sdk-go v1.40.22 h1:iit4tJ1hjL2GlNCrbE4aJza6jTmvEE2pDTnShct/yyY= -github.com/aws/aws-sdk-go v1.40.22/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q= +github.com/aws/aws-sdk-go v1.44.60 h1:KTTogelVR+4dWiIPl7eyxoxaJkziChON6/Y/hVfTipk= +github.com/aws/aws-sdk-go v1.44.60/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g= -github.com/caarlos0/env/v6 v6.6.2 h1:BypLXDWQTA32rS4UM7pBz+/0BOuvs6C7LSeQAxMwyvI= -github.com/caarlos0/env/v6 v6.6.2/go.mod h1:P0BVSgU9zfkxfSpFUs6KsO3uWR4k3Ac0P66ibAGTybM= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/caarlos0/env/v6 v6.9.3 h1:Tyg69hoVXDnpO5Qvpsu8EoquarbPyQb+YwExWHP8wWU= +github.com/caarlos0/env/v6 v6.9.3/go.mod h1:hvp/ryKXKipEkcuYjs9mI4bBCg+UI0Yhgm5Zu0ddvwc= github.com/cactus/go-statsd-client/statsd v0.0.0-20190922113730-52b467de415c/go.mod h1:D4RDtP0MffJ3+R36OkGul0LwJLIN8nRb0Ac6jZmJCmo= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= 
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -136,7 +166,12 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= @@ -144,9 +179,8 @@ github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3Ee github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk= github.com/cpuguy83/go-md2man v1.0.10/go.mod 
h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= -github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -158,9 +192,18 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZm github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= github.com/dimchansky/utfbom v1.1.0 h1:FcM3g+nofKgUteL8dm/UpdRXNC9KmADgTpLKsu0TRo4= github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= +github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= +github.com/dlclark/regexp2 v1.7.0 h1:7lJfhqlPssTb1WQx4yvTHN0uElPEv52sbaECrAQxjAo= +github.com/dlclark/regexp2 v1.7.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/dop251/goja v0.0.0-20211022113120-dc8c55024d06/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk= +github.com/dop251/goja v0.0.0-20220722151623-4765a9872229 h1:tZlMnVCISkoA6ibywWNsK2FtFcA5cZLQ+NE17WvSdW0= +github.com/dop251/goja v0.0.0-20220722151623-4765a9872229/go.mod h1:1jWwHOtOkEqsfX6tYsufUc7BBTuGHH2ekiJabpkN4CA= +github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod 
h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y= +github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8aVqWbuLRMHItjPUyqdj+HWPvnQe8V8y1nDpIbM= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/eapache/go-resiliency v1.2.0 h1:v7g92e/KSN71Rq7vSThKaWIq68fL4YHvWyiUKorFR1Q= github.com/eapache/go-resiliency v1.2.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= +github.com/eapache/go-resiliency v1.3.0 h1:RRL0nge+cWGlxXbUzJ7yMcq6w2XBEr19dCN6HECGaT0= +github.com/eapache/go-resiliency v1.3.0/go.mod h1:5yPzW0MIvSe0JDsv0v+DvcjEv2FyD6iZYSs1ZI+iQho= github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 h1:YEetp8/yCZMuEPMUDHG0CW/brkkEp8mzqk2+ODEitlw= github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= github.com/eapache/queue v1.1.0 h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc= @@ -173,35 +216,59 @@ github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5y github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/etcd-io/bbolt v1.3.3/go.mod h1:ZF2nL25h33cCyBtcyWeZ2/I3HQOfTP+0PIEvHjkjCrw= github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod 
h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= -github.com/form3tech-oss/jwt-go v3.2.5+incompatible h1:/l4kBbb4/vGSsdtB5nUe8L7B9mImVMaBPw9L/0TBHU8= -github.com/form3tech-oss/jwt-go v3.2.5+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= -github.com/frankban/quicktest v1.11.3 h1:8sXhOn0uLys67V8EsXLc6eszDs8VXWxL3iRvebPhedY= -github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/gavv/httpexpect v2.0.0+incompatible/go.mod h1:x+9tiU1YnrOvnB725RkpoLv1M62hOWzwo5OXotisrKc= github.com/getsentry/sentry-go v0.9.0/go.mod h1:kELm/9iCblqUYh+ZRML7PNdCvEuw24wBvJPYyi86cws= -github.com/getsentry/sentry-go v0.11.0 h1:qro8uttJGvNAMr5CLcFI9CHR0aDzXl0Vs3Pmw/oTPg8= -github.com/getsentry/sentry-go v0.11.0/go.mod h1:KBQIxiZAetw62Cj8Ri964vAEWVdgfaUCn30Q3bCvANo= +github.com/getsentry/sentry-go v0.13.0 h1:20dgTiUSfxRB/EhMPtxcL9ZEbM1ZdR+W/7f7NWD+xWo= +github.com/getsentry/sentry-go v0.13.0/go.mod h1:EOsfu5ZdvKPfeHYV6pTVQnsjfp30+XA7//UooKNumH0= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3/go.mod h1:VJ0WA2NBN22VlZ2dKZQPAPnyWw5XTlK1KymzLKsr59s= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= github.com/gin-gonic/gin v1.4.0/go.mod h1:OW2EZn3DO8Ln9oIKOvM++LBO+5UPHJJDH72/q/3rZdM= +github.com/gin-gonic/gin v1.7.7/go.mod h1:axIBovoeJpVj8S3BwE0uPMTeReE4+AfFtqpqaZ1qq1U= github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod 
h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98= github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w= github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= +github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU= +github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-test/deep v1.0.3 
h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= +github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= +github.com/goccy/go-json v0.9.10 h1:hCeNmprSNLB8B8vQKWl6DpuH0t60oEs+TAk9a7CScKc= +github.com/goccy/go-json v0.9.10/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= +github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-jwt/jwt/v4 v4.4.2 h1:rcc4lwaZgFMCZ5jxF9ABolDcIHdBytAFgqFPbSJQAYs= +github.com/golang-jwt/jwt/v4 v4.4.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -217,6 +284,7 @@ github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.1.0/go.mod 
h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -252,8 +320,10 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= @@ -273,17 +343,24 @@ github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210715191844-86eeefc3e471/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof 
v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210804190019-f964ff605595/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= +github.com/googleapis/enterprise-certificate-proxy v0.1.0 h1:zO8WHNx/MYiAKJ3d5spxZXZE6KHmIQGQcAzwUzV7qQw= +github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= +github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= +github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= +github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= +github.com/googleapis/gax-go/v2 v2.4.0 h1:dS9eYAjhrE2RjmzYw2XAPvcXfmcQLtFEQWn0CR82awk= +github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c= +github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= github.com/gopherjs/gopherjs 
v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= @@ -293,14 +370,28 @@ github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFb github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-immutable-radix v1.1.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-memdb v1.0.4/go.mod h1:LWQ8R70vPrS4OEY9k28D2z8/Zzyu34NVzeRibGAzHO0= +github.com/hashicorp/go-memdb v1.3.3 h1:oGfEWrFuxtIUF3W2q/Jzt6G85TrMk9ey6XfYLvVe1Wo= +github.com/hashicorp/go-memdb v1.3.3/go.mod h1:uBTr1oQbtuMgd1SSGoR8YV27eT3sBHbYiNm53bMpgSg= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-uuid v1.0.2 h1:cfejS+Tpcp13yd5nYHWDI6qVCny6wyX2Mt5SGur2IGE= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.2.0/go.mod 
h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/hcl/v2 v2.13.0 h1:0Apadu1w6M11dyGFxWnmhhcMjkbAiKCv7G1r/2QgCNc= +github.com/hashicorp/hcl/v2 v2.13.0/go.mod h1:e4z5nxYlWNPdDSNYX+ph14EvWYMFm3eP0zIUqPc2jr0= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= @@ -311,6 +402,8 @@ github.com/iris-contrib/go.uuid v2.0.0+incompatible/go.mod h1:iz2lgM/1UnEf1kP0L/ github.com/iris-contrib/jade v1.1.3/go.mod h1:H/geBymxJhShH5kecoiOCSssPX7QWYH7UaeZTSWddIk= github.com/iris-contrib/pongo2 v0.0.1/go.mod h1:Ssh+00+3GAZqSQb30AvBRNxBx7rf0GqwkjqxNd0u65g= github.com/iris-contrib/schema v0.0.1/go.mod h1:urYA3uvUNG1TIIjOSCzHr9/LmbQo8LrOcOqfqxa4hXw= +github.com/jarcoal/httpmock v1.0.4 h1:jp+dy/+nonJE4g4xbVtl9QdrUNbn6/3hDT5R4nDIZnA= +github.com/jarcoal/httpmock v1.0.4/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik= github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8= github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo= @@ -335,12 +428,15 @@ github.com/jpillora/backoff v1.0.0 
h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2E github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= github.com/kataras/golog v0.0.10/go.mod h1:yJ8YKCmyL+nWjERB90Qwn+bdyBZsaQwU3bTVFgkFIp8= github.com/kataras/iris/v12 v12.1.8/go.mod h1:LMYy4VlP67TQ3Zgriz8RE2h2kMZV2SgMYbq3UhfoFmE= @@ -350,45 +446,61 @@ github.com/kataras/sitemap v0.0.5/go.mod h1:KY2eugMKiPwsJgx7+U103YZehfvNGOXURubc github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.8.2/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.9.7/go.mod 
h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.12.2/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= -github.com/klauspost/compress v1.13.4 h1:0zhec2I8zGnjWcKyLl6i3gPqKANCCn5e9xmviEEeX6s= -github.com/klauspost/compress v1.13.4/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= -github.com/klauspost/compress v1.15.0 h1:xqfchp4whNFxn5A4XFyyYtitiWI8Hy5EW59jEwcyL6U= -github.com/klauspost/compress v1.15.0/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/klauspost/compress v1.15.6/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= +github.com/klauspost/compress v1.15.9 h1:wKRjX6JRtDdrE9qwa4b/Cip7ACOshUI4smpCQanqjSY= +github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= github.com/klauspost/cpuid v1.2.1/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 
h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= +github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/labstack/echo/v4 v4.1.11/go.mod h1:i541M3Fj6f76NZtHSj7TXnyM8n2gaodfvfxNnFqi74g= +github.com/labstack/echo/v4 v4.5.0/go.mod h1:czIriw4a0C1dFun+ObrXp7ok03xON0N1awStJ6ArI7Y= github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= -github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-sqlite3 v2.0.2+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= +github.com/mattn/go-sqlite3 v2.0.3+incompatible h1:gXHsfypPkaMZrKbD5209QV9jbUTJKjyR5WD3HYQSd+U= +github.com/mattn/go-sqlite3 v2.0.3+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod 
h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/mediocregopher/radix/v3 v3.4.2/go.mod h1:8FL3F6UQRXHXIBSPUs5h0RybMF8i4n7wVopoX3x7Bv8= github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= +github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= -github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod 
h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/myesui/uuid v1.0.0 h1:xCBmH4l5KuvLYc5L7AS7SZg9/jKdIFubM7OVoLqaQUI= github.com/myesui/uuid v1.0.0/go.mod h1:2CDfNgU0LR8mIdO8vdWd8i9gWWxLlcoIGGpSNgafq84= github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= @@ -399,23 +511,40 @@ github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/pierrec/lz4 v2.6.0+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= -github.com/pierrec/lz4 v2.6.1+incompatible h1:9UY3+iC23yxF0UfGaYrGplQ+79Rg+h/q9FV9ix19jjM= -github.com/pierrec/lz4 v2.6.1+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= -github.com/pierrec/lz4/v4 v4.1.14 h1:+fL8AQEZtz/ijeNnpduH0bROTu0O3NZAlPjQxGn8LwE= github.com/pierrec/lz4/v4 v4.1.14/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pierrec/lz4/v4 v4.1.15 h1:MO0/ucJhngq7299dKLwIMtgTfbkoSPF6AoMYDd8Q4q0= +github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 
h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod 
h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM= github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBOAvL+k= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= @@ -424,12 +553,16 @@ github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/schollz/closestmatch v2.1.0+incompatible/go.mod h1:RtP1ddjLong6gTkbtmuhtR2uUrrJOpYzYRvbcPAid+g= +github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sirupsen/logrus v1.8.1 
h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= +github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/smira/go-statsd v1.3.2 h1:1EeuzxNZ/TD9apbTOFSM9nulqfcsQFmT4u1A2DREabI= @@ -440,13 +573,16 @@ github.com/snowplow-devops/go-sentryhook v0.0.0-20210106082031-21bf7f9dac2a h1:9 github.com/snowplow-devops/go-sentryhook v0.0.0-20210106082031-21bf7f9dac2a/go.mod h1:7/jMxl0yrvgiUlv5L37fw6pql71aNh55sKQc4kBFj5s= github.com/snowplow-devops/kinsumer v1.3.0 h1:uN8PPG8EffKjcfTcDqsHWnnsTFvYGMU39XlDPULIQcA= github.com/snowplow-devops/kinsumer v1.3.0/go.mod h1:SebvcasLweQnOygk9SOFkM/JjBtXFviUxoAq19CwrHQ= -github.com/snowplow/snowplow-golang-analytics-sdk v0.1.0 h1:FA8xHSHzoshF3fJDK9tqUDnuBmyqTiGPRLvIaRQMk2I= -github.com/snowplow/snowplow-golang-analytics-sdk v0.1.0/go.mod h1:Z8ZW805JGCYhnq1wnHe2PIiamUnvoNtAtXPWNyS0mV8= +github.com/snowplow/snowplow-golang-analytics-sdk v0.3.0 h1:lkWd2JDVG8+X8UPJYdru2EgRW4w/TVnWCmKhW5lPJvc= +github.com/snowplow/snowplow-golang-analytics-sdk v0.3.0/go.mod h1:KCL+i2+Uj9lvSdknXOA7lBQoBUWGW6ovJgTao7Fkdxk= +github.com/snowplow/snowplow-golang-tracker/v2 v2.4.1 h1:bp1MynC4BkywqTfpt4wddqZxtN4U7d3UUqxjalcGR1s= +github.com/snowplow/snowplow-golang-tracker/v2 v2.4.1/go.mod h1:/74pOlgs8xon7CAWihi1peUflolbKSSy2Fu/UDF4PgI= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra 
v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -456,28 +592,32 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/twinj/uuid v1.0.0 h1:fzz7COZnDrXGTAOHGuUGYd6sG+JMq+AoE7+Jlu0przk= github.com/twinj/uuid v1.0.0/go.mod h1:mMgcE1RHFUFqe5AfiwlINXisXfDGro23fWdPUfOMjRY= github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= -github.com/urfave/cli v1.22.5 h1:lNq9sAHXK2qfdI8W+GRItjCEkI+2oR4d+MEHy1CKXoU= -github.com/urfave/cli v1.22.5/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/urfave/cli/v2 v2.2.0/go.mod 
h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ= +github.com/urfave/cli v1.22.9 h1:cv3/KhXGBGjEXLC4bH0sLuJ9BewaAbpk5oyMOveu4pw= +github.com/urfave/cli v1.22.9/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasthttp v1.6.0/go.mod h1:FstJa9V+Pj9vQ7OJie2qMHdwemEDaDiSdBnvPM1Su9w= github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= +github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= +github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= +github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= +github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= -github.com/xdg/scram v1.0.3 h1:nTadYh2Fs4BK2xdldEa2g5bbaZp0/+1nJMMPtPxS/to= -github.com/xdg/scram v1.0.3/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= +github.com/xdg/scram v1.0.5 h1:TuS0RFmt5Is5qm9Tm2SoD89OPqe4IRiFtyFY4iwWXsw= +github.com/xdg/scram v1.0.5/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= github.com/xdg/stringprep v1.0.3 h1:cmL5Enob4W83ti/ZHuZLuKD/xqJfus4fVPwE+/BDm+4= github.com/xdg/stringprep v1.0.3/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/xeipuuv/gojsonpointer 
v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= @@ -488,11 +628,20 @@ github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmv github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc= +github.com/yuin/gluamapper v0.0.0-20150323120927-d836955830e7 h1:noHsffKZsNfU38DwcXWEPldrTjIZ8FPNKx8mYMGnqjs= +github.com/yuin/gluamapper v0.0.0-20150323120927-d836955830e7/go.mod h1:bbMEM6aU1WDF1ErA5YJ0p91652pGv140gGw4Ww3RGp8= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/gopher-lua v0.0.0-20220504180219-658193537a64 h1:5mLPGnFdSsevFRFc9q3yYbBkB6tsm4aCwwQV/j1JQAQ= +github.com/yuin/gopher-lua v0.0.0-20220504180219-658193537a64/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= +github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= +github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= +github.com/zclconf/go-cty v1.10.0 h1:mp9ZXQeIcN8kAwuqorjH+Q+njbJKjLrvB2yIh4q7U+0= +github.com/zclconf/go-cty v1.10.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= +github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod 
h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -512,14 +661,15 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201112155050-0c6587e931a9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20210812204632-0ba0e8f03122 h1:AOT7vJYHE32m61R8d1WlcqhOO1AocesDsKpcMq+UOaA= -golang.org/x/crypto v0.0.0-20210812204632-0ba0e8f03122/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220214200702-86341886e292 h1:f+lwQ+GtmgoY+A2YaQxlSOnDjXcQ7ZRLWOHbC6HtRqE= +golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220517005047-85d78b3ac167/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa h1:zuSxTR4o9y82ebqCUJYNGJbGPo6sKVl54f/TVDObg1c= 
+golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -543,7 +693,6 @@ golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRu golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug= golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= @@ -556,11 +705,11 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.0 h1:UG21uOlmZabA4fW5i7ZX6bjw1xELEGg/ZLgZq9auk/Q= -golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= 
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -570,6 +719,7 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -598,12 +748,20 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod 
h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d h1:LO7XpTYMwTqxjLcGWPijK3vRXg1aWdlNOVOHRq45d7c= -golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211008194852-3b03d305991f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2 h1:NWy5+hlRbC7HK+PmcXVUmW1IMyFce7to56IUvhUFm7Y= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220617184016-355a448f1bc9/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b h1:PxfKdU9lEEDYjdIzOtC4qFWgkU2rGHdKlKowJSMN9h0= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 
v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -618,8 +776,15 @@ golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210810183815-faf39c7919d5 h1:Ati8dO7+U7mxpkPSxBZQEvzHVUYB/MqCklCN8ig5w/o= -golang.org/x/oauth2 v0.0.0-20210810183815-faf39c7919d5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220608161450-d0670ef3b1eb/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE= +golang.org/x/oauth2 v0.0.0-20220622183110-fd043fe589d2/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE= +golang.org/x/oauth2 v0.0.0-20220722155238-128564f6959c h1:q3gFqPqH7NVofKo3c3yETAP//pPI+G5mvB7qqj1Y5kY= +golang.org/x/oauth2 v0.0.0-20220722155238-128564f6959c/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync 
v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -630,16 +795,21 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 h1:uVc8UZUe6tr40fFVnUP5Oj+veunVezqYl9z7DYw9xzw= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -651,7 +821,9 @@ golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -663,6 +835,8 @@ golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -670,23 +844,44 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e h1:XMgFehsDnnLGtjvjOfqWSUzt0alpTR1RSEuznObga2c= -golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e h1:fLOSk5Q00efkSvAm+4xcoXD+RRmLmmulPn5I3Y9F2EM= +golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -702,7 +897,8 @@ golang.org/x/text v0.3.7/go.mod 
h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20220609170525-579cf78fd858/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181221001348-537d06c36207/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -756,13 +952,14 @@ golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.5 h1:ouewzE6p+/VEB31YYnTbEJdi8pFqKp4P4n85vwo3DHA= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors 
v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= @@ -788,9 +985,24 @@ google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59t google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= -google.golang.org/api v0.52.0/go.mod h1:Him/adpjt0sxtkWViy0b6xyKW/SD71CwdJ7HqJo7SrU= -google.golang.org/api v0.54.0 h1:ECJUVngj71QI6XEm7b1sAf8BljU5inEhMbKPR8Lxhhk= google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= +google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= +google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= +google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= +google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= +google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= +google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= +google.golang.org/api 
v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= +google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= +google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw= +google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= +google.golang.org/api v0.84.0/go.mod h1:NTsGnUFJMYROtiquksZHBWtHfeMC7iYthki7Eq3pa8o= +google.golang.org/api v0.85.0/go.mod h1:AqZf8Ep9uZ2pyTvgL+x0D3Zt0eoT9b5E8fmzfu6FO2g= +google.golang.org/api v0.86.0/go.mod h1:+Sem1dnrKlrXMR/X0bPnMWyluQe4RsNoYfmNLhOIkzw= +google.golang.org/api v0.88.0 h1:MPwxQRqpyskYhr2iNyfsQ8R06eeyhe7UEuR30p136ZQ= +google.golang.org/api v0.88.0/go.mod h1:+Sem1dnrKlrXMR/X0bPnMWyluQe4RsNoYfmNLhOIkzw= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -838,6 +1050,7 @@ google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210329143202-679c6ae281ee/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= @@ -846,13 +1059,41 @@ google.golang.org/genproto 
v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxH google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210721163202-f1cecdd8b78a/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210722135532-667f2b7c528f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210811021853-ddbe55d93216/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= -google.golang.org/genproto v0.0.0-20210813162853-db860fec028c h1:iLQakcwWG3k/++1q/46apVb1sUQ3IqIdn9yUE6eh/xA= google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= +google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod 
h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= +google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto 
v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220523171625-347a074981d8/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220608133413-ed9918b62aac/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220616135557-88e70c0c3a90/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220617124728-180714bec0ad/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220624142145-8cd45d7dbd1f/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220628213854-d9e0b6570c03/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220720214146-176da50484ac h1:EOa+Yrhx1C0O+4pHeXeWrCwdI0tWI6IfUU56Vebs9wQ= +google.golang.org/genproto v0.0.0-20220720214146-176da50484ac/go.mod h1:GkXuJDJ6aQ7lnJcRF+SJVgFdQhypqgl3LB1C9vabdRE= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -877,8 +1118,15 @@ google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQ google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.40.0 h1:AGJ0Ih4mHjSeibYkFGh1dD9KJ/eOtZ93I6hoHhukQ5Q= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= 
+google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.47.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.48.0 h1:rQOsyJ/8+ufEDJd/Gdsz7HG220Mh9HAhFHRGnIjda0w= +google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -892,11 +1140,14 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod 
h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= @@ -908,14 +1159,17 @@ gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce/go.mod h1:yeKp02qBN3iKW1OzL3M gopkg.in/stretchr/testify.v1 v1.2.2 h1:yhQC6Uy5CqibAIlk1wlusa/MJ3iAN49/BsR/dCCKz3M= gopkg.in/stretchr/testify.v1 v1.2.2/go.mod h1:QI5V/q6UbPmuhtm10CaFZxED9NreB8PnFYN9JcR6TxU= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20191120175047-4206685974f2/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b 
h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -925,6 +1179,8 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +layeh.com/gopher-json v0.0.0-20201124131017-552bb3c4c3bf h1:rRz0YsF7VXj9fXRF6yQgFI7DzST+hsI3TeFSGupntu0= +layeh.com/gopher-json v0.0.0-20201124131017-552bb3c4c3bf/go.mod h1:ivKkcY8Zxw5ba0jldhZCYYQfGdb2K6u9tbYK1AwMIBc= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/integration/docker-compose.yml b/integration/docker-compose.yml index 85bf28a9..8dd2058c 100644 --- a/integration/docker-compose.yml +++ b/integration/docker-compose.yml @@ -16,3 +16,9 @@ services: environment: - SERVICES=sqs,kinesis,dynamodb - DATA_DIR=/tmp/localstack/data + pubsub: + image: bigtruedata/gcloud-pubsub-emulator + command: start --host-port 0.0.0.0:8432 + ports: + - "0.0.0.0:8432:8432" + diff --git a/integration/http/localhost.key b/integration/http/localhost.key index 93424361..11dec0e6 100644 --- a/integration/http/localhost.key +++ b/integration/http/localhost.key @@ -24,4 +24,4 @@ itxdXvoomlhwDKZv0Y+vPm4V9SBx/36ubf6bM6vKoZTSuv2+ktA/uInFW+y/1mLH KD1JlQKBgQDNZry00fN3iJ9stUNEYVaAtXQ1a0/LY/r2NuC04IwemwOyFUvzY7G9 sNXeIxTYjQ9OCp9+EE1n6Q3yg63MmTrNuD51f0h2tftokYBaoYBny34HuQf0N7qF laOI6yiORZ4eGdYrpCq+q+J0fAkRca0M4Nq/lDEw4bric38WpPxV3Q== ------END RSA PRIVATE KEY----- 
+-----END RSA PRIVATE KEY----- \ No newline at end of file diff --git a/integration/http/rootCA.crt b/integration/http/rootCA.crt index 79bbfc13..df8b1a27 100644 --- a/integration/http/rootCA.crt +++ b/integration/http/rootCA.crt @@ -27,4 +27,4 @@ Ggg1Qo5z0+XT2l+2KhOC02ydgHV1/tT6cVVX3ZkBvvb/WPHmVp9bT8zqeJzrMQkM 9DaKEyZKw+LYy7sZp4p4giE/JAzBLidsfIdznhYguPjKgboPMfiJvapzyZPEJsDu ShYb5uIlytHwAVlGiUgjx+z/YXBQN1vWsCm5pVL4RGdXdcq5HZzZRaJxAUBrfmiU uCJPEnUJ1emIqakgSy3yA+9WtQ== ------END CERTIFICATE----- +-----END CERTIFICATE----- \ No newline at end of file diff --git a/pkg/common/helpers.go b/pkg/common/helpers.go index 0c44df1a..8a962424 100644 --- a/pkg/common/helpers.go +++ b/pkg/common/helpers.go @@ -7,8 +7,11 @@ package common import ( + "crypto/tls" + "crypto/x509" "encoding/base64" "fmt" + "io/ioutil" "os" "time" @@ -17,34 +20,41 @@ import ( "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/sts" "github.com/pkg/errors" - "github.com/twinj/uuid" ) -// --- Cloud Helpers +// DeleteTemporaryDir deletes the temp directory we created to store credentials +func DeleteTemporaryDir() error { + err := os.RemoveAll(`tmp_replicator`) + return err +} -// GetGCPServiceAccountFromBase64 will take a base64 encoded string -// and attempt to create a JSON file on disk within the /tmp directory -// for later use in creating GCP clients. 
-func GetGCPServiceAccountFromBase64(serviceAccountB64 string) (string, error) { - sDec, err := base64.StdEncoding.DecodeString(serviceAccountB64) - if err != nil { - return "", errors.Wrap(err, "Failed to Base64 decode service account") +// DecodeB64ToFile takes a B64-encoded credential, decodes it, and writes it to a file +func DecodeB64ToFile(b64String, filename string) error { + tls, decodeErr := base64.StdEncoding.DecodeString(b64String) + if decodeErr != nil { + return errors.Wrap(decodeErr, "Failed to Base64 decode for creating file "+filename) } - targetFile := fmt.Sprintf("/tmp/stream-replicator-service-account-%s.json", uuid.NewV4().String()) - - f, err := os.Create(targetFile) + err := createTempDir(`tmp_replicator`) if err != nil { - return "", errors.Wrap(err, fmt.Sprintf("Failed to create target file '%s' for service account", targetFile)) + return err } - defer f.Close() - _, err2 := f.WriteString(string(sDec)) - if err2 != nil { - return "", errors.Wrap(err, fmt.Sprintf("Failed to write decoded service account to target file '%s'", targetFile)) + f, createErr := os.Create(filename) + if createErr != nil { + return errors.Wrap(createErr, fmt.Sprintf("Failed to create file '%s'", filename)) + } + + _, writeErr := f.WriteString(string(tls)) + if writeErr != nil { + return errors.Wrap(decodeErr, fmt.Sprintf("Failed to write decoded base64 string to target file '%s'", filename)) + } + err = f.Close() + if err != nil { + return err } - return targetFile, nil + return nil } // GetAWSSession is a general tool to handle generating an AWS session @@ -87,3 +97,47 @@ func GetAverageFromDuration(sum time.Duration, total int64) time.Duration { } return time.Duration(0) } + +func createTempDir(dirName string) error { + dir, statErr := os.Stat(dirName) + if statErr != nil && !errors.Is(statErr, os.ErrNotExist) { + return errors.Wrap(statErr, fmt.Sprintf("Failed checking for existence of %s dir", dirName)) + } + + if dir == nil { + dirErr := os.Mkdir(dirName, 0700) 
+ if dirErr != nil && !errors.Is(dirErr, os.ErrExist) { + return errors.Wrap(dirErr, fmt.Sprintf("Failed to create %s directory", dirName)) + } + } + return nil +} + +// CreateTLSConfiguration creates a TLS configuration for use in a target +func CreateTLSConfiguration(certFile string, keyFile string, caFile string, skipVerify bool) (*tls.Config, error) { + if certFile == "" || keyFile == "" { + return nil, nil + } + + cert, err := tls.LoadX509KeyPair(certFile, keyFile) + if err != nil { + return nil, err + } + + caCert, err := ioutil.ReadFile(caFile) + if err != nil { + return nil, err + } + + caCertPool, err := x509.SystemCertPool() + if err != nil { + return nil, err + } + caCertPool.AppendCertsFromPEM(caCert) + + return &tls.Config{ + Certificates: []tls.Certificate{cert}, + RootCAs: caCertPool, + InsecureSkipVerify: skipVerify, + }, nil +} diff --git a/pkg/common/helpers_test.go b/pkg/common/helpers_test.go index ee06b640..8257b87b 100644 --- a/pkg/common/helpers_test.go +++ b/pkg/common/helpers_test.go @@ -7,39 +7,17 @@ package common import ( - "strings" + "crypto/tls" "testing" "time" "github.com/stretchr/testify/assert" ) -// --- Cloud Helpers - -func TestGetGCPServiceAccountFromBase64(t *testing.T) { - assert := assert.New(t) - - path, err := GetGCPServiceAccountFromBase64("ewogICJoZWxsbyI6IndvcmxkIgp9") - - assert.NotEqual(path, "") - assert.Nil(err) - assert.True(strings.HasPrefix(path, "/tmp/stream-replicator-service-account-")) - assert.True(strings.HasSuffix(path, ".json")) -} - -func TestGetGCPServiceAccountFromBase64_NotBase64(t *testing.T) { - assert := assert.New(t) - - path, err := GetGCPServiceAccountFromBase64("helloworld") - - assert.Equal(path, "") - assert.NotNil(err) - assert.True(strings.HasPrefix(err.Error(), "Failed to Base64 decode service account: ")) -} - func TestGetAWSSession(t *testing.T) { assert := assert.New(t) + t.Setenv("AWS_SHARED_CREDENTIALS_FILE", "") sess, cfg, accID, err := GetAWSSession("us-east-1", "") 
assert.NotNil(sess) assert.Nil(cfg) @@ -51,6 +29,9 @@ func TestGetAWSSession(t *testing.T) { assert.NotNil(cfg2) assert.Nil(accID2) assert.NotNil(err2) + if err != nil { + assert.Equal("InvalidParameter: 1 validation error(s) found.\n- minimum field size of 20, AssumeRoleInput.RoleArn.\n", err2.Error()) + } } // --- Generic Helpers @@ -64,3 +45,12 @@ func TestGetAverageFromDuration(t *testing.T) { duration2 := GetAverageFromDuration(time.Duration(10)*time.Second, 2) assert.Equal(time.Duration(5)*time.Second, duration2) } + +func TestCreateTLSConfiguration(t *testing.T) { + assert := assert.New(t) + + conf, err := CreateTLSConfiguration(`../../integration/http/localhost.crt`, `../../integration/http/localhost.key`, `../../integration/http/rootCA.crt`, false) + + assert.Nil(err) + assert.IsType(tls.Config{}, *conf) +} diff --git a/pkg/models/filter_result.go b/pkg/models/filter_result.go index 23c7a1f7..97ef3c15 100644 --- a/pkg/models/filter_result.go +++ b/pkg/models/filter_result.go @@ -26,15 +26,15 @@ type FilterResult struct { AvgFilterLatency time.Duration } -// NewFilterResult uses the current time as the timeOfFilter and calls NewFilterResultWithTime +// NewFilterResult uses the current time as the timeOfFilter and calls newFilterResultWithTime func NewFilterResult(filtered []*Message) *FilterResult { - return NewFilterResultWithTime(filtered, time.Now().UTC()) + return newFilterResultWithTime(filtered, time.Now().UTC()) } -// NewFilterResultWithTime builds a result structure to return from a filtered message slice +// newFilterResultWithTime builds a result structure to return from a filtered message slice // attempt which contains the filtered message count as well as several // derived latency measures. 
-func NewFilterResultWithTime(filtered []*Message, timeOfFilter time.Time) *FilterResult { +func newFilterResultWithTime(filtered []*Message, timeOfFilter time.Time) *FilterResult { r := FilterResult{ FilteredCount: int64(len(filtered)), } diff --git a/pkg/models/filter_result_test.go b/pkg/models/filter_result_test.go index eb0520eb..591e3560 100644 --- a/pkg/models/filter_result_test.go +++ b/pkg/models/filter_result_test.go @@ -29,7 +29,7 @@ func TestNewFilterResult_EmptyWithoutTime(t *testing.T) { func TestNewFilterResult_EmptyWithTime(t *testing.T) { assert := assert.New(t) - r := NewFilterResultWithTime(nil, time.Now().UTC()) + r := newFilterResultWithTime(nil, time.Now().UTC()) assert.NotNil(r) assert.Equal(int64(0), r.FilteredCount) @@ -61,7 +61,7 @@ func TestNewFilterResult_WithMessages(t *testing.T) { }, } - r := NewFilterResultWithTime(filtered, timeNow) + r := newFilterResultWithTime(filtered, timeNow) assert.NotNil(r) assert.Equal(int64(2), r.FilteredCount) diff --git a/pkg/models/message_test.go b/pkg/models/message_test.go index efd01de4..95ba8faf 100644 --- a/pkg/models/message_test.go +++ b/pkg/models/message_test.go @@ -27,7 +27,9 @@ func TestMessageString(t *testing.T) { msg.SetError(errors.New("failure")) assert.NotNil(msg.GetError()) - assert.Equal("failure", msg.GetError().Error()) + if msg.GetError() != nil { + assert.Equal("failure", msg.GetError().Error()) + } } func TestGetChunkedMessages(t *testing.T) { diff --git a/pkg/models/observer_buffer.go b/pkg/models/observer_buffer.go index a67f8a28..1598284d 100644 --- a/pkg/models/observer_buffer.go +++ b/pkg/models/observer_buffer.go @@ -176,7 +176,7 @@ func (b *ObserverBuffer) String() string { b.MaxMsgLatency.Milliseconds(), b.MaxFilterLatency.Milliseconds(), b.MaxTransformLatency.Milliseconds(), - b.SumTransformLatency.Milliseconds(), // Reporting sums for rc version in order to compute averages in load tests. 
+ b.SumTransformLatency.Milliseconds(), // Sums are reported to allow us to compute averages across multi-instance deployments b.SumProcLatency.Milliseconds(), b.SumMsgLatency.Milliseconds(), ) diff --git a/pkg/models/observer_buffer_test.go b/pkg/models/observer_buffer_test.go index e1f7bedd..aed0f979 100644 --- a/pkg/models/observer_buffer_test.go +++ b/pkg/models/observer_buffer_test.go @@ -68,7 +68,7 @@ func TestObserverBuffer(t *testing.T) { b.AppendWriteInvalid(r) b.AppendWriteInvalid(nil) - fr := NewFilterResultWithTime(filtered, timeNow) + fr := newFilterResultWithTime(filtered, timeNow) b.AppendFiltered(fr) @@ -105,3 +105,134 @@ func TestObserverBuffer(t *testing.T) { assert.Equal("TargetResults:2,MsgFiltered:1,MsgSent:4,MsgFailed:2,OversizedTargetResults:2,OversizedMsgSent:4,OversizedMsgFailed:2,InvalidTargetResults:2,InvalidMsgSent:4,InvalidMsgFailed:2,MaxProcLatency:600000,MaxMsgLatency:4200000,MaxFilterLatency:600000,MaxTransformLatency:180000,SumTransformLatency:720000,SumProcLatency:2520000,SumMsgLatency:18000000", b.String()) } + +// TestObserverBuffer_Basic is a basic version of the above test, stripping away all but one event +// It was created in order to provide a simpler way to investigate whether logging may be misreporting latency +func TestObserverBuffer_Basic(t *testing.T) { + assert := assert.New(t) + + b := ObserverBuffer{} + assert.NotNil(b) + + timeNow := time.Now().UTC() + + sent := []*Message{ + { + Data: []byte("Baz"), + PartitionKey: "partition1", + TimeCreated: timeNow.Add(time.Duration(-50) * time.Minute), + TimePulled: timeNow.Add(time.Duration(-4) * time.Minute), + TimeTransformed: timeNow.Add(time.Duration(-2) * time.Minute), + }, + } + + r := NewTargetWriteResultWithTime(sent, nil, nil, nil, timeNow) + + b.AppendWrite(r) + b.AppendWrite(nil) + // b.AppendWriteOversized(r) + b.AppendWriteOversized(nil) + // b.AppendWriteInvalid(r) + b.AppendWriteInvalid(nil) + + fr := newFilterResultWithTime(nil, timeNow) + + 
b.AppendFiltered(fr) + + assert.Equal(int64(1), b.TargetResults) + assert.Equal(int64(1), b.MsgSent) + assert.Equal(int64(0), b.MsgFailed) + assert.Equal(int64(1), b.MsgTotal) + + assert.Equal(int64(0), b.MsgFiltered) + + assert.Equal(int64(0), b.OversizedTargetResults) + assert.Equal(int64(0), b.OversizedMsgSent) + assert.Equal(int64(0), b.OversizedMsgFailed) + assert.Equal(int64(0), b.OversizedMsgTotal) + + assert.Equal(int64(0), b.InvalidTargetResults) + assert.Equal(int64(0), b.InvalidMsgSent) + assert.Equal(int64(0), b.InvalidMsgFailed) + assert.Equal(int64(0), b.InvalidMsgTotal) + + assert.Equal(time.Duration(4)*time.Minute, b.MaxProcLatency) + assert.Equal(time.Duration(4)*time.Minute, b.MinProcLatency) + assert.Equal(time.Duration(4)*time.Minute, b.GetAvgProcLatency()) + assert.Equal(time.Duration(50)*time.Minute, b.MaxMsgLatency) + assert.Equal(time.Duration(50)*time.Minute, b.MinMsgLatency) + assert.Equal(time.Duration(50)*time.Minute, b.GetAvgMsgLatency()) + assert.Equal(time.Duration(2)*time.Minute, b.MaxTransformLatency) + assert.Equal(time.Duration(2)*time.Minute, b.MinTransformLatency) + assert.Equal(time.Duration(2)*time.Minute, b.GetAvgTransformLatency()) + + assert.Equal(time.Duration(0), b.MaxFilterLatency) + assert.Equal(time.Duration(0), b.MinFilterLatency) + assert.Equal(time.Duration(0), b.GetAvgFilterLatency()) + + assert.Equal("TargetResults:1,MsgFiltered:0,MsgSent:1,MsgFailed:0,OversizedTargetResults:0,OversizedMsgSent:0,OversizedMsgFailed:0,InvalidTargetResults:0,InvalidMsgSent:0,InvalidMsgFailed:0,MaxProcLatency:240000,MaxMsgLatency:3000000,MaxFilterLatency:0,MaxTransformLatency:120000,SumTransformLatency:120000,SumProcLatency:240000,SumMsgLatency:3000000", b.String()) +} + +// TestObserverBuffer_Basic is a basic version of the above test, stripping away all but one event. +// It exists purely to simplify reasoning through bugs. 
+func TestObserverBuffer_BasicNoTransform(t *testing.T) { + assert := assert.New(t) + + b := ObserverBuffer{} + assert.NotNil(b) + + timeNow := time.Now().UTC() + + sent := []*Message{ + { + Data: []byte("Baz"), + PartitionKey: "partition1", + TimeCreated: timeNow.Add(time.Duration(-50) * time.Minute), + TimePulled: timeNow.Add(time.Duration(-4) * time.Minute), + }, + } + + r := NewTargetWriteResultWithTime(sent, nil, nil, nil, timeNow) + + b.AppendWrite(r) + b.AppendWrite(nil) + b.AppendWriteOversized(nil) + b.AppendWriteInvalid(nil) + + fr := newFilterResultWithTime(nil, timeNow) + + b.AppendFiltered(fr) + + assert.Equal(int64(1), b.TargetResults) + assert.Equal(int64(1), b.MsgSent) + assert.Equal(int64(0), b.MsgFailed) + assert.Equal(int64(1), b.MsgTotal) + + assert.Equal(int64(0), b.MsgFiltered) + + assert.Equal(int64(0), b.OversizedTargetResults) + assert.Equal(int64(0), b.OversizedMsgSent) + assert.Equal(int64(0), b.OversizedMsgFailed) + assert.Equal(int64(0), b.OversizedMsgTotal) + + assert.Equal(int64(0), b.InvalidTargetResults) + assert.Equal(int64(0), b.InvalidMsgSent) + assert.Equal(int64(0), b.InvalidMsgFailed) + assert.Equal(int64(0), b.InvalidMsgTotal) + + assert.Equal(time.Duration(4)*time.Minute, b.MaxProcLatency) + assert.Equal(time.Duration(4)*time.Minute, b.MinProcLatency) + assert.Equal(time.Duration(4)*time.Minute, b.GetAvgProcLatency()) + assert.Equal(time.Duration(50)*time.Minute, b.MaxMsgLatency) + assert.Equal(time.Duration(50)*time.Minute, b.MinMsgLatency) + assert.Equal(time.Duration(50)*time.Minute, b.GetAvgMsgLatency()) + assert.Equal(time.Duration(0), b.MaxTransformLatency) + assert.Equal(time.Duration(0), b.MinTransformLatency) + assert.Equal(time.Duration(0), b.GetAvgTransformLatency()) + + assert.Equal(time.Duration(0), b.MaxFilterLatency) + assert.Equal(time.Duration(0), b.MinFilterLatency) + assert.Equal(time.Duration(0), b.GetAvgFilterLatency()) + + 
assert.Equal("TargetResults:1,MsgFiltered:0,MsgSent:1,MsgFailed:0,OversizedTargetResults:0,OversizedMsgSent:0,OversizedMsgFailed:0,InvalidTargetResults:0,InvalidMsgSent:0,InvalidMsgFailed:0,MaxProcLatency:240000,MaxMsgLatency:3000000,MaxFilterLatency:0,MaxTransformLatency:0,SumTransformLatency:0,SumProcLatency:240000,SumMsgLatency:3000000", b.String()) +} diff --git a/pkg/models/target_write_result.go b/pkg/models/target_write_result.go index 08ab9604..ebfd0182 100644 --- a/pkg/models/target_write_result.go +++ b/pkg/models/target_write_result.go @@ -98,7 +98,10 @@ func NewTargetWriteResultWithTime(sent []*Message, failed []*Message, oversized } sumMessageLatency += messageLatency - transformLatency := msg.TimeTransformed.Sub(msg.TimePulled) + var transformLatency time.Duration + if !msg.TimeTransformed.IsZero() { + transformLatency = msg.TimeTransformed.Sub(msg.TimePulled) + } if r.MaxTransformLatency < transformLatency { r.MaxTransformLatency = transformLatency } diff --git a/pkg/models/target_write_result_test.go b/pkg/models/target_write_result_test.go index 9a156b9f..1853a9f0 100644 --- a/pkg/models/target_write_result_test.go +++ b/pkg/models/target_write_result_test.go @@ -13,6 +13,7 @@ import ( "github.com/stretchr/testify/assert" ) +// TestNewTargetWriteResult_EmptyWithoutTime tests that an empty targetWriteResult with no timings will report 0s across the board func TestNewTargetWriteResult_EmptyWithoutTime(t *testing.T) { assert := assert.New(t) @@ -36,6 +37,7 @@ func TestNewTargetWriteResult_EmptyWithoutTime(t *testing.T) { assert.Equal(time.Duration(0), r.AvgTransformLatency) } +// TestNewTargetWriteResult_EmptyWithTime tests that an empty targetWriteResult with no a provided timestamp will report 0s across the board func TestNewTargetWriteResult_EmptyWithTime(t *testing.T) { assert := assert.New(t) @@ -59,6 +61,7 @@ func TestNewTargetWriteResult_EmptyWithTime(t *testing.T) { assert.Equal(time.Duration(0), r.AvgTransformLatency) } +// 
TestNewTargetWriteResult_WithMessages tests that reporting of statistics is as it should be when we have all data func TestNewTargetWriteResult_WithMessages(t *testing.T) { assert := assert.New(t) @@ -159,3 +162,49 @@ func TestNewTargetWriteResult_WithMessages(t *testing.T) { assert.Equal(time.Duration(1)*time.Minute, r3.MinTransformLatency) assert.Equal(time.Duration(3)*time.Minute, r3.AvgTransformLatency) } + +// TestNewTargetWriteResult_NoTransformation tests that reporting of statistics is as it should be when we don't have a timeTransformed +func TestNewTargetWriteResult_NoTransformation(t *testing.T) { + assert := assert.New(t) + + timeNow := time.Now().UTC() + + sent := []*Message{ + { + Data: []byte("Baz"), + PartitionKey: "partition1", + TimeCreated: timeNow.Add(time.Duration(-50) * time.Minute), + TimePulled: timeNow.Add(time.Duration(-4) * time.Minute), + }, + { + Data: []byte("Bar"), + PartitionKey: "partition2", + TimeCreated: timeNow.Add(time.Duration(-70) * time.Minute), + TimePulled: timeNow.Add(time.Duration(-7) * time.Minute), + }, + } + failed := []*Message{ + { + Data: []byte("Foo"), + PartitionKey: "partition3", + TimeCreated: timeNow.Add(time.Duration(-30) * time.Minute), + TimePulled: timeNow.Add(time.Duration(-10) * time.Minute), + }, + } + + r := NewTargetWriteResultWithTime(sent, failed, nil, nil, timeNow) + assert.NotNil(r) + + assert.Equal(int64(2), r.SentCount) + assert.Equal(int64(1), r.FailedCount) + assert.Equal(int64(3), r.Total()) + assert.Equal(time.Duration(10)*time.Minute, r.MaxProcLatency) + assert.Equal(time.Duration(4)*time.Minute, r.MinProcLatency) + assert.Equal(time.Duration(7)*time.Minute, r.AvgProcLatency) + assert.Equal(time.Duration(70)*time.Minute, r.MaxMsgLatency) + assert.Equal(time.Duration(30)*time.Minute, r.MinMsgLatency) + assert.Equal(time.Duration(50)*time.Minute, r.AvgMsgLatency) + assert.Equal(time.Duration(0), r.MaxTransformLatency) + assert.Equal(time.Duration(0), r.MinTransformLatency) + 
assert.Equal(time.Duration(0), r.AvgTransformLatency) +} diff --git a/pkg/models/transformation_result_test.go b/pkg/models/transformation_result_test.go new file mode 100644 index 00000000..4ebc1bb0 --- /dev/null +++ b/pkg/models/transformation_result_test.go @@ -0,0 +1,39 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package models + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +// TestNewTransformationResult test NewTransformationResult. +// It doesn't do a whole lot so we don't need much here. +func TestNewTransformationResult(t *testing.T) { + assert := assert.New(t) + + msgs := []*Message{ + { + Data: []byte("Baz"), + PartitionKey: "partition1", + }, + { + Data: []byte("Bar"), + PartitionKey: "partition2", + }, + } + + res := NewTransformationResult(msgs, msgs, msgs) + + assert.Equal(int64(2), res.ResultCount) + assert.Equal(int64(2), res.FilteredCount) + assert.Equal(int64(2), res.InvalidCount) + assert.Equal(msgs, res.Result) + assert.Equal(msgs, res.Filtered) + assert.Equal(msgs, res.Invalid) +} diff --git a/pkg/source/kinesis/kinesis_source.go b/pkg/source/kinesis/kinesis_source.go index 586acc1d..70a76490 100644 --- a/pkg/source/kinesis/kinesis_source.go +++ b/pkg/source/kinesis/kinesis_source.go @@ -21,13 +21,22 @@ import ( "github.com/twinj/uuid" "github.com/twitchscience/kinsumer" - config "github.com/snowplow-devops/stream-replicator/config" "github.com/snowplow-devops/stream-replicator/pkg/common" "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceconfig" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) +// configuration configures the source for records pulled +type configuration struct { + StreamName string `hcl:"stream_name" env:"SOURCE_KINESIS_STREAM_NAME"` + Region string 
`hcl:"region" env:"SOURCE_KINESIS_REGION"` + AppName string `hcl:"app_name" env:"SOURCE_KINESIS_APP_NAME"` + RoleARN string `hcl:"role_arn,optional" env:"SOURCE_KINESIS_ROLE_ARN"` + StartTimestamp string `hcl:"start_timestamp,optional" env:"SOURCE_KINESIS_START_TIMESTAMP"` // Timestamp for the kinesis shard iterator to begin processing. Format YYYY-MM-DD HH:MM:SS.MS (miliseconds optional) + ConcurrentWrites int `hcl:"concurrent_writes,optional" env:"SOURCE_CONCURRENT_WRITES"` +} + // --- Kinesis source // kinesisSource holds a new client for reading messages from kinesis @@ -45,14 +54,14 @@ type kinesisSource struct { // configFunctionGeneratorWithInterfaces generates the kinesis Source Config function, allowing you // to provide a Kinesis + DynamoDB client directly to allow for mocking and localstack usage -func configFunctionGeneratorWithInterfaces(kinesisClient kinesisiface.KinesisAPI, dynamodbClient dynamodbiface.DynamoDBAPI, awsAccountID string) func(c *config.Config) (sourceiface.Source, error) { +func configFunctionGeneratorWithInterfaces(kinesisClient kinesisiface.KinesisAPI, dynamodbClient dynamodbiface.DynamoDBAPI, awsAccountID string) func(c *configuration) (sourceiface.Source, error) { // Return a function which returns the source - return func(c *config.Config) (sourceiface.Source, error) { + return func(c *configuration) (sourceiface.Source, error) { // Handle iteratorTstamp if provided var iteratorTstamp time.Time var tstampParseErr error - if c.Sources.Kinesis.StartTimestamp != "" { - iteratorTstamp, tstampParseErr = time.Parse("2006-01-02 15:04:05.999", c.Sources.Kinesis.StartTimestamp) + if c.StartTimestamp != "" { + iteratorTstamp, tstampParseErr = time.Parse("2006-01-02 15:04:05.999", c.StartTimestamp) if tstampParseErr != nil { return nil, errors.Wrap(tstampParseErr, fmt.Sprintf("Failed to parse provided value for SOURCE_KINESIS_START_TIMESTAMP: %v", iteratorTstamp)) } @@ -62,17 +71,17 @@ func configFunctionGeneratorWithInterfaces(kinesisClient 
kinesisiface.KinesisAPI kinesisClient, dynamodbClient, awsAccountID, - c.Sources.ConcurrentWrites, - c.Sources.Kinesis.Region, - c.Sources.Kinesis.StreamName, - c.Sources.Kinesis.AppName, + c.ConcurrentWrites, + c.Region, + c.StreamName, + c.AppName, &iteratorTstamp) } } // configFunction returns a kinesis source from a config -func configFunction(c *config.Config) (sourceiface.Source, error) { - awsSession, awsConfig, awsAccountID, err := common.GetAWSSession(c.Sources.Kinesis.Region, c.Sources.Kinesis.RoleARN) +func configFunction(c *configuration) (sourceiface.Source, error) { + awsSession, awsConfig, awsAccountID, err := common.GetAWSSession(c.Region, c.RoleARN) if err != nil { return nil, err } @@ -87,8 +96,42 @@ func configFunction(c *config.Config) (sourceiface.Source, error) { return sourceConfigFunction(c) } +// The adapter type is an adapter for functions to be used as +// pluggable components for Kinesis Source. Implements the Pluggable interface. +type adapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f adapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f adapter) ProvideDefault() (interface{}, error) { + // Provide defaults + cfg := &configuration{ + ConcurrentWrites: 50, + } + + return cfg, nil +} + +// adapterGenerator returns a Kinesis Source adapter. +func adapterGenerator(f func(c *configuration) (sourceiface.Source, error)) adapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*configuration) + if !ok { + return nil, errors.New("invalid input, expected configuration for kinesis source") + } + + return f(cfg) + } +} + // ConfigPair is passed to configuration to determine when to build a Kinesis source. 
-var ConfigPair = sourceconfig.ConfigPair{SourceName: "kinesis", SourceConfigFunc: configFunction} +var ConfigPair = sourceconfig.ConfigPair{ + Name: "kinesis", + Handle: adapterGenerator(configFunction), +} // --- Kinsumer overrides diff --git a/pkg/source/kinesis/kinesis_source_test.go b/pkg/source/kinesis/kinesis_source_test.go index 4ae312e8..be6e08da 100644 --- a/pkg/source/kinesis/kinesis_source_test.go +++ b/pkg/source/kinesis/kinesis_source_test.go @@ -7,14 +7,18 @@ package kinesissource import ( + "errors" "fmt" "os" + "path/filepath" + "reflect" "testing" "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/kinesis" "github.com/aws/aws-sdk-go/service/kinesis/kinesisiface" + "github.com/davecgh/go-spew/spew" "github.com/stretchr/testify/assert" config "github.com/snowplow-devops/stream-replicator/config" @@ -22,6 +26,64 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/testutil" ) +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} + +func TestNewKinesisSourceWithInterfaces_Success(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + // Since this requires a localstack client (until we implement a mock and make unit tests), + // We'll only run it with the integration tests for the time being. 
+ assert := assert.New(t) + + // Set up localstack resources + kinesisClient := testutil.GetAWSLocalstackKinesisClient() + dynamodbClient := testutil.GetAWSLocalstackDynamoDBClient() + + streamName := "kinesis-source-integration-1" + createErr := testutil.CreateAWSLocalstackKinesisStream(kinesisClient, streamName) + if createErr != nil { + t.Fatal(createErr) + } + defer testutil.DeleteAWSLocalstackKinesisStream(kinesisClient, streamName) + + appName := "integration" + ddbErr := testutil.CreateAWSLocalstackDynamoDBTables(dynamodbClient, appName) + if ddbErr != nil { + t.Fatal(ddbErr) + } + + defer testutil.DeleteAWSLocalstackDynamoDBTables(dynamodbClient, appName) + + source, err := newKinesisSourceWithInterfaces(kinesisClient, dynamodbClient, "00000000000", 15, testutil.AWSLocalstackRegion, streamName, appName, nil) + + assert.IsType(&kinesisSource{}, source) + assert.Nil(err) +} + +// newKinesisSourceWithInterfaces should fail if we can't reach Kinesis and DDB, commented out this test until we look into https://github.com/snowplow-devops/stream-replicator/issues/151 +/* +func TestNewKinesisSourceWithInterfaces_Failure(t *testing.T) { + // Unlike the success test, we don't require anything to exist for this one + assert := assert.New(t) + + // Set up localstack resources + kinesisClient := testutil.GetAWSLocalstackKinesisClient() + dynamodbClient := testutil.GetAWSLocalstackDynamoDBClient() + + source, err := newKinesisSourceWithInterfaces(kinesisClient, dynamodbClient, "00000000000", 15, testutil.AWSLocalstackRegion, "nonexistent-stream", "test", nil) + + assert.Nil(&kinesisSource{}, source) + assert.NotNil(err) + +} +*/ + +// TODO: When we address https://github.com/snowplow-devops/stream-replicator/issues/151, this test will need to change. 
func TestKinesisSource_ReadFailure_NoResources(t *testing.T) { if testing.Short() { t.Skip("skipping integration test") @@ -39,7 +101,9 @@ func TestKinesisSource_ReadFailure_NoResources(t *testing.T) { err = source.Read(nil) assert.NotNil(err) - assert.Equal("Failed to start Kinsumer client: error describing table fake-name_checkpoints: ResourceNotFoundException: Cannot do operations on a non-existent table", err.Error()) + if err != nil { + assert.Equal("Failed to start Kinsumer client: error describing table fake-name_checkpoints: ResourceNotFoundException: Cannot do operations on a non-existent table", err.Error()) + } } func TestKinesisSource_ReadMessages(t *testing.T) { @@ -53,32 +117,36 @@ func TestKinesisSource_ReadMessages(t *testing.T) { kinesisClient := testutil.GetAWSLocalstackKinesisClient() dynamodbClient := testutil.GetAWSLocalstackDynamoDBClient() - streamName := "kinesis-source-integration-1" + streamName := "kinesis-source-integration-2" createErr := testutil.CreateAWSLocalstackKinesisStream(kinesisClient, streamName) if createErr != nil { - panic(createErr) + t.Fatal(createErr) } defer testutil.DeleteAWSLocalstackKinesisStream(kinesisClient, streamName) appName := "integration" - testutil.CreateAWSLocalstackDynamoDBTables(dynamodbClient, appName) - + ddbErr := testutil.CreateAWSLocalstackDynamoDBTables(dynamodbClient, appName) + if ddbErr != nil { + t.Fatal(ddbErr) + } defer testutil.DeleteAWSLocalstackDynamoDBTables(dynamodbClient, appName) // Put ten records into kinesis stream putErr := putNRecordsIntoKinesis(kinesisClient, 10, streamName, "Test") if putErr != nil { - panic(putErr) + t.Fatal(putErr) } + time.Sleep(1 * time.Second) + // Create the source and assert that it's there source, err := newKinesisSourceWithInterfaces(kinesisClient, dynamodbClient, "00000000000", 15, testutil.AWSLocalstackRegion, streamName, appName, nil) assert.Nil(err) assert.NotNil(source) - 
assert.Equal("arn:aws:kinesis:us-east-1:00000000000:stream/kinesis-source-integration-1", source.GetID()) + assert.Equal("arn:aws:kinesis:us-east-1:00000000000:stream/kinesis-source-integration-2", source.GetID()) // Read data from stream and check that we got it all - successfulReads := testutil.ReadAndReturnMessages(source) + successfulReads := testutil.ReadAndReturnMessages(source, 3*time.Second, testutil.DefaultTestWriteBuilder, nil) assert.Equal(10, len(successfulReads)) } @@ -94,22 +162,25 @@ func TestKinesisSource_StartTimestamp(t *testing.T) { kinesisClient := testutil.GetAWSLocalstackKinesisClient() dynamodbClient := testutil.GetAWSLocalstackDynamoDBClient() - streamName := "kinesis-source-integration-2" + streamName := "kinesis-source-integration-3" createErr := testutil.CreateAWSLocalstackKinesisStream(kinesisClient, streamName) if createErr != nil { - panic(createErr) + t.Fatal(createErr) } defer testutil.DeleteAWSLocalstackKinesisStream(kinesisClient, streamName) appName := "integration" - testutil.CreateAWSLocalstackDynamoDBTables(dynamodbClient, appName) + ddbErr := testutil.CreateAWSLocalstackDynamoDBTables(dynamodbClient, appName) + if ddbErr != nil { + t.Fatal(ddbErr) + } defer testutil.DeleteAWSLocalstackDynamoDBTables(dynamodbClient, appName) // Put two batches of 10 records into kinesis stream, grabbing a timestamp in between putErr := putNRecordsIntoKinesis(kinesisClient, 10, streamName, "First batch") if putErr != nil { - panic(putErr) + t.Fatal(putErr) } time.Sleep(1 * time.Second) // Put a 1s buffer either side of the start timestamp @@ -118,17 +189,17 @@ func TestKinesisSource_StartTimestamp(t *testing.T) { putErr2 := putNRecordsIntoKinesis(kinesisClient, 10, streamName, "Second batch") if putErr2 != nil { - panic(putErr2) + t.Fatal(putErr2) } // Create the source (with start timestamp) and assert that it's there source, err := newKinesisSourceWithInterfaces(kinesisClient, dynamodbClient, "00000000000", 15, testutil.AWSLocalstackRegion, 
streamName, appName, &timeToStart) assert.Nil(err) assert.NotNil(source) - assert.Equal("arn:aws:kinesis:us-east-1:00000000000:stream/kinesis-source-integration-2", source.GetID()) + assert.Equal("arn:aws:kinesis:us-east-1:00000000000:stream/kinesis-source-integration-3", source.GetID()) // Read from stream - successfulReads := testutil.ReadAndReturnMessages(source) + successfulReads := testutil.ReadAndReturnMessages(source, 3*time.Second, testutil.DefaultTestWriteBuilder, nil) // Check that we have ten messages assert.Equal(10, len(successfulReads)) @@ -164,31 +235,34 @@ func TestGetSource_WithKinesisSource(t *testing.T) { streamName := "kinesis-source-config-integration-1" createErr := testutil.CreateAWSLocalstackKinesisStream(kinesisClient, streamName) if createErr != nil { - panic(createErr) + t.Fatal(createErr) } defer testutil.DeleteAWSLocalstackKinesisStream(kinesisClient, streamName) appName := "kinesisSourceIntegration" - testutil.CreateAWSLocalstackDynamoDBTables(dynamodbClient, appName) - + ddbErr := testutil.CreateAWSLocalstackDynamoDBTables(dynamodbClient, appName) + if ddbErr != nil { + t.Fatal(ddbErr) + } defer testutil.DeleteAWSLocalstackDynamoDBTables(dynamodbClient, appName) - defer os.Unsetenv("SOURCE") + t.Setenv("SOURCE_NAME", "kinesis") - os.Setenv("SOURCE", "kinesis") - - os.Setenv("SOURCE_KINESIS_STREAM_NAME", streamName) - os.Setenv("SOURCE_KINESIS_REGION", testutil.AWSLocalstackRegion) - os.Setenv("SOURCE_KINESIS_APP_NAME", appName) + t.Setenv("SOURCE_KINESIS_STREAM_NAME", streamName) + t.Setenv("SOURCE_KINESIS_REGION", testutil.AWSLocalstackRegion) + t.Setenv("SOURCE_KINESIS_APP_NAME", appName) c, err := config.NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } - // Use our function generator to interact with localstack + // use our function generator to interact with localstack kinesisSourceConfigFunctionWithLocalstack := 
configFunctionGeneratorWithInterfaces(kinesisClient, dynamodbClient, "00000000000") + adaptedHandle := adapterGenerator(kinesisSourceConfigFunctionWithLocalstack) - kinesisSourceConfigPairWithLocalstack := sourceconfig.ConfigPair{SourceName: "kinesis", SourceConfigFunc: kinesisSourceConfigFunctionWithLocalstack} + kinesisSourceConfigPairWithLocalstack := sourceconfig.ConfigPair{Name: "kinesis", Handle: adaptedHandle} supportedSources := []sourceconfig.ConfigPair{kinesisSourceConfigPairWithLocalstack} source, err := sourceconfig.GetSource(c, supportedSources) @@ -197,3 +271,85 @@ func TestGetSource_WithKinesisSource(t *testing.T) { assert.IsType(&kinesisSource{}, source) } + +func TestKinesisSourceHCL(t *testing.T) { + testFixPath := "../../../config/test-fixtures" + testCases := []struct { + File string + Plug config.Pluggable + Expected interface{} + }{ + { + File: "source-kinesis-simple.hcl", + Plug: testKinesisSourceAdapter(testKinesisSourceFunc), + Expected: &configuration{ + StreamName: "testStream", + Region: "us-test-1", + AppName: "testApp", + RoleARN: "", + StartTimestamp: "", + ConcurrentWrites: 50, + }, + }, + { + File: "source-kinesis-extended.hcl", + Plug: testKinesisSourceAdapter(testKinesisSourceFunc), + Expected: &configuration{ + StreamName: "testStream", + Region: "us-test-1", + AppName: "testApp", + RoleARN: "xxx-test-role-arn", + StartTimestamp: "2022-03-15 07:52:53", + ConcurrentWrites: 51, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.File, func(t *testing.T) { + assert := assert.New(t) + + filename := filepath.Join(testFixPath, tt.File) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) + + c, err := config.NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + use := c.Data.Source.Use + decoderOpts := &config.DecoderOptions{ + Input: use.Body, + } + + result, err := c.CreateComponent(tt.Plug, decoderOpts) + assert.NotNil(result) + assert.Nil(err) + + if 
!reflect.DeepEqual(result, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(result), + spew.Sdump(tt.Expected)) + } + }) + } +} + +// Helpers +func testKinesisSourceAdapter(f func(c *configuration) (*configuration, error)) adapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*configuration) + if !ok { + return nil, errors.New("invalid input, expected KinesisSourceConfig") + } + + return f(cfg) + } + +} + +func testKinesisSourceFunc(c *configuration) (*configuration, error) { + + return c, nil +} diff --git a/pkg/source/pubsub/pubsub_source.go b/pkg/source/pubsub/pubsub_source.go index fa27bf7e..acb10626 100644 --- a/pkg/source/pubsub/pubsub_source.go +++ b/pkg/source/pubsub/pubsub_source.go @@ -16,12 +16,18 @@ import ( log "github.com/sirupsen/logrus" "github.com/twinj/uuid" - config "github.com/snowplow-devops/stream-replicator/config" "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceconfig" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) +// configuration configures the source for records pulled +type configuration struct { + ProjectID string `hcl:"project_id" env:"SOURCE_PUBSUB_PROJECT_ID"` + SubscriptionID string `hcl:"subscription_id" env:"SOURCE_PUBSUB_SUBSCRIPTION_ID"` + ConcurrentWrites int `hcl:"concurrent_writes,optional" env:"SOURCE_CONCURRENT_WRITES"` +} + // pubSubSource holds a new client for reading messages from PubSub type pubSubSource struct { projectID string @@ -36,16 +42,50 @@ type pubSubSource struct { } // configFunction returns a pubsub source from a config -func configFunction(c *config.Config) (sourceiface.Source, error) { +func configFunction(c *configuration) (sourceiface.Source, error) { return newPubSubSource( - c.Sources.ConcurrentWrites, - c.Sources.PubSub.ProjectID, - c.Sources.PubSub.SubscriptionID, + c.ConcurrentWrites, + c.ProjectID, + c.SubscriptionID, ) } +// The adapter type is an adapter 
for functions to be used as +// pluggable components for PubSub Source. It implements the Pluggable interface. +type adapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f adapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface +func (f adapter) ProvideDefault() (interface{}, error) { + // Provide defaults + cfg := &configuration{ + ConcurrentWrites: 50, + } + + return cfg, nil +} + +// adapterGenerator returns a PubSub Source adapter. +func adapterGenerator(f func(c *configuration) (sourceiface.Source, error)) adapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*configuration) + if !ok { + return nil, errors.New("invalid input, expected PubSubSourceConfig") + } + + return f(cfg) + } +} + // ConfigPair is passed to configuration to determine when to build a Pubsub source. -var ConfigPair = sourceconfig.ConfigPair{SourceName: "pubsub", SourceConfigFunc: configFunction} +var ConfigPair = sourceconfig.ConfigPair{ + Name: "pubsub", + Handle: adapterGenerator(configFunction), +} // newPubSubSource creates a new client for reading messages from PubSub func newPubSubSource(concurrentWrites int, projectID string, subscriptionID string) (*pubSubSource, error) { diff --git a/pkg/source/pubsub/pubsub_source_test.go b/pkg/source/pubsub/pubsub_source_test.go index 423f12de..8de38102 100644 --- a/pkg/source/pubsub/pubsub_source_test.go +++ b/pkg/source/pubsub/pubsub_source_test.go @@ -6,18 +6,45 @@ package pubsubsource -// Commenting out as it fails on CI - passes on local as I have default creds for a real account -// TODO: Find a way to integration test pubsub +import ( + "os" + "sort" + "strconv" + "sync" + "testing" + "time" -/* -func TestGetSource_WithPubsubSource(t *testing.T) { + "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/config" + 
"github.com/snowplow-devops/stream-replicator/pkg/source/sourceconfig" + "github.com/snowplow-devops/stream-replicator/pkg/testutil" +) + +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} + +func TestPubSubSource_ReadAndReturnSuccessIntegration(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } assert := assert.New(t) - supportedSources := []sourceconfig.SourceConfigPair{PubsubSourceConfigPair} + // Create pubsub integration resource and populate with 10 messages + testutil.CreatePubsubResourcesAndWrite(10, t) + defer testutil.DeletePubsubResources(t) + + t.Setenv("SOURCE_NAME", "pubsub") + t.Setenv("SOURCE_PUBSUB_SUBSCRIPTION_ID", "test-sub") + t.Setenv("SOURCE_PUBSUB_PROJECT_ID", `project-test`) - defer os.Unsetenv("SOURCE") + adaptedHandle := adapterGenerator(configFunction) - os.Setenv("SOURCE", "pubsub") + pubsubSourceConfigPair := sourceconfig.ConfigPair{Name: "pubsub", Handle: adaptedHandle} + supportedSources := []sourceconfig.ConfigPair{pubsubSourceConfigPair} pubsubConfig, err := config.NewConfig() assert.NotNil(pubsubConfig) @@ -27,6 +54,132 @@ func TestGetSource_WithPubsubSource(t *testing.T) { assert.NotNil(pubsubSource) assert.Nil(err) - assert.Equal("projects//subscriptions/", pubsubSource.GetID()) + assert.Equal("projects/project-test/subscriptions/test-sub", pubsubSource.GetID()) + + output := testutil.ReadAndReturnMessages(pubsubSource, 5*time.Second, testutil.DefaultTestWriteBuilder, nil) + assert.Equal(10, len(output)) + for _, message := range output { + assert.Contains(string(message.Data), `message #`) + assert.Nil(message.GetError()) + } +} + +// newPubSubSource_Failure should fail if we can't reach PubSub, commented out this test until we look into https://github.com/snowplow-devops/stream-replicator/issues/151 +/* +func TestNewPubSubSource_Failure(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + assert := assert.New(t) + + pubsubSource, 
err := newPubSubSource(10, "nonexistent-project", "nonexistent-subscription") + assert.NotNil(err) + assert.Nil(pubsubSource) + // This should return an error when we can't connect, rather than proceeding to the Write() function before we hit a problem. } */ + +// TestNewPubSubSource_Success tests the typical case of creating a new pubsub source. +func TestNewPubSubSource_Success(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + assert := assert.New(t) + + testutil.InitMockPubsubServer(8010, nil, t) + + pubsubSource, err := newPubSubSource(10, "project-test", "test-sub") + assert.Nil(err) + assert.IsType(&pubSubSource{}, pubsubSource) + // This should return an error when we can't connect, rather than proceeding to the Write() function before we hit a problem. +} + +func TestPubSubSource_ReadAndReturnSuccessWithMock(t *testing.T) { + assert := assert.New(t) + + srv, conn := testutil.InitMockPubsubServer(8008, nil, t) + defer srv.Close() + defer conn.Close() + + // Publish ten messages + numMsgs := 10 + wg := sync.WaitGroup{} + for i := 0; i < numMsgs; i++ { + wg.Add(1) + go func(i int) { + _ = srv.Publish(`projects/project-test/topics/test-topic`, []byte(strconv.Itoa(i)), nil) + wg.Done() + }(i) + } + wg.Wait() + + pubsubSource, err := newPubSubSource(10, "project-test", "test-sub") + + assert.NotNil(pubsubSource) + assert.Nil(err) + assert.Equal("projects/project-test/subscriptions/test-sub", pubsubSource.GetID()) + + output := testutil.ReadAndReturnMessages(pubsubSource, 3*time.Second, testutil.DefaultTestWriteBuilder, nil) + assert.Equal(10, len(output)) + + // Check that we got exactly the 10 messages we want, with no duplicates + msgDatas := make([]string, 0) + for _, msg := range output { + msgDatas = append(msgDatas, string(msg.Data)) + } + expected := []string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"} + sort.Strings(msgDatas) + assert.Equal(expected, msgDatas) +} + +// 
TestPubSubSource_ReadAndReturnSuccessWithMock_DelayedAcks tests the behaviour of pubsub source when some messages take longer to ack than others +func TestPubSubSource_ReadAndReturnSuccessWithMock_DelayedAcks(t *testing.T) { + assert := assert.New(t) + + srv, conn := testutil.InitMockPubsubServer(8008, nil, t) + defer srv.Close() + defer conn.Close() + + // publish 10 messages + numMsgs := 10 + wg := sync.WaitGroup{} + for i := 0; i < numMsgs; i++ { + wg.Add(1) + go func(i int) { + _ = srv.Publish(`projects/project-test/topics/test-topic`, []byte(strconv.Itoa(i)), nil) + wg.Done() + }(i) + } + wg.Wait() + + t.Setenv("SOURCE_NAME", "pubsub") + t.Setenv("SOURCE_PUBSUB_SUBSCRIPTION_ID", "test-sub") + t.Setenv("SOURCE_PUBSUB_PROJECT_ID", `project-test`) + + adaptedHandle := adapterGenerator(configFunction) + + pubsubSourceConfigPair := sourceconfig.ConfigPair{Name: "pubsub", Handle: adaptedHandle} + supportedSources := []sourceconfig.ConfigPair{pubsubSourceConfigPair} + + pubsubConfig, err := config.NewConfig() + assert.NotNil(pubsubConfig) + assert.Nil(err) + + pubsubSource, err := sourceconfig.GetSource(pubsubConfig, supportedSources) + + assert.NotNil(pubsubSource) + assert.Nil(err) + assert.Equal("projects/project-test/subscriptions/test-sub", pubsubSource.GetID()) + + output := testutil.ReadAndReturnMessages(pubsubSource, 5*time.Second, testutil.DelayedAckTestWriteBuilder, 2*time.Second) + assert.Equal(10, len(output)) + + // Check that we got exactly the 10 messages we want, with no duplicates + msgDatas := make([]string, 0) + for _, msg := range output { + msgDatas = append(msgDatas, string(msg.Data)) + } + expected := []string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"} + sort.Strings(msgDatas) + assert.Equal(expected, msgDatas) +} diff --git a/pkg/source/sourceconfig/source_config.go b/pkg/source/sourceconfig/source_config.go index 59b9d288..3eb752ab 100644 --- a/pkg/source/sourceconfig/source_config.go +++ b/pkg/source/sourceconfig/source_config.go @@ 
-14,23 +14,36 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) -// configFunction is a function which returns a source. -type configFunction func(*config.Config) (sourceiface.Source, error) - -// ConfigPair contains the name of a source and its ConfigFunction. +// ConfigPair contains the name of a source and its handle that satisfies the +// Pluggable interface. type ConfigPair struct { - SourceName string - SourceConfigFunc configFunction + Name string + Handle config.Pluggable } -// GetSource iterates the list of supported sources, matches the provided config for source, and returns a source. +// GetSource creates and returns the source that is configured. func GetSource(c *config.Config, supportedSources []ConfigPair) (sourceiface.Source, error) { + useSource := c.Data.Source.Use + decoderOpts := &config.DecoderOptions{ + Input: useSource.Body, + } + sourceList := make([]string, 0) - for _, configPair := range supportedSources { - if configPair.SourceName == c.Source { - return configPair.SourceConfigFunc(c) + for _, pair := range supportedSources { + if pair.Name == useSource.Name { + plug := pair.Handle + component, err := c.CreateComponent(plug, decoderOpts) + if err != nil { + return nil, err + } + + if s, ok := component.(sourceiface.Source); ok { + return s, nil + } + + return nil, fmt.Errorf("could not interpret source configuration for %q", useSource.Name) } - sourceList = append(sourceList, configPair.SourceName) + sourceList = append(sourceList, pair.Name) } - return nil, fmt.Errorf("Invalid source found: %s. Supported sources in this build: %s", c.Source, strings.Join(sourceList, ", ")) + return nil, fmt.Errorf("Invalid source found: %s. 
Supported sources in this build: %s", useSource.Name, strings.Join(sourceList, ", ")) } diff --git a/pkg/source/sourceconfig/source_config_test.go b/pkg/source/sourceconfig/source_config_test.go index 40a2cb2e..f82e1808 100644 --- a/pkg/source/sourceconfig/source_config_test.go +++ b/pkg/source/sourceconfig/source_config_test.go @@ -10,25 +10,134 @@ import ( "os" "testing" - config "github.com/snowplow-devops/stream-replicator/config" "github.com/stretchr/testify/assert" + + config "github.com/snowplow-devops/stream-replicator/config" + "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) -func TestNewConfig_InvalidSource(t *testing.T) { - assert := assert.New(t) +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} + +// Mock a Source and configuration +type mockSource struct{} + +func (m mockSource) Read(sf *sourceiface.SourceFunctions) error { + return nil +} + +func (m mockSource) Stop() {} - defer os.Unsetenv("SOURCE") +func (m mockSource) GetID() string { + return "" +} - os.Setenv("SOURCE", "fake") +type configuration struct{} + +func configfunction(c *configuration) (sourceiface.Source, error) { + return mockSource{}, nil +} + +type adapter func(i interface{}) (interface{}, error) + +func adapterGenerator(f func(c *configuration) (sourceiface.Source, error)) adapter { + return func(i interface{}) (interface{}, error) { + return mockSource{}, nil + } +} + +func (f adapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +func (f adapter) ProvideDefault() (interface{}, error) { + // Provide defaults + cfg := &configuration{} + + return cfg, nil +} + +var mockConfigPair = ConfigPair{ + Name: "mock", + Handle: adapterGenerator(configfunction), +} + +// TestGetSource_ValidSource tests the happy path for GetSource +func TestGetSource_ValidSource(t *testing.T) { + assert := assert.New(t) + + t.Setenv("SOURCE_NAME", "mock") c, err := config.NewConfig() assert.NotNil(c) + if err != nil { + 
t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + supportedSources := []ConfigPair{mockConfigPair} + + source, err := GetSource(c, supportedSources) + + assert.Equal(mockSource{}, source) assert.Nil(err) +} + +// TestGetSource_InvalidSource tests that we throw an error when given an invalid source configuration +func TestGetSource_InvalidSource(t *testing.T) { + assert := assert.New(t) + + t.Setenv("SOURCE_NAME", "fake") + + c, err := config.NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } supportedSources := []ConfigPair{} source, err := GetSource(c, supportedSources) assert.Nil(source) assert.NotNil(err) - assert.Equal("Invalid source found: fake. Supported sources in this build: ", err.Error()) + if err != nil { + assert.Equal("Invalid source found: fake. Supported sources in this build: ", err.Error()) + } +} + +// Mock a broken adapter generator implementation +func brokenAdapterGenerator(f func(c *configuration) (sourceiface.Source, error)) adapter { + return func(i interface{}) (interface{}, error) { + return nil, nil + } +} + +var mockUnhappyConfigPair = ConfigPair{ + Name: "mockUnhappy", + Handle: brokenAdapterGenerator(configfunction), +} + +// TestGetSource_BadConfig tests the case where the configuration implementation is broken +func TestGetSource_BadConfig(t *testing.T) { + assert := assert.New(t) + + t.Setenv("SOURCE_NAME", "mockUnhappy") + + c, err := config.NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + supportedSources := []ConfigPair{mockUnhappyConfigPair} + + source, err := GetSource(c, supportedSources) + + assert.Nil(source) + assert.NotNil(err) + if err != nil { + assert.Equal("could not interpret source configuration for \"mockUnhappy\"", err.Error()) + } } diff --git a/pkg/source/sqs/sqs_source.go b/pkg/source/sqs/sqs_source.go index 4499985f..28039e41 100644 --- 
a/pkg/source/sqs/sqs_source.go +++ b/pkg/source/sqs/sqs_source.go @@ -19,13 +19,20 @@ import ( log "github.com/sirupsen/logrus" "github.com/twinj/uuid" - config "github.com/snowplow-devops/stream-replicator/config" "github.com/snowplow-devops/stream-replicator/pkg/common" "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceconfig" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) +// configuration configures the source for records pulled +type configuration struct { + QueueName string `hcl:"queue_name" env:"SOURCE_SQS_QUEUE_NAME"` + Region string `hcl:"region" env:"SOURCE_SQS_REGION"` + RoleARN string `hcl:"role_arn,optional" env:"SOURCE_SQS_ROLE_ARN"` + ConcurrentWrites int `hcl:"concurrent_writes,optional" env:"SOURCE_CONCURRENT_WRITES"` +} + // sqsSource holds a new client for reading messages from SQS type sqsSource struct { client sqsiface.SQSAPI @@ -47,15 +54,15 @@ type sqsSource struct { // configFunctionGeneratorWithInterfaces generates the SQS Source Config function, allowing you // to provide an SQS client directly to allow for mocking and localstack usage -func configFunctionGeneratorWithInterfaces(client sqsiface.SQSAPI, awsAccountID string) func(c *config.Config) (sourceiface.Source, error) { - return func(c *config.Config) (sourceiface.Source, error) { - return newSQSSourceWithInterfaces(client, awsAccountID, c.Sources.ConcurrentWrites, c.Sources.SQS.Region, c.Sources.SQS.QueueName) +func configFunctionGeneratorWithInterfaces(client sqsiface.SQSAPI, awsAccountID string) func(c *configuration) (sourceiface.Source, error) { + return func(c *configuration) (sourceiface.Source, error) { + return newSQSSourceWithInterfaces(client, awsAccountID, c.ConcurrentWrites, c.Region, c.QueueName) } } -// configFunction returns an SQS source from a config -func configFunction(c *config.Config) (sourceiface.Source, error) { - awsSession, awsConfig, awsAccountID, err := 
common.GetAWSSession(c.Sources.SQS.Region, c.Sources.SQS.RoleARN) +// configFunction returns an SQS source from a config. +func configFunction(c *configuration) (sourceiface.Source, error) { + awsSession, awsConfig, awsAccountID, err := common.GetAWSSession(c.Region, c.RoleARN) if err != nil { return nil, err } @@ -67,8 +74,43 @@ func configFunction(c *config.Config) (sourceiface.Source, error) { return sourceConfigFunc(c) } -// ConfigPair is passed to configuration to determine when to build an SQS source. -var ConfigPair = sourceconfig.ConfigPair{SourceName: "sqs", SourceConfigFunc: configFunction} +// The adapter type is an adapter for functions to be used as +// pluggable components for SQS Source. It implements the Pluggable interface. +type adapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f adapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f adapter) ProvideDefault() (interface{}, error) { + // Provide defaults + cfg := &configuration{ + ConcurrentWrites: 50, + } + + return cfg, nil +} + +// adapterGenerator returns an SQS Source adapter. +func adapterGenerator(f func(c *configuration) (sourceiface.Source, error)) adapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*configuration) + if !ok { + return nil, errors.New("invalid input, expected SQSSourceConfig") + } + + return f(cfg) + } +} + +// ConfigPair is passed to configuration to determine when and how to build +// an SQS source. 
+var ConfigPair = sourceconfig.ConfigPair{ + Name: "sqs", + Handle: adapterGenerator(configFunction), +} // newSQSSourceWithInterfaces allows you to provide an SQS client directly to allow // for mocking and localstack usage diff --git a/pkg/source/sqs/sqs_source_test.go b/pkg/source/sqs/sqs_source_test.go index 7462fc98..84aa4b77 100644 --- a/pkg/source/sqs/sqs_source_test.go +++ b/pkg/source/sqs/sqs_source_test.go @@ -7,10 +7,14 @@ package sqssource import ( + "errors" "os" + "path/filepath" + "reflect" "testing" "time" + "github.com/davecgh/go-spew/spew" "github.com/stretchr/testify/assert" config "github.com/snowplow-devops/stream-replicator/config" @@ -20,6 +24,49 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/testutil" ) +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} + +// func newSQSSourceWithInterfaces(client sqsiface.SQSAPI, awsAccountID string, concurrentWrites int, region string, queueName string) (*sqsSource, error) { +func TestNewSQSSourceWithInterfaces_Success(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + // Since this requires a localstack client (until we implement a mock and make unit tests), + // We'll only run it with the integration tests for the time being. 
+ assert := assert.New(t) + + client := testutil.GetAWSLocalstackSQSClient() + + queueName := "sqs-queue-source" + queueURL := testutil.SetupAWSLocalstackSQSQueueWithMessages(client, queueName, 50, "Hello SQS!!") + defer testutil.DeleteAWSLocalstackSQSQueue(client, queueURL) + + source, err := newSQSSourceWithInterfaces(client, "00000000000", 10, testutil.AWSLocalstackRegion, queueName) + + assert.IsType(&sqsSource{}, source) + assert.Nil(err) +} + +// newSQSSourceWithInterfaces should fail if we can't reach SQS, commented out this test until we look into https://github.com/snowplow-devops/stream-replicator/issues/151 +/* +func TestNewSQSSourceWithInterfaces_Failure(t *testing.T) { + // Unlike the success test, we don't require anything to exist for this one + assert := assert.New(t) + + client := testutil.GetAWSLocalstackSQSClient() + + source, err := newSQSSourceWithInterfaces(client, "00000000000", 10, testutil.AWSLocalstackRegion, "nonexistent-queue") + + assert.Nil(source) + assert.NotNil(err) +} +*/ + +// TODO: When we address https://github.com/snowplow-devops/stream-replicator/issues/151, this test will need to change. 
func TestSQSSource_ReadFailure(t *testing.T) { if testing.Short() { t.Skip("skipping integration test") @@ -36,6 +83,9 @@ func TestSQSSource_ReadFailure(t *testing.T) { err = source.Read(nil) assert.NotNil(err) + if err != nil { + assert.Equal("Failed to get SQS queue URL: AWS.SimpleQueueService.NonExistentQueue: AWS.SimpleQueueService.NonExistentQueue; see the SQS docs.\n\tstatus code: 400, request id: 00000000-0000-0000-0000-000000000000", err.Error()) + } } func TestSQSSource_ReadSuccess(t *testing.T) { @@ -84,7 +134,7 @@ func TestSQSSource_ReadSuccess(t *testing.T) { select { case <-done: case <-time.After(5 * time.Second): - panic("TestSQSSource_ReadSuccess timed out!") + t.Fatal("TestSQSSource_ReadSuccess timed out!") } assert.Equal(50, messageCount) @@ -103,22 +153,24 @@ func TestGetSource_WithSQSSource(t *testing.T) { queueName := "sqs-source-config-integration-1" _, createErr := testutil.CreateAWSLocalstackSQSQueue(sqsClient, queueName) if createErr != nil { - panic(createErr) + t.Fatal(createErr) } defer testutil.DeleteAWSLocalstackSQSQueue(sqsClient, &queueName) - defer os.Unsetenv("SOURCE") - - os.Setenv("SOURCE", "sqs") - os.Setenv("SOURCE_SQS_QUEUE_NAME", queueName) + t.Setenv("SOURCE_NAME", "sqs") + t.Setenv("SOURCE_SQS_QUEUE_NAME", queueName) c, err := config.NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } sqsSourceConfigFunctionWithLocalStack := configFunctionGeneratorWithInterfaces(sqsClient, "00000000000") - sqsSourceConfigPairWithInterfaces := sourceconfig.ConfigPair{SourceName: "sqs", SourceConfigFunc: sqsSourceConfigFunctionWithLocalStack} + adaptedHandle := adapterGenerator(sqsSourceConfigFunctionWithLocalStack) + + sqsSourceConfigPairWithInterfaces := sourceconfig.ConfigPair{Name: "sqs", Handle: adaptedHandle} supportedSources := []sourceconfig.ConfigPair{sqsSourceConfigPairWithInterfaces} source, err := sourceconfig.GetSource(c, supportedSources) @@ 
-127,3 +179,71 @@ func TestGetSource_WithSQSSource(t *testing.T) { assert.IsType(&sqsSource{}, source) } + +func TestSQSSourceHCL(t *testing.T) { + testFixPath := "../../../config/test-fixtures" + testCases := []struct { + File string + Plug config.Pluggable + Expected interface{} + }{ + { + File: "source-sqs.hcl", + Plug: testSQSSourceAdapter(testSQSSourceFunc), + Expected: &configuration{ + QueueName: "testQueue", + Region: "us-test-1", + RoleARN: "xxx-test-role-arn", + ConcurrentWrites: 50, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.File, func(t *testing.T) { + assert := assert.New(t) + + filename := filepath.Join(testFixPath, tt.File) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) + + c, err := config.NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + use := c.Data.Source.Use + decoderOpts := &config.DecoderOptions{ + Input: use.Body, + } + + result, err := c.CreateComponent(tt.Plug, decoderOpts) + assert.NotNil(result) + assert.Nil(err) + + if !reflect.DeepEqual(result, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(result), + spew.Sdump(tt.Expected)) + } + }) + } +} + +// Helpers +func testSQSSourceAdapter(f func(c *configuration) (*configuration, error)) adapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*configuration) + if !ok { + return nil, errors.New("invalid input, expected SQSSourceConfig") + } + + return f(cfg) + } + +} + +func testSQSSourceFunc(c *configuration) (*configuration, error) { + + return c, nil +} diff --git a/pkg/source/stdin/stdin_source.go b/pkg/source/stdin/stdin_source.go index 79386c42..8148760d 100644 --- a/pkg/source/stdin/stdin_source.go +++ b/pkg/source/stdin/stdin_source.go @@ -16,12 +16,16 @@ import ( log "github.com/sirupsen/logrus" "github.com/twinj/uuid" - config "github.com/snowplow-devops/stream-replicator/config" "github.com/snowplow-devops/stream-replicator/pkg/models" 
"github.com/snowplow-devops/stream-replicator/pkg/source/sourceconfig" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) +// configuration configures the source for records pulled +type configuration struct { + ConcurrentWrites int `hcl:"concurrent_writes,optional" env:"SOURCE_CONCURRENT_WRITES"` +} + // stdinSource holds a new client for reading messages from stdin type stdinSource struct { concurrentWrites int @@ -29,15 +33,49 @@ type stdinSource struct { log *log.Entry } -// configfunction returns an stdin source from a config -func configfunction(c *config.Config) (sourceiface.Source, error) { +// configFunction returns an stdin source from a config +func configfunction(c *configuration) (sourceiface.Source, error) { return newStdinSource( - c.Sources.ConcurrentWrites, + c.ConcurrentWrites, ) } +// The adapter type is an adapter for functions to be used as +// pluggable components for Stdin Source. It implements the Pluggable interface. +type adapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f adapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f adapter) ProvideDefault() (interface{}, error) { + // Provide defaults + cfg := &configuration{ + ConcurrentWrites: 50, + } + + return cfg, nil +} + +// adapterGenerator returns a StdinSource adapter. +func adapterGenerator(f func(c *configuration) (sourceiface.Source, error)) adapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*configuration) + if !ok { + return nil, errors.New("invalid input, expected StdinSourceConfig") + } + + return f(cfg) + } +} + // ConfigPair is passed to configuration to determine when to build an stdin source. 
-var ConfigPair = sourceconfig.ConfigPair{SourceName: "stdin", SourceConfigFunc: configfunction} +var ConfigPair = sourceconfig.ConfigPair{ + Name: "stdin", + Handle: adapterGenerator(configfunction), +} // newStdinSource creates a new client for reading messages from stdin func newStdinSource(concurrentWrites int) (*stdinSource, error) { diff --git a/pkg/source/stdin/stdin_source_test.go b/pkg/source/stdin/stdin_source_test.go index 8bca5c57..38d78fa3 100644 --- a/pkg/source/stdin/stdin_source_test.go +++ b/pkg/source/stdin/stdin_source_test.go @@ -19,6 +19,12 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} + func TestStdinSource_ReadSuccess(t *testing.T) { assert := assert.New(t) @@ -60,19 +66,19 @@ func TestStdinSource_ReadSuccess(t *testing.T) { } func TestGetSource_WithStdinSource(t *testing.T) { + t.Setenv("SOURCE_NAME", "stdin") + assert := assert.New(t) supportedSources := []sourceconfig.ConfigPair{ConfigPair} - defer os.Unsetenv("SOURCE") - - os.Setenv("SOURCE", "stdin") - - stdinConfig, err := config.NewConfig() - assert.NotNil(stdinConfig) - assert.Nil(err) + c, err := config.NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } - stdinSource, err := sourceconfig.GetSource(stdinConfig, supportedSources) + stdinSource, err := sourceconfig.GetSource(c, supportedSources) assert.NotNil(stdinSource) assert.Nil(err) diff --git a/pkg/statsreceiver/statsd.go b/pkg/statsreceiver/statsd.go index 48babe95..bf75e676 100644 --- a/pkg/statsreceiver/statsd.go +++ b/pkg/statsreceiver/statsd.go @@ -17,13 +17,20 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/models" ) -// StatsDStatsReceiver holds a new client for writing statistics to a StatsD server -type StatsDStatsReceiver struct { +// StatsDStatsReceiverConfig configures the stats metrics receiver +type 
StatsDStatsReceiverConfig struct { + Address string `hcl:"address,optional" env:"STATS_RECEIVER_STATSD_ADDRESS"` + Prefix string `hcl:"prefix,optional" env:"STATS_RECEIVER_STATSD_PREFIX"` + Tags string `hcl:"tags,optional" env:"STATS_RECEIVER_STATSD_TAGS"` +} + +// statsDStatsReceiver holds a new client for writing statistics to a StatsD server +type statsDStatsReceiver struct { client *statsd.Client } -// NewStatsDStatsReceiver creates a new client for writing metrics to StatsD -func NewStatsDStatsReceiver(address string, prefix string, tagsRaw string, tagsMapClient map[string]string) (*StatsDStatsReceiver, error) { +// newStatsDStatsReceiver creates a new client for writing metrics to StatsD +func newStatsDStatsReceiver(address string, prefix string, tagsRaw string, tagsMapClient map[string]string) (*statsDStatsReceiver, error) { tagsMap := map[string]string{} err := json.Unmarshal([]byte(tagsRaw), &tagsMap) if err != nil { @@ -46,19 +53,66 @@ func NewStatsDStatsReceiver(address string, prefix string, tagsRaw string, tagsM statsd.ReconnectInterval(60*time.Second), ) - return &StatsDStatsReceiver{ + return &statsDStatsReceiver{ client: client, }, nil } +// NewStatsDReceiverWithTags closes over a given tags map and returns a function +// that creates a statsDStatsReceiver given a StatsDStatsReceiverConfig. +func NewStatsDReceiverWithTags(tags map[string]string) func(c *StatsDStatsReceiverConfig) (*statsDStatsReceiver, error) { + return func(c *StatsDStatsReceiverConfig) (*statsDStatsReceiver, error) { + return newStatsDStatsReceiver( + c.Address, + c.Prefix, + c.Tags, + tags, + ) + } +} + +// The StatsDStatsReceiverAdapter type is an adapter for functions to be used as +// pluggable components for StatsD Stats Receiver. +// It implements the Pluggable interface. +type StatsDStatsReceiverAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. 
+func (f StatsDStatsReceiverAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f StatsDStatsReceiverAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults for the optional parameters + // whose default is not their zero value. + cfg := &StatsDStatsReceiverConfig{ + Prefix: "snowplow.stream-replicator", + Tags: "{}", + } + + return cfg, nil +} + +// AdaptStatsDStatsReceiverFunc returns a StatsDStatsReceiverAdapter. +func AdaptStatsDStatsReceiverFunc(f func(c *StatsDStatsReceiverConfig) (*statsDStatsReceiver, error)) StatsDStatsReceiverAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*StatsDStatsReceiverConfig) + if !ok { + return nil, errors.New("invalid input, expected StatsDStatsReceiverConfig") + } + + return f(cfg) + } +} + // Send emits the bufferred metrics to the receiver -func (s *StatsDStatsReceiver) Send(b *models.ObserverBuffer) { +func (s *statsDStatsReceiver) Send(b *models.ObserverBuffer) { s.client.Incr("message_sent", b.MsgSent) s.client.Incr("message_failed", b.MsgFailed) s.client.Incr("oversized_message_sent", b.OversizedMsgSent) s.client.Incr("oversized_message_failed", b.OversizedMsgFailed) s.client.Incr("invalid_message_sent", b.InvalidMsgSent) s.client.Incr("invalid_message_failed", b.InvalidMsgFailed) - s.client.PrecisionTiming("latency_proccesing_max", b.MaxProcLatency) + s.client.PrecisionTiming("latency_processing_max", b.MaxProcLatency) s.client.PrecisionTiming("latency_message_max", b.MaxMsgLatency) } diff --git a/pkg/target/eventhub.go b/pkg/target/eventhub.go index 478bf44b..62a31abf 100644 --- a/pkg/target/eventhub.go +++ b/pkg/target/eventhub.go @@ -16,24 +16,26 @@ import ( "github.com/hashicorp/go-multierror" "github.com/pkg/errors" log "github.com/sirupsen/logrus" + "github.com/snowplow-devops/stream-replicator/pkg/models" ) // EventHubConfig holds a config object for Azure EventHub type 
EventHubConfig struct { - EventHubNamespace string - EventHubName string - MaxAutoRetries int - MessageByteLimit int - ChunkByteLimit int - ChunkMessageLimit int - ContextTimeoutInSeconds int - BatchByteLimit int + EventHubNamespace string `hcl:"namespace" env:"TARGET_EVENTHUB_NAMESPACE"` + EventHubName string `hcl:"name" env:"TARGET_EVENTHUB_NAME"` + MaxAutoRetries int `hcl:"max_auto_retries,optional" env:"TARGET_EVENTHUB_MAX_AUTO_RETRY"` + MessageByteLimit int `hcl:"message_byte_limit,optional" env:"TARGET_EVENTHUB_MESSAGE_BYTE_LIMIT"` + ChunkByteLimit int `hcl:"chunk_byte_limit,optional" env:"TARGET_EVENTHUB_CHUNK_BYTE_LIMIT"` + ChunkMessageLimit int `hcl:"chunk_message_limit,optional" env:"TARGET_EVENTHUB_CHUNK_MESSAGE_LIMIT"` + ContextTimeoutInSeconds int `hcl:"context_timeout_in_seconds,optional" env:"TARGET_EVENTHUB_CONTEXT_TIMEOUT_SECONDS"` + BatchByteLimit int `hcl:"batch_byte_limit,optional" env:"TARGET_EVENTHUB_BATCH_BYTE_LIMIT"` + SetEHPartitionKey bool `hcl:"set_eh_partition_key,optional" env:"TARGET_EVENTHUB_SET_EH_PK"` } // EventHubTarget holds a new client for writing messages to Azure EventHub type EventHubTarget struct { - client *eventhub.Hub + client clientIface eventHubNamespace string eventHubName string messageByteLimit int @@ -41,12 +43,36 @@ type EventHubTarget struct { chunkMessageLimit int contextTimeoutInSeconds int batchByteLimit int + setEHPartitionKey bool log *log.Entry } -// NewEventHubTarget creates a new client for writing messages to Azure EventHub -func NewEventHubTarget(cfg *EventHubConfig) (*EventHubTarget, error) { +// clientIface allows us to mock the entire eventhub.Hub client, since they don't provide interfaces for mocking https://github.com/Azure/azure-event-hubs-go/issues/98 +type clientIface interface { + SendBatch(context.Context, eventhub.BatchIterator, ...eventhub.BatchOption) error + Close(context.Context) error +} + +// newEventHubTargetWithInterfaces allows for mocking the eventhub client +func 
newEventHubTargetWithInterfaces(client clientIface, cfg *EventHubConfig) *EventHubTarget { + return &EventHubTarget{ + client: client, + eventHubNamespace: cfg.EventHubNamespace, + eventHubName: cfg.EventHubName, + messageByteLimit: cfg.MessageByteLimit, + chunkByteLimit: cfg.ChunkByteLimit, + chunkMessageLimit: cfg.ChunkMessageLimit, + contextTimeoutInSeconds: cfg.ContextTimeoutInSeconds, + batchByteLimit: cfg.BatchByteLimit, + setEHPartitionKey: cfg.SetEHPartitionKey, + + log: log.WithFields(log.Fields{"target": "eventhub", "cloud": "Azure", "namespace": cfg.EventHubNamespace, "eventhub": cfg.EventHubName}), + } +} + +// newEventHubTarget creates a new client for writing messages to Azure EventHub +func newEventHubTarget(cfg *EventHubConfig) (*EventHubTarget, error) { _, keyNamePresent := os.LookupEnv("EVENTHUB_KEY_NAME") _, keyValuePresent := os.LookupEnv("EVENTHUB_KEY_VALUE") @@ -70,18 +96,50 @@ func NewEventHubTarget(cfg *EventHubConfig) (*EventHubTarget, error) { // If none is specified, it will retry indefinitely until the context times out, which hides the actual error message // To avoid obscuring errors, contextTimeoutInSeconds should be configured to ensure all retries may be completed before its expiry - return &EventHubTarget{ - client: hub, - eventHubNamespace: cfg.EventHubNamespace, - eventHubName: cfg.EventHubName, - messageByteLimit: cfg.MessageByteLimit, - chunkByteLimit: cfg.ChunkByteLimit, - chunkMessageLimit: cfg.ChunkMessageLimit, - contextTimeoutInSeconds: cfg.ContextTimeoutInSeconds, - batchByteLimit: cfg.BatchByteLimit, + return newEventHubTargetWithInterfaces(hub, cfg), err +} - log: log.WithFields(log.Fields{"target": "eventhub", "cloud": "Azure", "namespace": cfg.EventHubNamespace, "eventhub": cfg.EventHubName}), - }, err +// EventHubTargetConfigFunction creates an EventHubTarget from an EventHubconfig +func EventHubTargetConfigFunction(cfg *EventHubConfig) (*EventHubTarget, error) { + return newEventHubTarget(cfg) +} + +// The 
EventHubTargetAdapter type is an adapter for functions to be used as +// pluggable components for EventHub target. Implements the Pluggable interface. +type EventHubTargetAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f EventHubTargetAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f EventHubTargetAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults for the optional parameters + // whose default is not their zero value. + cfg := &EventHubConfig{ + MaxAutoRetries: 1, + MessageByteLimit: 1048576, + ChunkByteLimit: 1048576, + ChunkMessageLimit: 500, + ContextTimeoutInSeconds: 20, + BatchByteLimit: 1048576, + SetEHPartitionKey: true, + } + + return cfg, nil +} + +// AdaptEventHubTargetFunc returns an EventHubTargetAdapter. +func AdaptEventHubTargetFunc(f func(c *EventHubConfig) (*EventHubTarget, error)) EventHubTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*EventHubConfig) + if !ok { + return nil, errors.New("invalid input, expected EventHubConfig") + } + + return f(cfg) + } } func (eht *EventHubTarget) Write(messages []*models.Message) (*models.TargetWriteResult, error) { @@ -124,7 +182,9 @@ func (eht *EventHubTarget) process(messages []*models.Message) (*models.TargetWr ehBatch := make([]*eventhub.Event, messageCount) for i, msg := range messages { ehEvent := eventhub.NewEvent(msg.Data) - ehEvent.PartitionKey = &msg.PartitionKey + if eht.setEHPartitionKey { + ehEvent.PartitionKey = &msg.PartitionKey + } ehBatch[i] = ehEvent } diff --git a/pkg/target/eventhub_test.go b/pkg/target/eventhub_test.go index 3cad6142..611c5c18 100644 --- a/pkg/target/eventhub_test.go +++ b/pkg/target/eventhub_test.go @@ -7,70 +7,413 @@ package target import ( - "os" + "context" + "fmt" + "sort" + "strings" + "sync/atomic" "testing" + "time" + eventhub 
"github.com/Azure/azure-event-hubs-go/v3" + "github.com/pkg/errors" "github.com/stretchr/testify/assert" + "github.com/twinj/uuid" + + "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/snowplow-devops/stream-replicator/pkg/testutil" ) var cfg = EventHubConfig{ - EventHubNamespace: "test", - EventHubName: "test", + EventHubNamespace: "test", + EventHubName: "test", + MaxAutoRetries: 1, + MessageByteLimit: 1048576, + ChunkByteLimit: 1048576, + ChunkMessageLimit: 500, + ContextTimeoutInSeconds: 20, + BatchByteLimit: 1048576, + SetEHPartitionKey: true, +} + +var errMock = errors.New("Mock Failure Path") + +type mockHub struct { + // Channel to output results + results chan *eventhub.EventBatch + // Boolean to allow us to mock failure path + fail bool } -func unsetEverything() { - os.Unsetenv("EVENTHUB_KEY_NAME") - os.Unsetenv("EVENTHUB_KEY_VALUE") +// Sendbatch is a mock of the Eventhubs SendBatch method. If m.fail is true, it returns an error. +// Otherwise, it uses the provided BatchIterator to mimic the batching behaviour in the client, and feeds +// those batches into the m.results channel. 
+func (m mockHub) SendBatch(ctx context.Context, iterator eventhub.BatchIterator, opts ...eventhub.BatchOption) error { + if m.fail { + return errMock + } + + //mimic eventhubs SendBatch behaviour loosely + batchOptions := &eventhub.BatchOptions{ + MaxSize: eventhub.DefaultMaxMessageSizeInBytes, + } - os.Unsetenv("EVENTHUB_CONNECTION_STRING") + for _, opt := range opts { + if err := opt(batchOptions); err != nil { - os.Unsetenv("AZURE_TENANT_ID") - os.Unsetenv("AZURE_CLIENT_ID") + return err + } + } - os.Unsetenv("AZURE_CLIENT_SECRET") + for !iterator.Done() { + id := uuid.NewV4() - os.Unsetenv("AZURE_CERTIFICATE_PATH") - os.Unsetenv("AZURE_CERTIFICATE_PASSWORD") + batch, err := iterator.Next(id.String(), batchOptions) + if err != nil { + return err + } + m.results <- batch + } + return nil } -func TestNewEventHubTarget_KeyValue(t *testing.T) { +// Close isn't used, it's just here to satisfy the mock API interface +func (m mockHub) Close(context.Context) error { + return nil +} + +// getResults retrieves and returns results from the mock's results channel, +// it blocks until no result have come in for the timeout period +func getResults(resultChannel chan *eventhub.EventBatch, timeout time.Duration) []*eventhub.EventBatch { + res := make([]*eventhub.EventBatch, 0) + +ResultsLoop: + for { + select { + case batch := <-resultChannel: + res = append(res, batch) + case <-time.After(1 * time.Second): + break ResultsLoop + } + } + + return res +} + +// TestProcessWithRandomPartitionKeys tests the process() function happy path when we set the eventhub partition key to a random value. +// When we explicitly set the partition key, events are batched by partition key - so random PK should result in batches of 1. 
+func TestProcessWithRandomPartitionKeys(t *testing.T) { assert := assert.New(t) - unsetEverything() + // Happy path + m := mockHub{ + results: make(chan *eventhub.EventBatch), + } + tgt := newEventHubTargetWithInterfaces(m, &cfg) - // Test that we can initialise a client with Key and Value - defer os.Unsetenv("EVENTHUB_KEY_NAME") - defer os.Unsetenv("EVENTHUB_KEY_VALUE") + // Mechanism for counting acks + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetTestMessages(10, testutil.GenRandomString(100), ackFunc) + + var twres *models.TargetWriteResult + var err error + + go func() { + twres, err = tgt.process(messages) + }() + res := getResults(m.results, 1*time.Second) + + // Check that we got correct amonut of batches + assert.Equal(10, len(res)) + // Check that we acked correct amount of times + assert.Equal(int64(10), ackOps) + // Check that we got no error and the TargetWriteResult is as expected. + assert.Nil(err) + assert.Equal(10, len(twres.Sent)) + assert.Nil(twres.Failed) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) +} + +// TestProcessFailure tests that we get correct behaviour in a failure scenario. +func TestProcessFailure(t *testing.T) { + assert := assert.New(t) + + // Unhappy path + m := mockHub{ + results: make(chan *eventhub.EventBatch), + fail: true, + } + tgtToFail := newEventHubTargetWithInterfaces(m, &cfg) + + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetTestMessages(10, testutil.GenRandomString(100), ackFunc) + + var twres *models.TargetWriteResult + var err error + + // We don't really need to spawn a goroutine here, + // however not doing so and reading results will make the test hang when misconfigured + // so for future debuggers' sanity let's do it this way. 
+	go func() {
+		twres, err = tgtToFail.process(messages)
+	}()
+
+	failRes := getResults(m.results, 500*time.Millisecond)
+
+	// Check that we got correct amount of batches
+	assert.Equal(0, len(failRes))
+	// Check that we acked correct amount of times
+	assert.Equal(int64(0), ackOps)
+	// Check that we got the desired error and the TargetWriteResult is as expected.
+	assert.NotNil(err)
+	if err != nil {
+		assert.Equal("Failed to send message batch to EventHub: Mock Failure Path", err.Error())
+	}
+	assert.Nil(twres.Sent)
+	assert.Equal(10, len(twres.Failed))
+	assert.Nil(twres.Oversized)
+	assert.Nil(twres.Invalid)
+}
+
+// TestProcessWithNoPartitionKey tests the process() function happy path when we don't set a partition key.
+func TestProcessWithNoPartitionKey(t *testing.T) {
+	assert := assert.New(t)
+
+	// Happy path
+	m := mockHub{
+		results: make(chan *eventhub.EventBatch),
+	}
+	tgt := newEventHubTargetWithInterfaces(m, &cfg)
+	tgt.setEHPartitionKey = false
+
+	// Mechanism for counting acks
+	var ackOps int64
+	ackFunc := func() {
+		atomic.AddInt64(&ackOps, 1)
+	}
+
+	messages := testutil.GetTestMessages(10, testutil.GenRandomString(100), ackFunc)
+
+	var twres *models.TargetWriteResult
+	var err error
+
+	go func() {
+		twres, err = tgt.process(messages)
+	}()
+	res := getResults(m.results, 1*time.Second)
+
+	// Check that we got correct amount of batches
+	assert.Equal(1, len(res))
+	// Check that we acked correct amount of times
+	assert.Equal(int64(10), ackOps)
+	// Check that we got no error and the TargetWriteResult is as expected.
+	assert.Nil(err)
+	assert.Equal(10, len(twres.Sent))
+	assert.Nil(twres.Failed)
+	assert.Nil(twres.Oversized)
+	assert.Nil(twres.Invalid)
+}
+
+// TestProcessBatchingByPartitionKey tests that the process function batches per partition key as expected.
+func TestProcessBatchingByPartitionKey(t *testing.T) { + assert := assert.New(t) - os.Setenv("EVENTHUB_KEY_NAME", "fake") - os.Setenv("EVENTHUB_KEY_VALUE", "fake") + // Happy path + m := mockHub{ + results: make(chan *eventhub.EventBatch), + } + tgt := newEventHubTargetWithInterfaces(m, &cfg) - tgt, err := NewEventHubTarget(&cfg) + // Mechanism for counting acks + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetTestMessages(99, testutil.GenRandomString(100), ackFunc) + + // Assign one of three evenly distributed partition keys + for i, msg := range messages { + msg.PartitionKey = fmt.Sprintf("PK%d", i%3) + } + + var twres *models.TargetWriteResult + var err error + + go func() { + twres, err = tgt.process(messages) + }() + res := getResults(m.results, 1*time.Second) + + // Check that we got correct amonut of batches + assert.Equal(3, len(res)) + // Check that we acked correct amount of times + assert.Equal(int64(99), ackOps) + // Check that we got no error and the TargetWriteResult is as expected. + assert.Nil(err) + assert.Equal(99, len(twres.Sent)) + assert.Nil(twres.Failed) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) + + // The data iteslf isn't public from the EH client, but at least we can check that the partition keys are as expected. + pksFound := make([]string, 0) + for _, r := range res { + pksFound = append(pksFound, *r.Event.PartitionKey) + } + sort.Strings(pksFound) + assert.Equal([]string{"PK0", "PK1", "PK2"}, pksFound) +} + +// TestWriteSuccess test the happy path for the Write() function. +func TestWriteSuccess(t *testing.T) { + assert := assert.New(t) + + // Happy path + m := mockHub{ + results: make(chan *eventhub.EventBatch), + } + tgt := newEventHubTargetWithInterfaces(m, &cfg) + // Max chunk size of 20 just to validate behaviour with some chunking involved. 
+	tgt.chunkMessageLimit = 20
+
+	// Mechanism for counting acks
+	var ackOps int64
+	ackFunc := func() {
+		atomic.AddInt64(&ackOps, 1)
+	}
+
+	messages := testutil.GetTestMessages(100, testutil.GenRandomString(100), ackFunc)
+
+	// Set the partition key all to the same value to ensure that batching behaviour is down to chunking rather than EH client batching (which we test elsewhere)
+	for _, msg := range messages {
+		msg.PartitionKey = "testPK"
+	}
+
+	var twres *models.TargetWriteResult
+	var err error
+
+	go func() {
+		twres, err = tgt.Write(messages)
+	}()
+	res := getResults(m.results, 1*time.Second)
+
+	// Check that we got correct amount of batches
+	assert.Equal(5, len(res))
+	// Check that we acked correct amount of times
+	assert.Equal(int64(100), ackOps)
+	// Check that we got no error and the TargetWriteResult is as expected.
+	assert.Nil(err)
+	assert.Equal(100, len(twres.Sent))
+	assert.Nil(twres.Failed)
+	assert.Nil(twres.Oversized)
+	assert.Nil(twres.Invalid)
+}
+
+// TestWriteFailure tests the unhappy path for the Write function.
+func TestWriteFailure(t *testing.T) {
+	assert := assert.New(t)
+
+	// Unhappy path
+	m := mockHub{
+		results: make(chan *eventhub.EventBatch),
+		fail:    true,
+	}
+	tgt := newEventHubTargetWithInterfaces(m, &cfg)
+	// Max chunk size of 20 just to validate behaviour with several errors
+	tgt.chunkMessageLimit = 20
+
+	// Mechanism for counting acks
+	var ackOps int64
+	ackFunc := func() {
+		atomic.AddInt64(&ackOps, 1)
+	}
+
+	messages := testutil.GetTestMessages(100, testutil.GenRandomString(100), ackFunc)
+
+	var twres *models.TargetWriteResult
+	var err error
+
+	go func() {
+		twres, err = tgt.Write(messages)
+	}()
+	res := getResults(m.results, 1*time.Second)
+
+	// Check that we got correct amount of batches
+	assert.Equal(0, len(res))
+	// Check that we acked correct amount of times
+	assert.Equal(int64(0), ackOps)
+	// Check that we got the expected error and the TargetWriteResult is as expected.
+ assert.NotNil(err) + if err != nil { + assert.True(strings.Contains(err.Error(), "Error writing messages to EventHub: 5 errors occurred:")) + assert.Equal(5, strings.Count(err.Error(), "Failed to send message batch to EventHub: Mock Failure Path")) + } + assert.Nil(twres.Sent) + assert.Equal(100, len(twres.Failed)) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) +} + +// TestNewEventHubTarget_KeyValue tests that we can initialise a client with key value credentials. +func TestNewEventHubTarget_KeyValue(t *testing.T) { + assert := assert.New(t) + + // Test that we can initialise a client with Key and Value + t.Setenv("EVENTHUB_KEY_NAME", "fake") + t.Setenv("EVENTHUB_KEY_VALUE", "fake") + + tgt, err := newEventHubTarget(&cfg) assert.Nil(err) assert.NotNil(tgt) } +// TestNewEventHubTarget_ConnString tests that we can initialise a client with connection string credentials. func TestNewEventHubTarget_ConnString(t *testing.T) { assert := assert.New(t) - unsetEverything() - // Test that we can initialise a client with Connection String - defer os.Unsetenv("EVENTHUB_CONNECTION_STRING") - os.Setenv("EVENTHUB_CONNECTION_STRING", "Endpoint=sb://test.servicebus.windows.net/;SharedAccessKeyName=fake;SharedAccessKey=fake") + t.Setenv("EVENTHUB_CONNECTION_STRING", "Endpoint=sb://test.servicebus.windows.net/;SharedAccessKeyName=fake;SharedAccessKey=fake") - tgt, err := NewEventHubTarget(&cfg) + tgt, err := newEventHubTarget(&cfg) assert.Nil(err) assert.NotNil(tgt) } +// TestNewEventHubTarget_CredentialsNotFound tests that we fail on startup when we're not provided with appropriate credential values. +func TestNewEventHubTarget_CredentialsNotFound(t *testing.T) { + assert := assert.New(t) + + tgt, err := newEventHubTarget(&cfg) + assert.NotNil(err) + if err != nil { + assert.Equal("Error initialising EventHub client: No valid combination of authentication Env vars found. 
https://pkg.go.dev/github.com/Azure/azure-event-hubs-go#NewHubWithNamespaceNameAndEnvironment", err.Error()) + } + assert.Nil(tgt) +} + +// NewEventHubTarget should fail if we can't reach EventHub, commented out this test until we look into https://github.com/snowplow-devops/stream-replicator/issues/151 +// Note that when we do so, the above tests will need to be changed to use some kind of mock +/* func TestNewEventHubTarget_Failure(t *testing.T) { assert := assert.New(t) - unsetEverything() + // Test that we can initialise a client with Key and Value + t.Setenv("EVENTHUB_KEY_NAME", "fake") + t.Setenv("EVENTHUB_KEY_VALUE", "fake") - tgt, err := NewEventHubTarget(&cfg) + tgt, err := newEventHubTarget(&cfg) assert.Equal("Error initialising EventHub client: No valid combination of authentication Env vars found. https://pkg.go.dev/github.com/Azure/azure-event-hubs-go#NewHubWithNamespaceNameAndEnvironment", err.Error()) assert.Nil(tgt) } +*/ diff --git a/pkg/target/http.go b/pkg/target/http.go index 976af5ed..28f8d9a3 100644 --- a/pkg/target/http.go +++ b/pkg/target/http.go @@ -18,9 +18,26 @@ import ( "github.com/hashicorp/go-multierror" "github.com/pkg/errors" log "github.com/sirupsen/logrus" + + "github.com/snowplow-devops/stream-replicator/pkg/common" "github.com/snowplow-devops/stream-replicator/pkg/models" ) +// HTTPTargetConfig configures the destination for records consumed +type HTTPTargetConfig struct { + HTTPURL string `hcl:"url" env:"TARGET_HTTP_URL"` + ByteLimit int `hcl:"byte_limit,optional" env:"TARGET_HTTP_BYTE_LIMIT"` + RequestTimeoutInSeconds int `hcl:"request_timeout_in_seconds,optional" env:"TARGET_HTTP_TIMEOUT_IN_SECONDS"` + ContentType string `hcl:"content_type,optional" env:"TARGET_HTTP_CONTENT_TYPE"` + Headers string `hcl:"headers,optional" env:"TARGET_HTTP_HEADERS" ` + BasicAuthUsername string `hcl:"basic_auth_username,optional" env:"TARGET_HTTP_BASICAUTH_USERNAME"` + BasicAuthPassword string `hcl:"basic_auth_password,optional" 
env:"TARGET_HTTP_BASICAUTH_PASSWORD"` + CertFile string `hcl:"cert_file,optional" env:"TARGET_HTTP_TLS_CERT_FILE"` + KeyFile string `hcl:"key_file,optional" env:"TARGET_HTTP_TLS_KEY_FILE"` + CaFile string `hcl:"ca_file,optional" env:"TARGET_HTTP_TLS_CA_FILE"` + SkipVerifyTLS bool `hcl:"skip_verify_tls,optional" env:"TARGET_HTTP_TLS_SKIP_VERIFY_TLS"` // false +} + // HTTPTarget holds a new client for writing messages to HTTP endpoints type HTTPTarget struct { client *http.Client @@ -39,7 +56,7 @@ func checkURL(str string) error { return err } if u.Scheme == "" || u.Host == "" { - return errors.New(fmt.Sprintf("Invalid url for Http target: '%s'", str)) + return errors.New(fmt.Sprintf("Invalid url for HTTP target: '%s'", str)) } return nil } @@ -70,8 +87,8 @@ func addHeadersToRequest(request *http.Request, headers map[string]string) { } -// NewHTTPTarget creates a client for writing events to HTTP -func NewHTTPTarget(httpURL string, requestTimeout int, byteLimit int, contentType string, headers string, basicAuthUsername string, basicAuthPassword string, +// newHTTPTarget creates a client for writing events to HTTP +func newHTTPTarget(httpURL string, requestTimeout int, byteLimit int, contentType string, headers string, basicAuthUsername string, basicAuthPassword string, certFile string, keyFile string, caFile string, skipVerifyTLS bool) (*HTTPTarget, error) { err := checkURL(httpURL) if err != nil { @@ -81,11 +98,16 @@ func NewHTTPTarget(httpURL string, requestTimeout int, byteLimit int, contentTyp if err1 != nil { return nil, err1 } - tlsConfig, err2 := CreateTLSConfiguration(certFile, keyFile, caFile, skipVerifyTLS) + transport := &http.Transport{} + + tlsConfig, err2 := common.CreateTLSConfiguration(certFile, keyFile, caFile, skipVerifyTLS) if err2 != nil { return nil, err2 } - transport := &http.Transport{TLSClientConfig: tlsConfig} + if tlsConfig != nil { + transport.TLSClientConfig = tlsConfig + } + return &HTTPTarget{ client: &http.Client{ Transport: transport, 
@@ -101,6 +123,57 @@ func NewHTTPTarget(httpURL string, requestTimeout int, byteLimit int, contentTyp }, nil } +// HTTPTargetConfigFunction creates HTTPTarget from HTTPTargetConfig +func HTTPTargetConfigFunction(c *HTTPTargetConfig) (*HTTPTarget, error) { + return newHTTPTarget( + c.HTTPURL, + c.RequestTimeoutInSeconds, + c.ByteLimit, + c.ContentType, + c.Headers, + c.BasicAuthUsername, + c.BasicAuthPassword, + c.CertFile, + c.KeyFile, + c.CaFile, + c.SkipVerifyTLS, + ) +} + +// The HTTPTargetAdapter type is an adapter for functions to be used as +// pluggable components for HTTP Target. It implements the Pluggable interface. +type HTTPTargetAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f HTTPTargetAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f HTTPTargetAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults for the optional parameters + // whose default is not their zero value. + cfg := &HTTPTargetConfig{ + ByteLimit: 1048576, + RequestTimeoutInSeconds: 5, + ContentType: "application/json", + } + + return cfg, nil +} + +// AdaptHTTPTargetFunc returns an HTTPTargetAdapter. 
+func AdaptHTTPTargetFunc(f func(c *HTTPTargetConfig) (*HTTPTarget, error)) HTTPTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*HTTPTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected HTTPTargetConfig") + } + + return f(cfg) + } +} + func (ht *HTTPTarget) Write(messages []*models.Message) (*models.TargetWriteResult, error) { ht.log.Debugf("Writing %d messages to topic ...", len(messages)) diff --git a/pkg/target/http_test.go b/pkg/target/http_test.go index 6a5ce820..7ab3eb86 100644 --- a/pkg/target/http_test.go +++ b/pkg/target/http_test.go @@ -17,9 +17,10 @@ import ( "sync/atomic" "testing" + "github.com/stretchr/testify/assert" + "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow-devops/stream-replicator/pkg/testutil" - "github.com/stretchr/testify/assert" ) func createTestServer(results *[][]byte, waitgroup *sync.WaitGroup) *httptest.Server { @@ -28,7 +29,7 @@ func createTestServer(results *[][]byte, waitgroup *sync.WaitGroup) *httptest.Se defer req.Body.Close() data, err := ioutil.ReadAll(req.Body) if err != nil { - panic(err) // If we hit this error, something went wrong with the test setup, so panic + panic(err) } mutex.Lock() *results = append(*results, data) @@ -68,13 +69,19 @@ func TestGetHeaders(t *testing.T) { invalid1 := `{"Max Forwards": 10}` out4, err4 := getHeaders(invalid1) - assert.Equal("Error parsing headers. Ensure that headers are provided as a JSON of string key-value pairs: json: cannot unmarshal number into Go value of type string", err4.Error()) + assert.NotNil(err4) + if err4 != nil { + assert.Equal("Error parsing headers. Ensure that headers are provided as a JSON of string key-value pairs: json: cannot unmarshal number into Go value of type string", err4.Error()) + } assert.Nil(out4) invalid2 := `[{"Max Forwards": "10"}]` out5, err5 := getHeaders(invalid2) - assert.Equal("Error parsing headers. 
Ensure that headers are provided as a JSON of string key-value pairs: json: cannot unmarshal array into Go value of type map[string]string", err5.Error()) + assert.NotNil(err5) + if err5 != nil { + assert.Equal("Error parsing headers. Ensure that headers are provided as a JSON of string key-value pairs: json: cannot unmarshal array into Go value of type map[string]string", err5.Error()) + } assert.Nil(out5) } @@ -84,7 +91,7 @@ func TestAddHeadersToRequest(t *testing.T) { req, err := http.NewRequest("POST", "abc", bytes.NewBuffer([]byte("def"))) if err != nil { - panic(err) + t.Fatal(err) } headersToAdd := map[string]string{"Max Forwards": "10", "Accept-Language": "en-US,en-IE", "Accept-Datetime": "Thu, 31 May 2007 20:35:00 GMT"} @@ -99,7 +106,7 @@ func TestAddHeadersToRequest(t *testing.T) { req2, err2 := http.NewRequest("POST", "abc", bytes.NewBuffer([]byte("def"))) if err2 != nil { - panic(err2) + t.Fatal(err2) } var noHeadersToAdd map[string]string noHeadersExpected := http.Header{} @@ -112,18 +119,24 @@ func TestAddHeadersToRequest(t *testing.T) { func TestNewHTTPTarget(t *testing.T) { assert := assert.New(t) - httpTarget, err := NewHTTPTarget("http://something", 5, 1048576, "application/json", "", "", "", "", "", "", true) + httpTarget, err := newHTTPTarget("http://something", 5, 1048576, "application/json", "", "", "", "", "", "", true) assert.Nil(err) assert.NotNil(httpTarget) - failedHTTPTarget, err1 := NewHTTPTarget("something", 5, 1048576, "application/json", "", "", "", "", "", "", true) + failedHTTPTarget, err1 := newHTTPTarget("something", 5, 1048576, "application/json", "", "", "", "", "", "", true) - assert.Equal("Invalid url for Http target: 'something'", err1.Error()) + assert.NotNil(err1) + if err1 != nil { + assert.Equal("Invalid url for HTTP target: 'something'", err1.Error()) + } assert.Nil(failedHTTPTarget) - failedHTTPTarget2, err2 := NewHTTPTarget("", 5, 1048576, "application/json", "", "", "", "", "", "", true) - assert.Equal("Invalid url 
for Http target: ''", err2.Error()) + failedHTTPTarget2, err2 := newHTTPTarget("", 5, 1048576, "application/json", "", "", "", "", "", "", true) + assert.NotNil(err2) + if err2 != nil { + assert.Equal("Invalid url for HTTP target: ''", err2.Error()) + } assert.Nil(failedHTTPTarget2) } @@ -135,9 +148,9 @@ func TestHttpWrite_Simple(t *testing.T) { server := createTestServer(&results, &wg) defer server.Close() - target, err := NewHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) + target, err := newHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) if err != nil { - panic(err) + t.Fatal(err) } var ackOps int64 @@ -169,9 +182,9 @@ func TestHttpWrite_Concurrent(t *testing.T) { server := createTestServer(&results, &wg) defer server.Close() - target, err := NewHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) + target, err := newHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) if err != nil { - panic(err) + t.Fatal(err) } mu := &sync.Mutex{} @@ -212,9 +225,9 @@ func TestHttpWrite_Failure(t *testing.T) { server := createTestServer(&results, &wg) defer server.Close() - target, err := NewHTTPTarget("http://NonexistentEndpoint", 5, 1048576, "application/json", "", "", "", "", "", "", true) + target, err := newHTTPTarget("http://NonexistentEndpoint", 5, 1048576, "application/json", "", "", "", "", "", "", true) if err != nil { - panic(err) + t.Fatal(err) } var ackOps int64 @@ -227,8 +240,9 @@ func TestHttpWrite_Failure(t *testing.T) { writeResult, err1 := target.Write(messages) assert.NotNil(err1) - - assert.Regexp("Error sending http request: 10 errors occurred:.*", err1.Error()) + if err1 != nil { + assert.Regexp("Error sending http request: 10 errors occurred:.*", err1.Error()) + } assert.Equal(10, len(writeResult.Failed)) assert.Nil(writeResult.Sent) @@ -243,9 +257,9 @@ func TestHttpWrite_Oversized(t *testing.T) { server := 
createTestServer(&results, &wg) defer server.Close() - target, err := NewHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) + target, err := newHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) if err != nil { - panic(err) + t.Fatal(err) } var ackOps int64 @@ -288,19 +302,19 @@ func TestHttpWrite_TLS(t *testing.T) { assert := assert.New(t) // Test that https requests work with manually provided certs - target, err := NewHTTPTarget("https://localhost:8999/hello", + target, err := newHTTPTarget("https://localhost:8999/hello", 5, 1048576, "application/json", "", "", "", - os.Getenv("CERT_DIR")+"/localhost.crt", - os.Getenv("CERT_DIR")+"/localhost.key", - os.Getenv("CERT_DIR")+"/rootCA.crt", + string(`../../integration/http/localhost.crt`), + string(`../../integration/http/localhost.key`), + string(`../../integration/http/rootCA.crt`), false) if err != nil { - panic(err) + t.Fatal(err) } var ackOps int64 @@ -319,20 +333,23 @@ func TestHttpWrite_TLS(t *testing.T) { ngrokAddress := getNgrokAddress() + "/hello" + os.RemoveAll(`tmp_replicator`) + // Test that https requests work for different endpoints when different certs are provided manually - target2, err2 := NewHTTPTarget(ngrokAddress, + target2, err2 := newHTTPTarget(ngrokAddress, 5, 1048576, "application/json", "", "", "", - os.Getenv("CERT_DIR")+"/localhost.crt", - os.Getenv("CERT_DIR")+"/localhost.key", - os.Getenv("CERT_DIR")+"/rootCA.crt", + string(`../../integration/http/localhost.crt`), + string(`../../integration/http/localhost.key`), + string(`../../integration/http/rootCA.crt`), false) if err2 != nil { - panic(err2) + os.RemoveAll(`tmp_replicator`) + t.Fatal(err2) } writeResult2, err3 := target2.Write(messages) @@ -342,10 +359,12 @@ func TestHttpWrite_TLS(t *testing.T) { assert.Equal(int64(20), ackOps) + os.RemoveAll(`tmp_replicator`) + // Test that https works when certs aren't manually provided // Test that https requests work for 
different endpoints when different certs are provided manually - target3, err4 := NewHTTPTarget(ngrokAddress, + target3, err4 := newHTTPTarget(ngrokAddress, 5, 1048576, "application/json", @@ -357,7 +376,8 @@ func TestHttpWrite_TLS(t *testing.T) { "", false) if err4 != nil { - panic(err4) + os.RemoveAll(`tmp_replicator`) + t.Fatal(err4) } writeResult3, err5 := target3.Write(messages) @@ -366,6 +386,7 @@ func TestHttpWrite_TLS(t *testing.T) { assert.Equal(10, len(writeResult3.Sent)) assert.Equal(int64(30), ackOps) + os.RemoveAll(`tmp_replicator`) } type ngrokAPIObject struct { diff --git a/pkg/target/kafka.go b/pkg/target/kafka.go index 1f5f4c09..f9bbc8ad 100644 --- a/pkg/target/kafka.go +++ b/pkg/target/kafka.go @@ -18,39 +18,41 @@ import ( "github.com/hashicorp/go-multierror" "github.com/pkg/errors" log "github.com/sirupsen/logrus" - "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/xdg/scram" + + "github.com/snowplow-devops/stream-replicator/pkg/common" + "github.com/snowplow-devops/stream-replicator/pkg/models" ) // KafkaConfig contains configurable options for the kafka target type KafkaConfig struct { - Brokers string - TopicName string - TargetVersion string - MaxRetries int - ByteLimit int - Compress bool - WaitForAll bool - Idempotent bool - EnableSASL bool - SASLUsername string - SASLPassword string - SASLAlgorithm string - CertFile string - KeyFile string - CaFile string - SkipVerifyTLS bool - ForceSync bool - FlushFrequency int - FlushMessages int - FlushBytes int + Brokers string `hcl:"brokers" env:"TARGET_KAFKA_BROKERS"` + TopicName string `hcl:"topic_name" env:"TARGET_KAFKA_TOPIC_NAME"` + TargetVersion string `hcl:"target_version,optional" env:"TARGET_KAFKA_TARGET_VERSION"` + MaxRetries int `hcl:"max_retries,optional" env:"TARGET_KAFKA_MAX_RETRIES"` + ByteLimit int `hcl:"byte_limit,optional" env:"TARGET_KAFKA_BYTE_LIMIT"` + Compress bool `hcl:"compress,optional" env:"TARGET_KAFKA_COMPRESS"` + WaitForAll bool 
`hcl:"wait_for_all,optional" env:"TARGET_KAFKA_WAIT_FOR_ALL"` + Idempotent bool `hcl:"idempotent,optional" env:"TARGET_KAFKA_IDEMPOTENT"` + EnableSASL bool `hcl:"enable_sasl,optional" env:"TARGET_KAFKA_ENABLE_SASL"` + SASLUsername string `hcl:"sasl_username,optional" env:"TARGET_KAFKA_SASL_USERNAME" ` + SASLPassword string `hcl:"sasl_password,optional" env:"TARGET_KAFKA_SASL_PASSWORD"` + SASLAlgorithm string `hcl:"sasl_algorithm,optional" env:"TARGET_KAFKA_SASL_ALGORITHM"` + CertFile string `hcl:"cert_file,optional" env:"TARGET_KAFKA_TLS_CERT_FILE"` + KeyFile string `hcl:"key_file,optional" env:"TARGET_KAFKA_TLS_KEY_FILE"` + CaFile string `hcl:"ca_file,optional" env:"TARGET_KAFKA_TLS_CA_FILE"` + SkipVerifyTLS bool `hcl:"skip_verify_tls,optional" env:"TARGET_KAFKA_TLS_SKIP_VERIFY_TLS"` + ForceSync bool `hcl:"force_sync_producer,optional" env:"TARGET_KAFKA_FORCE_SYNC_PRODUCER"` + FlushFrequency int `hcl:"flush_frequency,optional" env:"TARGET_KAFKA_FLUSH_FREQUENCY"` + FlushMessages int `hcl:"flush_messages,optional" env:"TARGET_KAFKA_FLUSH_MESSAGES"` + FlushBytes int `hcl:"flush_bytes,optional" env:"TARGET_KAFKA_FLUSH_BYTES"` } // KafkaTarget holds a new client for writing messages to Apache Kafka type KafkaTarget struct { syncProducer sarama.SyncProducer asyncProducer sarama.AsyncProducer - asyncResults chan *SaramaResult + asyncResults chan *saramaResult topicName string brokers string messageByteLimit int @@ -58,8 +60,8 @@ type KafkaTarget struct { log *log.Entry } -// SaramaResult holds the result of a Sarama request -type SaramaResult struct { +// saramaResult holds the result of a Sarama request +type saramaResult struct { Msg *sarama.ProducerMessage Err error } @@ -116,7 +118,7 @@ func NewKafkaTarget(cfg *KafkaConfig) (*KafkaTarget, error) { } } - tlsConfig, err := CreateTLSConfiguration(cfg.CertFile, cfg.KeyFile, cfg.CaFile, cfg.SkipVerifyTLS) + tlsConfig, err := common.CreateTLSConfiguration(cfg.CertFile, cfg.KeyFile, cfg.CaFile, cfg.SkipVerifyTLS) if err != 
nil { return nil, err } @@ -125,7 +127,7 @@ func NewKafkaTarget(cfg *KafkaConfig) (*KafkaTarget, error) { saramaConfig.Net.TLS.Enable = true } - var asyncResults chan *SaramaResult = nil + var asyncResults chan *saramaResult = nil var asyncProducer sarama.AsyncProducer = nil var syncProducer sarama.SyncProducer = nil var producerError error = nil @@ -147,17 +149,17 @@ func NewKafkaTarget(cfg *KafkaConfig) (*KafkaTarget, error) { return nil, producerError } - asyncResults = make(chan *SaramaResult) + asyncResults = make(chan *saramaResult) go func() { for err := range asyncProducer.Errors() { - asyncResults <- &SaramaResult{Msg: err.Msg, Err: err.Err} + asyncResults <- &saramaResult{Msg: err.Msg, Err: err.Err} } }() go func() { for success := range asyncProducer.Successes() { - asyncResults <- &SaramaResult{Msg: success} + asyncResults <- &saramaResult{Msg: success} } }() } else { @@ -175,6 +177,40 @@ func NewKafkaTarget(cfg *KafkaConfig) (*KafkaTarget, error) { }, producerError } +// The KafkaTargetAdapter type is an adapter for functions to be used as +// pluggable components for Kafka target. It implements the Pluggable interface. +type KafkaTargetAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f KafkaTargetAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f KafkaTargetAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults for the optional parameters + // whose default is not their zero value. + cfg := &KafkaConfig{ + MaxRetries: 10, + ByteLimit: 1048576, + SASLAlgorithm: "sha512", + } + + return cfg, nil +} + +// AdaptKafkaTargetFunc returns a KafkaTargetAdapter. 
+func AdaptKafkaTargetFunc(f func(c *KafkaConfig) (*KafkaTarget, error)) KafkaTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*KafkaConfig) + if !ok { + return nil, errors.New("invalid input, expected KafkaConfig") + } + + return f(cfg) + } +} + // Write pushes all messages to the required target func (kt *KafkaTarget) Write(messages []*models.Message) (*models.TargetWriteResult, error) { kt.log.Debugf("Writing %d messages to topic ...", len(messages)) diff --git a/pkg/target/kafka_test.go b/pkg/target/kafka_test.go index 3565cedc..9de02a2b 100644 --- a/pkg/target/kafka_test.go +++ b/pkg/target/kafka_test.go @@ -13,6 +13,7 @@ import ( "github.com/Shopify/sarama" "github.com/Shopify/sarama/mocks" log "github.com/sirupsen/logrus" + "github.com/snowplow-devops/stream-replicator/pkg/testutil" "github.com/stretchr/testify/assert" ) @@ -23,17 +24,17 @@ func SetUpMockAsyncProducer(t *testing.T) (*mocks.AsyncProducer, *KafkaTarget) { config.Producer.Return.Errors = true mp := mocks.NewAsyncProducer(t, config) - asyncResults := make(chan *SaramaResult) + asyncResults := make(chan *saramaResult) go func() { for err := range mp.Errors() { - asyncResults <- &SaramaResult{Msg: err.Msg, Err: err.Err} + asyncResults <- &saramaResult{Msg: err.Msg, Err: err.Err} } }() go func() { for success := range mp.Successes() { - asyncResults <- &SaramaResult{Msg: success} + asyncResults <- &saramaResult{Msg: success} } }() @@ -75,6 +76,9 @@ func TestKafkaTarget_AsyncWriteFailure(t *testing.T) { writeRes, err := target.Write(messages) assert.NotNil(err) + if err != nil { + assert.Equal("Error writing messages to Kafka topic: : 1 error occurred:\n\t* kafka: client has run out of available brokers to talk to\n\n", err.Error()) + } assert.NotNil(writeRes) // Check results @@ -127,6 +131,9 @@ func TestKafkaTarget_SyncWriteFailure(t *testing.T) { writeRes, err := target.Write(messages) assert.NotNil(err) + if err != nil { + assert.Equal("Error writing messages 
to Kafka topic: : 1 error occurred:\n\t* kafka: client has run out of available brokers to talk to\n\n", err.Error()) + } assert.NotNil(writeRes) // Check results diff --git a/pkg/target/kinesis.go b/pkg/target/kinesis.go index 7dcc2c1f..2dc898cb 100644 --- a/pkg/target/kinesis.go +++ b/pkg/target/kinesis.go @@ -31,6 +31,13 @@ const ( kinesisPutRecordsRequestByteLimit = kinesisPutRecordsMessageByteLimit * 5 ) +// KinesisTargetConfig configures the destination for records consumed +type KinesisTargetConfig struct { + StreamName string `hcl:"stream_name" env:"TARGET_KINESIS_STREAM_NAME"` + Region string `hcl:"region" env:"TARGET_KINESIS_REGION"` + RoleARN string `hcl:"role_arn,optional" env:"TARGET_KINESIS_ROLE_ARN"` +} + // KinesisTarget holds a new client for writing messages to kinesis type KinesisTarget struct { client kinesisiface.KinesisAPI @@ -41,20 +48,20 @@ type KinesisTarget struct { log *log.Entry } -// NewKinesisTarget creates a new client for writing messages to kinesis -func NewKinesisTarget(region string, streamName string, roleARN string) (*KinesisTarget, error) { +// newKinesisTarget creates a new client for writing messages to kinesis +func newKinesisTarget(region string, streamName string, roleARN string) (*KinesisTarget, error) { awsSession, awsConfig, awsAccountID, err := common.GetAWSSession(region, roleARN) if err != nil { return nil, err } kinesisClient := kinesis.New(awsSession, awsConfig) - return NewKinesisTargetWithInterfaces(kinesisClient, *awsAccountID, region, streamName) + return newKinesisTargetWithInterfaces(kinesisClient, *awsAccountID, region, streamName) } -// NewKinesisTargetWithInterfaces allows you to provide a Kinesis client directly to allow +// newKinesisTargetWithInterfaces allows you to provide a Kinesis client directly to allow // for mocking and localstack usage -func NewKinesisTargetWithInterfaces(client kinesisiface.KinesisAPI, awsAccountID string, region string, streamName string) (*KinesisTarget, error) { +func 
newKinesisTargetWithInterfaces(client kinesisiface.KinesisAPI, awsAccountID string, region string, streamName string) (*KinesisTarget, error) { return &KinesisTarget{ client: client, streamName: streamName, @@ -64,6 +71,40 @@ func NewKinesisTargetWithInterfaces(client kinesisiface.KinesisAPI, awsAccountID }, nil } +// KinesisTargetConfigFunction creates KinesisTarget from KinesisTargetConfig. +func KinesisTargetConfigFunction(c *KinesisTargetConfig) (*KinesisTarget, error) { + return newKinesisTarget(c.Region, c.StreamName, c.RoleARN) +} + +// The KinesisTargetAdapter type is an adapter for functions to be used as +// pluggable components for Kinesis Target. Implements the Pluggable interface. +type KinesisTargetAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f KinesisTargetAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f KinesisTargetAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults if any + cfg := &KinesisTargetConfig{} + + return cfg, nil +} + +// AdaptKinesisTargetFunc returns a KinesisTargetAdapter. +func AdaptKinesisTargetFunc(f func(c *KinesisTargetConfig) (*KinesisTarget, error)) KinesisTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*KinesisTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected KinesisTargetConfig") + } + + return f(cfg) + } +} + // Write pushes all messages to the required target // TODO: Should each put be in its own goroutine? 
func (kt *KinesisTarget) Write(messages []*models.Message) (*models.TargetWriteResult, error) { diff --git a/pkg/target/kinesis_test.go b/pkg/target/kinesis_test.go index 936d5fa4..5d7aee8f 100644 --- a/pkg/target/kinesis_test.go +++ b/pkg/target/kinesis_test.go @@ -24,7 +24,7 @@ func TestKinesisTarget_WriteFailure(t *testing.T) { client := testutil.GetAWSLocalstackKinesisClient() - target, err := NewKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, "not-exists") + target, err := newKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, "not-exists") assert.Nil(err) assert.NotNil(target) assert.Equal("arn:aws:kinesis:us-east-1:00000000000:stream/not-exists", target.GetID()) @@ -36,6 +36,9 @@ func TestKinesisTarget_WriteFailure(t *testing.T) { writeRes, err := target.Write(messages) assert.NotNil(err) + if err != nil { + assert.Equal("Error writing messages to Kinesis stream: 1 error occurred:\n\t* Failed to send message batch to Kinesis stream: ResourceNotFoundException: Stream not-exists under account 000000000000 not found.\n\n", err.Error()) + } assert.NotNil(writeRes) // Check results @@ -55,11 +58,11 @@ func TestKinesisTarget_WriteSuccess(t *testing.T) { streamName := "kinesis-stream-target-1" err := testutil.CreateAWSLocalstackKinesisStream(client, streamName) if err != nil { - panic(err) + t.Fatal(err) } defer testutil.DeleteAWSLocalstackKinesisStream(client, streamName) - target, err := NewKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, streamName) + target, err := newKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, streamName) assert.Nil(err) assert.NotNil(target) @@ -97,11 +100,11 @@ func TestKinesisTarget_WriteSuccess_OversizeBatch(t *testing.T) { streamName := "kinesis-stream-target-2" err := testutil.CreateAWSLocalstackKinesisStream(client, streamName) if err != nil { - panic(err) + t.Fatal(err) } defer 
testutil.DeleteAWSLocalstackKinesisStream(client, streamName) - target, err := NewKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, streamName) + target, err := newKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, streamName) assert.Nil(err) assert.NotNil(target) @@ -140,11 +143,11 @@ func TestKinesisTarget_WriteSuccess_OversizeRecord(t *testing.T) { streamName := "kinesis-stream-target-3" err := testutil.CreateAWSLocalstackKinesisStream(client, streamName) if err != nil { - panic(err) + t.Fatal(err) } defer testutil.DeleteAWSLocalstackKinesisStream(client, streamName) - target, err := NewKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, streamName) + target, err := newKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, streamName) assert.Nil(err) assert.NotNil(target) diff --git a/pkg/target/pubsub.go b/pkg/target/pubsub.go index 6d72eddd..2af180ce 100644 --- a/pkg/target/pubsub.go +++ b/pkg/target/pubsub.go @@ -25,6 +25,12 @@ const ( pubSubPublishMessageByteLimit = 10485760 ) +// PubSubTargetConfig configures the destination for records consumed +type PubSubTargetConfig struct { + ProjectID string `hcl:"project_id" env:"TARGET_PUBSUB_PROJECT_ID"` + TopicName string `hcl:"topic_name" env:"TARGET_PUBSUB_TOPIC_NAME"` +} + // PubSubTarget holds a new client for writing messages to Google PubSub type PubSubTarget struct { projectID string @@ -35,15 +41,15 @@ type PubSubTarget struct { log *log.Entry } -// PubSubPublishResult contains the publish result and the function to execute +// pubSubPublishResult contains the publish result and the function to execute // on success to ack the send -type PubSubPublishResult struct { +type pubSubPublishResult struct { Result *pubsub.PublishResult Message *models.Message } -// NewPubSubTarget creates a new client for writing messages to Google PubSub -func NewPubSubTarget(projectID string, topicName string) 
(*PubSubTarget, error) { +// newPubSubTarget creates a new client for writing messages to Google PubSub +func newPubSubTarget(projectID string, topicName string) (*PubSubTarget, error) { ctx := context.Background() client, err := pubsub.NewClient(ctx, projectID) @@ -59,6 +65,40 @@ func NewPubSubTarget(projectID string, topicName string) (*PubSubTarget, error) }, nil } +// PubSubTargetConfigFunction creates PubSubTarget from PubSubTargetConfig +func PubSubTargetConfigFunction(c *PubSubTargetConfig) (*PubSubTarget, error) { + return newPubSubTarget(c.ProjectID, c.TopicName) +} + +// The PubSubTargetAdapter type is an adapter for functions to be used as +// pluggable components for PubSub Target. It implements the Pluggable interface. +type PubSubTargetAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f PubSubTargetAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f PubSubTargetAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults if any + cfg := &PubSubTargetConfig{} + + return cfg, nil +} + +// AdaptPubSubTargetFunc returns a PubSubTargetAdapter. 
+func AdaptPubSubTargetFunc(f func(c *PubSubTargetConfig) (*PubSubTarget, error)) PubSubTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*PubSubTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected PubSubTargetConfig") + } + + return f(cfg) + } +} + // Write pushes all messages to the required target func (ps *PubSubTarget) Write(messages []*models.Message) (*models.TargetWriteResult, error) { ps.log.Debugf("Writing %d messages to topic ...", len(messages)) @@ -77,7 +117,7 @@ func (ps *PubSubTarget) Write(messages []*models.Message) (*models.TargetWriteRe ), err } - var results []*PubSubPublishResult + var results []*pubSubPublishResult safeMessages, oversized := models.FilterOversizedMessages( messages, @@ -99,7 +139,7 @@ func (ps *PubSubTarget) Write(messages []*models.Message) (*models.TargetWriteRe } r := ps.topic.Publish(ctx, pubSubMsg) - results = append(results, &PubSubPublishResult{ + results = append(results, &pubSubPublishResult{ Result: r, Message: msg, }) diff --git a/pkg/target/pubsub_test.go b/pkg/target/pubsub_test.go new file mode 100644 index 00000000..74ffeeaf --- /dev/null +++ b/pkg/target/pubsub_test.go @@ -0,0 +1,278 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package target + +import ( + "context" + "sort" + "strings" + "sync/atomic" + "testing" + + "cloud.google.com/go/pubsub/pstest" + "github.com/stretchr/testify/assert" + pubsubV1 "google.golang.org/genproto/googleapis/pubsub/v1" + "google.golang.org/grpc/codes" + + "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/snowplow-devops/stream-replicator/pkg/testutil" +) + +func TestPubSubTarget_WriteSuccessIntegration(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + assert := assert.New(t) + + testutil.CreatePubsubResourcesAndWrite(0, t) + defer testutil.DeletePubsubResources(t) + + pubsubTarget, err := newPubSubTarget(`project-test`, `test-topic`) + assert.NotNil(pubsubTarget) + assert.Nil(err) + assert.Equal("projects/project-test/topics/test-topic", pubsubTarget.GetID()) + pubsubTarget.Open() + defer pubsubTarget.Close() + + messages := testutil.GetTestMessages(10, "Hello Pubsub!!", nil) + + result, err := pubsubTarget.Write(messages) + + assert.Equal(int64(10), result.Total()) + assert.Equal([]*models.Message(nil), result.Failed) + assert.Equal([]*models.Message(nil), result.Oversized) + + assert.Nil(err) +} + +func TestPubSubTarget_WriteTopicUnopenedIntegration(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + assert := assert.New(t) + + testutil.CreatePubsubResourcesAndWrite(0, t) + defer testutil.DeletePubsubResources(t) + + pubsubTarget, err := newPubSubTarget(`project-test`, `test-topic`) + assert.NotNil(pubsubTarget) + assert.Nil(err) + assert.Equal("projects/project-test/topics/test-topic", pubsubTarget.GetID()) + + messages := testutil.GetTestMessages(1, ``, nil) + + _, err = pubsubTarget.Write(messages) + + assert.Error(err) +} + +func TestPubSubTarget_WithInvalidMessageIntegration(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + assert := assert.New(t) + + testutil.CreatePubsubResourcesAndWrite(0, t) + defer 
testutil.DeletePubsubResources(t) + + pubsubTarget, err := newPubSubTarget(`project-test`, `test-topic`) + assert.NotNil(pubsubTarget) + assert.Nil(err) + assert.Equal("projects/project-test/topics/test-topic", pubsubTarget.GetID()) + pubsubTarget.Open() + defer pubsubTarget.Close() + + messages := testutil.GetTestMessages(1, `test`, nil) + messages = append(messages, testutil.GetTestMessages(1, ``, nil)...) + + result, err := pubsubTarget.Write(messages) + + assert.Equal(int64(1), result.Total()) + assert.Equal(1, len(result.Invalid)) + + assert.Nil(err) +} + +// TestPubSubTarget_WriteSuccessWithMocks unit tests the happy path for PubSub target +func TestPubSubTarget_WriteSuccessWithMocks(t *testing.T) { + assert := assert.New(t) + srv, conn := testutil.InitMockPubsubServer(8563, nil, t) + defer srv.Close() + defer conn.Close() + + pubsubTarget, err := newPubSubTarget(`project-test`, `test-topic`) + assert.NotNil(pubsubTarget) + assert.Nil(err) + assert.Equal("projects/project-test/topics/test-topic", pubsubTarget.GetID()) + pubsubTarget.Open() + defer pubsubTarget.Close() + + // Mechanism for counting acks + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetSequentialTestMessages(10, ackFunc) + + twres, err := pubsubTarget.Write(messages) + // Check that the TargetWriteResult is correct + assert.Equal(int64(10), twres.SentCount) + assert.Equal(10, len(twres.Sent)) + assert.Nil(twres.Failed) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) + assert.Nil(err) + + res, pullErr := srv.GServer.Pull(context.TODO(), &pubsubV1.PullRequest{ + Subscription: "projects/project-test/subscriptions/test-sub", + MaxMessages: 15, // 15 max messages to ensure we don't miss dupes + }) + if pullErr != nil { + t.Fatal(pullErr) + } + + var results []string + + for _, msg := range res.ReceivedMessages { + results = append(results, string(msg.Message.Data)) + } + + expected := []string{"0", "1", "2", "3", "4", "5", "6", "7", 
"8", "9"} + sort.Strings(results) + assert.Equal(expected, results) + + // Check that we acked correct amount of times + assert.Equal(int64(10), ackOps) +} + +// TestPubSubTarget_WriteFailureWithMocks unit tests the unhappy path for PubSub target +func TestPubSubTarget_WriteFailureWithMocks(t *testing.T) { + assert := assert.New(t) + + // Initialise the mock server with un-retryable error + opts := []pstest.ServerReactorOption{ + pstest.WithErrorInjection("Publish", codes.PermissionDenied, "Some Error"), + } + srv, conn := testutil.InitMockPubsubServer(8563, opts, t) + defer srv.Close() + defer conn.Close() + + pubsubTarget, err := newPubSubTarget(`project-test`, `test-topic`) + assert.NotNil(pubsubTarget) + if err != nil { + t.Fatal(err) + } + assert.Equal("projects/project-test/topics/test-topic", pubsubTarget.GetID()) + pubsubTarget.Open() + defer pubsubTarget.Close() + + // Mechanism for counting acks + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetSequentialTestMessages(10, ackFunc) + + twres, err := pubsubTarget.Write(messages) + + // Check that the TargetWriteResult is correct + assert.Equal(int64(0), twres.SentCount) + assert.Equal(int64(10), twres.FailedCount) + assert.Equal(10, len(twres.Failed)) + assert.Nil(twres.Sent) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) + assert.NotNil(err) + if err != nil { + assert.True(strings.Contains(err.Error(), "Error writing messages to PubSub topic: 10 errors occurred:")) + assert.Equal(10, strings.Count(err.Error(), "rpc error: code = PermissionDenied desc = Some Error")) + } +} + +// TestPubSubTarget_WriteFailureRetryableWithMocks unit tests the unhappy path for PubSub target +// This isn't an integration test, but takes a long time so we skip on short runs +// This test demonstrates the case where retryable errors are obscured somewhat. 
+// We should try to make these more transparent: https://github.com/snowplow-devops/stream-replicator/issues/156 +func TestPubSubTarget_WriteFailureRetryableWithMocks(t *testing.T) { + if testing.Short() { + t.Skip("skipping slow test") + } + assert := assert.New(t) + + // Initialise the mock server with retryable error + opts := []pstest.ServerReactorOption{ + pstest.WithErrorInjection("Publish", codes.Unknown, "Some Error"), + } + srv, conn := testutil.InitMockPubsubServer(8563, opts, t) + defer srv.Close() + defer conn.Close() + + pubsubTarget, err := newPubSubTarget(`project-test`, `test-topic`) + assert.NotNil(pubsubTarget) + if err != nil { + t.Fatal(err) + } + assert.Equal("projects/project-test/topics/test-topic", pubsubTarget.GetID()) + pubsubTarget.Open() + defer pubsubTarget.Close() + + // Mechanism for counting acks + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetSequentialTestMessages(10, ackFunc) + + twres, err := pubsubTarget.Write(messages) + + // Check that the TargetWriteResult is correct + assert.Equal(int64(0), twres.SentCount) + assert.Equal(int64(10), twres.FailedCount) + assert.Equal(10, len(twres.Failed)) + assert.Nil(twres.Sent) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) + assert.NotNil(err) + if err != nil { + assert.True(strings.Contains(err.Error(), "Error writing messages to PubSub topic: 10 errors occurred:")) + assert.Equal(10, strings.Count(err.Error(), "context deadline exceeded")) + } +} + +// TestNewPubSubTarget_Success tests that we newPubSubTarget returns a PubSubTarget +func TestNewPubSubTarget_Success(t *testing.T) { + assert := assert.New(t) + + // This isn't needed at present, but adding it as we'll need it after https://github.com/snowplow-devops/stream-replicator/issues/151 + srv, conn := testutil.InitMockPubsubServer(8563, nil, t) + defer srv.Close() + defer conn.Close() + + pubsubTarget, err := newPubSubTarget(`project-test`, `test-topic`) + + 
assert.Nil(err) + assert.NotNil(pubsubTarget) + assert.IsType(PubSubTarget{}, *pubsubTarget) +} + +// TestnewPubSubTarget_Failure tests that we fail early when we cannot reach pubsub +// Commented out as this behaviour is not currently instrumented. +// This test serves to illustrate the desired behaviour for this issue: https://github.com/snowplow-devops/stream-replicator/issues/151 +/* +func TestnewPubSubTarget_Failure(t *testing.T) { + assert := assert.New(t) + + pubsubTarget, err := newPubSubTarget(`nonexistent-project`, `nonexistent-topic`) + + // TODO: Test for the actual error we expect, when we have instrumented failing fast + assert.NotNil(err) + assert.Nil(pubsubTarget) +} +*/ diff --git a/pkg/target/setup_test.go b/pkg/target/setup_test.go new file mode 100644 index 00000000..4f87b80a --- /dev/null +++ b/pkg/target/setup_test.go @@ -0,0 +1,18 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package target + +import ( + "os" + "testing" +) + +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} diff --git a/pkg/target/sqs.go b/pkg/target/sqs.go index 7e23ecc6..0cd365c0 100644 --- a/pkg/target/sqs.go +++ b/pkg/target/sqs.go @@ -32,6 +32,13 @@ const ( sqsSendMessageBatchByteLimit = 262144 ) +// SQSTargetConfig configures the destination for records consumed +type SQSTargetConfig struct { + QueueName string `hcl:"queue_name" env:"TARGET_SQS_QUEUE_NAME"` + Region string `hcl:"region" env:"TARGET_SQS_REGION"` + RoleARN string `hcl:"role_arn,optional" env:"TARGET_SQS_ROLE_ARN"` +} + // SQSTarget holds a new client for writing messages to sqs type SQSTarget struct { client sqsiface.SQSAPI @@ -43,20 +50,20 @@ type SQSTarget struct { log *log.Entry } -// NewSQSTarget creates a new client for writing messages to sqs -func NewSQSTarget(region string, queueName string, roleARN string) (*SQSTarget, error) { +// newSQSTarget creates a new client for writing messages to sqs +func newSQSTarget(region string, queueName string, roleARN string) (*SQSTarget, error) { awsSession, awsConfig, awsAccountID, err := common.GetAWSSession(region, roleARN) if err != nil { return nil, err } sqsClient := sqs.New(awsSession, awsConfig) - return NewSQSTargetWithInterfaces(sqsClient, *awsAccountID, region, queueName) + return newSQSTargetWithInterfaces(sqsClient, *awsAccountID, region, queueName) } -// NewSQSTargetWithInterfaces allows you to provide an SQS client directly to allow +// newSQSTargetWithInterfaces allows you to provide an SQS client directly to allow // for mocking and localstack usage -func NewSQSTargetWithInterfaces(client sqsiface.SQSAPI, awsAccountID string, region string, queueName string) (*SQSTarget, error) { +func newSQSTargetWithInterfaces(client sqsiface.SQSAPI, awsAccountID string, region string, queueName string) (*SQSTarget, error) { return &SQSTarget{ client: client, queueName: queueName, @@ -66,6 +73,40 @@ func 
NewSQSTargetWithInterfaces(client sqsiface.SQSAPI, awsAccountID string, reg }, nil } +// SQSTargetConfigFunction creates an SQSTarget from an SQSTargetConfig +func SQSTargetConfigFunction(c *SQSTargetConfig) (*SQSTarget, error) { + return newSQSTarget(c.Region, c.QueueName, c.RoleARN) +} + +// The SQSTargetAdapter type is an adapter for functions to be used as +// pluggable components for SQS Target. It implements the Pluggable interface. +type SQSTargetAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f SQSTargetAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f SQSTargetAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults if any + cfg := &SQSTargetConfig{} + + return cfg, nil +} + +// AdaptSQSTargetFunc returns a SQSTargetAdapter. +func AdaptSQSTargetFunc(f func(c *SQSTargetConfig) (*SQSTarget, error)) SQSTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*SQSTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected SQSTargetConfig") + } + + return f(cfg) + } +} + // Write pushes all messages to the required target // TODO: Should each put be in its own goroutine? 
func (st *SQSTarget) Write(messages []*models.Message) (*models.TargetWriteResult, error) { diff --git a/pkg/target/sqs_test.go b/pkg/target/sqs_test.go index b9334c7f..159c81d9 100644 --- a/pkg/target/sqs_test.go +++ b/pkg/target/sqs_test.go @@ -24,7 +24,7 @@ func TestSQSTarget_WriteFailure(t *testing.T) { client := testutil.GetAWSLocalstackSQSClient() - target, err := NewSQSTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, "not-exists") + target, err := newSQSTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, "not-exists") assert.Nil(err) assert.NotNil(target) assert.Equal("arn:aws:sqs:us-east-1:00000000000:not-exists", target.GetID()) @@ -46,12 +46,12 @@ func TestSQSTarget_WriteSuccess(t *testing.T) { queueName := "sqs-queue-target-1" queueRes, err := testutil.CreateAWSLocalstackSQSQueue(client, queueName) if err != nil { - panic(err) + t.Fatal(err) } queueURL := queueRes.QueueUrl defer testutil.DeleteAWSLocalstackSQSQueue(client, queueURL) - target, err := NewSQSTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, queueName) + target, err := newSQSTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, queueName) assert.Nil(err) assert.NotNil(target) @@ -89,12 +89,12 @@ func TestSQSTarget_WritePartialFailure_OversizeRecord(t *testing.T) { queueName := "sqs-queue-target-2" queueRes, err := testutil.CreateAWSLocalstackSQSQueue(client, queueName) if err != nil { - panic(err) + t.Fatal(err) } queueURL := queueRes.QueueUrl defer testutil.DeleteAWSLocalstackSQSQueue(client, queueURL) - target, err := NewSQSTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, queueName) + target, err := newSQSTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, queueName) assert.Nil(err) assert.NotNil(target) diff --git a/pkg/target/stdout.go b/pkg/target/stdout.go index 74b6fca7..7396ce79 100644 --- a/pkg/target/stdout.go +++ b/pkg/target/stdout.go @@ -7,6 
+7,7 @@ package target import ( + "errors" "fmt" log "github.com/sirupsen/logrus" @@ -19,13 +20,43 @@ type StdoutTarget struct { log *log.Entry } -// NewStdoutTarget creates a new client for writing messages to stdout -func NewStdoutTarget() (*StdoutTarget, error) { +// newStdoutTarget creates a new client for writing messages to stdout +func newStdoutTarget() (*StdoutTarget, error) { return &StdoutTarget{ log: log.WithFields(log.Fields{"target": "stdout"}), }, nil } +// StdoutTargetConfigFunction creates an StdoutTarget +func StdoutTargetConfigFunction() (*StdoutTarget, error) { + return newStdoutTarget() +} + +// The StdoutTargetAdapter type is an adapter for functions to be used as +// pluggable components for Stdout Target. It implements the Pluggable interface. +type StdoutTargetAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f StdoutTargetAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f StdoutTargetAdapter) ProvideDefault() (interface{}, error) { + return nil, nil +} + +// AdaptStdoutTargetFunc returns StdoutTargetAdapter. 
+func AdaptStdoutTargetFunc(f func() (*StdoutTarget, error)) StdoutTargetAdapter { + return func(i interface{}) (interface{}, error) { + if i != nil { + return nil, errors.New("unexpected configuration input for Stdout target") + } + + return f() + } +} + // Write pushes all messages to the required target func (st *StdoutTarget) Write(messages []*models.Message) (*models.TargetWriteResult, error) { st.log.Debugf("Writing %d messages to stdout ...", len(messages)) diff --git a/pkg/target/stdout_test.go b/pkg/target/stdout_test.go index 271e02fb..aefa9c39 100644 --- a/pkg/target/stdout_test.go +++ b/pkg/target/stdout_test.go @@ -18,7 +18,7 @@ import ( func TestStdoutTarget_WriteSuccess(t *testing.T) { assert := assert.New(t) - target, err := NewStdoutTarget() + target, err := newStdoutTarget() assert.NotNil(target) assert.Nil(err) assert.Equal("stdout", target.GetID()) diff --git a/pkg/target/targetutil.go b/pkg/target/targetutil.go deleted file mode 100644 index 7b920ae1..00000000 --- a/pkg/target/targetutil.go +++ /dev/null @@ -1,42 +0,0 @@ -// PROPRIETARY AND CONFIDENTIAL -// -// Unauthorized copying of this file via any medium is strictly prohibited. -// -// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
- -package target - -import ( - "crypto/tls" - "crypto/x509" - "io/ioutil" -) - -// CreateTLSConfiguration creates a TLS configuration for use in a target -func CreateTLSConfiguration(certFile string, keyFile string, caFile string, skipVerify bool) (*tls.Config, error) { - if certFile == "" || keyFile == "" { - return nil, nil - } - - cert, err := tls.LoadX509KeyPair(certFile, keyFile) - if err != nil { - return nil, err - } - - caCert, err := ioutil.ReadFile(caFile) - if err != nil { - return nil, err - } - - caCertPool, err := x509.SystemCertPool() - if err != nil { - return nil, err - } - caCertPool.AppendCertsFromPEM(caCert) - - return &tls.Config{ - Certificates: []tls.Certificate{cert}, - RootCAs: caCertPool, - InsecureSkipVerify: skipVerify, - }, nil -} diff --git a/pkg/telemetry/const.go b/pkg/telemetry/const.go new file mode 100644 index 00000000..528d1486 --- /dev/null +++ b/pkg/telemetry/const.go @@ -0,0 +1,17 @@ +package telemetry + +import ( + "time" + + "github.com/snowplow-devops/stream-replicator/cmd" +) + +var ( + interval = time.Hour + method = "POST" + protocol = "https" + url = "telemetry-g.snowplowanalytics.com" + port = "443" + applicationName = "stream-replicator" + applicationVersion = cmd.AppVersion +) diff --git a/pkg/telemetry/telemetry.go b/pkg/telemetry/telemetry.go new file mode 100644 index 00000000..c26ff6bc --- /dev/null +++ b/pkg/telemetry/telemetry.go @@ -0,0 +1,132 @@ +package telemetry + +import ( + "fmt" + "net/http" + "time" + + log "github.com/sirupsen/logrus" + conf "github.com/snowplow-devops/stream-replicator/config" + gt "github.com/snowplow/snowplow-golang-tracker/v2/tracker" + "github.com/twinj/uuid" +) + +// config holds the configuration for telemetry +type config struct { + disable bool + interval time.Duration + method string + url string + protocol string + port string + userProvidedID string + applicationName string + applicationVersion string + appGeneratedID string +} + +func newTelemetryWithConfig(cfg 
*conf.Config) *config { + return &config{ + disable: cfg.Data.DisableTelemetry, + interval: interval, + method: method, + protocol: protocol, + url: url, + port: port, + userProvidedID: cfg.Data.UserProvidedID, + applicationName: applicationName, + applicationVersion: applicationVersion, + appGeneratedID: uuid.NewV4().String(), + } +} + +func initTelemetry(telemetry *config) func() { + storage := gt.InitStorageMemory() + emitter := gt.InitEmitter( + gt.RequireCollectorUri(fmt.Sprintf(`%s:%s`, telemetry.url, telemetry.port)), + gt.OptionRequestType(telemetry.method), + gt.OptionProtocol(telemetry.protocol), + gt.OptionCallback(func(goodResults []gt.CallbackResult, badResults []gt.CallbackResult) { + for _, goodResult := range goodResults { + if goodResult.Status != http.StatusOK { + log.WithFields(log.Fields{ + "error_code": goodResult.Status, + }).Debugf("Error sending telemetry event") + return + } + } + for _, badResult := range badResults { + if badResult.Status != http.StatusOK { + log.WithFields(log.Fields{ + "error_code": badResult.Status, + }).Debugf("Error sending telemetry event") + return + } + } + log.Info(`Telemetry event sent successfully`) + }), + gt.OptionStorage(storage), + ) + + tracker := gt.InitTracker( + gt.RequireEmitter(emitter), + gt.OptionNamespace("telemetry"), + gt.OptionAppId(telemetry.applicationName), + ) + + ticker := time.NewTicker(telemetry.interval) + + stop := make(chan struct{}) + + go func() { + makeAndTrackHeartbeat(telemetry, tracker) + for { + select { + case <-ticker.C: + makeAndTrackHeartbeat(telemetry, tracker) + case <-stop: + return + } + + } + }() + + return func() { + close(stop) + } +} + +func makeAndTrackHeartbeat(telemetry *config, tracker *gt.Tracker) { + event := makeHeartbeatEvent(*telemetry) + + tracker.TrackSelfDescribingEvent(gt.SelfDescribingEvent{ + Event: event, + Timestamp: nil, + EventId: nil, + TrueTimestamp: nil, + Contexts: nil, + Subject: nil, + }) +} + +// InitTelemetryWithCollector initialises 
telemetry +func InitTelemetryWithCollector(cfg *conf.Config) func() { + telemetry := newTelemetryWithConfig(cfg) + if telemetry.disable { + return func() {} + } + return initTelemetry(telemetry) +} + +func makeHeartbeatEvent(service config) *gt.SelfDescribingJson { + payload := gt.InitPayload() + + payload.Add(`userProvidedId`, &service.userProvidedID) + payload.Add(`applicationName`, &service.applicationName) + payload.Add(`applicationVersion`, &service.applicationVersion) + payload.Add(`appGeneratedId`, &service.appGeneratedID) + + selfDescJSON := gt.InitSelfDescribingJson( + `iglu:com.snowplowanalytics.oss/oss_context/jsonschema/1-0-1`, payload.Get()) + return selfDescJSON +} diff --git a/pkg/testutil/common.go b/pkg/testutil/common.go index 0800cd33..f50aa990 100644 --- a/pkg/testutil/common.go +++ b/pkg/testutil/common.go @@ -7,6 +7,7 @@ package testutil import ( + "fmt" "math/rand" "time" @@ -45,3 +46,17 @@ func GetTestMessages(count int, body string, ackFunc func()) []*models.Message { } return messages } + +// GetSequentialTestMessages will return an array of messages ready to be used for testing +// targets and sources. Message data will be sequential integers for easier testing of accuracy, duplicates, etc. +func GetSequentialTestMessages(count int, ackFunc func()) []*models.Message { + var messages []*models.Message + for i := 0; i < count; i++ { + messages = append(messages, &models.Message{ + Data: []byte(fmt.Sprint(i)), + PartitionKey: uuid.NewV4().String(), + AckFunc: ackFunc, + }) + } + return messages +} diff --git a/pkg/testutil/pubsub_helpers.go b/pkg/testutil/pubsub_helpers.go new file mode 100644 index 00000000..46284a8a --- /dev/null +++ b/pkg/testutil/pubsub_helpers.go @@ -0,0 +1,131 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package testutil + +import ( + "context" + "fmt" + "strconv" + "sync" + "sync/atomic" + "testing" + "time" + + "cloud.google.com/go/pubsub" + "cloud.google.com/go/pubsub/pstest" + "github.com/pkg/errors" + pubsubV1 "google.golang.org/genproto/googleapis/pubsub/v1" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials/insecure" +) + +// InitMockPubsubServer creates a mock PubSub Server for testing +func InitMockPubsubServer(port int, opts []pstest.ServerReactorOption, t *testing.T) (*pstest.Server, *grpc.ClientConn) { + t.Setenv("PUBSUB_PROJECT_ID", `project-test`) + t.Setenv(`PUBSUB_EMULATOR_HOST`, fmt.Sprint("localhost:", port)) + ctx := context.Background() + srv := pstest.NewServerWithPort(port, opts...) + // Connect to the server without using TLS. + conn, err := grpc.Dial(srv.Addr, grpc.WithTransportCredentials(insecure.NewCredentials())) + if err != nil { + t.Fatal(err) + } + + _, err = srv.GServer.CreateTopic(ctx, &pubsubV1.Topic{Name: `projects/project-test/topics/test-topic`}) + if err != nil { + t.Fatal(err) + } + + _, err = srv.GServer.CreateSubscription(ctx, &pubsubV1.Subscription{ + Name: "projects/project-test/subscriptions/test-sub", + Topic: "projects/project-test/topics/test-topic", + AckDeadlineSeconds: 10, + }) + if err != nil { + t.Fatal(err) + } + + return srv, conn +} + +// CreatePubsubResourcesAndWrite creates PubSub integration resources, and writes numMsgs +func CreatePubsubResourcesAndWrite(numMsgs int, t *testing.T) { + ctx, cancelFunc := context.WithTimeout(context.Background(), 10*time.Second) + defer cancelFunc() + t.Setenv("PUBSUB_PROJECT_ID", `project-test`) + t.Setenv(`PUBSUB_EMULATOR_HOST`, "localhost:8432") + + client, err := pubsub.NewClient(ctx, `project-test`) + if err != nil { + t.Fatal(errors.Wrap(err, "Failed to create PubSub client")) + } + defer client.Close() + + topic, err := client.CreateTopic(ctx, `test-topic`) + if err != nil { + t.Fatal(errors.Wrap(err, "Failed to create pubsub topic")) + } + + _, err = 
client.CreateSubscription(ctx, `test-sub`, pubsub.SubscriptionConfig{ + Topic: topic, + AckDeadline: 10 * time.Second, + }) + if err != nil { + t.Fatal(fmt.Errorf("error creating subscription: %v", err)) + } + + var wg sync.WaitGroup + var totalErrors uint64 + + // publish n messages + for i := 0; i < numMsgs; i++ { + wg.Add(1) + result := topic.Publish(ctx, &pubsub.Message{ + Data: []byte("message #" + strconv.Itoa(i)), + }) + go func(i int, res *pubsub.PublishResult) { + defer wg.Done() + _, err := res.Get(ctx) + if err != nil { + atomic.AddUint64(&totalErrors, 1) + return + } + }(i, result) + } + + wg.Wait() +} + +// DeletePubsubResources tears down Pubsub integration resources +func DeletePubsubResources(t *testing.T) { + ctx, cancelFunc := context.WithTimeout(context.Background(), 10*time.Second) + defer cancelFunc() + t.Setenv("PUBSUB_PROJECT_ID", `project-test`) + t.Setenv(`PUBSUB_EMULATOR_HOST`, "localhost:8432") + + client, err := pubsub.NewClient(ctx, `project-test`) + if err != nil { + t.Fatal(errors.Wrap(err, "Failed to create PubSub client")) + } + defer client.Close() + + subscription := client.Subscription(`test-sub`) + err = subscription.Delete(ctx) + if err != nil { + t.Fatal(errors.Wrap(err, "Failed to delete subscription")) + } + + topic := client.Topic(`test-topic`) + if err != nil { + t.Fatal(errors.Wrap(err, "Failed to get topic")) + } + + err = topic.Delete(ctx) + if err != nil { + t.Fatal(errors.Wrap(err, "Failed to delete topic")) + } +} diff --git a/pkg/testutil/source_helpers.go b/pkg/testutil/source_helpers.go index 797ec26c..c09bfd2f 100644 --- a/pkg/testutil/source_helpers.go +++ b/pkg/testutil/source_helpers.go @@ -14,15 +14,19 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) +// TODO: Refactor to provide a means to test errors without panicing + // ReadAndReturnMessages takes a source, runs the read function, and outputs all messages found in a slice, against which we may run assertions. 
-func ReadAndReturnMessages(source sourceiface.Source) []*models.Message { +// The testWriteBuilder argument allows the test implementation to provide a write function builder, +// and the additionalOpts argument allows one to pass arguments to that builder +func ReadAndReturnMessages(source sourceiface.Source, timeToWait time.Duration, testWriteBuilder func(sourceiface.Source, chan *models.Message, interface{}) func([]*models.Message) error, additionalOpts interface{}) []*models.Message { var successfulReads []*models.Message hitError := make(chan error) msgRecieved := make(chan *models.Message) // run the read function in a goroutine, so that we can close it after a timeout sf := sourceiface.SourceFunctions{ - WriteToTarget: testWriteFuncBuilder(source, msgRecieved), + WriteToTarget: testWriteBuilder(source, msgRecieved, additionalOpts), } go runRead(hitError, source, &sf) @@ -34,7 +38,7 @@ resultLoop: case msg := <-msgRecieved: // Append messages to the result slice successfulReads = append(successfulReads, msg) - case <-time.After(3 * time.Second): + case <-time.After(timeToWait): // Stop source after 3s, and return the result slice fmt.Println("Stopping source.") source.Stop() @@ -51,8 +55,8 @@ func runRead(ch chan error, source sourceiface.Source, sf *sourceiface.SourceFun } } -// testWriteFuncBuiler returns a function which replaces the write function, outputting any messages it finds to be handled via a channel -func testWriteFuncBuilder(source sourceiface.Source, msgChan chan *models.Message) func(messages []*models.Message) error { +// DefaultTestWriteBuilder returns a function which replaces the write function, outputting any messages it finds to be handled via a channel +func DefaultTestWriteBuilder(source sourceiface.Source, msgChan chan *models.Message, additionalOpts interface{}) func(messages []*models.Message) error { return func(messages []*models.Message) error { for _, msg := range messages { // Send each message onto the channel to be appended 
to results @@ -62,3 +66,22 @@ func testWriteFuncBuilder(source sourceiface.Source, msgChan chan *models.Messag return nil } } + +// DelayedAckTestWriteBuilder delays every third ack, to test the case where some messages are processed slower than others +func DelayedAckTestWriteBuilder(source sourceiface.Source, msgChan chan *models.Message, additionalOpts interface{}) func(messages []*models.Message) error { + return func(messages []*models.Message) error { + duration, ok := additionalOpts.(time.Duration) + if !ok { + panic("DelayedAckTestWriteBuilder requires a duration to be passed as additionalOpts") + } + for i, msg := range messages { + // Send each message onto the channel to be appended to results + msgChan <- msg + if i%3 == 1 { + time.Sleep(duration) + } + msg.AckFunc() + } + return nil + } +} diff --git a/pkg/transform/engine/engine.go b/pkg/transform/engine/engine.go new file mode 100644 index 00000000..6b1cd84e --- /dev/null +++ b/pkg/transform/engine/engine.go @@ -0,0 +1,39 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package engine + +import ( + "github.com/snowplow-devops/stream-replicator/pkg/transform" +) + +// functionMaker is the interface that wraps the MakeFunction method +type functionMaker interface { + // MakeFunction returns a TransformationFunction that runs + // a given function in a runtime engine. + MakeFunction(funcName string) transform.TransformationFunction +} + +// smokeTester is the interface that wraps the SmokeTest method. +type smokeTester interface { + // SmokeTest runs a test spin of the engine trying to get as close to + // running the given function as possible. + SmokeTest(funcName string) error +} + +// Engine is the interface that groups +// functionMaker and smokeTester. 
+type Engine interface { + functionMaker + smokeTester +} + +// engineProtocol is the I/O type of Engine. +type engineProtocol struct { + FilterOut bool + PartitionKey string + Data interface{} +} diff --git a/pkg/transform/engine/engine_javascript.go b/pkg/transform/engine/engine_javascript.go new file mode 100644 index 00000000..0b32be03 --- /dev/null +++ b/pkg/transform/engine/engine_javascript.go @@ -0,0 +1,280 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package engine + +import ( + "encoding/base64" + "errors" + "fmt" + "time" + + goja "github.com/dop251/goja" + gojaparser "github.com/dop251/goja/parser" + gojson "github.com/goccy/go-json" + "github.com/mitchellh/mapstructure" + + "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/snowplow-devops/stream-replicator/pkg/transform" +) + +// JSEngineConfig configures the JavaScript Engine. +type JSEngineConfig struct { + SourceB64 string `hcl:"source_b64,optional"` + RunTimeout int `hcl:"timeout_sec,optional"` + DisableSourceMaps bool `hcl:"disable_source_maps,optional"` + SpMode bool `hcl:"snowplow_mode,optional"` +} + +// JSEngine handles the provision of a JavaScript runtime to run transformations. +type JSEngine struct { + Code *goja.Program + RunTimeout time.Duration + SpMode bool +} + +// The JSEngineAdapter type is an adapter for functions to be used as +// pluggable components for a JS Engine. It implements the Pluggable interface. +type JSEngineAdapter func(i interface{}) (interface{}, error) + +// ProvideDefault returns a JSEngineConfig with default configuration values +func (f JSEngineAdapter) ProvideDefault() (interface{}, error) { + return &JSEngineConfig{ + RunTimeout: 15, + DisableSourceMaps: true, + }, nil +} + +// Create implements the ComponentCreator interface. 
+func (f JSEngineAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// JSEngineConfigFunction creates a JSEngine from a JSEngineConfig +func JSEngineConfigFunction(c *JSEngineConfig) (*JSEngine, error) { + return NewJSEngine(&JSEngineConfig{ + SourceB64: c.SourceB64, + RunTimeout: c.RunTimeout, + DisableSourceMaps: c.DisableSourceMaps, + SpMode: c.SpMode, + }) +} + +// AdaptJSEngineFunc returns an JSEngineAdapter. +func AdaptJSEngineFunc(f func(c *JSEngineConfig) (*JSEngine, error)) JSEngineAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*JSEngineConfig) + if !ok { + return nil, errors.New("invalid input, expected JSEngineConfig") + } + + return f(cfg) + } +} + +// NewJSEngine returns a JSEngine from a JSEngineConfig. +func NewJSEngine(c *JSEngineConfig) (*JSEngine, error) { + jsSrc, err := base64.StdEncoding.DecodeString(c.SourceB64) + if err != nil { + return nil, err + } + + compiledCode, err := compileJS(string(jsSrc), c.SourceB64, c.DisableSourceMaps) + if err != nil { + return nil, err + } + + eng := &JSEngine{ + Code: compiledCode, + RunTimeout: time.Duration(c.RunTimeout) * time.Second, + SpMode: c.SpMode, + } + + return eng, nil +} + +// SmokeTest implements smokeTester. +func (e *JSEngine) SmokeTest(funcName string) error { + _, _, err := initRuntime(e, funcName) + return err +} + +// MakeFunction implements functionMaker. 
+func (e *JSEngine) MakeFunction(funcName string) transform.TransformationFunction { + + return func(message *models.Message, interState interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { + // making input + input, err := mkJSEngineInput(e, message, interState) + if err != nil { + message.SetError(fmt.Errorf("failed making input for the JavaScript runtime: %q", err.Error())) + return nil, nil, message, nil + } + + // initializing + vm, fun, err := initRuntime(e, funcName) + if err != nil { + message.SetError(fmt.Errorf("failed initializing JavaScript runtime: %q", err.Error())) + return nil, nil, message, nil + } + + timer := time.AfterFunc(e.RunTimeout, func() { + vm.Interrupt("runtime deadline exceeded") + }) + defer timer.Stop() + + // running + res, err := fun(goja.Undefined(), vm.ToValue(input)) + + if err != nil { + // runtime error counts as failure + runErr := fmt.Errorf("error running JavaScript function %q: %q", funcName, err.Error()) + message.SetError(runErr) + return nil, nil, message, nil + } + + // validating output + protocol, err := validateJSEngineOut(res.Export()) + if err != nil { + message.SetError(err) + return nil, nil, message, nil + } + + // filtering - keeping same behaviour with spEnrichedFilter + if protocol.FilterOut == true { + return nil, message, nil, nil + } + + // handling data + switch protoData := protocol.Data.(type) { + case string: + message.Data = []byte(protoData) + case map[string]interface{}: + // encode + encoded, err := gojson.MarshalWithOption(protoData, gojson.DisableHTMLEscape()) + if err != nil { + message.SetError(fmt.Errorf("error encoding message data")) + return nil, nil, message, nil + } + message.Data = encoded + default: + message.SetError(fmt.Errorf("invalid return type from JavaScript transformation; expected string or object")) + return nil, nil, message, nil + } + + // setting pk if needed + pk := protocol.PartitionKey + if pk != "" && message.PartitionKey != pk { + 
message.PartitionKey = pk + } + + return message, nil, nil, protocol + } +} + +// compileJS compiles JavaScript code. +// Since goja.New is not goroutine-safe, we spin a new runtime for every +// transformation. The reason for this function is to allow us to at least share +// the compiled code and so run only once the parse and compile steps, +// which are implicitly run by the alternative RunString. +// see also: +// https://pkg.go.dev/github.com/dop251/goja#CompileAST +func compileJS(code, name string, disableSrcMaps bool) (*goja.Program, error) { + parserOpts := make([]gojaparser.Option, 0, 1) + + if disableSrcMaps == true { + parserOpts = append(parserOpts, gojaparser.WithDisableSourceMaps) + } + + ast, err := goja.Parse(name, code, parserOpts...) + if err != nil { + return nil, err + } + + // 'use strict' + prog, err := goja.CompileAST(ast, true) + if err != nil { + return nil, err + } + + return prog, nil +} + +// initRuntime initializes and returns an instance of a JavaScript runtime. +func initRuntime(e *JSEngine, funcName string) (*goja.Runtime, goja.Callable, error) { + // goja.New returns *goja.Runtime + vm := goja.New() + timer := time.AfterFunc(e.RunTimeout, func() { + vm.Interrupt("runtime deadline exceeded") + }) + defer timer.Stop() + + _, err := vm.RunProgram(e.Code) + if err != nil { + return nil, nil, fmt.Errorf("could not load JavaScript code: %q", err) + } + + if fun, ok := goja.AssertFunction(vm.Get(funcName)); ok { + return vm, fun, nil + } + + return nil, nil, fmt.Errorf("could not assert as function: %q", funcName) +} + +// mkJSEngineInput describes the logic for constructing the input to JS engine. +// No side effects. 
+func mkJSEngineInput(e *JSEngine, message *models.Message, interState interface{}) (*engineProtocol, error) { + if interState != nil { + if i, ok := interState.(*engineProtocol); ok { + return i, nil + } + } + + candidate := &engineProtocol{ + Data: string(message.Data), + } + + if !e.SpMode { + return candidate, nil + } + + parsedMessage, err := transform.IntermediateAsSpEnrichedParsed(interState, message) + if err != nil { + // if spMode, error for non Snowplow enriched event data + return nil, err + } + + spMap, err := parsedMessage.ToMap() + if err != nil { + return nil, err + } + + candidate.Data = spMap + return candidate, nil +} + +// validateJSEngineOut validates the value returned by the js engine. +func validateJSEngineOut(output interface{}) (*engineProtocol, error) { + if output == nil { + return nil, fmt.Errorf("invalid return type from JavaScript transformation; got null or undefined") + } + + if out, ok := output.(*engineProtocol); ok { + return out, nil + } + + outMap, ok := output.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf("invalid return type from JavaScript transformation") + } + + result := &engineProtocol{} + err := mapstructure.Decode(outMap, result) + if err != nil { + return nil, fmt.Errorf("protocol violation in return value from JavaScript transformation") + } + + return result, nil +} diff --git a/pkg/transform/engine/engine_javascript_test.go b/pkg/transform/engine/engine_javascript_test.go new file mode 100644 index 00000000..0736325f --- /dev/null +++ b/pkg/transform/engine/engine_javascript_test.go @@ -0,0 +1,1529 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+// +package engine + +import ( + "encoding/base64" + "fmt" + "reflect" + "strings" + "testing" + "time" + + "github.com/davecgh/go-spew/spew" + "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/pkg/models" +) + +func TestJSLayer(t *testing.T) { + assert := assert.New(t) + + jsEngine, err := NewJSEngine(&JSEngineConfig{ + SourceB64: "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ", + RunTimeout: 15, + DisableSourceMaps: true, + SpMode: false, + }) + assert.NotNil(t, jsEngine) + assert.Nil(err) +} + +func TestJSEngineMakeFunction_SpModeFalse_IntermediateNil(t *testing.T) { + var testInterState interface{} = nil + var testSpMode = false + testCases := []struct { + Src string + Scenario string + DisableSourceMaps bool + Input *models.Message + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Src: ` +function main(x) { + return x; +} +`, + Scenario: "identity", + DisableSourceMaps: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: "asdf", + }, + Error: nil, + }, + { + Src: ` +function main(x) { + let newVal = "Hello:" + x.Data; + x.Data = newVal; + return x; +} +`, + Scenario: "concatHello", + DisableSourceMaps: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: []byte("Hello:asdf"), + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: "Hello:asdf", + }, + Error: nil, + }, + { + Src: ` +function main(x) { + x.FilterOut = false + return x; +} +`, + Scenario: "filterIn", + DisableSourceMaps: true, + 
Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: "asdf", + }, + Error: nil, + }, + { + Src: ` +function main(x) { + if (Object.prototype.toString.call(x.Data) === '[object String]') { + return { + FilterOut: true, + }; + } + + return { + FilterOut: false, + Data: x.Data + }; +} +`, + Scenario: "filterOut", + DisableSourceMaps: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Src: ` +function main(x) { + var jsonObj = JSON.parse(x.Data); + var result = JSON.stringify(jsonObj); + + return { + Data: result + }; +} +`, + Scenario: "jsonIdentity", + DisableSourceMaps: false, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSON), + }, + Error: nil, + }, + { + Src: ` +function main(x) { + var jsonObj = JSON.parse(x.Data); + + if (jsonObj.hasOwnProperty("app_id")) { + x.Data = x.Data.replace(/app_id/, 'app_id_CHANGED'); + } + + return x; +} +`, + Scenario: "jsonTransformFieldNameRegex", + DisableSourceMaps: false, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSONChanged1, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: 
&engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSONChanged1), + }, + Error: nil, + }, + { + Src: ` +function main(x) { + + var jsonObj = JSON.parse(x.Data); + + var descriptor = Object.getOwnPropertyDescriptor(jsonObj, "app_id"); + Object.defineProperty(jsonObj, "app_id_CHANGED", descriptor); + delete jsonObj["app_id"]; + + return { + Data: JSON.stringify(jsonObj) + }; +} +`, + Scenario: "jsonTransformFieldNameObj", + DisableSourceMaps: false, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSONChanged2, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSONChanged2), + }, + Error: nil, + }, + { + Src: ` +function main(x) { + var jsonObj = JSON.parse(x.Data); + + if (jsonObj.hasOwnProperty("app_id") && jsonObj["app_id"] === "filterMeOut") { + x.FilterOut = false; + } else { + x.FilterOut = true; + } + + return x; +} +`, + Scenario: "jsonFilterOut", + DisableSourceMaps: false, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Src: ` +function main(x) { + return 0; +} +`, + Scenario: "returnWrongType", + DisableSourceMaps: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from JavaScript transformation"), + }, + { + Src: ` +function main(x) {} +`, + Scenario: "returnUndefined", + DisableSourceMaps: true, + Input: 
&models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from JavaScript transformation; got null or undefined"), + }, + { + Src: ` +function main(x) { + return null; +} +`, + Scenario: "returnNull", + DisableSourceMaps: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from JavaScript transformation; got null or undefined"), + }, + { + Src: ` +function main(x) { + return x.toExponential(2); +} +`, + Scenario: "causeRuntimeError", + DisableSourceMaps: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf(`error running JavaScript function "main": "TypeError: Object has no member 'toExponential' at main`), + }, + { + Src: ` +function main(x) { + throw("Failed"); +} +`, + Scenario: "callError", + DisableSourceMaps: false, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf(`error running JavaScript function "main": "Failed at main`), + }, + { + Src: ` +function main(x) { + var now = new Date().getTime(); + while(new Date().getTime() < now + 10000) { + } +} +`, + Scenario: "sleepTenSecs", + 
DisableSourceMaps: false, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("runtime deadline exceeded"), + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + jsConfig := &JSEngineConfig{ + SourceB64: src, + RunTimeout: 5, + DisableSourceMaps: tt.DisableSourceMaps, + SpMode: testSpMode, + } + + jsEngine, err := NewJSEngine(jsConfig) + assert.NotNil(jsEngine) + if err != nil { + t.Fatalf("function NewJSEngine failed with error: %q", err.Error()) + } + + if err := jsEngine.SmokeTest("main"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := jsEngine.MakeFunction("main") + s, f, e, i := transFunction(tt.Input, testInterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareJs(t, s, tt.Expected["success"]) + assertMessagesCompareJs(t, f, tt.Expected["filtered"]) + assertMessagesCompareJs(t, e, tt.Expected["failed"]) + }) + } +} + +func TestJSEngineMakeFunction_SpModeTrue_IntermediateNil(t *testing.T) { + var testInterState interface{} = nil + var testSpMode bool = true + testCases := []struct { + Scenario string + Src string + DisableSourceMaps bool + Input *models.Message + Expected map[string]*models.Message + ExpInterState interface{} + 
Error error + }{ + { + Scenario: "identity", + Src: ` +function main(x) { + return x; +} +`, + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testJSMap, + }, + Error: nil, + }, + { + Scenario: "filtering", + Src: ` +function main(input) { + // input is an object + var spData = input.Data; + if (spData["app_id"] === "myApp") { + return input; + } + return { + FilterOut: true + }; +} +`, + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Scenario: "filteringOut_ignoresData", + Src: ` +function main(x) { + return { + FilterOut: true, + Data: "shouldNotAppear", + PartitionKey: "notThis" + }; +} +`, + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Scenario: "non_Snowplow_enriched_to_failed", + Src: ` +function main(x) { + return x; +} +`, + DisableSourceMaps: false, + Input: &models.Message{ + Data: []byte("nonSpEnrichedEvent"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("nonSpEnrichedEvent"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("Cannot parse"), + }, + { + Scenario: "return_wrong_type", + Src: ` +function main(x) { + 
return 0; +} +`, + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from JavaScript transformation"), + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + jsConfig := &JSEngineConfig{ + SourceB64: src, + RunTimeout: 5, + DisableSourceMaps: tt.DisableSourceMaps, + SpMode: testSpMode, + } + + jsEngine, err := NewJSEngine(jsConfig) + assert.NotNil(jsEngine) + if err != nil { + t.Fatalf("function NewJSEngine failed with error: %q", err.Error()) + } + + if err := jsEngine.SmokeTest("main"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := jsEngine.MakeFunction("main") + s, f, e, i := transFunction(tt.Input, testInterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareJs(t, s, tt.Expected["success"]) + assertMessagesCompareJs(t, f, tt.Expected["filtered"]) + assertMessagesCompareJs(t, e, tt.Expected["failed"]) + }) + } +} + +func TestJSEngineMakeFunction_IntermediateState_SpModeFalse(t *testing.T) { + testSpMode := false + testCases := []struct { + Scenario string + Src string + DisableSourceMaps bool + Input *models.Message + InterState interface{} + Expected map[string]*models.Message + ExpInterState 
interface{} + Error error + }{ + { + Scenario: "intermediateState_EngineProtocol_Map", + Src: ` +function main(x) { + return x; +} +`, + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + InterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testJSMap, + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testJSMap, + }, + Error: nil, + }, + { + Scenario: "intermediateState_EngineProtocol_String", + Src: ` +function main(x) { + return x; +} +`, + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + InterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSON), + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSON), + }, + Error: nil, + }, + { + Scenario: "intermediateState_not_EngineProtocol_spMode_true", + Src: ` +function main(x) { + return x; +} +`, + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSON), + }, + Error: nil, + }, + { + Scenario: "intermediateState_not_EngineProtocol_spMode_false", + Src: ` +function main(x) { + return x; +} +`, + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + InterState: 
"notEngineProtocol", + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSON), + }, + Error: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + jsConfig := &JSEngineConfig{ + SourceB64: src, + RunTimeout: 5, + DisableSourceMaps: tt.DisableSourceMaps, + SpMode: testSpMode, + } + + jsEngine, err := NewJSEngine(jsConfig) + assert.NotNil(jsEngine) + if err != nil { + t.Fatalf("function NewJSEngine failed with error: %q", err.Error()) + } + + if err := jsEngine.SmokeTest("main"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := jsEngine.MakeFunction("main") + s, f, e, i := transFunction(tt.Input, tt.InterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareJs(t, s, tt.Expected["success"]) + assertMessagesCompareJs(t, f, tt.Expected["filtered"]) + assertMessagesCompareJs(t, e, tt.Expected["failed"]) + }) + } +} + +func TestJSEngineMakeFunction_IntermediateState_SpModeTrue(t *testing.T) { + testSpMode := true + testCases := []struct { + Scenario string + Src string + DisableSourceMaps bool + Input *models.Message + InterState interface{} + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Scenario: "intermediateState_EngineProtocol_Map", 
+ Src: ` +function main(x) { + return x; +} +`, + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + InterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testJSMap, + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testJSMap, + }, + Error: nil, + }, + { + Scenario: "intermediateState_EngineProtocol_String", + Src: ` +function main(x) { + return x; +} +`, + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + InterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSON), + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSON), + }, + Error: nil, + }, + { + Scenario: "intermediateState_notEngineProtocol_notSpEnriched", + Src: ` +function main(x) { + return x; +} +`, + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("Cannot parse"), + }, + { + Scenario: "intermediateState_notEngineProtocol_SpEnriched", + Src: ` +function main(x) { + return x; +} +`, + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, 
+ "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testJSMap, + }, + Error: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + jsConfig := &JSEngineConfig{ + SourceB64: src, + RunTimeout: 5, + DisableSourceMaps: tt.DisableSourceMaps, + SpMode: testSpMode, + } + + jsEngine, err := NewJSEngine(jsConfig) + assert.NotNil(jsEngine) + if err != nil { + t.Fatalf("function NewJSEngine failed with error: %q", err.Error()) + } + + if err := jsEngine.SmokeTest("main"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := jsEngine.MakeFunction("main") + s, f, e, i := transFunction(tt.Input, tt.InterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareJs(t, s, tt.Expected["success"]) + assertMessagesCompareJs(t, f, tt.Expected["filtered"]) + assertMessagesCompareJs(t, e, tt.Expected["failed"]) + }) + } +} + +func TestJSEngineMakeFunction_SetPK(t *testing.T) { + var testInterState interface{} = nil + testCases := []struct { + Scenario string + Src string + DisableSourceMaps bool + SpMode bool + Input *models.Message + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Scenario: "onlySetPk_spModeTrue", + Src: ` +function main(x) { + x.PartitionKey = "newPk"; + return x; +} +`, + DisableSourceMaps: true, + SpMode: true, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "oldPK", + }, 
+ Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "newPk", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "newPk", + Data: testJSMap, + }, + Error: nil, + }, + { + Scenario: "onlySetPk_spModeFalse", + Src: ` +function main(x) { + x.PartitionKey = "newPk"; + return x; +} +`, + DisableSourceMaps: true, + SpMode: false, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "oldPK", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsTsv, + PartitionKey: "newPk", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "newPk", + Data: string(testJsTsv), + }, + Error: nil, + }, + { + Scenario: "filterOutIgnores", + Src: ` +function main(x) { + return { + FilterOut: true, + Data: "shouldNotAppear", + PartitionKey: "notThis" + }; +} +`, + DisableSourceMaps: true, + SpMode: true, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "oldPk", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: testJsTsv, + PartitionKey: "oldPk", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + jsConfig := &JSEngineConfig{ + SourceB64: src, + RunTimeout: 5, + DisableSourceMaps: tt.DisableSourceMaps, + SpMode: tt.SpMode, + } + + jsEngine, err := NewJSEngine(jsConfig) + assert.NotNil(jsEngine) + if err != nil { + t.Fatalf("function NewJSEngine failed with error: %q", err.Error()) + } + + if err := jsEngine.SmokeTest("main"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := jsEngine.MakeFunction("main") + s, f, e, i := transFunction(tt.Input, testInterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + 
t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareJs(t, s, tt.Expected["success"]) + assertMessagesCompareJs(t, f, tt.Expected["filtered"]) + assertMessagesCompareJs(t, e, tt.Expected["failed"]) + }) + } +} + +func TestJSEngineSmokeTest(t *testing.T) { + testCases := []struct { + Src string + FunName string + DisableSourceMaps bool + CompileError error + SmokeError error + }{ + { + Src: ` +function identity(x) { + return x; +} +`, + FunName: "identity", + DisableSourceMaps: true, + CompileError: nil, + SmokeError: nil, + }, + { + Src: ` +function notMain(x) { + return x; +} +`, + FunName: "notExists", + DisableSourceMaps: true, + CompileError: nil, + SmokeError: fmt.Errorf("could not assert as function"), + }, + { + Src: ` +function main(x) { + local y = 0; +} +`, + FunName: "syntaxError", + DisableSourceMaps: false, + CompileError: fmt.Errorf("SyntaxError"), + SmokeError: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.FunName, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + jsConfig := &JSEngineConfig{ + SourceB64: src, + RunTimeout: 5, + DisableSourceMaps: tt.DisableSourceMaps, + } + + jsEngine, compileErr := NewJSEngine(jsConfig) + + if compileErr != nil { + if tt.CompileError == nil { + t.Fatalf("got unexpected error while creating NewJSEngine: %s", compileErr.Error()) + } + + if !strings.Contains(compileErr.Error(), tt.CompileError.Error()) { + t.Errorf("NewJSEngine error mismatch\nGOT_ERROR:\n%q\n does not contain\nEXPECTED_ERROR:\n%q", + compileErr.Error(), + tt.CompileError.Error()) + } + } else { + 
assert.NotNil(jsEngine) + + smoke := jsEngine.SmokeTest(tt.FunName) + expErr := tt.SmokeError + if smoke != nil { + if expErr == nil { + t.Fatalf("got unexpected smoke-test error: %q", smoke.Error()) + } + + if !strings.Contains(smoke.Error(), expErr.Error()) { + t.Errorf("smoke error mismatch\nGOT_ERROR:\n%q\ndoes not contain\nEXPECTED_ERROR:\n%q", + smoke.Error(), + expErr.Error()) + } + } else { + assert.Nil(tt.SmokeError) + } + } + }) + } +} + +func Benchmark_JSEngine_Passthrough_DisabledSrcMaps(b *testing.B) { + b.ReportAllocs() + + srcCode := ` +function main(x) { + return x; +} +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + inputMsg := &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + } + + jsConfig := &JSEngineConfig{ + SourceB64: src, + RunTimeout: 5, + DisableSourceMaps: true, + } + + jsEngine, err := NewJSEngine(jsConfig) + if err != nil { + b.Fatalf("function NewJSEngine failed with error: %q", err.Error()) + } + + // not Smoke-Tested + transFunction := jsEngine.MakeFunction("identity") + + for n := 0; n < b.N; n++ { + transFunction(inputMsg, nil) + } +} + +func Benchmark_JSEngine_Passthrough(b *testing.B) { + b.ReportAllocs() + + srcCode := ` +function main(x) { + return x; +} +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + inputMsg := &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + } + + jsConfig := &JSEngineConfig{ + SourceB64: src, + RunTimeout: 5, + DisableSourceMaps: false, + } + + jsEngine, err := NewJSEngine(jsConfig) + if err != nil { + b.Fatalf("function NewJSEngine failed with error: %q", err.Error()) + } + + // not Smoke-Tested + transFunction := jsEngine.MakeFunction("identity") + + for n := 0; n < b.N; n++ { + transFunction(inputMsg, nil) + } +} + +func Benchmark_JSEngine_PassthroughSpMode(b *testing.B) { + b.ReportAllocs() + + srcCode := ` +function main(x) { + return x; +} +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + inputMsg := 
&models.Message{ + Data: testJsTsv, + PartitionKey: "some-test-key", + } + + jsConfig := &JSEngineConfig{ + SourceB64: src, + RunTimeout: 5, + DisableSourceMaps: false, + } + + jsEngine, err := NewJSEngine(jsConfig) + if err != nil { + b.Fatalf("function NewJSEngine failed with error: %q", err.Error()) + } + + // not Smoke-Tested + transFunction := jsEngine.MakeFunction("identity") + + for n := 0; n < b.N; n++ { + transFunction(inputMsg, nil) + } +} + +func Benchmark_JSEngine_Passthrough_JsJson(b *testing.B) { + b.ReportAllocs() + + srcCode := ` +function main(x) { + var jsonObj = JSON.parse(x.Data); + var result = JSON.stringify(jsonObj); + + return { + Data: result + }; +} +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + inputMsg := &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + } + + jsConfig := &JSEngineConfig{ + SourceB64: src, + RunTimeout: 5, + DisableSourceMaps: false, + } + + jsEngine, err := NewJSEngine(jsConfig) + if err != nil { + b.Fatalf("function NewJSEngine failed with error: %q", err.Error()) + } + + // not Smoke-Tested + transFunction := jsEngine.MakeFunction("jsonIdentity") + + for n := 0; n < b.N; n++ { + transFunction(inputMsg, nil) + } +} + +func testJSEngineFunc(c *JSEngineConfig) (*JSEngineConfig, error) { + return c, nil +} + +// Helper function to compare messages and avoid using reflect.DeepEqual +// on errors. Compares all but the error field of messages. 
+func assertMessagesCompareJs(t *testing.T, act, exp *models.Message) { + t.Helper() + + ok := false + switch { + case act == nil: + ok = exp == nil + case exp == nil: + default: + var dataOk bool + pkOk := act.PartitionKey == exp.PartitionKey + dataOk = reflect.DeepEqual(act.Data, exp.Data) + cTimeOk := reflect.DeepEqual(act.TimeCreated, exp.TimeCreated) + pTimeOk := reflect.DeepEqual(act.TimePulled, exp.TimePulled) + tTimeOk := reflect.DeepEqual(act.TimeTransformed, exp.TimeTransformed) + ackOk := reflect.DeepEqual(act.AckFunc, exp.AckFunc) + + if pkOk && dataOk && cTimeOk && pTimeOk && tTimeOk && ackOk { + ok = true + } + } + + if !ok { + t.Errorf("\nGOT:\n%s\nEXPECTED:\n%s\n", + spew.Sdump(act), + spew.Sdump(exp)) + } +} + +// helper variables +var testJsDvceCreatedTstamp, _ = time.Parse("2006-01-02 15:04:05.999", "2019-05-10 14:40:35.551") +var testJsEtlTstamp, _ = time.Parse("2006-01-02 15:04:05.999", "2019-05-10 14:40:37.436") +var testJsDerivedTstamp, _ = time.Parse("2006-01-02 15:04:05.999", "2019-05-10 14:40:35.972") +var testJsCollectorTstamp, _ = time.Parse("2006-01-02 15:04:05.999", "2019-05-10 14:40:35.972") +var testJsDvceSentTstamp, _ = time.Parse("2006-01-02 15:04:05.999", "2019-05-10 14:40:35") +var testJSMap = map[string]interface{}{ + "event_version": "1-0-0", + "app_id": "test-data<>", + "dvce_created_tstamp": testJsDvceCreatedTstamp, + "event": "unstruct", + "v_collector": "ssc-0.15.0-googlepubsub", + "network_userid": "d26822f5-52cc-4292-8f77-14ef6b7a27e2", + "event_name": "add_to_cart", + "event_vendor": "com.snowplowanalytics.snowplow", + "event_format": "jsonschema", + "platform": "pc", + "etl_tstamp": testJsEtlTstamp, + "collector_tstamp": testJsCollectorTstamp, + "user_id": "user", + "dvce_sent_tstamp": testJsDvceSentTstamp, + "derived_tstamp": testJsDerivedTstamp, + "event_id": "e9234345-f042-46ad-b1aa-424464066a33", + "v_tracker": "py-0.8.2", + "v_etl": "beam-enrich-0.2.0-common-0.36.0", + "user_ipaddress": "1.2.3.4", + 
"unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1": map[string]interface{}{ + "quantity": float64(2), + "unitPrice": 32.4, + "currency": "GBP", + "sku": "item41", + }, + "contexts_nl_basjes_yauaa_context_1": []interface{}{ + map[string]interface{}{ + "deviceName": "Unknown", + "layoutEngineVersionMajor": "??", + "operatingSystemName": "Unknown", + "deviceClass": "Unknown", + "agentVersion": "2.21.0", + "layoutEngineName": "Unknown", + "layoutEngineClass": "Unknown", + "agentName": "python-requests", + "agentNameVersion": "python-requests 2.21.0", + "operatingSystemVersion": "??", + "agentClass": "Special", + "deviceBrand": "Unknown", + "agentVersionMajor": "2", + "agentNameVersionMajor": "python-requests 2", + "operatingSystemClass": "Unknown", + "layoutEngineVersion": "??", + }, + }, + "useragent": "python-requests/2.21.0", +} + +var testJsTsv = []byte(`test-data<> pc 2019-05-10 14:40:37.436 2019-05-10 14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 1.2.3.4 d26822f5-52cc-4292-8f77-14ef6b7a27e2 {"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} python-requests/2.21.0 2019-05-10 14:40:35.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 
2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) + +// +// corresponding JSON to previous TSV +var testJsJSON = []byte(`{"app_id":"test-data<>","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user","user_ipaddress":"1.2.3.4","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) + +// json's changed and stringified inside JS +var testJsJSONChanged1 = []byte(`{"app_id_CHANGED":"test-data<>","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user","user_ipaddress":"1.2.3.4","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) + +var testJsJSONChanged2 = []byte(`{"collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user","user_ipaddress":"1.2.3.4","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2","app_id_CHANGED":"test-data<>"}`) diff --git a/pkg/transform/engine/engine_lua.go b/pkg/transform/engine/engine_lua.go new file mode 100644 index 00000000..2b6e6eca --- /dev/null +++ b/pkg/transform/engine/engine_lua.go @@ -0,0 +1,434 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package engine + +import ( + "context" + "encoding/base64" + "fmt" + "strings" + "time" + + gojson "github.com/goccy/go-json" + "github.com/mitchellh/mapstructure" + "github.com/pkg/errors" + "github.com/yuin/gluamapper" + lua "github.com/yuin/gopher-lua" + luaparse "github.com/yuin/gopher-lua/parse" + luajson "layeh.com/gopher-json" + + "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/snowplow-devops/stream-replicator/pkg/transform" +) + +// LuaEngineConfig configures the Lua Engine. +type LuaEngineConfig struct { + SourceB64 string `hcl:"source_b64"` + RunTimeout int `hcl:"timeout_sec,optional"` + Sandbox bool `hcl:"sandbox,optional"` + SpMode bool `hcl:"snowplow_mode,optional"` +} + +// LuaEngine handles the provision of a Lua runtime to run transformations. +type LuaEngine struct { + Code *lua.FunctionProto + RunTimeout time.Duration + Options *lua.Options + SpMode bool +} + +// NewLuaEngine returns a Lua Engine from a LuaEngineConfig. +func NewLuaEngine(c *LuaEngineConfig) (*LuaEngine, error) { + luaSrc, err := base64.StdEncoding.DecodeString(c.SourceB64) + if err != nil { + return nil, err + } + + compiledCode, err := compileLuaCode(string(luaSrc), c.SourceB64) + if err != nil { + return nil, err + } + + eng := &LuaEngine{ + Code: compiledCode, + RunTimeout: time.Duration(c.RunTimeout) * time.Second, + Options: &lua.Options{SkipOpenLibs: c.Sandbox}, + SpMode: c.SpMode, + } + + return eng, nil +} + +// The LuaEngineAdapter type is an adapter for functions to be used as +// pluggable components for Lua Engine. It implements the Pluggable interface. +type LuaEngineAdapter func(i interface{}) (interface{}, error) + +// AdaptLuaEngineFunc returns a LuaEngineAdapter. 
+func AdaptLuaEngineFunc(f func(c *LuaEngineConfig) (*LuaEngine, error)) LuaEngineAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*LuaEngineConfig) + if !ok { + return nil, errors.New("invalid input, expected LuaEngineConfig") + } + + return f(cfg) + } +} + +// Create implements the ComponentCreator interface. +func (f LuaEngineAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f LuaEngineAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults for the optional parameters + // whose default is not their zero value. + cfg := &LuaEngineConfig{ + RunTimeout: 5, + Sandbox: true, + } + + return cfg, nil +} + +// LuaEngineConfigFunction returns the Pluggable transformation layer implemented in Lua. +func LuaEngineConfigFunction(t *LuaEngineConfig) (*LuaEngine, error) { + return NewLuaEngine(&LuaEngineConfig{ + SourceB64: t.SourceB64, + RunTimeout: t.RunTimeout, + Sandbox: t.Sandbox, + SpMode: t.SpMode, + }) +} + +// SmokeTest implements smokeTester. +func (e *LuaEngine) SmokeTest(funcName string) error { + // setup the Lua state + L := lua.NewState(*e.Options) // L is ptr + defer L.Close() + + d := time.Now().Add(e.RunTimeout) + ctx, cancel := context.WithDeadline(context.Background(), d) + defer cancel() + L.SetContext(ctx) + + return initVM(e, L, funcName) +} + +// MakeFunction implements functionMaker. 
+func (e *LuaEngine) MakeFunction(funcName string) transform.TransformationFunction { + + return func(message *models.Message, interState interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { + // making input + input, err := mkLuaEngineInput(e, message, interState) + if err != nil { + message.SetError(fmt.Errorf("failed making input for the Lua runtime: %q", err.Error())) + return nil, nil, message, nil + } + + // setup the Lua state + L := lua.NewState(*e.Options) + defer L.Close() + + d := time.Now().Add(e.RunTimeout) + ctx, cancel := context.WithDeadline(context.Background(), d) + defer cancel() + L.SetContext(ctx) + + err = initVM(e, L, funcName) + if err != nil { + message.SetError(fmt.Errorf("failed initializing Lua runtime: %q", err.Error())) + return nil, nil, message, nil + } + + // running + err = L.CallByParam(lua.P{ + Fn: L.GetGlobal(funcName), // name of Lua function + NRet: 1, // num of return values + Protect: true, // don't panic + }, input) + if err != nil { + // runtime error counts as failure + runErr := fmt.Errorf("error running Lua function %q: %q", funcName, err.Error()) + message.SetError(runErr) + return nil, nil, message, nil + } + + // validating output + protocol, err := validateLuaEngineOut(L.Get(-1)) + if err != nil { + message.SetError(err) + return nil, nil, message, nil + } + + // filtering - keeping same behaviour with spEnrichedFilter + if protocol.FilterOut == true { + return nil, message, nil, nil + } + + // handling data + encode := false + switch protoData := protocol.Data.(type) { + case string: + message.Data = []byte(protoData) + case map[string]interface{}: + encode = true + case map[interface{}]interface{}: + encode = true + siData := toStringIfaceMap(protoData) + protocol.Data = siData + default: + message.SetError(fmt.Errorf("invalid return type from Lua transformation; expected string or table")) + return nil, nil, message, nil + } + + // encode + if encode { + encoded, err := 
gojson.MarshalWithOption(protocol.Data, gojson.DisableHTMLEscape()) + if err != nil { + message.SetError(fmt.Errorf("error encoding message data")) + return nil, nil, message, nil + } + message.Data = encoded + } + + // setting pk if needed + pk := protocol.PartitionKey + if pk != "" && message.PartitionKey != pk { + message.PartitionKey = pk + } + + return message, nil, nil, protocol + + } +} + +// compileLuaCode compiles lua code. +// Since lua.NewState is not goroutine-safe, we spin a new state for every +// transformation. The reason for this function is to allow us to at least share +// the compiled bytecode (which is read-only and thus safe) and so run only once +// the load, parse and compile steps, which are implicitly run by the alternative +// lua.DoString. +// see also: +// https://github.com/yuin/gopher-lua/pull/193 +// https://github.com/yuin/gopher-lua#sharing-lua-byte-code-between-lstates +func compileLuaCode(code, name string) (*lua.FunctionProto, error) { + reader := strings.NewReader(code) + chunk, err := luaparse.Parse(reader, code) + if err != nil { + return nil, err + } + proto, err := lua.Compile(chunk, name) + if err != nil { + return nil, err + } + return proto, nil +} + +// loadLuaCode loads compiled Lua code into a lua state +func loadLuaCode(ls *lua.LState, proto *lua.FunctionProto) error { + lfunc := ls.NewFunctionFromProto(proto) + ls.Push(lfunc) + + // https://github.com/yuin/gopher-lua/blob/f4c35e4016d9d8580b007ebaeb68ecd8e0b09f1c/_state.go#L1811 + return ls.PCall(0, lua.MultRet, nil) +} + +// initVM performs the initialization steps for a Lua state. 
+func initVM(e *LuaEngine, L *lua.LState, funcName string) error { + if e.Options.SkipOpenLibs == false { + luajson.Preload(L) + } + + err := loadLuaCode(L, e.Code) + if err != nil { + return fmt.Errorf("could not load lua code: %q", err) + } + + if _, ok := L.GetGlobal(funcName).(*lua.LFunction); !ok { + return fmt.Errorf("global Lua function not found: %q", funcName) + } + + return nil +} + +// mkLuaEngineInput describes the process of constructing input to Lua engine. +// No side effects. +func mkLuaEngineInput(e *LuaEngine, message *models.Message, interState interface{}) (*lua.LTable, error) { + if interState != nil { + if i, ok := interState.(*engineProtocol); ok { + return toLuaTable(i) + } + } + + candidate := &engineProtocol{ + Data: string(message.Data), + } + + if !e.SpMode { + return toLuaTable(candidate) + } + + parsedMessage, err := transform.IntermediateAsSpEnrichedParsed(interState, message) + if err != nil { + // if spMode, error for non Snowplow enriched event data + return nil, err + } + + spMap, err := parsedMessage.ToMap() + if err != nil { + return nil, err + } + candidate.Data = spMap + + return toLuaTable(candidate) +} + +// toLuaTable +func toLuaTable(p *engineProtocol) (*lua.LTable, error) { + var tmpMap map[string]interface{} + + err := mapstructure.Decode(p, &tmpMap) + if err != nil { + return nil, fmt.Errorf("error decoding to map") + } + + return mapToLTable(tmpMap) +} + +// mapToLTable converts a Go map to a lua table +// see: https://github.com/yuin/gopher-lua/issues/160#issuecomment-447608033 +func mapToLTable(m map[string]interface{}) (*lua.LTable, error) { + timeLayout := "2006-01-02T15:04:05.999Z07:00" + + // Main table pointer + ltbl := &lua.LTable{} + + // Loop map + for key, val := range m { + + switch val.(type) { + case float64: + ltbl.RawSetString(key, lua.LNumber(val.(float64))) + case int64: + ltbl.RawSetString(key, lua.LNumber(val.(int64))) + case string: + ltbl.RawSetString(key, lua.LString(val.(string))) + case bool: + 
ltbl.RawSetString(key, lua.LBool(val.(bool))) + case []byte: + ltbl.RawSetString(key, lua.LString(string(val.([]byte)))) + case map[string]interface{}: + // Get table from map + tmp, err := mapToLTable(val.(map[string]interface{})) + if err != nil { + return nil, err + } + ltbl.RawSetString(key, tmp) + case time.Time: + t := val.(time.Time).Format(timeLayout) + ltbl.RawSetString(key, lua.LString(t)) + case []map[string]interface{}: + // Create slice table + sliceTable := &lua.LTable{} + for _, vv := range val.([]map[string]interface{}) { + next, err := mapToLTable(vv) + if err != nil { + return nil, err + } + sliceTable.Append(next) + } + ltbl.RawSetString(key, sliceTable) + case []interface{}: + // Create slice table + sliceTable := &lua.LTable{} + for _, vv := range val.([]interface{}) { + switch vv.(type) { + case map[string]interface{}: + // Convert map to table + m, err := mapToLTable(vv.(map[string]interface{})) + if err != nil { + return nil, err + } + sliceTable.Append(m) + case float64: + sliceTable.Append(lua.LNumber(vv.(float64))) + case string: + sliceTable.Append(lua.LString(vv.(string))) + case bool: + sliceTable.Append(lua.LBool(vv.(bool))) + } + } + + // Append to main table + ltbl.RawSetString(key, sliceTable) + } + } + + return ltbl, nil +} + +// validateLuaEngineOut validates the value returned from the Lua engine is a +// Lua Table (lua.LTable) and that it maps to engineProtocol. 
+func validateLuaEngineOut(output interface{}) (*engineProtocol, error) { + if output == nil { + return nil, fmt.Errorf("invalid return type from Lua transformation; got nil") + } + + if luaTablePtr, ok := output.(*lua.LTable); ok { + result := &engineProtocol{} + luaMapper := gluamapper.NewMapper(gluamapper.Option{ + NameFunc: gluamapper.Id, + }) + + err := luaMapper.Map(luaTablePtr, result) + if err != nil { + return nil, fmt.Errorf("protocol violation in return value from Lua transformation") + } + + return result, nil + } + + return nil, fmt.Errorf("invalid return type from Lua transformation; expected Lua Table") +} + +// toStringIfaceMap converts map[interface{}]interface{} to map[string]interface. +// This function is used in Lua Engine because of how gluamapper actually maps +// lua.LTable to Go map. +// see:https://github.com/yuin/gluamapper/blob/d836955830e75240d46ce9f0e6d148d94f2e1d3a/gluamapper.go#L44 +func toStringIfaceMap(interfaceMap map[interface{}]interface{}) map[string]interface{} { + result := make(map[string]interface{}) + for key, val := range interfaceMap { + result[fmt.Sprintf("%v", key)] = doValue(val) + } + + return result +} + +// doValue is a helper for toStringIfaceMap, to cover for values that are +// []interface{} and map[interface{}]interface. +func doValue(value interface{}) interface{} { + switch value := value.(type) { + case []interface{}: + return doIfaceSlice(value) + case map[interface{}]interface{}: + return toStringIfaceMap(value) + default: + return value + } +} + +// doIfaceSlice is a helper for doValue to handle interface slices. 
+func doIfaceSlice(iSlice []interface{}) []interface{} { + result := make([]interface{}, len(iSlice)) + for i, val := range iSlice { + result[i] = doValue(val) + } + + return result +} diff --git a/pkg/transform/engine/engine_lua_test.go b/pkg/transform/engine/engine_lua_test.go new file mode 100644 index 00000000..e7dfdd09 --- /dev/null +++ b/pkg/transform/engine/engine_lua_test.go @@ -0,0 +1,1790 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package engine + +import ( + "encoding/base64" + "fmt" + "reflect" + "strings" + "testing" + + "github.com/davecgh/go-spew/spew" + "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/snowplow-devops/stream-replicator/pkg/transform" +) + +func TestLuaLayer(t *testing.T) { + assert := assert.New(t) + layer, err := LuaEngineConfigFunction(&LuaEngineConfig{ + SourceB64: "CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ", + RunTimeout: 5, + Sandbox: false, + SpMode: false, + }) + assert.Nil(err) + assert.NotNil(layer) +} + +func TestLuaEngineMakeFunction_SpModeFalse_IntermediateNil(t *testing.T) { + var testInterState interface{} = nil + var testSpMode bool = false + testCases := []struct { + Src string + Scenario string + Sandbox bool + Input *models.Message + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Src: ` +function main(x) + return x +end +`, + Scenario: "main", + Sandbox: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: "asdf", + }, + Error: nil, + }, + { + Src: ` 
+function main(x) + x.Data = "Hello:" .. x.Data + return x +end +`, + Scenario: "main", + Sandbox: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: []byte("Hello:asdf"), + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: "Hello:asdf", + }, + Error: nil, + }, + { + Src: ` +function main(x) + x.FilterOut = false + return x +end +`, + Scenario: "main", + Sandbox: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: "asdf", + }, + Error: nil, + }, + { + Src: ` +function main(x) + if type(x.Data) == "string" then + return { FilterOut = true } + end + return { FilterOut = false } +end +`, + Scenario: "main", + Sandbox: false, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Src: ` +local json = require("json") + +function main(x) + local dat = x["Data"] + local jsonObj, decodeErr = json.decode(dat) + if decodeErr then error(decodeErr) end + + local result, encodeErr = json.encode(jsonObj) + if encodeErr then error(encodeErr) end + + x.Data = result + return x +end +`, + Scenario: "main", + Sandbox: false, + Input: &models.Message{ + Data: snowplowJSON1, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: snowplowJSON1, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + 
}, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(snowplowJSON1), + }, + Error: nil, + }, + { + Src: ` +local json = require("json") + +function main(x) + local data = x["Data"] + local jsonObj, decodeErr = json.decode(data) + if decodeErr then error(decodeErr) end + + jsonObj["app_id_CHANGED"] = jsonObj["app_id"] + jsonObj["app_id"] = nil + + local result, encodeErr = json.encode(jsonObj) + if encodeErr then error(encodeErr) end + + return { Data = result } +end +`, + Scenario: "main", + Sandbox: false, + Input: &models.Message{ + Data: snowplowJSON1, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: snowplowJSON1ChangedLua, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(snowplowJSON1ChangedLua), + }, + Error: nil, + }, + { + Src: ` +local json = require("json") + +function main(x) + local jsonObj, decodeErr = json.decode(x["Data"]) + if decodeErr then error(decodeErr) end + + if jsonObj["app_id"] == "filterMeOut" then + return { FilterOut = false, Data = x["Data"] } + else + return { FilterOut = true } + end +end +`, + Scenario: "main", + Sandbox: false, + Input: &models.Message{ + Data: snowplowJSON1, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: snowplowJSON1, + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Src: ` +function main(x) + return 0 +end +`, + Scenario: "main", + Sandbox: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from Lua transformation; 
expected Lua Table"), + }, + { + Src: ` +function main(x) +end +`, + Scenario: "main", + Sandbox: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from Lua transformation; expected Lua Table"), + }, + { + Src: ` +function main(x) + return nil +end +`, + Scenario: "main", + Sandbox: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from Lua transformation; expected Lua Table"), + }, + { + Src: ` +function main(x) + return 2 * x +end +`, + Scenario: "main", + Sandbox: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("error running Lua function \"main\""), + }, + { + Src: ` +function main(x) + error("Failed") +end +`, + Scenario: "main", + Sandbox: false, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("error running Lua function \"main\""), + }, + { + Src: ` +local clock = os.clock + +function main(x) + local t0 = clock() + while clock() - t0 <= 10 do end +end +`, + Scenario: "main", + Sandbox: false, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: 
"some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("context deadline exceeded"), + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + luaConfig := &LuaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: tt.Sandbox, + SpMode: testSpMode, + } + + luaEngine, err := NewLuaEngine(luaConfig) + assert.NotNil(luaEngine) + if err != nil { + t.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) + } + + if err := luaEngine.SmokeTest(tt.Scenario); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction(tt.Scenario) + s, f, e, i := transFunction(tt.Input, testInterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareLua(t, s, tt.Expected["success"]) + assertMessagesCompareLua(t, f, tt.Expected["filtered"]) + assertMessagesCompareLua(t, e, tt.Expected["failed"]) + }) + } +} + +func TestLuaEngineMakeFunction_SpModeTrue_IntermediateNil(t *testing.T) { + var testInterState interface{} = nil + var testSpMode bool = true + testCases := []struct { + Scenario string + Src string + Sandbox bool + Input *models.Message + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Scenario: "main", + Src: ` +function main(x) + return x +end +`, + Sandbox: 
false, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testLuaMap, + }, + Error: nil, + }, + { + Scenario: "filtering", + Src: ` +function main(input) + -- input is a lua table + local spData = input["Data"] + if spData["app_id"] == "myApp" then + return input; + end + return { FilterOut = true } +end +`, + Sandbox: false, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Scenario: "filteringOut_ignoresData", + Src: ` +function main(x) + local ret = { + FilterOut = true, + Data = "shouldNotAppear", + PartitionKey = "notThis" + } + return ret +end +`, + Sandbox: false, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Scenario: "non_Snowplow_enriched_to_failed", + Src: ` +function main(x) + return x +end +`, + Sandbox: false, + Input: &models.Message{ + Data: []byte("nonSpEnrichedEvent"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("nonSpEnrichedEvent"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("Cannot parse"), + }, + { + Scenario: "return_wrong_type", + Src: ` +function main(x) + return 0 +end +`, + Sandbox: true, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "some-test-key", + 
}, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from Lua transformation; expected Lua Table"), + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + luaConfig := &LuaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: tt.Sandbox, + SpMode: testSpMode, + } + + luaEngine, err := NewLuaEngine(luaConfig) + assert.NotNil(luaEngine) + if err != nil { + t.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) + } + + if err := luaEngine.SmokeTest(`main`); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction(`main`) + s, f, e, i := transFunction(tt.Input, testInterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareLua(t, s, tt.Expected["success"]) + assertMessagesCompareLua(t, f, tt.Expected["filtered"]) + assertMessagesCompareLua(t, e, tt.Expected["failed"]) + }) + } +} + +func TestLuaEngineMakeFunction_IntermediateState_SpModeFalse(t *testing.T) { + testSpMode := false + testCases := []struct { + Scenario string + Src string + Sandbox bool + Input *models.Message + InterState interface{} + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Scenario: "intermediateState_EngineProtocol_Map", + Src: ` +function main(x) + return x +end 
+`, + Sandbox: true, + Input: &models.Message{ + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + InterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testLuaMap, + }, + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testLuaMap, + }, + Error: nil, + }, + { + Scenario: "intermediateState_EngineProtocol_String", + Src: ` +function main(x) + return x +end +`, + Sandbox: true, + Input: &models.Message{ + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + InterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testLuaJSON), + }, + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testLuaJSON), + }, + Error: nil, + }, + { + Scenario: "intermediateState_not_EngineProtocol_nonSpEnriched", + Src: ` +function main(x) + return x; +end +`, + Sandbox: true, + Input: &models.Message{ + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testLuaJSON), + }, + Error: nil, + }, + { + Scenario: "intermediateState_not_EngineProtocol_SpEnriched", + Src: ` +function main(x) + return x; +end +`, + Sandbox: true, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": { + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + "filtered": nil, + 
"failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testLuaTsv), + }, + Error: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + luaConfig := &LuaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: tt.Sandbox, + SpMode: testSpMode, + } + + luaEngine, err := NewLuaEngine(luaConfig) + assert.NotNil(luaEngine) + if err != nil { + t.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) + } + + if err := luaEngine.SmokeTest(`main`); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction(`main`) + s, f, e, i := transFunction(tt.Input, tt.InterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareLua(t, s, tt.Expected["success"]) + assertMessagesCompareLua(t, f, tt.Expected["filtered"]) + assertMessagesCompareLua(t, e, tt.Expected["failed"]) + }) + } +} + +func TestLuaEngineMakeFunction_IntermediateState_SpModeTrue(t *testing.T) { + testSpMode := true + + testCases := []struct { + Scenario string + Src string + Sandbox bool + Input *models.Message + InterState interface{} + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Scenario: "intermediateState_EngineProtocol_Map", + Src: ` +function main(x) + return x +end +`, + Sandbox: true, + Input: &models.Message{ + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + InterState: 
&engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testLuaMap, + }, + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testLuaMap, + }, + Error: nil, + }, + { + Scenario: "intermediateState_EngineProtocol_String", + Src: ` +function main(x) + return x +end +`, + Sandbox: true, + Input: &models.Message{ + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + InterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testLuaJSON), + }, + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testLuaJSON), + }, + Error: nil, + }, + { + Scenario: "intermediateState_notEngineProtocol_notSpEnriched", + Src: ` +function main(x) + return x +end +`, + Sandbox: true, + Input: &models.Message{ + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("Cannot parse"), + }, + { + Scenario: "intermediateState_notEngineProtocol_SpEnriched", + Src: ` +function main(x) + return x +end +`, + Sandbox: true, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testLuaMap, + }, + Error: nil, + }, + } + + for _, tt := range testCases { + 
t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + luaConfig := &LuaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: tt.Sandbox, + SpMode: testSpMode, + } + + luaEngine, err := NewLuaEngine(luaConfig) + assert.NotNil(luaEngine) + if err != nil { + t.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) + } + + if err := luaEngine.SmokeTest(`main`); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction(`main`) + s, f, e, i := transFunction(tt.Input, tt.InterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareLua(t, s, tt.Expected["success"]) + assertMessagesCompareLua(t, f, tt.Expected["filtered"]) + assertMessagesCompareLua(t, e, tt.Expected["failed"]) + }) + } +} + +func TestLuaEngineMakeFunction_SetPK(t *testing.T) { + var testInterState interface{} = nil + testCases := []struct { + Scenario string + Src string + Sandbox bool + SpMode bool + Input *models.Message + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Scenario: "onlySetPk_spModeTrue", + Src: ` +function main(x) + x["PartitionKey"] = "newPk" + return x +end +`, + Sandbox: true, + SpMode: true, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "oldPK", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "newPk", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + 
PartitionKey: "newPk", + Data: testLuaMap, + }, + Error: nil, + }, + { + Scenario: "onlySetPk_spModeFalse", + Src: ` +function main(x) + x["PartitionKey"] = "newPk" + return x +end +`, + Sandbox: true, + SpMode: false, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "oldPK", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testLuaTsv, + PartitionKey: "newPk", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &engineProtocol{ + FilterOut: false, + PartitionKey: "newPk", + Data: string(testLuaTsv), + }, + Error: nil, + }, + { + Scenario: "filterOutIgnores", + Src: ` +function main(x) + local ret = { + FilterOut = true, + Data = "shouldNotAppear", + PartitionKey = "notThis" + } + return ret +end +`, + Sandbox: true, + SpMode: true, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "oldPk", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: testLuaTsv, + PartitionKey: "oldPk", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + luaConfig := &LuaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: tt.Sandbox, + SpMode: tt.SpMode, + } + + luaEngine, err := NewLuaEngine(luaConfig) + assert.NotNil(luaEngine) + if err != nil { + t.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) + } + + if err := luaEngine.SmokeTest(`main`); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction(`main`) + s, f, e, i := transFunction(tt.Input, testInterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", 
gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareLua(t, s, tt.Expected["success"]) + assertMessagesCompareLua(t, f, tt.Expected["filtered"]) + assertMessagesCompareLua(t, e, tt.Expected["failed"]) + }) + } +} + +func TestLuaEngineSmokeTest(t *testing.T) { + testCases := []struct { + Src string + FunName string + Sandbox bool + CompileError error + SmokeError error + }{ + { + Src: ` +function main(x) + return x +end +`, + FunName: "main", + Sandbox: true, + CompileError: nil, + SmokeError: nil, + }, + { + Src: ` +function wrong_name(x) + return "something" +end +`, + FunName: "main", + Sandbox: true, + CompileError: nil, + SmokeError: fmt.Errorf("global Lua function not found: \"main\""), + }, + { + Src: ` +local json = require("json") +local clock = os.clock +`, + FunName: "notCalledMissingLibs", + Sandbox: true, + CompileError: nil, + SmokeError: fmt.Errorf("could not load lua code"), + }, + { + Src: ` +function main(x) + loca y = 0 +end +`, + FunName: "syntaxError", + Sandbox: false, + CompileError: fmt.Errorf("error"), + SmokeError: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.FunName, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + luaConfig := &LuaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: tt.Sandbox, + } + + luaEngine, compileErr := NewLuaEngine(luaConfig) + + if compileErr != nil { + if tt.CompileError == nil { + t.Fatalf("got unexpected error while creating NewLuaEngine: %s", compileErr.Error()) + } + + if !strings.Contains(compileErr.Error(), tt.CompileError.Error()) { + t.Errorf("NewLuaEngine error mismatch\nGOT_ERROR:\n%q\n does not contain\nEXPECTED_ERROR:\n%q", + compileErr.Error(), + tt.CompileError.Error()) + } + } else { + assert.NotNil(luaEngine) + + smoke := luaEngine.SmokeTest(tt.FunName) 
+ expErr := tt.SmokeError + if smoke != nil { + if expErr == nil { + t.Fatalf("got unexpected smoke-test error: %q", smoke.Error()) + } + + if !strings.Contains(smoke.Error(), expErr.Error()) { + t.Errorf("smoke error mismatch\nGOT_ERROR:\n%q\ndoes not contain\nEXPECTED_ERROR:\n%q", + smoke.Error(), + expErr.Error()) + } + } else { + assert.Nil(tt.SmokeError) + } + } + }) + } +} + +func TestLuaEngineWithBuiltins(t *testing.T) { + var expectedGood = []*models.Message{ + { + Data: snowplowJSON1, + PartitionKey: "test-data1", + }, + { + Data: snowplowJSON2, + PartitionKey: "test-data2", + }, + { + Data: snowplowJSON3, + PartitionKey: "test-data3", + }, + } + + srcCode := ` +function main(x) + return x +end +` + funcname := "main" + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + luaConfig := &LuaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: true, + } + + luaEngine, err := NewLuaEngine(luaConfig) + if err != nil { + t.Fatalf("NewLuaEngine failed with error: %q", err) + } + + if err := luaEngine.SmokeTest(funcname); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + luaFunc := luaEngine.MakeFunction(funcname) + setPkToAppID := transform.NewSpEnrichedSetPkFunction("app_id") + spEnrichedToJSON := transform.SpEnrichedToJSON + + testCases := []struct { + Name string + Transformation transform.TransformationApplyFunction + }{ + { + Transformation: transform.NewTransformation( + setPkToAppID, + spEnrichedToJSON, + luaFunc, + ), + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + transformMultiple := tt.Transformation + + result := transformMultiple(messages) + assert.NotNil(result) + for i, res := range result.Result { + exp := expectedGood[i] + assert.JSONEq(string(res.Data), string(exp.Data)) + assert.Equal(res.PartitionKey, exp.PartitionKey) + + } + }) + } + +} + +func TestLuaEngineWithBuiltinsSpModeFalse(t *testing.T) { + srcCode := ` +function main(x) + 
return x +end + +function setPk(x) + x["PartitionKey"] = "testKey" + return x +end +` + // Lua + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + luaConfig := &LuaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: true, + SpMode: false, + } + + luaEngine, err := NewLuaEngine(luaConfig) + if err != nil { + t.Fatalf("NewLuaEngine failed with error: %q", err) + } + + if err := luaEngine.SmokeTest("main"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + luaFuncID := luaEngine.MakeFunction("main") + luaFuncPk := luaEngine.MakeFunction("setPk") + + // Builtins + setPkToAppID := transform.NewSpEnrichedSetPkFunction("app_id") + spEnrichedToJSON := transform.SpEnrichedToJSON + + testCases := []struct { + Name string + Transformation transform.TransformationApplyFunction + Input []*models.Message + ExpectedGood []*models.Message + }{ + { + Input: messages, + Transformation: transform.NewTransformation( + luaFuncID, + setPkToAppID, + spEnrichedToJSON, + ), + ExpectedGood: []*models.Message{ + { + Data: snowplowJSON1, + PartitionKey: "test-data1", + }, + { + Data: snowplowJSON2, + PartitionKey: "test-data2", + }, + { + Data: snowplowJSON3, + PartitionKey: "test-data3", + }, + }, + }, + { + Input: messages, + Transformation: transform.NewTransformation( + setPkToAppID, + spEnrichedToJSON, + luaFuncID, + ), + ExpectedGood: []*models.Message{ + { + Data: snowplowJSON1, + PartitionKey: "test-data1", + }, + { + Data: snowplowJSON2, + PartitionKey: "test-data2", + }, + { + Data: snowplowJSON3, + PartitionKey: "test-data3", + }, + }, + }, + { + Input: messages, + Transformation: transform.NewTransformation( + setPkToAppID, + luaFuncPk, + spEnrichedToJSON, + ), + ExpectedGood: []*models.Message{ + { + Data: snowplowJSON1, + PartitionKey: "testKey", + }, + { + Data: snowplowJSON2, + PartitionKey: "testKey", + }, + { + Data: snowplowJSON3, + PartitionKey: "testKey", + }, + }, + }, + } + + for _, tt := range testCases { + 
t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + result := tt.Transformation(tt.Input) + assert.NotNil(result) + assert.Equal(len(tt.ExpectedGood), len(result.Result)) + for i, res := range result.Result { + if i < len(tt.ExpectedGood) { + exp := tt.ExpectedGood[i] + assert.JSONEq(string(res.Data), string(exp.Data)) + assert.Equal(res.PartitionKey, exp.PartitionKey) + } + } + }) + } +} + +func TestLuaEngineWithBuiltinsSpModeTrue(t *testing.T) { + srcCode := ` +function main(x) + return x +end + +function setPk(x) + x["PartitionKey"] = "testKey" + return x +end +` + // Lua + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + luaConfig := &LuaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: true, + SpMode: true, + } + + luaEngine, err := NewLuaEngine(luaConfig) + if err != nil { + t.Fatalf("NewLuaEngine failed with error: %q", err) + } + + if err := luaEngine.SmokeTest("main"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + if err := luaEngine.SmokeTest("setPk"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + luaFuncID := luaEngine.MakeFunction("main") + luaFuncPk := luaEngine.MakeFunction("setPk") + + // Builtins + setPkToAppID := transform.NewSpEnrichedSetPkFunction("app_id") + spEnrichedToJSON := transform.SpEnrichedToJSON + + testCases := []struct { + Name string + Transformation transform.TransformationApplyFunction + Input []*models.Message + ExpectedGood []*models.Message + }{ + { + Input: []*models.Message{ + { + Data: testLuaTsv, + PartitionKey: "prevKey", + }, + }, + Transformation: transform.NewTransformation( + setPkToAppID, + spEnrichedToJSON, + luaFuncID, + ), + ExpectedGood: []*models.Message{ + { + Data: testLuaJSON, + PartitionKey: "test-data<>", + }, + }, + }, + { + Input: []*models.Message{ + { + Data: testLuaTsv, + PartitionKey: "prevKey", + }, + }, + Transformation: transform.NewTransformation( + setPkToAppID, + luaFuncPk, + ), + ExpectedGood: 
[]*models.Message{ + { + Data: testLuaJSON, + PartitionKey: "testKey", + }, + }, + }, + { + Input: []*models.Message{ + { + Data: testLuaTsv, + PartitionKey: "prevKey", + }, + }, + Transformation: transform.NewTransformation( + setPkToAppID, + luaFuncID, + luaFuncPk, + luaFuncID, + ), + ExpectedGood: []*models.Message{ + { + Data: testLuaJSON, + PartitionKey: "testKey", + }, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + result := tt.Transformation(tt.Input) + assert.NotNil(result) + assert.Equal(len(tt.ExpectedGood), len(result.Result)) + for i, res := range result.Result { + if i < len(tt.ExpectedGood) { + exp := tt.ExpectedGood[i] + assert.JSONEq(string(res.Data), string(exp.Data)) + assert.Equal(res.PartitionKey, exp.PartitionKey) + } + } + }) + } +} + +func Benchmark_LuaEngine_Passthrough_Sandboxed(b *testing.B) { + b.ReportAllocs() + + srcCode := ` +function main(x) + return x +end +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + + inputMsg := &models.Message{ + Data: snowplowJSON1, + PartitionKey: "some-test-key", + } + luaConfig := &LuaEngineConfig{ + SourceB64: src, + RunTimeout: 5, + Sandbox: true, + } + + luaEngine, err := NewLuaEngine(luaConfig) + if err != nil { + b.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction("main") + + for n := 0; n < b.N; n++ { + transFunction(inputMsg, nil) + } +} + +func Benchmark_LuaEngine_Passthrough(b *testing.B) { + b.ReportAllocs() + + srcCode := ` +function main(x) + return x +end +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + + inputMsg := &models.Message{ + Data: snowplowJSON1, + PartitionKey: "some-test-key", + } + luaConfig := &LuaEngineConfig{ + SourceB64: src, + RunTimeout: 5, + Sandbox: false, + } + + luaEngine, err := NewLuaEngine(luaConfig) + if err != nil { + b.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) + } + + 
transFunction := luaEngine.MakeFunction("main") + + for n := 0; n < b.N; n++ { + transFunction(inputMsg, nil) + } +} + +func Benchmark_LuaEngine_Passthrough_Json(b *testing.B) { + b.ReportAllocs() + + srcCode := ` +function main(x) + local jsonObj, _ = json.decode(x) + local result, _ = json.encode(jsonObj) + + return result +end +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + + inputMsg := &models.Message{ + Data: snowplowJSON1, + PartitionKey: "some-test-key", + } + luaConfig := &LuaEngineConfig{ + SourceB64: src, + RunTimeout: 5, + Sandbox: false, + } + + luaEngine, err := NewLuaEngine(luaConfig) + if err != nil { + b.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction("jsonIdentity") + + for n := 0; n < b.N; n++ { + transFunction(inputMsg, nil) + } +} + +// Helper function to compare messages and avoid using reflect.DeepEqual +// on errors. Compares all but the error field of messages. +func assertMessagesCompareLua(t *testing.T, act, exp *models.Message) { + t.Helper() + + ok := false + switch { + case act == nil: + ok = exp == nil + case exp == nil: + default: + pkOk := act.PartitionKey == exp.PartitionKey + dataOk := reflect.DeepEqual(act.Data, exp.Data) + cTimeOk := reflect.DeepEqual(act.TimeCreated, exp.TimeCreated) + pTimeOk := reflect.DeepEqual(act.TimePulled, exp.TimePulled) + tTimeOk := reflect.DeepEqual(act.TimeTransformed, exp.TimeTransformed) + ackOk := reflect.DeepEqual(act.AckFunc, exp.AckFunc) + + if pkOk && dataOk && cTimeOk && pTimeOk && tTimeOk && ackOk { + ok = true + } + } + + if !ok { + t.Errorf("\nGOT:\n%s\nEXPECTED:\n%s\n", + spew.Sdump(act), + spew.Sdump(exp)) + } +} + +// helper variables +var testLuaTimes = map[string]string{ + "dvceCreatedTstamp": "2019-05-10T14:40:35.551Z", + "etlTstamp": "2019-05-10T14:40:37.436Z", + "derivedTstamp": "2019-05-10T14:40:35.972Z", + "collectorTstamp": "2019-05-10T14:40:35.972Z", + "dvceSentTstamp": "2019-05-10T14:40:35Z", 
+} + +var testLuaMap = map[string]interface{}{ + "event_version": "1-0-0", + "app_id": "test-data<>", + "dvce_created_tstamp": testLuaTimes["dvceCreatedTstamp"], + "event": "unstruct", + "v_collector": "ssc-0.15.0-googlepubsub", + "network_userid": "d26822f5-52cc-4292-8f77-14ef6b7a27e2", + "event_name": "add_to_cart", + "event_vendor": "com.snowplowanalytics.snowplow", + "event_format": "jsonschema", + "platform": "pc", + "etl_tstamp": testLuaTimes["etlTstamp"], + "collector_tstamp": testLuaTimes["collectorTstamp"], + "user_id": "user", + "dvce_sent_tstamp": testLuaTimes["dvceSentTstamp"], + "derived_tstamp": testLuaTimes["derivedTstamp"], + "event_id": "e9234345-f042-46ad-b1aa-424464066a33", + "v_tracker": "py-0.8.2", + "v_etl": "beam-enrich-0.2.0-common-0.36.0", + "user_ipaddress": "1.2.3.4", + "unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1": map[string]interface{}{ + "quantity": float64(2), + "unitPrice": 32.4, + "currency": "GBP", + "sku": "item41", + }, + "contexts_nl_basjes_yauaa_context_1": []interface{}{ + map[string]interface{}{ + "deviceName": "Unknown", + "layoutEngineVersionMajor": "??", + "operatingSystemName": "Unknown", + "deviceClass": "Unknown", + "agentVersion": "2.21.0", + "layoutEngineName": "Unknown", + "layoutEngineClass": "Unknown", + "agentName": "python-requests", + "agentNameVersion": "python-requests 2.21.0", + "operatingSystemVersion": "??", + "agentClass": "Special", + "deviceBrand": "Unknown", + "agentVersionMajor": "2", + "agentNameVersionMajor": "python-requests 2", + "operatingSystemClass": "Unknown", + "layoutEngineVersion": "??", + }, + }, + "useragent": "python-requests/2.21.0", +} + +var testLuaTsv = []byte(`test-data<> pc 2019-05-10 14:40:37.436 2019-05-10 14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 1.2.3.4 d26822f5-52cc-4292-8f77-14ef6b7a27e2 
{"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} python-requests/2.21.0 2019-05-10 14:40:35.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) + +// corresponding JSON to previous TSV +var testLuaJSON = []byte(`{"app_id":"test-data<>","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user","user_ipaddress":"1.2.3.4","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) + +// json encoded inside Lua +var snowplowJSON1ChangedLua = []byte(`{"app_id_CHANGED":"test-data1","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) diff --git a/pkg/transform/engine/engine_test_variables.go b/pkg/transform/engine/engine_test_variables.go new file mode 100644 index 00000000..147623e9 --- /dev/null +++ b/pkg/transform/engine/engine_test_variables.go @@ -0,0 +1,33 @@ +package engine + +import ( + "github.com/snowplow-devops/stream-replicator/pkg/models" +) + +var snowplowTsv1 = []byte(`test-data1 pc 2019-05-10 14:40:37.436 2019-05-10 14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 d26822f5-52cc-4292-8f77-14ef6b7a27e2 {"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} 
python-requests/2.21.0 2019-05-10 14:40:35.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) +var snowplowJSON1 = []byte(`{"app_id":"test-data1","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user\u003cbuilt-in function 
input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) +var snowplowTsv2 = []byte(`test-data2 pc 2019-05-10 14:40:32.392 2019-05-10 14:40:31.105 2019-05-10 14:40:30.218 transaction_item 5071169f-3050-473f-b03f-9748319b1ef2 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 68220ade-307b-4898-8e25-c4c8ac92f1d7 transaction item58 35.87 1 python-requests/2.21.0 2019-05-10 14:40:30.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:31.105 com.snowplowanalytics.snowplow transaction_item jsonschema 1-0-0 `) +var snowplowJSON2 = []byte(`{"app_id":"test-data2","collector_tstamp":"2019-05-10T14:40:31.105Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:31.105Z","dvce_created_tstamp":"2019-05-10T14:40:30.218Z","dvce_sent_tstamp":"2019-05-10T14:40:30Z","etl_tstamp":"2019-05-10T14:40:32.392Z","event":"transaction_item","event_format":"jsonschema","event_id":"5071169f-3050-473f-b03f-9748319b1ef2","event_name":"transaction_item","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"68220ade-307b-4898-8e25-c4c8ac92f1d7","platform":"pc","ti_orderid":"transaction\u003cbuilt-in function input\u003e","ti_price":35.87,"ti_quantity":1,"ti_sku":"item58","user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) +var snowplowTsv3 = []byte(`test-data3 pc 2019-05-10 14:40:30.836 2019-05-10 14:40:29.576 2019-05-10 14:40:29.204 page_view e8aef68d-8533-45c6-a672-26a0f01be9bd py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 b66c4a12-8584-4c7a-9a5d-7c96f59e2556 www.demo-site.com/campaign-landing-page landing-page 80 www.demo-site.com/campaign-landing-page python-requests/2.21.0 2019-05-10 14:40:29.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 
2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??","test1":{"test2":[{"test3":"testValue"}]}}}]} 2019-05-10 14:40:29.576 com.snowplowanalytics.snowplow page_view jsonschema 1-0-0 `) +var snowplowJSON3 = []byte(`{"app_id":"test-data3","collector_tstamp":"2019-05-10T14:40:29.576Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??","test1":{"test2":[{"test3":"testValue"}]}}],"derived_tstamp":"2019-05-10T14:40:29.576Z","dvce_created_tstamp":"2019-05-10T14:40:29.204Z","dvce_sent_tstamp":"2019-05-10T14:40:29Z","etl_tstamp":"2019-05-10T14:40:30.836Z","event":"page_view","event_format":"jsonschema","event_id":"e8aef68d-8533-45c6-a672-26a0f01be9bd","event_name":"page_view","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"b66c4a12-8584-4c7a-9a5d-7c96f59e2556","page_title":"landing-page","page_url":"www.demo-site.com/campaign-landing-page","page_urlpath":"www.demo-site.com/campaign-landing-page","page_urlport":80,"platform":"pc","user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) + +var nonSnowplowString = []byte(`not a snowplow event`) + +var messages = []*models.Message{ + { + Data: 
snowplowTsv1, + PartitionKey: "some-key", + }, + { + Data: snowplowTsv2, + PartitionKey: "some-key1", + }, + { + Data: snowplowTsv3, + PartitionKey: "some-key2", + }, + { + Data: nonSnowplowString, + PartitionKey: "some-key4", + }, +} diff --git a/pkg/transform/setup_test.go b/pkg/transform/setup_test.go new file mode 100644 index 00000000..3f2028b5 --- /dev/null +++ b/pkg/transform/setup_test.go @@ -0,0 +1,18 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package transform + +import ( + "os" + "testing" +) + +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} diff --git a/pkg/transform/snowplow_enriched_filter.go b/pkg/transform/snowplow_enriched_filter.go index 8a807bb6..74b4393d 100644 --- a/pkg/transform/snowplow_enriched_filter.go +++ b/pkg/transform/snowplow_enriched_filter.go @@ -7,84 +7,232 @@ package transform import ( - "errors" "fmt" "regexp" + "strconv" "strings" + "time" + + "github.com/dlclark/regexp2" + + "github.com/pkg/errors" + "github.com/snowplow/snowplow-golang-analytics-sdk/analytics" "github.com/snowplow-devops/stream-replicator/pkg/models" ) -// NewSpEnrichedFilterFunction returns a TransformationFunction which filters messages based on a field in the Snowplow enriched event. -// The filterconfig should describe the conditions for including a message. -// For example "aid=abc|def" includes all events with app IDs of abc or def, and filters out the rest. -// aid!=abc|def includes all events whose app IDs do not match abc or def, and filters out the rest. 
-func NewSpEnrichedFilterFunction(filterConfig string) (TransformationFunction, error) { - - // This regex prevents whitespace characters in the value provided - regex := `\S+(!=|==)[^\s\|]+((?:\|[^\s|]+)*)$` - re := regexp.MustCompile(regex) - - if !(re.MatchString(filterConfig)) { - // If invalid, return an error which will be returned by the main function - return nil, errors.New("Invalid filter function config, must be of the format {field name}=={value}[|{value}|...] or {field name}!={value}[|{value}|...]") +func evaluateSpEnrichedFilter(re *regexp2.Regexp, valuesFound []interface{}) bool { + // if valuesFound is nil, we found no value. + // Because negative matches are a thing, we still want to match against an empty string + if valuesFound == nil { + valuesFound = make([]interface{}, 1) + } + for _, v := range valuesFound { + if v == nil { + v = "" // because nil gets cast to `` + } + + if ok, _ := re.MatchString(fmt.Sprintf("%v", v)); ok { + return true + } + } + return false +} + +func createSpEnrichedFilterFunction(regex string, regexTimeout int, getFunc valueGetter) (TransformationFunction, error) { + if regexTimeout == 0 { + // default timeout for regex is 10 seconds + regexTimeout = 10 } - // Check for a negation condition first - keyValues := strings.SplitN(filterConfig, "!=", 2) - - // isNegationFilter determines whether a match sets shouldKeepMessage to true or false, and consequently whether message is kept or filtered - var isNegationFilter bool - if len(keyValues) > 1 { - // If negation condition is found, default to keep the message, and change this when match found - isNegationFilter = true - } else { - // Otherwise, look for affirmation condition, default to drop the message and change when match found - keyValues = strings.SplitN(filterConfig, "==", 2) - isNegationFilter = false + // regexToMatch is what we use to evaluate the actual filter, once we have the value. 
+ regexToMatch, err := regexp2.Compile(regex, 0) + regexToMatch.MatchTimeout = time.Duration(regexTimeout) * time.Second + if err != nil { + return nil, errors.Wrap(err, `error compiling regex for filter`) } return func(message *models.Message, intermediateState interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { - // Start by resetting shouldKeepMessage to isNegationFilter - shouldKeepMessage := isNegationFilter - // Evalute intermediateState to parsedEvent - parsedMessage, parseErr := intermediateAsSpEnrichedParsed(intermediateState, message) + // Evaluate intermediateState to parsedEvent + parsedMessage, parseErr := IntermediateAsSpEnrichedParsed(intermediateState, message) if parseErr != nil { message.SetError(parseErr) return nil, nil, message, nil } - valueFound, err := parsedMessage.GetValue(keyValues[0]) - - // GetValue returns an error if the field requested is empty. Check for that particular error before failing the message. - if err != nil && err.Error() == fmt.Sprintf("Field %s is empty", keyValues[0]) { - valueFound = nil - } else if err != nil { + // get the value + valueFound, err := getFunc(parsedMessage) + if err != nil { message.SetError(err) return nil, nil, message, nil } - evaluation: - for _, valueToMatch := range strings.Split(keyValues[1], "|") { - if valueToMatch == fmt.Sprintf("%v", valueFound) { // coerce to string as valueFound may be any type found in a Snowplow event - if isNegationFilter { - shouldKeepMessage = false - } else { - shouldKeepMessage = true - } - break evaluation - // Once config value is matched once, change shouldKeepMessage, and stop looking for matches - } - } + // evaluate whether the found value passes the filter, determining if the message should be kept + shouldKeepMessage := evaluateSpEnrichedFilter(regexToMatch, valueFound) - // If message is not to be kept, return it as a filtered message to be acked in the main function + // if message is not to be kept, return it as a filtered 
message to be acked in the main function if !shouldKeepMessage { - return nil, message, nil, nil } - // Otherwise, return the message and intermediateState for further processing. + // otherwise, return the message and intermediateState for further processing. return message, nil, nil, parsedMessage }, nil } + +// valueGetter is a function that can hold the logic for getting values in the case of base, context, and unstruct fields, +// which respecively require different logic. +type valueGetter func(analytics.ParsedEvent) ([]interface{}, error) + +// Because each type of value requires different arguments, we use these `make` functions to construct them. +// This allows us to unit test each one, plug them into the createSpEnrichedFilterFunction constructor, +// and to construct them so that field names/paths and regexes are handled only once, at startup. + +// makeBaseValueGetter returns a valueGetter for base-level values. +func makeBaseValueGetter(field string) valueGetter { + return func(parsedMessage analytics.ParsedEvent) (value []interface{}, err error) { + // find the value in the event + valueFound, err := parsedMessage.GetValue(field) + // We don't return an error for empty field since this just means the value is nil. + if err != nil && err.Error() != analytics.EmptyFieldErr { + return nil, err + } + return []interface{}{valueFound}, nil + } +} + +// NewSpEnrichedFilterFunction returns a TransformationFunction which filters messages based on a field in the Snowplow enriched event. 
+func NewSpEnrichedFilterFunction(field, regex string, regexTimeout int) (TransformationFunction, error) { + + // getBaseValueForMatch is responsible for retrieving data from the message for base fields + getBaseValueForMatch := makeBaseValueGetter(field) + + return createSpEnrichedFilterFunction(regex, regexTimeout, getBaseValueForMatch) +} + +// makeContextValueGetter creates a valueGetter for context data +func makeContextValueGetter(name string, path []interface{}) valueGetter { + return func(parsedMessage analytics.ParsedEvent) ([]interface{}, error) { + value, err := parsedMessage.GetContextValue(name, path...) + // We don't return an error for empty field since this just means the value is nil. + if err != nil && err.Error() != analytics.EmptyFieldErr { + return nil, err + } + // bug in analytics sdk requires the type casting below. https://github.com/snowplow/snowplow-golang-analytics-sdk/issues/36 + // GetContextValue should always return []interface{} but instead it returns an interface{} which always contains type []interface{} + + // if it's nil, return nil - we just didn't find any value. + if value == nil { + return nil, nil + } + // otherwise, type assertion. 
+ valueFound, ok := value.([]interface{}) + if !ok { + return nil, errors.New(fmt.Sprintf("Context filter encountered unexpected type in getting value for path %v", path)) + } + + return valueFound, nil + } +} + +// NewSpEnrichedFilterFunctionContext returns a TransformationFunction for filtering a context +func NewSpEnrichedFilterFunctionContext(contextFullName, pathToField, regex string, regexTimeout int) (TransformationFunction, error) { + + path, err := parsePathToArguments(pathToField) + if err != nil { + return nil, errors.Wrap(err, "error creating Context filter function") + } + + // getContextValuesForMatch is responsible for retrieving data from the message for context fields + getContextValuesForMatch := makeContextValueGetter(contextFullName, path) + + return createSpEnrichedFilterFunction(regex, regexTimeout, getContextValuesForMatch) +} + +// makeUnstructValueGetter creates a valueGetter for unstruct data. +func makeUnstructValueGetter(eventName string, versionRegex *regexp.Regexp, path []interface{}) valueGetter { + return func(parsedMessage analytics.ParsedEvent) (value []interface{}, err error) { + eventNameFound, err := parsedMessage.GetValue(`event_name`) + if err != nil { // This field can't be empty for a valid event, so we return all errors here + return nil, err + } + if eventNameFound != eventName { // If we don't have an exact match on event name, we return nil value + return nil, nil + } + versionFound, err := parsedMessage.GetValue(`event_version`) + if err != nil { // This field can't be empty for a valid event, so we return all errors here + return nil, err + } + if !versionRegex.MatchString(versionFound.(string)) { // If we don't match the provided version regex, return nil value + return nil, nil + } + + valueFound, err := parsedMessage.GetUnstructEventValue(path...) + // We don't return an error for empty field since this just means the value is nil. 
+ if err != nil && err.Error() != analytics.EmptyFieldErr && !strings.Contains(err.Error(), "not found") { + // This last clause exists because of this: https://github.com/snowplow/snowplow-golang-analytics-sdk/issues/37 + // TODO: Fix that and remove it as soon as possible. + return nil, err + } + + if valueFound == nil { + return nil, nil + } + + return []interface{}{valueFound}, nil + } +} + +// NewSpEnrichedFilterFunctionUnstructEvent returns a TransformationFunction for filtering an unstruct_event +func NewSpEnrichedFilterFunctionUnstructEvent(eventNameToMatch, eventVersionToMatch, pathToField, regex string, regexTimeout int) (TransformationFunction, error) { + + path, err := parsePathToArguments(pathToField) + if err != nil { + return nil, errors.Wrap(err, "error creating Unstruct filter function") + } + + versionRegex, err := regexp.Compile(eventVersionToMatch) + if err != nil { + return nil, errors.Wrap(err, fmt.Sprint("Failed to compile regex: ", eventVersionToMatch)) + } + + // getUnstructValuesForMatch is responsible for retrieving data from the message for context fields. + // It also checks that the correct event name and version are provided, and returns nil if not. + getUnstructValuesForMatch := makeUnstructValueGetter(eventNameToMatch, versionRegex, path) + + return createSpEnrichedFilterFunction(regex, regexTimeout, getUnstructValuesForMatch) +} + +// parsePathToArguments parses a string path to custom data (eg. `test1.test2[0].test3`) +// into the slice of interfaces expected by the analytics SDK's Get() methods. 
+func parsePathToArguments(pathToField string) ([]interface{}, error) { + // validate that an edge case (unmatched opening brace) isn't present + if strings.Count(pathToField, "[") != strings.Count(pathToField, "]") { + return nil, errors.New(fmt.Sprint("unmatched brace in path: ", pathToField)) + } + + // regex to separate path into components + re := regexp.MustCompile(`\[\d+\]|[^\.\[]+`) + parts := re.FindAllString(pathToField, -1) + + // regex to identify arrays + arrayRegex := regexp.MustCompile(`\[\d+\]`) + + convertedPath := make([]interface{}, 0) + for _, part := range parts { + + if arrayRegex.MatchString(part) { // handle arrays first + intPart, err := strconv.Atoi(part[1 : len(part)-1]) // strip braces and convert to int + if err != nil { + return nil, errors.New(fmt.Sprint("error parsing path element: ", part)) + } + + convertedPath = append(convertedPath, intPart) + } else { // handle strings + convertedPath = append(convertedPath, part) + } + + } + return convertedPath, nil +} diff --git a/pkg/transform/snowplow_enriched_filter_test.go b/pkg/transform/snowplow_enriched_filter_test.go index 32da7ac2..acc11583 100644 --- a/pkg/transform/snowplow_enriched_filter_test.go +++ b/pkg/transform/snowplow_enriched_filter_test.go @@ -7,31 +7,43 @@ package transform import ( + "regexp" "testing" - "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/dlclark/regexp2" "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/pkg/models" ) +var messageGood = models.Message{ + Data: snowplowTsv3, + PartitionKey: "some-key", +} + +var messageGoodInt = models.Message{ + Data: snowplowTsv4, + PartitionKey: "some-key", +} + +var messageWithUnstructEvent = models.Message{ + Data: snowplowTsv1, + PartitionKey: "some-key", +} + func TestNewSpEnrichedFilterFunction(t *testing.T) { assert := assert.New(t) - var messageGood = models.Message{ - Data: snowplowTsv3, - PartitionKey: "some-key", - } - // Single value cases - 
aidFilterFuncKeep, _ := NewSpEnrichedFilterFunction("app_id==test-data3") + aidFilterFuncKeep, _ := NewSpEnrichedFilterFunction("app_id", "^test-data3$", 0) - // TODO: sort out numbering for fail cases... aidKeepIn, aidKeepOut, fail, _ := aidFilterFuncKeep(&messageGood, nil) assert.Equal(snowplowTsv3, aidKeepIn.Data) assert.Nil(aidKeepOut) assert.Nil(fail) - aidFilterFuncDiscard, _ := NewSpEnrichedFilterFunction("app_id==failThis") + aidFilterFuncDiscard, _ := NewSpEnrichedFilterFunction("app_id", "failThis", 10) aidDiscardIn, aidDiscardOut, fail2, _ := aidFilterFuncDiscard(&messageGood, nil) @@ -40,7 +52,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail2) // int value - urlPrtFilterFuncKeep, _ := NewSpEnrichedFilterFunction("page_urlport==80") + urlPrtFilterFuncKeep, _ := NewSpEnrichedFilterFunction("page_urlport", "^80$", 10) urlPrtKeepIn, urlPrtKeepOut, fail, _ := urlPrtFilterFuncKeep(&messageGood, nil) @@ -49,7 +61,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail) // Multiple value cases - aidFilterFuncKeepWithMultiple, _ := NewSpEnrichedFilterFunction("app_id==someotherValue|test-data3") + aidFilterFuncKeepWithMultiple, _ := NewSpEnrichedFilterFunction("app_id", "^someotherValue|test-data3$", 10) aidMultipleNegationFailedIn, aidMultipleKeepOut, fail3, _ := aidFilterFuncKeepWithMultiple(&messageGood, nil) @@ -57,7 +69,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(aidMultipleKeepOut) assert.Nil(fail3) - aidFilterFuncDiscardWithMultiple, _ := NewSpEnrichedFilterFunction("app_id==someotherValue|failThis") + aidFilterFuncDiscardWithMultiple, _ := NewSpEnrichedFilterFunction("app_id", "^someotherValue|failThis$", 10) aidNegationMultipleIn, aidMultipleDiscardOut, fail3, _ := aidFilterFuncDiscardWithMultiple(&messageGood, nil) @@ -66,8 +78,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail3) // Single value negation cases - - aidFilterFuncNegationDiscard, _ := 
NewSpEnrichedFilterFunction("app_id!=test-data3") + aidFilterFuncNegationDiscard, _ := NewSpEnrichedFilterFunction("app_id", "^((?!test-data3).)*$", 10) aidNegationIn, aidNegationOut, fail4, _ := aidFilterFuncNegationDiscard(&messageGood, nil) @@ -75,7 +86,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Equal(snowplowTsv3, aidNegationOut.Data) assert.Nil(fail4) - aidFilterFuncNegationKeep, _ := NewSpEnrichedFilterFunction("app_id!=failThis") + aidFilterFuncNegationKeep, _ := NewSpEnrichedFilterFunction("app_id", "^((?!someValue).)*$", 10) aidNegationFailedIn, aidNegationFailedOut, fail5, _ := aidFilterFuncNegationKeep(&messageGood, nil) @@ -84,7 +95,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail5) // Multiple value negation cases - aidFilterFuncNegationDiscardMultiple, _ := NewSpEnrichedFilterFunction("app_id!=someotherValue|test-data1|test-data2|test-data3") + aidFilterFuncNegationDiscardMultiple, _ := NewSpEnrichedFilterFunction("app_id", "^((?!someotherValue|test-data1|test-data2|test-data3).)*$", 10) aidNegationMultipleIn, aidNegationMultipleOut, fail6, _ := aidFilterFuncNegationDiscardMultiple(&messageGood, nil) @@ -92,7 +103,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Equal(snowplowTsv3, aidNegationMultipleOut.Data) assert.Nil(fail6) - aidFilterFuncNegationKeptMultiple, _ := NewSpEnrichedFilterFunction("app_id!=someotherValue|failThis") + aidFilterFuncNegationKeptMultiple, _ := NewSpEnrichedFilterFunction("app_id", "^((?!someotherValue|failThis).)*$", 10) aidMultipleNegationFailedIn, aidMultipleNegationFailedOut, fail7, _ := aidFilterFuncNegationKeptMultiple(&messageGood, nil) @@ -101,41 +112,107 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail7) // Filters on a nil field - txnFilterFunctionAffirmation, _ := NewSpEnrichedFilterFunction("txn_id==something") + txnFilterFunctionAffirmation, _ := NewSpEnrichedFilterFunction("txn_id", "^something$", 10) nilAffirmationIn, 
nilAffirmationOut, fail8, _ := txnFilterFunctionAffirmation(&messageGood, nil) + // nil doesn't match the regex and should be filtered out. assert.Nil(nilAffirmationIn) assert.Equal(snowplowTsv3, nilAffirmationOut.Data) assert.Nil(fail8) - txnFilterFunctionNegation, _ := NewSpEnrichedFilterFunction("txn_id!=something") + txnFilterFunctionNegation, _ := NewSpEnrichedFilterFunction("txn_id", "^((?!something).)*$", 10) nilNegationIn, nilNegationOut, fail8, _ := txnFilterFunctionNegation(&messageGood, nil) + // nil DOES match the negative lookup - it doesn't contain 'something'. So should be kept. assert.Equal(snowplowTsv3, nilNegationIn.Data) assert.Nil(nilNegationOut) assert.Nil(fail8) + + fieldNotExistsFilter, _ := NewSpEnrichedFilterFunction("nothing", "", 10) + + notExistsIn, notExistsOut, notExistsFail, _ := fieldNotExistsFilter(&messageGood, nil) + + assert.Nil(notExistsIn) + assert.Nil(notExistsOut) + assert.NotNil(notExistsFail) } -func TestNewSpEnrichedFilterFunction_Error(t *testing.T) { +func TestNewSpEnrichedFilterFunctionContext(t *testing.T) { assert := assert.New(t) - error := `Invalid filter function config, must be of the format {field name}=={value}[|{value}|...] 
or {field name}!={value}[|{value}|...]` - filterFunc, err1 := NewSpEnrichedFilterFunction("") + // The relevant data in messageGood looks like this: "test1":{"test2":[{"test3":"testValue"}] + + // context filter success + contextFuncKeep, _ := NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1", "test1.test2[0].test3", "^testValue$", 10) + + contextKeepIn, contextKeepOut, fail9, _ := contextFuncKeep(&messageGood, nil) + + assert.Equal(snowplowTsv3, contextKeepIn.Data) + assert.Nil(contextKeepOut) + assert.Nil(fail9) + + // The relevant data in messageGoodInt looks like this: "test1":{"test2":[{"test3":1}] + + // context filter success (integer value) + contextFuncKeep, _ = NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1", "test1.test2[0].test3", "^1$", 10) + + contextKeepIn, contextKeepOut, fail9, _ = contextFuncKeep(&messageGoodInt, nil) + + assert.Equal(snowplowTsv4, contextKeepIn.Data) + assert.Nil(contextKeepOut) + assert.Nil(fail9) + + // context filter wrong path + contextFuncKeep, _ = NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_2", "test1.test2[0].test3", "^testValue$", 10) + + contextKeepIn, contextKeepOut, fail9, _ = contextFuncKeep(&messageGood, nil) + + assert.Nil(contextKeepIn) + assert.Equal(snowplowTsv3, contextKeepOut.Data) + assert.Nil(fail9) +} + +func TestNewSpEnrichedFilterFunctionUnstructEvent(t *testing.T) { + assert := assert.New(t) + + // event filter success, filtered event name + eventFilterFuncKeep, _ := NewSpEnrichedFilterFunctionUnstructEvent("add_to_cart", "1-*-*", "sku", "^item41$", 10) + + eventKeepIn, eventKeepOut, fail10, _ := eventFilterFuncKeep(&messageWithUnstructEvent, nil) + + assert.Equal(snowplowTsv1, eventKeepIn.Data) + assert.Nil(eventKeepOut) + assert.Nil(fail10) + + // event filter success, filtered event name, no event ver + eventFilterFuncKeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("add_to_cart", "", "sku", "^item41$", 10) + + eventKeepIn, 
eventKeepOut, fail10, _ = eventFilterFuncKeep(&messageWithUnstructEvent, nil) - assert.Nil(filterFunc) - assert.Equal(error, err1.Error()) + assert.Equal(snowplowTsv1, eventKeepIn.Data) + assert.Nil(eventKeepOut) + assert.Nil(fail10) - filterFunc, err2 := NewSpEnrichedFilterFunction("app_id==abc|") + // event filter failure, wrong event name + eventFilterFuncKeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("wrong_name", "", "sku", "^item41$", 10) - assert.Nil(filterFunc) - assert.Equal(error, err2.Error()) + eventKeepIn, eventKeepOut, fail11, _ := eventFilterFuncKeep(&messageWithUnstructEvent, nil) - filterFunc, err3 := NewSpEnrichedFilterFunction("!=abc") + assert.Nil(eventKeepIn) + assert.Equal(snowplowTsv1, eventKeepOut.Data) + assert.Nil(fail11) + + // event filter failure, field not found + eventFilterFuncKeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("add_to_cart", "", "ska", "item41", 10) + + eventNoFieldIn, eventNoFieldOut, fail12, _ := eventFilterFuncKeep(&messageWithUnstructEvent, nil) + + assert.Nil(eventNoFieldIn) + assert.Equal(snowplowTsv1, eventNoFieldOut.Data) + assert.Nil(fail12) - assert.Nil(filterFunc) - assert.Equal(error, err3.Error()) } func TestSpEnrichedFilterFunction_Slice(t *testing.T) { @@ -159,7 +236,7 @@ func TestSpEnrichedFilterFunction_Slice(t *testing.T) { }, } - filterFunc, _ := NewSpEnrichedFilterFunction("app_id==test-data1") + filterFunc, _ := NewSpEnrichedFilterFunction("app_id", "^test-data1$", 10) filter1 := NewTransformation(filterFunc) filter1Res := filter1(messages) @@ -187,7 +264,7 @@ func TestSpEnrichedFilterFunction_Slice(t *testing.T) { }, } - filterFunc2, _ := NewSpEnrichedFilterFunction("app_id==test-data1|test-data2") + filterFunc2, _ := NewSpEnrichedFilterFunction("app_id", "^test-data1|test-data2$", 10) filter2 := NewTransformation(filterFunc2) filter2Res := filter2(messages) @@ -203,7 +280,7 @@ func TestSpEnrichedFilterFunction_Slice(t *testing.T) { }, } - filterFunc3, _ := 
NewSpEnrichedFilterFunction("app_id!=test-data1|test-data2") + filterFunc3, _ := NewSpEnrichedFilterFunction("app_id", "^((?!test-data1|test-data2).)*$", 10) filter3 := NewTransformation(filterFunc3) filter3Res := filter3(messages) @@ -212,3 +289,237 @@ func TestSpEnrichedFilterFunction_Slice(t *testing.T) { assert.Equal(1, len(filter3Res.Invalid)) } + +func TestEvaluateSpEnrichedFilter(t *testing.T) { + assert := assert.New(t) + + regex, err := regexp2.Compile("^yes$", 0) + if err != nil { + panic(err) + } + + valuesFound := []interface{}{"NO", "maybe", "yes"} + assert.True(evaluateSpEnrichedFilter(regex, valuesFound)) + + valuesFound2 := []interface{}{"NO", "maybe", "nope", nil} + assert.False(evaluateSpEnrichedFilter(regex, valuesFound2)) + + regexInt, err := regexp2.Compile("^123$", 0) + if err != nil { + panic(err) + } + + valuesFound3 := []interface{}{123, "maybe", "nope", nil} + assert.True(evaluateSpEnrichedFilter(regexInt, valuesFound3)) + + // This asserts that when any element of the input is nil, we assert against empty string. + // It exists to ensure we don't evaluate against the string `` since we're naively casting values to string. + regexNil, err := regexp2.Compile("^$", 0) + if err != nil { + panic(err) + } + + assert.True(evaluateSpEnrichedFilter(regexNil, []interface{}{nil})) + + // just to make sure the regex only matches empty: + assert.False(evaluateSpEnrichedFilter(regexNil, []interface{}{"a"})) + + // These tests ensures that when getters return a nil slice, we're still asserting against the empty value. + // This is important since we have negative lookaheads. 
+ + assert.True(evaluateSpEnrichedFilter(regexNil, nil)) + + // negative lookahead: + regexNegative, err := regexp2.Compile("^((?!failThis).)*$", 0) + if err != nil { + panic(err) + } + + assert.True(evaluateSpEnrichedFilter(regexNegative, nil)) +} + +func TestMakeBaseValueGetter(t *testing.T) { + assert := assert.New(t) + + // simple app ID + appIDGetter := makeBaseValueGetter("app_id") + + res, err := appIDGetter(spTsv3Parsed) + + assert.Equal([]interface{}{"test-data3"}, res) + assert.Nil(err) + + nonExistentFieldGetter := makeBaseValueGetter("nope") + + res2, err2 := nonExistentFieldGetter(spTsv3Parsed) + + assert.Nil(res2) + assert.NotNil(err2) + if err2 != nil { + assert.Equal("Key nope not a valid atomic field", err2.Error()) + } + // TODO: currently we'll only hit this error while processing data. Ideally we should hit it on startup. +} + +func TestMakeContextValueGetter(t *testing.T) { + assert := assert.New(t) + + contextGetter := makeContextValueGetter("contexts_nl_basjes_yauaa_context_1", []interface{}{"test1", "test2", 0, "test3"}) + + res, err := contextGetter(spTsv3Parsed) + + assert.Equal([]interface{}{"testValue"}, res) + assert.Nil(err) + + res2, err2 := contextGetter(spTsv1Parsed) + + // If the path doesn't exist, we shoud return nil, nil. 
+ assert.Nil(res2) + assert.Nil(err2) + + contextGetterArray := makeContextValueGetter("contexts_com_acme_just_ints_1", []interface{}{"integerField"}) + + res3, err3 := contextGetterArray(spTsv1Parsed) + + assert.Equal([]interface{}{float64(0), float64(1), float64(2)}, res3) + assert.Nil(err3) +} + +func TestMakeUnstructValueGetter(t *testing.T) { + assert := assert.New(t) + + re1 := regexp.MustCompile("1-*-*") + + unstructGetter := makeUnstructValueGetter("add_to_cart", re1, []interface{}{"sku"}) + + res, err := unstructGetter(spTsv1Parsed) + + assert.Equal([]interface{}{"item41"}, res) + assert.Nil(err) + + unstructGetterWrongPath := makeUnstructValueGetter("add_to_cart", re1, []interface{}{"notSku"}) + + // If it's not in the event, both should be nil + res2, err2 := unstructGetterWrongPath(spTsv1Parsed) + + assert.Nil(res2) + assert.Nil(err2) + + // test that wrong schema version behaves appropriately (return nil nil) + re2 := regexp.MustCompile("2-*-*") + + unstructWrongSchemaGetter := makeUnstructValueGetter("add_to_cart", re2, []interface{}{"sku"}) + + res3, err3 := unstructWrongSchemaGetter(spTsv1Parsed) + + assert.Nil(res3) + assert.Nil(err3) + + // test that not specifying a version behaves appropriately (accepts all versions) + re3 := regexp.MustCompile("") + + unstructAnyVersionGetter := makeUnstructValueGetter("add_to_cart", re3, []interface{}{"sku"}) + + res4, err4 := unstructAnyVersionGetter(spTsv1Parsed) + + assert.Equal([]interface{}{"item41"}, res4) + assert.Nil(err4) + + // test that wrong event name behaves appropriately (return nil nil) + + unstructWrongEvnetName := makeUnstructValueGetter("not_add_to_cart_at_all", re3, []interface{}{"sku"}) + + res5, err5 := unstructWrongEvnetName(spTsv1Parsed) + + assert.Nil(res5) + assert.Nil(err5) +} + +func BenchmarkBaseFieldFilter(b *testing.B) { + var messageGood = models.Message{ + Data: snowplowTsv3, + PartitionKey: "some-key", + } + aidFilterFuncKeep, _ := NewSpEnrichedFilterFunction("app_id", 
"^test-data3$", 0) + + aidFilterFuncNegationKeep, _ := NewSpEnrichedFilterFunction("app_id", "^((?!failThis).)*$", 10) + + for i := 0; i < b.N; i++ { + + aidFilterFuncKeep(&messageGood, nil) + aidFilterFuncNegationKeep(&messageGood, nil) + } +} + +func BenchmarkContextFilterNew(b *testing.B) { + var messageGood = models.Message{ + Data: snowplowTsv3, + PartitionKey: "some-key", + } + + contextFuncAffirm, _ := NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1", "test1.test2[0].test3", "^testValue$", 10) + contextFuncNegate, _ := NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1", "test1.test2[0].test3", "^((?!failThis).)*$", 10) + + for i := 0; i < b.N; i++ { + contextFuncAffirm(&messageGood, nil) + contextFuncNegate(&messageGood, nil) + } +} + +func BenchmarkUnstructFilterNew(b *testing.B) { + var messageGood = models.Message{ + Data: snowplowTsv1, + PartitionKey: "some-key", + } + + unstructFilterFuncAffirm, _ := NewSpEnrichedFilterFunctionUnstructEvent("add_to_cart", "1-*-*", "sku", "^item41$", 10) + unstructFilterFuncNegate, _ := NewSpEnrichedFilterFunctionUnstructEvent("add_to_cart", "1-*-*", "sku", "^((?!failThis).)*$", 10) + + for i := 0; i < b.N; i++ { + unstructFilterFuncAffirm(&messageGood, nil) + unstructFilterFuncNegate(&messageGood, nil) + + } +} + +func TestParsePathToArguments(t *testing.T) { + assert := assert.New(t) + + // Common case + path1, err1 := parsePathToArguments("test1[123].test2[1].test3") + expectedPath1 := []interface{}{"test1", 123, "test2", 1, "test3"} + + assert.Equal(expectedPath1, path1) + assert.Nil(err1) + + // Success edge case - field names with different character + path2, err2 := parsePathToArguments("test-1.test_2[1].test$3") + expectedPath2 := []interface{}{"test-1", "test_2", 1, "test$3"} + + assert.Equal(expectedPath2, path2) + assert.Nil(err2) + + // Success edge case - field name is stringified int + path3, err3 := parsePathToArguments("123.456[1].789") + expectedPath3 := 
[]interface{}{"123", "456", 1, "789"} + + assert.Equal(expectedPath3, path3) + assert.Nil(err3) + + // Success edge case - nested arrays + path4, err4 := parsePathToArguments("test1.test2[1][2].test3") + expectedPath4 := []interface{}{"test1", "test2", 1, 2, "test3"} + + assert.Equal(expectedPath4, path4) + assert.Nil(err4) + + // Failure edge case - unmatched brace in path + // We are validating for this and failing at startup, with the assumption that it must be misconfiguration. + path5, err5 := parsePathToArguments("test1.test[2.test3") + + assert.Nil(path5) + assert.NotNil(err5) + if err5 != nil { + assert.Equal("unmatched brace in path: test1.test[2.test3", err5.Error()) + } +} diff --git a/pkg/transform/snowplow_enriched_set_pk.go b/pkg/transform/snowplow_enriched_set_pk.go index 0a3323e8..a928554c 100644 --- a/pkg/transform/snowplow_enriched_set_pk.go +++ b/pkg/transform/snowplow_enriched_set_pk.go @@ -16,7 +16,7 @@ import ( func NewSpEnrichedSetPkFunction(pkField string) TransformationFunction { return func(message *models.Message, intermediateState interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { // Evalute intermediateState to parsedEvent - parsedMessage, parseErr := intermediateAsSpEnrichedParsed(intermediateState, message) + parsedMessage, parseErr := IntermediateAsSpEnrichedParsed(intermediateState, message) if parseErr != nil { message.SetError(parseErr) return nil, nil, message, nil diff --git a/pkg/transform/snowplow_enriched_set_pk_test.go b/pkg/transform/snowplow_enriched_set_pk_test.go index c0958fe0..04988efb 100644 --- a/pkg/transform/snowplow_enriched_set_pk_test.go +++ b/pkg/transform/snowplow_enriched_set_pk_test.go @@ -9,8 +9,9 @@ package transform import ( "testing" - "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/pkg/models" ) func TestNewSpEnrichedSetPkFunction(t *testing.T) { @@ -57,7 +58,10 @@ func 
TestNewSpEnrichedSetPkFunction(t *testing.T) { assert.Nil(failureCase) assert.Nil(intermediate) assert.NotNil(fail) - assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", fail.GetError().Error()) + assert.NotNil(fail.GetError()) + if fail.GetError() != nil { + assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", fail.GetError().Error()) + } // Nuanced success case // Test to assert behaviour when there's an incompatible intermediateState in the input diff --git a/pkg/transform/snowplow_enriched_to_json.go b/pkg/transform/snowplow_enriched_to_json.go index b0fce26c..020b2884 100644 --- a/pkg/transform/snowplow_enriched_to_json.go +++ b/pkg/transform/snowplow_enriched_to_json.go @@ -13,7 +13,7 @@ import ( // SpEnrichedToJSON is a specific transformation implementation to transform good enriched data within a message to Json func SpEnrichedToJSON(message *models.Message, intermediateState interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { // Evalute intermediateState to parsedEvent - parsedMessage, parseErr := intermediateAsSpEnrichedParsed(intermediateState, message) + parsedMessage, parseErr := IntermediateAsSpEnrichedParsed(intermediateState, message) if parseErr != nil { message.SetError(parseErr) return nil, nil, message, nil diff --git a/pkg/transform/snowplow_enriched_to_json_test.go b/pkg/transform/snowplow_enriched_to_json_test.go index 0d86dcaa..e037e442 100644 --- a/pkg/transform/snowplow_enriched_to_json_test.go +++ b/pkg/transform/snowplow_enriched_to_json_test.go @@ -9,8 +9,9 @@ package transform import ( "testing" - "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/pkg/models" ) func TestSpEnrichedToJson(t *testing.T) { @@ -34,7 +35,8 @@ func TestSpEnrichedToJson(t *testing.T) { // Simple success case transformSuccess, _, failure, intermediate := SpEnrichedToJSON(&messageGood, nil) - 
assert.Equal(&expectedGood, transformSuccess) + assert.Equal(expectedGood.PartitionKey, transformSuccess.PartitionKey) + assert.JSONEq(string(expectedGood.Data), string(transformSuccess.Data)) assert.Equal(spTsv1Parsed, intermediate) assert.Nil(failure) @@ -42,7 +44,10 @@ func TestSpEnrichedToJson(t *testing.T) { success, _, transformFailure, intermediate := SpEnrichedToJSON(&messageBad, nil) // Not matching equivalence of whole object because error stacktrace makes it unfeasible. Doing each component part instead. - assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", transformFailure.GetError().Error()) + assert.NotNil(transformFailure.GetError()) + if transformFailure.GetError() != nil { + assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", transformFailure.GetError().Error()) + } assert.Equal([]byte("not a snowplow event"), transformFailure.Data) assert.Equal("some-key4", transformFailure.PartitionKey) // Failure in this case is in parsing to IntermediateState, so none expected in output @@ -64,7 +69,8 @@ func TestSpEnrichedToJson(t *testing.T) { // When we have some incompatible IntermediateState, expected behaviour is to replace it with this transformation's IntermediateState transformSuccess2, _, failure2, intermediate2 := SpEnrichedToJSON(&incompatibleIntermediateMessage, incompatibleIntermediate) - assert.Equal(&expectedGood, transformSuccess2) + assert.Equal(expectedGood.PartitionKey, transformSuccess2.PartitionKey) + assert.JSONEq(string(expectedGood.Data), string(transformSuccess2.Data)) assert.Equal(spTsv1Parsed, intermediate2) assert.Nil(failure2) } diff --git a/pkg/transform/snowplow_enriched_util.go b/pkg/transform/snowplow_enriched_util.go index 3f374d4d..b29b34cd 100644 --- a/pkg/transform/snowplow_enriched_util.go +++ b/pkg/transform/snowplow_enriched_util.go @@ -7,11 +7,14 @@ package transform import ( - "github.com/snowplow-devops/stream-replicator/pkg/models" 
"github.com/snowplow/snowplow-golang-analytics-sdk/analytics" + + "github.com/snowplow-devops/stream-replicator/pkg/models" ) -func intermediateAsSpEnrichedParsed(intermediateState interface{}, message *models.Message) (analytics.ParsedEvent, error) { +// IntermediateAsSpEnrichedParsed returns the intermediate state as a ParsedEvent if valid or parses +// the message as an event +func IntermediateAsSpEnrichedParsed(intermediateState interface{}, message *models.Message) (analytics.ParsedEvent, error) { var parsedMessage, ok = intermediateState.(analytics.ParsedEvent) var parseErr error if ok { diff --git a/pkg/transform/snowplow_enriched_util_test.go b/pkg/transform/snowplow_enriched_util_test.go new file mode 100644 index 00000000..d253cb21 --- /dev/null +++ b/pkg/transform/snowplow_enriched_util_test.go @@ -0,0 +1,48 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package transform + +import ( + "testing" + + "github.com/snowplow-devops/stream-replicator/pkg/models" + + "github.com/stretchr/testify/assert" +) + +// TestIntermediateAsSpEnrichedParsed tests that intermediateAsSpEnrichedParsed +// returns the parsed event when provided a snowplow TSV with +func TestIntermediateAsSpEnrichedParsed(t *testing.T) { + assert := assert.New(t) + + // case 1: no intermediate state + res1, err1 := IntermediateAsSpEnrichedParsed(nil, &models.Message{Data: snowplowTsv1}) + + assert.Equal(spTsv1Parsed, res1) + assert.Nil(err1) + + // case 2: intermediate state provided as ParsedEvent + res2, err2 := IntermediateAsSpEnrichedParsed(spTsv2Parsed, &models.Message{Data: snowplowTsv2}) + + assert.Equal(spTsv2Parsed, res2) + assert.Nil(err2) + + // case 3: intermediate state provided as some other type + res3, err3 := IntermediateAsSpEnrichedParsed("not a ParsedEvent", &models.Message{Data: snowplowTsv3}) + + assert.Equal(spTsv3Parsed, res3) + assert.Nil(err3) + + // case 4: message not parseable + res4, err4 := IntermediateAsSpEnrichedParsed(nil, &models.Message{Data: []byte("Not a snowplow event")}) + + assert.Nil(res4) + assert.NotNil(err4) + if err4 != nil { + assert.Equal("Cannot parse tsv event - wrong number of fields provided: 1", err4.Error()) + } +} diff --git a/pkg/transform/transform_test.go b/pkg/transform/transform_test.go index bd5eea4b..7bec84f7 100644 --- a/pkg/transform/transform_test.go +++ b/pkg/transform/transform_test.go @@ -10,8 +10,9 @@ import ( "testing" "time" - "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/pkg/models" ) // To test a function which creates a function, we're creating the function then testing that. Not sure if there's a better way? 
@@ -67,7 +68,7 @@ func TestNewTransformation_EnrichedToJson(t *testing.T) { enrichJSONRes := tranformEnrichJSON(messages) for index, value := range enrichJSONRes.Result { - assert.Equal(expectedGood[index].Data, value.Data) + assert.JSONEq(string(expectedGood[index].Data), string(value.Data)) assert.Equal(expectedGood[index].PartitionKey, value.PartitionKey) assert.NotNil(expectedGood[index].TimeTransformed) @@ -80,7 +81,10 @@ func TestNewTransformation_EnrichedToJson(t *testing.T) { // Not matching equivalence of whole object because error stacktrace makes it unfeasible. Doing each component part instead. assert.Equal(1, len(enrichJSONRes.Invalid)) assert.Equal(int64(1), enrichJSONRes.InvalidCount) - assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", enrichJSONRes.Invalid[0].GetError().Error()) + assert.NotNil(enrichJSONRes.Invalid[0].GetError()) + if enrichJSONRes.Invalid[0].GetError() != nil { + assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", enrichJSONRes.Invalid[0].GetError().Error()) + } assert.Equal([]byte("not a snowplow event"), enrichJSONRes.Invalid[0].Data) assert.Equal("some-key4", enrichJSONRes.Invalid[0].PartitionKey) } @@ -127,9 +131,10 @@ func TestNewTransformation_Multiple(t *testing.T) { enrichJSONRes := tranformMultiple(messages) for index, value := range enrichJSONRes.Result { - assert.Equal(expectedGood[index].Data, value.Data) + assert.JSONEq(string(expectedGood[index].Data), string(value.Data)) assert.Equal(expectedGood[index].PartitionKey, value.PartitionKey) assert.NotNil(expectedGood[index].TimeTransformed) + assert.NotNil(value.TimeTransformed) // assertions to ensure we don't accidentally modify the input assert.NotEqual(messages[index].Data, value.Data) @@ -141,7 +146,11 @@ func TestNewTransformation_Multiple(t *testing.T) { // Not matching equivalence of whole object because error stacktrace makes it unfeasible. Doing each component part instead. 
assert.Equal(1, len(enrichJSONRes.Invalid)) assert.Equal(int64(1), enrichJSONRes.InvalidCount) - assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", enrichJSONRes.Invalid[0].GetError().Error()) + assert.NotNil(enrichJSONRes.Invalid[0].GetError()) + if enrichJSONRes.Invalid[0].GetError() != nil { + assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", enrichJSONRes.Invalid[0].GetError().Error()) + } + assert.Equal([]byte("not a snowplow event"), enrichJSONRes.Invalid[0].Data) assert.Equal("some-key4", enrichJSONRes.Invalid[0].PartitionKey) } diff --git a/pkg/transform/transform_test_variables.go b/pkg/transform/transform_test_variables.go index 57f67336..0e48b5bd 100644 --- a/pkg/transform/transform_test_variables.go +++ b/pkg/transform/transform_test_variables.go @@ -7,21 +7,21 @@ package transform import ( - "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow/snowplow-golang-analytics-sdk/analytics" + + "github.com/snowplow-devops/stream-replicator/pkg/models" ) -var snowplowTsv1 = []byte(`test-data1 pc 2019-05-10 14:40:37.436 2019-05-10 14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 d26822f5-52cc-4292-8f77-14ef6b7a27e2 {"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} python-requests/2.21.0 2019-05-10 14:40:35.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 
2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) +var snowplowTsv1 = []byte(`test-data1 pc 2019-05-10 14:40:37.436 2019-05-10 14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 d26822f5-52cc-4292-8f77-14ef6b7a27e2 {"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} python-requests/2.21.0 2019-05-10 14:40:35.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:com.acme/justInts/jsonschema/1-0-0", "data":{"integerField": 0}},{"schema":"iglu:com.acme/justInts/jsonschema/1-0-0", "data":{"integerField": 1}},{"schema":"iglu:com.acme/justInts/jsonschema/1-0-0", "data":{"integerField": 2}},{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) var spTsv1Parsed, _ = analytics.ParseEvent(string(snowplowTsv1)) -var snowplowJSON1 = 
[]byte(`{"app_id":"test-data1","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) - +var snowplowJSON1 = []byte(`{"app_id":"test-data1","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_com_acme_just_ints_1":[{"integerField":0},{"integerField":1},{"integerField":2}],"contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) var snowplowTsv2 = []byte(`test-data2 pc 2019-05-10 14:40:32.392 2019-05-10 14:40:31.105 2019-05-10 14:40:30.218 transaction_item 5071169f-3050-473f-b03f-9748319b1ef2 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 68220ade-307b-4898-8e25-c4c8ac92f1d7 transaction item58 35.87 1 python-requests/2.21.0 2019-05-10 14:40:30.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 
2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:31.105 com.snowplowanalytics.snowplow transaction_item jsonschema 1-0-0 `) var spTsv2Parsed, _ = analytics.ParseEvent(string(snowplowTsv2)) var snowplowJSON2 = []byte(`{"app_id":"test-data2","collector_tstamp":"2019-05-10T14:40:31.105Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:31.105Z","dvce_created_tstamp":"2019-05-10T14:40:30.218Z","dvce_sent_tstamp":"2019-05-10T14:40:30Z","etl_tstamp":"2019-05-10T14:40:32.392Z","event":"transaction_item","event_format":"jsonschema","event_id":"5071169f-3050-473f-b03f-9748319b1ef2","event_name":"transaction_item","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"68220ade-307b-4898-8e25-c4c8ac92f1d7","platform":"pc","ti_orderid":"transaction\u003cbuilt-in function input\u003e","ti_price":35.87,"ti_quantity":1,"ti_sku":"item58","user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) - -var snowplowTsv3 = []byte(`test-data3 pc 2019-05-10 14:40:30.836 2019-05-10 14:40:29.576 2019-05-10 14:40:29.204 page_view 
e8aef68d-8533-45c6-a672-26a0f01be9bd py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 b66c4a12-8584-4c7a-9a5d-7c96f59e2556 www.demo-site.com/campaign-landing-page landing-page 80 www.demo-site.com/campaign-landing-page python-requests/2.21.0 2019-05-10 14:40:29.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:29.576 com.snowplowanalytics.snowplow page_view jsonschema 1-0-0 `) +var snowplowTsv3 = []byte(`test-data3 pc 2019-05-10 14:40:30.836 2019-05-10 14:40:29.576 2019-05-10 14:40:29.204 page_view e8aef68d-8533-45c6-a672-26a0f01be9bd py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 b66c4a12-8584-4c7a-9a5d-7c96f59e2556 www.demo-site.com/campaign-landing-page landing-page 80 www.demo-site.com/campaign-landing-page python-requests/2.21.0 2019-05-10 14:40:29.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 
2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??","test1":{"test2":[{"test3":"testValue"}]}}}]} 2019-05-10 14:40:29.576 com.snowplowanalytics.snowplow page_view jsonschema 1-0-0 `) var spTsv3Parsed, _ = analytics.ParseEvent(string(snowplowTsv3)) -var snowplowJSON3 = []byte(`{"app_id":"test-data3","collector_tstamp":"2019-05-10T14:40:29.576Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:29.576Z","dvce_created_tstamp":"2019-05-10T14:40:29.204Z","dvce_sent_tstamp":"2019-05-10T14:40:29Z","etl_tstamp":"2019-05-10T14:40:30.836Z","event":"page_view","event_format":"jsonschema","event_id":"e8aef68d-8533-45c6-a672-26a0f01be9bd","event_name":"page_view","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"b66c4a12-8584-4c7a-9a5d-7c96f59e2556","page_title":"landing-page","page_url":"www.demo-site.com/campaign-landing-page","page_urlpath":"www.demo-site.com/campaign-landing-page","page_urlport":80,"platform":"pc","user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) +var snowplowJSON3 = []byte(`{"app_id":"test-data3","collector_tstamp":"2019-05-10T14:40:29.576Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??","test1":{"test2":[{"test3":"testValue"}]}}],"derived_tstamp":"2019-05-10T14:40:29.576Z","dvce_created_tstamp":"2019-05-10T14:40:29.204Z","dvce_sent_tstamp":"2019-05-10T14:40:29Z","etl_tstamp":"2019-05-10T14:40:30.836Z","event":"page_view","event_format":"jsonschema","event_id":"e8aef68d-8533-45c6-a672-26a0f01be9bd","event_name":"page_view","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"b66c4a12-8584-4c7a-9a5d-7c96f59e2556","page_title":"landing-page","page_url":"www.demo-site.com/campaign-landing-page","page_urlpath":"www.demo-site.com/campaign-landing-page","page_urlport":80,"platform":"pc","user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) +var snowplowTsv4 = []byte(`test-data3 pc 2019-05-10 14:40:30.836 2019-05-10 14:40:29.576 2019-05-10 14:40:29.204 page_view e8aef68d-8533-45c6-a672-26a0f01be9bd py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 b66c4a12-8584-4c7a-9a5d-7c96f59e2556 www.demo-site.com/campaign-landing-page landing-page 80 www.demo-site.com/campaign-landing-page python-requests/2.21.0 2019-05-10 14:40:29.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 
2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??","test1":{"test2":[{"test3":1}]}}}]} 2019-05-10 14:40:29.576 com.snowplowanalytics.snowplow page_view jsonschema 1-0-0 `) var nonSnowplowString = []byte(`not a snowplow event`) diff --git a/pkg/transform/transformconfig/transform_config.go b/pkg/transform/transformconfig/transform_config.go new file mode 100644 index 00000000..fd4d8ee2 --- /dev/null +++ b/pkg/transform/transformconfig/transform_config.go @@ -0,0 +1,268 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package transformconfig + +import ( + "fmt" + "regexp" + + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + + "github.com/snowplow-devops/stream-replicator/config" + "github.com/snowplow-devops/stream-replicator/pkg/transform" + "github.com/snowplow-devops/stream-replicator/pkg/transform/engine" +) + +// Transformation represents a transformation's configuration +type Transformation struct { + // For native filters + Description string `hcl:"description,optional"` + UnstructEventName string `hcl:"unstruct_event_name,optional"` + UnstructEventVersionRegex string `hcl:"unstruct_event_version_regex,optional"` + ContextFullName string `hcl:"context_full_name,optional"` + CustomFieldPath string `hcl:"custom_field_path,optional"` + AtomicField string `hcl:"atomic_field,optional"` + Regex string `hcl:"regex,optional"` + RegexTimeout int `hcl:"regex_timeout,optional"` + // for JS and Lua transformations + SourceB64 string `hcl:"source_b64,optional"` + TimeoutSec int `hcl:"timeout_sec,optional"` + Sandbox bool `hcl:"sandbox,optional"` + SpMode bool `hcl:"snowplow_mode,optional"` + 
DisableSourceMaps bool `hcl:"disable_source_maps,optional"` + + Engine engine.Engine + Name string +} + +// TransformationAdapter is an adapter for transformations to be used +// as pluggable components. It implements the Pluggable interface. +type TransformationAdapter func(i interface{}) (interface{}, error) + +// ProvideDefault returns an empty Transformation to be used as default +func (t TransformationAdapter) ProvideDefault() (interface{}, error) { + return &Transformation{}, nil +} + +// Create implements the ComponentCreator interface +func (t TransformationAdapter) Create(i interface{}) (interface{}, error) { + return t(i) +} + +// TransformationConfigFunction creates a Transformation from a TransformationConfig +func TransformationConfigFunction(c *Transformation) (*Transformation, error) { + return c, nil +} + +// AdaptTransformationsFunc returns an TransformationsAdapter. +func AdaptTransformationsFunc(f func(c *Transformation) (*Transformation, error)) TransformationAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*Transformation) + if !ok { + return nil, errors.New("invalid input, expected Transformation") + } + + return f(cfg) + } +} + +// ValidateTransformations validates the transformation according to rules. +// The reason for this function is to make the validation part explicit and +// separate it from GetTransformations. 
+func ValidateTransformations(transformations []*Transformation) []error { + var validationErrors []error + for idx, transformation := range transformations { + switch transformation.Name { + case "spEnrichedToJson": + continue + case "spEnrichedSetPk": + if transformation.AtomicField == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedSetPk, empty atomic field`, idx)) + continue + } + case "spEnrichedFilter": + if transformation.AtomicField == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilter, empty atomic field`, idx)) + } + if transformation.Regex == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilter, empty regex`, idx)) + } else { + _, err := regexp.Compile(transformation.Regex) + if err != nil { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilter, regex does not compile. error: %v`, idx, err)) + } + } + continue + case "spEnrichedFilterContext": + if transformation.ContextFullName == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterContext, empty context full name`, idx)) + } + if transformation.CustomFieldPath == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterContext, empty custom field path`, idx)) + } + if transformation.Regex == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterContext, empty regex`, idx)) + } else { + _, err := regexp.Compile(transformation.Regex) + if err != nil { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterContext, regex does not compile. 
error: %v`, idx, err)) + } + } + continue + case "spEnrichedFilterUnstructEvent": + if transformation.CustomFieldPath == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterUnstructEvent, empty custom field path`, idx)) + } + if transformation.UnstructEventName == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterUnstructEvent, empty event name`, idx)) + } + if transformation.Regex == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterUnstructEvent, empty regex`, idx)) + } else { + _, err := regexp.Compile(transformation.Regex) + if err != nil { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterUnstructEvent, regex does not compile. error: %v`, idx, err)) + } + } + continue + case "lua": + if transformation.Engine.SmokeTest(`main`) != nil { + validationErrors = append(validationErrors, fmt.Errorf(`validation error in lua transformation #%d, main() smoke test failed`, idx)) + continue + } + case "js": + if transformation.Engine.SmokeTest(`main`) != nil { + validationErrors = append(validationErrors, fmt.Errorf(`validation error in js transformation #%d, main() smoke test failed`, idx)) + continue + } + default: + validationErrors = append(validationErrors, fmt.Errorf(`invalid transformation name: %s`, transformation.Name)) + } + } + return validationErrors +} + +// MkEngineFunction is a helper method used in GetTransformations +// It creates, smoke-tests and returns a custom transformation function. +func MkEngineFunction(trans *Transformation) (transform.TransformationFunction, error) { + if trans.Engine != nil { + return trans.Engine.MakeFunction(`main`), nil + } + + return nil, errors.New(`could not find engine for transformation`) +} + +// GetTransformations builds and returns transformationApplyFunction +// from the transformations configured. 
+func GetTransformations(c *config.Config) (transform.TransformationApplyFunction, error) { + transformations := make([]*Transformation, len(c.Data.Transformations)) + for idx, transformation := range c.Data.Transformations { + var enginePlug config.Pluggable + var eng engine.Engine + decoderOpts := &config.DecoderOptions{ + Input: transformation.Use.Body, + } + if transformation.Use.Name == `lua` { + enginePlug = engine.AdaptLuaEngineFunc(engine.LuaEngineConfigFunction) + component, err := c.CreateComponent(enginePlug, decoderOpts) + if err != nil { + return nil, err + } + + engine, ok := component.(engine.Engine) + if !ok { + return nil, errors.New("cannot create lua engine") + } + eng = engine + } + if transformation.Use.Name == `js` { + enginePlug = engine.AdaptJSEngineFunc(engine.JSEngineConfigFunction) + component, err := c.CreateComponent(enginePlug, decoderOpts) + if err != nil { + return nil, err + } + + engine, ok := component.(engine.Engine) + if !ok { + return nil, errors.New("cannot create js engine") + } + eng = engine + } + + plug := AdaptTransformationsFunc(TransformationConfigFunction) + + component, err := c.CreateComponent(plug, &config.DecoderOptions{ + Input: transformation.Use.Body, + }) + if err != nil { + return nil, err + } + + trans, ok := component.(*Transformation) + if !ok { + return nil, fmt.Errorf(`error parsing transformation: %s`, transformation.Use.Name) + } + if eng != nil { + trans.Engine = eng + } + trans.Name = transformation.Use.Name + transformations[idx] = trans + } + + validationErrors := ValidateTransformations(transformations) + if validationErrors != nil { + for _, err := range validationErrors { + log.Errorf("validation error: %v", err) + } + return nil, errors.New(`transformations validation returned errors`) + } + + funcs := make([]transform.TransformationFunction, 0, len(transformations)) + for _, transformation := range transformations { + switch transformation.Name { + // Builtin transformations + case 
"spEnrichedToJson": + funcs = append(funcs, transform.SpEnrichedToJSON) + case "spEnrichedSetPk": + funcs = append(funcs, transform.NewSpEnrichedSetPkFunction(transformation.AtomicField)) + case "spEnrichedFilter": + filterFunc, err := transform.NewSpEnrichedFilterFunction(transformation.AtomicField, transformation.Regex, transformation.RegexTimeout) + if err != nil { + return nil, err + } + funcs = append(funcs, filterFunc) + case "spEnrichedFilterContext": + filterFunc, err := transform.NewSpEnrichedFilterFunctionContext(transformation.ContextFullName, transformation.CustomFieldPath, transformation.Regex, transformation.RegexTimeout) + if err != nil { + return nil, err + } + funcs = append(funcs, filterFunc) + case "spEnrichedFilterUnstructEvent": + filterFunc, err := transform.NewSpEnrichedFilterFunctionUnstructEvent(transformation.UnstructEventName, transformation.UnstructEventVersionRegex, transformation.CustomFieldPath, transformation.Regex, transformation.RegexTimeout) + if err != nil { + return nil, err + } + funcs = append(funcs, filterFunc) + // Custom transformations + case "lua": + luaFunc, err := MkEngineFunction(transformation) + if err != nil { + return nil, err + } + funcs = append(funcs, luaFunc) + case "js": + jsFunc, err := MkEngineFunction(transformation) + if err != nil { + return nil, err + } + funcs = append(funcs, jsFunc) + } + } + + return transform.NewTransformation(funcs...), nil +} diff --git a/pkg/transform/transformconfig/transform_config_test.go b/pkg/transform/transformconfig/transform_config_test.go new file mode 100644 index 00000000..c42c243e --- /dev/null +++ b/pkg/transform/transformconfig/transform_config_test.go @@ -0,0 +1,534 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package transformconfig + +import ( + "encoding/base64" + "errors" + "fmt" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/config" + "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/snowplow-devops/stream-replicator/pkg/transform/engine" +) + +func TestMkEngineFunction(t *testing.T) { + var eng engine.Engine + eng = &engine.JSEngine{ + Code: nil, + RunTimeout: 15, + SpMode: false, + } + testCases := []struct { + Name string + Engines []engine.Engine + Transformation *Transformation + ExpectedErr error + }{ + { + Name: "no engine", + Engines: nil, + Transformation: &Transformation{ + Name: "js", + }, + ExpectedErr: fmt.Errorf("could not find engine for transformation"), + }, + { + Name: "success", + Engines: []engine.Engine{eng}, + Transformation: &Transformation{ + Name: "js", + Engine: eng, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + fun, err := MkEngineFunction(tt.Transformation) + + if tt.ExpectedErr != nil { + assert.Equal(tt.ExpectedErr.Error(), err.Error()) + assert.Nil(fun) + } else { + assert.Nil(err) + assert.NotNil(fun) + } + }) + } +} + +func TestValidateTransformations(t *testing.T) { + srcCode := ` +function main(x) + local jsonObj, _ = json.decode(x) + local result, _ = json.encode(jsonObj) + + return result +end +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + + luaConfig := &engine.LuaEngineConfig{ + SourceB64: src, + RunTimeout: 5, + Sandbox: false, + } + + luaEngine, err := engine.NewLuaEngine(luaConfig) + assert.NotNil(t, luaEngine) + if err != nil { + t.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) + } + + srcCode = ` +function notMain(x) + return x +end +` + src = base64.StdEncoding.EncodeToString([]byte(srcCode)) + + luaConfig = &engine.LuaEngineConfig{ + SourceB64: src, + RunTimeout: 5, + Sandbox: false, + } + + 
luaEngineNoMain, err := engine.NewLuaEngine(luaConfig) + assert.NotNil(t, luaEngineNoMain) + if err != nil { + t.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) + } + + srcCode = ` +function main(x) { + return x; +} +` + src = base64.StdEncoding.EncodeToString([]byte(srcCode)) + jsConfig := &engine.JSEngineConfig{ + SourceB64: src, + RunTimeout: 5, + } + + jsEngine, err := engine.NewJSEngine(jsConfig) + assert.NotNil(t, jsEngine) + if err != nil { + t.Fatalf("function NewJSEngine failed with error: %q", err.Error()) + } + + srcCode = ` +function notMain(x) { + return x; +} +` + src = base64.StdEncoding.EncodeToString([]byte(srcCode)) + jsConfig = &engine.JSEngineConfig{ + SourceB64: src, + RunTimeout: 5, + } + + jsEngineNoMain, err := engine.NewJSEngine(jsConfig) + assert.NotNil(t, jsEngine) + if err != nil { + t.Fatalf("function NewJSEngine failed with error: %q", err.Error()) + } + + testCases := []struct { + Name string + Transformations []*Transformation + ExpectedErrs []error + }{ + { + Name: "invalid name", + Transformations: []*Transformation{{ + Name: "wrongName", + }}, + ExpectedErrs: []error{fmt.Errorf("invalid transformation name: wrongName")}, + }, + { + Name: "spEnrichedSetPk success", + Transformations: []*Transformation{{ + Name: "spEnrichedSetPk", + AtomicField: `app_id`, + }}, + }, + { + Name: "spEnrichedSetPk no field", + Transformations: []*Transformation{{ + Name: "spEnrichedSetPk", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedSetPk, empty atomic field")}, + }, + { + Name: "spEnrichedFilter success", + Transformations: []*Transformation{{ + Name: "spEnrichedFilter", + AtomicField: "app_id", + Regex: "test.+", + }}, + }, + { + Name: "spEnrichedFilter regexp does not compile", + Transformations: []*Transformation{{ + Name: "spEnrichedFilter", + AtomicField: "app_id", + Regex: "?(?=-)", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilter, regex does not compile. 
error: error parsing regexp: missing argument to repetition operator: `?`")}, + }, + { + Name: "spEnrichedFilter empty atomic field", + Transformations: []*Transformation{{ + Name: "spEnrichedFilter", + Regex: "test.+", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilter, empty atomic field")}, + }, + { + Name: "spEnrichedFilter empty regex", + Transformations: []*Transformation{{ + Name: "spEnrichedFilter", + AtomicField: "app_id", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilter, empty regex")}, + }, + { + Name: "spEnrichedFilterContext success", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterContext", + ContextFullName: "contexts_nl_basjes_yauaa_context_1", + CustomFieldPath: "test1.test2[0]", + Regex: "test.+", + }}, + }, + { + Name: "spEnrichedFilterContext regexp does not compile", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterContext", + ContextFullName: "contexts_nl_basjes_yauaa_context_1", + CustomFieldPath: "test1.test2[0]", + Regex: "?(?=-)", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterContext, regex does not compile. 
error: error parsing regexp: missing argument to repetition operator: `?`")}, + }, + { + Name: "spEnrichedFilterContext empty custom field path", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterContext", + Regex: "test.+", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterContext, empty context full name"), fmt.Errorf("validation error #0 spEnrichedFilterContext, empty custom field path")}, + }, + { + Name: "spEnrichedFilterContext empty regex", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterContext", + ContextFullName: "contexts_nl_basjes_yauaa_context_1", + CustomFieldPath: "test1.test2[0]", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterContext, empty regex")}, + }, + { + Name: "spEnrichedFilterUnstructEvent success", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterUnstructEvent", + CustomFieldPath: "sku", + Regex: "test.+", + UnstructEventName: "add_to_cart", + }}, + }, + { + Name: "spEnrichedFilterUnstructEvent regexp does not compile", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterUnstructEvent", + CustomFieldPath: "sku", + Regex: "?(?=-)", + UnstructEventName: "add_to_cart", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, regex does not compile. 
error: error parsing regexp: missing argument to repetition operator: `?`")}, + }, + { + Name: "spEnrichedFilterUnstructEvent empty custom field path and event name", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterUnstructEvent", + Regex: "test.+", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, empty custom field path"), fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, empty event name")}, + }, + { + Name: "spEnrichedFilterUnstructEvent empty regex and event name", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterUnstructEvent", + CustomFieldPath: "sku", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, empty event name"), fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, empty regex")}, + }, + { + Name: "lua success", + Transformations: []*Transformation{{ + Name: "lua", + Engine: luaEngine, + }}, + }, + { + Name: "lua main() smoke test failed", + Transformations: []*Transformation{{ + Name: "lua", + Engine: luaEngineNoMain, + }}, + ExpectedErrs: []error{fmt.Errorf("validation error in lua transformation #0, main() smoke test failed")}, + }, + { + Name: "js success", + Transformations: []*Transformation{{ + Name: "js", + Engine: jsEngine, + }}, + }, + { + Name: "js main() smoke test failed", + Transformations: []*Transformation{{ + Name: "js", + Engine: jsEngineNoMain, + }}, + ExpectedErrs: []error{fmt.Errorf("validation error in js transformation #0, main() smoke test failed")}, + }, + { + Name: "multiple validation errors", + Transformations: []*Transformation{ + { + Name: "js", + Engine: jsEngineNoMain, + }, + { + Name: "spEnrichedFilter", + Regex: "test.+", + }, + // a successful transformation mixed in to test transformation counter + { + Name: "spEnrichedToJson", + }, + { + Name: "spEnrichedSetPk", + }, + }, + ExpectedErrs: []error{ + fmt.Errorf("validation error in js transformation #0, main() smoke test 
failed"), + fmt.Errorf("validation error #1 spEnrichedFilter, empty atomic field"), + fmt.Errorf("validation error #3 spEnrichedSetPk, empty atomic field"), + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + valErrs := ValidateTransformations(tt.Transformations) + if tt.ExpectedErrs != nil { + for idx, valErr := range valErrs { + assert.Equal(tt.ExpectedErrs[idx].Error(), valErr.Error()) + } + } else { + assert.Nil(valErrs) + } + }) + } +} + +func TestEnginesAndTransformations(t *testing.T) { + var messageJSCompileErr = &models.Message{ + Data: snowplowTsv1, + PartitionKey: "some-key", + } + messageJSCompileErr.SetError(errors.New(`failed initializing JavaScript runtime: "could not assert as function: \"main\""`)) + + testFixPath := "../../../config/test-fixtures" + testCases := []struct { + Description string + File string + ExpectedTransforms []Transformation + ExpectedMessages expectedMessages + CompileErr string + }{ + { + Description: "simple transform success", + File: "transform-js-simple.hcl", + ExpectedMessages: expectedMessages{ + Before: []*models.Message{{ + Data: snowplowTsv1, + PartitionKey: "some-key", + }}, + After: []*models.Message{{ + Data: snowplowTsv1, + PartitionKey: "some-key", + }}, + }, + }, + { + Description: "simple transform with js compile error", + File: "transform-js-error.hcl", + ExpectedMessages: expectedMessages{ + Before: []*models.Message{{ + Data: snowplowJSON1, + PartitionKey: "some-key", + }}, + After: []*models.Message{messageJSCompileErr}, + }, + CompileErr: `SyntaxError`, + }, + { + Description: `mixed success`, + File: "transform-mixed.hcl", + ExpectedMessages: expectedMessages{ + Before: []*models.Message{{ + Data: snowplowJSON1, + PartitionKey: "some-key", + }}, + After: []*models.Message{{ + Data: snowplowJSON1Mixed, + PartitionKey: "some-key", + }}, + }, + }, + { + Description: `mixed success, order test`, + File: "transform-mixed-order.hcl", + // 
initial app_id should be changed to 1, then if the app_id is 1, it should be changed to 2, then 3 + ExpectedMessages: expectedMessages{ + Before: []*models.Message{{ + Data: snowplowJSON1, + PartitionKey: "some-key", + }}, + After: []*models.Message{{ + Data: snowplowJSON1Order, + PartitionKey: "some-key", + }}, + }, + }, + { + Description: `mixed with error`, + File: "transform-mixed-error.hcl", + ExpectedMessages: expectedMessages{ + Before: []*models.Message{{ + Data: snowplowJSON1, + PartitionKey: "some-key", + }}, + After: []*models.Message{messageJSCompileErr}, + }, + CompileErr: `SyntaxError`, + }, + { + Description: `mixed with filter success`, + File: "transform-mixed-filtered.hcl", + ExpectedMessages: expectedMessages{ + Before: []*models.Message{{ + Data: snowplowTsv1, + PartitionKey: "some-key", + }}, + After: []*models.Message{{ + Data: snowplowTsv1, + PartitionKey: "some-key", + }}, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.Description, func(t *testing.T) { + assert := assert.New(t) + + filename := filepath.Join(testFixPath, tt.File) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) + + c, err := config.NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + // get transformations, and run the transformations on the expected messages + tr, err := GetTransformations(c) + if tt.CompileErr != `` { + assert.True(strings.HasPrefix(err.Error(), tt.CompileErr)) + assert.Nil(tr) + return + } + + if err != nil { + t.Fatalf(err.Error()) + } + + result := tr(tt.ExpectedMessages.Before) + assert.NotNil(result) + assert.Equal(int(result.ResultCount+result.FilteredCount+result.InvalidCount), len(tt.ExpectedMessages.After)) + + // check result for successfully transformed messages + for idx, resultMessage := range result.Result { + assert.Equal(resultMessage.Data, tt.ExpectedMessages.After[idx].Data) + } + + // check errors for invalid messages + for idx, resultMessage := 
range result.Invalid { + assert.Equal(resultMessage.GetError(), tt.ExpectedMessages.After[idx].GetError()) + } + + // check result for transformed messages in case of filtered results + if result.FilteredCount != 0 { + assert.NotNil(result.Filtered) + for idx, resultMessage := range result.Filtered { + assert.Equal(resultMessage.Data, tt.ExpectedMessages.After[idx].Data) + } + } + }) + } +} + +type expectedMessages struct { + Before []*models.Message + After []*models.Message +} + +var snowplowTsv1 = []byte(`test-data1 pc 2019-05-10 14:40:37.436 2019-05-10 14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 d26822f5-52cc-4292-8f77-14ef6b7a27e2 {"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} python-requests/2.21.0 2019-05-10 14:40:35.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) +var snowplowJSON1 = []byte(`{"app_id":"test-data1","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 
2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) +var snowplowTsv2 = []byte(`test-data2 pc 2019-05-10 14:40:32.392 2019-05-10 14:40:31.105 2019-05-10 14:40:30.218 transaction_item 5071169f-3050-473f-b03f-9748319b1ef2 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 68220ade-307b-4898-8e25-c4c8ac92f1d7 transaction item58 35.87 1 python-requests/2.21.0 2019-05-10 14:40:30.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 
2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:31.105 com.snowplowanalytics.snowplow transaction_item jsonschema 1-0-0 `) +var snowplowTsv3 = []byte(`test-data3 pc 2019-05-10 14:40:30.836 2019-05-10 14:40:29.576 2019-05-10 14:40:29.204 page_view e8aef68d-8533-45c6-a672-26a0f01be9bd py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 b66c4a12-8584-4c7a-9a5d-7c96f59e2556 www.demo-site.com/campaign-landing-page landing-page 80 www.demo-site.com/campaign-landing-page python-requests/2.21.0 2019-05-10 14:40:29.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??","test1":{"test2":[{"test3":"testValue"}]}}}]} 2019-05-10 14:40:29.576 com.snowplowanalytics.snowplow page_view jsonschema 1-0-0 `) + +var nonSnowplowString = []byte(`not a snowplow event`) + +var messages = []*models.Message{ + { + Data: snowplowTsv1, + PartitionKey: "some-key", + }, + { + Data: snowplowTsv2, + PartitionKey: "some-key1", + }, + { + Data: snowplowTsv3, + PartitionKey: "some-key2", + }, + { + Data: nonSnowplowString, + PartitionKey: "some-key4", + }, +} + +// snowplowJSON1 with 3 transformations applied +var snowplowJSON1Mixed = 
[]byte(`Hello:{"app_id":"again","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) + +// snowplowJSON1 with 3 transformations applied, for order test +var snowplowJSON1Order = []byte(`{"app_id":"3","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) diff --git a/third_party/snowplow/badrows/bad_row.go b/third_party/snowplow/badrows/bad_row.go index 6eb8acbc..701d5ffd 100644 --- a/third_party/snowplow/badrows/bad_row.go +++ b/third_party/snowplow/badrows/bad_row.go @@ -33,8 +33,8 @@ type BadRow struct { selfDescribingData *iglu.SelfDescribingData } -// NewBadRow returns a new bad-row structure -func NewBadRow(schema string, data map[string]interface{}, payload []byte, targetByteLimit int) (*BadRow, error) { +// newBadRow returns a new bad-row structure +func newBadRow(schema string, data map[string]interface{}, payload []byte, targetByteLimit int) (*BadRow, error) { payloadLength := len(payload) // Ensure data map does not contain anything for payload diff --git a/third_party/snowplow/badrows/bad_row_test.go b/third_party/snowplow/badrows/bad_row_test.go index 281223ce..127955e0 100644 --- 
a/third_party/snowplow/badrows/bad_row_test.go +++ b/third_party/snowplow/badrows/bad_row_test.go @@ -23,7 +23,10 @@ func TestNewBadRow_InvalidData(t *testing.T) { }, } - br, err := NewBadRow(schema, data, []byte("Hello World!"), 5000) + br, err := newBadRow(schema, data, []byte("Hello World!"), 5000) assert.NotNil(err) + if err != nil { + assert.Equal("Could not unmarshall bad-row data blob to JSON: json: unsupported type: map[bool]string", err.Error()) + } assert.Nil(br) } diff --git a/third_party/snowplow/badrows/generic_error.go b/third_party/snowplow/badrows/generic_error.go index 10ede9ac..e2a3b4c8 100644 --- a/third_party/snowplow/badrows/generic_error.go +++ b/third_party/snowplow/badrows/generic_error.go @@ -42,7 +42,7 @@ func NewGenericError(input *GenericErrorInput, targetByteLimit int) (*BadRow, er }, } - return NewBadRow( + return newBadRow( genericErrorSchema, data, input.Payload, diff --git a/third_party/snowplow/badrows/size_violation.go b/third_party/snowplow/badrows/size_violation.go index 24e2727b..2fc65dc8 100644 --- a/third_party/snowplow/badrows/size_violation.go +++ b/third_party/snowplow/badrows/size_violation.go @@ -40,7 +40,7 @@ func NewSizeViolation(input *SizeViolationInput, targetByteLimit int) (*BadRow, }, } - return NewBadRow( + return newBadRow( sizeViolationSchema, data, input.Payload, diff --git a/third_party/snowplow/badrows/size_violation_test.go b/third_party/snowplow/badrows/size_violation_test.go index c7a60e03..6e2be626 100644 --- a/third_party/snowplow/badrows/size_violation_test.go +++ b/third_party/snowplow/badrows/size_violation_test.go @@ -80,7 +80,9 @@ func TestNewSizeViolation_NotEnoughBytes(t *testing.T) { }, 10, ) - assert.NotNil(err) assert.Nil(sv) - assert.Equal("Failed to create bad-row as resultant payload will exceed the targets byte limit", err.Error()) + assert.NotNil(err) + if err != nil { + assert.Equal("Failed to create bad-row as resultant payload will exceed the targets byte limit", err.Error()) + } } 
diff --git a/third_party/snowplow/iglu/self_describing_data_test.go b/third_party/snowplow/iglu/self_describing_data_test.go index 91f3a5f7..8d46121d 100644 --- a/third_party/snowplow/iglu/self_describing_data_test.go +++ b/third_party/snowplow/iglu/self_describing_data_test.go @@ -45,5 +45,8 @@ func TestNewSelfDescribingData_InvalidData(t *testing.T) { sddString, err := sdd.String() assert.NotNil(err) + if err != nil { + assert.Equal("json: unsupported type: map[bool]string", err.Error()) + } assert.Equal("", sddString) }