diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index 19a11a3384..5136fdedca 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -22,7 +22,6 @@ on: - "invitations/api/**" - "journal/api/**" - "provision/api/**" - - "readers/api/**" - "users/api/**" env: @@ -42,8 +41,6 @@ env: BOOTSTRAP_URL: http://localhost:9013 CERTS_URL: http://localhost:9019 PROVISION_URL: http://localhost:9016 - POSTGRES_READER_URL: http://localhost:9009 - TIMESCALE_READER_URL: http://localhost:9011 JOURNAL_URL: http://localhost:9021 jobs: @@ -118,11 +115,6 @@ jobs: - "apidocs/openapi/provision.yml" - "provision/api/**" - readers: - - ".github/workflows/api-tests.yml" - - "apidocs/openapi/readers.yml" - - "readers/api/**" - clients: - ".github/workflows/api-tests.yml" - "apidocs/openapi/clients.yml" @@ -263,32 +255,6 @@ jobs: report: false args: '--header "Authorization: Bearer ${{ env.USER_TOKEN }}" --contrib-openapi-formats-uuid --hypothesis-suppress-health-check=filter_too_much --stateful=links' - - name: Seed Messages - if: steps.changes.outputs.readers == 'true' - run: | - make cli - ./build/cli provision test - - - name: Run Postgres Reader API tests - if: steps.changes.outputs.readers == 'true' - uses: schemathesis/action@v1 - with: - schema: apidocs/openapi/readers.yml - base-url: ${{ env.POSTGRES_READER_URL }} - checks: all - report: false - args: '--header "Authorization: Bearer ${{ env.USER_TOKEN }}" --contrib-openapi-formats-uuid --hypothesis-suppress-health-check=filter_too_much --stateful=links' - - - name: Run Timescale Reader API tests - if: steps.changes.outputs.readers == 'true' - uses: schemathesis/action@v1 - with: - schema: apidocs/openapi/readers.yml - base-url: ${{ env.TIMESCALE_READER_URL }} - checks: all - report: false - args: '--header "Authorization: Bearer ${{ env.USER_TOKEN }}" --contrib-openapi-formats-uuid --hypothesis-suppress-health-check=filter_too_much --stateful=links' - - name: Stop containers if: always() run: make run down args="-v" && make run_addons down args="-v" diff --git a/.github/workflows/check-generated-files.yml b/.github/workflows/check-generated-files.yml index 4bd66241a0..6893efa1ea 100644 --- a/.github/workflows/check-generated-files.yml +++ b/.github/workflows/check-generated-files.yml @@ -65,7 +65,6 @@ jobs: - "users/emailer.go" - "users/hasher.go" - "mqtt/events/streams.go" - - "readers/messages.go" - "consumers/notifiers/notifier.go" - "consumers/notifiers/service.go" - "consumers/notifiers/subscriptions.go" diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 1c407abc13..5059023ece 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -243,7 +243,6 @@ jobs: - "internal/groups/**" - "invitations/**" - "provision/**" - - "readers/**" - "clients/**" - "users/**" @@ -262,15 +261,6 @@ jobs: - "logger/**" - "pkg/sdk/**" - readers: - - "readers/**" - - "cmd/postgres-reader/**" - - "cmd/timescale-reader/**" - - "auth.pb.go" - - "auth_grpc.pb.go" - - "clients/**" - - "auth/**" - users: - "users/**" - "cmd/users/**" @@ -403,11 +393,6 @@ jobs: run: | go test --race -v -count=1 -coverprofile=coverage/provision.out ./provision/... - - name: Run readers tests - if: steps.changes.outputs.readers == 'true' || steps.changes.outputs.workflow == 'true' - run: | - go test --race -v -count=1 -coverprofile=coverage/readers.out ./readers/... 
- - name: Run clients tests if: steps.changes.outputs.clients == 'true' || steps.changes.outputs.workflow == 'true' run: | diff --git a/Makefile b/Makefile index 8e175b5e86..1f7ea3f1f5 100644 --- a/Makefile +++ b/Makefile @@ -3,9 +3,9 @@ SMQ_DOCKER_IMAGE_NAME_PREFIX ?= supermq BUILD_DIR ?= build -SERVICES = auth users clients groups channels domains http coap ws postgres-writer postgres-reader timescale-writer \ - timescale-reader cli bootstrap mqtt provision certs invitations journal -TEST_API_SERVICES = journal auth bootstrap certs http invitations notifiers provision readers clients users channels groups domains +SERVICES = auth users clients groups channels domains http coap ws postgres-writer timescale-writer \ + cli bootstrap mqtt provision certs invitations journal +TEST_API_SERVICES = journal auth bootstrap certs http invitations notifiers provision clients users channels groups domains TEST_API = $(addprefix test_api_,$(TEST_API_SERVICES)) DOCKERS = $(addprefix docker_,$(SERVICES)) DOCKERS_DEV = $(addprefix docker_dev_,$(SERVICES)) @@ -73,7 +73,7 @@ define make_docker_dev -f docker/Dockerfile.dev ./build endef -ADDON_SERVICES = bootstrap journal provision certs timescale-reader timescale-writer postgres-reader postgres-writer +ADDON_SERVICES = bootstrap journal provision certs timescale-writer postgres-writer EXTERNAL_SERVICES = vault prometheus @@ -178,7 +178,6 @@ test_api_auth: TEST_API_URL := http://localhost:9001 test_api_bootstrap: TEST_API_URL := http://localhost:9013 test_api_certs: TEST_API_URL := http://localhost:9019 test_api_provision: TEST_API_URL := http://localhost:9016 -test_api_readers: TEST_API_URL := http://localhost:9009 # This can be the URL of any reader service. test_api_journal: TEST_API_URL := http://localhost:9021 $(TEST_API): diff --git a/apidocs/openapi/readers.yml b/apidocs/openapi/readers.yml deleted file mode 100644 index 6436bbdca2..0000000000 --- a/apidocs/openapi/readers.yml +++ /dev/null @@ -1,313 +0,0 @@ -# Copyright (c) Abstract Machines -# SPDX-License-Identifier: Apache-2.0 - -openapi: 3.0.1 -info: - title: SuperMQ reader service - description: | - HTTP API for reading messages. - Some useful links: - - [The SuperMQ repository](https://github.com/absmach/supermq) - contact: - email: info@abstractmachines.fr - license: - name: Apache 2.0 - url: https://github.com/absmach/supermq/blob/main/LICENSE - version: 0.15.1 - -servers: - - url: http://localhost:9003 - - url: https://localhost:9003 - - url: http://localhost:9005 - - url: https://localhost:9005 - - url: http://localhost:9007 - - url: https://localhost:9007 - - url: http://localhost:9009 - - url: https://localhost:9009 - - url: http://localhost:9011 - - url: https://localhost:9011 - -tags: - - name: readers - description: Everything about your Readers - externalDocs: - description: Find out more about readers - url: https://docs.supermq.abstractmachines.fr/ - -paths: - /channels/{chanId}/messages: - get: - operationId: getMessages - summary: Retrieves messages sent to single channel - description: | - Retrieves a list of messages sent to specific channel. Due to - performance concerns, data is retrieved in subsets. The API readers must - ensure that the entire dataset is consumed either by making subsequent - requests, or by increasing the subset size of the initial request. 
- tags: - - readers - parameters: - - $ref: "#/components/parameters/ChanId" - - $ref: "#/components/parameters/Limit" - - $ref: "#/components/parameters/Offset" - - $ref: "#/components/parameters/Publisher" - - $ref: "#/components/parameters/Name" - - $ref: "#/components/parameters/Value" - - $ref: "#/components/parameters/BoolValue" - - $ref: "#/components/parameters/StringValue" - - $ref: "#/components/parameters/DataValue" - - $ref: "#/components/parameters/From" - - $ref: "#/components/parameters/To" - - $ref: "#/components/parameters/Aggregation" - - $ref: "#/components/parameters/Interval" - responses: - "200": - $ref: "#/components/responses/MessagesPageRes" - "400": - description: Failed due to malformed query parameters. - "401": - description: Missing or invalid access token provided. - "500": - $ref: "#/components/responses/ServiceError" - /health: - get: - operationId: health - summary: Retrieves service health check info. - tags: - - health - security: [] - responses: - "200": - $ref: "#/components/responses/HealthRes" - "500": - $ref: "#/components/responses/ServiceError" - -components: - schemas: - MessagesPage: - type: object - properties: - total: - type: number - description: Total number of items that are present on the system. - offset: - type: number - description: Number of items that were skipped during retrieval. - limit: - type: number - description: Size of the subset that was retrieved. - messages: - type: array - minItems: 0 - uniqueItems: true - items: - type: object - properties: - channel: - type: integer - description: Unique channel id. - publisher: - type: integer - description: Unique publisher id. - protocol: - type: string - description: Protocol name. - name: - type: string - description: Measured parameter name. - unit: - type: string - description: Value unit. - value: - type: number - description: Measured value in number. - stringValue: - type: string - description: Measured value in string format. - boolValue: - type: boolean - description: Measured value in boolean format. - dataValue: - type: string - description: Measured value in binary format. - valueSum: - type: number - description: Sum value. - time: - type: number - description: Time of measurement. - updateTime: - type: number - description: Time of updating measurement. - - parameters: - DomainID: - name: domainID - description: Unique domain identifier. - in: path - schema: - type: string - format: uuid - required: true - ChanId: - name: chanId - description: Unique channel identifier. - in: path - schema: - type: string - format: uuid - required: true - Limit: - name: limit - description: Size of the subset to retrieve. - in: query - schema: - type: integer - default: 10 - maximum: 100 - minimum: 1 - required: false - Offset: - name: offset - description: Number of items to skip during retrieval. - in: query - schema: - type: integer - default: 0 - minimum: 0 - required: false - Publisher: - name: Publisher - description: Unique client identifier. - in: query - schema: - type: string - format: uuid - required: false - Name: - name: name - description: SenML message name. - in: query - schema: - type: string - required: false - Value: - name: v - description: SenML message value. - in: query - schema: - type: string - required: false - BoolValue: - name: vb - description: SenML message bool value. - in: query - schema: - type: boolean - required: false - StringValue: - name: vs - description: SenML message string value. 
- in: query - schema: - type: string - required: false - DataValue: - name: vd - description: SenML message data value. - in: query - schema: - type: string - required: false - Comparator: - name: comparator - description: Value comparison operator. - in: query - schema: - type: string - default: eq - enum: - - eq - - lt - - le - - gt - - ge - required: false - From: - name: from - description: SenML message time in nanoseconds (integer part represents seconds). - in: query - schema: - type: number - example: 1709218556069 - required: false - To: - name: to - description: SenML message time in nanoseconds (integer part represents seconds). - in: query - schema: - type: number - example: 1709218757503 - required: false - Aggregation: - name: aggregation - description: Aggregation function. - in: query - schema: - type: string - enum: - - MAX - - AVG - - MIN - - SUM - - COUNT - - max - - min - - sum - - avg - - count - example: MAX - required: false - Interval: - name: interval - description: Aggregation interval. - in: query - schema: - type: string - example: 10s - required: false - - responses: - MessagesPageRes: - description: Data retrieved. - content: - application/json: - schema: - $ref: "#/components/schemas/MessagesPage" - ServiceError: - description: Unexpected server-side error occurred. - HealthRes: - description: Service Health Check. - content: - application/health+json: - schema: - $ref: "./schemas/health_info.yml" - - securitySchemes: - bearerAuth: - type: http - scheme: bearer - bearerFormat: JWT - description: | - * Users access: "Authorization: Bearer " - - clientAuth: - type: http - scheme: bearer - bearerFormat: uuid - description: | - * Clients access: "Authorization: Client " - -security: - - bearerAuth: [] - - clientAuth: [] diff --git a/cli/message.go b/cli/message.go index ceb6c6b561..6744b63181 100644 --- a/cli/message.go +++ b/cli/message.go @@ -3,10 +3,7 @@ package cli -import ( - smqsdk "github.com/absmach/supermq/pkg/sdk" - "github.com/spf13/cobra" -) +import "github.com/spf13/cobra" var cmdMessages = []cobra.Command{ { @@ -27,41 +24,14 @@ var cmdMessages = []cobra.Command{ logOKCmd(*cmd) }, }, - { - Use: "read ", - Short: "Read messages", - Long: "Reads all channel messages\n" + - "Usage:\n" + - "\tsupermq-cli messages read --offset --limit - lists all messages with provided offset and limit\n", - Run: func(cmd *cobra.Command, args []string) { - if len(args) != 3 { - logUsageCmd(*cmd, cmd.Use) - return - } - pageMetadata := smqsdk.MessagePageMetadata{ - PageMetadata: smqsdk.PageMetadata{ - Offset: Offset, - Limit: Limit, - }, - } - - m, err := sdk.ReadMessages(pageMetadata, args[0], args[1], args[2]) - if err != nil { - logErrorCmd(*cmd, err) - return - } - - logJSONCmd(*cmd, m) - }, - }, } // NewMessagesCmd returns messages command. 
func NewMessagesCmd() *cobra.Command { cmd := cobra.Command{ Use: "messages [send | read]", - Short: "Send or read messages", - Long: `Send or read messages using the http-adapter and the configured database reader`, + Short: "Send messages", + Long: `Send messages using the http-adapter`, } for i := range cmdMessages { diff --git a/cli/message_test.go b/cli/message_test.go index 14b92e0d89..249dd928b1 100644 --- a/cli/message_test.go +++ b/cli/message_test.go @@ -4,7 +4,6 @@ package cli_test import ( - "encoding/json" "fmt" "net/http" "strings" @@ -13,11 +12,8 @@ import ( "github.com/absmach/supermq/cli" "github.com/absmach/supermq/pkg/errors" svcerr "github.com/absmach/supermq/pkg/errors/service" - mgsdk "github.com/absmach/supermq/pkg/sdk" sdkmocks "github.com/absmach/supermq/pkg/sdk/mocks" - "github.com/absmach/supermq/pkg/transformers/senml" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" ) func TestSendMesageCmd(t *testing.T) { @@ -84,82 +80,3 @@ func TestSendMesageCmd(t *testing.T) { }) } } - -func TestReadMesageCmd(t *testing.T) { - sdkMock := new(sdkmocks.SDK) - cli.SetSDK(sdkMock) - messageCmd := cli.NewMessagesCmd() - rootCmd := setFlags(messageCmd) - - var mp mgsdk.MessagesPage - cases := []struct { - desc string - args []string - logType outputLog - errLogMessage string - sdkErr errors.SDKError - page mgsdk.MessagesPage - }{ - { - desc: "read message successfully", - args: []string{ - channel.ID, - domainID, - validToken, - }, - page: mgsdk.MessagesPage{ - PageRes: mgsdk.PageRes{ - Total: 1, - Offset: 0, - Limit: 10, - }, - Messages: []senml.Message{ - { - Channel: channel.ID, - }, - }, - }, - logType: entityLog, - }, - { - desc: "read message with invalid args", - args: []string{ - channel.ID, - domainID, - validToken, - extraArg, - }, - logType: usageLog, - }, - { - desc: "read message with invalid token", - args: []string{ - channel.ID, - domainID, - invalidToken, - }, - sdkErr: errors.NewSDKErrorWithStatus(svcerr.ErrAuthorization, http.StatusUnauthorized), - errLogMessage: fmt.Sprintf("\nerror: %s\n\n", errors.NewSDKErrorWithStatus(svcerr.ErrAuthorization, http.StatusUnauthorized)), - logType: errLog, - }, - } - - for _, tc := range cases { - t.Run(tc.desc, func(t *testing.T) { - sdkCall := sdkMock.On("ReadMessages", mock.Anything, tc.args[0], tc.args[1], tc.args[2]).Return(tc.page, tc.sdkErr) - out := executeCommand(t, rootCmd, append([]string{readCmd}, tc.args...)...) 
- - switch tc.logType { - case entityLog: - err := json.Unmarshal([]byte(out), &mp) - assert.Nil(t, err) - assert.Equal(t, tc.page, mp, fmt.Sprintf("%s unexpected response: expected: %v, got: %v", tc.desc, tc.page, mp)) - case errLog: - assert.Equal(t, tc.errLogMessage, out, fmt.Sprintf("%s unexpected error response: expected %s got errLogMessage:%s", tc.desc, tc.errLogMessage, out)) - case usageLog: - assert.False(t, strings.Contains(out, rootCmd.Use), fmt.Sprintf("%s invalid usage: %s", tc.desc, out)) - } - sdkCall.Unset() - }) - } -} diff --git a/cmd/cli/main.go b/cmd/cli/main.go index a261527887..345bae077d 100644 --- a/cmd/cli/main.go +++ b/cmd/cli/main.go @@ -114,14 +114,6 @@ func main() { "HTTP adapter URL", ) - rootCmd.PersistentFlags().StringVarP( - &sdkConf.ReaderURL, - "reader-url", - "R", - sdkConf.ReaderURL, - "Reader URL", - ) - rootCmd.PersistentFlags().StringVarP( &sdkConf.InvitationsURL, "invitations-url", diff --git a/cmd/postgres-reader/main.go b/cmd/postgres-reader/main.go deleted file mode 100644 index 5157c9270d..0000000000 --- a/cmd/postgres-reader/main.go +++ /dev/null @@ -1,171 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -// Package main contains postgres-reader main function to start the postgres-reader service. -package main - -import ( - "context" - "fmt" - "log" - "log/slog" - "os" - - chclient "github.com/absmach/callhome/pkg/client" - "github.com/absmach/supermq" - smqlog "github.com/absmach/supermq/logger" - "github.com/absmach/supermq/pkg/authn/authsvc" - "github.com/absmach/supermq/pkg/grpcclient" - pgclient "github.com/absmach/supermq/pkg/postgres" - "github.com/absmach/supermq/pkg/prometheus" - "github.com/absmach/supermq/pkg/server" - httpserver "github.com/absmach/supermq/pkg/server/http" - "github.com/absmach/supermq/pkg/uuid" - "github.com/absmach/supermq/readers" - httpapi "github.com/absmach/supermq/readers/api" - "github.com/absmach/supermq/readers/postgres" - "github.com/caarlos0/env/v11" - "github.com/jmoiron/sqlx" - "golang.org/x/sync/errgroup" -) - -const ( - svcName = "postgres-reader" - envPrefixDB = "SMQ_POSTGRES_" - envPrefixHTTP = "SMQ_POSTGRES_READER_HTTP_" - envPrefixAuth = "SMQ_AUTH_GRPC_" - envPrefixClients = "SMQ_CLIENTS_AUTH_GRPC_" - envPrefixChannels = "SMQ_CHANNELS_GRPC_" - defDB = "supermq" - defSvcHTTPPort = "9009" -) - -type config struct { - LogLevel string `env:"SMQ_POSTGRES_READER_LOG_LEVEL" envDefault:"info"` - SendTelemetry bool `env:"SMQ_SEND_TELEMETRY" envDefault:"true"` - InstanceID string `env:"SMQ_POSTGRES_READER_INSTANCE_ID" envDefault:""` -} - -func main() { - ctx, cancel := context.WithCancel(context.Background()) - g, ctx := errgroup.WithContext(ctx) - - cfg := config{} - if err := env.Parse(&cfg); err != nil { - log.Fatalf("failed to load %s configuration : %s", svcName, err) - } - - logger, err := smqlog.New(os.Stdout, cfg.LogLevel) - if err != nil { - log.Fatalf("failed to init logger: %s", err.Error()) - } - - var exitCode int - defer smqlog.ExitWithError(&exitCode) - - if cfg.InstanceID == "" { - if cfg.InstanceID, err = uuid.New().ID(); err != nil { - logger.Error(fmt.Sprintf("failed to generate instanceID: %s", err)) - exitCode = 1 - return - } - } - - dbConfig := pgclient.Config{} - if err := env.ParseWithOptions(&dbConfig, env.Options{Prefix: envPrefixDB}); err != nil { - logger.Error(err.Error()) - exitCode = 1 - return - } - db, err := pgclient.Connect(dbConfig) - if err != nil { - logger.Error(fmt.Sprintf("failed to setup postgres database : %s", err)) - 
exitCode = 1 - return - } - defer db.Close() - - clientsClientCfg := grpcclient.Config{} - if err := env.ParseWithOptions(&clientsClientCfg, env.Options{Prefix: envPrefixClients}); err != nil { - logger.Error(fmt.Sprintf("failed to load clients gRPC client configuration : %s", err)) - exitCode = 1 - return - } - - clientsClient, clientsHandler, err := grpcclient.SetupClientsClient(ctx, clientsClientCfg) - if err != nil { - logger.Error(err.Error()) - exitCode = 1 - return - } - defer clientsHandler.Close() - logger.Info("Clients service gRPC client successfully connected to clients gRPC server " + clientsHandler.Secure()) - - channelsClientCfg := grpcclient.Config{} - if err := env.ParseWithOptions(&channelsClientCfg, env.Options{Prefix: envPrefixChannels}); err != nil { - logger.Error(fmt.Sprintf("failed to load channels gRPC client configuration : %s", err)) - exitCode = 1 - return - } - - channelsClient, channelsHandler, err := grpcclient.SetupChannelsClient(ctx, channelsClientCfg) - if err != nil { - logger.Error(err.Error()) - exitCode = 1 - return - } - defer channelsHandler.Close() - logger.Info("Channels service gRPC client successfully connected to channels gRPC server " + channelsHandler.Secure()) - - authnCfg := grpcclient.Config{} - if err := env.ParseWithOptions(&authnCfg, env.Options{Prefix: envPrefixAuth}); err != nil { - logger.Error(fmt.Sprintf("failed to load auth gRPC client configuration : %s", err)) - exitCode = 1 - return - } - - authn, authnHandler, err := authsvc.NewAuthentication(ctx, authnCfg) - if err != nil { - logger.Error(err.Error()) - exitCode = 1 - return - } - defer authnHandler.Close() - logger.Info("authn successfully connected to auth gRPC server " + authnHandler.Secure()) - - repo := newService(db, logger) - - httpServerConfig := server.Config{Port: defSvcHTTPPort} - if err := env.ParseWithOptions(&httpServerConfig, env.Options{Prefix: envPrefixHTTP}); err != nil { - logger.Error(fmt.Sprintf("failed to load %s HTTP server configuration : %s", svcName, err)) - exitCode = 1 - return - } - hs := httpserver.NewServer(ctx, cancel, svcName, httpServerConfig, httpapi.MakeHandler(repo, authn, clientsClient, channelsClient, svcName, cfg.InstanceID), logger) - - if cfg.SendTelemetry { - chc := chclient.New(svcName, supermq.Version, logger, cancel) - go chc.CallHome(ctx) - } - - g.Go(func() error { - return hs.Start() - }) - - g.Go(func() error { - return server.StopSignalHandler(ctx, cancel, logger, svcName, hs) - }) - - if err := g.Wait(); err != nil { - logger.Error(fmt.Sprintf("Postgres reader service terminated: %s", err)) - } -} - -func newService(db *sqlx.DB, logger *slog.Logger) readers.MessageRepository { - svc := postgres.New(db) - svc = httpapi.LoggingMiddleware(svc, logger) - counter, latency := prometheus.MakeMetrics("postgres", "message_reader") - svc = httpapi.MetricsMiddleware(svc, counter, latency) - - return svc -} diff --git a/cmd/timescale-reader/main.go b/cmd/timescale-reader/main.go deleted file mode 100644 index ed5e22913a..0000000000 --- a/cmd/timescale-reader/main.go +++ /dev/null @@ -1,170 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -// Package main contains timescale-reader main function to start the timescale-reader service. 
-package main - -import ( - "context" - "fmt" - "log" - "log/slog" - "os" - - chclient "github.com/absmach/callhome/pkg/client" - "github.com/absmach/supermq" - smqlog "github.com/absmach/supermq/logger" - "github.com/absmach/supermq/pkg/authn/authsvc" - "github.com/absmach/supermq/pkg/grpcclient" - pgclient "github.com/absmach/supermq/pkg/postgres" - "github.com/absmach/supermq/pkg/prometheus" - "github.com/absmach/supermq/pkg/server" - httpserver "github.com/absmach/supermq/pkg/server/http" - "github.com/absmach/supermq/pkg/uuid" - "github.com/absmach/supermq/readers" - httpapi "github.com/absmach/supermq/readers/api" - "github.com/absmach/supermq/readers/timescale" - "github.com/caarlos0/env/v11" - "github.com/jmoiron/sqlx" - "golang.org/x/sync/errgroup" -) - -const ( - svcName = "timescaledb-reader" - envPrefixDB = "SMQ_TIMESCALE_" - envPrefixHTTP = "SMQ_TIMESCALE_READER_HTTP_" - envPrefixAuth = "SMQ_AUTH_GRPC_" - envPrefixClients = "SMQ_CLIENTS_AUTH_GRPC_" - envPrefixChannels = "SMQ_CHANNELS_GRPC_" - defDB = "messages" - defSvcHTTPPort = "9011" -) - -type config struct { - LogLevel string `env:"SMQ_TIMESCALE_READER_LOG_LEVEL" envDefault:"info"` - SendTelemetry bool `env:"SMQ_SEND_TELEMETRY" envDefault:"true"` - InstanceID string `env:"SMQ_TIMESCALE_READER_INSTANCE_ID" envDefault:""` -} - -func main() { - ctx, cancel := context.WithCancel(context.Background()) - g, ctx := errgroup.WithContext(ctx) - - cfg := config{} - if err := env.Parse(&cfg); err != nil { - log.Fatalf("failed to load %s configuration : %s", svcName, err) - } - - logger, err := smqlog.New(os.Stdout, cfg.LogLevel) - if err != nil { - log.Fatalf("failed to init logger: %s", err.Error()) - } - - var exitCode int - defer smqlog.ExitWithError(&exitCode) - - if cfg.InstanceID == "" { - if cfg.InstanceID, err = uuid.New().ID(); err != nil { - logger.Error(fmt.Sprintf("failed to generate instanceID: %s", err)) - exitCode = 1 - return - } - } - - dbConfig := pgclient.Config{Name: defDB} - if err := env.ParseWithOptions(&dbConfig, env.Options{Prefix: envPrefixDB}); err != nil { - logger.Error(err.Error()) - exitCode = 1 - return - } - db, err := pgclient.Connect(dbConfig) - if err != nil { - logger.Error(err.Error()) - } - defer db.Close() - - repo := newService(db, logger) - - clientsClientCfg := grpcclient.Config{} - if err := env.ParseWithOptions(&clientsClientCfg, env.Options{Prefix: envPrefixClients}); err != nil { - logger.Error(fmt.Sprintf("failed to load %s auth configuration : %s", svcName, err)) - exitCode = 1 - return - } - - clientsClient, clientsHandler, err := grpcclient.SetupClientsClient(ctx, clientsClientCfg) - if err != nil { - logger.Error(err.Error()) - exitCode = 1 - return - } - defer clientsHandler.Close() - - logger.Info("ClientsService gRPC client successfully connected to clients gRPC server " + clientsHandler.Secure()) - - channelsClientCfg := grpcclient.Config{} - if err := env.ParseWithOptions(&channelsClientCfg, env.Options{Prefix: envPrefixChannels}); err != nil { - logger.Error(fmt.Sprintf("failed to load channels gRPC client configuration : %s", err)) - exitCode = 1 - return - } - - channelsClient, channelsHandler, err := grpcclient.SetupChannelsClient(ctx, channelsClientCfg) - if err != nil { - logger.Error(err.Error()) - exitCode = 1 - return - } - defer channelsHandler.Close() - logger.Info("Channels service gRPC client successfully connected to channels gRPC server " + channelsHandler.Secure()) - - authnCfg := grpcclient.Config{} - if err := env.ParseWithOptions(&authnCfg, 
env.Options{Prefix: envPrefixAuth}); err != nil { - logger.Error(fmt.Sprintf("failed to load auth gRPC client configuration : %s", err)) - exitCode = 1 - return - } - - authn, authnHandler, err := authsvc.NewAuthentication(ctx, authnCfg) - if err != nil { - logger.Error(err.Error()) - exitCode = 1 - return - } - defer authnHandler.Close() - logger.Info("authn successfully connected to auth gRPC server " + authnHandler.Secure()) - - httpServerConfig := server.Config{Port: defSvcHTTPPort} - if err := env.ParseWithOptions(&httpServerConfig, env.Options{Prefix: envPrefixHTTP}); err != nil { - logger.Error(fmt.Sprintf("failed to load %s HTTP server configuration : %s", svcName, err)) - exitCode = 1 - return - } - hs := httpserver.NewServer(ctx, cancel, svcName, httpServerConfig, httpapi.MakeHandler(repo, authn, clientsClient, channelsClient, svcName, cfg.InstanceID), logger) - - if cfg.SendTelemetry { - chc := chclient.New(svcName, supermq.Version, logger, cancel) - go chc.CallHome(ctx) - } - - g.Go(func() error { - return hs.Start() - }) - - g.Go(func() error { - return server.StopSignalHandler(ctx, cancel, logger, svcName, hs) - }) - - if err := g.Wait(); err != nil { - logger.Error(fmt.Sprintf("Timescale reader service terminated: %s", err)) - } -} - -func newService(db *sqlx.DB, logger *slog.Logger) readers.MessageRepository { - svc := timescale.New(db) - svc = httpapi.LoggingMiddleware(svc, logger) - counter, latency := prometheus.MakeMetrics("timescale", "message_reader") - svc = httpapi.MetricsMiddleware(svc, counter, latency) - - return svc -} diff --git a/docker/.env b/docker/.env index 769bf6299b..408133f55e 100644 --- a/docker/.env +++ b/docker/.env @@ -515,14 +515,6 @@ SMQ_POSTGRES_WRITER_HTTP_SERVER_CERT= SMQ_POSTGRES_WRITER_HTTP_SERVER_KEY= SMQ_POSTGRES_WRITER_INSTANCE_ID= -### Postgres Reader -SMQ_POSTGRES_READER_LOG_LEVEL=debug -SMQ_POSTGRES_READER_HTTP_HOST=postgres-reader -SMQ_POSTGRES_READER_HTTP_PORT=9009 -SMQ_POSTGRES_READER_HTTP_SERVER_CERT= -SMQ_POSTGRES_READER_HTTP_SERVER_KEY= -SMQ_POSTGRES_READER_INSTANCE_ID= - ### Timescale SMQ_TIMESCALE_HOST=supermq-timescale SMQ_TIMESCALE_PORT=5432 @@ -543,14 +535,6 @@ SMQ_TIMESCALE_WRITER_HTTP_SERVER_CERT= SMQ_TIMESCALE_WRITER_HTTP_SERVER_KEY= SMQ_TIMESCALE_WRITER_INSTANCE_ID= -### Timescale Reader -SMQ_TIMESCALE_READER_LOG_LEVEL=debug -SMQ_TIMESCALE_READER_HTTP_HOST=timescale-reader -SMQ_TIMESCALE_READER_HTTP_PORT=9011 -SMQ_TIMESCALE_READER_HTTP_SERVER_CERT= -SMQ_TIMESCALE_READER_HTTP_SERVER_KEY= -SMQ_TIMESCALE_READER_INSTANCE_ID= - ### Journal SMQ_JOURNAL_LOG_LEVEL=info SMQ_JOURNAL_HTTP_HOST=journal diff --git a/docker/addons/postgres-reader/docker-compose.yml b/docker/addons/postgres-reader/docker-compose.yml deleted file mode 100644 index ef0be76332..0000000000 --- a/docker/addons/postgres-reader/docker-compose.yml +++ /dev/null @@ -1,117 +0,0 @@ -# Copyright (c) Abstract Machines -# SPDX-License-Identifier: Apache-2.0 - -# This docker-compose file contains optional Postgres-reader service for SuperMQ platform. -# Since this service is optional, this file is dependent of docker-compose.yml file -# from /docker. In order to run this service, execute command: -# docker compose -f docker/docker-compose.yml -f docker/addons/postgres-reader/docker-compose.yml up -# from project root. 
- -networks: - supermq-base-net: - -services: - postgres-reader: - image: supermq/postgres-reader:${SMQ_RELEASE_TAG} - container_name: supermq-postgres-reader - restart: on-failure - environment: - SMQ_POSTGRES_READER_LOG_LEVEL: ${SMQ_POSTGRES_READER_LOG_LEVEL} - SMQ_POSTGRES_READER_HTTP_HOST: ${SMQ_POSTGRES_READER_HTTP_HOST} - SMQ_POSTGRES_READER_HTTP_PORT: ${SMQ_POSTGRES_READER_HTTP_PORT} - SMQ_POSTGRES_READER_HTTP_SERVER_CERT: ${SMQ_POSTGRES_READER_HTTP_SERVER_CERT} - SMQ_POSTGRES_READER_HTTP_SERVER_KEY: ${SMQ_POSTGRES_READER_HTTP_SERVER_KEY} - SMQ_POSTGRES_HOST: ${SMQ_POSTGRES_HOST} - SMQ_POSTGRES_PORT: ${SMQ_POSTGRES_PORT} - SMQ_POSTGRES_USER: ${SMQ_POSTGRES_USER} - SMQ_POSTGRES_PASS: ${SMQ_POSTGRES_PASS} - SMQ_POSTGRES_NAME: ${SMQ_POSTGRES_NAME} - SMQ_POSTGRES_SSL_MODE: ${SMQ_POSTGRES_SSL_MODE} - SMQ_POSTGRES_SSL_CERT: ${SMQ_POSTGRES_SSL_CERT} - SMQ_POSTGRES_SSL_KEY: ${SMQ_POSTGRES_SSL_KEY} - SMQ_POSTGRES_SSL_ROOT_CERT: ${SMQ_POSTGRES_SSL_ROOT_CERT} - SMQ_CLIENTS_AUTH_GRPC_URL: ${SMQ_CLIENTS_AUTH_GRPC_URL} - SMQ_CLIENTS_AUTH_GRPC_TIMEOUT: ${SMQ_CLIENTS_AUTH_GRPC_TIMEOUT} - SMQ_CLIENTS_AUTH_GRPC_CLIENT_CERT: ${SMQ_CLIENTS_AUTH_GRPC_CLIENT_CERT:+/clients-grpc-client.crt} - SMQ_CLIENTS_AUTH_GRPC_CLIENT_KEY: ${SMQ_CLIENTS_AUTH_GRPC_CLIENT_KEY:+/clients-grpc-client.key} - SMQ_CLIENTS_AUTH_GRPC_SERVER_CA_CERTS: ${SMQ_CLIENTS_AUTH_GRPC_SERVER_CA_CERTS:+/clients-grpc-server-ca.crt} - SMQ_CHANNELS_GRPC_URL: ${SMQ_CHANNELS_GRPC_URL} - SMQ_CHANNELS_GRPC_TIMEOUT: ${SMQ_CHANNELS_GRPC_TIMEOUT} - SMQ_CHANNELS_GRPC_CLIENT_CERT: ${SMQ_CHANNELS_GRPC_CLIENT_CERT:+/channels-grpc-client.crt} - SMQ_CHANNELS_GRPC_CLIENT_KEY: ${SMQ_CHANNELS_GRPC_CLIENT_KEY:+/channels-grpc-client.key} - SMQ_CHANNELS_GRPC_SERVER_CA_CERTS: ${SMQ_CHANNELS_GRPC_SERVER_CA_CERTS:+/channels-grpc-server-ca.crt} - SMQ_AUTH_GRPC_URL: ${SMQ_AUTH_GRPC_URL} - SMQ_AUTH_GRPC_TIMEOUT: ${SMQ_AUTH_GRPC_TIMEOUT} - SMQ_AUTH_GRPC_CLIENT_CERT: ${SMQ_AUTH_GRPC_CLIENT_CERT:+/auth-grpc-client.crt} - SMQ_AUTH_GRPC_CLIENT_KEY: ${SMQ_AUTH_GRPC_CLIENT_KEY:+/auth-grpc-client.key} - SMQ_AUTH_GRPC_SERVER_CA_CERTS: ${SMQ_AUTH_GRPC_SERVER_CA_CERTS:+/auth-grpc-server-ca.crt} - SMQ_SEND_TELEMETRY: ${SMQ_SEND_TELEMETRY} - SMQ_POSTGRES_READER_INSTANCE_ID: ${SMQ_POSTGRES_READER_INSTANCE_ID} - ports: - - ${SMQ_POSTGRES_READER_HTTP_PORT}:${SMQ_POSTGRES_READER_HTTP_PORT} - networks: - - supermq-base-net - volumes: - - type: bind - source: ${SMQ_ADDONS_CERTS_PATH_PREFIX}${SMQ_AUTH_GRPC_CLIENT_CERT:-./ssl/certs/dummy/client_cert} - target: /auth-grpc-client${SMQ_AUTH_GRPC_CLIENT_CERT:+.crt} - bind: - create_host_path: true - - type: bind - source: ${SMQ_ADDONS_CERTS_PATH_PREFIX}${SMQ_AUTH_GRPC_CLIENT_KEY:-./ssl/certs/dummy/client_key} - target: /auth-grpc-client${SMQ_AUTH_GRPC_CLIENT_KEY:+.key} - bind: - create_host_path: true - - type: bind - source: ${SMQ_ADDONS_CERTS_PATH_PREFIX}${SMQ_AUTH_GRPC_SERVER_CA_CERTS:-./ssl/certs/dummy/server_ca} - target: /auth-grpc-server-ca${SMQ_AUTH_GRPC_SERVER_CA_CERTS:+.crt} - bind: - create_host_path: true - # Clients gRPC mTLS client certificates - - type: bind - source: ${SMQ_ADDONS_CERTS_PATH_PREFIX}${SMQ_CLIENTS_AUTH_GRPC_CLIENT_CERT:-ssl/certs/dummy/client_cert} - target: /clients-grpc-client${SMQ_CLIENTS_AUTH_GRPC_CLIENT_CERT:+.crt} - bind: - create_host_path: true - - type: bind - source: ${SMQ_ADDONS_CERTS_PATH_PREFIX}${SMQ_CLIENTS_AUTH_GRPC_CLIENT_KEY:-ssl/certs/dummy/client_key} - target: /clients-grpc-client${SMQ_CLIENTS_AUTH_GRPC_CLIENT_KEY:+.key} - bind: - create_host_path: true - - type: bind - source: 
${SMQ_ADDONS_CERTS_PATH_PREFIX}${SMQ_CLIENTS_AUTH_GRPC_SERVER_CA_CERTS:-ssl/certs/dummy/server_ca} - target: /clients-grpc-server-ca${SMQ_CLIENTS_AUTH_GRPC_SERVER_CA_CERTS:+.crt} - bind: - create_host_path: true - # Channels gRPC mTLS client certificates - - type: bind - source: ${SMQ_CHANNELS_AUTH_GRPC_CLIENT_CERT:-ssl/certs/dummy/client_cert} - target: /channels-grpc-client${SMQ_CHANNELS_AUTH_GRPC_CLIENT_CERT:+.crt} - bind: - create_host_path: true - - type: bind - source: ${SMQ_CHANNELS_AUTH_GRPC_CLIENT_KEY:-ssl/certs/dummy/client_key} - target: /channels-grpc-client${SMQ_CHANNELS_AUTH_GRPC_CLIENT_KEY:+.key} - bind: - create_host_path: true - - type: bind - source: ${SMQ_CHANNELS_AUTH_GRPC_SERVER_CA_CERTS:-ssl/certs/dummy/server_ca} - target: /channels-grpc-server-ca${SMQ_CHANNELS_AUTH_GRPC_SERVER_CA_CERTS:+.crt} - bind: - create_host_path: true - # Auth gRPC mTLS client certificates - - type: bind - source: ${SMQ_AUTH_GRPC_CLIENT_CERT:-ssl/certs/dummy/client_cert} - target: /auth-grpc-client${SMQ_AUTH_GRPC_CLIENT_CERT:+.crt} - bind: - create_host_path: true - - type: bind - source: ${SMQ_AUTH_GRPC_CLIENT_KEY:-ssl/certs/dummy/client_key} - target: /auth-grpc-client${SMQ_AUTH_GRPC_CLIENT_KEY:+.key} - bind: - create_host_path: true - - type: bind - source: ${SMQ_AUTH_GRPC_SERVER_CA_CERTS:-ssl/certs/dummy/server_ca} - target: /auth-grpc-server-ca${SMQ_AUTH_GRPC_SERVER_CA_CERTS:+.crt} - bind: - create_host_path: true diff --git a/docker/addons/timescale-reader/docker-compose.yml b/docker/addons/timescale-reader/docker-compose.yml deleted file mode 100644 index ebdadef4f0..0000000000 --- a/docker/addons/timescale-reader/docker-compose.yml +++ /dev/null @@ -1,117 +0,0 @@ -# Copyright (c) Abstract Machines -# SPDX-License-Identifier: Apache-2.0 - -# This docker-compose file contains optional Timescale-reader service for SuperMQ platform. -# Since this service is optional, this file is dependent of docker-compose.yml file -# from /docker. In order to run this service, execute command: -# docker compose -f docker/docker-compose.yml -f docker/addons/timescale-reader/docker-compose.yml up -# from project root. 
- -networks: - supermq-base-net: - -services: - timescale-reader: - image: supermq/timescale-reader:${SMQ_RELEASE_TAG} - container_name: supermq-timescale-reader - restart: on-failure - environment: - SMQ_TIMESCALE_READER_LOG_LEVEL: ${SMQ_TIMESCALE_READER_LOG_LEVEL} - SMQ_TIMESCALE_READER_HTTP_HOST: ${SMQ_TIMESCALE_READER_HTTP_HOST} - SMQ_TIMESCALE_READER_HTTP_PORT: ${SMQ_TIMESCALE_READER_HTTP_PORT} - SMQ_TIMESCALE_READER_HTTP_SERVER_CERT: ${SMQ_TIMESCALE_READER_HTTP_SERVER_CERT} - SMQ_TIMESCALE_READER_HTTP_SERVER_KEY: ${SMQ_TIMESCALE_READER_HTTP_SERVER_KEY} - SMQ_TIMESCALE_HOST: ${SMQ_TIMESCALE_HOST} - SMQ_TIMESCALE_PORT: ${SMQ_TIMESCALE_PORT} - SMQ_TIMESCALE_USER: ${SMQ_TIMESCALE_USER} - SMQ_TIMESCALE_PASS: ${SMQ_TIMESCALE_PASS} - SMQ_TIMESCALE_NAME: ${SMQ_TIMESCALE_NAME} - SMQ_TIMESCALE_SSL_MODE: ${SMQ_TIMESCALE_SSL_MODE} - SMQ_TIMESCALE_SSL_CERT: ${SMQ_TIMESCALE_SSL_CERT} - SMQ_TIMESCALE_SSL_KEY: ${SMQ_TIMESCALE_SSL_KEY} - SMQ_TIMESCALE_SSL_ROOT_CERT: ${SMQ_TIMESCALE_SSL_ROOT_CERT} - SMQ_CLIENTS_AUTH_GRPC_URL: ${SMQ_CLIENTS_AUTH_GRPC_URL} - SMQ_CLIENTS_AUTH_GRPC_TIMEOUT: ${SMQ_CLIENTS_AUTH_GRPC_TIMEOUT} - SMQ_CLIENTS_AUTH_GRPC_CLIENT_CERT: ${SMQ_CLIENTS_AUTH_GRPC_CLIENT_CERT:+/clients-grpc-client.crt} - SMQ_CLIENTS_AUTH_GRPC_CLIENT_KEY: ${SMQ_CLIENTS_AUTH_GRPC_CLIENT_KEY:+/clients-grpc-client.key} - SMQ_CLIENTS_AUTH_GRPC_SERVER_CA_CERTS: ${SMQ_CLIENTS_AUTH_GRPC_SERVER_CA_CERTS:+/clients-grpc-server-ca.crt} - SMQ_CHANNELS_GRPC_URL: ${SMQ_CHANNELS_GRPC_URL} - SMQ_CHANNELS_GRPC_TIMEOUT: ${SMQ_CHANNELS_GRPC_TIMEOUT} - SMQ_CHANNELS_GRPC_CLIENT_CERT: ${SMQ_CHANNELS_GRPC_CLIENT_CERT:+/channels-grpc-client.crt} - SMQ_CHANNELS_GRPC_CLIENT_KEY: ${SMQ_CHANNELS_GRPC_CLIENT_KEY:+/channels-grpc-client.key} - SMQ_CHANNELS_GRPC_SERVER_CA_CERTS: ${SMQ_CHANNELS_GRPC_SERVER_CA_CERTS:+/channels-grpc-server-ca.crt} - SMQ_AUTH_GRPC_URL: ${SMQ_AUTH_GRPC_URL} - SMQ_AUTH_GRPC_TIMEOUT: ${SMQ_AUTH_GRPC_TIMEOUT} - SMQ_AUTH_GRPC_CLIENT_CERT: ${SMQ_AUTH_GRPC_CLIENT_CERT:+/auth-grpc-client.crt} - SMQ_AUTH_GRPC_CLIENT_KEY: ${SMQ_AUTH_GRPC_CLIENT_KEY:+/auth-grpc-client.key} - SMQ_AUTH_GRPC_SERVER_CA_CERTS: ${SMQ_AUTH_GRPC_SERVER_CA_CERTS:+/auth-grpc-server-ca.crt} - SMQ_SEND_TELEMETRY: ${SMQ_SEND_TELEMETRY} - SMQ_TIMESCALE_READER_INSTANCE_ID: ${SMQ_TIMESCALE_READER_INSTANCE_ID} - ports: - - ${SMQ_TIMESCALE_READER_HTTP_PORT}:${SMQ_TIMESCALE_READER_HTTP_PORT} - networks: - - supermq-base-net - volumes: - - type: bind - source: ${SMQ_ADDONS_CERTS_PATH_PREFIX}${SMQ_AUTH_GRPC_CLIENT_CERT:-./ssl/certs/dummy/client_cert} - target: /auth-grpc-client${SMQ_AUTH_GRPC_CLIENT_CERT:+.crt} - bind: - create_host_path: true - - type: bind - source: ${SMQ_ADDONS_CERTS_PATH_PREFIX}${SMQ_AUTH_GRPC_CLIENT_KEY:-./ssl/certs/dummy/client_key} - target: /auth-grpc-client${SMQ_AUTH_GRPC_CLIENT_KEY:+.key} - bind: - create_host_path: true - - type: bind - source: ${SMQ_ADDONS_CERTS_PATH_PREFIX}${SMQ_AUTH_GRPC_SERVER_CA_CERTS:-./ssl/certs/dummy/server_ca} - target: /auth-grpc-server-ca${SMQ_AUTH_GRPC_SERVER_CA_CERTS:+.crt} - bind: - create_host_path: true - # Clients gRPC mTLS client certificates - - type: bind - source: ${SMQ_ADDONS_CERTS_PATH_PREFIX}${SMQ_CLIENTS_AUTH_GRPC_CLIENT_CERT:-ssl/certs/dummy/client_cert} - target: /clients-grpc-client${SMQ_CLIENTS_AUTH_GRPC_CLIENT_CERT:+.crt} - bind: - create_host_path: true - - type: bind - source: ${SMQ_ADDONS_CERTS_PATH_PREFIX}${SMQ_CLIENTS_AUTH_GRPC_CLIENT_KEY:-ssl/certs/dummy/client_key} - target: /clients-grpc-client${SMQ_CLIENTS_AUTH_GRPC_CLIENT_KEY:+.key} - bind: - create_host_path: true - - 
type: bind - source: ${SMQ_ADDONS_CERTS_PATH_PREFIX}${SMQ_CLIENTS_AUTH_GRPC_SERVER_CA_CERTS:-ssl/certs/dummy/server_ca} - target: /clients-grpc-server-ca${SMQ_CLIENTS_AUTH_GRPC_SERVER_CA_CERTS:+.crt} - bind: - create_host_path: true - # Channels gRPC mTLS client certificates - - type: bind - source: ${SMQ_CHANNELS_AUTH_GRPC_CLIENT_CERT:-ssl/certs/dummy/client_cert} - target: /channels-grpc-client${SMQ_CHANNELS_AUTH_GRPC_CLIENT_CERT:+.crt} - bind: - create_host_path: true - - type: bind - source: ${SMQ_CHANNELS_AUTH_GRPC_CLIENT_KEY:-ssl/certs/dummy/client_key} - target: /channels-grpc-client${SMQ_CHANNELS_AUTH_GRPC_CLIENT_KEY:+.key} - bind: - create_host_path: true - - type: bind - source: ${SMQ_CHANNELS_AUTH_GRPC_SERVER_CA_CERTS:-ssl/certs/dummy/server_ca} - target: /channels-grpc-server-ca${SMQ_CHANNELS_AUTH_GRPC_SERVER_CA_CERTS:+.crt} - bind: - create_host_path: true - # Auth gRPC mTLS client certificates - - type: bind - source: ${SMQ_AUTH_GRPC_CLIENT_CERT:-ssl/certs/dummy/client_cert} - target: /auth-grpc-client${SMQ_AUTH_GRPC_CLIENT_CERT:+.crt} - bind: - create_host_path: true - - type: bind - source: ${SMQ_AUTH_GRPC_CLIENT_KEY:-ssl/certs/dummy/client_key} - target: /auth-grpc-client${SMQ_AUTH_GRPC_CLIENT_KEY:+.key} - bind: - create_host_path: true - - type: bind - source: ${SMQ_AUTH_GRPC_SERVER_CA_CERTS:-ssl/certs/dummy/server_ca} - target: /auth-grpc-server-ca${SMQ_AUTH_GRPC_SERVER_CA_CERTS:+.crt} - bind: - create_host_path: true diff --git a/pkg/sdk/health.go b/pkg/sdk/health.go index 1906cf0c07..d504559303 100644 --- a/pkg/sdk/health.go +++ b/pkg/sdk/health.go @@ -40,8 +40,6 @@ func (sdk mgSDK) Health(service string) (HealthInfo, errors.SDKError) { url = fmt.Sprintf("%s/health", sdk.bootstrapURL) case "certs": url = fmt.Sprintf("%s/health", sdk.certsURL) - case "reader": - url = fmt.Sprintf("%s/health", sdk.readerURL) case "http-adapter": url = fmt.Sprintf("%s/health", sdk.httpAdapterURL) } diff --git a/pkg/sdk/health_test.go b/pkg/sdk/health_test.go index 25087bc548..4d9d781b79 100644 --- a/pkg/sdk/health_test.go +++ b/pkg/sdk/health_test.go @@ -11,14 +11,10 @@ import ( "github.com/absmach/supermq" "github.com/absmach/supermq/bootstrap/api" bmocks "github.com/absmach/supermq/bootstrap/mocks" - chmocks "github.com/absmach/supermq/channels/mocks" - climocks "github.com/absmach/supermq/clients/mocks" smqlog "github.com/absmach/supermq/logger" authnmocks "github.com/absmach/supermq/pkg/authn/mocks" "github.com/absmach/supermq/pkg/errors" sdk "github.com/absmach/supermq/pkg/sdk" - readersapi "github.com/absmach/supermq/readers/api" - readersmocks "github.com/absmach/supermq/readers/mocks" "github.com/stretchr/testify/assert" ) @@ -35,9 +31,6 @@ func TestHealth(t *testing.T) { bootstrapTs := setupMinimalBootstrap() defer bootstrapTs.Close() - readerTs := setupMinimalReader() - defer readerTs.Close() - httpAdapterTs, _ := setupMessages() defer httpAdapterTs.Close() @@ -46,7 +39,6 @@ func TestHealth(t *testing.T) { UsersURL: usersTs.URL, CertsURL: certsTs.URL, BootstrapURL: bootstrapTs.URL, - ReaderURL: readerTs.URL, HTTPAdapterURL: httpAdapterTs.URL, MsgContentType: contentType, TLSVerification: false, @@ -93,14 +85,6 @@ func TestHealth(t *testing.T) { description: "bootstrap service", status: "pass", }, - { - desc: "get reader service health check", - service: "reader", - empty: false, - err: nil, - description: "test service", - status: "pass", - }, { desc: "get http-adapter service health check", service: "http-adapter", @@ -132,13 +116,3 @@ func setupMinimalBootstrap() 
*httptest.Server { return httptest.NewServer(mux) } - -func setupMinimalReader() *httptest.Server { - repo := new(readersmocks.MessageRepository) - channels := new(chmocks.ChannelsServiceClient) - authn := new(authnmocks.Authentication) - clients := new(climocks.ClientsServiceClient) - - mux := readersapi.MakeHandler(repo, authn, clients, channels, "test", "") - return httptest.NewServer(mux) -} diff --git a/pkg/sdk/message_test.go b/pkg/sdk/message_test.go index 3177591531..867affebdf 100644 --- a/pkg/sdk/message_test.go +++ b/pkg/sdk/message_test.go @@ -19,16 +19,11 @@ import ( adapter "github.com/absmach/supermq/http" "github.com/absmach/supermq/http/api" smqlog "github.com/absmach/supermq/logger" - smqauthn "github.com/absmach/supermq/pkg/authn" authnmocks "github.com/absmach/supermq/pkg/authn/mocks" "github.com/absmach/supermq/pkg/errors" svcerr "github.com/absmach/supermq/pkg/errors/service" pubsub "github.com/absmach/supermq/pkg/messaging/mocks" sdk "github.com/absmach/supermq/pkg/sdk" - "github.com/absmach/supermq/pkg/transformers/senml" - "github.com/absmach/supermq/readers" - readersapi "github.com/absmach/supermq/readers/api" - readersmocks "github.com/absmach/supermq/readers/mocks" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) @@ -60,16 +55,6 @@ func setupMessages() (*httptest.Server, *pubsub.PubSub) { return httptest.NewServer(http.HandlerFunc(mp.ServeHTTP)), pub } -func setupReaders() (*httptest.Server, *authnmocks.Authentication, *readersmocks.MessageRepository) { - repo := new(readersmocks.MessageRepository) - authn := new(authnmocks.Authentication) - clientsGRPCClient = new(climocks.ClientsServiceClient) - channelsGRPCClient = new(chmocks.ChannelsServiceClient) - - mux := readersapi.MakeHandler(repo, authn, clientsGRPCClient, channelsGRPCClient, "test", "") - return httptest.NewServer(mux), authn, repo -} - func TestSendMessage(t *testing.T) { ts, pub := setupMessages() defer ts.Close() @@ -207,206 +192,3 @@ func TestSetContentType(t *testing.T) { assert.Equal(t, tc.err, err, fmt.Sprintf("%s: expected error %s, got %s", tc.desc, tc.err, err)) } } - -func TestReadMessages(t *testing.T) { - ts, authn, repo := setupReaders() - defer ts.Close() - - channelID := "channelID" - msgValue := 1.6 - boolVal := true - msg := senml.Message{ - Name: "current", - Time: 1720000000, - Value: &msgValue, - Publisher: validID, - } - invalidMsg := "[{\"n\":\"current\",\"t\":-1,\"v\":1.6}]" - - sdkConf := sdk.Config{ - ReaderURL: ts.URL, - } - - mgsdk := sdk.NewSDK(sdkConf) - - cases := []struct { - desc string - token string - chanName string - domainID string - messagePageMeta sdk.MessagePageMetadata - authzErr error - authnErr error - repoRes readers.MessagesPage - repoErr error - response sdk.MessagesPage - err errors.SDKError - }{ - { - desc: "read messages successfully", - token: validToken, - chanName: channelID, - domainID: validID, - messagePageMeta: sdk.MessagePageMetadata{ - PageMetadata: sdk.PageMetadata{ - Offset: 0, - Limit: 10, - Level: 0, - }, - Publisher: validID, - BoolValue: &boolVal, - }, - repoRes: readers.MessagesPage{ - Total: 1, - Messages: []readers.Message{msg}, - }, - repoErr: nil, - response: sdk.MessagesPage{ - PageRes: sdk.PageRes{ - Total: 1, - }, - Messages: []senml.Message{msg}, - }, - err: nil, - }, - { - desc: "read messages successfully with subtopic", - token: validToken, - chanName: channelID + ".subtopic", - domainID: validID, - messagePageMeta: sdk.MessagePageMetadata{ - PageMetadata: sdk.PageMetadata{ - Offset: 0, - Limit: 10, 
- }, - Publisher: validID, - }, - repoRes: readers.MessagesPage{ - Total: 1, - Messages: []readers.Message{msg}, - }, - repoErr: nil, - response: sdk.MessagesPage{ - PageRes: sdk.PageRes{ - Total: 1, - }, - Messages: []senml.Message{msg}, - }, - err: nil, - }, - { - desc: "read messages with invalid token", - token: invalidToken, - chanName: channelID, - domainID: validID, - messagePageMeta: sdk.MessagePageMetadata{ - PageMetadata: sdk.PageMetadata{ - Offset: 0, - Limit: 10, - }, - Subtopic: "subtopic", - Publisher: validID, - }, - authzErr: svcerr.ErrAuthorization, - repoRes: readers.MessagesPage{}, - response: sdk.MessagesPage{}, - err: errors.NewSDKErrorWithStatus(errors.Wrap(svcerr.ErrAuthorization, svcerr.ErrAuthorization), http.StatusUnauthorized), - }, - { - desc: "read messages with empty token", - token: "", - chanName: channelID, - domainID: validID, - messagePageMeta: sdk.MessagePageMetadata{ - PageMetadata: sdk.PageMetadata{ - Offset: 0, - Limit: 10, - }, - Subtopic: "subtopic", - Publisher: validID, - }, - authnErr: svcerr.ErrAuthentication, - repoRes: readers.MessagesPage{}, - response: sdk.MessagesPage{}, - err: errors.NewSDKErrorWithStatus(errors.Wrap(apiutil.ErrValidation, apiutil.ErrBearerToken), http.StatusUnauthorized), - }, - { - desc: "read messages with empty channel ID", - token: validToken, - chanName: "", - domainID: validID, - messagePageMeta: sdk.MessagePageMetadata{ - PageMetadata: sdk.PageMetadata{ - Offset: 0, - Limit: 10, - }, - Subtopic: "subtopic", - Publisher: validID, - }, - repoRes: readers.MessagesPage{}, - repoErr: nil, - response: sdk.MessagesPage{}, - err: errors.NewSDKErrorWithStatus(errors.Wrap(apiutil.ErrValidation, apiutil.ErrMissingID), http.StatusBadRequest), - }, - { - desc: "read messages with invalid message page metadata", - token: validToken, - chanName: channelID, - domainID: validID, - messagePageMeta: sdk.MessagePageMetadata{ - PageMetadata: sdk.PageMetadata{ - Offset: 0, - Limit: 10, - Metadata: map[string]interface{}{ - "key": make(chan int), - }, - }, - Subtopic: "subtopic", - Publisher: validID, - }, - repoRes: readers.MessagesPage{}, - repoErr: nil, - response: sdk.MessagesPage{}, - err: errors.NewSDKError(errors.New("json: unsupported type: chan int")), - }, - { - desc: "read messages with response that cannot be unmarshalled", - token: validToken, - chanName: channelID, - domainID: validID, - messagePageMeta: sdk.MessagePageMetadata{ - PageMetadata: sdk.PageMetadata{ - Offset: 0, - Limit: 10, - }, - Subtopic: "subtopic", - Publisher: validID, - }, - repoRes: readers.MessagesPage{ - Total: 1, - Messages: []readers.Message{invalidMsg}, - }, - repoErr: nil, - response: sdk.MessagesPage{}, - err: errors.NewSDKError(errors.New("json: cannot unmarshal string into Go struct field MessagesPage.messages of type senml.Message")), - }, - } - for _, tc := range cases { - t.Run(tc.desc, func(t *testing.T) { - authCall1 := authn.On("Authenticate", mock.Anything, tc.token).Return(smqauthn.Session{UserID: validID}, tc.authnErr) - authzCall := channelsGRPCClient.On("Authorize", mock.Anything, mock.Anything).Return(&grpcChannelsV1.AuthzRes{Authorized: true}, tc.authzErr) - repoCall := repo.On("ReadAll", channelID, mock.Anything).Return(tc.repoRes, tc.repoErr) - response, err := mgsdk.ReadMessages(tc.messagePageMeta, tc.chanName, tc.domainID, tc.token) - fmt.Println(err) - assert.Equal(t, tc.err, err) - assert.Equal(t, tc.response, response) - if tc.err == nil { - ok := repoCall.Parent.AssertCalled(t, "ReadAll", channelID, mock.Anything) - 
assert.True(t, ok) - } - authCall1.Unset() - authzCall.Unset() - repoCall.Unset() - }) - } -} diff --git a/pkg/sdk/sdk.go b/pkg/sdk/sdk.go index 5b71efbf31..fc3c42a61b 100644 --- a/pkg/sdk/sdk.go +++ b/pkg/sdk/sdk.go @@ -1044,17 +1044,6 @@ type SDK interface { // fmt.Println(err) SendMessage(chanID, msg, key string) errors.SDKError - // ReadMessages read messages of specified channel. - // - // example: - // pm := sdk.MessagePageMetadata{ - // Offset: 0, - // Limit: 10, - // } - // msgs, _ := sdk.ReadMessages(pm,"channelID", "domainID", "token") - // fmt.Println(msgs) - ReadMessages(pm MessagePageMetadata, chanID, domainID, token string) (MessagesPage, errors.SDKError) - // SetContentType sets message content type. // // example: diff --git a/readers/api/doc.go b/readers/api/doc.go deleted file mode 100644 index 2424852cc4..0000000000 --- a/readers/api/doc.go +++ /dev/null @@ -1,6 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -// Package api contains API-related concerns: endpoint definitions, middlewares -// and all resource representations. -package api diff --git a/readers/api/endpoint.go b/readers/api/endpoint.go deleted file mode 100644 index 61d1d6038e..0000000000 --- a/readers/api/endpoint.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -package api - -import ( - "context" - - grpcChannelsV1 "github.com/absmach/supermq/api/grpc/channels/v1" - grpcClientsV1 "github.com/absmach/supermq/api/grpc/clients/v1" - apiutil "github.com/absmach/supermq/api/http/util" - smqauthn "github.com/absmach/supermq/pkg/authn" - "github.com/absmach/supermq/pkg/errors" - svcerr "github.com/absmach/supermq/pkg/errors/service" - "github.com/absmach/supermq/readers" - "github.com/go-kit/kit/endpoint" -) - -func listMessagesEndpoint(svc readers.MessageRepository, authn smqauthn.Authentication, clients grpcClientsV1.ClientsServiceClient, channels grpcChannelsV1.ChannelsServiceClient) endpoint.Endpoint { - return func(ctx context.Context, request interface{}) (interface{}, error) { - req := request.(listMessagesReq) - if err := req.validate(); err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - if err := authnAuthz(ctx, req, authn, clients, channels); err != nil { - return nil, errors.Wrap(svcerr.ErrAuthorization, err) - } - - page, err := svc.ReadAll(req.chanID, req.pageMeta) - if err != nil { - return nil, err - } - - return pageRes{ - PageMetadata: page.PageMetadata, - Total: page.Total, - Messages: page.Messages, - }, nil - } -} diff --git a/readers/api/endpoint_test.go b/readers/api/endpoint_test.go deleted file mode 100644 index 1c23fcddb1..0000000000 --- a/readers/api/endpoint_test.go +++ /dev/null @@ -1,1020 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -package api_test - -import ( - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "testing" - "time" - - grpcChannelsV1 "github.com/absmach/supermq/api/grpc/channels/v1" - grpcClientsV1 "github.com/absmach/supermq/api/grpc/clients/v1" - apiutil "github.com/absmach/supermq/api/http/util" - chmocks "github.com/absmach/supermq/channels/mocks" - climocks "github.com/absmach/supermq/clients/mocks" - "github.com/absmach/supermq/internal/testsutil" - smqauthn "github.com/absmach/supermq/pkg/authn" - authnmocks "github.com/absmach/supermq/pkg/authn/mocks" - svcerr "github.com/absmach/supermq/pkg/errors/service" - "github.com/absmach/supermq/pkg/transformers/senml" - 
"github.com/absmach/supermq/readers" - "github.com/absmach/supermq/readers/api" - "github.com/absmach/supermq/readers/mocks" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" -) - -const ( - svcName = "test-service" - clientToken = "1" - userToken = "token" - invalidToken = "invalid" - email = "user@example.com" - invalid = "invalid" - numOfMessages = 100 - valueFields = 5 - subtopic = "topic" - mqttProt = "mqtt" - httpProt = "http" - msgName = "temperature" - instanceID = "5de9b29a-feb9-11ed-be56-0242ac120002" -) - -var ( - v float64 = 5 - vs = "value" - vb = true - vd = "dataValue" - sum float64 = 42 - validSession = smqauthn.Session{UserID: testsutil.GenerateUUID(&testing.T{})} -) - -func newServer(repo *mocks.MessageRepository, authn *authnmocks.Authentication, clients *climocks.ClientsServiceClient, channels *chmocks.ChannelsServiceClient) *httptest.Server { - mux := api.MakeHandler(repo, authn, clients, channels, svcName, instanceID) - return httptest.NewServer(mux) -} - -type testRequest struct { - client *http.Client - method string - url string - token string - key string -} - -func (tr testRequest) make() (*http.Response, error) { - req, err := http.NewRequest(tr.method, tr.url, http.NoBody) - if err != nil { - return nil, err - } - if tr.token != "" { - req.Header.Set("Authorization", apiutil.BearerPrefix+tr.token) - } - if tr.key != "" { - req.Header.Set("Authorization", apiutil.ClientPrefix+tr.key) - } - - return tr.client.Do(req) -} - -func TestReadAll(t *testing.T) { - chanID := testsutil.GenerateUUID(t) - pubID := testsutil.GenerateUUID(t) - pubID2 := testsutil.GenerateUUID(t) - - now := time.Now().Unix() - - var messages []senml.Message - var queryMsgs []senml.Message - var valueMsgs []senml.Message - var boolMsgs []senml.Message - var stringMsgs []senml.Message - var dataMsgs []senml.Message - - for i := 0; i < numOfMessages; i++ { - // Mix possible values as well as value sum. 
- msg := senml.Message{ - Channel: chanID, - Publisher: pubID, - Protocol: mqttProt, - Time: float64(now - int64(i)), - Name: "name", - } - - count := i % valueFields - switch count { - case 0: - msg.Value = &v - valueMsgs = append(valueMsgs, msg) - case 1: - msg.BoolValue = &vb - boolMsgs = append(boolMsgs, msg) - case 2: - msg.StringValue = &vs - stringMsgs = append(stringMsgs, msg) - case 3: - msg.DataValue = &vd - dataMsgs = append(dataMsgs, msg) - case 4: - msg.Sum = &sum - msg.Subtopic = subtopic - msg.Protocol = httpProt - msg.Publisher = pubID2 - msg.Name = msgName - queryMsgs = append(queryMsgs, msg) - } - - messages = append(messages, msg) - } - - repo := new(mocks.MessageRepository) - authn := new(authnmocks.Authentication) - clients := new(climocks.ClientsServiceClient) - channels := new(chmocks.ChannelsServiceClient) - ts := newServer(repo, authn, clients, channels) - defer ts.Close() - - cases := []struct { - desc string - req string - url string - token string - key string - authResponse bool - status int - res pageRes - authnErr error - err error - }{ - { - desc: "read page with valid offset and limit", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&limit=10", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages"}, - Total: uint64(len(messages)), - Messages: messages[0:10], - }, - }, - { - desc: "read page with valid offset and limit as user", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&limit=10", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages"}, - Total: uint64(len(messages)), - Messages: messages[0:10], - }, - }, - { - desc: "read page with negative offset as client", - url: fmt.Sprintf("%s/channels/%s/messages?offset=-1&limit=10", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with negative limit as client", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&limit=-10", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with zero limit as client", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&limit=0", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with non-integer offset as client", - url: fmt.Sprintf("%s/channels/%s/messages?offset=abc&limit=10", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with non-integer limit as client", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&limit=abc", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with invalid channel id as client", - url: fmt.Sprintf("%s/channels//messages?offset=0&limit=10", ts.URL), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with multiple offset as client", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&offset=1&limit=10", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with multiple limit as client", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&limit=20&limit=10", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - 
{ - desc: "read page with empty token as client", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&limit=10", ts.URL, chanID), - token: "", - authResponse: false, - authnErr: svcerr.ErrAuthentication, - status: http.StatusUnauthorized, - err: svcerr.ErrAuthentication, - }, - { - desc: "read page with default offset as client", - url: fmt.Sprintf("%s/channels/%s/messages?limit=10", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages"}, - Total: uint64(len(messages)), - Messages: messages[0:10], - }, - }, - { - desc: "read page with default limit as client", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages"}, - Total: uint64(len(messages)), - Messages: messages[0:10], - }, - }, - { - desc: "read page with senml format as client", - url: fmt.Sprintf("%s/channels/%s/messages?format=messages", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages"}, - Total: uint64(len(messages)), - Messages: messages[0:10], - }, - }, - { - desc: "read page with subtopic as client", - url: fmt.Sprintf("%s/channels/%s/messages?subtopic=%s&protocol=%s", ts.URL, chanID, subtopic, httpProt), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Subtopic: subtopic, Format: "messages", Protocol: httpProt}, - Total: uint64(len(queryMsgs)), - Messages: queryMsgs[0:10], - }, - }, - { - desc: "read page with subtopic and protocol as client", - url: fmt.Sprintf("%s/channels/%s/messages?subtopic=%s&protocol=%s", ts.URL, chanID, subtopic, httpProt), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Subtopic: subtopic, Format: "messages", Protocol: httpProt}, - Total: uint64(len(queryMsgs)), - Messages: queryMsgs[0:10], - }, - }, - { - desc: "read page with publisher as client", - url: fmt.Sprintf("%s/channels/%s/messages?publisher=%s", ts.URL, chanID, pubID2), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Publisher: pubID2}, - Total: uint64(len(queryMsgs)), - Messages: queryMsgs[0:10], - }, - }, - { - desc: "read page with protocol as client", - url: fmt.Sprintf("%s/channels/%s/messages?protocol=http", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Protocol: httpProt}, - Total: uint64(len(queryMsgs)), - Messages: queryMsgs[0:10], - }, - }, - { - desc: "read page with name as client", - url: fmt.Sprintf("%s/channels/%s/messages?name=%s", ts.URL, chanID, msgName), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Name: msgName}, - Total: uint64(len(queryMsgs)), - Messages: queryMsgs[0:10], - }, - }, - { - desc: "read page with value as client", - url: fmt.Sprintf("%s/channels/%s/messages?v=%f", ts.URL, chanID, v), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: 
"messages", Value: v}, - Total: uint64(len(valueMsgs)), - Messages: valueMsgs[0:10], - }, - }, - { - desc: "read page with value and equal comparator as client", - url: fmt.Sprintf("%s/channels/%s/messages?v=%f&comparator=%s", ts.URL, chanID, v, readers.EqualKey), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Value: v, Comparator: readers.EqualKey}, - Total: uint64(len(valueMsgs)), - Messages: valueMsgs[0:10], - }, - }, - { - desc: "read page with value and lower-than comparator as client", - url: fmt.Sprintf("%s/channels/%s/messages?v=%f&comparator=%s", ts.URL, chanID, v+1, readers.LowerThanKey), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Value: v + 1, Comparator: readers.LowerThanKey}, - Total: uint64(len(valueMsgs)), - Messages: valueMsgs[0:10], - }, - }, - { - desc: "read page with value and lower-than-or-equal comparator as client", - url: fmt.Sprintf("%s/channels/%s/messages?v=%f&comparator=%s", ts.URL, chanID, v+1, readers.LowerThanEqualKey), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Value: v + 1, Comparator: readers.LowerThanEqualKey}, - Total: uint64(len(valueMsgs)), - Messages: valueMsgs[0:10], - }, - }, - { - desc: "read page with value and greater-than comparator as client", - url: fmt.Sprintf("%s/channels/%s/messages?v=%f&comparator=%s", ts.URL, chanID, v-1, readers.GreaterThanKey), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Value: v - 1, Comparator: readers.GreaterThanKey}, - Total: uint64(len(valueMsgs)), - Messages: valueMsgs[0:10], - }, - }, - { - desc: "read page with value and greater-than-or-equal comparator as client", - url: fmt.Sprintf("%s/channels/%s/messages?v=%f&comparator=%s", ts.URL, chanID, v-1, readers.GreaterThanEqualKey), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Value: v - 1, Comparator: readers.GreaterThanEqualKey}, - Total: uint64(len(valueMsgs)), - Messages: valueMsgs[0:10], - }, - }, - { - desc: "read page with non-float value as client", - url: fmt.Sprintf("%s/channels/%s/messages?v=ab01", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with value and wrong comparator as client", - url: fmt.Sprintf("%s/channels/%s/messages?v=%f&comparator=wrong", ts.URL, chanID, v-1), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with boolean value as client", - url: fmt.Sprintf("%s/channels/%s/messages?vb=true", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", BoolValue: true}, - Total: uint64(len(boolMsgs)), - Messages: boolMsgs[0:10], - }, - }, - { - desc: "read page with non-boolean value as client", - url: fmt.Sprintf("%s/channels/%s/messages?vb=yes", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with string value as client", - url: fmt.Sprintf("%s/channels/%s/messages?vs=%s", ts.URL, chanID, vs), - key: clientToken, - 
authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", StringValue: vs}, - Total: uint64(len(stringMsgs)), - Messages: stringMsgs[0:10], - }, - }, - { - desc: "read page with data value as client", - url: fmt.Sprintf("%s/channels/%s/messages?vd=%s", ts.URL, chanID, vd), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", DataValue: vd}, - Total: uint64(len(dataMsgs)), - Messages: dataMsgs[0:10], - }, - }, - { - desc: "read page with non-float from as client", - url: fmt.Sprintf("%s/channels/%s/messages?from=ABCD", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with non-float to as client", - url: fmt.Sprintf("%s/channels/%s/messages?to=ABCD", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with from/to as client", - url: fmt.Sprintf("%s/channels/%s/messages?from=%f&to=%f", ts.URL, chanID, messages[19].Time, messages[4].Time), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", From: messages[19].Time, To: messages[4].Time}, - Total: uint64(len(messages[5:20])), - Messages: messages[5:15], - }, - }, - { - desc: "read page with aggregation as client", - url: fmt.Sprintf("%s/channels/%s/messages?aggregation=MAX", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with interval as client", - url: fmt.Sprintf("%s/channels/%s/messages?interval=10h", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages"}, - Total: uint64(len(messages)), - Messages: messages[0:10], - }, - }, - { - desc: "read page with aggregation and interval as client", - url: fmt.Sprintf("%s/channels/%s/messages?aggregation=MAX&interval=10h", ts.URL, chanID), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with aggregation, interval, to and from as client", - url: fmt.Sprintf("%s/channels/%s/messages?aggregation=MAX&interval=10h&from=%f&to=%f", ts.URL, chanID, messages[19].Time, messages[4].Time), - key: clientToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Aggregation: "MAX", Interval: "10h", From: messages[19].Time, To: messages[4].Time}, - Total: uint64(len(messages[5:20])), - Messages: messages[5:15], - }, - }, - { - desc: "read page with invalid aggregation and valid interval, to and from as client", - url: fmt.Sprintf("%s/channels/%s/messages?aggregation=invalid&interval=10h&from=%f&to=%f", ts.URL, chanID, messages[19].Time, messages[4].Time), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with invalid interval and valid aggregation, to and from as client", - url: fmt.Sprintf("%s/channels/%s/messages?aggregation=MAX&interval=10hrs&from=%f&to=%f", ts.URL, chanID, messages[19].Time, messages[4].Time), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with aggregation, interval and to with missing from as client", - url: 
fmt.Sprintf("%s/channels/%s/messages?aggregation=MAX&interval=10h&to=%f", ts.URL, chanID, messages[4].Time), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with aggregation, interval and to with invalid from as client", - url: fmt.Sprintf("%s/channels/%s/messages?aggregation=MAX&interval=10h&to=ABCD&from=%f", ts.URL, chanID, messages[4].Time), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with aggregation, interval and to with invalid to as client", - url: fmt.Sprintf("%s/channels/%s/messages?aggregation=MAX&interval=10h&from=%f&to=ABCD", ts.URL, chanID, messages[4].Time), - key: clientToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with valid offset and limit as user", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&limit=10", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages"}, - Total: uint64(len(messages)), - Messages: messages[0:10], - }, - }, - { - desc: "read page with negative offset as user", - url: fmt.Sprintf("%s/channels/%s/messages?offset=-1&limit=10", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with negative limit as user", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&limit=-10", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with zero limit as user", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&limit=0", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with non-integer offset as user", - url: fmt.Sprintf("%s/channels/%s/messages?offset=abc&limit=10", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with non-integer limit as user", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&limit=abc", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with invalid channel id as user", - url: fmt.Sprintf("%s/channels//messages?offset=0&limit=10", ts.URL), - token: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with invalid token as user", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&limit=10", ts.URL, chanID), - token: invalidToken, - authResponse: false, - status: http.StatusUnauthorized, - err: svcerr.ErrAuthorization, - }, - { - desc: "read page with multiple offset as user", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&offset=1&limit=10", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with multiple limit as user", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&limit=20&limit=10", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with empty token as user", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0&limit=10", ts.URL, chanID), - token: "", - authResponse: false, - status: http.StatusUnauthorized, - err: svcerr.ErrAuthorization, - }, - { - desc: "read page with default offset as user", - url: fmt.Sprintf("%s/channels/%s/messages?limit=10", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusOK, - 
res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages"}, - Total: uint64(len(messages)), - Messages: messages[0:10], - }, - }, - { - desc: "read page with default limit as user", - url: fmt.Sprintf("%s/channels/%s/messages?offset=0", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages"}, - Total: uint64(len(messages)), - Messages: messages[0:10], - }, - }, - { - desc: "read page with senml format as user", - url: fmt.Sprintf("%s/channels/%s/messages?format=messages", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages"}, - Total: uint64(len(messages)), - Messages: messages[0:10], - }, - }, - { - desc: "read page with subtopic as user", - url: fmt.Sprintf("%s/channels/%s/messages?subtopic=%s&protocol=%s", ts.URL, chanID, subtopic, httpProt), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Subtopic: subtopic, Protocol: httpProt}, - Total: uint64(len(queryMsgs)), - Messages: queryMsgs[0:10], - }, - }, - { - desc: "read page with subtopic and protocol as user", - url: fmt.Sprintf("%s/channels/%s/messages?subtopic=%s&protocol=%s", ts.URL, chanID, subtopic, httpProt), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Subtopic: subtopic, Protocol: httpProt}, - Total: uint64(len(queryMsgs)), - Messages: queryMsgs[0:10], - }, - }, - { - desc: "read page with publisher as user", - url: fmt.Sprintf("%s/channels/%s/messages?publisher=%s", ts.URL, chanID, pubID2), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Publisher: pubID2}, - Total: uint64(len(queryMsgs)), - Messages: queryMsgs[0:10], - }, - }, - { - desc: "read page with protocol as user", - url: fmt.Sprintf("%s/channels/%s/messages?protocol=http", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Protocol: httpProt}, - Total: uint64(len(queryMsgs)), - Messages: queryMsgs[0:10], - }, - }, - { - desc: "read page with name as user", - url: fmt.Sprintf("%s/channels/%s/messages?name=%s", ts.URL, chanID, msgName), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Name: msgName}, - Total: uint64(len(queryMsgs)), - Messages: queryMsgs[0:10], - }, - }, - { - desc: "read page with value as user", - url: fmt.Sprintf("%s/channels/%s/messages?v=%f", ts.URL, chanID, v), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Value: v}, - Total: uint64(len(valueMsgs)), - Messages: valueMsgs[0:10], - }, - }, - { - desc: "read page with value and equal comparator as user", - url: fmt.Sprintf("%s/channels/%s/messages?v=%f&comparator=%s", ts.URL, chanID, v, readers.EqualKey), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Value: v, Comparator: readers.EqualKey}, - Total: uint64(len(valueMsgs)), - 
Messages: valueMsgs[0:10], - }, - }, - { - desc: "read page with value and lower-than comparator as user", - url: fmt.Sprintf("%s/channels/%s/messages?v=%f&comparator=%s", ts.URL, chanID, v+1, readers.LowerThanKey), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Value: v + 1, Comparator: readers.LowerThanKey}, - Total: uint64(len(valueMsgs)), - Messages: valueMsgs[0:10], - }, - }, - { - desc: "read page with value and lower-than-or-equal comparator as user", - url: fmt.Sprintf("%s/channels/%s/messages?v=%f&comparator=%s", ts.URL, chanID, v+1, readers.LowerThanEqualKey), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Value: v + 1, Comparator: readers.LowerThanEqualKey}, - Total: uint64(len(valueMsgs)), - Messages: valueMsgs[0:10], - }, - }, - { - desc: "read page with value and greater-than comparator as user", - url: fmt.Sprintf("%s/channels/%s/messages?v=%f&comparator=%s", ts.URL, chanID, v-1, readers.GreaterThanKey), - token: userToken, - status: http.StatusOK, - authResponse: true, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Value: v - 1, Comparator: readers.GreaterThanKey}, - Total: uint64(len(valueMsgs)), - Messages: valueMsgs[0:10], - }, - }, - { - desc: "read page with value and greater-than-or-equal comparator as user", - url: fmt.Sprintf("%s/channels/%s/messages?v=%f&comparator=%s", ts.URL, chanID, v-1, readers.GreaterThanEqualKey), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Value: v - 1, Comparator: readers.GreaterThanEqualKey}, - Total: uint64(len(valueMsgs)), - Messages: valueMsgs[0:10], - }, - }, - { - desc: "read page with non-float value as user", - url: fmt.Sprintf("%s/channels/%s/messages?v=ab01", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with value and wrong comparator as user", - url: fmt.Sprintf("%s/channels/%s/messages?v=%f&comparator=wrong", ts.URL, chanID, v-1), - token: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with boolean value as user", - url: fmt.Sprintf("%s/channels/%s/messages?vb=true", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", BoolValue: true}, - Total: uint64(len(boolMsgs)), - Messages: boolMsgs[0:10], - }, - }, - { - desc: "read page with non-boolean value as user", - url: fmt.Sprintf("%s/channels/%s/messages?vb=yes", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with string value as user", - url: fmt.Sprintf("%s/channels/%s/messages?vs=%s", ts.URL, chanID, vs), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", StringValue: vs}, - Total: uint64(len(stringMsgs)), - Messages: stringMsgs[0:10], - }, - }, - { - desc: "read page with data value as user", - url: fmt.Sprintf("%s/channels/%s/messages?vd=%s", ts.URL, chanID, vd), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", DataValue: vd}, 
- Total: uint64(len(dataMsgs)), - Messages: dataMsgs[0:10], - }, - }, - { - desc: "read page with non-float from as user", - url: fmt.Sprintf("%s/channels/%s/messages?from=ABCD", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with non-float to as user", - url: fmt.Sprintf("%s/channels/%s/messages?to=ABCD", ts.URL, chanID), - token: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with from/to as user", - url: fmt.Sprintf("%s/channels/%s/messages?from=%f&to=%f", ts.URL, chanID, messages[19].Time, messages[4].Time), - token: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", From: messages[19].Time, To: messages[4].Time}, - Total: uint64(len(messages[5:20])), - Messages: messages[5:15], - }, - }, - { - desc: "read page with aggregation as user", - url: fmt.Sprintf("%s/channels/%s/messages?aggregation=MAX", ts.URL, chanID), - key: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with interval as user", - url: fmt.Sprintf("%s/channels/%s/messages?interval=10h", ts.URL, chanID), - key: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages"}, - Total: uint64(len(messages)), - Messages: messages[0:10], - }, - }, - { - desc: "read page with aggregation and interval as user", - url: fmt.Sprintf("%s/channels/%s/messages?aggregation=MAX&interval=10h", ts.URL, chanID), - key: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with aggregation, interval, to and from as user", - url: fmt.Sprintf("%s/channels/%s/messages?aggregation=MAX&interval=10h&from=%f&to=%f", ts.URL, chanID, messages[19].Time, messages[4].Time), - key: userToken, - authResponse: true, - status: http.StatusOK, - res: pageRes{ - PageMetadata: readers.PageMetadata{Limit: 10, Format: "messages", Aggregation: "MAX", Interval: "10h", From: messages[19].Time, To: messages[4].Time}, - Total: uint64(len(messages[5:20])), - Messages: messages[5:15], - }, - }, - { - desc: "read page with invalid aggregation and valid interval, to and from as user", - url: fmt.Sprintf("%s/channels/%s/messages?aggregation=invalid&interval=10h&from=%f&to=%f", ts.URL, chanID, messages[19].Time, messages[4].Time), - key: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with invalid interval and valid aggregation, to and from as user", - url: fmt.Sprintf("%s/channels/%s/messages?aggregation=MAX&interval=10hrs&from=%f&to=%f", ts.URL, chanID, messages[19].Time, messages[4].Time), - key: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with aggregation, interval and to with missing from as user", - url: fmt.Sprintf("%s/channels/%s/messages?aggregation=MAX&interval=10h&to=%f", ts.URL, chanID, messages[4].Time), - key: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with aggregation, interval and to with invalid from as user", - url: fmt.Sprintf("%s/channels/%s/messages?aggregation=MAX&interval=10h&to=ABCD&from=%f", ts.URL, chanID, messages[4].Time), - key: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - { - desc: "read page with aggregation, interval and to with invalid to as user", - url: 
fmt.Sprintf("%s/channels/%s/messages?aggregation=MAX&interval=10h&from=%f&to=ABCD", ts.URL, chanID, messages[4].Time), - key: userToken, - authResponse: true, - status: http.StatusBadRequest, - }, - } - - for _, tc := range cases { - authnCall := authn.On("Authenticate", mock.Anything, tc.token).Return(validSession, tc.authnErr) - if tc.key != "" { - authnCall = clients.On("Authenticate", mock.Anything, &grpcClientsV1.AuthnReq{ - ClientSecret: tc.key, - }).Return(&grpcClientsV1.AuthnRes{Id: testsutil.GenerateUUID(t), Authenticated: true}, tc.authnErr) - } - authzCall := channels.On("Authorize", mock.Anything, mock.Anything).Return(&grpcChannelsV1.AuthzRes{Authorized: true}, tc.err) - repoCall := repo.On("ReadAll", chanID, tc.res.PageMetadata).Return(readers.MessagesPage{Total: tc.res.Total, Messages: fromSenml(tc.res.Messages)}, nil) - req := testRequest{ - client: ts.Client(), - method: http.MethodGet, - url: tc.url, - token: tc.token, - key: tc.key, - } - res, err := req.make() - assert.Nil(t, err, fmt.Sprintf("%s: unexpected error %s", tc.desc, err)) - - var page pageRes - err = json.NewDecoder(res.Body).Decode(&page) - assert.Nil(t, err, fmt.Sprintf("%s: unexpected error while decoding response body: %s", tc.desc, err)) - assert.Nil(t, err, fmt.Sprintf("%s: unexpected error %s", tc.desc, err)) - assert.Equal(t, tc.status, res.StatusCode, fmt.Sprintf("%s: expected %d got %d", tc.desc, tc.status, res.StatusCode)) - assert.Equal(t, tc.res.Total, page.Total, fmt.Sprintf("%s: expected %d got %d", tc.desc, tc.res.Total, page.Total)) - assert.ElementsMatch(t, tc.res.Messages, page.Messages, fmt.Sprintf("%s: got incorrect body from response", tc.desc)) - authzCall.Unset() - authnCall.Unset() - repoCall.Unset() - } -} - -type pageRes struct { - readers.PageMetadata - Total uint64 `json:"total"` - Messages []senml.Message `json:"messages,omitempty"` -} - -func fromSenml(in []senml.Message) []readers.Message { - var ret []readers.Message - for _, m := range in { - ret = append(ret, m) - } - return ret -} diff --git a/readers/api/logging.go b/readers/api/logging.go deleted file mode 100644 index 30f013ec23..0000000000 --- a/readers/api/logging.go +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -//go:build !test - -package api - -import ( - "log/slog" - "time" - - "github.com/absmach/supermq/readers" -) - -var _ readers.MessageRepository = (*loggingMiddleware)(nil) - -type loggingMiddleware struct { - logger *slog.Logger - svc readers.MessageRepository -} - -// LoggingMiddleware adds logging facilities to the core service. -func LoggingMiddleware(svc readers.MessageRepository, logger *slog.Logger) readers.MessageRepository { - return &loggingMiddleware{ - logger: logger, - svc: svc, - } -} - -func (lm *loggingMiddleware) ReadAll(chanID string, rpm readers.PageMetadata) (page readers.MessagesPage, err error) { - defer func(begin time.Time) { - args := []any{ - slog.String("duration", time.Since(begin).String()), - slog.String("channel_id", chanID), - slog.Group("page", - slog.Uint64("offset", rpm.Offset), - slog.Uint64("limit", rpm.Limit), - slog.Uint64("total", page.Total), - ), - } - if rpm.Subtopic != "" { - args = append(args, slog.String("subtopic", rpm.Subtopic)) - } - if rpm.Publisher != "" { - args = append(args, slog.String("publisher", rpm.Publisher)) - } - if err != nil { - args = append(args, slog.Any("error", err)) - lm.logger.Warn("Read all failed", args...) 
- return - } - lm.logger.Info("Read all completed successfully", args...) - }(time.Now()) - - return lm.svc.ReadAll(chanID, rpm) -} diff --git a/readers/api/metrics.go b/readers/api/metrics.go deleted file mode 100644 index 717ab91bc1..0000000000 --- a/readers/api/metrics.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -//go:build !test - -package api - -import ( - "time" - - "github.com/absmach/supermq/readers" - "github.com/go-kit/kit/metrics" -) - -var _ readers.MessageRepository = (*metricsMiddleware)(nil) - -type metricsMiddleware struct { - counter metrics.Counter - latency metrics.Histogram - svc readers.MessageRepository -} - -// MetricsMiddleware instruments core service by tracking request count and latency. -func MetricsMiddleware(svc readers.MessageRepository, counter metrics.Counter, latency metrics.Histogram) readers.MessageRepository { - return &metricsMiddleware{ - counter: counter, - latency: latency, - svc: svc, - } -} - -func (mm *metricsMiddleware) ReadAll(chanID string, rpm readers.PageMetadata) (readers.MessagesPage, error) { - defer func(begin time.Time) { - mm.counter.With("method", "read_all").Add(1) - mm.latency.With("method", "read_all").Observe(time.Since(begin).Seconds()) - }(time.Now()) - - return mm.svc.ReadAll(chanID, rpm) -} diff --git a/readers/api/requests.go b/readers/api/requests.go deleted file mode 100644 index a39dd5834d..0000000000 --- a/readers/api/requests.go +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -package api - -import ( - "slices" - "strings" - "time" - - apiutil "github.com/absmach/supermq/api/http/util" - "github.com/absmach/supermq/readers" -) - -const maxLimitSize = 1000 - -var validAggregations = []string{"MAX", "MIN", "AVG", "SUM", "COUNT"} - -type listMessagesReq struct { - chanID string - token string - key string - pageMeta readers.PageMetadata -} - -func (req listMessagesReq) validate() error { - if req.token == "" && req.key == "" { - return apiutil.ErrBearerToken - } - - if req.chanID == "" { - return apiutil.ErrMissingID - } - - if req.pageMeta.Limit < 1 || req.pageMeta.Limit > maxLimitSize { - return apiutil.ErrLimitSize - } - - if req.pageMeta.Comparator != "" && - req.pageMeta.Comparator != readers.EqualKey && - req.pageMeta.Comparator != readers.LowerThanKey && - req.pageMeta.Comparator != readers.LowerThanEqualKey && - req.pageMeta.Comparator != readers.GreaterThanKey && - req.pageMeta.Comparator != readers.GreaterThanEqualKey { - return apiutil.ErrInvalidComparator - } - - if req.pageMeta.Aggregation != "" { - if req.pageMeta.From == 0 { - return apiutil.ErrMissingFrom - } - - if req.pageMeta.To == 0 { - return apiutil.ErrMissingTo - } - - if !slices.Contains(validAggregations, strings.ToUpper(req.pageMeta.Aggregation)) { - return apiutil.ErrInvalidAggregation - } - - if _, err := time.ParseDuration(req.pageMeta.Interval); err != nil { - return apiutil.ErrInvalidInterval - } - } - - return nil -} diff --git a/readers/api/responses.go b/readers/api/responses.go deleted file mode 100644 index e1106c0752..0000000000 --- a/readers/api/responses.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -package api - -import ( - "net/http" - - "github.com/absmach/supermq" - "github.com/absmach/supermq/readers" -) - -var _ supermq.Response = (*pageRes)(nil) - -type pageRes struct { - readers.PageMetadata - Total uint64 `json:"total"` - Messages 
[]readers.Message `json:"messages,omitempty"` -} - -func (res pageRes) Headers() map[string]string { - return map[string]string{} -} - -func (res pageRes) Code() int { - return http.StatusOK -} - -func (res pageRes) Empty() bool { - return false -} diff --git a/readers/api/transport.go b/readers/api/transport.go deleted file mode 100644 index 50f8a13811..0000000000 --- a/readers/api/transport.go +++ /dev/null @@ -1,289 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -package api - -import ( - "context" - "encoding/json" - "net/http" - - "github.com/absmach/supermq" - grpcChannelsV1 "github.com/absmach/supermq/api/grpc/channels/v1" - grpcClientsV1 "github.com/absmach/supermq/api/grpc/clients/v1" - apiutil "github.com/absmach/supermq/api/http/util" - smqauthn "github.com/absmach/supermq/pkg/authn" - "github.com/absmach/supermq/pkg/connections" - "github.com/absmach/supermq/pkg/errors" - svcerr "github.com/absmach/supermq/pkg/errors/service" - "github.com/absmach/supermq/pkg/policies" - "github.com/absmach/supermq/readers" - "github.com/go-chi/chi/v5" - kithttp "github.com/go-kit/kit/transport/http" - "github.com/prometheus/client_golang/prometheus/promhttp" -) - -const ( - contentType = "application/json" - offsetKey = "offset" - limitKey = "limit" - formatKey = "format" - subtopicKey = "subtopic" - publisherKey = "publisher" - protocolKey = "protocol" - nameKey = "name" - valueKey = "v" - stringValueKey = "vs" - dataValueKey = "vd" - boolValueKey = "vb" - comparatorKey = "comparator" - fromKey = "from" - toKey = "to" - aggregationKey = "aggregation" - intervalKey = "interval" - defInterval = "1s" - defLimit = 10 - defOffset = 0 - defFormat = "messages" -) - -// MakeHandler returns a HTTP handler for API endpoints. 
-func MakeHandler(svc readers.MessageRepository, authn smqauthn.Authentication, clients grpcClientsV1.ClientsServiceClient, channels grpcChannelsV1.ChannelsServiceClient, svcName, instanceID string) http.Handler { - opts := []kithttp.ServerOption{ - kithttp.ServerErrorEncoder(encodeError), - } - - mux := chi.NewRouter() - mux.Get("/channels/{chanID}/messages", kithttp.NewServer( - listMessagesEndpoint(svc, authn, clients, channels), - decodeList, - encodeResponse, - opts..., - ).ServeHTTP) - - mux.Get("/health", supermq.Health(svcName, instanceID)) - mux.Handle("/metrics", promhttp.Handler()) - - return mux -} - -func decodeList(_ context.Context, r *http.Request) (interface{}, error) { - offset, err := apiutil.ReadNumQuery[uint64](r, offsetKey, defOffset) - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - limit, err := apiutil.ReadNumQuery[uint64](r, limitKey, defLimit) - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - format, err := apiutil.ReadStringQuery(r, formatKey, defFormat) - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - subtopic, err := apiutil.ReadStringQuery(r, subtopicKey, "") - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - publisher, err := apiutil.ReadStringQuery(r, publisherKey, "") - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - protocol, err := apiutil.ReadStringQuery(r, protocolKey, "") - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - name, err := apiutil.ReadStringQuery(r, nameKey, "") - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - v, err := apiutil.ReadNumQuery[float64](r, valueKey, 0) - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - comparator, err := apiutil.ReadStringQuery(r, comparatorKey, "") - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - vs, err := apiutil.ReadStringQuery(r, stringValueKey, "") - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - vd, err := apiutil.ReadStringQuery(r, dataValueKey, "") - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - vb, err := apiutil.ReadBoolQuery(r, boolValueKey, false) - if err != nil && err != apiutil.ErrNotFoundParam { - return nil, err - } - - from, err := apiutil.ReadNumQuery[float64](r, fromKey, 0) - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - to, err := apiutil.ReadNumQuery[float64](r, toKey, 0) - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - aggregation, err := apiutil.ReadStringQuery(r, aggregationKey, "") - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - - var interval string - if aggregation != "" { - interval, err = apiutil.ReadStringQuery(r, intervalKey, defInterval) - if err != nil { - return nil, errors.Wrap(apiutil.ErrValidation, err) - } - } - - req := listMessagesReq{ - chanID: chi.URLParam(r, "chanID"), - token: apiutil.ExtractBearerToken(r), - key: apiutil.ExtractClientSecret(r), - pageMeta: readers.PageMetadata{ - Offset: offset, - Limit: limit, - Format: format, - Subtopic: subtopic, - Publisher: publisher, - Protocol: protocol, - Name: name, - Value: v, - Comparator: comparator, - StringValue: vs, - DataValue: vd, - BoolValue: vb, - From: from, - To: to, - Aggregation: aggregation, - Interval: interval, - }, - } - return req, nil -} - -func encodeResponse(_ context.Context, w 
http.ResponseWriter, response interface{}) error { - w.Header().Set("Content-Type", contentType) - - if ar, ok := response.(supermq.Response); ok { - for k, v := range ar.Headers() { - w.Header().Set(k, v) - } - - w.WriteHeader(ar.Code()) - - if ar.Empty() { - return nil - } - } - - return json.NewEncoder(w).Encode(response) -} - -func encodeError(_ context.Context, err error, w http.ResponseWriter) { - var wrapper error - if errors.Contains(err, apiutil.ErrValidation) { - wrapper, err = errors.Unwrap(err) - } - - switch { - case errors.Contains(err, nil): - case errors.Contains(err, apiutil.ErrInvalidQueryParams), - errors.Contains(err, svcerr.ErrMalformedEntity), - errors.Contains(err, apiutil.ErrMissingID), - errors.Contains(err, apiutil.ErrLimitSize), - errors.Contains(err, apiutil.ErrOffsetSize), - errors.Contains(err, apiutil.ErrInvalidComparator), - errors.Contains(err, apiutil.ErrInvalidAggregation), - errors.Contains(err, apiutil.ErrInvalidInterval), - errors.Contains(err, apiutil.ErrMissingFrom), - errors.Contains(err, apiutil.ErrMissingTo), - errors.Contains(err, apiutil.ErrMissingDomainID): - w.WriteHeader(http.StatusBadRequest) - case errors.Contains(err, svcerr.ErrAuthentication), - errors.Contains(err, svcerr.ErrAuthorization), - errors.Contains(err, apiutil.ErrBearerToken): - w.WriteHeader(http.StatusUnauthorized) - case errors.Contains(err, readers.ErrReadMessages): - w.WriteHeader(http.StatusInternalServerError) - default: - w.WriteHeader(http.StatusInternalServerError) - } - - if wrapper != nil { - err = errors.Wrap(wrapper, err) - } - if errorVal, ok := err.(errors.Error); ok { - w.Header().Set("Content-Type", contentType) - if err := json.NewEncoder(w).Encode(errorVal); err != nil { - w.WriteHeader(http.StatusInternalServerError) - } - } -} - -func authnAuthz(ctx context.Context, req listMessagesReq, authn smqauthn.Authentication, clients grpcClientsV1.ClientsServiceClient, channels grpcChannelsV1.ChannelsServiceClient) error { - clientID, clientType, err := authenticate(ctx, req, authn, clients) - if err != nil { - return nil - } - if err := authorize(ctx, clientID, clientType, req.chanID, channels); err != nil { - return err - } - return nil -} - -func authenticate(ctx context.Context, req listMessagesReq, authn smqauthn.Authentication, clients grpcClientsV1.ClientsServiceClient) (clientID string, clientType string, err error) { - switch { - case req.token != "": - session, err := authn.Authenticate(ctx, req.token) - if err != nil { - return "", "", err - } - - return session.DomainUserID, policies.UserType, nil - case req.key != "": - res, err := clients.Authenticate(ctx, &grpcClientsV1.AuthnReq{ - ClientSecret: req.key, - }) - if err != nil { - return "", "", err - } - if !res.GetAuthenticated() { - return "", "", svcerr.ErrAuthentication - } - return res.GetId(), policies.ClientType, nil - default: - return "", "", svcerr.ErrAuthentication - } -} - -func authorize(ctx context.Context, clientID, clientType, chanID string, channels grpcChannelsV1.ChannelsServiceClient) (err error) { - res, err := channels.Authorize(ctx, &grpcChannelsV1.AuthzReq{ - ClientId: clientID, - ClientType: clientType, - Type: uint32(connections.Subscribe), - ChannelId: chanID, - }) - if err != nil { - return errors.Wrap(svcerr.ErrAuthorization, err) - } - if !res.GetAuthorized() { - return svcerr.ErrAuthorization - } - return nil -} diff --git a/readers/postgres/README.md b/readers/postgres/README.md deleted file mode 100644 index 23b0105d5c..0000000000 --- a/readers/postgres/README.md +++ 
/dev/null @@ -1,101 +0,0 @@ -# Postgres reader - -Postgres reader provides message repository implementation for Postgres. - -## Configuration - -The service is configured using the environment variables presented in the -following table. Note that any unset variables will be replaced with their -default values. - -| Variable | Description | Default | -| ------------------------------------ | -------------------------------------------- | ---------------------------- | -| SMQ_POSTGRES_READER_LOG_LEVEL | Service log level | info | -| SMQ_POSTGRES_READER_HTTP_HOST | Service HTTP host | localhost | -| SMQ_POSTGRES_READER_HTTP_PORT | Service HTTP port | 9009 | -| SMQ_POSTGRES_READER_HTTP_SERVER_CERT | Service HTTP server cert | "" | -| SMQ_POSTGRES_READER_HTTP_SERVER_KEY | Service HTTP server key | "" | -| SMQ_POSTGRES_HOST | Postgres DB host | localhost | -| SMQ_POSTGRES_PORT | Postgres DB port | 5432 | -| SMQ_POSTGRES_USER | Postgres user | supermq | -| SMQ_POSTGRES_PASS | Postgres password | supermq | -| SMQ_POSTGRES_NAME | Postgres database name | messages | -| SMQ_POSTGRES_SSL_MODE | Postgres SSL mode | disabled | -| SMQ_POSTGRES_SSL_CERT | Postgres SSL certificate path | "" | -| SMQ_POSTGRES_SSL_KEY | Postgres SSL key | "" | -| SMQ_POSTGRES_SSL_ROOT_CERT | Postgres SSL root certificate path | "" | -| SMQ_CLIENTS_AUTH_GRPC_URL | Clients service Auth gRPC URL | localhost:7000 | -| SMQ_CLIENTS_AUTH_GRPC_TIMEOUT | Clients service Auth gRPC timeout in seconds | 1s | -| SMQ_CLIENTS_AUTH_GRPC_CLIENT_TLS | Clients service Auth gRPC TLS mode flag | false | -| SMQ_CLIENTS_AUTH_GRPC_CA_CERTS | Clients service Auth gRPC CA certificates | "" | -| SMQ_AUTH_GRPC_URL | Auth service gRPC URL | localhost:7001 | -| SMQ_AUTH_GRPC_TIMEOUT | Auth service gRPC request timeout in seconds | 1s | -| SMQ_AUTH_GRPC_CLIENT_TLS | Auth service gRPC TLS mode flag | false | -| SMQ_AUTH_GRPC_CA_CERTS | Auth service gRPC CA certificates | "" | -| SMQ_JAEGER_URL | Jaeger server URL | http://jaeger:4318/v1/traces | -| SMQ_SEND_TELEMETRY | Send telemetry to supermq call home server | true | -| SMQ_POSTGRES_READER_INSTANCE_ID | Postgres reader instance ID | | - -## Deployment - -The service itself is distributed as Docker container. Check the [`postgres-reader`](https://github.com/absmach/supermq/blob/main/docker/addons/postgres-reader/docker-compose.yml#L17-L41) service section in -docker-compose file to see how service is deployed. 
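Once the service is up (the startup script below shows how to run the binary directly), a quick way to verify it is to hit the health endpoint and issue a simple read. This is a minimal illustrative sketch, assuming the default HTTP port 9009 from the table above; `<channel-id>` and `<user-token>` are placeholders, not values from this repository:

```bash
# Illustrative smoke test: the reader exposes /health alongside the
# /channels/{chanID}/messages API; <channel-id> and <user-token> are placeholders.
curl -s http://localhost:9009/health

curl -s -H "Authorization: Bearer <user-token>" \
  "http://localhost:9009/channels/<channel-id>/messages?offset=0&limit=10"
```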
- -To start the service, execute the following shell script: - -```bash -# download the latest version of the service -git clone https://github.com/absmach/supermq - -cd supermq - -# compile the postgres writer -make postgres-writer - -# copy binary to bin -make install - -# Set the environment variables and run the service -SMQ_POSTGRES_READER_LOG_LEVEL=[Service log level] \ -SMQ_POSTGRES_READER_HTTP_HOST=[Service HTTP host] \ -SMQ_POSTGRES_READER_HTTP_PORT=[Service HTTP port] \ -SMQ_POSTGRES_READER_HTTP_SERVER_CERT=[Service HTTPS server certificate path] \ -SMQ_POSTGRES_READER_HTTP_SERVER_KEY=[Service HTTPS server key path] \ -SMQ_POSTGRES_HOST=[Postgres host] \ -SMQ_POSTGRES_PORT=[Postgres port] \ -SMQ_POSTGRES_USER=[Postgres user] \ -SMQ_POSTGRES_PASS=[Postgres password] \ -SMQ_POSTGRES_NAME=[Postgres database name] \ -SMQ_POSTGRES_SSL_MODE=[Postgres SSL mode] \ -SMQ_POSTGRES_SSL_CERT=[Postgres SSL cert] \ -SMQ_POSTGRES_SSL_KEY=[Postgres SSL key] \ -SMQ_POSTGRES_SSL_ROOT_CERT=[Postgres SSL Root cert] \ -SMQ_CLIENTS_AUTH_GRPC_URL=[Clients service Auth GRPC URL] \ -SMQ_CLIENTS_AUTH_GRPC_TIMEOUT=[Clients service Auth gRPC request timeout in seconds] \ -SMQ_CLIENTS_AUTH_GRPC_CLIENT_TLS=[Clients service Auth gRPC TLS mode flag] \ -SMQ_CLIENTS_AUTH_GRPC_CA_CERTS=[Clients service Auth gRPC CA certificates] \ -SMQ_AUTH_GRPC_URL=[Auth service gRPC URL] \ -SMQ_AUTH_GRPC_TIMEOUT=[Auth service gRPC request timeout in seconds] \ -SMQ_AUTH_GRPC_CLIENT_TLS=[Auth service gRPC TLS mode flag] \ -SMQ_AUTH_GRPC_CA_CERTS=[Auth service gRPC CA certificates] \ -SMQ_JAEGER_URL=[Jaeger server URL] \ -SMQ_SEND_TELEMETRY=[Send telemetry to supermq call home server] \ -SMQ_POSTGRES_READER_INSTANCE_ID=[Postgres reader instance ID] \ -$GOBIN/supermq-postgres-reader -``` - -## Usage - -Starting service will start consuming normalized messages in SenML format. - -Comparator Usage Guide: - -| Comparator | Usage | Example | -| ---------- | --------------------------------------------------------------------------- | ---------------------------------- | -| eq | Return values that are equal to the query | eq["active"] -> "active" | -| ge | Return values that are substrings of the query | ge["tiv"] -> "active" and "tiv" | -| gt | Return values that are substrings of the query and not equal to the query | gt["tiv"] -> "active" | -| le | Return values that are superstrings of the query | le["active"] -> "tiv" | -| lt | Return values that are superstrings of the query and not equal to the query | lt["active"] -> "active" and "tiv" | - -Official docs can be found [here](https://docs.supermq.abstractmachines.fr). diff --git a/readers/postgres/doc.go b/readers/postgres/doc.go deleted file mode 100644 index a92d4f9b54..0000000000 --- a/readers/postgres/doc.go +++ /dev/null @@ -1,6 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -// Package postgres contains repository implementations using Postgres as -// the underlying database. -package postgres diff --git a/readers/postgres/init.go b/readers/postgres/init.go deleted file mode 100644 index 10bc5f1eb3..0000000000 --- a/readers/postgres/init.go +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -package postgres - -import ( - "fmt" - - "github.com/jmoiron/sqlx" - migrate "github.com/rubenv/sql-migrate" -) - -// Table for SenML messages. -const defTable = "messages" - -// Config defines the options that are used when connecting to a PostgreSQL instance. 
-type Config struct { - Host string - Port string - User string - Pass string - Name string - SSLMode string - SSLCert string - SSLKey string - SSLRootCert string -} - -// Connect creates a connection to the PostgreSQL instance and applies any -// unapplied database migrations. A non-nil error is returned to indicate -// failure. -func Connect(cfg Config) (*sqlx.DB, error) { - url := fmt.Sprintf("host=%s port=%s user=%s dbname=%s password=%s sslmode=%s sslcert=%s sslkey=%s sslrootcert=%s", cfg.Host, cfg.Port, cfg.User, cfg.Name, cfg.Pass, cfg.SSLMode, cfg.SSLCert, cfg.SSLKey, cfg.SSLRootCert) - - db, err := sqlx.Open("pgx", url) - if err != nil { - return nil, err - } - - if err := migrateDB(db); err != nil { - return nil, err - } - - return db, nil -} - -func migrateDB(db *sqlx.DB) error { - migrations := &migrate.MemoryMigrationSource{ - Migrations: []*migrate.Migration{ - { - Id: "messages_1", - Up: []string{ - `CREATE TABLE IF NOT EXISTS messages ( - id UUID, - channel UUID, - subtopic VARCHAR(254), - publisher UUID, - protocol TEXT, - name TEXT, - unit TEXT, - value FLOAT, - string_value TEXT, - bool_value BOOL, - data_value TEXT, - sum FLOAT, - time FlOAT, - update_time FLOAT, - PRIMARY KEY (id) - )`, - }, - Down: []string{ - "DROP TABLE messages", - }, - }, - }, - } - - _, err := migrate.Exec(db.DB, "postgres", migrations, migrate.Up) - return err -} diff --git a/readers/postgres/messages.go b/readers/postgres/messages.go deleted file mode 100644 index 2cab73e0a3..0000000000 --- a/readers/postgres/messages.go +++ /dev/null @@ -1,199 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -package postgres - -import ( - "encoding/json" - "fmt" - - "github.com/absmach/supermq/pkg/errors" - "github.com/absmach/supermq/pkg/transformers/senml" - "github.com/absmach/supermq/readers" - "github.com/jackc/pgerrcode" - "github.com/jackc/pgx/v5/pgconn" - "github.com/jmoiron/sqlx" -) - -var _ readers.MessageRepository = (*postgresRepository)(nil) - -type postgresRepository struct { - db *sqlx.DB -} - -// New returns new PostgreSQL writer. 
-func New(db *sqlx.DB) readers.MessageRepository { - return &postgresRepository{ - db: db, - } -} - -func (tr postgresRepository) ReadAll(chanID string, rpm readers.PageMetadata) (readers.MessagesPage, error) { - order := "time" - format := defTable - - if rpm.Format != "" && rpm.Format != defTable { - order = "created" - format = rpm.Format - } - cond := fmtCondition(chanID, rpm) - - q := fmt.Sprintf(`SELECT * FROM %s - WHERE %s ORDER BY %s DESC - LIMIT :limit OFFSET :offset;`, format, cond, order) - - params := map[string]interface{}{ - "channel": chanID, - "limit": rpm.Limit, - "offset": rpm.Offset, - "subtopic": rpm.Subtopic, - "publisher": rpm.Publisher, - "name": rpm.Name, - "protocol": rpm.Protocol, - "value": rpm.Value, - "bool_value": rpm.BoolValue, - "string_value": rpm.StringValue, - "data_value": rpm.DataValue, - "from": rpm.From, - "to": rpm.To, - } - rows, err := tr.db.NamedQuery(q, params) - if err != nil { - if pgErr, ok := err.(*pgconn.PgError); ok { - if pgErr.Code == pgerrcode.UndefinedTable { - return readers.MessagesPage{}, nil - } - } - return readers.MessagesPage{}, errors.Wrap(readers.ErrReadMessages, err) - } - defer rows.Close() - - page := readers.MessagesPage{ - PageMetadata: rpm, - Messages: []readers.Message{}, - } - switch format { - case defTable: - for rows.Next() { - msg := senmlMessage{Message: senml.Message{}} - if err := rows.StructScan(&msg); err != nil { - return readers.MessagesPage{}, errors.Wrap(readers.ErrReadMessages, err) - } - - page.Messages = append(page.Messages, msg.Message) - } - default: - for rows.Next() { - msg := jsonMessage{} - if err := rows.StructScan(&msg); err != nil { - return readers.MessagesPage{}, errors.Wrap(readers.ErrReadMessages, err) - } - m, err := msg.toMap() - if err != nil { - return readers.MessagesPage{}, errors.Wrap(readers.ErrReadMessages, err) - } - page.Messages = append(page.Messages, m) - } - } - - q = fmt.Sprintf(`SELECT COUNT(*) FROM %s WHERE %s;`, format, cond) - rows, err = tr.db.NamedQuery(q, params) - if err != nil { - return readers.MessagesPage{}, errors.Wrap(readers.ErrReadMessages, err) - } - defer rows.Close() - - total := uint64(0) - if rows.Next() { - if err := rows.Scan(&total); err != nil { - return page, err - } - } - page.Total = total - - return page, nil -} - -func fmtCondition(chanID string, rpm readers.PageMetadata) string { - condition := `channel = :channel` - - var query map[string]interface{} - meta, err := json.Marshal(rpm) - if err != nil { - return condition - } - if err := json.Unmarshal(meta, &query); err != nil { - return condition - } - - for name := range query { - switch name { - case - "subtopic", - "publisher", - "name", - "protocol": - condition = fmt.Sprintf(`%s AND %s = :%s`, condition, name, name) - case "v": - comparator := readers.ParseValueComparator(query) - condition = fmt.Sprintf(`%s AND value %s :value`, condition, comparator) - case "vb": - condition = fmt.Sprintf(`%s AND bool_value = :bool_value`, condition) - case "vs": - comparator := readers.ParseValueComparator(query) - switch comparator { - case "=": - condition = fmt.Sprintf("%s AND string_value = :string_value ", condition) - case ">": - condition = fmt.Sprintf("%s AND string_value LIKE '%%' || :string_value || '%%' AND string_value <> :string_value", condition) - case ">=": - condition = fmt.Sprintf("%s AND string_value LIKE '%%' || :string_value || '%%'", condition) - case "<=": - condition = fmt.Sprintf("%s AND :string_value LIKE '%%' || string_value || '%%'", condition) - case "<": - condition = 
fmt.Sprintf("%s AND :string_value LIKE '%%' || string_value || '%%' AND string_value <> :string_value", condition) - } - case "vd": - comparator := readers.ParseValueComparator(query) - condition = fmt.Sprintf(`%s AND data_value %s :data_value`, condition, comparator) - case "from": - condition = fmt.Sprintf(`%s AND time >= :from`, condition) - case "to": - condition = fmt.Sprintf(`%s AND time < :to`, condition) - } - } - return condition -} - -type senmlMessage struct { - ID string `db:"id"` - senml.Message -} - -type jsonMessage struct { - ID string `db:"id"` - Channel string `db:"channel"` - Created int64 `db:"created"` - Subtopic string `db:"subtopic"` - Publisher string `db:"publisher"` - Protocol string `db:"protocol"` - Payload []byte `db:"payload"` -} - -func (msg jsonMessage) toMap() (map[string]interface{}, error) { - ret := map[string]interface{}{ - "id": msg.ID, - "channel": msg.Channel, - "created": msg.Created, - "subtopic": msg.Subtopic, - "publisher": msg.Publisher, - "protocol": msg.Protocol, - "payload": map[string]interface{}{}, - } - pld := make(map[string]interface{}) - if err := json.Unmarshal(msg.Payload, &pld); err != nil { - return nil, err - } - ret["payload"] = pld - return ret, nil -} diff --git a/readers/postgres/messages_test.go b/readers/postgres/messages_test.go deleted file mode 100644 index 85a98e0473..0000000000 --- a/readers/postgres/messages_test.go +++ /dev/null @@ -1,687 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -package postgres_test - -import ( - "context" - "fmt" - "testing" - "time" - - pwriter "github.com/absmach/supermq/consumers/writers/postgres" - "github.com/absmach/supermq/internal/testsutil" - "github.com/absmach/supermq/pkg/transformers/json" - "github.com/absmach/supermq/pkg/transformers/senml" - "github.com/absmach/supermq/readers" - preader "github.com/absmach/supermq/readers/postgres" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -const ( - subtopic = "subtopic" - msgsNum = 100 - limit = 10 - valueFields = 5 - mqttProt = "mqtt" - httpProt = "http" - msgName = "temperature" - format1 = "format1" - format2 = "format2" - wrongID = "0" -) - -var ( - v float64 = 5 - vs = "stringValue" - vb = true - vd = "dataValue" - sum float64 = 42 -) - -func TestReadSenml(t *testing.T) { - writer := pwriter.New(db) - - chanID := testsutil.GenerateUUID(t) - pubID := testsutil.GenerateUUID(t) - pubID2 := testsutil.GenerateUUID(t) - wrongID := testsutil.GenerateUUID(t) - - m := senml.Message{ - Channel: chanID, - Publisher: pubID, - Protocol: mqttProt, - } - - messages := []senml.Message{} - valueMsgs := []senml.Message{} - boolMsgs := []senml.Message{} - stringMsgs := []senml.Message{} - dataMsgs := []senml.Message{} - queryMsgs := []senml.Message{} - - now := float64(time.Now().Unix()) - for i := 0; i < msgsNum; i++ { - // Mix possible values as well as value sum. 
- msg := m - msg.Time = now - float64(i) - - count := i % valueFields - switch count { - case 0: - msg.Value = &v - valueMsgs = append(valueMsgs, msg) - case 1: - msg.BoolValue = &vb - boolMsgs = append(boolMsgs, msg) - case 2: - msg.StringValue = &vs - stringMsgs = append(stringMsgs, msg) - case 3: - msg.DataValue = &vd - dataMsgs = append(dataMsgs, msg) - case 4: - msg.Sum = &sum - msg.Subtopic = subtopic - msg.Protocol = httpProt - msg.Publisher = pubID2 - msg.Name = msgName - queryMsgs = append(queryMsgs, msg) - } - - messages = append(messages, msg) - } - - err := writer.ConsumeBlocking(context.TODO(), messages) - require.Nil(t, err, fmt.Sprintf("expected no error got %s\n", err)) - - reader := preader.New(db) - - // Since messages are not saved in natural order, - // cases that return subset of messages are only - // checking data result set size, but not content. - cases := []struct { - desc string - chanID string - pageMeta readers.PageMetadata - page readers.MessagesPage - }{ - { - desc: "read message page for existing channel", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: msgsNum, - }, - page: readers.MessagesPage{ - Total: msgsNum, - Messages: fromSenml(messages), - }, - }, - { - desc: "read message page for non-existent channel", - chanID: wrongID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: msgsNum, - }, - page: readers.MessagesPage{ - Messages: []readers.Message{}, - }, - }, - { - desc: "read message last page", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: msgsNum - 20, - Limit: msgsNum, - }, - page: readers.MessagesPage{ - Total: msgsNum, - Messages: fromSenml(messages[msgsNum-20 : msgsNum]), - }, - }, - { - desc: "read message with non-existent subtopic", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: msgsNum, - Subtopic: "not-present", - }, - page: readers.MessagesPage{ - Messages: []readers.Message{}, - }, - }, - { - desc: "read message with subtopic", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: uint64(len(queryMsgs)), - Subtopic: subtopic, - }, - page: readers.MessagesPage{ - Total: uint64(len(queryMsgs)), - Messages: fromSenml(queryMsgs), - }, - }, - { - desc: "read message with publisher", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: uint64(len(queryMsgs)), - Publisher: pubID2, - }, - page: readers.MessagesPage{ - Total: uint64(len(queryMsgs)), - Messages: fromSenml(queryMsgs), - }, - }, - { - desc: "read message with wrong format", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Format: "messagess", - Offset: 0, - Limit: uint64(len(queryMsgs)), - Publisher: pubID2, - }, - page: readers.MessagesPage{ - Total: 0, - Messages: []readers.Message{}, - }, - }, - { - desc: "read message with protocol", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: uint64(len(queryMsgs)), - Protocol: httpProt, - }, - page: readers.MessagesPage{ - Total: uint64(len(queryMsgs)), - Messages: fromSenml(queryMsgs), - }, - }, - { - desc: "read message with name", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - Name: msgName, - }, - page: readers.MessagesPage{ - Total: uint64(len(queryMsgs)), - Messages: fromSenml(queryMsgs[0:limit]), - }, - }, - { - desc: "read message with value", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - Value: v, - }, - page: readers.MessagesPage{ - Total: uint64(len(valueMsgs)), - Messages: fromSenml(valueMsgs[0:limit]), - }, - 
}, - { - desc: "read message with value and equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - Value: v, - Comparator: readers.EqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(valueMsgs)), - Messages: fromSenml(valueMsgs[0:limit]), - }, - }, - { - desc: "read message with value and lower-than comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - Value: v + 1, - Comparator: readers.LowerThanKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(valueMsgs)), - Messages: fromSenml(valueMsgs[0:limit]), - }, - }, - { - desc: "read message with value and lower-than-or-equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - Value: v + 1, - Comparator: readers.LowerThanEqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(valueMsgs)), - Messages: fromSenml(valueMsgs[0:limit]), - }, - }, - { - desc: "read message with value and greater-than comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - Value: v - 1, - Comparator: readers.GreaterThanKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(valueMsgs)), - Messages: fromSenml(valueMsgs[0:limit]), - }, - }, - { - desc: "read message with value and greater-than-or-equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - Value: v - 1, - Comparator: readers.GreaterThanEqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(valueMsgs)), - Messages: fromSenml(valueMsgs[0:limit]), - }, - }, - { - desc: "read message with boolean value", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - BoolValue: vb, - }, - page: readers.MessagesPage{ - Total: uint64(len(boolMsgs)), - Messages: fromSenml(boolMsgs[0:limit]), - }, - }, - { - desc: "read message with string value", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - StringValue: vs, - }, - page: readers.MessagesPage{ - Total: uint64(len(stringMsgs)), - Messages: fromSenml(stringMsgs[0:limit]), - }, - }, - { - desc: "read message with string value and equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - StringValue: vs, - Comparator: readers.EqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(stringMsgs)), - Messages: fromSenml(stringMsgs[0:limit]), - }, - }, - { - desc: "read message with string value and lower-than comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - StringValue: "a stringValues b", - Comparator: readers.LowerThanKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(stringMsgs)), - Messages: fromSenml(stringMsgs[0:limit]), - }, - }, - { - desc: "read message with string value and lower-than-or-equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - StringValue: vs, - Comparator: readers.LowerThanEqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(stringMsgs)), - Messages: fromSenml(stringMsgs[0:limit]), - }, - }, - { - desc: "read message with string value and greater-than comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - StringValue: "alu", - Comparator: readers.GreaterThanKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(stringMsgs)), - Messages: fromSenml(stringMsgs[0:limit]), - }, - }, - { - desc: "read message with string value and 
greater-than-or-equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - StringValue: vs, - Comparator: readers.GreaterThanEqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(stringMsgs)), - Messages: fromSenml(stringMsgs[0:limit]), - }, - }, - { - desc: "read message with data value", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - DataValue: vd, - }, - page: readers.MessagesPage{ - Total: uint64(len(dataMsgs)), - Messages: fromSenml(dataMsgs[0:limit]), - }, - }, - { - desc: "read message with data value and lower-than comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - DataValue: vd + string(rune(1)), - Comparator: readers.LowerThanKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(dataMsgs)), - Messages: fromSenml(dataMsgs[0:limit]), - }, - }, - { - desc: "read message with data value and lower-than-or-equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - DataValue: vd + string(rune(1)), - Comparator: readers.LowerThanEqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(dataMsgs)), - Messages: fromSenml(dataMsgs[0:limit]), - }, - }, - { - desc: "read message with data value and greater-than comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - DataValue: vd[:len(vd)-1], - Comparator: readers.GreaterThanKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(dataMsgs)), - Messages: fromSenml(dataMsgs[0:limit]), - }, - }, - { - desc: "read message with data value and greater-than-or-equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - DataValue: vd[:len(vd)-1], - Comparator: readers.GreaterThanEqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(dataMsgs)), - Messages: fromSenml(dataMsgs[0:limit]), - }, - }, - { - desc: "read message with from", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: uint64(len(messages[0:21])), - From: messages[20].Time, - }, - page: readers.MessagesPage{ - Total: uint64(len(messages[0:21])), - Messages: fromSenml(messages[0:21]), - }, - }, - { - desc: "read message with to", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: uint64(len(messages[21:])), - To: messages[20].Time, - }, - page: readers.MessagesPage{ - Total: uint64(len(messages[21:])), - Messages: fromSenml(messages[21:]), - }, - }, - { - desc: "read message with from/to", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - From: messages[5].Time, - To: messages[0].Time, - }, - page: readers.MessagesPage{ - Total: 5, - Messages: fromSenml(messages[1:6]), - }, - }, - } - - for _, tc := range cases { - result, err := reader.ReadAll(tc.chanID, tc.pageMeta) - assert.Nil(t, err, fmt.Sprintf("%s: expected no error got %s", tc.desc, err)) - assert.ElementsMatch(t, tc.page.Messages, result.Messages, fmt.Sprintf("%s: got incorrect list of senml Messages from ReadAll()", tc.desc)) - assert.Equal(t, tc.page.Total, result.Total, fmt.Sprintf("%s: expected %v got %v", tc.desc, tc.page.Total, result.Total)) - } -} - -func TestReadJSON(t *testing.T) { - writer := pwriter.New(db) - - id1 := testsutil.GenerateUUID(t) - m := json.Message{ - Channel: id1, - Publisher: id1, - Created: time.Now().Unix(), - Subtopic: "subtopic/format/some_json", - Protocol: "coap", - Payload: map[string]interface{}{ - "field_1": 123.0, - "field_2": 
"value", - "field_3": false, - "field_4": 12.344, - "field_5": map[string]interface{}{ - "field_1": "value", - "field_2": 42.0, - }, - }, - } - messages1 := json.Messages{ - Format: format1, - } - msgs1 := []map[string]interface{}{} - for i := 0; i < msgsNum; i++ { - msg := m - messages1.Data = append(messages1.Data, msg) - m := toMap(msg) - msgs1 = append(msgs1, m) - } - - err := writer.ConsumeBlocking(context.TODO(), messages1) - require.Nil(t, err, fmt.Sprintf("expected no error got %s\n", err)) - - id2 := testsutil.GenerateUUID(t) - m = json.Message{ - Channel: id2, - Publisher: id2, - Created: time.Now().Unix(), - Subtopic: "subtopic/other_format/some_other_json", - Protocol: "udp", - Payload: map[string]interface{}{ - "field_1": "other_value", - "false_value": false, - "field_pi": 3.14159265, - }, - } - messages2 := json.Messages{ - Format: format2, - } - msgs2 := []map[string]interface{}{} - for i := 0; i < msgsNum; i++ { - msg := m - if i%2 == 0 { - msg.Protocol = httpProt - } - messages2.Data = append(messages2.Data, msg) - m := toMap(msg) - msgs2 = append(msgs2, m) - } - - err = writer.ConsumeBlocking(context.TODO(), messages2) - require.Nil(t, err, fmt.Sprintf("expected no error got %s\n", err)) - - httpMsgs := []map[string]interface{}{} - for i := 0; i < msgsNum; i += 2 { - httpMsgs = append(httpMsgs, msgs2[i]) - } - - reader := preader.New(db) - - cases := map[string]struct { - chanID string - pageMeta readers.PageMetadata - page readers.MessagesPage - }{ - "read message page for existing channel": { - chanID: id1, - pageMeta: readers.PageMetadata{ - Format: messages1.Format, - Offset: 0, - Limit: 10, - }, - page: readers.MessagesPage{ - Total: 100, - Messages: fromJSON(msgs1[:10]), - }, - }, - "read message page for non-existent channel": { - chanID: wrongID, - pageMeta: readers.PageMetadata{ - Format: messages1.Format, - Offset: 0, - Limit: 10, - }, - page: readers.MessagesPage{ - Messages: []readers.Message{}, - }, - }, - "read message last page": { - chanID: id2, - pageMeta: readers.PageMetadata{ - Format: messages2.Format, - Offset: msgsNum - 20, - Limit: msgsNum, - }, - page: readers.MessagesPage{ - Total: msgsNum, - Messages: fromJSON(msgs2[msgsNum-20 : msgsNum]), - }, - }, - "read message with protocol": { - chanID: id2, - pageMeta: readers.PageMetadata{ - Format: messages2.Format, - Offset: 0, - Limit: uint64(msgsNum / 2), - Protocol: httpProt, - }, - page: readers.MessagesPage{ - Total: uint64(msgsNum / 2), - Messages: fromJSON(httpMsgs), - }, - }, - } - - for desc, tc := range cases { - result, err := reader.ReadAll(tc.chanID, tc.pageMeta) - for i := 0; i < len(result.Messages); i++ { - m := result.Messages[i] - // Remove id as it is not sent by the client. 
- delete(m.(map[string]interface{}), "id") - result.Messages[i] = m - } - assert.Nil(t, err, fmt.Sprintf("%s: expected no error got %s", desc, err)) - assert.ElementsMatch(t, tc.page.Messages, result.Messages, fmt.Sprintf("%s: got incorrect list of json Messages from ReadAll()", desc)) - assert.Equal(t, tc.page.Total, result.Total, fmt.Sprintf("%s: expected %v got %v", desc, tc.page.Total, result.Total)) - } -} - -func fromSenml(msg []senml.Message) []readers.Message { - var ret []readers.Message - for _, m := range msg { - ret = append(ret, m) - } - return ret -} - -func fromJSON(msg []map[string]interface{}) []readers.Message { - var ret []readers.Message - for _, m := range msg { - ret = append(ret, m) - } - return ret -} - -func toMap(msg json.Message) map[string]interface{} { - return map[string]interface{}{ - "channel": msg.Channel, - "created": msg.Created, - "subtopic": msg.Subtopic, - "publisher": msg.Publisher, - "protocol": msg.Protocol, - "payload": map[string]interface{}(msg.Payload), - } -} diff --git a/readers/postgres/setup_test.go b/readers/postgres/setup_test.go deleted file mode 100644 index 4636f6a281..0000000000 --- a/readers/postgres/setup_test.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -// Package postgres_test contains tests for PostgreSQL repository -// implementations. -package postgres_test - -import ( - "fmt" - "log" - "os" - "testing" - - "github.com/absmach/supermq/readers/postgres" - _ "github.com/jackc/pgx/v5/stdlib" // required for SQL access - "github.com/jmoiron/sqlx" - "github.com/ory/dockertest/v3" - "github.com/ory/dockertest/v3/docker" -) - -var db *sqlx.DB - -func TestMain(m *testing.M) { - pool, err := dockertest.NewPool("") - if err != nil { - log.Fatalf("Could not connect to docker: %s", err) - } - container, err := pool.RunWithOptions(&dockertest.RunOptions{ - Repository: "postgres", - Tag: "16.2-alpine", - Env: []string{ - "POSTGRES_USER=test", - "POSTGRES_PASSWORD=test", - "POSTGRES_DB=test", - "listen_addresses = '*'", - }, - }, func(config *docker.HostConfig) { - config.AutoRemove = true - config.RestartPolicy = docker.RestartPolicy{Name: "no"} - }) - if err != nil { - log.Fatalf("Could not start container: %s", err) - } - - port := container.GetPort("5432/tcp") - url := fmt.Sprintf("host=localhost port=%s user=test dbname=test password=test sslmode=disable", port) - - if err = pool.Retry(func() error { - db, err = sqlx.Open("pgx", url) - if err != nil { - return err - } - return db.Ping() - }); err != nil { - log.Fatalf("Could not connect to docker: %s", err) - } - - dbConfig := postgres.Config{ - Host: "localhost", - Port: port, - User: "test", - Pass: "test", - Name: "test", - SSLMode: "disable", - SSLCert: "", - SSLKey: "", - SSLRootCert: "", - } - - if db, err = postgres.Connect(dbConfig); err != nil { - log.Fatalf("Could not setup test DB connection: %s", err) - } - - code := m.Run() - - // Defers will not be run when using os.Exit - db.Close() - if err = pool.Purge(container); err != nil { - log.Fatalf("Could not purge container: %s", err) - } - - os.Exit(code) -} diff --git a/readers/timescale/README.md b/readers/timescale/README.md deleted file mode 100644 index 193d4ec15f..0000000000 --- a/readers/timescale/README.md +++ /dev/null @@ -1,99 +0,0 @@ -# Timescale reader - -Timescale reader provides message repository implementation for Timescale. - -## Configuration - -The service is configured using the environment variables presented in the -following table. 
Note that any unset variables will be replaced with their -default values. - -| Variable | Description | Default | -| ------------------------------------- | -------------------------------------------- | ---------------------------- | -| SMQ_TIMESCALE_READER_LOG_LEVEL | Service log level | info | -| SMQ_TIMESCALE_READER_HTTP_HOST | Service HTTP host | localhost | -| SMQ_TIMESCALE_READER_HTTP_PORT | Service HTTP port | 8180 | -| SMQ_TIMESCALE_READER_HTTP_SERVER_CERT | Service HTTP server certificate path | "" | -| SMQ_TIMESCALE_READER_HTTP_SERVER_KEY | Service HTTP server key path | "" | -| SMQ_TIMESCALE_HOST | Timescale DB host | localhost | -| SMQ_TIMESCALE_PORT | Timescale DB port | 5432 | -| SMQ_TIMESCALE_USER | Timescale user | supermq | -| SMQ_TIMESCALE_PASS | Timescale password | supermq | -| SMQ_TIMESCALE_NAME | Timescale database name | messages | -| SMQ_TIMESCALE_SSL_MODE | Timescale SSL mode | disabled | -| SMQ_TIMESCALE_SSL_CERT | Timescale SSL certificate path | "" | -| SMQ_TIMESCALE_SSL_KEY | Timescale SSL key | "" | -| SMQ_TIMESCALE_SSL_ROOT_CERT | Timescale SSL root certificate path | "" | -| SMQ_CLIENTS_AUTH_GRPC_URL | Clients service Auth gRPC URL | localhost:7000 | -| SMQ_CLIENTS_AUTH_GRPC_TIMEOUT | Clients service Auth gRPC timeout in seconds | 1s | -| SMQ_CLIENTS_AUTH_GRPC_CLIENT_TLS | Clients service Auth gRPC TLS enabled flag | false | -| SMQ_CLIENTS_AUTH_GRPC_CA_CERTS | Clients service Auth gRPC CA certificates | "" | -| SMQ_AUTH_GRPC_URL | Auth service gRPC URL | localhost:7001 | -| SMQ_AUTH_GRPC_TIMEOUT | Auth service gRPC timeout in seconds | 1s | -| SMQ_AUTH_GRPC_CLIENT_TLS | Auth service gRPC TLS enabled flag | false | -| SMQ_AUTH_GRPC_CA_CERT | Auth service gRPC CA certificate | "" | -| SMQ_JAEGER_URL | Jaeger server URL | http://jaeger:4318/v1/traces | -| SMQ_SEND_TELEMETRY | Send telemetry to supermq call home server | true | -| SMQ_TIMESCALE_READER_INSTANCE_ID | Timescale reader instance ID | "" | - -## Deployment - -The service itself is distributed as Docker container. Check the [`timescale-reader`](https://github.com/absmach/supermq/blob/main/docker/addons/timescale-reader/docker-compose.yml#L17-L41) service section in docker-compose file to see how service is deployed. 
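
> Editor's note: as a quick orientation for the configuration table above, the sketch below wires the `SMQ_TIMESCALE_*` variables into the reader packages that this diff removes (`timescale.Config`, `Connect`, `New`, and `ReadAll`, all shown further down in the deleted sources). It is a minimal, hypothetical example: it assumes the pre-removal import paths are still available, the channel ID is a placeholder, and the `StringValue`/`Comparator` pair only illustrates the substring matching described in the comparator guide in the Usage section below.

```go
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/absmach/supermq/readers"
	"github.com/absmach/supermq/readers/timescale"
)

// getenv falls back to a default when the variable is unset, mirroring the
// defaults listed in the configuration table.
func getenv(key, def string) string {
	if v := os.Getenv(key); v != "" {
		return v
	}
	return def
}

func main() {
	// Database settings from the SMQ_TIMESCALE_* variables.
	cfg := timescale.Config{
		Host:    getenv("SMQ_TIMESCALE_HOST", "localhost"),
		Port:    getenv("SMQ_TIMESCALE_PORT", "5432"),
		User:    getenv("SMQ_TIMESCALE_USER", "supermq"),
		Pass:    getenv("SMQ_TIMESCALE_PASS", "supermq"),
		Name:    getenv("SMQ_TIMESCALE_NAME", "messages"),
		SSLMode: getenv("SMQ_TIMESCALE_SSL_MODE", "disable"),
	}

	// Connect also applies the reader's schema migrations.
	db, err := timescale.Connect(cfg)
	if err != nil {
		log.Fatalf("failed to connect to Timescale: %s", err)
	}
	defer db.Close()

	repo := timescale.New(db)

	// Read the first page of SenML messages for a channel; "ge" on a string
	// value returns stored values that contain the query.
	page, err := repo.ReadAll("<channel-id>", readers.PageMetadata{
		Offset:      0,
		Limit:       10,
		StringValue: "tiv",
		Comparator:  readers.GreaterThanEqualKey,
	})
	if err != nil {
		log.Fatalf("failed to read messages: %s", err)
	}
	fmt.Printf("total: %d, returned: %d\n", page.Total, len(page.Messages))
}
```
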
-
-To start the service, execute the following shell script:
-
-```bash
-# download the latest version of the service
-git clone https://github.com/absmach/supermq
-
-cd supermq
-
-# compile the timescale reader
-make timescale-reader
-
-# copy binary to bin
-make install
-
-# Set the environment variables and run the service
-SMQ_TIMESCALE_READER_LOG_LEVEL=[Service log level] \
-SMQ_TIMESCALE_READER_HTTP_HOST=[Service HTTP host] \
-SMQ_TIMESCALE_READER_HTTP_PORT=[Service HTTP port] \
-SMQ_TIMESCALE_READER_HTTP_SERVER_CERT=[Service HTTP server cert] \
-SMQ_TIMESCALE_READER_HTTP_SERVER_KEY=[Service HTTP server key] \
-SMQ_TIMESCALE_HOST=[Timescale host] \
-SMQ_TIMESCALE_PORT=[Timescale port] \
-SMQ_TIMESCALE_USER=[Timescale user] \
-SMQ_TIMESCALE_PASS=[Timescale password] \
-SMQ_TIMESCALE_NAME=[Timescale database name] \
-SMQ_TIMESCALE_SSL_MODE=[Timescale SSL mode] \
-SMQ_TIMESCALE_SSL_CERT=[Timescale SSL cert] \
-SMQ_TIMESCALE_SSL_KEY=[Timescale SSL key] \
-SMQ_TIMESCALE_SSL_ROOT_CERT=[Timescale SSL Root cert] \
-SMQ_CLIENTS_AUTH_GRPC_URL=[Clients service Auth GRPC URL] \
-SMQ_CLIENTS_AUTH_GRPC_TIMEOUT=[Clients service Auth gRPC request timeout in seconds] \
-SMQ_CLIENTS_AUTH_GRPC_CLIENT_TLS=[Clients service Auth gRPC TLS enabled flag] \
-SMQ_CLIENTS_AUTH_GRPC_CA_CERTS=[Clients service Auth gRPC CA certificates] \
-SMQ_AUTH_GRPC_URL=[Auth service Auth gRPC URL] \
-SMQ_AUTH_GRPC_TIMEOUT=[Auth service Auth gRPC request timeout in seconds] \
-SMQ_AUTH_GRPC_CLIENT_TLS=[Auth service Auth gRPC TLS enabled flag] \
-SMQ_AUTH_GRPC_CA_CERT=[Auth service Auth gRPC CA certificates] \
-SMQ_JAEGER_URL=[Jaeger server URL] \
-SMQ_SEND_TELEMETRY=[Send telemetry to supermq call home server] \
-SMQ_TIMESCALE_READER_INSTANCE_ID=[Timescale reader instance ID] \
-$GOBIN/supermq-timescale-reader
-```
-
-## Usage
-
-Starting the service exposes the HTTP API for reading stored messages in SenML format.
-
-Comparator Usage Guide:
-| Comparator | Usage                                                                                   | Example                            |
-| ---------- | --------------------------------------------------------------------------------------- | ---------------------------------- |
-| eq         | Return values that are equal to the query                                                | eq["active"] -> "active"           |
-| ge         | Return values that contain the query (superstrings), including the query itself          | ge["tiv"] -> "active" and "tiv"    |
-| gt         | Return values that contain the query and are not equal to it                             | gt["tiv"] -> "active"              |
-| le         | Return values that are contained in the query (substrings), including the query itself   | le["active"] -> "active" and "tiv" |
-| lt         | Return values that are contained in the query and not equal to it                        | lt["active"] -> "tiv"              |
-
-Official docs can be found [here](https://docs.supermq.abstractmachines.fr).
diff --git a/readers/timescale/doc.go b/readers/timescale/doc.go
deleted file mode 100644
index 302be6ea5d..0000000000
--- a/readers/timescale/doc.go
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright (c) Abstract Machines
-// SPDX-License-Identifier: Apache-2.0
-
-// Package timescale contains repository implementations using Timescale as
-// the underlying database.
-package timescale
diff --git a/readers/timescale/init.go b/readers/timescale/init.go
deleted file mode 100644
index 9513df15f2..0000000000
--- a/readers/timescale/init.go
+++ /dev/null
@@ -1,80 +0,0 @@
-// Copyright (c) Abstract Machines
-// SPDX-License-Identifier: Apache-2.0
-
-package timescale
-
-import (
-	"fmt"
-
-	"github.com/jmoiron/sqlx"
-	migrate "github.com/rubenv/sql-migrate"
-)
-
-// Table for SenML messages.
-const defTable = "messages" - -// Config defines the options that are used when connecting to a TimescaleSQL instance. -type Config struct { - Host string - Port string - User string - Pass string - Name string - SSLMode string - SSLCert string - SSLKey string - SSLRootCert string -} - -// Connect creates a connection to the TimescaleSQL instance and applies any -// unapplied database migrations. A non-nil error is returned to indicate -// failure. -func Connect(cfg Config) (*sqlx.DB, error) { - url := fmt.Sprintf("host=%s port=%s user=%s dbname=%s password=%s sslmode=%s sslcert=%s sslkey=%s sslrootcert=%s", cfg.Host, cfg.Port, cfg.User, cfg.Name, cfg.Pass, cfg.SSLMode, cfg.SSLCert, cfg.SSLKey, cfg.SSLRootCert) - - db, err := sqlx.Open("pgx", url) - if err != nil { - return nil, err - } - - if err := migrateDB(db); err != nil { - return nil, err - } - - return db, nil -} - -func migrateDB(db *sqlx.DB) error { - migrations := &migrate.MemoryMigrationSource{ - Migrations: []*migrate.Migration{ - { - Id: "messages_1", - Up: []string{ - `CREATE TABLE IF NOT EXISTS messages ( - time BIGINT NOT NULL, - channel UUID, - subtopic VARCHAR(254), - publisher UUID, - protocol TEXT, - name VARCHAR(254), - unit TEXT, - value FLOAT, - string_value TEXT, - bool_value BOOL, - data_value BYTEA, - sum FLOAT, - update_time FLOAT, - PRIMARY KEY (time, publisher, subtopic, name) - ); - SELECT create_hypertable('messages', 'time', create_default_indexes => FALSE, chunk_time_interval => 86400000, if_not_exists => TRUE);`, - }, - Down: []string{ - "DROP TABLE messages", - }, - }, - }, - } - - _, err := migrate.Exec(db.DB, "postgres", migrations, migrate.Up) - return err -} diff --git a/readers/timescale/messages.go b/readers/timescale/messages.go deleted file mode 100644 index 94c96bdd58..0000000000 --- a/readers/timescale/messages.go +++ /dev/null @@ -1,204 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -package timescale - -import ( - "encoding/json" - "fmt" - - "github.com/absmach/supermq/pkg/errors" - "github.com/absmach/supermq/pkg/transformers/senml" - "github.com/absmach/supermq/readers" - "github.com/jackc/pgerrcode" - "github.com/jackc/pgx/v5/pgconn" - "github.com/jmoiron/sqlx" // required for DB access -) - -var _ readers.MessageRepository = (*timescaleRepository)(nil) - -type timescaleRepository struct { - db *sqlx.DB -} - -// New returns new TimescaleSQL writer. 
-func New(db *sqlx.DB) readers.MessageRepository {
-	return &timescaleRepository{
-		db: db,
-	}
-}
-
-func (tr timescaleRepository) ReadAll(chanID string, rpm readers.PageMetadata) (readers.MessagesPage, error) {
-	order := "time"
-	format := defTable
-
-	if rpm.Format != "" && rpm.Format != defTable {
-		order = "created"
-		format = rpm.Format
-	}
-
-	q := fmt.Sprintf(`SELECT * FROM %s WHERE %s ORDER BY %s DESC LIMIT :limit OFFSET :offset;`, format, fmtCondition(rpm), order)
-	totalQuery := fmt.Sprintf(`SELECT COUNT(*) FROM %s WHERE %s;`, format, fmtCondition(rpm))
-
-	// If aggregation is provided, add time_bucket and aggregation to the query
-	const timeDivisor = 1000000000
-
-	if rpm.Aggregation != "" {
-		q = fmt.Sprintf(`SELECT EXTRACT(epoch FROM time_bucket('%s', to_timestamp(time/%d))) *%d AS time, %s(value) AS value, FIRST(publisher, time) AS publisher, FIRST(protocol, time) AS protocol, FIRST(subtopic, time) AS subtopic, FIRST(name,time) AS name, FIRST(unit, time) AS unit FROM %s WHERE %s GROUP BY 1 ORDER BY time DESC LIMIT :limit OFFSET :offset;`, rpm.Interval, timeDivisor, timeDivisor, rpm.Aggregation, format, fmtCondition(rpm))
-
-		totalQuery = fmt.Sprintf(`SELECT COUNT(*) FROM (SELECT EXTRACT(epoch FROM time_bucket('%s', to_timestamp(time/%d))) AS time, %s(value) AS value FROM %s WHERE %s GROUP BY 1) AS subquery;`, rpm.Interval, timeDivisor, rpm.Aggregation, format, fmtCondition(rpm))
-	}
-
-	params := map[string]interface{}{
-		"channel":      chanID,
-		"limit":        rpm.Limit,
-		"offset":       rpm.Offset,
-		"subtopic":     rpm.Subtopic,
-		"publisher":    rpm.Publisher,
-		"name":         rpm.Name,
-		"protocol":     rpm.Protocol,
-		"value":        rpm.Value,
-		"bool_value":   rpm.BoolValue,
-		"string_value": rpm.StringValue,
-		"data_value":   rpm.DataValue,
-		"from":         rpm.From,
-		"to":           rpm.To,
-	}
-
-	rows, err := tr.db.NamedQuery(q, params)
-	if err != nil {
-		if pgErr, ok := err.(*pgconn.PgError); ok {
-			if pgErr.Code == pgerrcode.UndefinedTable {
-				return readers.MessagesPage{}, nil
-			}
-		}
-		return readers.MessagesPage{}, errors.Wrap(readers.ErrReadMessages, err)
-	}
-	defer rows.Close()
-
-	page := readers.MessagesPage{
-		PageMetadata: rpm,
-		Messages:     []readers.Message{},
-	}
-	switch format {
-	case defTable:
-		for rows.Next() {
-			msg := senmlMessage{Message: senml.Message{}}
-			if err := rows.StructScan(&msg); err != nil {
-				return readers.MessagesPage{}, errors.Wrap(readers.ErrReadMessages, err)
-			}
-
-			page.Messages = append(page.Messages, msg.Message)
-		}
-	default:
-		for rows.Next() {
-			msg := jsonMessage{}
-			if err := rows.StructScan(&msg); err != nil {
-				return readers.MessagesPage{}, errors.Wrap(readers.ErrReadMessages, err)
-			}
-			m, err := msg.toMap()
-			if err != nil {
-				return readers.MessagesPage{}, errors.Wrap(readers.ErrReadMessages, err)
-			}
-			page.Messages = append(page.Messages, m)
-		}
-	}
-
-	rows, err = tr.db.NamedQuery(totalQuery, params)
-	if err != nil {
-		return readers.MessagesPage{}, errors.Wrap(readers.ErrReadMessages, err)
-	}
-	defer rows.Close()
-
-	total := uint64(0)
-	if rows.Next() {
-		if err := rows.Scan(&total); err != nil {
-			return page, err
-		}
-	}
-	page.Total = total
-
-	return page, nil
-}
-
-func fmtCondition(rpm readers.PageMetadata) string {
-	condition := `channel = :channel`
-
-	var query map[string]interface{}
-	meta, err := json.Marshal(rpm)
-	if err != nil {
-		return condition
-	}
-	if err := json.Unmarshal(meta, &query); err != nil {
-		return condition
-	}
-
-	for name := range query {
-		switch name {
-		case
-			"subtopic",
-			"publisher",
-			"name",
-			"protocol":
-			condition =
fmt.Sprintf(`%s AND %s = :%s`, condition, name, name) - case "v": - comparator := readers.ParseValueComparator(query) - condition = fmt.Sprintf(`%s AND value %s :value`, condition, comparator) - case "vb": - condition = fmt.Sprintf(`%s AND bool_value = :bool_value`, condition) - case "vs": - comparator := readers.ParseValueComparator(query) - switch comparator { - case "=": - condition = fmt.Sprintf("%s AND string_value = :string_value ", condition) - case ">": - condition = fmt.Sprintf("%s AND string_value LIKE '%%' || :string_value || '%%' AND string_value <> :string_value", condition) - case ">=": - condition = fmt.Sprintf("%s AND string_value LIKE '%%' || :string_value || '%%'", condition) - case "<=": - condition = fmt.Sprintf("%s AND :string_value LIKE '%%' || string_value || '%%'", condition) - case "<": - condition = fmt.Sprintf("%s AND :string_value LIKE '%%' || string_value || '%%' AND string_value <> :string_value", condition) - } - case "vd": - comparator := readers.ParseValueComparator(query) - condition = fmt.Sprintf(`%s AND data_value %s :data_value`, condition, comparator) - case "from": - condition = fmt.Sprintf(`%s AND time >= :from`, condition) - case "to": - condition = fmt.Sprintf(`%s AND time < :to`, condition) - } - } - return condition -} - -type senmlMessage struct { - ID string `db:"id"` - senml.Message -} - -type jsonMessage struct { - Channel string `db:"channel"` - Created int64 `db:"created"` - Subtopic string `db:"subtopic"` - Publisher string `db:"publisher"` - Protocol string `db:"protocol"` - Payload []byte `db:"payload"` -} - -func (msg jsonMessage) toMap() (map[string]interface{}, error) { - ret := map[string]interface{}{ - "channel": msg.Channel, - "created": msg.Created, - "subtopic": msg.Subtopic, - "publisher": msg.Publisher, - "protocol": msg.Protocol, - "payload": map[string]interface{}{}, - } - pld := make(map[string]interface{}) - if err := json.Unmarshal(msg.Payload, &pld); err != nil { - return nil, err - } - ret["payload"] = pld - return ret, nil -} diff --git a/readers/timescale/messages_test.go b/readers/timescale/messages_test.go deleted file mode 100644 index b7b8d0c802..0000000000 --- a/readers/timescale/messages_test.go +++ /dev/null @@ -1,810 +0,0 @@ -// Copyright (c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -package timescale_test - -import ( - "context" - "fmt" - "testing" - "time" - - twriter "github.com/absmach/supermq/consumers/writers/timescale" - "github.com/absmach/supermq/internal/testsutil" - "github.com/absmach/supermq/pkg/transformers/json" - "github.com/absmach/supermq/pkg/transformers/senml" - "github.com/absmach/supermq/readers" - treader "github.com/absmach/supermq/readers/timescale" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -const ( - subtopic = "subtopic" - msgsNum = 100 - limit = 10 - valueFields = 5 - mqttProt = "mqtt" - httpProt = "http" - msgName = "temperature" - format1 = "format1" - format2 = "format2" - wrongID = "0" -) - -var ( - v float64 = 5 - vs = "stringValue" - vb = true - vd = "dataValue" - sum float64 = 42 -) - -func TestReadSenml(t *testing.T) { - writer := twriter.New(db) - - chanID := testsutil.GenerateUUID(t) - pubID := testsutil.GenerateUUID(t) - pubID2 := testsutil.GenerateUUID(t) - wrongID := testsutil.GenerateUUID(t) - - m := senml.Message{ - Channel: chanID, - Publisher: pubID, - Protocol: mqttProt, - } - - messages := []senml.Message{} - valueMsgs := []senml.Message{} - boolMsgs := []senml.Message{} - stringMsgs := []senml.Message{} 
- dataMsgs := []senml.Message{} - queryMsgs := []senml.Message{} - - now := float64(time.Now().Unix()) - for i := 0; i < msgsNum; i++ { - // Mix possible values as well as value sum. - msg := m - msg.Time = now - float64(i) - - count := i % valueFields - switch count { - case 0: - msg.Value = &v - valueMsgs = append(valueMsgs, msg) - case 1: - msg.BoolValue = &vb - boolMsgs = append(boolMsgs, msg) - case 2: - msg.StringValue = &vs - stringMsgs = append(stringMsgs, msg) - case 3: - msg.DataValue = &vd - dataMsgs = append(dataMsgs, msg) - case 4: - msg.Sum = &sum - msg.Subtopic = subtopic - msg.Protocol = httpProt - msg.Publisher = pubID2 - msg.Name = msgName - queryMsgs = append(queryMsgs, msg) - } - - messages = append(messages, msg) - } - - err := writer.ConsumeBlocking(context.TODO(), messages) - require.Nil(t, err, fmt.Sprintf("expected no error got %s\n", err)) - - reader := treader.New(db) - - // Since messages are not saved in natural order, - // cases that return subset of messages are only - // checking data result set size, but not content. - cases := []struct { - desc string - chanID string - pageMeta readers.PageMetadata - page readers.MessagesPage - }{ - { - desc: "read message page for existing channel", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: msgsNum, - }, - page: readers.MessagesPage{ - Total: msgsNum, - Messages: fromSenml(messages), - }, - }, - { - desc: "read message page for non-existent channel", - chanID: wrongID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: msgsNum, - }, - page: readers.MessagesPage{ - Messages: []readers.Message{}, - }, - }, - { - desc: "read message last page", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: msgsNum - 20, - Limit: msgsNum, - }, - page: readers.MessagesPage{ - Total: msgsNum, - Messages: fromSenml(messages[msgsNum-20 : msgsNum]), - }, - }, - { - desc: "read message with non-existent subtopic", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: msgsNum, - Subtopic: "not-present", - }, - page: readers.MessagesPage{ - Messages: []readers.Message{}, - }, - }, - { - desc: "read message with subtopic", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: uint64(len(queryMsgs)), - Subtopic: subtopic, - }, - page: readers.MessagesPage{ - Total: uint64(len(queryMsgs)), - Messages: fromSenml(queryMsgs), - }, - }, - { - desc: "read message with publisher", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: uint64(len(queryMsgs)), - Publisher: pubID2, - }, - page: readers.MessagesPage{ - Total: uint64(len(queryMsgs)), - Messages: fromSenml(queryMsgs), - }, - }, - { - desc: "read message with wrong format", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Format: "messagess", - Offset: 0, - Limit: uint64(len(queryMsgs)), - Publisher: pubID2, - }, - page: readers.MessagesPage{ - Total: 0, - Messages: []readers.Message{}, - }, - }, - { - desc: "read message with protocol", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: uint64(len(queryMsgs)), - Protocol: httpProt, - }, - page: readers.MessagesPage{ - Total: uint64(len(queryMsgs)), - Messages: fromSenml(queryMsgs), - }, - }, - { - desc: "read message with name", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - Name: msgName, - }, - page: readers.MessagesPage{ - Total: uint64(len(queryMsgs)), - Messages: fromSenml(queryMsgs[0:limit]), - }, - }, - { - desc: "read message with value", - chanID: chanID, - pageMeta: 
readers.PageMetadata{ - Offset: 0, - Limit: limit, - Value: v, - }, - page: readers.MessagesPage{ - Total: uint64(len(valueMsgs)), - Messages: fromSenml(valueMsgs[0:limit]), - }, - }, - { - desc: "read message with value and equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - Value: v, - Comparator: readers.EqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(valueMsgs)), - Messages: fromSenml(valueMsgs[0:limit]), - }, - }, - { - desc: "read message with value and lower-than comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - Value: v + 1, - Comparator: readers.LowerThanKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(valueMsgs)), - Messages: fromSenml(valueMsgs[0:limit]), - }, - }, - { - desc: "read message with value and lower-than-or-equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - Value: v + 1, - Comparator: readers.LowerThanEqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(valueMsgs)), - Messages: fromSenml(valueMsgs[0:limit]), - }, - }, - { - desc: "read message with value and greater-than comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - Value: v - 1, - Comparator: readers.GreaterThanKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(valueMsgs)), - Messages: fromSenml(valueMsgs[0:limit]), - }, - }, - { - desc: "read message with value and greater-than-or-equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - Value: v - 1, - Comparator: readers.GreaterThanEqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(valueMsgs)), - Messages: fromSenml(valueMsgs[0:limit]), - }, - }, - { - desc: "read message with boolean value", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - BoolValue: vb, - }, - page: readers.MessagesPage{ - Total: uint64(len(boolMsgs)), - Messages: fromSenml(boolMsgs[0:limit]), - }, - }, - { - desc: "read message with string value", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - StringValue: vs, - }, - page: readers.MessagesPage{ - Total: uint64(len(stringMsgs)), - Messages: fromSenml(stringMsgs[0:limit]), - }, - }, - { - desc: "read message with string value and equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - StringValue: vs, - Comparator: readers.EqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(stringMsgs)), - Messages: fromSenml(stringMsgs[0:limit]), - }, - }, - { - desc: "read message with string value and lower-than comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - StringValue: "a stringValues b", - Comparator: readers.LowerThanKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(stringMsgs)), - Messages: fromSenml(stringMsgs[0:limit]), - }, - }, - { - desc: "read message with string value and lower-than-or-equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - StringValue: vs, - Comparator: readers.LowerThanEqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(stringMsgs)), - Messages: fromSenml(stringMsgs[0:limit]), - }, - }, - { - desc: "read message with string value and greater-than comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - StringValue: "alu", - Comparator: 
readers.GreaterThanKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(stringMsgs)), - Messages: fromSenml(stringMsgs[0:limit]), - }, - }, - { - desc: "read message with string value and greater-than-or-equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - StringValue: vs, - Comparator: readers.GreaterThanEqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(stringMsgs)), - Messages: fromSenml(stringMsgs[0:limit]), - }, - }, - { - desc: "read message with data value", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - DataValue: vd, - }, - page: readers.MessagesPage{ - Total: uint64(len(dataMsgs)), - Messages: fromSenml(dataMsgs[0:limit]), - }, - }, - { - desc: "read message with data value and lower-than comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - DataValue: vd + string(rune(1)), - Comparator: readers.LowerThanKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(dataMsgs)), - Messages: fromSenml(dataMsgs[0:limit]), - }, - }, - { - desc: "read message with data value and lower-than-or-equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - DataValue: vd + string(rune(1)), - Comparator: readers.LowerThanEqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(dataMsgs)), - Messages: fromSenml(dataMsgs[0:limit]), - }, - }, - { - desc: "read message with data value and greater-than comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - DataValue: vd[:len(vd)-1] + string(rune(1)), - Comparator: readers.GreaterThanKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(dataMsgs)), - Messages: fromSenml(dataMsgs[0:limit]), - }, - }, - { - desc: "read message with data value and greater-than-or-equal comparator", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - DataValue: vd[:len(vd)-1] + string(rune(1)), - Comparator: readers.GreaterThanEqualKey, - }, - page: readers.MessagesPage{ - Total: uint64(len(dataMsgs)), - Messages: fromSenml(dataMsgs[0:limit]), - }, - }, - { - desc: "read message with from", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: uint64(len(messages[0:21])), - From: messages[20].Time, - }, - page: readers.MessagesPage{ - Total: uint64(len(messages[0:21])), - Messages: fromSenml(messages[0:21]), - }, - }, - { - desc: "read message with to", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: uint64(len(messages[21:])), - To: messages[20].Time, - }, - page: readers.MessagesPage{ - Total: uint64(len(messages[21:])), - Messages: fromSenml(messages[21:]), - }, - }, - { - desc: "read message with from/to", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Offset: 0, - Limit: limit, - From: messages[5].Time, - To: messages[0].Time, - }, - page: readers.MessagesPage{ - Total: 5, - Messages: fromSenml(messages[1:6]), - }, - }, - } - - for _, tc := range cases { - result, err := reader.ReadAll(tc.chanID, tc.pageMeta) - assert.Nil(t, err, fmt.Sprintf("%s: expected no error got %s", tc.desc, err)) - assert.ElementsMatch(t, tc.page.Messages, result.Messages, fmt.Sprintf("%s: expected %v got %v", tc.desc, tc.page.Messages, result.Messages)) - assert.Equal(t, tc.page.Total, result.Total, fmt.Sprintf("%s: expected %v got %v", tc.desc, tc.page.Total, result.Total)) - } -} - -func TestReadMessagesWithAggregation(t *testing.T) { - writer := twriter.New(db) - - 
chanID := testsutil.GenerateUUID(t) - pubID := testsutil.GenerateUUID(t) - messages := []senml.Message{} - - now := float64(time.Now().UnixNano()) - value := 10.0 - for i := 0; i < 100; i++ { - if i%10 == 0 { - value += 10.0 - } - v := value - msg := senml.Message{ - Channel: chanID, - Publisher: pubID, - Time: now - float64(i*1000000000), // over 100 seconds - Value: &v, - Protocol: mqttProt, - } - messages = append(messages, msg) - } - - err := writer.ConsumeBlocking(context.TODO(), messages) - require.Nil(t, err, "expected no error got %s\n", err) - - reader := treader.New(db) - - // Set up cases for aggregation readAll - cases := []struct { - desc string - chanID string - pageMeta readers.PageMetadata - page readers.MessagesPage - }{ - { - desc: "read message page for existing channel with AVG aggregation over an hour", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Limit: 100, - Offset: 0, - Aggregation: "AVG", - Interval: "1 hour", - From: now - float64(100000000000), - To: now, - }, - page: readers.MessagesPage{ - Messages: fromSenml(messages), - }, - }, - { - desc: "read message page for existing channel with MAX aggregation over an hour", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Limit: 100, - Offset: 0, - Aggregation: "MAX", - Interval: "1 hour", - From: now - float64(100000000000), - To: now, - }, - page: readers.MessagesPage{ - Messages: fromSenml(messages), - }, - }, - { - desc: "read message page for existing channel with MIN aggregation over an hour", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Limit: 100, - Offset: 0, - Aggregation: "MIN", - Interval: "1 hour", - From: now - float64(100000000000), - To: now, - }, - page: readers.MessagesPage{ - Messages: fromSenml(messages), - }, - }, - { - desc: "read message page for existing channel with SUM aggregation over an hour", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Limit: 100, - Offset: 0, - Aggregation: "SUM", - Interval: "1 hour", - From: now - float64(100000000000), - To: now, - }, - page: readers.MessagesPage{ - Messages: fromSenml(messages), - }, - }, - { - desc: "read message page for existing channel with COUNT aggregation over an hour", - chanID: chanID, - pageMeta: readers.PageMetadata{ - Limit: 100, - Offset: 0, - Aggregation: "COUNT", - Interval: "1 hour", - From: now - float64(100000000000), - To: now, - }, - page: readers.MessagesPage{ - Messages: fromSenml(messages), - }, - }, - } - - for _, tc := range cases { - resultPage, err := reader.ReadAll(tc.chanID, tc.pageMeta) - assert.Nil(t, err, fmt.Sprintf("%s: expected no error got %s", tc.desc, err)) - assert.NotEmpty(t, resultPage.Messages, "expected non-empty result set") - for i := range resultPage.Messages { - msg, ok := resultPage.Messages[i].(senml.Message) - if ok && msg.Value != nil { - assert.GreaterOrEqual(t, *msg.Value, resultPage.Value, "expected aggregated value to be greater or equal to the expected value") - } - } - } -} - -func TestReadJSON(t *testing.T) { - writer := twriter.New(db) - - id1 := testsutil.GenerateUUID(t) - messages1 := json.Messages{ - Format: format1, - } - msgs1 := []map[string]interface{}{} - timeNow := time.Now().UnixMilli() - for i := 0; i < msgsNum; i++ { - m := json.Message{ - Channel: id1, - Publisher: id1, - Created: timeNow - int64(i), - Subtopic: "subtopic/format/some_json", - Protocol: "coap", - Payload: map[string]interface{}{ - "field_1": 123.0, - "field_2": "value", - "field_3": false, - "field_4": 12.344, - "field_5": map[string]interface{}{ - "field_1": "value", - "field_2": 
42.0, - }, - }, - } - - msg := m - messages1.Data = append(messages1.Data, msg) - mapped := toMap(msg) - msgs1 = append(msgs1, mapped) - } - - err := writer.ConsumeBlocking(context.TODO(), messages1) - require.Nil(t, err, fmt.Sprintf("expected no error got %s\n", err)) - - id2 := testsutil.GenerateUUID(t) - messages2 := json.Messages{ - Format: format2, - } - msgs2 := []map[string]interface{}{} - for i := 0; i < msgsNum; i++ { - m := json.Message{ - Channel: id2, - Publisher: id2, - Created: timeNow - int64(i), - Subtopic: "subtopic/other_format/some_other_json", - Protocol: "udp", - Payload: map[string]interface{}{ - "field_1": "other_value", - "false_value": false, - "field_pi": 3.14159265, - }, - } - - msg := m - if i%2 == 0 { - msg.Protocol = httpProt - } - messages2.Data = append(messages2.Data, msg) - mapped := toMap(msg) - msgs2 = append(msgs2, mapped) - } - - err = writer.ConsumeBlocking(context.TODO(), messages2) - require.Nil(t, err, fmt.Sprintf("expected no error got %s\n", err)) - - httpMsgs := []map[string]interface{}{} - for i := 0; i < msgsNum; i += 2 { - httpMsgs = append(httpMsgs, msgs2[i]) - } - - reader := treader.New(db) - - cases := map[string]struct { - chanID string - pageMeta readers.PageMetadata - page readers.MessagesPage - }{ - "read message page for existing channel": { - chanID: id1, - pageMeta: readers.PageMetadata{ - Format: messages1.Format, - Offset: 0, - Limit: 10, - }, - page: readers.MessagesPage{ - Total: 100, - Messages: fromJSON(msgs1[:10]), - }, - }, - "read message page for non-existent channel": { - chanID: wrongID, - pageMeta: readers.PageMetadata{ - Format: messages1.Format, - Offset: 0, - Limit: 10, - }, - page: readers.MessagesPage{ - Messages: []readers.Message{}, - }, - }, - "read message last page": { - chanID: id2, - pageMeta: readers.PageMetadata{ - Format: messages2.Format, - Offset: msgsNum - 20, - Limit: msgsNum, - }, - page: readers.MessagesPage{ - Total: msgsNum, - Messages: fromJSON(msgs2[msgsNum-20 : msgsNum]), - }, - }, - "read message with protocol": { - chanID: id2, - pageMeta: readers.PageMetadata{ - Format: messages2.Format, - Offset: 0, - Limit: uint64(msgsNum / 2), - Protocol: httpProt, - }, - page: readers.MessagesPage{ - Total: uint64(msgsNum / 2), - Messages: fromJSON(httpMsgs), - }, - }, - } - - for desc, tc := range cases { - result, err := reader.ReadAll(tc.chanID, tc.pageMeta) - assert.Nil(t, err, fmt.Sprintf("%s: expected no error got %s", desc, err)) - assert.ElementsMatch(t, tc.page.Messages, result.Messages, fmt.Sprintf("%s: got incorrect list of json Messages from ReadAll()", desc)) - assert.Equal(t, tc.page.Total, result.Total, fmt.Sprintf("%s: expected %v got %v", desc, tc.page.Total, result.Total)) - } -} - -func fromSenml(msg []senml.Message) []readers.Message { - var ret []readers.Message - for _, m := range msg { - ret = append(ret, m) - } - return ret -} - -func fromJSON(msg []map[string]interface{}) []readers.Message { - var ret []readers.Message - for _, m := range msg { - ret = append(ret, m) - } - return ret -} - -func toMap(msg json.Message) map[string]interface{} { - return map[string]interface{}{ - "channel": msg.Channel, - "created": msg.Created, - "subtopic": msg.Subtopic, - "publisher": msg.Publisher, - "protocol": msg.Protocol, - "payload": map[string]interface{}(msg.Payload), - } -} diff --git a/readers/timescale/setup_test.go b/readers/timescale/setup_test.go deleted file mode 100644 index 519a6e7172..0000000000 --- a/readers/timescale/setup_test.go +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright 
(c) Abstract Machines -// SPDX-License-Identifier: Apache-2.0 - -// Package timescale_test contains tests for PostgreSQL repository -// implementations. -package timescale_test - -import ( - "fmt" - "log" - "os" - "testing" - - "github.com/absmach/supermq/readers/timescale" - _ "github.com/jackc/pgx/v5/stdlib" // required for SQL access - "github.com/jmoiron/sqlx" - "github.com/ory/dockertest/v3" - "github.com/ory/dockertest/v3/docker" -) - -var db *sqlx.DB - -func TestMain(m *testing.M) { - pool, err := dockertest.NewPool("") - if err != nil { - log.Fatalf("Could not connect to docker: %s", err) - } - - container, err := pool.RunWithOptions(&dockertest.RunOptions{ - Repository: "timescale/timescaledb", - Tag: "2.13.1-pg16", - Env: []string{ - "POSTGRES_USER=test", - "POSTGRES_PASSWORD=test", - "POSTGRES_DB=test", - "listen_addresses = '*'", - }, - }, func(config *docker.HostConfig) { - config.AutoRemove = true - config.RestartPolicy = docker.RestartPolicy{Name: "no"} - }) - if err != nil { - log.Fatalf("Could not start container: %s", err) - } - - port := container.GetPort("5432/tcp") - url := fmt.Sprintf("host=localhost port=%s user=test dbname=test password=test sslmode=disable", port) - - if err = pool.Retry(func() error { - db, err = sqlx.Open("pgx", url) - if err != nil { - return err - } - return db.Ping() - }); err != nil { - log.Fatalf("Could not connect to docker: %s", err) - } - - dbConfig := timescale.Config{ - Host: "localhost", - Port: port, - User: "test", - Pass: "test", - Name: "test", - SSLMode: "disable", - SSLCert: "", - SSLKey: "", - SSLRootCert: "", - } - - if db, err = timescale.Connect(dbConfig); err != nil { - log.Fatalf("Could not setup test DB connection: %s", err) - } - - code := m.Run() - - // Defers will not be run when using os.Exit - db.Close() - if err = pool.Purge(container); err != nil { - log.Fatalf("Could not purge container: %s", err) - } - - os.Exit(code) -}
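
> Editor's note: one behavior of the removed reader that is easy to miss in the diff above is the aggregation path: when `PageMetadata.Aggregation` and `Interval` are set, `ReadAll` wraps the query in TimescaleDB's `time_bucket` and returns one row per bucket, which is what `TestReadMessagesWithAggregation` exercises. Below is a minimal, hypothetical sketch of driving that path, again assuming the pre-removal import paths and a placeholder channel ID.

```go
package main

import (
	"fmt"
	"log"
	"time"

	"github.com/absmach/supermq/readers"
	"github.com/absmach/supermq/readers/timescale"
)

func main() {
	db, err := timescale.Connect(timescale.Config{
		Host:    "localhost",
		Port:    "5432",
		User:    "supermq",
		Pass:    "supermq",
		Name:    "messages",
		SSLMode: "disable",
	})
	if err != nil {
		log.Fatalf("failed to connect to Timescale: %s", err)
	}
	defer db.Close()

	repo := timescale.New(db)

	// The aggregation test above stores message time in nanoseconds, and
	// ReadAll divides by 1e9 before calling time_bucket; From and To are
	// expressed in the same unit here.
	now := float64(time.Now().UnixNano())
	page, err := repo.ReadAll("<channel-id>", readers.PageMetadata{
		Offset:      0,
		Limit:       100,
		Aggregation: "MAX",       // AVG, MAX, MIN, SUM or COUNT, as in the test cases
		Interval:    "1 hour",    // time_bucket interval
		From:        now - 100e9, // last 100 seconds
		To:          now,
	})
	if err != nil {
		log.Fatalf("failed to read aggregated messages: %s", err)
	}
	fmt.Printf("buckets returned: %d (rows matched: %d)\n", len(page.Messages), page.Total)
}
```
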