diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1cef57af73e..bdd76d916cf 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -35,7 +35,6 @@ See the project's [README](README.md) for further information about working in t - Include instructions on how to test your changes. 3. Your branch may be merged once all configured checks pass, including: - A review from appropriate maintainers -4. Along with the PR in transformer raise a PR against [config-generator][config-generator] with the configurations. ## Committing diff --git a/package-lock.json b/package-lock.json index 0f44dfce3d9..05bab904aae 100644 --- a/package-lock.json +++ b/package-lock.json @@ -21,6 +21,7 @@ "@pyroscope/nodejs": "^0.2.6", "@rudderstack/integrations-lib": "^0.2.4", "@rudderstack/workflow-engine": "^0.7.2", + "@shopify/jest-koa-mocks": "^5.1.1", "ajv": "^8.12.0", "ajv-draft-04": "^1.0.0", "ajv-formats": "^2.1.1", @@ -4529,6 +4530,18 @@ "tslib": "^2.6.2" } }, + "node_modules/@shopify/jest-koa-mocks": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@shopify/jest-koa-mocks/-/jest-koa-mocks-5.1.1.tgz", + "integrity": "sha512-H1dRznXIK03ph1l/VDBQ5ef+A9kkEn3ikNfk70zwm9auW15MfHfY9gekE99VecxUSekws7sbFte0i8ltWCS4/g==", + "dependencies": { + "koa": "^2.13.4", + "node-mocks-http": "^1.11.0" + }, + "engines": { + "node": "^14.17.0 || >=16.0.0" + } + }, "node_modules/@sideway/address": { "version": "4.1.4", "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz", @@ -16013,6 +16026,14 @@ "integrity": "sha512-jz+Cfrg9GWOZbQAnDQ4hlVnQky+341Yk5ru8bZSe6sIDTCIg8n9i/u7hSQGSVOF3C7lH6mGtqjkiT9G4wFLL0w==", "dev": true }, + "node_modules/merge-descriptors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", @@ -16591,6 +16612,47 @@ "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", "dev": true }, + "node_modules/node-mocks-http": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/node-mocks-http/-/node-mocks-http-1.14.1.tgz", + "integrity": "sha512-mfXuCGonz0A7uG1FEjnypjm34xegeN5+HI6xeGhYKecfgaZhjsmYoLE9LEFmT+53G1n8IuagPZmVnEL/xNsFaA==", + "dependencies": { + "@types/express": "^4.17.21", + "@types/node": "^20.10.6", + "accepts": "^1.3.7", + "content-disposition": "^0.5.3", + "depd": "^1.1.0", + "fresh": "^0.5.2", + "merge-descriptors": "^1.0.1", + "methods": "^1.1.2", + "mime": "^1.3.4", + "parseurl": "^1.3.3", + "range-parser": "^1.2.0", + "type-is": "^1.6.18" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/node-mocks-http/node_modules/depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-mocks-http/node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, 
"node_modules/node-notifier": { "version": "10.0.1", "resolved": "https://registry.npmjs.org/node-notifier/-/node-notifier-10.0.1.tgz", @@ -18041,6 +18103,14 @@ "integrity": "sha512-PgIdVpn5y5Yns8vqb8FzBUEYn98V3xcPgawAkkgj0YJ0qDsnHCiNmZYfOGMgOvoB0eWFLpYbhxUR3mxfDIMvpw==", "dev": true }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/raw-body": { "version": "2.5.2", "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", diff --git a/package.json b/package.json index f6ab6bc1ddc..558160207ce 100644 --- a/package.json +++ b/package.json @@ -66,6 +66,7 @@ "@pyroscope/nodejs": "^0.2.6", "@rudderstack/integrations-lib": "^0.2.4", "@rudderstack/workflow-engine": "^0.7.2", + "@shopify/jest-koa-mocks": "^5.1.1", "ajv": "^8.12.0", "ajv-draft-04": "^1.0.0", "ajv-formats": "^2.1.1", diff --git a/src/cdk/v2/destinations/algolia/procWorkflow.yaml b/src/cdk/v2/destinations/algolia/procWorkflow.yaml index b9ce7ef7fd4..f9ac8e3ae6e 100644 --- a/src/cdk/v2/destinations/algolia/procWorkflow.yaml +++ b/src/cdk/v2/destinations/algolia/procWorkflow.yaml @@ -61,7 +61,7 @@ steps: const filters = $.context.payload.filters; const objectIDs = $.context.payload.objectIDs; $.assert(!(filters && objectIDs), "event can't have both objectIds and filters at the same time."); - $.assert(filters || objectIDs, "Either filters or objectIds is required."); + $.assert(filters.length || objectIDs.length, "Either filters or objectIds is required and must be non empty."); - name: validatePayloadForClickEvent condition: $.context.payload.eventType === "click" diff --git a/src/cdk/v2/destinations/ninetailed/config.js b/src/cdk/v2/destinations/ninetailed/config.js new file mode 100644 index 00000000000..c38496a4155 --- /dev/null +++ b/src/cdk/v2/destinations/ninetailed/config.js @@ -0,0 +1,31 @@ +const { getMappingConfig } = require('../../../../v0/util'); + +const ConfigCategories = { + GENERAL: { + type: 'general', + name: 'generalPayloadMapping', + }, + CONTEXT: { + type: 'context', + name: 'contextMapping', + }, + TRACK: { + type: 'track', + name: 'trackMapping', + }, + IDENTIFY: { + type: 'identify', + name: 'identifyMapping', + }, + PAGE: { + type: 'page', + name: 'pageMapping', + }, +}; + +// MAX_BATCH_SIZE : // Maximum number of events to send in a single batch +const mappingConfig = getMappingConfig(ConfigCategories, __dirname); +const batchEndpoint = + 'https://experience.ninetailed.co/v2/organizations/{{organisationId}}/environments/{{environment}}/events'; + +module.exports = { ConfigCategories, mappingConfig, batchEndpoint, MAX_BATCH_SIZE: 200 }; diff --git a/src/cdk/v2/destinations/ninetailed/data/contextMapping.json b/src/cdk/v2/destinations/ninetailed/data/contextMapping.json new file mode 100644 index 00000000000..3d6392dd1ec --- /dev/null +++ b/src/cdk/v2/destinations/ninetailed/data/contextMapping.json @@ -0,0 +1,43 @@ +[ + { + "sourceKeys": "app.name", + "required": true, + "destKey": "app.name" + }, + { + "sourceKeys": "app.version", + "required": true, + "destKey": "app.version" + }, + { + "sourceKeys": "campaign", + "destKey": "campaign" + }, + { + "sourceKeys": "library.name", + "required": true, + "destKey": "library.name" + }, + { + "sourceKeys": "library.version", + "required": true, + "destKey": "library.version" + }, + { + 
"sourceKeys": "locale", + "destKey": "locale" + }, + { + "sourceKeys": "page", + "destKey": "page" + }, + { + "sourceKeys": "userAgent", + "destKey": "userAgent" + }, + { + "sourceKeys": "location", + "required": true, + "destKey": "location" + } +] diff --git a/src/cdk/v2/destinations/ninetailed/data/generalPayloadMapping.json b/src/cdk/v2/destinations/ninetailed/data/generalPayloadMapping.json new file mode 100644 index 00000000000..3ab72d1b9f4 --- /dev/null +++ b/src/cdk/v2/destinations/ninetailed/data/generalPayloadMapping.json @@ -0,0 +1,25 @@ +[ + { + "sourceKeys": "anonymousId", + "required": true, + "destKey": "anonymousId" + }, + { + "sourceKeys": "messageId", + "required": true, + "destKey": "messageId" + }, + { + "sourceKeys": "channel", + "required": true, + "destKey": "channel" + }, + { + "sourceKeys": "type", + "destKey": "type" + }, + { + "sourceKeys": "originalTimestamp", + "destKey": "originalTimestamp" + } +] diff --git a/src/cdk/v2/destinations/ninetailed/data/identifyMapping.json b/src/cdk/v2/destinations/ninetailed/data/identifyMapping.json new file mode 100644 index 00000000000..e8d3f7797d6 --- /dev/null +++ b/src/cdk/v2/destinations/ninetailed/data/identifyMapping.json @@ -0,0 +1,14 @@ +[ + { + "sourceKeys": "traits", + "sourceFromGenericMap": true, + "required": true, + "destKey": "traits" + }, + { + "sourceKeys": "userIdOnly", + "sourceFromGenericMap": true, + "required": true, + "destKey": "userId" + } +] diff --git a/src/cdk/v2/destinations/ninetailed/data/pageMapping.json b/src/cdk/v2/destinations/ninetailed/data/pageMapping.json new file mode 100644 index 00000000000..80ec2f58f12 --- /dev/null +++ b/src/cdk/v2/destinations/ninetailed/data/pageMapping.json @@ -0,0 +1,7 @@ +[ + { + "sourceKeys": "properties", + "required": true, + "destKey": "properties" + } +] diff --git a/src/cdk/v2/destinations/ninetailed/data/trackMapping.json b/src/cdk/v2/destinations/ninetailed/data/trackMapping.json new file mode 100644 index 00000000000..44af6dd1a34 --- /dev/null +++ b/src/cdk/v2/destinations/ninetailed/data/trackMapping.json @@ -0,0 +1,12 @@ +[ + { + "sourceKeys": "properties", + "required": true, + "destKey": "properties" + }, + { + "sourceKeys": "event", + "required": true, + "destKey": "event" + } +] diff --git a/src/cdk/v2/destinations/ninetailed/procWorkflow.yaml b/src/cdk/v2/destinations/ninetailed/procWorkflow.yaml new file mode 100644 index 00000000000..6f5056ce100 --- /dev/null +++ b/src/cdk/v2/destinations/ninetailed/procWorkflow.yaml @@ -0,0 +1,33 @@ +bindings: + - name: EventType + path: ../../../../constants + - path: ../../bindings/jsontemplate + - name: defaultRequestConfig + path: ../../../../v0/util + - name: removeUndefinedAndNullValues + path: ../../../../v0/util + - path: ./utils + +steps: + - name: messageType + template: | + .message.type.toLowerCase(); + - name: validateInput + template: | + let messageType = $.outputs.messageType; + $.assert(messageType, "message Type is not present. Aborting"); + $.assert(messageType in {{$.EventType.([.TRACK,.IDENTIFY,.PAGE])}}, "message type " + messageType + " is not supported"); + $.assertConfig(.destination.Config.organisationId, "Organisation ID is not present. Aborting"); + $.assertConfig(.destination.Config.environment, "Environment is not present. 
Aborting"); + - name: preparePayload + template: | + const payload = $.constructFullPayload(.message); + $.context.payload = $.removeUndefinedAndNullValues(payload); + + - name: buildResponse + template: | + const response = $.defaultRequestConfig(); + response.body.JSON.events = [$.context.payload]; + response.endpoint = $.getEndpoint(.destination.Config); + response.method = "POST"; + response diff --git a/src/cdk/v2/destinations/ninetailed/rtWorkflow.yaml b/src/cdk/v2/destinations/ninetailed/rtWorkflow.yaml new file mode 100644 index 00000000000..30dd3fdd95d --- /dev/null +++ b/src/cdk/v2/destinations/ninetailed/rtWorkflow.yaml @@ -0,0 +1,35 @@ +bindings: + - path: ./config + - name: handleRtTfSingleEventError + path: ../../../../v0/util/index + - path: ./utils +steps: + - name: validateInput + template: | + $.assert(Array.isArray(^) && ^.length > 0, "Invalid event array") + + - name: transform + externalWorkflow: + path: ./procWorkflow.yaml + loopOverInput: true + + - name: successfulEvents + template: | + $.outputs.transform#idx.output.({ + "output": .body.JSON.events[0], + "destination": ^[idx].destination, + "metadata": ^[idx].metadata + })[] + - name: failedEvents + template: | + $.outputs.transform#idx.error.( + $.handleRtTfSingleEventError(^[idx], .originalError ?? ., {}) + )[] + - name: batchSuccessfulEvents + description: Batches the successfulEvents + template: | + $.batchResponseBuilder($.outputs.successfulEvents); + + - name: finalPayload + template: | + [...$.outputs.failedEvents, ...$.outputs.batchSuccessfulEvents] diff --git a/src/cdk/v2/destinations/ninetailed/utils.js b/src/cdk/v2/destinations/ninetailed/utils.js new file mode 100644 index 00000000000..b716422a0e8 --- /dev/null +++ b/src/cdk/v2/destinations/ninetailed/utils.js @@ -0,0 +1,109 @@ +const { BatchUtils } = require('@rudderstack/workflow-engine'); +const config = require('./config'); +const { constructPayload } = require('../../../../v0/util'); + +/** + * This fucntion constructs payloads based upon mappingConfig for all calls + * We build context as it has some specific payloads with default values so just breaking them down + * @param {*} message + * @returns + */ +const constructFullPayload = (message) => { + const context = constructPayload( + message?.context || {}, + config.mappingConfig[config.ConfigCategories.CONTEXT.name], + ); + const payload = constructPayload( + message, + config.mappingConfig[config.ConfigCategories.GENERAL.name], + ); + let typeSpecifcPayload; + switch (message.type) { + case 'track': + typeSpecifcPayload = constructPayload( + message, + config.mappingConfig[config.ConfigCategories.TRACK.name], + ); + break; + case 'identify': + typeSpecifcPayload = constructPayload( + message, + config.mappingConfig[config.ConfigCategories.IDENTIFY.name], + ); + break; + case 'page': + typeSpecifcPayload = constructPayload( + message, + config.mappingConfig[config.ConfigCategories.PAGE.name], + ); + break; + default: + break; + } + payload.context = context; + return { ...payload, ...typeSpecifcPayload }; // merge base and type-specific payloads; +}; + +const getEndpoint = (Config) => { + const { organisationId, environment } = Config; + return config.batchEndpoint + .replace('{{organisationId}}', organisationId) + .replace('{{environment}}', environment); +}; + +const mergeMetadata = (batch) => { + const metadata = []; + batch.forEach((event) => { + metadata.push(event.metadata); + }); + return metadata; +}; + +const getMergedEvents = (batch) => { + const events = []; + batch.forEach((event) 
+ +const mergeMetadata = (batch) => { + const metadata = []; + batch.forEach((event) => { + metadata.push(event.metadata); + }); + return metadata; +}; + +const getMergedEvents = (batch) => { + const events = []; + batch.forEach((event) => { + events.push(event.output); + }); + return events; +}; + +const batchBuilder = (batch) => ({ + batchedRequest: { + body: { + JSON: { events: getMergedEvents(batch) }, + JSON_ARRAY: {}, + XML: {}, + FORM: {}, + }, + version: '1', + type: 'REST', + method: 'POST', + endpoint: getEndpoint(batch[0].destination.Config), + headers: { + 'Content-Type': 'application/json', + }, + params: {}, + files: {}, + }, + metadata: mergeMetadata(batch), + batched: true, + statusCode: 200, + destination: batch[0].destination, +}); + +/** + * This function makes chunks of successful events based on MAX_BATCH_SIZE + * and then builds the response for each chunk, returning them as an array + * @param {*} events + * @returns + */ +const batchResponseBuilder = (events) => { + const batches = BatchUtils.chunkArrayBySizeAndLength(events, { maxItems: config.MAX_BATCH_SIZE }); + const response = []; + batches.items.forEach((batch) => { + response.push(batchBuilder(batch)); + }); + return response; +}; + +module.exports = { constructFullPayload, getEndpoint, batchResponseBuilder }; diff --git a/src/controllers/__tests__/delivery.test.ts b/src/controllers/__tests__/delivery.test.ts new file mode 100644 index 00000000000..0f91913f9d0 --- /dev/null +++ b/src/controllers/__tests__/delivery.test.ts @@ -0,0 +1,186 @@ +import request from 'supertest'; +import { createHttpTerminator } from 'http-terminator'; +import Koa from 'koa'; +import bodyParser from 'koa-bodyparser'; +import { applicationRoutes } from '../../routes'; +import { NativeIntegrationDestinationService } from '../../services/destination/nativeIntegration'; +import { ServiceSelector } from '../../helpers/serviceSelector'; + +let server: any; +const OLD_ENV = process.env; + +beforeAll(async () => { + process.env = { ...OLD_ENV }; // Make a copy + const app = new Koa(); + app.use( + bodyParser({ + jsonLimit: '200mb', + }), + ); + applicationRoutes(app); + server = app.listen(9090); +}); + +afterAll(async () => { + process.env = OLD_ENV; // Restore old environment + const httpTerminator = createHttpTerminator({ + server, + }); + await httpTerminator.terminate(); +}); + +afterEach(() => { + jest.clearAllMocks(); +}); + +const getData = () => { + return { body: { JSON: { a: 'b' } }, metadata: [{ a1: 'b1' }], destinationConfig: { a2: 'b2' } }; +}; + +describe('Delivery controller tests', () => { + describe('Delivery V0 tests', () => { + test('successful delivery', async () => { + const testOutput = { status: 200, message: 'success' }; + const mockDestinationService = new NativeIntegrationDestinationService(); + mockDestinationService.deliver = jest + .fn() + .mockImplementation((event, destinationType, requestMetadata, version) => { + expect(event).toEqual(getData()); + expect(destinationType).toEqual('__rudder_test__'); + expect(version).toEqual('v0'); + return testOutput; + }); + const getNativeDestinationServiceSpy = jest + .spyOn(ServiceSelector, 'getNativeDestinationService') + .mockImplementation(() => { + return mockDestinationService; + }); + + const response = await request(server) + .post('/v0/destinations/__rudder_test__/proxy') + .set('Accept', 'application/json') + .send(getData()); + + expect(response.status).toEqual(200); + expect(response.body).toEqual({ output: testOutput }); + + expect(response.header['apiversion']).toEqual('2'); + + expect(getNativeDestinationServiceSpy).toHaveBeenCalledTimes(1); + expect(mockDestinationService.deliver).toHaveBeenCalledTimes(1); + }); + + test('delivery failure', async () => { + const
mockDestinationService = new NativeIntegrationDestinationService(); + mockDestinationService.deliver = jest + .fn() + .mockImplementation((event, destinationType, requestMetadata, version) => { + expect(event).toEqual(getData()); + expect(destinationType).toEqual('__rudder_test__'); + expect(version).toEqual('v0'); + throw new Error('test error'); + }); + const getNativeDestinationServiceSpy = jest + .spyOn(ServiceSelector, 'getNativeDestinationService') + .mockImplementation(() => { + return mockDestinationService; + }); + + const response = await request(server) + .post('/v0/destinations/__rudder_test__/proxy') + .set('Accept', 'application/json') + .send(getData()); + + const expectedResp = { + output: { + message: 'test error', + statTags: { + errorCategory: 'transformation', + }, + destinationResponse: '', + status: 500, + }, + }; + expect(response.status).toEqual(500); + expect(response.body).toEqual(expectedResp); + + expect(response.header['apiversion']).toEqual('2'); + + expect(getNativeDestinationServiceSpy).toHaveBeenCalledTimes(1); + expect(mockDestinationService.deliver).toHaveBeenCalledTimes(1); + }); + }); + + describe('Delivery V1 tests', () => { + test('successful delivery', async () => { + const testOutput = { status: 200, message: 'success' }; + const mockDestinationService = new NativeIntegrationDestinationService(); + mockDestinationService.deliver = jest + .fn() + .mockImplementation((event, destinationType, requestMetadata, version) => { + expect(event).toEqual(getData()); + expect(destinationType).toEqual('__rudder_test__'); + expect(version).toEqual('v1'); + return testOutput; + }); + const getNativeDestinationServiceSpy = jest + .spyOn(ServiceSelector, 'getNativeDestinationService') + .mockImplementation(() => { + return mockDestinationService; + }); + + const response = await request(server) + .post('/v1/destinations/__rudder_test__/proxy') + .set('Accept', 'application/json') + .send(getData()); + + expect(response.status).toEqual(200); + expect(response.body).toEqual({ output: testOutput }); + + expect(response.header['apiversion']).toEqual('2'); + + expect(getNativeDestinationServiceSpy).toHaveBeenCalledTimes(1); + expect(mockDestinationService.deliver).toHaveBeenCalledTimes(1); + }); + + test('delivery failure', async () => { + const mockDestinationService = new NativeIntegrationDestinationService(); + mockDestinationService.deliver = jest + .fn() + .mockImplementation((event, destinationType, requestMetadata, version) => { + expect(event).toEqual(getData()); + expect(destinationType).toEqual('__rudder_test__'); + expect(version).toEqual('v1'); + throw new Error('test error'); + }); + const getNativeDestinationServiceSpy = jest + .spyOn(ServiceSelector, 'getNativeDestinationService') + .mockImplementation(() => { + return mockDestinationService; + }); + + const response = await request(server) + .post('/v1/destinations/__rudder_test__/proxy') + .set('Accept', 'application/json') + .send(getData()); + + const expectedResp = { + output: { + message: 'test error', + statTags: { + errorCategory: 'transformation', + }, + status: 500, + response: [{ error: 'test error', metadata: { a1: 'b1' }, statusCode: 500 }], + }, + }; + expect(response.status).toEqual(200); + expect(response.body).toEqual(expectedResp); + + expect(response.header['apiversion']).toEqual('2'); + + expect(getNativeDestinationServiceSpy).toHaveBeenCalledTimes(1); + expect(mockDestinationService.deliver).toHaveBeenCalledTimes(1); + }); + }); +}); diff --git 
a/src/controllers/__tests__/destination.test.ts b/src/controllers/__tests__/destination.test.ts new file mode 100644 index 00000000000..3c49a9a0aff --- /dev/null +++ b/src/controllers/__tests__/destination.test.ts @@ -0,0 +1,337 @@ +import request from 'supertest'; +import { createHttpTerminator } from 'http-terminator'; +import Koa from 'koa'; +import bodyParser from 'koa-bodyparser'; +import { applicationRoutes } from '../../routes'; +import { ServiceSelector } from '../../helpers/serviceSelector'; +import { DynamicConfigParser } from '../../util/dynamicConfigParser'; +import { NativeIntegrationDestinationService } from '../../services/destination/nativeIntegration'; + +let server: any; +const OLD_ENV = process.env; + +beforeAll(async () => { + process.env = { ...OLD_ENV }; // Make a copy + const app = new Koa(); + app.use( + bodyParser({ + jsonLimit: '200mb', + }), + ); + applicationRoutes(app); + server = app.listen(9090); +}); + +afterAll(async () => { + process.env = OLD_ENV; // Restore old environment + const httpTerminator = createHttpTerminator({ + server, + }); + await httpTerminator.terminate(); +}); + +afterEach(() => { + jest.clearAllMocks(); +}); + +const getData = () => { + return [{ event: { a: 'b1' } }, { event: { a: 'b2' } }]; +}; + +const getRouterTransformInputData = () => { + return { + input: [ + { message: { a: 'b1' }, destination: {}, metadata: { jobId: 1 } }, + { message: { a: 'b2' }, destination: {}, metadata: { jobId: 2 } }, + ], + destType: '__rudder_test__', + }; +}; + +describe('Destination controller tests', () => { + describe('Destination processor transform tests', () => { + test('successful transformation at processor', async () => { + const mockDestinationService = new NativeIntegrationDestinationService(); + + const expectedOutput = [ + { + event: { a: 'b1' }, + request: { query: {} }, + message: {}, + }, + { + event: { a: 'b2' }, + request: { query: {} }, + message: {}, + }, + ]; + mockDestinationService.doProcessorTransformation = jest + .fn() + .mockImplementation((events, destinationType, version, requestMetadata) => { + expect(events).toEqual(expectedOutput); + expect(destinationType).toEqual('__rudder_test__'); + expect(version).toEqual('v0'); + + return events; + }); + const getDestinationServiceSpy = jest + .spyOn(ServiceSelector, 'getDestinationService') + .mockImplementation(() => { + return mockDestinationService; + }); + + DynamicConfigParser.process = jest.fn().mockImplementation((events) => { + return events; + }); + + const response = await request(server) + .post('/v0/destinations/__rudder_test__') + .set('Accept', 'application/json') + .send(getData()); + + expect(response.status).toEqual(200); + expect(response.body).toEqual(expectedOutput); + + expect(response.header['apiversion']).toEqual('2'); + + expect(getDestinationServiceSpy).toHaveBeenCalledTimes(1); + expect(mockDestinationService.doProcessorTransformation).toHaveBeenCalledTimes(1); + }); + + test('transformation at processor failure', async () => { + const mockDestinationService = new NativeIntegrationDestinationService(); + + const expectedOutput = [ + { + statusCode: 500, + error: 'Processor transformation failed', + statTags: { errorCategory: 'transformation' }, + }, + { + statusCode: 500, + error: 'Processor transformation failed', + statTags: { errorCategory: 'transformation' }, + }, + ]; + + mockDestinationService.doProcessorTransformation = jest + .fn() + .mockImplementation((events, destinationType, version, requestMetadata) => { + 
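+ // simulate a processor transformation failure; the controller should convert it into per-event 500 error responses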
expect(destinationType).toEqual('__rudder_test__'); + expect(version).toEqual('v0'); + + throw new Error('Processor transformation failed'); + }); + const getDestinationServiceSpy = jest + .spyOn(ServiceSelector, 'getDestinationService') + .mockImplementation(() => { + return mockDestinationService; + }); + + DynamicConfigParser.process = jest.fn().mockImplementation((events) => { + return events; + }); + + const response = await request(server) + .post('/v0/destinations/__rudder_test__') + .set('Accept', 'application/json') + .send(getData()); + + expect(response.status).toEqual(200); + expect(response.body).toEqual(expectedOutput); + + expect(response.header['apiversion']).toEqual('2'); + + expect(getDestinationServiceSpy).toHaveBeenCalledTimes(1); + expect(mockDestinationService.doProcessorTransformation).toHaveBeenCalledTimes(1); + }); + }); + + describe('Destination router transform tests', () => { + test('successful transformation at router', async () => { + const mockDestinationService = new NativeIntegrationDestinationService(); + + const expectedOutput = [ + { + message: { a: 'b1' }, + destination: {}, + metadata: { jobId: 1 }, + request: { query: {} }, + }, + { + message: { a: 'b2' }, + destination: {}, + metadata: { jobId: 2 }, + request: { query: {} }, + }, + ]; + + mockDestinationService.doRouterTransformation = jest + .fn() + .mockImplementation((events, destinationType, version, requestMetadata) => { + expect(events).toEqual(expectedOutput); + expect(destinationType).toEqual('__rudder_test__'); + expect(version).toEqual('v0'); + + return events; + }); + const getDestinationServiceSpy = jest + .spyOn(ServiceSelector, 'getDestinationService') + .mockImplementation(() => { + return mockDestinationService; + }); + + DynamicConfigParser.process = jest.fn().mockImplementation((events) => { + return events; + }); + + const response = await request(server) + .post('/routerTransform') + .set('Accept', 'application/json') + .send(getRouterTransformInputData()); + + expect(response.status).toEqual(200); + expect(response.body).toEqual({ output: expectedOutput }); + + expect(response.header['apiversion']).toEqual('2'); + + expect(getDestinationServiceSpy).toHaveBeenCalledTimes(1); + expect(mockDestinationService.doRouterTransformation).toHaveBeenCalledTimes(1); + }); + + test('transformation at router failure', async () => { + const mockDestinationService = new NativeIntegrationDestinationService(); + + mockDestinationService.doRouterTransformation = jest + .fn() + .mockImplementation((events, destinationType, version, requestMetadata) => { + throw new Error('Router transformation failed'); + }); + const getDestinationServiceSpy = jest + .spyOn(ServiceSelector, 'getDestinationService') + .mockImplementation(() => { + return mockDestinationService; + }); + + DynamicConfigParser.process = jest.fn().mockImplementation((events) => { + return events; + }); + + const response = await request(server) + .post('/routerTransform') + .set('Accept', 'application/json') + .send(getRouterTransformInputData()); + + const expectedOutput = [ + { + metadata: [{ jobId: 1 }, { jobId: 2 }], + batched: false, + statusCode: 500, + error: 'Router transformation failed', + statTags: { errorCategory: 'transformation' }, + }, + ]; + expect(response.status).toEqual(200); + expect(response.body).toEqual({ output: expectedOutput }); + + expect(response.header['apiversion']).toEqual('2'); + + expect(getDestinationServiceSpy).toHaveBeenCalledTimes(1); + 
expect(mockDestinationService.doRouterTransformation).toHaveBeenCalledTimes(1); + }); + }); + + describe('Batch transform tests', () => { + test('successful batching at router', async () => { + const mockDestinationService = new NativeIntegrationDestinationService(); + + const expectedOutput = [ + { + message: { a: 'b1' }, + destination: {}, + metadata: { jobId: 1 }, + request: { query: {} }, + }, + { + message: { a: 'b2' }, + destination: {}, + metadata: { jobId: 2 }, + request: { query: {} }, + }, + ]; + + mockDestinationService.doBatchTransformation = jest + .fn() + .mockImplementation((events, destinationType, version, requestMetadata) => { + expect(events).toEqual(expectedOutput); + expect(destinationType).toEqual('__rudder_test__'); + expect(version).toEqual('v0'); + + return events; + }); + const getDestinationServiceSpy = jest + .spyOn(ServiceSelector, 'getDestinationService') + .mockImplementation(() => { + return mockDestinationService; + }); + + DynamicConfigParser.process = jest.fn().mockImplementation((events) => { + return events; + }); + + const response = await request(server) + .post('/batch') + .set('Accept', 'application/json') + .send(getRouterTransformInputData()); + + expect(response.status).toEqual(200); + expect(response.body).toEqual(expectedOutput); + + expect(response.header['apiversion']).toEqual('2'); + + expect(getDestinationServiceSpy).toHaveBeenCalledTimes(1); + expect(mockDestinationService.doBatchTransformation).toHaveBeenCalledTimes(1); + }); + + test('batch transformation failure', async () => { + const mockDestinationService = new NativeIntegrationDestinationService(); + + mockDestinationService.doBatchTransformation = jest + .fn() + .mockImplementation((events, destinationType, version, requestMetadata) => { + throw new Error('Batch transformation failed'); + }); + const getDestinationServiceSpy = jest + .spyOn(ServiceSelector, 'getDestinationService') + .mockImplementation(() => { + return mockDestinationService; + }); + + DynamicConfigParser.process = jest.fn().mockImplementation((events) => { + return events; + }); + + const response = await request(server) + .post('/batch') + .set('Accept', 'application/json') + .send(getRouterTransformInputData()); + + const expectedOutput = [ + { + metadata: [{ jobId: 1 }, { jobId: 2 }], + batched: false, + statusCode: 500, + error: 'Batch transformation failed', + statTags: { errorCategory: 'transformation' }, + }, + ]; + expect(response.status).toEqual(200); + expect(response.body).toEqual(expectedOutput); + + expect(response.header['apiversion']).toEqual('2'); + + expect(getDestinationServiceSpy).toHaveBeenCalledTimes(1); + expect(mockDestinationService.doBatchTransformation).toHaveBeenCalledTimes(1); + }); + }); +}); diff --git a/src/controllers/__tests__/regulation.test.ts b/src/controllers/__tests__/regulation.test.ts new file mode 100644 index 00000000000..55cd8f2d376 --- /dev/null +++ b/src/controllers/__tests__/regulation.test.ts @@ -0,0 +1,107 @@ +import request from 'supertest'; +import { createHttpTerminator } from 'http-terminator'; +import Koa from 'koa'; +import bodyParser from 'koa-bodyparser'; +import { applicationRoutes } from '../../routes'; +import { ServiceSelector } from '../../helpers/serviceSelector'; +import { NativeIntegrationDestinationService } from '../../services/destination/nativeIntegration'; + +let server: any; +const OLD_ENV = process.env; + +beforeAll(async () => { + process.env = { ...OLD_ENV }; // Make a copy + const app = new Koa(); + app.use( + bodyParser({ + jsonLimit: 
'200mb', + }), + ); + applicationRoutes(app); + server = app.listen(9090); +}); + +afterAll(async () => { + process.env = OLD_ENV; // Restore old environment + const httpTerminator = createHttpTerminator({ + server, + }); + await httpTerminator.terminate(); +}); + +afterEach(() => { + jest.clearAllMocks(); +}); + +const getDeletionData = () => { + return [ + { userAttributes: [{ a: 'b1' }], destType: '__rudder_test__' }, + { userAttributes: [{ a: 'b1' }], destType: '__rudder_test__' }, + ]; +}; + +describe('Regulation controller tests', () => { + describe('Delete users tests', () => { + test('successful delete users request', async () => { + const mockDestinationService = new NativeIntegrationDestinationService(); + + const expectedOutput = [{ statusCode: 400 }, { statusCode: 200 }]; + + mockDestinationService.processUserDeletion = jest + .fn() + .mockImplementation((reqs, destInfo) => { + expect(reqs).toEqual(getDeletionData()); + expect(destInfo).toEqual({ a: 'test' }); + + return expectedOutput; + }); + const getDestinationServiceSpy = jest + .spyOn(ServiceSelector, 'getNativeDestinationService') + .mockImplementation(() => { + return mockDestinationService; + }); + + const response = await request(server) + .post('/deleteUsers') + .set('Accept', 'application/json') + .set('x-rudder-dest-info', '{"a": "test"}') + .send(getDeletionData()); + + expect(response.status).toEqual(400); + expect(response.body).toEqual(expectedOutput); + + expect(getDestinationServiceSpy).toHaveBeenCalledTimes(1); + expect(mockDestinationService.processUserDeletion).toHaveBeenCalledTimes(1); + }); + + test('delete users request failure', async () => { + const mockDestinationService = new NativeIntegrationDestinationService(); + + mockDestinationService.processUserDeletion = jest + .fn() + .mockImplementation((reqs, destInfo) => { + expect(reqs).toEqual(getDeletionData()); + expect(destInfo).toEqual({ a: 'test' }); + + throw new Error('processUserDeletion error'); + }); + const getDestinationServiceSpy = jest + .spyOn(ServiceSelector, 'getNativeDestinationService') + .mockImplementation(() => { + return mockDestinationService; + }); + + const response = await request(server) + .post('/deleteUsers') + .set('Accept', 'application/json') + .set('x-rudder-dest-info', '{"a": "test"}') + .send(getDeletionData()); + + expect(response.status).toEqual(500); + expect(response.body).toEqual([{ error: {}, statusCode: 500 }]); + + expect(getDestinationServiceSpy).toHaveBeenCalledTimes(1); + expect(mockDestinationService.processUserDeletion).toHaveBeenCalledTimes(1); + }); + }); +}); diff --git a/src/controllers/__tests__/source.test.ts b/src/controllers/__tests__/source.test.ts new file mode 100644 index 00000000000..565f39d559f --- /dev/null +++ b/src/controllers/__tests__/source.test.ts @@ -0,0 +1,220 @@ +import request from 'supertest'; +import { createHttpTerminator } from 'http-terminator'; +import Koa from 'koa'; +import bodyParser from 'koa-bodyparser'; +import { applicationRoutes } from '../../routes'; +import { NativeIntegrationSourceService } from '../../services/source/nativeIntegration'; +import { ServiceSelector } from '../../helpers/serviceSelector'; +import { ControllerUtility } from '../util/index'; + +let server: any; +const OLD_ENV = process.env; + +beforeAll(async () => { + process.env = { ...OLD_ENV }; // Make a copy + const app = new Koa(); + app.use( + bodyParser({ + jsonLimit: '200mb', + }), + ); + applicationRoutes(app); + server = app.listen(9090); +}); + +afterAll(async () => { + process.env = 
OLD_ENV; // Restore old environment + const httpTerminator = createHttpTerminator({ + server, + }); + await httpTerminator.terminate(); +}); + +afterEach(() => { + jest.clearAllMocks(); +}); + +const getData = () => { + return [{ event: { a: 'b1' } }, { event: { a: 'b2' } }]; +}; + +describe('Source controller tests', () => { + describe('V0 Source transform tests', () => { + test('successful source transform', async () => { + const sourceType = '__rudder_test__'; + const version = 'v0'; + const testOutput = [{ event: { a: 'b' } }]; + + const mockSourceService = new NativeIntegrationSourceService(); + mockSourceService.sourceTransformRoutine = jest + .fn() + .mockImplementation((i, s, v, requestMetadata) => { + expect(i).toEqual(getData()); + expect(s).toEqual(sourceType); + expect(v).toEqual(version); + return testOutput; + }); + const getNativeSourceServiceSpy = jest + .spyOn(ServiceSelector, 'getNativeSourceService') + .mockImplementation(() => { + return mockSourceService; + }); + + const adaptInputToVersionSpy = jest + .spyOn(ControllerUtility, 'adaptInputToVersion') + .mockImplementation((s, v, e) => { + expect(s).toEqual(sourceType); + expect(v).toEqual(version); + expect(e).toEqual(getData()); + return { implementationVersion: version, input: e }; + }); + + const response = await request(server) + .post('/v0/sources/__rudder_test__') + .set('Accept', 'application/json') + .send(getData()); + + expect(response.status).toEqual(200); + expect(response.body).toEqual(testOutput); + + expect(response.header['apiversion']).toEqual('2'); + + expect(getNativeSourceServiceSpy).toHaveBeenCalledTimes(1); + expect(adaptInputToVersionSpy).toHaveBeenCalledTimes(1); + expect(mockSourceService.sourceTransformRoutine).toHaveBeenCalledTimes(1); + }); + + test('failing source transform', async () => { + const sourceType = '__rudder_test__'; + const version = 'v0'; + + const mockSourceService = new NativeIntegrationSourceService(); + const getNativeSourceServiceSpy = jest + .spyOn(ServiceSelector, 'getNativeSourceService') + .mockImplementation(() => { + return mockSourceService; + }); + + const adaptInputToVersionSpy = jest + .spyOn(ControllerUtility, 'adaptInputToVersion') + .mockImplementation((s, v, e) => { + expect(s).toEqual(sourceType); + expect(v).toEqual(version); + expect(e).toEqual(getData()); + throw new Error('test error'); + }); + + const response = await request(server) + .post('/v0/sources/__rudder_test__') + .set('Accept', 'application/json') + .send(getData()); + + const expectedResp = [ + { + error: 'test error', + statTags: { + errorCategory: 'transformation', + }, + statusCode: 500, + }, + ]; + + expect(response.status).toEqual(200); + expect(response.body).toEqual(expectedResp); + + expect(response.header['apiversion']).toEqual('2'); + + expect(getNativeSourceServiceSpy).toHaveBeenCalledTimes(1); + expect(adaptInputToVersionSpy).toHaveBeenCalledTimes(1); + }); + }); + + describe('V1 Source transform tests', () => { + test('successful source transform', async () => { + const sourceType = '__rudder_test__'; + const version = 'v1'; + const testOutput = [{ event: { a: 'b' }, source: { id: 'id' } }]; + + const mockSourceService = new NativeIntegrationSourceService(); + mockSourceService.sourceTransformRoutine = jest + .fn() + .mockImplementation((i, s, v, requestMetadata) => { + expect(i).toEqual(getData()); + expect(s).toEqual(sourceType); + expect(v).toEqual(version); + return testOutput; + }); + const getNativeSourceServiceSpy = jest + .spyOn(ServiceSelector, 
'getNativeSourceService') + .mockImplementation(() => { + return mockSourceService; + }); + + const adaptInputToVersionSpy = jest + .spyOn(ControllerUtility, 'adaptInputToVersion') + .mockImplementation((s, v, e) => { + expect(s).toEqual(sourceType); + expect(v).toEqual(version); + expect(e).toEqual(getData()); + return { implementationVersion: version, input: e }; + }); + + const response = await request(server) + .post('/v1/sources/__rudder_test__') + .set('Accept', 'application/json') + .send(getData()); + + expect(response.status).toEqual(200); + expect(response.body).toEqual(testOutput); + + expect(response.header['apiversion']).toEqual('2'); + + expect(getNativeSourceServiceSpy).toHaveBeenCalledTimes(1); + expect(adaptInputToVersionSpy).toHaveBeenCalledTimes(1); + expect(mockSourceService.sourceTransformRoutine).toHaveBeenCalledTimes(1); + }); + + test('failing source transform', async () => { + const sourceType = '__rudder_test__'; + const version = 'v1'; + const mockSourceService = new NativeIntegrationSourceService(); + const getNativeSourceServiceSpy = jest + .spyOn(ServiceSelector, 'getNativeSourceService') + .mockImplementation(() => { + return mockSourceService; + }); + + const adaptInputToVersionSpy = jest + .spyOn(ControllerUtility, 'adaptInputToVersion') + .mockImplementation((s, v, e) => { + expect(s).toEqual(sourceType); + expect(v).toEqual(version); + expect(e).toEqual(getData()); + throw new Error('test error'); + }); + + const response = await request(server) + .post('/v1/sources/__rudder_test__') + .set('Accept', 'application/json') + .send(getData()); + + const expectedResp = [ + { + error: 'test error', + statTags: { + errorCategory: 'transformation', + }, + statusCode: 500, + }, + ]; + + expect(response.status).toEqual(200); + expect(response.body).toEqual(expectedResp); + + expect(response.header['apiversion']).toEqual('2'); + + expect(getNativeSourceServiceSpy).toHaveBeenCalledTimes(1); + expect(adaptInputToVersionSpy).toHaveBeenCalledTimes(1); + }); + }); +}); diff --git a/src/controllers/obs.delivery.js b/src/controllers/obs.delivery.js index 5aa3ca5862d..8e99650af64 100644 --- a/src/controllers/obs.delivery.js +++ b/src/controllers/obs.delivery.js @@ -1,7 +1,7 @@ /** * -------------------------------------- * -------------------------------------- - * ---------TO BE DEPRICIATED------------ + * ---------TO BE DEPRECATED------------- * -------------------------------------- * -------------------------------------- */ diff --git a/src/controllers/regulation.ts b/src/controllers/regulation.ts index a50541780d6..318b5ed4e78 100644 --- a/src/controllers/regulation.ts +++ b/src/controllers/regulation.ts @@ -34,7 +34,7 @@ export class RegulationController { rudderDestInfo, ); ctx.body = resplist; - ctx.status = resplist[0].statusCode; + ctx.status = resplist[0].statusCode; // TODO: check if this is the right way to set status } catch (error: CatchErr) { const metaTO = integrationService.getTags( userDeletionRequests[0].destType, @@ -46,8 +46,8 @@ export class RegulationController { const errResp = DestinationPostTransformationService.handleUserDeletionFailureEvents( error, metaTO, - ); - ctx.body = [{ error, statusCode: 500 }] as UserDeletionResponse[]; + ); // TODO: this is not used. Fix it. + ctx.body = [{ error, statusCode: 500 }] as UserDeletionResponse[]; // TODO: responses array length is always 1. Is that okay? 
ctx.status = 500; } stats.timing('dest_transform_request_latency', startTime, { diff --git a/src/features.json b/src/features.json index 8709dce4326..5460111a22e 100644 --- a/src/features.json +++ b/src/features.json @@ -65,7 +65,8 @@ "TIKTOK_AUDIENCE": true, "REDDIT": true, "THE_TRADE_DESK": true, - "INTERCOM": true + "INTERCOM": true, + "NINETAILED": true }, "regulations": [ "BRAZE", diff --git a/src/helpers/__tests__/fetchHandlers.test.ts b/src/helpers/__tests__/fetchHandlers.test.ts new file mode 100644 index 00000000000..2135317cafb --- /dev/null +++ b/src/helpers/__tests__/fetchHandlers.test.ts @@ -0,0 +1,36 @@ +import { FetchHandler } from '../fetchHandlers'; +import { MiscService } from '../../services/misc'; + +afterEach(() => { + jest.clearAllMocks(); +}); + +describe('FetchHandlers Service', () => { + test('should save the handlers in the respective maps', async () => { + const dest = 'dest'; + const source = 'source'; + const version = 'version'; + + MiscService.getDestHandler = jest.fn().mockImplementation((dest, version) => { + return {}; + }); + MiscService.getSourceHandler = jest.fn().mockImplementation((source, version) => { + return {}; + }); + MiscService.getDeletionHandler = jest.fn().mockImplementation((source, version) => { + return {}; + }); + + expect(FetchHandler['sourceHandlerMap'].get(dest)).toBeUndefined(); + FetchHandler.getSourceHandler(dest, version); + expect(FetchHandler['sourceHandlerMap'].get(dest)).toBeDefined(); + + expect(FetchHandler['destHandlerMap'].get(dest)).toBeUndefined(); + FetchHandler.getDestHandler(dest, version); + expect(FetchHandler['destHandlerMap'].get(dest)).toBeDefined(); + + expect(FetchHandler['deletionHandlerMap'].get(dest)).toBeUndefined(); + FetchHandler.getDeletionHandler(dest, version); + expect(FetchHandler['deletionHandlerMap'].get(dest)).toBeDefined(); + }); +}); diff --git a/src/helpers/__tests__/serviceSelector.test.ts b/src/helpers/__tests__/serviceSelector.test.ts new file mode 100644 index 00000000000..c48d6bbe8b7 --- /dev/null +++ b/src/helpers/__tests__/serviceSelector.test.ts @@ -0,0 +1,105 @@ +import { ServiceSelector } from '../serviceSelector'; +import { INTEGRATION_SERVICE } from '../../routes/utils/constants'; +import { ProcessorTransformationRequest } from '../../types/index'; +import { CDKV1DestinationService } from '../../services/destination/cdkV1Integration'; +import { CDKV2DestinationService } from '../../services/destination/cdkV2Integration'; +import { NativeIntegrationDestinationService } from '../../services/destination/nativeIntegration'; + +afterEach(() => { + jest.clearAllMocks(); +}); + +describe('ServiceSelector Service', () => { + test('should save the service in the cache', async () => { + expect(ServiceSelector['serviceMap'].get(INTEGRATION_SERVICE.NATIVE_DEST)).toBeUndefined(); + expect(ServiceSelector['serviceMap'].get(INTEGRATION_SERVICE.NATIVE_SOURCE)).toBeUndefined(); + + ServiceSelector.getNativeDestinationService(); + ServiceSelector.getNativeSourceService(); + + expect(ServiceSelector['serviceMap'].get(INTEGRATION_SERVICE.NATIVE_DEST)).toBeDefined(); + expect(ServiceSelector['serviceMap'].get(INTEGRATION_SERVICE.NATIVE_SOURCE)).toBeDefined(); + }); + + test('fetchCachedService should throw error for invalidService', async () => { + expect(() => ServiceSelector['fetchCachedService']('invalidService')).toThrow( + 'Invalid Service', + ); + }); + + test('isCdkDestination should return true', async () => { + const destinationDefinitionConfig = { + cdkEnabled: true, + }; + 
expect(ServiceSelector['isCdkDestination'](destinationDefinitionConfig)).toBe(true); + }); + + test('isCdkDestination should return false', async () => { + const destinationDefinitionConfig = { + cdkEnabledXYZ: true, + }; + expect(ServiceSelector['isCdkDestination'](destinationDefinitionConfig)).toBe(false); + }); + + test('isCdkV2Destination should return true', async () => { + const destinationDefinitionConfig = { + cdkV2Enabled: true, + }; + expect(ServiceSelector['isCdkV2Destination'](destinationDefinitionConfig)).toBe(true); + }); + + test('isCdkV2Destination should return false', async () => { + const destinationDefinitionConfig = { + cdkV2EnabledXYZ: true, + }; + expect(ServiceSelector['isCdkV2Destination'](destinationDefinitionConfig)).toBe(false); + }); + + test('getPrimaryDestinationService should return cdk v1 dest service', async () => { + const events = [ + { + destination: { + DestinationDefinition: { + Config: { + cdkEnabled: true, + }, + }, + }, + }, + ] as ProcessorTransformationRequest[]; + expect(ServiceSelector['getPrimaryDestinationService'](events)).toBeInstanceOf( + CDKV1DestinationService, + ); + }); + + test('getPrimaryDestinationService should return cdk v2 dest service', async () => { + const events = [ + { + destination: { + DestinationDefinition: { + Config: { + cdkV2Enabled: true, + }, + }, + }, + }, + ] as ProcessorTransformationRequest[]; + expect(ServiceSelector['getPrimaryDestinationService'](events)).toBeInstanceOf( + CDKV2DestinationService, + ); + }); + + test('getPrimaryDestinationService should return native dest service', async () => { + const events = [{}] as ProcessorTransformationRequest[]; + expect(ServiceSelector['getPrimaryDestinationService'](events)).toBeInstanceOf( + NativeIntegrationDestinationService, + ); + }); + + test('getDestinationService should return native dest service', async () => { + const events = [{}] as ProcessorTransformationRequest[]; + expect(ServiceSelector.getDestinationService(events)).toBeInstanceOf( + NativeIntegrationDestinationService, + ); + }); +}); diff --git a/src/helpers/serviceSelector.ts b/src/helpers/serviceSelector.ts index 89678e94074..faa1c58240c 100644 --- a/src/helpers/serviceSelector.ts +++ b/src/helpers/serviceSelector.ts @@ -79,7 +79,7 @@ export class ServiceSelector { // eslint-disable-next-line @typescript-eslint/no-unused-vars public static getSourceService(arg: unknown) { - // Implement source event based descision logic for selecting service + // Implement source event based decision logic for selecting service } public static getDestinationService( diff --git a/src/services/__tests__/misc.test.ts b/src/services/__tests__/misc.test.ts new file mode 100644 index 00000000000..5dcd948b34e --- /dev/null +++ b/src/services/__tests__/misc.test.ts @@ -0,0 +1,26 @@ +import { DestHandlerMap } from '../../constants/destinationCanonicalNames'; +import { MiscService } from '../misc'; + +describe('Misc tests', () => { + test('should return the right transform', async () => { + const version = 'v0'; + + Object.keys(DestHandlerMap).forEach((key) => { + expect(MiscService.getDestHandler(key, version)).toEqual( + require(`../../${version}/destinations/${DestHandlerMap[key]}/transform`), + ); + }); + + expect(MiscService.getDestHandler('am', version)).toEqual( + require(`../../${version}/destinations/am/transform`), + ); + + expect(MiscService.getSourceHandler('shopify', version)).toEqual( + require(`../../${version}/sources/shopify/transform`), + ); + + expect(MiscService.getDeletionHandler('intercom', 
version)).toEqual( + require(`../../${version}/destinations/intercom/deleteUsers`), + ); + }); +}); diff --git a/src/services/destination/__tests__/nativeIntegration.test.ts b/src/services/destination/__tests__/nativeIntegration.test.ts new file mode 100644 index 00000000000..59c8b418817 --- /dev/null +++ b/src/services/destination/__tests__/nativeIntegration.test.ts @@ -0,0 +1,100 @@ +import { NativeIntegrationDestinationService } from '../nativeIntegration'; +import { DestinationPostTransformationService } from '../postTransformation'; +import { + ProcessorTransformationRequest, + ProcessorTransformationOutput, + ProcessorTransformationResponse, +} from '../../../types/index'; +import { FetchHandler } from '../../../helpers/fetchHandlers'; + +afterEach(() => { + jest.clearAllMocks(); +}); + +describe('NativeIntegration Service', () => { + test('doProcessorTransformation - success', async () => { + const destType = '__rudder_test__'; + const version = 'v0'; + const requestMetadata = {}; + const event = { message: { a: 'b' } } as ProcessorTransformationRequest; + const events: ProcessorTransformationRequest[] = [event, event]; + + const tevent = { version: 'v0', endpoint: 'http://abc' } as ProcessorTransformationOutput; + const tresp = { output: tevent, statusCode: 200 } as ProcessorTransformationResponse; + const tresponse: ProcessorTransformationResponse[] = [tresp, tresp]; + + FetchHandler.getDestHandler = jest.fn().mockImplementation((d, v) => { + expect(d).toEqual(destType); + expect(v).toEqual(version); + return { + process: jest.fn(() => { + return tevent; + }), + }; + }); + + const postTransformSpy = jest + .spyOn(DestinationPostTransformationService, 'handleProcessorTransformSucessEvents') + .mockImplementation((e, p, d) => { + expect(e).toEqual(event); + expect(p).toEqual(tevent); + return [tresp]; + }); + + const service = new NativeIntegrationDestinationService(); + const resp = await service.doProcessorTransformation( + events, + destType, + version, + requestMetadata, + ); + + expect(resp).toEqual(tresponse); + + expect(postTransformSpy).toHaveBeenCalledTimes(2); + }); + + test('doProcessorTransformation - failure', async () => { + const destType = '__rudder_test__'; + const version = 'v0'; + const requestMetadata = {}; + const event = { message: { a: 'b' } } as ProcessorTransformationRequest; + const events: ProcessorTransformationRequest[] = [event, event]; + + FetchHandler.getDestHandler = jest.fn().mockImplementation((d, v) => { + expect(d).toEqual(destType); + expect(v).toEqual(version); + return { + process: jest.fn(() => { + throw new Error('test error'); + }), + }; + }); + + const service = new NativeIntegrationDestinationService(); + const resp = await service.doProcessorTransformation( + events, + destType, + version, + requestMetadata, + ); + + const expected = [ + { + metadata: undefined, + statusCode: 500, + error: 'test error', + statTags: { errorCategory: 'transformation' }, + }, + { + metadata: undefined, + statusCode: 500, + error: 'test error', + statTags: { errorCategory: 'transformation' }, + }, + ]; + + console.log('resp:', resp); + expect(resp).toEqual(expected); + }); +}); diff --git a/src/services/destination/__tests__/postTransformation.test.ts b/src/services/destination/__tests__/postTransformation.test.ts new file mode 100644 index 00000000000..f961dcbce7e --- /dev/null +++ b/src/services/destination/__tests__/postTransformation.test.ts @@ -0,0 +1,22 @@ +import { MetaTransferObject, ProcessorTransformationRequest } from '../../../types/index'; 
+import { DestinationPostTransformationService } from '../postTransformation'; +import { ProcessorTransformationResponse } from '../../../types'; + +describe('PostTransformation Service', () => { + test('should handleProcessorTransformFailureEvents', async () => { + const e = new Error('test error'); + const metaTo = { errorContext: 'error Context' } as MetaTransferObject; + const resp = DestinationPostTransformationService.handleProcessorTransformFailureEvents( + e, + metaTo, + ); + + const expected = { + statusCode: 500, + error: 'test error', + statTags: { errorCategory: 'transformation' }, + } as ProcessorTransformationResponse; + + expect(resp).toEqual(expected); + }); +}); diff --git a/src/services/destination/__tests__/preTransformation.test.ts b/src/services/destination/__tests__/preTransformation.test.ts new file mode 100644 index 00000000000..c10bab78acb --- /dev/null +++ b/src/services/destination/__tests__/preTransformation.test.ts @@ -0,0 +1,23 @@ +import { createMockContext } from '@shopify/jest-koa-mocks'; +import { ProcessorTransformationRequest } from '../../../types/index'; +import { DestinationPreTransformationService } from '../../destination/preTransformation'; + +describe('PreTransformation Service', () => { + test('should enhance events with query params', async () => { + const ctx = createMockContext(); + ctx.request.query = { cycle: 'true', x: 'y' }; + + const events: ProcessorTransformationRequest[] = [ + { message: { a: 'b' } } as ProcessorTransformationRequest, + ]; + const expected: ProcessorTransformationRequest[] = [ + { + message: { a: 'b' }, + request: { query: { cycle: 'true', x: 'y' } }, + } as ProcessorTransformationRequest, + ]; + + const resp = DestinationPreTransformationService.preProcess(events, ctx); + expect(resp).toEqual(expected); + }); +}); diff --git a/src/services/source/__tests__/nativeIntegration.test.ts b/src/services/source/__tests__/nativeIntegration.test.ts new file mode 100644 index 00000000000..bb40438811f --- /dev/null +++ b/src/services/source/__tests__/nativeIntegration.test.ts @@ -0,0 +1,89 @@ +import { NativeIntegrationSourceService } from '../nativeIntegration'; +import { SourcePostTransformationService } from '../postTransformation'; +import { SourceTransformationResponse, RudderMessage } from '../../../types/index'; +import stats from '../../../util/stats'; +import { FetchHandler } from '../../../helpers/fetchHandlers'; + +afterEach(() => { + jest.clearAllMocks(); +}); + +describe('NativeIntegration Source Service', () => { + test('sourceTransformRoutine - success', async () => { + const sourceType = '__rudder_test__'; + const version = 'v0'; + const requestMetadata = {}; + + const event = { message: { a: 'b' } }; + const events = [event, event]; + + const tevent = { anonymousId: 'test' } as RudderMessage; + const tresp = { output: { batch: [tevent] }, statusCode: 200 } as SourceTransformationResponse; + + const tresponse = [ + { output: { batch: [{ anonymousId: 'test' }] }, statusCode: 200 }, + { output: { batch: [{ anonymousId: 'test' }] }, statusCode: 200 }, + ]; + + FetchHandler.getSourceHandler = jest.fn().mockImplementationOnce((d, v) => { + expect(d).toEqual(sourceType); + expect(v).toEqual(version); + return { + process: jest.fn(() => { + return tevent; + }), + }; + }); + + const postTransformSpy = jest + .spyOn(SourcePostTransformationService, 'handleSuccessEventsSource') + .mockImplementation((e) => { + expect(e).toEqual(tevent); + return tresp; + }); + + const service = new NativeIntegrationSourceService(); + const 
resp = await service.sourceTransformRoutine(events, sourceType, version, requestMetadata); + + expect(resp).toEqual(tresponse); + + expect(postTransformSpy).toHaveBeenCalledTimes(2); + }); + + test('sourceTransformRoutine - failure', async () => { + const sourceType = '__rudder_test__'; + const version = 'v0'; + const requestMetadata = {}; + + const event = { message: { a: 'b' } }; + const events = [event, event]; + + const tresp = { error: 'error' } as SourceTransformationResponse; + + const tresponse = [{ error: 'error' }, { error: 'error' }]; + + FetchHandler.getSourceHandler = jest.fn().mockImplementationOnce((d, v) => { + expect(d).toEqual(sourceType); + expect(v).toEqual(version); + return { + process: jest.fn(() => { + throw new Error('test error'); + }), + }; + }); + + const postTransformSpy = jest + .spyOn(SourcePostTransformationService, 'handleFailureEventsSource') + .mockImplementation((e, m) => { + return tresp; + }); + jest.spyOn(stats, 'increment').mockImplementation(() => {}); + + const service = new NativeIntegrationSourceService(); + const resp = await service.sourceTransformRoutine(events, sourceType, version, requestMetadata); + + expect(resp).toEqual(tresponse); + + expect(postTransformSpy).toHaveBeenCalledTimes(2); + }); +}); diff --git a/src/services/source/__tests__/postTransformation.test.ts b/src/services/source/__tests__/postTransformation.test.ts new file mode 100644 index 00000000000..e5efbe8194f --- /dev/null +++ b/src/services/source/__tests__/postTransformation.test.ts @@ -0,0 +1,49 @@ +import { + MetaTransferObject, + RudderMessage, + SourceTransformationResponse, +} from '../../../types/index'; +import { SourcePostTransformationService } from '../../source/postTransformation'; + +describe('Source PostTransformation Service', () => { + test('should handleFailureEventsSource', async () => { + const e = new Error('test error'); + const metaTo = { errorContext: 'error Context' } as MetaTransferObject; + const resp = SourcePostTransformationService.handleFailureEventsSource(e, metaTo); + + const expected = { + statusCode: 500, + error: 'test error', + statTags: { errorCategory: 'transformation' }, + } as SourceTransformationResponse; + + expect(resp).toEqual(expected); + }); + + test('should return the event as SourceTransformationResponse if it has outputToSource property', () => { + const event = { + outputToSource: {}, + output: { batch: [{ anonymousId: 'test' }] }, + } as SourceTransformationResponse; + + const result = SourcePostTransformationService.handleSuccessEventsSource(event); + + expect(result).toEqual(event); + }); + + test('should return the events as batch in SourceTransformationResponse if it is an array', () => { + const events = [{ anonymousId: 'test' }, { anonymousId: 'test' }] as RudderMessage[]; + + const result = SourcePostTransformationService.handleSuccessEventsSource(events); + + expect(result).toEqual({ output: { batch: events } }); + }); + + test('should return the event as batch in SourceTransformationResponse if it is a single object', () => { + const event = { anonymousId: 'test' } as RudderMessage; + + const result = SourcePostTransformationService.handleSuccessEventsSource(event); + + expect(result).toEqual({ output: { batch: [event] } }); + }); +}); diff --git a/src/util/prometheus.js b/src/util/prometheus.js index eec480bbff6..0fa17dc9bda 100644 --- a/src/util/prometheus.js +++ b/src/util/prometheus.js @@ -497,7 +497,7 @@ class Prometheus { name: 'shopify_anon_id_resolve', help: 'shopify_anon_id_resolve', type: 'counter', - 
labelNames: ['method', 'writeKey', 'shopifyTopic'], + labelNames: ['method', 'writeKey', 'shopifyTopic', 'source'], }, { name: 'shopify_redis_calls', diff --git a/src/v0/destinations/adobe_analytics/transform.js b/src/v0/destinations/adobe_analytics/transform.js index b4281387249..5d3d6e7d00f 100644 --- a/src/v0/destinations/adobe_analytics/transform.js +++ b/src/v0/destinations/adobe_analytics/transform.js @@ -18,6 +18,7 @@ const { getIntegrationsObj, removeUndefinedAndNullValues, simpleProcessRouterDest, + validateEventAndLowerCaseConversion, } = require('../../util'); const { @@ -307,7 +308,7 @@ const processTrackEvent = (message, adobeEventName, destinationConfig, extras = destinationConfig; const { event: rawMessageEvent, properties } = message; const { overrideEventString, overrideProductString } = properties; - const event = rawMessageEvent.toLowerCase(); + const event = validateEventAndLowerCaseConversion(rawMessageEvent, true, true); const adobeEventArr = adobeEventName ? adobeEventName.split(',') : []; // adobeEventArr is an array of events which is defined as // ["eventName", "mapped Adobe Event=mapped merchproperty's value", "mapped Adobe Event=mapped merchproperty's value", . . .] diff --git a/src/v0/destinations/gainsight_px/util.js b/src/v0/destinations/gainsight_px/util.js index e03fbbf1489..83d23566dd8 100644 --- a/src/v0/destinations/gainsight_px/util.js +++ b/src/v0/destinations/gainsight_px/util.js @@ -6,13 +6,13 @@ const { getDynamicErrorType } = require('../../../adapters/utils/networkUtils'); const { JSON_MIME_TYPE } = require('../../util/constant'); const handleErrorResponse = (error, customErrMessage, expectedErrStatus, defaultStatus = 400) => { + let destResp; let errMessage = ''; let errorStatus = defaultStatus; if (error.response && error.response.data) { - errMessage = error.response.data.externalapierror - ? JSON.stringify(error.response.data.externalapierror) - : JSON.stringify(error.response.data); + destResp = error.response?.data?.externalapierror ?? error.response?.data; + errMessage = JSON.stringify(destResp); errorStatus = error.response.status; @@ -26,7 +26,7 @@ const handleErrorResponse = (error, customErrMessage, expectedErrStatus, default { [tags.TAG_NAMES.ERROR_TYPE]: getDynamicErrorType(errorStatus), }, - error, + destResp, ); }; diff --git a/src/v0/destinations/google_adwords_remarketing_lists/transform.js b/src/v0/destinations/google_adwords_remarketing_lists/transform.js index 9526973fb8f..9ab415346a2 100644 --- a/src/v0/destinations/google_adwords_remarketing_lists/transform.js +++ b/src/v0/destinations/google_adwords_remarketing_lists/transform.js @@ -218,7 +218,7 @@ const processEvent = async (metadata, message, destination) => { } Object.values(createdPayload).forEach((data) => { - const consentObj = populateConsentForGoogleDestinations(message.properties); + const consentObj = populateConsentForGoogleDestinations(destination.Config); response.push(responseBuilder(metadata, data, destination, message, consentObj)); }); return response; diff --git a/src/v0/destinations/hs/util.js b/src/v0/destinations/hs/util.js index 32ee923f5fc..359c93dc1a5 100644 --- a/src/v0/destinations/hs/util.js +++ b/src/v0/destinations/hs/util.js @@ -19,6 +19,7 @@ const { getHashFromArray, getDestinationExternalIDInfoForRetl, getValueFromMessage, + isNull, } = require('../../util'); const { CONTACT_PROPERTY_MAP_ENDPOINT, @@ -223,7 +224,9 @@ const getTransformedJSON = async (message, destination, propertyMap) => { // lowercase and replace ' ' & '.' 
with '_' const hsSupportedKey = formatKey(traitsKey); if (!rawPayload[traitsKey] && propertyMap[hsSupportedKey]) { - let propValue = traits[traitsKey]; + // HS accepts empty string to remove the property from contact + // https://community.hubspot.com/t5/APIs-Integrations/Clearing-values-of-custom-properties-in-Hubspot-contact-using/m-p/409156 + let propValue = isNull(traits[traitsKey]) ? '' : traits[traitsKey]; if (propertyMap[hsSupportedKey] === 'date') { propValue = getUTCMidnightTimeStampValue(propValue); } diff --git a/src/v0/destinations/klaviyo/data/KlaviyoIdentify.json b/src/v0/destinations/klaviyo/data/KlaviyoIdentify.json index e128f2666c8..b358919bc1e 100644 --- a/src/v0/destinations/klaviyo/data/KlaviyoIdentify.json +++ b/src/v0/destinations/klaviyo/data/KlaviyoIdentify.json @@ -57,7 +57,12 @@ "traits.address.region", "context.traits.region", "context.traits.address.region", - "properties.region" + "properties.region", + "traits.state", + "traits.address.state", + "context.traits.address.state", + "context.traits.state", + "properties.state" ], "required": false }, @@ -77,14 +82,19 @@ "sourceKeys": [ "traits.zip", "traits.postalcode", + "traits.postalCode", "traits.address.zip", "traits.address.postalcode", + "traits.address.postalCode", "context.traits.zip", "context.traits.postalcode", + "context.traits.postalCode", "context.traits.address.zip", "context.traits.address.postalcode", + "context.traits.address.postalCode", "properties.zip", - "properties.postalcode" + "properties.postalcode", + "properties.postalCode" ], "required": false }, @@ -97,5 +107,16 @@ "destKey": "location.timezone", "sourceKeys": ["traits.timezone", "context.traits.timezone", "properties.timezone"], "required": false + }, + { + "destKey": "location.address1", + "sourceKeys": [ + "traits.street", + "traits.address.street", + "context.traits.street", + "context.traits.address.street", + "properties.street" + ], + "required": false } ] diff --git a/src/v0/destinations/klaviyo/data/KlaviyoProfile.json b/src/v0/destinations/klaviyo/data/KlaviyoProfile.json index e2a8d86085b..329ecd978f5 100644 --- a/src/v0/destinations/klaviyo/data/KlaviyoProfile.json +++ b/src/v0/destinations/klaviyo/data/KlaviyoProfile.json @@ -41,7 +41,12 @@ "traits.address.region", "context.traits.region", "context.traits.address.region", - "properties.region" + "properties.region", + "traits.state", + "traits.address.state", + "context.traits.address.state", + "context.traits.state", + "properties.state" ], "required": false }, @@ -61,14 +66,19 @@ "sourceKeys": [ "traits.zip", "traits.postalcode", + "traits.postalCode", "traits.address.zip", "traits.address.postalcode", + "traits.address.postalCode", "context.traits.zip", "context.traits.postalcode", + "context.traits.postalCode", "context.traits.address.zip", "context.traits.address.postalcode", + "context.traits.address.postalCode", "properties.zip", - "properties.postalcode" + "properties.postalcode", + "properties.postalCode" ], "required": false }, @@ -81,5 +91,16 @@ "destKey": "$image", "sourceKeys": ["traits.image", "context.traits.image", "properties.image"], "required": false + }, + { + "destKey": "$address1", + "sourceKeys": [ + "traits.street", + "traits.address.street", + "context.traits.street", + "context.traits.address.street", + "properties.street" + ], + "required": false } ] diff --git a/src/v0/destinations/one_signal/transform.js b/src/v0/destinations/one_signal/transform.js index a072aef0e4f..b025660fa42 100644 --- a/src/v0/destinations/one_signal/transform.js +++ 
b/src/v0/destinations/one_signal/transform.js @@ -122,7 +122,7 @@ const trackResponseBuilder = (message, { Config }) => { if (!externalUserId) { throw new InstrumentationError('userId is required for track events/updating a device'); } - endpoint = `${endpoint}/${appId}/users/${externalUserId}`; + endpoint = `${endpoint}/${appId}/users/${encodeURIComponent(externalUserId)}`; const payload = {}; const tags = {}; /* Populating event as true in tags. @@ -163,7 +163,7 @@ const groupResponseBuilder = (message, { Config }) => { if (!externalUserId) { throw new InstrumentationError('userId is required for group events'); } - endpoint = `${endpoint}/${appId}/users/${externalUserId}`; + endpoint = `${endpoint}/${appId}/users/${encodeURIComponent(externalUserId)}`; const payload = {}; const tags = { groupId, diff --git a/src/v0/destinations/sfmc/config.js b/src/v0/destinations/sfmc/config.js index f856c44d6b3..1b1f5c323ba 100644 --- a/src/v0/destinations/sfmc/config.js +++ b/src/v0/destinations/sfmc/config.js @@ -4,6 +4,7 @@ const ENDPOINTS = { GET_TOKEN: `auth.marketingcloudapis.com/v2/token`, CONTACTS: `rest.marketingcloudapis.com/contacts/v1/contacts`, INSERT_CONTACTS: `rest.marketingcloudapis.com/hub/v1/dataevents/key:`, + EVENT: 'rest.marketingcloudapis.com/interaction/v1/events', }; const CONFIG_CATEGORIES = { diff --git a/src/v0/destinations/sfmc/transform.js b/src/v0/destinations/sfmc/transform.js index 553ceb28289..53925bc7ed5 100644 --- a/src/v0/destinations/sfmc/transform.js +++ b/src/v0/destinations/sfmc/transform.js @@ -1,3 +1,4 @@ +/* eslint-disable no-param-reassign */ /* eslint-disable no-nested-ternary */ const { NetworkError, @@ -188,6 +189,26 @@ const responseBuilderForInsertData = ( return response; }; +// DOC : https://developer.salesforce.com/docs/marketing/marketing-cloud/references/mc_rest_interaction/postEvent.html + +const responseBuilderForMessageEvent = (message, subDomain, authToken, hashMapEventDefinition) => { + const contactKey = message.properties.contactId; + delete message.properties.contactId; + const response = defaultRequestConfig(); + response.method = defaultPostRequestConfig.requestMethod; + response.endpoint = `https://${subDomain}.${ENDPOINTS.EVENT}`; + response.headers = { + 'Content-Type': JSON_MIME_TYPE, + Authorization: `Bearer ${authToken}`, + }; + response.body.JSON = { + ContactKey: contactKey, + EventDefinitionKey: hashMapEventDefinition[message.event.toLowerCase()], + Data: { ...message.properties }, + }; + return response; +}; + const responseBuilderSimple = async (message, category, destination) => { const { clientId, @@ -198,6 +219,7 @@ const responseBuilderSimple = async (message, category, destination) => { eventToExternalKey, eventToPrimaryKey, eventToUUID, + eventToDefinitionMapping, } = destination.Config; // map from an event name to an external key of a data extension. const hashMapExternalKey = getHashFromArray(eventToExternalKey, 'from', 'to'); @@ -207,6 +229,8 @@ const responseBuilderSimple = async (message, category, destination) => { const hashMapUUID = getHashFromArray(eventToUUID, 'event', 'uuid'); // token needed for authorization for subsequent calls const authToken = await getToken(clientId, clientSecret, subDomain); + // map from an event name to an event definition key. + const hashMapEventDefinition = getHashFromArray(eventToDefinitionMapping, 'from', 'to'); // if createOrUpdateContacts is true identify calls for create and update of contacts will not occur. 
if (category.type === 'identify' && !createOrUpdateContacts) { // first call to identify the contact @@ -240,10 +264,12 @@ const responseBuilderSimple = async (message, category, destination) => { if (typeof message.event !== 'string') { throw new ConfigurationError('Event name must be a string'); } + if (hashMapEventDefinition[message.event.toLowerCase()]) { + return responseBuilderForMessageEvent(message, subDomain, authToken, hashMapEventDefinition); + } if (!isDefinedAndNotNull(hashMapExternalKey[message.event.toLowerCase()])) { throw new ConfigurationError('Event not mapped for this track call'); } - return responseBuilderForInsertData( message, hashMapExternalKey[message.event.toLowerCase()], @@ -293,4 +319,9 @@ const processRouterDest = async (inputs, reqMetadata) => { return respList; }; -module.exports = { process, processRouterDest, responseBuilderSimple }; +module.exports = { + process, + processRouterDest, + responseBuilderSimple, + responseBuilderForMessageEvent, +}; diff --git a/src/v0/destinations/sfmc/transform.test.js b/src/v0/destinations/sfmc/transform.test.js index c49c49017c6..8d382ef6499 100644 --- a/src/v0/destinations/sfmc/transform.test.js +++ b/src/v0/destinations/sfmc/transform.test.js @@ -1,7 +1,7 @@ const { ConfigurationError } = require('@rudderstack/integrations-lib'); const axios = require('axios'); const MockAxiosAdapter = require('axios-mock-adapter'); -const { responseBuilderSimple } = require('./transform'); +const { responseBuilderSimple, responseBuilderForMessageEvent } = require('./transform'); beforeAll(() => { const mock = new MockAxiosAdapter(axios); mock @@ -122,4 +122,44 @@ describe('responseBuilderSimple', () => { expect(response).toHaveProperty('body.JSON'); expect(response).toHaveProperty('headers'); }); + + it('should build response object with correct details for message event', () => { + const message = { + userId: 'u123', + event: 'testEvent', + properties: { + contactId: '12345', + prop1: 'value1', + prop2: 'value2', + }, + }; + const subDomain = 'subdomain'; + const authToken = 'token'; + const hashMapEventDefinition = { + testevent: 'eventDefinitionKey', + }; + + const response = responseBuilderForMessageEvent( + message, + subDomain, + authToken, + hashMapEventDefinition, + ); + expect(response.method).toBe('POST'); + expect(response.endpoint).toBe( + 'https://subdomain.rest.marketingcloudapis.com/interaction/v1/events', + ); + expect(response.headers).toEqual({ + 'Content-Type': 'application/json', + Authorization: 'Bearer token', + }); + expect(response.body.JSON).toEqual({ + ContactKey: '12345', + EventDefinitionKey: 'eventDefinitionKey', + Data: { + prop1: 'value1', + prop2: 'value2', + }, + }); + }); }); diff --git a/src/v0/util/googleUtils/index.js b/src/v0/util/googleUtils/index.js index c8d872e90e9..de73b0fb05a 100644 --- a/src/v0/util/googleUtils/index.js +++ b/src/v0/util/googleUtils/index.js @@ -8,21 +8,27 @@ const GOOGLE_ALLOWED_CONSENT_STATUS = ['UNSPECIFIED', 'UNKNOWN', 'GRANTED', 'DEN * ref : https://developers.google.com/google-ads/api/rest/reference/rest/v15/Consent */ -const populateConsentForGoogleDestinations = (properties) => { +const populateConsentForGoogleDestinations = (config) => { const consent = {}; - if ( - properties?.userDataConsent && - GOOGLE_ALLOWED_CONSENT_STATUS.includes(properties.userDataConsent) - ) { - consent.adUserData = properties.userDataConsent; + if (config?.userDataConsent) { + if (GOOGLE_ALLOWED_CONSENT_STATUS.includes(config.userDataConsent)) { + consent.adUserData = 
config.userDataConsent; + } else { + consent.adUserData = 'UNKNOWN'; + } + } else { + consent.adUserData = 'UNSPECIFIED'; } - if ( - properties?.personalizationConsent && - GOOGLE_ALLOWED_CONSENT_STATUS.includes(properties.personalizationConsent) - ) { - consent.adPersonalization = properties.personalizationConsent; + if (config?.personalizationConsent) { + if (GOOGLE_ALLOWED_CONSENT_STATUS.includes(config.personalizationConsent)) { + consent.adPersonalization = config.personalizationConsent; + } else { + consent.adPersonalization = 'UNKNOWN'; + } + } else { + consent.adPersonalization = 'UNSPECIFIED'; } return consent; }; diff --git a/src/v0/util/googleUtils/index.test.js b/src/v0/util/googleUtils/index.test.js index 27eff2a7936..9d1aa5e51a9 100644 --- a/src/v0/util/googleUtils/index.test.js +++ b/src/v0/util/googleUtils/index.test.js @@ -1,50 +1,58 @@ const { populateConsentForGoogleDestinations } = require('./index'); describe('unit test for populateConsentForGoogleDestinations', () => { - // Returns an empty object when no properties are provided. - it('should return an empty object when no properties are provided', () => { + it('should return an UNSPECIFIED object when no properties are provided', () => { const result = populateConsentForGoogleDestinations({}); - expect(result).toEqual({}); + expect(result).toEqual({ + adPersonalization: 'UNSPECIFIED', + adUserData: 'UNSPECIFIED', + }); }); - // Sets adUserData property of consent object when userDataConsent property is provided and its value is one of the allowed consent statuses. it('should set adUserData property of consent object when userDataConsent property is provided and its value is one of the allowed consent statuses', () => { const properties = { userDataConsent: 'GRANTED' }; const result = populateConsentForGoogleDestinations(properties); - expect(result).toEqual({ adUserData: 'GRANTED' }); + expect(result).toEqual({ adUserData: 'GRANTED', adPersonalization: 'UNSPECIFIED' }); }); - // Sets adPersonalization property of consent object when personalizationConsent property is provided and its value is one of the allowed consent statuses. it('should set adPersonalization property of consent object when personalizationConsent property is provided and its value is one of the allowed consent statuses', () => { const properties = { personalizationConsent: 'DENIED' }; const result = populateConsentForGoogleDestinations(properties); - expect(result).toEqual({ adPersonalization: 'DENIED' }); + expect(result).toEqual({ adPersonalization: 'DENIED', adUserData: 'UNSPECIFIED' }); }); - // Returns an empty object when properties parameter is not provided. - it('should return an empty object when properties parameter is not provided', () => { + it('should return an UNSPECIFIED object when properties parameter is not provided', () => { const result = populateConsentForGoogleDestinations(); - expect(result).toEqual({}); + expect(result).toEqual({ + adPersonalization: 'UNSPECIFIED', + adUserData: 'UNSPECIFIED', + }); }); - // Returns an empty object when properties parameter is null. - it('should return an empty object when properties parameter is null', () => { + it('should return an UNSPECIFIED object when properties parameter is null', () => { const result = populateConsentForGoogleDestinations(null); - expect(result).toEqual({}); + expect(result).toEqual({ + adPersonalization: 'UNSPECIFIED', + adUserData: 'UNSPECIFIED', + }); }); - // Returns an empty object when properties parameter is an empty object. 
- it('should return an empty object when properties parameter is an empty object', () => { + it('should return an UNSPECIFIED object when properties parameter is an empty object', () => { const result = populateConsentForGoogleDestinations({}); - expect(result).toEqual({}); + expect(result).toEqual({ + adPersonalization: 'UNSPECIFIED', + adUserData: 'UNSPECIFIED', + }); }); - // Returns an empty object when properties parameter is an empty object. - it('should return an empty object when properties parameter contains adUserData and adPersonalization with non-allowed values', () => { + it('should return UNKNOWN when properties parameter contains adUserData and adPersonalization with non-allowed values', () => { const result = populateConsentForGoogleDestinations({ - adUserData: 'RANDOM', + userDataConsent: 'RANDOM', personalizationConsent: 'RANDOM', }); - expect(result).toEqual({}); + expect(result).toEqual({ + adPersonalization: 'UNKNOWN', + adUserData: 'UNKNOWN', + }); }); }); diff --git a/src/v0/util/index.js b/src/v0/util/index.js index 1d952693f27..c1debce0888 100644 --- a/src/v0/util/index.js +++ b/src/v0/util/index.js @@ -52,6 +52,7 @@ const removeUndefinedAndNullAndEmptyValues = (obj) => lodash.pickBy(obj, isDefinedAndNotNullAndNotEmpty); const isBlank = (value) => lodash.isEmpty(lodash.toString(value)); const flattenMap = (collection) => lodash.flatMap(collection, (x) => x); +const isNull = (x) => lodash.isNull(x); // ======================================================================== // GENERIC UTLITY // ======================================================================== @@ -2200,6 +2201,25 @@ const combineBatchRequestsWithSameJobIds = (inputBatches) => { return combineBatches(combineBatches(inputBatches)); }; +/** + * This function validates the event and returns it as a string. + * @param {*} isMandatory The event is a required field. + * @param {*} convertToLowerCase The event should be converted to lower-case. + * @returns {string} Event name converted to string. + */ +const validateEventAndLowerCaseConversion = (event, isMandatory, convertToLowerCase) => { + if (!isDefined(event) || typeof event === 'object' || typeof event === 'boolean') { + throw new InstrumentationError('Event should not be a object, NaN, boolean or undefined'); + } + + // handling 0 as it is a valid value + if (isMandatory && !event && event !== 0) { + throw new InstrumentationError('Event is a required field'); + } + + return convertToLowerCase ? 
event.toString().toLowerCase() : event.toString(); +}; + // ======================================================================== // EXPORTS // ======================================================================== @@ -2266,6 +2286,7 @@ module.exports = { isDefinedAndNotNullAndNotEmpty, isEmpty, isNotEmpty, + isNull, isEmptyObject, isHttpStatusRetryable, isHttpStatusSuccess, @@ -2315,4 +2336,5 @@ module.exports = { findExistingBatch, removeDuplicateMetadata, combineBatchRequestsWithSameJobIds, + validateEventAndLowerCaseConversion, }; diff --git a/src/v0/util/index.test.js b/src/v0/util/index.test.js index 4dc62556911..810eb5a9d48 100644 --- a/src/v0/util/index.test.js +++ b/src/v0/util/index.test.js @@ -1,4 +1,4 @@ -const { TAG_NAMES } = require('@rudderstack/integrations-lib'); +const { TAG_NAMES, InstrumentationError } = require('@rudderstack/integrations-lib'); const utilities = require('.'); const { getFuncTestData } = require('../../../test/testHelper'); const { FilteredEventsError } = require('./errorTypes'); @@ -7,6 +7,7 @@ const { flattenJson, generateExclusionList, combineBatchRequestsWithSameJobIds, + validateEventAndLowerCaseConversion, } = require('./index'); // Names of the utility functions to test @@ -36,7 +37,6 @@ describe('Utility Functions Tests', () => { test.each(funcTestData)('$description', async ({ description, input, output }) => { try { let result; - // This is to allow sending multiple arguments to the function if (Array.isArray(input)) { result = utilities[funcName](...input); @@ -456,3 +456,53 @@ describe('Unit test cases for combineBatchRequestsWithSameJobIds', () => { expect(combineBatchRequestsWithSameJobIds(input)).toEqual(expectedOutput); }); }); + +describe('validateEventAndLowerCaseConversion Tests', () => { + it('should return string conversion of number types', () => { + const ev = 0; + expect(validateEventAndLowerCaseConversion(ev, false, true)).toBe('0'); + expect(validateEventAndLowerCaseConversion(ev, true, false)).toBe('0'); + }); + + it('should convert string types to lowercase', () => { + const ev = 'Abc'; + expect(validateEventAndLowerCaseConversion(ev, true, true)).toBe('abc'); + }); + + it('should throw error if event is object type', () => { + expect(() => { + validateEventAndLowerCaseConversion({}, true, true); + }).toThrow(InstrumentationError); + expect(() => { + validateEventAndLowerCaseConversion([1, 2], false, true); + }).toThrow(InstrumentationError); + expect(() => { + validateEventAndLowerCaseConversion({ a: 1 }, true, true); + }).toThrow(InstrumentationError); + }); + + it('should convert string to lowercase', () => { + expect(validateEventAndLowerCaseConversion('Abc', true, true)).toBe('abc'); + expect(validateEventAndLowerCaseConversion('ABC', true, false)).toBe('ABC'); + expect(validateEventAndLowerCaseConversion('abc55', false, true)).toBe('abc55'); + expect(validateEventAndLowerCaseConversion(123, false, true)).toBe('123'); + }); + + it('should throw error for null and undefined', () => { + expect(() => { + validateEventAndLowerCaseConversion(null, true, true); + }).toThrow(InstrumentationError); + expect(() => { + validateEventAndLowerCaseConversion(undefined, false, true); + }).toThrow(InstrumentationError); + }); + + it('should throw error for boolean values', () => { + expect(() => { + validateEventAndLowerCaseConversion(true, true, true); + }).toThrow(InstrumentationError); + expect(() => { + validateEventAndLowerCaseConversion(false, false, false); + }).toThrow(InstrumentationError); + }); +}); diff --git 
a/test/apitests/service.api.test.ts b/test/apitests/service.api.test.ts index cbc2abb3b25..266619b6ac2 100644 --- a/test/apitests/service.api.test.ts +++ b/test/apitests/service.api.test.ts @@ -6,6 +6,8 @@ import Koa from 'koa'; import bodyParser from 'koa-bodyparser'; import setValue from 'set-value'; import { applicationRoutes } from '../../src/routes'; +import { FetchHandler } from '../../src/helpers/fetchHandlers'; +import networkHandlerFactory from '../../src/adapters/networkHandlerFactory'; let server: any; const OLD_ENV = process.env; @@ -30,6 +32,10 @@ afterAll(async () => { await httpTerminator.terminate(); }); +afterEach(() => { + jest.clearAllMocks(); +}); + const getDataFromPath = (pathInput) => { const testDataFile = fs.readFileSync(path.resolve(__dirname, pathInput)); return JSON.parse(testDataFile.toString()); @@ -76,6 +82,332 @@ describe('features tests', () => { }); }); +describe('Api tests with a mock source/destination', () => { + test('(mock destination) Processor transformation scenario with single event', async () => { + const destType = '__rudder_test__'; + const version = 'v0'; + + const getInputData = () => { + return [ + { message: { a: 'b1' }, destination: {}, metadata: { jobId: 1 } }, + { message: { a: 'b2' }, destination: {}, metadata: { jobId: 2 } }, + ]; + }; + const tevent = { version: 'v0', endpoint: 'http://abc' }; + + const getDestHandlerSpy = jest + .spyOn(FetchHandler, 'getDestHandler') + .mockImplementationOnce((d, v) => { + expect(d).toEqual(destType); + expect(v).toEqual(version); + return { + process: jest.fn(() => { + return tevent; + }), + }; + }); + + const expected = [ + { + output: { version: 'v0', endpoint: 'http://abc', userId: '' }, + metadata: { jobId: 1 }, + statusCode: 200, + }, + { + output: { version: 'v0', endpoint: 'http://abc', userId: '' }, + metadata: { jobId: 2 }, + statusCode: 200, + }, + ]; + + const response = await request(server) + .post('/v0/destinations/__rudder_test__') + .set('Accept', 'application/json') + .send(getInputData()); + + expect(response.status).toEqual(200); + expect(JSON.parse(response.text)).toEqual(expected); + expect(getDestHandlerSpy).toHaveBeenCalledTimes(1); + }); + + test('(mock destination) Batching', async () => { + const destType = '__rudder_test__'; + const version = 'v0'; + + const getBatchInputData = () => { + return { + input: [ + { message: { a: 'b1' }, destination: {}, metadata: { jobId: 1 } }, + { message: { a: 'b2' }, destination: {}, metadata: { jobId: 2 } }, + ], + destType: destType, + }; + }; + const tevent = [ + { + batchedRequest: { version: 'v0', endpoint: 'http://abc' }, + metadata: [{ jobId: 1 }, { jobId: 2 }], + statusCode: 200, + }, + ]; + + const getDestHandlerSpy = jest + .spyOn(FetchHandler, 'getDestHandler') + .mockImplementationOnce((d, v) => { + expect(d).toEqual(destType); + expect(v).toEqual(version); + return { + batch: jest.fn(() => { + return tevent; + }), + }; + }); + + const response = await request(server) + .post('/batch') + .set('Accept', 'application/json') + .send(getBatchInputData()); + + expect(response.status).toEqual(200); + expect(JSON.parse(response.text)).toEqual(tevent); + expect(getDestHandlerSpy).toHaveBeenCalledTimes(1); + }); + + test('(mock destination) Router transformation', async () => { + const destType = '__rudder_test__'; + const version = 'v0'; + + const getRouterTransformInputData = () => { + return { + input: [ + { message: { a: 'b1' }, destination: {}, metadata: { jobId: 1 } }, + { message: { a: 'b2' }, destination: {}, metadata: { jobId: 
2 } }, + ], + destType: destType, + }; + }; + const tevent = [ + { + batchedRequest: { version: 'v0', endpoint: 'http://abc' }, + metadata: [{ jobId: 1 }, { jobId: 2 }], + statusCode: 200, + }, + ]; + + const getDestHandlerSpy = jest + .spyOn(FetchHandler, 'getDestHandler') + .mockImplementationOnce((d, v) => { + expect(d).toEqual(destType); + expect(v).toEqual(version); + return { + processRouterDest: jest.fn(() => { + return tevent; + }), + }; + }); + + const response = await request(server) + .post('/routerTransform') + .set('Accept', 'application/json') + .send(getRouterTransformInputData()); + + expect(response.status).toEqual(200); + expect(JSON.parse(response.text)).toEqual({ output: tevent }); + expect(getDestHandlerSpy).toHaveBeenCalledTimes(1); + }); + + test('(mock destination) v0 proxy', async () => { + const destType = '__rudder_test__'; + const version = 'v0'; + + const getData = () => { + return { + body: { JSON: { a: 'b' } }, + metadata: { a1: 'b1' }, + destinationConfig: { a2: 'b2' }, + }; + }; + + const proxyResponse = { success: true, response: { response: 'response', code: 200 } }; + + const mockNetworkHandler = { + proxy: jest.fn((r, d) => { + expect(r).toEqual(getData()); + expect(d).toEqual(destType); + return proxyResponse; + }), + processAxiosResponse: jest.fn((r) => { + expect(r).toEqual(proxyResponse); + return { response: 'response', status: 200 }; + }), + responseHandler: jest.fn((o, d) => { + expect(o.destinationResponse).toEqual({ response: 'response', status: 200 }); + expect(o.rudderJobMetadata).toEqual({ a1: 'b1' }); + expect(o.destType).toEqual(destType); + return { status: 200, message: 'response', destinationResponse: 'response' }; + }), + }; + + const getNetworkHandlerSpy = jest + .spyOn(networkHandlerFactory, 'getNetworkHandler') + .mockImplementationOnce((d, v) => { + expect(d).toEqual(destType); + expect(v).toEqual(version); + return { + networkHandler: mockNetworkHandler, + handlerVersion: version, + }; + }); + + const response = await request(server) + .post('/v0/destinations/__rudder_test__/proxy') + .set('Accept', 'application/json') + .send(getData()); + + expect(response.status).toEqual(200); + expect(JSON.parse(response.text)).toEqual({ + output: { status: 200, message: 'response', destinationResponse: 'response' }, + }); + expect(getNetworkHandlerSpy).toHaveBeenCalledTimes(1); + }); + + test('(mock destination) v1 proxy', async () => { + const destType = '__rudder_test__'; + const version = 'v1'; + + const getData = () => { + return { + body: { JSON: { a: 'b' } }, + metadata: [{ a1: 'b1' }], + destinationConfig: { a2: 'b2' }, + }; + }; + + const proxyResponse = { success: true, response: { response: 'response', code: 200 } }; + const respHandlerResponse = { + status: 200, + message: 'response', + destinationResponse: 'response', + response: [{ statusCode: 200, metadata: { a1: 'b1' } }], + }; + + const mockNetworkHandler = { + proxy: jest.fn((r, d) => { + expect(r).toEqual(getData()); + expect(d).toEqual(destType); + return proxyResponse; + }), + processAxiosResponse: jest.fn((r) => { + expect(r).toEqual(proxyResponse); + return { response: 'response', status: 200 }; + }), + responseHandler: jest.fn((o, d) => { + expect(o.destinationResponse).toEqual({ response: 'response', status: 200 }); + expect(o.rudderJobMetadata).toEqual([{ a1: 'b1' }]); + expect(o.destType).toEqual(destType); + return respHandlerResponse; + }), + }; + + const getNetworkHandlerSpy = jest + .spyOn(networkHandlerFactory, 'getNetworkHandler') + .mockImplementationOnce((d, 
v) => { + expect(d).toEqual(destType); + expect(v).toEqual(version); + return { + networkHandler: mockNetworkHandler, + handlerVersion: version, + }; + }); + + const response = await request(server) + .post('/v1/destinations/__rudder_test__/proxy') + .set('Accept', 'application/json') + .send(getData()); + + expect(response.status).toEqual(200); + expect(JSON.parse(response.text)).toEqual({ + output: respHandlerResponse, + }); + expect(getNetworkHandlerSpy).toHaveBeenCalledTimes(1); + }); + + test('(mock source) v0 source transformation', async () => { + const sourceType = '__rudder_test__'; + const version = 'v0'; + + const getData = () => { + return [{ event: { a: 'b1' } }, { event: { a: 'b2' } }]; + }; + + const tevent = { event: 'clicked', type: 'track' }; + + const getSourceHandlerSpy = jest + .spyOn(FetchHandler, 'getSourceHandler') + .mockImplementationOnce((s, v) => { + expect(s).toEqual(sourceType); + return { + process: jest.fn(() => { + return tevent; + }), + }; + }); + + const response = await request(server) + .post('/v0/sources/__rudder_test__') + .set('Accept', 'application/json') + .send(getData()); + + const expected = [ + { output: { batch: [{ event: 'clicked', type: 'track' }] } }, + { output: { batch: [{ event: 'clicked', type: 'track' }] } }, + ]; + + expect(response.status).toEqual(200); + expect(JSON.parse(response.text)).toEqual(expected); + expect(getSourceHandlerSpy).toHaveBeenCalledTimes(1); + }); + + test('(mock source) v1 source transformation', async () => { + const sourceType = '__rudder_test__'; + const version = 'v1'; + + const getData = () => { + return [ + { event: { a: 'b1' }, source: { id: 'id' } }, + { event: { a: 'b2' }, source: { id: 'id' } }, + ]; + }; + + const tevent = { event: 'clicked', type: 'track' }; + + const getSourceHandlerSpy = jest + .spyOn(FetchHandler, 'getSourceHandler') + .mockImplementationOnce((s, v) => { + expect(s).toEqual(sourceType); + return { + process: jest.fn(() => { + return tevent; + }), + }; + }); + + const response = await request(server) + .post('/v1/sources/__rudder_test__') + .set('Accept', 'application/json') + .send(getData()); + + const expected = [ + { output: { batch: [{ event: 'clicked', type: 'track' }] } }, + { output: { batch: [{ event: 'clicked', type: 'track' }] } }, + ]; + + expect(response.status).toEqual(200); + expect(JSON.parse(response.text)).toEqual(expected); + expect(getSourceHandlerSpy).toHaveBeenCalledTimes(1); + }); +}); + describe('Destination api tests', () => { describe('Processor transform tests', () => { test('(webhook) success scenario with single event', async () => { @@ -183,6 +515,7 @@ describe('Destination api tests', () => { expect(response.status).toEqual(200); expect(JSON.parse(response.text)).toEqual(data.output); }); + test('(pinterest_tag) failure router transform(partial failure)', async () => { const data = getDataFromPath('./data_scenarios/destination/router/failure_test.json'); const response = await request(server) diff --git a/test/integrations/destinations/algolia/processor/data.ts b/test/integrations/destinations/algolia/processor/data.ts index 0cbdd8b31b2..7c37c9642ae 100644 --- a/test/integrations/destinations/algolia/processor/data.ts +++ b/test/integrations/destinations/algolia/processor/data.ts @@ -380,7 +380,7 @@ export const data = [ body: [ { error: - 'Either filters or objectIds is required.: Workflow: procWorkflow, Step: validateDestPayload, ChildStep: undefined, OriginalError: Either filters or objectIds is required.', + 'Either filters or objectIds is 
required and must be non empty.: Workflow: procWorkflow, Step: validateDestPayload, ChildStep: undefined, OriginalError: Either filters or objectIds is required and must be non empty.', statTags: { destType: 'ALGOLIA', errorCategory: 'dataValidation', @@ -1417,4 +1417,214 @@ export const data = [ }, }, }, + { + name: 'algolia', + description: 'Event type must be one of click, view or conversion', + feature: 'processor', + module: 'destination', + version: 'v0', + input: { + request: { + body: [ + { + message: { + channel: 'web', + context: { + app: { + build: '1.0.0', + name: 'RudderLabs JavaScript SDK', + namespace: 'com.rudderlabs.javascript', + version: '1.0.0', + }, + traits: { + email: 'testone@gmail.com', + firstName: 'test', + lastName: 'one', + }, + library: { + name: 'RudderLabs JavaScript SDK', + version: '1.0.0', + }, + userAgent: + 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36', + locale: 'en-US', + ip: '0.0.0.0', + os: { + name: '', + version: '', + }, + screen: { + density: 2, + }, + page: { + path: '/destinations/ometria', + referrer: '', + search: '', + title: '', + url: 'https://docs.rudderstack.com/destinations/ometria', + category: 'destination', + initial_referrer: 'https://docs.rudderstack.com', + initial_referring_domain: 'docs.rudderstack.com', + }, + }, + type: 'track', + messageId: '84e26acc-56a5-4835-8233-591137fca468', + session_id: '3049dc4c-5a95-4ccd-a3e7-d74a7e411f22', + originalTimestamp: '2019-10-14T09:03:17.562Z', + anonymousId: '123456', + event: 'product clicked', + userId: 'testuserId1', + properties: { + index: 'products', + filters: ['field1:hello', 'val1:val2'], + }, + integrations: { + All: true, + }, + sentAt: '2019-10-14T09:03:22.563Z', + }, + destination: { + DestinationDefinition: { + Config: { + cdkV2Enabled: true, + excludeKeys: [], + includeKeys: [], + }, + }, + Config: { + apiKey: 'dummyApiKey', + applicationId: 'O2YARRI15I', + eventTypeSettings: [ + { + from: 'product clicked', + to: 'abcd', + }, + ], + }, + }, + metadata: { + destinationId: 'destId', + workspaceId: 'wspId', + }, + }, + ], + }, + }, + output: { + response: { + status: 200, + body: [ + { + error: + 'eventType can be either click, view or conversion: Workflow: procWorkflow, Step: preparePayload, ChildStep: undefined, OriginalError: eventType can be either click, view or conversion', + statTags: { + destType: 'ALGOLIA', + errorCategory: 'dataValidation', + errorType: 'instrumentation', + feature: 'processor', + implementation: 'cdkV2', + module: 'destination', + destinationId: 'destId', + workspaceId: 'wspId', + }, + statusCode: 400, + metadata: { + destinationId: 'destId', + workspaceId: 'wspId', + }, + }, + ], + }, + }, + }, + { + name: 'algolia', + description: 'filters or objectIds must be non empty', + feature: 'processor', + module: 'destination', + version: 'v0', + input: { + request: { + body: [ + { + message: { + type: 'track', + event: 'product clicked', + sentAt: '2024-02-25T17:55:36.882Z', + userId: '12345', + channel: 'web', + properties: { + index: 'products', + list_id: 'search_results_page', + queryId: '8e737', + products: [], + eventName: 'productListView', + list_name: 'Search Results Page', + objectIds: [], + positions: [], + userToken: 'e494', + additional_attributes: {}, + }, + receivedAt: '2024-02-25T17:55:38.089Z', + request_ip: '107.130.37.100', + anonymousId: '68e9f4b8-fd4d-4c56-8ca4-858de2fd1df8', + integrations: { + All: true, + }, + originalTimestamp: 
'2024-02-25T17:55:36.880Z', + }, + destination: { + DestinationDefinition: { + Config: { + cdkV2Enabled: true, + excludeKeys: [], + includeKeys: [], + }, + }, + Config: { + apiKey: 'dummyApiKey', + applicationId: 'O2YARRI15I', + eventTypeSettings: [ + { + from: 'product clicked', + to: 'cLick ', + }, + ], + }, + }, + metadata: { + destinationId: 'destId', + workspaceId: 'wspId', + }, + }, + ], + }, + }, + output: { + response: { + status: 200, + body: [ + { + error: + 'Either filters or objectIds is required and must be non empty.: Workflow: procWorkflow, Step: validateDestPayload, ChildStep: undefined, OriginalError: Either filters or objectIds is required and must be non empty.', + statTags: { + destType: 'ALGOLIA', + errorCategory: 'dataValidation', + errorType: 'instrumentation', + feature: 'processor', + implementation: 'cdkV2', + module: 'destination', + destinationId: 'destId', + workspaceId: 'wspId', + }, + statusCode: 400, + metadata: { + destinationId: 'destId', + workspaceId: 'wspId', + }, + }, + ], + }, + }, + }, ]; diff --git a/test/integrations/destinations/gainsight_px/network.ts b/test/integrations/destinations/gainsight_px/network.ts index 81a2da4bede..99f51d9d8ef 100644 --- a/test/integrations/destinations/gainsight_px/network.ts +++ b/test/integrations/destinations/gainsight_px/network.ts @@ -219,4 +219,33 @@ export const networkCallsData = [ status: 200, }, }, + // Axios Error + { + httpReq: { + url: 'https://api.aptrinsic.com/v1/users/myUId', + headers: { 'X-APTRINSIC-API-KEY': 'sample-api-key', 'Content-Type': 'application/json' }, + method: 'GET', + }, + httpRes: { + message: 'Request failed with status code 403', + name: 'AxiosError', + stack: + 'AxiosError: Request failed with status code 403\n at settle (/Users/saisankeerth/rudderstack/rudder-transformer/node_modules/axios/lib/core/settle.js:19:12)\n at IncomingMessage.handleStreamEnd (/Users/saisankeerth/rudderstack/rudder-transformer/node_modules/axios/lib/adapters/http.js:589:11)\n at IncomingMessage.emit (node:events:529:35)\n at IncomingMessage.emit (node:domain:489:12)\n at endReadableNT (node:internal/streams/readable:1400:12)\n at processTicksAndRejections (node:internal/process/task_queues:82:21)', + config: { + headers: { + Accept: 'application/json, text/plain, */*', + 'Content-Type': 'application/json', + 'X-APTRINSIC-API-KEY': 'sample-api-key', + 'User-Agent': 'axios/1.6.5', + 'Accept-Encoding': 'gzip, compress, deflate, br', + }, + method: 'get', + dummy: 'upgrade required', // keyword + url: 'https://api.aptrinsic.com/v1/users/myUId', + }, + code: 'FORBIDDEN', + status: 403, + data: '\u003c!doctype html\u003e\u003cmeta charset="utf-8"\u003e\u003cmeta name=viewport content="width=device-width, initial-scale=1"\u003e\u003ctitle\u003e403\u003c/title\u003e403 Forbidden', + }, + }, ]; diff --git a/test/integrations/destinations/gainsight_px/router/data.ts b/test/integrations/destinations/gainsight_px/router/data.ts index 7dc131127da..1b3d5be8756 100644 --- a/test/integrations/destinations/gainsight_px/router/data.ts +++ b/test/integrations/destinations/gainsight_px/router/data.ts @@ -1,3 +1,75 @@ +const metadata = { + userId: '9a7820d0-0ff2-4451-b655-682cec15cbd2', + jobId: 1, + sourceId: '1s9eG8UCer6YSKsD8ZlQCyLa3pj', + destinationId: 'desId2', + attemptNum: 0, + receivedAt: '2021-06-25T14:29:52.911+05:30', + createdAt: '2021-06-25T08:59:56.329Z', + firstAttemptedAt: '', + transformAt: 'router', +}; +const destination2 = { + ID: 'desId2', + Name: 'gainsight-px-dest', + DestinationDefinition: { + ID: 
'destDef1', + Name: 'GAINSIGHT_PX', + DisplayName: 'Gainsight PX', + Config: { + destConfig: { + defaultConfig: [ + 'apiKey', + 'productTagKey', + 'userAttributeMap', + 'accountAttributeMap', + 'globalContextMap', + ], + }, + excludeKeys: [], + includeKeys: [], + saveDestinationResponse: true, + secretKeys: ['apiKey', 'productTagKey'], + supportedSourceTypes: [ + 'android', + 'ios', + 'web', + 'unity', + 'amp', + 'cloud', + 'reactnative', + 'flutter', + ], + transformAt: 'router', + transformAtV1: 'router', + }, + ResponseRules: {}, + }, + Config: { + accountAttributeMap: [ + { from: 'LAST_INVOICE_DATE', to: 'last_invoice_date' }, + { from: 'LAST_INVOICE_PLAN', to: 'last_invoice_plan' }, + { from: 'LANGUAGE', to: 'language' }, + { from: 'REGION', to: 'region2' }, + { from: 'LAST_INVOICE_CURRENCY', to: 'last_invoice_currency' }, + { from: 'IBR_PLAN', to: 'ibr_plan' }, + { from: 'WH_COUNTRY', to: 'wh_country' }, + { from: 'inboxready_signup_date', to: 'inboxready_signup_date' }, + { from: 'gpt_setup', to: 'gpt_setup' }, + ], + oneTrustCookieCategories: [], + apiKey: 'sample-api-key', + eventDelivery: false, + eventDeliveryTS: 1624472902670, + globalContextMap: [{ from: 'kubrickTest', to: 'value' }], + productTagKey: 'AP-SAMPLE-2', + userAttributeMap: [{ from: 'hobbyCustomField', to: 'hobby' }], + }, + Enabled: true, + Transformations: [], + IsProcessorEnabled: true, +}; + export const data = [ { name: 'gainsight_px', @@ -463,4 +535,85 @@ export const data = [ }, }, }, + { + name: 'gainsight_px', + description: 'Test 1: Group call -- AxiosError thrown', + feature: 'router', + module: 'destination', + version: 'v0', + input: { + request: { + body: { + input: [ + { + message: { + type: 'group', + sentAt: '2024-02-16T06:00:54.075Z', + traits: { + name: ',sleep(100)', + REGION: 'MEA', + USERID: 'myUId', + groupId: 'myGId', + IBR_PLAN: 'free_ir', + LANGUAGE: 'EN', + gpt_setup: false, + ACCOUNT_ID: 'myGId', + WH_COUNTRY: 'MA', + LAST_INVOICE_DATE: 1706810675000, + LAST_INVOICE_PLAN: 'foundation_trial', + LAST_INVOICE_CURRENCY: 'USD', + inboxready_signup_date: 1680254544705, + }, + userId: 'myUId', + channel: 'sources', + context: { + sources: { + job_run_id: 'cn7fjonu4d9b3u706u2g', + task_run_id: 'cn7fjonu4d9b3u706u3g', + }, + }, + recordId: '111111', + rudderId: 'dummy-rudder-id', + timestamp: '2024-02-16T06:00:52.581Z', + receivedAt: '2024-02-16T06:00:52.582Z', + request_ip: '10.7.150.126', + anonymousId: 'myUId', + integrations: { limitAPIForGroup: true }, + originalTimestamp: '2024-02-16T06:00:54.075Z', + }, + metadata, + destination: destination2, + }, + ], + destType: 'gainsight_px', + }, + }, + }, + output: { + response: { + status: 200, + body: { + output: [ + { + error: + '{"message":"error while fetching user: \\"