diff --git a/performance_tests/Makefile b/performance_tests/Makefile
new file mode 100644
index 000000000..2d2a8bfd1
--- /dev/null
+++ b/performance_tests/Makefile
@@ -0,0 +1,18 @@
+
+.PHONY: backend_perf_test
+backend_perf_test: ## run backend performance tests
+backend_perf_test: ## Variables and setup
+backend_perf_test: SERVER_HOST=http://127.0.0.1:3000
+backend_perf_test: LOAD=test_run
+backend_perf_test: REQUESTS_PER_SECOND=1 ## If setting above 50, inform Platform Services first
+backend_perf_test: TOKEN_URL=https://dev.loginproxy.gov.bc.ca/auth/realms/standard/protocol/openid-connect/token
+backend_perf_test:
+	@k6 -e SERVER_HOST=$(SERVER_HOST) -e LOAD=$(LOAD) -e RPS=$(REQUESTS_PER_SECOND) -e TOKEN_URL=$(TOKEN_URL) run ./backend_script.js --out csv=k6_results/backend_test_results.csv
+
+.PHONY: frontend_perf_test
+frontend_perf_test: ## run frontend performance tests
+frontend_perf_test: APP_HOST=http://127.0.0.1:3001
+frontend_perf_test: LOAD=test_run
+frontend_perf_test: REQUESTS_PER_SECOND=1 ## If setting above 50, inform Platform Services first
+frontend_perf_test:
+	@k6 -e APP_HOST=$(APP_HOST) -e LOAD=$(LOAD) -e RPS=$(REQUESTS_PER_SECOND) run ./frontend_script.js --out csv=k6_results/test_results_frontend.csv
diff --git a/performance_tests/README.md b/performance_tests/README.md
new file mode 100644
index 000000000..1bc50f0a2
--- /dev/null
+++ b/performance_tests/README.md
@@ -0,0 +1,64 @@
+# Performance Testing
+
+## Tools
+
+Performance testing of this app was done using k6 by Grafana (https://k6.io/). Frontend performance
+tests made use of the k6 browser testing tools (https://grafana.com/docs/k6/latest/using-k6-browser/).
+
+## Running the Tests
+
+The backend and frontend have their own test scripts and their own make commands to run them.
+For the most accurate results, it is generally best to run one scenario at a time. Note that if running multiple test
+scenarios at once, such as hitting several endpoints continuously, each test will have its own set of the number of
+users specified in the stages, so adjust the numbers as needed.
+
+## Setup
+
+In order to configure the tests, the following settings are available:
+
+- Stages: set in `common/params.js`, these are the scenarios of user counts and durations that the tests will simulate
+  (see the example sketch after this list).
+- LOAD: set in the `Makefile`, this dictates which stages the tests will use. The value must match one of the keys
+  from the `STAGES` object in `common/params.js`, e.g. `smoke`, `spike`, etc.
+- HOST: set in the `Makefile`, this points to the server being tested. The frontend and backend each have their own
+  value (`APP_HOST` and `SERVER_HOST` respectively).
+- REQUESTS_PER_SECOND: set in the `Makefile`, this dictates the rate at which requests will be made.
+  **Important: if setting above 50 rps, alert Platform Services first.**
+- Credentials and tokens: set in `common/auth.js`, these are the credentials used to run the tests. User
+  credentials will need to be added for the browser test. A valid auth token and refresh token are needed for the
+  protocol-level tests.
+- Tests: determining which tests will run in a given suite is done by simply commenting out tests that you want skipped
+  in the respective `frontend_script.js` or `backend_script.js` file. Comment out the test's entry in both
+  `options.scenarios` and the corresponding if statement in the default function.
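+
+As an illustration of the stage format, a hypothetical profile could be added to the `STAGES` object in
+`common/params.js` and then selected with `LOAD=quick_local` in the `Makefile`. The `quick_local` name and the numbers
+below are illustrative only and are not part of the shipped configuration:
+
+```js
+// Hypothetical extra entry inside the STAGES object in common/params.js
+quick_local: [
+  { duration: "30s", target: 5 }, // ramp up to 5 virtual users
+  { duration: "1m", target: 5 }, // hold at 5 virtual users
+  { duration: "30s", target: 0 }, // ramp down to 0
+],
+```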
+
+### Token and Credentials
+
+Valid user credentials with the appropriate roles are required for the browser tests, along with that user's officer ID
+from the officer table. For the protocol-level tests, a token and refresh token that are valid at the time of running
+the test will be needed; the tests will refresh the values as they run. The logic for this can be found in
+`backend_script.js` and `frontend_script.js`. The initial values for the token and refresh token can be found by logging
+into the environment that is being used for the load test and copying the values out of the response of the network
+call titled `token` in the browser dev tools network tab. If you leave an instance open in your browser, these network
+requests will continue to be made with the latest token, which can save time between tests. Once the rest of the setup
+is done and you are ready to run the tests, copy the latest values into `common/auth.js` and don't forget to save.
+
+### Running from Local
+
+1. Install k6 - https://k6.io/docs/get-started/installation/
+
+2. If results files already exist in `/performance_tests/k6_results`, rename or move them, or else they will be
+   overwritten.
+
+3. Configure the tests (see "Setup" above) to point at the correct servers with the desired load and scenarios.
+   The load is set for the frontend and backend separately in the Makefile, so be sure to set the correct one.
+
+4. Ensure that the servers being used for testing are running and ready with the correct test data, and set up any
+   cluster / resource monitoring needed during the testing. It is a good idea to watch resource usage on the machine
+   running the tests as well, just to ensure that its own resource constraints are not affecting results.
+
+5. From the `/performance_tests` directory, run `make backend_perf_test` or `make frontend_perf_test` depending on
+   which suite you are running. To run the frontend browser test with a regular (non-headless) browser, instead run
+   `K6_BROWSER_HEADLESS=false make frontend_perf_test`; however, this is very resource intensive to do with more than
+   one virtual user, so adjust the number of VUs accordingly. If running one of the more intensive tests such as
+   `spike` or `stress`, it is recommended that a `test_run` is done first to ensure that everything is working as
+   intended. An example of the underlying k6 invocation is shown after this list.
+
+6. When the tests finish, k6 will present a summary of the run in the terminal that ran the tests; it is
+   recommended to copy the summary into a text file. The detailed results can be found in `/performance_tests/k6_results/`.
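+
+For reference, each make target expands to a plain k6 invocation, so the tests can also be launched directly with
+adjusted values. A minimal sketch mirroring `make backend_perf_test` (the `smoke` load shown here is illustrative):
+
+```sh
+# Equivalent of the backend_perf_test recipe in the Makefile, run from /performance_tests
+k6 -e SERVER_HOST=http://127.0.0.1:3000 -e LOAD=smoke -e RPS=1 \
+  -e TOKEN_URL=https://dev.loginproxy.gov.bc.ca/auth/realms/standard/protocol/openid-connect/token \
+  run ./backend_script.js --out csv=k6_results/backend_test_results.csv
+```
+
+With GNU Make, the same variables should also be overridable on the command line, e.g. `make backend_perf_test LOAD=smoke`,
+since command-line assignments normally take precedence over the Makefile's target-specific defaults.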
diff --git a/performance_tests/backend_script.js b/performance_tests/backend_script.js
new file mode 100644
index 000000000..bdec676b0
--- /dev/null
+++ b/performance_tests/backend_script.js
@@ -0,0 +1,111 @@
+import http from "k6/http";
+import exec from "k6/execution";
+import { STAGES } from "./common/params.js";
+import {
+  searchWithDefaultFilters,
+  searchWithoutFilters,
+  openSearchWithoutFilters,
+  searchWithCMFilter,
+} from "./tests/backend/search.js";
+import {
+  mapSearchDefaultFilters,
+  mapSearchAllOpenComplaints,
+  mapSearchAllComplaints,
+  mapSearchWithCMFilter,
+} from "./tests/backend/mapSearch.js";
+import { getComplaintDetails, addAndRemoveComplaintOutcome } from "./tests/backend/complaint_details.js";
+import { INITIAL_TOKEN, INITIAL_REFRESH_TOKEN, generateRequestConfig } from "./common/auth.js";
+
+const defaultOptions = {
+  executor: "ramping-vus",
+  stages: STAGES[`${__ENV.LOAD}`],
+};
+
+export const options = {
+  scenarios: {
+    // Search
+    // searchWithDefaultFilters: defaultOptions,
+    // searchWithoutFilters: defaultOptions,
+    // openSearchWithoutFilters: defaultOptions,
+    // searchWithCMFilter: defaultOptions,
+
+    // Map Search
+    // mapSearchDefaultFilters: defaultOptions,
+    // mapSearchAllOpenComplaints: defaultOptions,
+    // mapSearchAllComplaints: defaultOptions,
+    // mapSearchWithCMFilter: defaultOptions,
+
+    // Complaint Details
+    getComplaintDetails: defaultOptions,
+    addAndRemoveComplaintOutcome: defaultOptions,
+  },
+  thresholds: {
+    http_req_duration: ["p(99)<2000"], // ms that 99% of requests must be completed within
+  },
+  // rps: 50, // Do not increase to over 50 without informing Platform Services
+};
+
+/**
+ * In order to keep the token active, refresh it often enough to avoid any false 401s, but not so often that it
+ * interferes with the testing. RPS * TOKEN_REFRESH_TIME is only a rough approximation of elapsed time, so set
+ * TOKEN_REFRESH_TIME slightly conservatively rather than at the exact token expiry period.
+ * The token and config vars are set outside of the function to take advantage of module scope, allowing the refresh to
+ * happen conditionally rather than on every iteration.
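+ * For example, with the Makefile default of RPS=1 and the TOKEN_REFRESH_TIME=60 defined below, the refresh branch
+ * runs once every 1 * 60 = 60 iterations per VU.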
+ */
+
+const TOKEN_REFRESH_TIME = 60;
+let token = INITIAL_TOKEN;
+let refreshToken = INITIAL_REFRESH_TOKEN;
+let requestConfig = generateRequestConfig(token);
+
+export default function () {
+  const HOST = __ENV.SERVER_HOST;
+  // Refresh the token if necessary based on iteration number, refresh time and rate of requests
+  if (__ITER === 0 || __ITER % (__ENV.RPS * TOKEN_REFRESH_TIME) === 0) {
+    const refreshRes = http.post(
+      "https://dev.loginproxy.gov.bc.ca/auth/realms/standard/protocol/openid-connect/token",
+      {
+        grant_type: "refresh_token",
+        refresh_token: refreshToken,
+        client_id: "compliance-and-enforcement-digital-services-web-4794",
+      },
+    );
+
+    token = JSON.parse(refreshRes.body).access_token;
+    refreshToken = JSON.parse(refreshRes.body).refresh_token;
+    requestConfig = generateRequestConfig(token);
+  }
+  // search
+  if (exec.scenario.name === "searchWithDefaultFilters") {
+    searchWithDefaultFilters(HOST, requestConfig);
+  }
+  if (exec.scenario.name === "searchWithoutFilters") {
+    searchWithoutFilters(HOST, requestConfig);
+  }
+  if (exec.scenario.name === "openSearchWithoutFilters") {
+    openSearchWithoutFilters(HOST, requestConfig);
+  }
+  if (exec.scenario.name === "searchWithCMFilter") {
+    searchWithCMFilter(HOST, requestConfig);
+  }
+  // map search
+  if (exec.scenario.name === "mapSearchDefaultFilters") {
+    mapSearchDefaultFilters(HOST, requestConfig);
+  }
+  if (exec.scenario.name === "mapSearchAllOpenComplaints") {
+    mapSearchAllOpenComplaints(HOST, requestConfig);
+  }
+  if (exec.scenario.name === "mapSearchAllComplaints") {
+    mapSearchAllComplaints(HOST, requestConfig);
+  }
+  if (exec.scenario.name === "mapSearchWithCMFilter") {
+    mapSearchWithCMFilter(HOST, requestConfig);
+  }
+  // complaint details
+  if (exec.scenario.name === "getComplaintDetails") {
+    getComplaintDetails(HOST, requestConfig);
+  }
+  if (exec.scenario.name === "addAndRemoveComplaintOutcome") {
+    addAndRemoveComplaintOutcome(HOST, requestConfig);
+  }
+}
diff --git a/performance_tests/common/auth.js b/performance_tests/common/auth.js
new file mode 100644
index 000000000..0531fd16d
--- /dev/null
+++ b/performance_tests/common/auth.js
@@ -0,0 +1,17 @@
+export const INITIAL_TOKEN = "";
+export const INITIAL_REFRESH_TOKEN = "";
+
+export const COS_USER_CREDS = {
+  username: "",
+  password: "",
+  officerGuid: "",
+};
+
+export const generateRequestConfig = (token) => {
+  return {
+    headers: {
+      "Content-Type": "application/json",
+      Authorization: `Bearer ${token}`,
+    },
+  };
+};
diff --git a/performance_tests/common/params.js b/performance_tests/common/params.js
new file mode 100644
index 000000000..99f8a0169
--- /dev/null
+++ b/performance_tests/common/params.js
@@ -0,0 +1,79 @@
+/**
+ * IMPORTANT
+ * These numbers are the values used PER TEST. That means that if you run tests hitting 5 endpoints, each
+ * with its own test, you will get 5 times the number of users specified in the stages as total traffic.
+ * These values were entered assuming 1 test is run at a time. Adjust them accordingly.
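+ * For example, with the two complaint-details scenarios enabled by default in backend_script.js, the "load" stages
+ * targeting AVERAGE_USERS = 75 peak at roughly 2 x 75 = 150 concurrent virtual users in total.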
+ */
+const TEST_RUN_USERS = 1;
+const MIN_USERS = 20;
+const MAX_USERS = 200;
+const AVERAGE_USERS = 75;
+const STRESS_LOAD_USERS = 150;
+
+export const STAGES = {
+  // Test run stages to make sure all scenarios are working
+  test_run: [
+    { duration: "5s", target: TEST_RUN_USERS },
+    { duration: "10s", target: TEST_RUN_USERS },
+    { duration: "5s", target: 0 },
+  ],
+
+  // Smoke tests for minimum expected load
+  smoke: [
+    { duration: "1m", target: MIN_USERS }, // ramp-up of traffic to the minimum users
+    { duration: "2m", target: MIN_USERS }, // stay at minimum users for 2 minutes
+    { duration: "1m", target: 0 }, // ramp-down to 0 users
+  ],
+
+  // Load tests at average load
+  load: [
+    { duration: "1m", target: AVERAGE_USERS }, // ramp up to average user base
+    { duration: "10m", target: AVERAGE_USERS }, // maintain average user base
+    { duration: "1m", target: 0 }, // ramp down
+  ],
+
+  // Load tests for average load with a spike to stress load
+  load_with_spike: [
+    { duration: "1m", target: MIN_USERS }, // ramp up to minimum users
+    { duration: "2m", target: MIN_USERS }, // maintain minimum users
+    { duration: "1m", target: AVERAGE_USERS }, // ramp up to average user base
+    { duration: "10m", target: AVERAGE_USERS }, // maintain average user base
+    // Small spike
+    { duration: "1m", target: STRESS_LOAD_USERS }, // scale up to stress load
+    { duration: "2m", target: STRESS_LOAD_USERS }, // briefly maintain stress load
+    { duration: "1m", target: AVERAGE_USERS }, // scale back to average users
+    { duration: "2m", target: AVERAGE_USERS }, // briefly maintain average users
+    { duration: "1m", target: 0 }, // ramp down to 0 users
+  ],
+
+  // Stress tests for heavy load
+  stress: [
+    { duration: "2m", target: MIN_USERS }, // ramp up to minimum users
+    { duration: "1m", target: MIN_USERS }, // maintain minimum users
+    { duration: "3m", target: AVERAGE_USERS }, // ramp up to average users
+    { duration: "5m", target: AVERAGE_USERS }, // maintain average users
+    { duration: "5m", target: STRESS_LOAD_USERS }, // ramp up to stress load
+    { duration: "30m", target: STRESS_LOAD_USERS }, // maintain stress load
+    { duration: "10m", target: 0 }, // gradually drop to 0 users
+  ],
+
+  // Spike tests for maximum expected load (e.g. entire expected user base)
+  // The initial scenario this is intended to simulate is the COS-wide training session,
+  // when a significant portion of the user base will likely all log on at the same time.
+  spike: [
+    { duration: "2m", target: MAX_USERS }, // simulate fast ramp up of users to max users
+    { duration: "20m", target: MAX_USERS }, // stay at max users
+    { duration: "2m", target: 0 }, // ramp-down to 0 users
+  ],
+
+  // Soak tests for extended varying standard load
+  soak: [
+    { duration: "2m", target: MIN_USERS }, // ramp up to minimum users
+    { duration: "5m", target: MIN_USERS }, // maintain minimum users
+    { duration: "5m", target: AVERAGE_USERS }, // ramp up to average user base
+    { duration: "480m", target: AVERAGE_USERS }, // maintain average user base
+    { duration: "5m", target: MIN_USERS }, // slow down to minimum users
+    { duration: "20m", target: MIN_USERS }, // maintain low numbers
+    { duration: "10m", target: 0 }, // gradually drop to 0 users
+  ],
+};
diff --git a/performance_tests/frontend_script.js b/performance_tests/frontend_script.js
new file mode 100644
index 000000000..4c013f56a
--- /dev/null
+++ b/performance_tests/frontend_script.js
@@ -0,0 +1,69 @@
+import exec from "k6/execution";
+import http from "k6/http";
+import { STAGES } from "./common/params.js";
+import { browserTest } from "./tests/frontend/browser.js";
+import { protocolTest } from "./tests/frontend/protocol.js";
+import { INITIAL_TOKEN, INITIAL_REFRESH_TOKEN, generateRequestConfig } from "./common/auth.js";
+
+// Use activeBrowserOptions for the browser test if you aren't running it headless
+const activeBrowserOptions = {
+  executor: "per-vu-iterations",
+  vus: 1,
+  options: {
+    browser: {
+      type: "chromium",
+    },
+  },
+};
+
+const defaultOptions = {
+  executor: "ramping-vus",
+  stages: STAGES[`${__ENV.LOAD}`],
+};
+
+/**
+ * To run with an open browser, prepend the make command with:
+ * K6_BROWSER_HEADLESS=false
+ * It is suggested to use the activeBrowserOptions when doing so, as browsers will eat up a significant amount of local
+ * resources, which may affect the results of the tests in a way that is not representative of the server's performance.
+ */
+export const options = {
+  scenarios: {
+    browserTest: activeBrowserOptions,
+    protocolTest: defaultOptions,
+  },
+  thresholds: {
+    http_req_duration: ["p(99)<2000"], // ms that 99% of requests must be completed within
+  },
+  // rps: 50, // Do not increase to over 50 without informing Platform Services
+};
+
+const TOKEN_REFRESH_TIME = 60;
+let token = INITIAL_TOKEN;
+let refreshToken = INITIAL_REFRESH_TOKEN;
+let requestConfig = generateRequestConfig(token);
+
+export default function () {
+  const HOST = __ENV.APP_HOST;
+  // Refresh the token if necessary based on iteration number, refresh time and rate of requests
+  if (__ITER === 0 || __ITER % (__ENV.RPS * TOKEN_REFRESH_TIME) === 0) {
+    const refreshRes = http.post(
+      "https://dev.loginproxy.gov.bc.ca/auth/realms/standard/protocol/openid-connect/token",
+      {
+        grant_type: "refresh_token",
+        refresh_token: refreshToken,
+        client_id: "compliance-and-enforcement-digital-services-web-4794",
+      },
+    );
+
+    token = JSON.parse(refreshRes.body).access_token;
+    refreshToken = JSON.parse(refreshRes.body).refresh_token;
+    requestConfig = generateRequestConfig(token);
+  }
+  if (exec.scenario.name === "browserTest") {
+    browserTest(HOST);
+  }
+
+  if (exec.scenario.name === "protocolTest") {
+    protocolTest(HOST, requestConfig);
+  }
+}
diff --git a/performance_tests/tests/backend/complaint_details.js b/performance_tests/tests/backend/complaint_details.js
new file mode 100644
index 000000000..f9dac399e
--- /dev/null
+++ b/performance_tests/tests/backend/complaint_details.js
@@ -0,0 +1,63 @@
+import http from "k6/http";
+import { check } from "k6";
+import { COS_USER_CREDS } from "../../common/auth.js";
+
+const VALID_HWCR_COMPLAINT_ID = "23-031120";
+
+// Fetch the details of a single HWCR complaint by its identifier
+export const getComplaintDetails = async (host, requestConfig) => {
+  check(
+    await http.get(host + `/api/v1/complaint/by-complaint-identifier/HWCR/${VALID_HWCR_COMPLAINT_ID}`, requestConfig),
+    {
+      "getComplaintDetails response status 200": (r) => r.status === 200,
+      "getComplaintDetails response contains the requested complaint": (r) =>
+        JSON.parse(r.body).id === VALID_HWCR_COMPLAINT_ID,
+    },
+  );
+};
+
+export const addAndRemoveComplaintOutcome = async (host, requestConfig) => {
+  const { username, officerGuid } = COS_USER_CREDS;
+  // Add an outcome to a complaint to hit case management,
+  // then delete it so that the process can be repeated.
+  const outcomeBody = {
+    leadIdentifier: VALID_HWCR_COMPLAINT_ID,
+    agencyCode: "COS",
+    caseCode: "HWCR",
+    createUserId: username,
+    wildlife: {
+      species: "FOX",
+      sex: "M",
+      age: "ADLT",
+      categoryLevel: "",
+      identifyingFeatures: "",
+      outcome: "TRANSLCTD",
+      tags: [],
+      drugs: [],
+      actions: [{ action: "RECOUTCOME", actor: officerGuid, date: "2024-12-18T08:00:00.000Z" }],
+    },
+  };
+  check(await http.post(host + `/api/v1/case/wildlife`, JSON.stringify(outcomeBody), requestConfig), {
+    "addAndRemoveComplaintOutcome create outcome response status 201": (r) => r.status === 201,
+  });
+  // Fetch the updated complaint to get the outcome ID
+  const getComplaintRes = await http.get(host + `/api/v1/case/${VALID_HWCR_COMPLAINT_ID}`, requestConfig);
+  const updatedComplaint = JSON.parse(getComplaintRes.body);
+  const outcomeId = updatedComplaint.subject?.id;
+
+  // Delete the outcome
+  check(
+    await http.del(
+      host +
+        `/api/v1/case/wildlife?caseIdentifier=${updatedComplaint.caseIdentifier}&actor=${officerGuid}&updateUserId=${username}&outcomeId=${outcomeId}`,
+      undefined,
+      requestConfig,
+    ),
+    {
+      "addAndRemoveComplaintOutcome delete outcome response status 200": (r) => r.status === 200,
+    },
+  );
+};
diff --git a/performance_tests/tests/backend/mapSearch.js b/performance_tests/tests/backend/mapSearch.js
new file mode 100644
index 000000000..a14fec820
--- /dev/null
+++ b/performance_tests/tests/backend/mapSearch.js
@@ -0,0 +1,71 @@
+import http from "k6/http";
+import { check } from "k6";
+
+// Search for open HWCR complaints in the South Peace region
+export const mapSearchDefaultFilters = async (host, requestConfig) => {
+  check(
+    await http.get(
+      host +
+        "/api/v1/complaint/search/HWCR?sortBy=incident_reported_utc_timestmp&orderBy=DESC&zone=SPCE&status=OPEN&page=1&pageSize=50",
+      requestConfig,
+    ),
+    {
+      "mapSearchDefaultFilters response status 200": (r) => r.status === 200,
+      "mapSearchDefaultFilters response has entries": (r) => JSON.parse(r.body).totalCount > 0,
+    },
+  );
+};
+
+export const mapSearchAllOpenComplaints = async (host, requestConfig) => {
+  check(
+    await http.get(
+      host + "/api/v1/complaint/search/HWCR?sortBy=incident_reported_utc_timestmp&orderBy=DESC&page=1&pageSize=50",
+      requestConfig,
+    ),
+    {
+      "mapSearchAllOpenComplaints response status 200": (r) => r.status === 200,
+      "mapSearchAllOpenComplaints response has entries": (r) => JSON.parse(r.body).totalCount > 0,
+    },
+  );
+};
+
+export const mapSearchAllComplaints = async (host, requestConfig) => {
+  check(
+    await http.get(host + "/api/v1/complaint/map/search/clustered/HWCR?zoom=0&query=&clusters=true", requestConfig),
+    {
+      "mapSearchAllComplaints response status 200": (r) => r.status === 200,
+      "mapSearchAllComplaints response has entries": (r) => JSON.parse(r.body).mappedCount > 0,
+    },
+  );
+  check(
+    await http.get(host + "/api/v1/complaint/map/search/clustered/HWCR?zoom=0&query=&unmapped=true", requestConfig),
+    {
+      "mapSearchAllComplaints unmapped response status 200": (r) => r.status === 200,
+      "mapSearchAllComplaints unmapped response has entries": (r) => JSON.parse(r.body).unmappedCount > 0,
+    },
+  );
+};
+
+export const mapSearchWithCMFilter = async (host, requestConfig) => {
+  check(
+    await http.get(
+      host +
+        "/api/v1/complaint/map/search/clustered/HWCR?zoom=0&outcomeAnimalStartDate=2023-06-01T07:00:00.000Z&query=&clusters=true",
+      requestConfig,
+    ),
+    {
+      "mapSearchWithCMFilter response status 200": (r) => r.status === 200,
+      "mapSearchWithCMFilter response has entries": (r) => JSON.parse(r.body).mappedCount >= 0,
+    },
+  );
+  check(
+    await http.get(
+      host +
+        "/api/v1/complaint/map/search/clustered/HWCR?zoom=0&outcomeAnimalStartDate=2023-06-01T07:00:00.000Z&query=&unmapped=true",
+      requestConfig,
+    ),
+    {
+      "mapSearchWithCMFilter unmapped response status 200": (r) => r.status === 200,
+      "mapSearchWithCMFilter unmapped response has entries": (r) => JSON.parse(r.body).unmappedCount >= 0,
+    },
+  );
+};
diff --git a/performance_tests/tests/backend/search.js b/performance_tests/tests/backend/search.js
new file mode 100644
index 000000000..6a5989987
--- /dev/null
+++ b/performance_tests/tests/backend/search.js
@@ -0,0 +1,62 @@
+import http from "k6/http";
+import { check } from "k6";
+
+// Search with default parameters (open HWCR complaints in the South Peace region)
+export const searchWithDefaultFilters = async (host, requestConfig) => {
+  check(
+    await http.get(
+      host +
+        "/api/v1/complaint/search/HWCR?sortBy=incident_reported_utc_timestmp&orderBy=DESC&zone=SPCE&status=OPEN&page=1&pageSize=50",
+      requestConfig,
+    ),
+    {
+      "Search with default parameters response status 200": (r) => r.status === 200,
+      "Search with default parameters response has entries": (r) => JSON.parse(r.body).totalCount > 0,
+    },
+  );
+};
+
+// Searches for all complaints
+export const searchWithoutFilters = async (host, requestConfig) => {
+  check(
+    await http.get(
+      host + "/api/v1/complaint/search/HWCR?sortBy=incident_reported_utc_timestmp&orderBy=DESC&page=1&pageSize=50",
+      requestConfig,
+    ),
+    {
+      "Search for all HWCR complaints response status 200": (r) => r.status === 200,
+      "Search for all HWCR complaints response has entries": (r) => JSON.parse(r.body).totalCount > 0,
+    },
+  );
+};
+
+// Searches for all open complaints
+export const openSearchWithoutFilters = async (host, requestConfig) => {
+  check(
+    await http.get(
+      host +
+        "/api/v1/complaint/search/HWCR?sortBy=incident_reported_utc_timestmp&orderBy=DESC&status=OPEN&page=1&pageSize=50",
+      requestConfig,
+    ),
+    {
+      "Search for all open HWCR complaints response status 200": (r) => r.status === 200,
+      "Search for all open HWCR complaints response has entries": (r) => JSON.parse(r.body).totalCount > 0,
+    },
+  );
+};
+
+// This search is for all complaints with an outcome date after November 11, 2024.
+// This filter will hit the Case Management database.
+export const searchWithCMFilter = async (host, requestConfig) => {
+  check(
+    await http.get(
+      host +
+        "/api/v1/complaint/search/HWCR?sortBy=incident_reported_utc_timestmp&orderBy=DESC&outcomeAnimalStartDate=2024-11-11T08:00:00.000Z&page=1&pageSize=50",
+      requestConfig,
+    ),
+    {
+      "Search with case management filters response status 200": (r) => r.status === 200,
+      "Search with case management filters response has entries": (r) => JSON.parse(r.body).totalCount > 0,
+    },
+  );
+};
diff --git a/performance_tests/tests/frontend/browser.js b/performance_tests/tests/frontend/browser.js
new file mode 100644
index 000000000..e7b2cee55
--- /dev/null
+++ b/performance_tests/tests/frontend/browser.js
@@ -0,0 +1,66 @@
+import { browser } from "k6/browser";
+import { check } from "https://jslib.k6.io/k6-utils/1.5.0/index.js";
+import { COS_USER_CREDS } from "../../common/auth.js";
+import { sleep } from "k6";
+
+// This is the amount of time used to give the page time to render and slow the behaviour
+// down to a more human speed.
+// If running several headless browsers at once, increasing this may ease a bit of the stress
+// on the system executing the tests.
+const IDLE_TIME = 3;
+
+export async function browserTest(host) {
+  /**
+   * Scenario:
+   * This test logs into NatCOM using the credentials provided via COS_USER_CREDS in performance_tests/common/auth.js
+   * Once logged in, the test navigates to the ERS tab then back to the HWCR tab to guarantee filters have been reset.
+   * Next the default filters are removed, resulting in the query for all HWCR complaints.
+   * Finally, it switches to the map view and checks that there is a cluster of results on the map.
+   */
+  const page = await browser.newPage();
+  // Visit the page and log in.
+  // The double waitForNavigation calls are intentional to handle the redirects that occur when
+  // logging in.
+  await page.goto(host);
+  await page.waitForNavigation();
+  await page.waitForNavigation();
+
+  await page.locator('input[name="user"]').type(COS_USER_CREDS.username);
+  await page.locator('input[name="password"]').type(COS_USER_CREDS.password);
+
+  await Promise.all([page.waitForNavigation(), page.locator('input[type="submit"]').click()]);
+  await page.waitForNavigation();
+  await page.waitForNavigation();
+
+  await check(page.locator("h1"), {
+    header: async (h1) => (await h1.textContent()) === "Complaints",
+  });
+  sleep(IDLE_TIME);
+  // Navigate to the ERS tab to ensure filters reset
+  await page.locator("#ers-tab").click({ force: true });
+  await check(page.locator("#ers-tab"), {
+    // Only the active tab has the number in brackets in the title
+    "ERS tab active": async (a) => (await a.textContent()).includes("("),
+  });
+  sleep(IDLE_TIME);
+  // Return to the HWCR tab
+  await page.locator("#hwcr-tab").click({ force: true });
+  await check(page.locator("#hwcr-tab"), {
+    "HWCR tab active": async (a) => (await a.textContent()).includes("("),
+  });
+  sleep(IDLE_TIME);
+  // Clear the filters
+  await page.locator("#comp-status-filter").click({ force: true });
+  sleep(IDLE_TIME);
+  await page.locator("#comp-zone-filter").click({ force: true });
+  sleep(IDLE_TIME);
+  // Go to the map view
+  await page.locator('//label[@for="map_toggle_id"]').click({ force: true });
+  sleep(IDLE_TIME);
+
+  // Verify that there are clusters on the map.
+  await check(page.locator('//*[@id="multi-point-map"]'), {
+    "Marker cluster exists": async (cluster) => await cluster.isVisible(),
+  });
+  await page.close();
+}
diff --git a/performance_tests/tests/frontend/protocol.js b/performance_tests/tests/frontend/protocol.js
new file mode 100644
index 000000000..032daa914
--- /dev/null
+++ b/performance_tests/tests/frontend/protocol.js
@@ -0,0 +1,26 @@
+import http from "k6/http";
+import { check } from "k6";
+
+export function protocolTest(host, requestConfig) {
+  const res = http.get(host + "/static/js/bundle.js", {
+    headers: {
+      ...requestConfig.headers,
+      Accept:
+        "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
+      "Accept-Encoding": "gzip, deflate, br, zstd",
+      "Accept-Language": "en-US,en;q=0.9",
+      "Cache-Control": "no-cache",
+      Connection: "keep-alive",
+      Host: "localhost:3001",
+      Pragma: "no-cache",
+      "Sec-Fetch-Dest": "document",
+      "Sec-Fetch-Mode": "navigate",
+      "Sec-Fetch-Site": "same-origin",
+      "Sec-Fetch-User": "?1",
+      "Upgrade-Insecure-Requests": "1",
+    },
+  });
+  check(res, {
+    "Fetch of bundle.js returns status 200": (r) => r.status === 200,
+  });
+}