diff --git a/Makefile b/Makefile index 1c603c9cc..8cc2ba40c 100644 --- a/Makefile +++ b/Makefile @@ -56,6 +56,14 @@ clean: ## Closes and cleans (removes) all project containers @echo "===============================================" @docker-compose -f docker-compose.yml down -v --rmi all --remove-orphans +prune: ## Deletes ALL docker artifacts (even those not associated to this project) + @echo -n "Delete ALL docker artifacts? [y/n] " && read ans && [ $${ans:-n} = y ] + @echo "===============================================" + @echo "Make: prune - deleting all docker artifacts" + @echo "===============================================" + @docker system prune --all --volumes -f + @docker volume prune --all -f + ## ------------------------------------------------------------------------------ ## Build/Run Postgres DB Commands ## - Builds all of the BioHub postgres db projects (db, db_setup) diff --git a/api/package-lock.json b/api/package-lock.json index bc6de85dc..7e0745025 100644 --- a/api/package-lock.json +++ b/api/package-lock.json @@ -538,36 +538,6 @@ "strip-ansi": "^7.0.1" } }, - "string-width-cjs": { - "version": "npm:string-width@4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "strip-ansi": { - "version": 
"6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - } - } - }, "strip-ansi": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", @@ -576,21 +546,6 @@ "ansi-regex": "^6.0.1" } }, - "strip-ansi-cjs": { - "version": "npm:strip-ansi@6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - } - } - }, "wrap-ansi": { "version": "8.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", @@ -600,54 +555,6 @@ "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } - }, - "wrap-ansi-cjs": { - "version": "npm:wrap-ansi@7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - } - } } } }, @@ -8807,6 +8714,16 @@ "strip-ansi": "^6.0.1" } }, + "string-width-cjs": { + "version": "npm:string-width@4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, "string.prototype.padend": { "version": "3.1.4", "resolved": "https://registry.npmjs.org/string.prototype.padend/-/string.prototype.padend-3.1.4.tgz", @@ -8864,6 +8781,14 @@ "ansi-regex": "^5.0.1" } }, + "strip-ansi-cjs": { + "version": "npm:strip-ansi@6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "requires": { + "ansi-regex": "^5.0.1" + } + }, "strip-bom": { "version": 
"2.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", @@ -9901,6 +9826,16 @@ "strip-ansi": "^6.0.0" } }, + "wrap-ansi-cjs": { + "version": "npm:wrap-ansi@7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", diff --git a/database/.eslintrc b/database/.eslintrc index 2a457fb10..8e93220f7 100644 --- a/database/.eslintrc +++ b/database/.eslintrc @@ -16,6 +16,15 @@ "@typescript-eslint/no-explicit-any": "off", "@typescript-eslint/explicit-module-boundary-types": "off", "@typescript-eslint/ban-types": ["error", { "types": { "object": false, "extendDefaults": true } }], + "@typescript-eslint/ban-ts-comment": [ + "error", + { + "ts-expect-error": false, + "ts-ignore": false, + "ts-nocheck": false, + "ts-check": false + } + ], "no-var": "error" } } diff --git a/database/.pipeline/lib/db.setup.deploy.js b/database/.pipeline/lib/db.setup.deploy.js index e59288377..ba97b6181 100644 --- a/database/.pipeline/lib/db.setup.deploy.js +++ b/database/.pipeline/lib/db.setup.deploy.js @@ -69,7 +69,6 @@ const dbSetupDeploy = async (settings) => { NODE_ENV: phases[phase].env || 'dev', DB_SERVICE_NAME: dbName, DB_SCHEMA: 'biohub', - DB_SCHEMA_DAPI_V1: 'biohub_dapi_v1', IMAGE: dbSetupImageStream.image.dockerImageReference } }) diff --git a/database/.pipeline/templates/db.setup.dc.yaml b/database/.pipeline/templates/db.setup.dc.yaml index f0dba10ca..2cfdb1b87 100644 --- a/database/.pipeline/templates/db.setup.dc.yaml +++ b/database/.pipeline/templates/db.setup.dc.yaml @@ -24,9 +24,6 @@ parameters: - name: DB_SCHEMA description: 'Database schema' required: true - - name: DB_SCHEMA_DAPI_V1 - description: 'Database api v1 
schema' - required: true - name: NODE_ENV description: Application Environment type variable required: true @@ -96,8 +93,6 @@ objects: value: ${VERSION} - name: DB_SCHEMA value: ${DB_SCHEMA} - - name: DB_SCHEMA_DAPI_V1 - value: ${DB_SCHEMA_DAPI_V1} imagePullPolicy: Always restartPolicy: Never activeDeadlineSeconds: 900 diff --git a/database/package-lock.json b/database/package-lock.json index e88feb905..2148757cf 100644 --- a/database/package-lock.json +++ b/database/package-lock.json @@ -89,6 +89,12 @@ } } }, + "@faker-js/faker": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/@faker-js/faker/-/faker-8.2.0.tgz", + "integrity": "sha512-VacmzZqVxdWdf9y64lDOMZNDMM/FQdtM9IsaOPKOm2suYwEatb8VkdHqOzXcDnZbk7YDE2BmsJmy/2Hmkn563g==", + "dev": true + }, "@humanwhocodes/config-array": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", @@ -284,6 +290,16 @@ "eslint-visitor-keys": "^2.0.0" } }, + "JSONStream": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz", + "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==", + "dev": true, + "requires": { + "jsonparse": "^1.2.0", + "through": ">=2.2.7 <3" + } + }, "acorn": { "version": "7.4.1", "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", @@ -506,6 +522,12 @@ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", "dev": true }, + "core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, "create-require": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", @@ -960,6 +982,16 @@ "is-callable": "^1.1.3" } }, + "from2": { + "version": "2.3.0", + "resolved": 
"https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", + "integrity": "sha512-OMcX/4IC/uqEPVgGeyfN22LJk6AZrMkRZHxcHBMBvHScDGgwTm2GT2Wkgtocyd3JfZffjj2kYUDXXII0Fk9W0g==", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "readable-stream": "^2.0.0" + } + }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -995,6 +1027,26 @@ "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", "dev": true }, + "geojson-random": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/geojson-random/-/geojson-random-0.5.0.tgz", + "integrity": "sha512-a4j6KJCC/ZhufwiXMxuoXrJiOxwwBQ0Y0DEcGmytnSJC11AkWHSKSFOw/ovh6QFp8n9XIDXhQXAvArRCvi2Y4A==", + "dev": true, + "requires": { + "from2": "^2.1.0", + "geojson-stream": "0.1.0" + } + }, + "geojson-stream": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/geojson-stream/-/geojson-stream-0.1.0.tgz", + "integrity": "sha512-svSg5fFXPaTiqzEBGXScA+nISaeC9rLvku2PH+wM5LToATUw2bLIrvls43ymnT9Xnp51nBPVyK9m4Af40KpJ7w==", + "dev": true, + "requires": { + "JSONStream": "^1.0.0", + "through": "^2.3.4" + } + }, "get-intrinsic": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", @@ -1381,6 +1433,12 @@ "call-bind": "^1.0.2" } }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, "isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -1421,6 +1479,12 @@ "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", "dev": true }, + "jsonparse": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", + "integrity": 
"sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==", + "dev": true + }, "knex": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/knex/-/knex-2.4.2.tgz", @@ -1858,6 +1922,12 @@ "integrity": "sha512-R8o23sf5iVL/U71h9SFUdhdOEPsi3nm42FD/oDYIZ2PQa4TNWWuWecxln6jlIQzpZTDMUeO1NicJP6lLn2TtRw==", "dev": true }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, "progress": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", @@ -1898,6 +1968,21 @@ } } }, + "readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, "rechoir": { "version": "0.8.0", "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.8.0.tgz", @@ -1968,6 +2053,12 @@ "queue-microtask": "^1.2.2" } }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, "safe-regex-test": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", @@ -2157,6 +2248,15 @@ "es-abstract": "^1.20.4" } }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + 
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + } + }, "strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -2236,6 +2336,12 @@ "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", "dev": true }, + "through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", + "dev": true + }, "tildify": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/tildify/-/tildify-2.0.0.tgz", @@ -2345,6 +2451,12 @@ "punycode": "^2.1.0" } }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, "v8-compile-cache": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", diff --git a/database/package.json b/database/package.json index 9da4b4d40..c6a98ccd2 100644 --- a/database/package.json +++ b/database/package.json @@ -27,6 +27,7 @@ "typescript": "~4.1.6" }, "devDependencies": { + "@faker-js/faker": "^8.2.0", "@types/node": "~14.14.31", "@types/pg": "~7.14.4", "@typescript-eslint/eslint-plugin": "~4.33.0", @@ -34,6 +35,7 @@ "eslint": "~7.32.0", "eslint-config-prettier": "~6.15.0", "eslint-plugin-prettier": "~3.3.1", + "geojson-random": "^0.5.0", "npm-run-all": "~4.1.5", "prettier": "~2.3.2", "prettier-plugin-organize-imports": "~2.3.4", diff --git a/database/src/migrations/20220225205948_release.ts b/database/src/migrations/20220225205948_release.ts index 51e7f401f..35e527581 100644 --- a/database/src/migrations/20220225205948_release.ts +++ 
b/database/src/migrations/20220225205948_release.ts @@ -63,35 +63,21 @@ export async function up(knex: Knex): Promise { path.join(__dirname, DB_RELEASE, 'populate_persecution_or_harm.sql') ); - const vw_generated_dapi_views = fs.readFileSync(path.join(__dirname, DB_RELEASE, 'vw_generated_dapi_views.sql')); - await knex.raw(` -- set up spatial extensions ${create_spatial_extensions} -- set up biohub schema create schema if not exists biohub; + + -- setup postgres user GRANT ALL ON SCHEMA biohub TO postgres; - set search_path = biohub, public; - - -- setup biohub api schema - create schema if not exists biohub_dapi_v1; + set search_path = biohub, public, topology; -- setup api user create user ${DB_USER_API} password '${DB_USER_API_PASS}'; - alter schema biohub_dapi_v1 owner to ${DB_USER_API}; - - -- Grant rights on biohub_dapi_v1 to biohub_api user - grant all on schema biohub_dapi_v1 to ${DB_USER_API}; - grant all on schema biohub_dapi_v1 to postgres; - alter DEFAULT PRIVILEGES in SCHEMA biohub_dapi_v1 grant ALL on tables to ${DB_USER_API}; - alter DEFAULT PRIVILEGES in SCHEMA biohub_dapi_v1 grant ALL on tables to postgres; - - -- biohub grants - GRANT USAGE ON SCHEMA biohub TO ${DB_USER_API}; - ALTER DEFAULT PRIVILEGES IN SCHEMA biohub GRANT ALL ON TABLES TO ${DB_USER_API}; - - alter role ${DB_USER_API} set search_path to biohub_dapi_v1, biohub, public, topology; + GRANT ALL ON SCHEMA biohub TO ${DB_USER_API}; + alter role ${DB_USER_API} set search_path to biohub, public, topology; ${biohub_ddl} ${populate_user_identity_source} @@ -118,21 +104,14 @@ export async function up(knex: Knex): Promise { ${populate_persecution_or_harm_type} ${populate_persecution_or_harm} - -- create the views - set search_path = biohub_dapi_v1; - set role biohub_api; - ${vw_generated_dapi_views} - set role postgres; - set search_path = biohub; - grant execute on function biohub.api_set_context(_system_user_identifier system_user.user_identifier%type, _user_identity_source_name 
user_identity_source.name%type) to ${DB_USER_API}; + set search_path = biohub, public; `); } export async function down(knex: Knex): Promise { await knex.raw(` DROP SCHEMA IF EXISTS biohub CASCADE; - DROP SCHEMA IF EXISTS biohub_dapi_v1 CASCADE; DROP USER IF EXISTS ${DB_USER_API}; `); } diff --git a/database/src/migrations/20220602152015_add_record_sims_eml_transformation.ts b/database/src/migrations/20220602152015_add_record_sims_eml_transformation.ts index a6701b570..7205960bd 100644 --- a/database/src/migrations/20220602152015_add_record_sims_eml_transformation.ts +++ b/database/src/migrations/20220602152015_add_record_sims_eml_transformation.ts @@ -1,8 +1,5 @@ import { Knex } from 'knex'; -const DB_SCHEMA = process.env.DB_SCHEMA; -const DB_SCHEMA_DAPI_V1 = process.env.DB_SCHEMA_DAPI_V1; - /** * EML source Transform. * @@ -247,8 +244,8 @@ export async function up(knex: Knex): Promise { `; await knex.raw(` - SET SCHEMA '${DB_SCHEMA}'; - SET SEARCH_PATH = ${DB_SCHEMA}, ${DB_SCHEMA_DAPI_V1}; + SET SCHEMA 'biohub'; + SET SEARCH_PATH = 'biohub'; insert into source_transform ( system_user_id, diff --git a/database/src/migrations/20220602152016_spatial_transform_eml_project_boundaries.ts b/database/src/migrations/20220602152016_spatial_transform_eml_project_boundaries.ts index 69d2adfe8..23d4421e7 100644 --- a/database/src/migrations/20220602152016_spatial_transform_eml_project_boundaries.ts +++ b/database/src/migrations/20220602152016_spatial_transform_eml_project_boundaries.ts @@ -1,8 +1,5 @@ import { Knex } from 'knex'; -const DB_SCHEMA = process.env.DB_SCHEMA; -const DB_SCHEMA_DAPI_V1 = process.env.DB_SCHEMA_DAPI_V1; - /** * EML Spatial Transform. 
* @@ -186,8 +183,8 @@ export async function up(knex: Knex): Promise { `; await knex.raw(` - SET SCHEMA '${DB_SCHEMA}'; - SET SEARCH_PATH = ${DB_SCHEMA}, ${DB_SCHEMA_DAPI_V1}; + SET SCHEMA 'biohub'; + SET SEARCH_PATH = 'biohub'; insert into spatial_transform ( name, diff --git a/database/src/migrations/20220602152016_spatial_transform_eml_project_boundary_centroid.ts b/database/src/migrations/20220602152016_spatial_transform_eml_project_boundary_centroid.ts index 7377a04d0..402ce5842 100644 --- a/database/src/migrations/20220602152016_spatial_transform_eml_project_boundary_centroid.ts +++ b/database/src/migrations/20220602152016_spatial_transform_eml_project_boundary_centroid.ts @@ -1,8 +1,5 @@ import { Knex } from 'knex'; -const DB_SCHEMA = process.env.DB_SCHEMA; -const DB_SCHEMA_DAPI_V1 = process.env.DB_SCHEMA_DAPI_V1; - /** * EML Spatial Transform. * @@ -136,8 +133,8 @@ export async function up(knex: Knex): Promise { `; await knex.raw(` - SET SCHEMA '${DB_SCHEMA}'; - SET SEARCH_PATH = ${DB_SCHEMA}, ${DB_SCHEMA_DAPI_V1}; + SET SCHEMA 'biohub'; + SET SEARCH_PATH = 'biohub'; insert into spatial_transform ( name, diff --git a/database/src/migrations/20230518125700_update_record_sims_eml_transformation.ts b/database/src/migrations/20230518125700_update_record_sims_eml_transformation.ts index 4f788f8f2..f5ae282b3 100644 --- a/database/src/migrations/20230518125700_update_record_sims_eml_transformation.ts +++ b/database/src/migrations/20230518125700_update_record_sims_eml_transformation.ts @@ -1,8 +1,5 @@ import { Knex } from 'knex'; -const DB_SCHEMA = process.env.DB_SCHEMA; -const DB_SCHEMA_DAPI_V1 = process.env.DB_SCHEMA_DAPI_V1; - /** * EML source Transform. 
* @@ -264,8 +261,8 @@ export async function up(knex: Knex): Promise { project_type pt; `; await knex.raw(` - SET SCHEMA '${DB_SCHEMA}'; - SET SEARCH_PATH = ${DB_SCHEMA}, ${DB_SCHEMA_DAPI_V1}; + SET SCHEMA 'biohub'; + SET SEARCH_PATH = 'biohub'; update source_transform set metadata_transform=$transform$${transformSQL}$transform$ diff --git a/database/src/migrations/20230531012345_remove_unique_id_artifact_persecution.ts b/database/src/migrations/20230531012345_remove_unique_id_artifact_persecution.ts index 869eaedfa..86d39ede2 100644 --- a/database/src/migrations/20230531012345_remove_unique_id_artifact_persecution.ts +++ b/database/src/migrations/20230531012345_remove_unique_id_artifact_persecution.ts @@ -9,16 +9,9 @@ import { Knex } from 'knex'; */ export async function up(knex: Knex): Promise { await knex.raw(` - set search_path=biohub_dapi_v1; - - drop view if exists artifact_persecution; - set search_path=biohub; DROP INDEX if exists artifact_persecution_uk1; CREATE UNIQUE INDEX artifact_persecution_uk1 ON artifact_persecution(artifact_id, persecution_or_harm_id); - - set search_path=biohub_dapi_v1; - CREATE OR REPLACE VIEW artifact_persecution as SELECT * FROM biohub.artifact_persecution; `); } diff --git a/database/src/migrations/20231109000001_feature_tables.ts b/database/src/migrations/20231109000001_feature_tables.ts new file mode 100644 index 000000000..85a028bb8 --- /dev/null +++ b/database/src/migrations/20231109000001_feature_tables.ts @@ -0,0 +1,269 @@ +import { Knex } from 'knex'; + +/** + * Add tables: + * - submission_feature + * - feature_type + * - feature_type_property + * - feature_property + * - feature_property_type + * + * @export + * @param {Knex} knex + * @return {*} {Promise} + */ +export async function up(knex: Knex): Promise { + await knex.raw(` + ---------------------------------------------------------------------------------------- + -- Create tables + 
---------------------------------------------------------------------------------------- + set search_path=biohub,public; + + CREATE TABLE submission_feature( + submission_feature_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + submission_id integer NOT NULL, + feature_type_id integer NOT NULL, + data jsonb NOT NULL, + parent_submission_feature_id integer, + record_effective_date date NOT NULL, + record_end_date date, + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT submission_feature_pk PRIMARY KEY (submission_feature_id) + ); + + COMMENT ON COLUMN submission_feature.submission_feature_id IS 'System generated surrogate primary key identifier.'; + COMMENT ON COLUMN submission_feature.submission_id IS 'Foreign key to the submission table.'; + COMMENT ON COLUMN submission_feature.feature_type_id IS 'Foreign key to the feature_type table.'; + COMMENT ON COLUMN submission_feature.data IS 'The json data of the submission_feature record.'; + COMMENT ON COLUMN submission_feature.parent_submission_feature_id IS 'Foreign key to the submission_feature table.'; + COMMENT ON COLUMN submission_feature.record_effective_date IS 'Record level effective date.'; + COMMENT ON COLUMN submission_feature.record_end_date IS 'Record level end date.'; + COMMENT ON COLUMN submission_feature.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN submission_feature.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN submission_feature.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN submission_feature.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN submission_feature.revision_count IS 'Revision count used for concurrency control.'; + COMMENT 
ON TABLE submission_feature IS 'A set of data for a specific feature of a submission.'; + + ---------------------------------------------------------------------------------------- + + CREATE TABLE feature_type( + feature_type_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + name varchar(50) NOT NULL, + display_name varchar(100) NOT NULL, + description varchar(3000), + record_effective_date date NOT NULL, + record_end_date date, + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT feature_type_pk PRIMARY KEY (feature_type_id) + ); + + COMMENT ON COLUMN feature_type.feature_type_id IS 'System generated surrogate primary key identifier.'; + COMMENT ON COLUMN feature_type.name IS 'The name of the feature_type record.'; + COMMENT ON COLUMN feature_type.display_name IS 'The formatted name of the feature_type record.'; + COMMENT ON COLUMN feature_type.description IS 'The description of the feature_type record.'; + COMMENT ON COLUMN feature_type.record_effective_date IS 'Record level effective date.'; + COMMENT ON COLUMN feature_type.record_end_date IS 'Record level end date.'; + COMMENT ON COLUMN feature_type.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN feature_type.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN feature_type.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN feature_type.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN feature_type.revision_count IS 'Revision count used for concurrency control.'; + COMMENT ON TABLE feature_type IS 'Defines feature types.'; + + ---------------------------------------------------------------------------------------- + + CREATE TABLE feature_type_property( + 
feature_type_property_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + feature_type_id integer NOT NULL, + feature_property_id integer NOT NULL, + record_effective_date date NOT NULL, + record_end_date date, + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT feature_type_property_pk PRIMARY KEY (feature_type_property_id) + ); + + COMMENT ON COLUMN feature_type_property.feature_type_property_id IS 'System generated surrogate primary key identifier.'; + COMMENT ON COLUMN feature_type_property.feature_type_id IS 'Foreign key to the feature_type table.'; + COMMENT ON COLUMN feature_type_property.feature_property_id IS 'Foreign key to the feature_property table.'; + COMMENT ON COLUMN feature_type_property.record_effective_date IS 'Record level effective date.'; + COMMENT ON COLUMN feature_type_property.record_end_date IS 'Record level end date.'; + COMMENT ON COLUMN feature_type_property.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN feature_type_property.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN feature_type_property.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN feature_type_property.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN feature_type_property.revision_count IS 'Revision count used for concurrency control.'; + COMMENT ON TABLE feature_type_property IS 'A join table on feature type and feature_property. 
Defines which properties can be used by a given feature type.'; + + ---------------------------------------------------------------------------------------- + + CREATE TABLE feature_property( + feature_property_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + feature_property_type_id integer NOT NULL, + name varchar(50) NOT NULL, + display_name varchar(100) NOT NULL, + description varchar(3000), + parent_feature_property_id integer, + record_effective_date date NOT NULL, + record_end_date date, + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT feature_property_pk PRIMARY KEY (feature_property_id) + ); + + COMMENT ON COLUMN feature_property.feature_property_id IS 'System generated surrogate primary key identifier.'; + COMMENT ON COLUMN feature_property.feature_property_type_id IS 'Foreign key to the feature_property_type table.'; + COMMENT ON COLUMN feature_property.name IS 'The name of the feature_property record.'; + COMMENT ON COLUMN feature_property.display_name IS 'The formatted name of the feature_property record.'; + COMMENT ON COLUMN feature_property.description IS 'The description of the feature_property record.'; + COMMENT ON COLUMN feature_property.parent_feature_property_id IS 'Foreign key to the feature_property table.'; + COMMENT ON COLUMN feature_property.record_effective_date IS 'Record level effective date.'; + COMMENT ON COLUMN feature_property.record_end_date IS 'Record level end date.'; + COMMENT ON COLUMN feature_property.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN feature_property.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN feature_property.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN feature_property.update_user IS 'The id of the user who 
updated the record as identified in the system user table.'; + COMMENT ON COLUMN feature_property.revision_count IS 'Revision count used for concurrency control.'; + COMMENT ON TABLE feature_property IS 'Defines supported feature data properties.'; + + ---------------------------------------------------------------------------------------- + + CREATE TABLE feature_property_type( + feature_property_type_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + name varchar(100) NOT NULL, + description varchar(3000), + record_effective_date date NOT NULL, + record_end_date date, + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT feature_property_type_pk PRIMARY KEY (feature_property_type_id) + ); + + COMMENT ON COLUMN feature_property_type.feature_property_type_id IS 'System generated surrogate primary key identifier.'; + COMMENT ON COLUMN feature_property_type.name IS 'The name of the feature_property_type record.'; + COMMENT ON COLUMN feature_property_type.description IS 'The description of the feature_property_type record.'; + COMMENT ON COLUMN feature_property_type.record_effective_date IS 'Record level effective date.'; + COMMENT ON COLUMN feature_property_type.record_end_date IS 'Record level end date.'; + COMMENT ON COLUMN feature_property_type.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN feature_property_type.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN feature_property_type.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN feature_property_type.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN feature_property_type.revision_count IS 'Revision count used for concurrency control.'; + COMMENT ON TABLE 
feature_property_type IS 'Defines supported feature data property types.'; + + ---------------------------------------------------------------------------------------- + -- Create Indexes and Constraints for table: submission_feature + ---------------------------------------------------------------------------------------- + + -- Add foreign key constraint + ALTER TABLE submission_feature ADD CONSTRAINT submission_feature_fk1 + FOREIGN KEY (submission_id) + REFERENCES submission(submission_id); + + ALTER TABLE submission_feature ADD CONSTRAINT submission_feature_fk2 + FOREIGN KEY (feature_type_id) + REFERENCES feature_type(feature_type_id); + + ALTER TABLE submission_feature ADD CONSTRAINT submission_feature_fk3 + FOREIGN KEY (parent_submission_feature_id) + REFERENCES submission_feature(submission_feature_id); + + -- add indexes for foreign keys + CREATE INDEX submission_feature_idx1 ON submission_feature(submission_id); + + CREATE INDEX submission_feature_idx2 ON submission_feature(feature_type_id); + + CREATE INDEX submission_feature_idx3 ON submission_feature(submission_feature_id); + + ---------------------------------------------------------------------------------------- + -- Create Indexes and Constraints for table: feature_type + ---------------------------------------------------------------------------------------- + + -- Add unique end-date key constraint (don't allow 2 records with the same name and a NULL record_end_date) + CREATE UNIQUE INDEX feature_type_nuk1 ON feature_type(name, (record_end_date is NULL)) where record_end_date is null; + + ---------------------------------------------------------------------------------------- + -- Create Indexes and Constraints for table: feature_type_property + ---------------------------------------------------------------------------------------- + + -- Add unique end-date key constraint (don't allow 2 records with the same feature_type_id, feature_property_id, and a NULL record_end_date) + CREATE UNIQUE INDEX 
feature_type_property_nuk1 ON feature_type_property(feature_type_id, feature_property_id, (record_end_date is NULL)) where record_end_date is null; + + -- Add foreign key constraint + ALTER TABLE feature_type_property ADD CONSTRAINT feature_type_property_fk1 + FOREIGN KEY (feature_type_id) + REFERENCES feature_type(feature_type_id); + + ALTER TABLE feature_type_property ADD CONSTRAINT feature_type_property_fk2 + FOREIGN KEY (feature_property_id) + REFERENCES feature_property(feature_property_id); + + -- add indexes for foreign keys + CREATE INDEX feature_type_property_idx1 ON feature_type_property(feature_type_id); + + CREATE INDEX feature_type_property_idx2 ON feature_type_property(feature_property_id); + + ---------------------------------------------------------------------------------------- + -- Create Indexes and Constraints for table: feature_property + ---------------------------------------------------------------------------------------- + + -- Add unique end-date key constraint (don't allow 2 records with the same name and a NULL record_end_date) + CREATE UNIQUE INDEX feature_property_nuk1 ON feature_property(name, (record_end_date is NULL)) where record_end_date is null; + + -- Add foreign key constraint + ALTER TABLE feature_property ADD CONSTRAINT feature_property_fk1 + FOREIGN KEY (feature_property_type_id) + REFERENCES feature_property_type(feature_property_type_id); + + -- add indexes for foreign keys + CREATE INDEX feature_property_idx1 ON feature_property(feature_property_type_id); + + ---------------------------------------------------------------------------------------- + -- Create Indexes and Constraints for table: feature_property_type + ---------------------------------------------------------------------------------------- + + -- Add unique end-date key constraint (don't allow 2 records with the same name and a NULL record_end_date) + CREATE UNIQUE INDEX feature_property_type_nuk1 ON feature_property_type(name, (record_end_date is NULL)) 
where record_end_date is null; + + ---------------------------------------------------------------------------------------- + -- Create audit and journal triggers + ---------------------------------------------------------------------------------------- + + create trigger audit_submission_feature before insert or update or delete on submission_feature for each row execute procedure tr_audit_trigger(); + create trigger journal_submission_feature after insert or update or delete on submission_feature for each row execute procedure tr_journal_trigger(); + + create trigger audit_feature_type before insert or update or delete on feature_type for each row execute procedure tr_audit_trigger(); + create trigger journal_feature_type after insert or update or delete on feature_type for each row execute procedure tr_journal_trigger(); + + create trigger audit_feature_type_property before insert or update or delete on feature_type_property for each row execute procedure tr_audit_trigger(); + create trigger journal_feature_type_property after insert or update or delete on feature_type_property for each row execute procedure tr_journal_trigger(); + + create trigger audit_feature_property before insert or update or delete on feature_property for each row execute procedure tr_audit_trigger(); + create trigger journal_feature_property after insert or update or delete on feature_property for each row execute procedure tr_journal_trigger(); + + create trigger audit_feature_property_type before insert or update or delete on feature_property_type for each row execute procedure tr_audit_trigger(); + create trigger journal_feature_property_type after insert or update or delete on feature_property_type for each row execute procedure tr_journal_trigger(); + `); +} + +export async function down(knex: Knex): Promise { + await knex.raw(``); +} diff --git a/database/src/migrations/20231109000002_search_tables.ts b/database/src/migrations/20231109000002_search_tables.ts new file mode 100644 
index 000000000..a45cd6c43 --- /dev/null +++ b/database/src/migrations/20231109000002_search_tables.ts @@ -0,0 +1,295 @@ +import { Knex } from 'knex'; + +/** + * Add tables: + * - search_string + * - search_string + * - search_number + * - search_datetime + * - search_spatial + * - search_taxonomy + * + * @export + * @param {Knex} knex + * @return {*} {Promise} + */ +export async function up(knex: Knex): Promise { + await knex.raw(` + ---------------------------------------------------------------------------------------- + -- Create tables + ---------------------------------------------------------------------------------------- + set search_path=biohub,public; + + CREATE TABLE search_string( + search_string_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + submission_feature_id integer NOT NULL, + feature_property_id integer NOT NULL, + value varchar(250) NOT NULL, +-- record_effective_date date NOT NULL, +-- record_end_date date, + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT search_string_pk PRIMARY KEY (search_string_id) + ); + + COMMENT ON COLUMN search_string.search_string_id IS 'System generated surrogate primary key identifier.'; + COMMENT ON COLUMN search_string.submission_feature_id IS 'Foreign key to the submission_feature table.'; + COMMENT ON COLUMN search_string.feature_property_id IS 'Foreign key to the feature_property table.'; + COMMENT ON COLUMN search_string.value IS 'The search value of the record.'; +-- COMMENT ON COLUMN search_string.record_effective_date IS 'Record level effective date.'; +-- COMMENT ON COLUMN search_string.record_end_date IS 'Record level end date.'; + COMMENT ON COLUMN search_string.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN search_string.create_user IS 'The id of the user who created the record as identified in the system 
user table.'; + COMMENT ON COLUMN search_string.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN search_string.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN search_string.revision_count IS 'Revision count used for concurrency control.'; + COMMENT ON TABLE search_string IS 'String search values'; + + ---------------------------------------------------------------------------------------- + + CREATE TABLE search_number( + search_number_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + submission_feature_id integer NOT NULL, + feature_property_id integer NOT NULL, + value numeric NOT NULL, +-- record_effective_date date NOT NULL, +-- record_end_date date, + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT search_number_pk PRIMARY KEY (search_number_id) + ); + + COMMENT ON COLUMN search_number.search_number_id IS 'System generated surrogate primary key identifier.'; + COMMENT ON COLUMN search_number.submission_feature_id IS 'Foreign key to the submission_feature table.'; + COMMENT ON COLUMN search_number.feature_property_id IS 'Foreign key to the feature_property table.'; + COMMENT ON COLUMN search_number.value IS 'The search value of the record.'; +-- COMMENT ON COLUMN search_number.record_effective_date IS 'Record level effective date.'; +-- COMMENT ON COLUMN search_number.record_end_date IS 'Record level end date.'; + COMMENT ON COLUMN search_number.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN search_number.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN search_number.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN search_number.update_user IS 'The id of the user who updated 
the record as identified in the system user table.'; + COMMENT ON COLUMN search_number.revision_count IS 'Revision count used for concurrency control.'; + COMMENT ON TABLE search_number IS 'Number search values'; + + ---------------------------------------------------------------------------------------- + + CREATE TABLE search_datetime( + search_datetime_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + submission_feature_id integer NOT NULL, + feature_property_id integer NOT NULL, + value timestamptz(6) NOT NULL, +-- record_effective_date date NOT NULL, +-- record_end_date date, + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT search_datetime_pk PRIMARY KEY (search_datetime_id) + ); + + COMMENT ON COLUMN search_datetime.search_datetime_id IS 'System generated surrogate primary key identifier.'; + COMMENT ON COLUMN search_datetime.submission_feature_id IS 'Foreign key to the submission_feature table.'; + COMMENT ON COLUMN search_datetime.feature_property_id IS 'Foreign key to the feature_property table.'; + COMMENT ON COLUMN search_datetime.value IS 'The search value of the record.'; +-- COMMENT ON COLUMN search_datetime.record_effective_date IS 'Record level effective date.'; +-- COMMENT ON COLUMN search_datetime.record_end_date IS 'Record level end date.'; + COMMENT ON COLUMN search_datetime.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN search_datetime.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN search_datetime.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN search_datetime.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN search_datetime.revision_count IS 'Revision count used for concurrency 
control.'; + COMMENT ON TABLE search_datetime IS 'Datetime search values'; + + ---------------------------------------------------------------------------------------- + + CREATE TABLE search_spatial( + search_spatial_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + submission_feature_id integer NOT NULL, + feature_property_id integer NOT NULL, + value geometry NOT NULL, +-- record_effective_date date NOT NULL, +-- record_end_date date, + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT search_spatial_pk PRIMARY KEY (search_spatial_id) + ); + + COMMENT ON COLUMN search_spatial.search_spatial_id IS 'System generated surrogate primary key identifier.'; + COMMENT ON COLUMN search_spatial.submission_feature_id IS 'Foreign key to the submission_feature table.'; + COMMENT ON COLUMN search_spatial.feature_property_id IS 'Foreign key to the feature_property table.'; + COMMENT ON COLUMN search_spatial.value IS 'The search value of the record.'; +-- COMMENT ON COLUMN search_spatial.record_effective_date IS 'Record level effective date.'; +-- COMMENT ON COLUMN search_spatial.record_end_date IS 'Record level end date.'; + COMMENT ON COLUMN search_spatial.create_date IS 'The spatial the record was created.'; + COMMENT ON COLUMN search_spatial.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN search_spatial.update_date IS 'The spatial the record was updated.'; + COMMENT ON COLUMN search_spatial.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN search_spatial.revision_count IS 'Revision count used for concurrency control.'; + COMMENT ON TABLE search_spatial IS 'Spatial search values'; + + ---------------------------------------------------------------------------------------- 
+ + CREATE TABLE search_taxonomy( + search_taxonomy_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + submission_feature_id integer NOT NULL, + feature_property_id integer NOT NULL, + value numeric NOT NULL, +-- record_effective_date date NOT NULL, +-- record_end_date date, + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT search_taxonomy_pk PRIMARY KEY (search_taxonomy_id) + ); + + COMMENT ON COLUMN search_taxonomy.search_taxonomy_id IS 'System generated surrogate primary key identifier.'; + COMMENT ON COLUMN search_taxonomy.submission_feature_id IS 'Foreign key to the submission_feature table.'; + COMMENT ON COLUMN search_taxonomy.feature_property_id IS 'Foreign key to the feature_property table.'; + COMMENT ON COLUMN search_taxonomy.value IS 'The search value of the record.'; +-- COMMENT ON COLUMN search_taxonomy.record_effective_date IS 'Record level effective date.'; +-- COMMENT ON COLUMN search_taxonomy.record_end_date IS 'Record level end date.'; + COMMENT ON COLUMN search_taxonomy.create_date IS 'The taxonomy the record was created.'; + COMMENT ON COLUMN search_taxonomy.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN search_taxonomy.update_date IS 'The taxonomy the record was updated.'; + COMMENT ON COLUMN search_taxonomy.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN search_taxonomy.revision_count IS 'Revision count used for concurrency control.'; + COMMENT ON TABLE search_taxonomy IS 'Taxonomy search values'; + + ---------------------------------------------------------------------------------------- + -- Create Indexes and Constraints for table: search_string + 
---------------------------------------------------------------------------------------- + + -- Add foreign key constraint + ALTER TABLE search_string ADD CONSTRAINT search_string_fk1 + FOREIGN KEY (submission_feature_id) + REFERENCES submission_feature(submission_feature_id); + + ALTER TABLE search_string ADD CONSTRAINT search_string_fk2 + FOREIGN KEY (feature_property_id) + REFERENCES feature_property(feature_property_id); + + -- add indexes for foreign keys + CREATE INDEX search_string_idx1 ON search_string(submission_feature_id); + + CREATE INDEX search_string_idx2 ON search_string(feature_property_id); + + CREATE INDEX search_string_idx3 ON search_string(value); + + ---------------------------------------------------------------------------------------- + -- Create Indexes and Constraints for table: search_number + ---------------------------------------------------------------------------------------- + + -- Add foreign key constraint + ALTER TABLE search_number ADD CONSTRAINT search_number_fk1 + FOREIGN KEY (submission_feature_id) + REFERENCES submission_feature(submission_feature_id); + + ALTER TABLE search_number ADD CONSTRAINT search_number_fk2 + FOREIGN KEY (feature_property_id) + REFERENCES feature_property(feature_property_id); + + -- add indexes for foreign keys + CREATE INDEX search_number_idx1 ON search_number(submission_feature_id); + + CREATE INDEX search_number_idx2 ON search_number(feature_property_id); + + CREATE INDEX search_number_idx3 ON search_number(value); + + ---------------------------------------------------------------------------------------- + -- Create Indexes and Constraints for table: search_datetime + ---------------------------------------------------------------------------------------- + + -- Add foreign key constraint + ALTER TABLE search_datetime ADD CONSTRAINT search_datetime_fk1 + FOREIGN KEY (submission_feature_id) + REFERENCES submission_feature(submission_feature_id); + + ALTER TABLE search_datetime ADD CONSTRAINT 
search_datetime_fk2 + FOREIGN KEY (feature_property_id) + REFERENCES feature_property(feature_property_id); + + -- add indexes for foreign keys + CREATE INDEX search_datetime_idx1 ON search_datetime(submission_feature_id); + + CREATE INDEX search_datetime_idx2 ON search_datetime(feature_property_id); + + CREATE INDEX search_datetime_idx3 ON search_datetime(value); + + ---------------------------------------------------------------------------------------- + -- Create Indexes and Constraints for table: search_spatial + ---------------------------------------------------------------------------------------- + + -- Add foreign key constraint + ALTER TABLE search_spatial ADD CONSTRAINT search_spatial_fk1 + FOREIGN KEY (submission_feature_id) + REFERENCES submission_feature(submission_feature_id); + + ALTER TABLE search_spatial ADD CONSTRAINT search_spatial_fk2 + FOREIGN KEY (feature_property_id) + REFERENCES feature_property(feature_property_id); + + -- add indexes for foreign keys + CREATE INDEX search_spatial_idx1 ON search_spatial(submission_feature_id); + + CREATE INDEX search_spatial_idx2 ON search_spatial(feature_property_id); + + -- add spatial index + CREATE INDEX search_spatial_idx3 ON search_spatial using GIST(value); + + ---------------------------------------------------------------------------------------- + -- Create Indexes and Constraints for table: search_taxonomy + ---------------------------------------------------------------------------------------- + + -- Add foreign key constraint + ALTER TABLE search_taxonomy ADD CONSTRAINT search_taxonomy_fk1 + FOREIGN KEY (submission_feature_id) + REFERENCES submission_feature(submission_feature_id); + + ALTER TABLE search_taxonomy ADD CONSTRAINT search_taxonomy_fk2 + FOREIGN KEY (feature_property_id) + REFERENCES feature_property(feature_property_id); + + -- add indexes for foreign keys + CREATE INDEX search_taxonomy_idx1 ON search_taxonomy(submission_feature_id); + + CREATE INDEX search_taxonomy_idx2 ON 
search_taxonomy(feature_property_id); + + CREATE INDEX search_taxonomy_idx3 ON search_taxonomy(value); + + ---------------------------------------------------------------------------------------- + -- Create audit and journal triggers + ---------------------------------------------------------------------------------------- + + create trigger audit_search_string before insert or update or delete on search_string for each row execute procedure tr_audit_trigger(); + create trigger journal_search_string after insert or update or delete on search_string for each row execute procedure tr_journal_trigger(); + + create trigger audit_search_number before insert or update or delete on search_number for each row execute procedure tr_audit_trigger(); + create trigger journal_search_number after insert or update or delete on search_number for each row execute procedure tr_journal_trigger(); + + create trigger audit_search_datetime before insert or update or delete on search_datetime for each row execute procedure tr_audit_trigger(); + create trigger journal_search_datetime after insert or update or delete on search_datetime for each row execute procedure tr_journal_trigger(); + + create trigger audit_search_spatial before insert or update or delete on search_spatial for each row execute procedure tr_audit_trigger(); + create trigger journal_search_spatial after insert or update or delete on search_spatial for each row execute procedure tr_journal_trigger(); + + create trigger audit_search_taxonomy before insert or update or delete on search_taxonomy for each row execute procedure tr_audit_trigger(); + create trigger journal_search_taxonomy after insert or update or delete on search_taxonomy for each row execute procedure tr_journal_trigger(); + `); +} + +export async function down(knex: Knex): Promise { + await knex.raw(``); +} diff --git a/database/src/migrations/20231109000003_populate_feature_lookup_tables.ts 
b/database/src/migrations/20231109000003_populate_feature_lookup_tables.ts new file mode 100644 index 000000000..31d920cdb --- /dev/null +++ b/database/src/migrations/20231109000003_populate_feature_lookup_tables.ts @@ -0,0 +1,94 @@ +import { Knex } from 'knex'; + +/** + * Populate tables: + * - feature_property_type + * - feature_property + * - feature_type + * - feature_type_property + * + * @export + * @param {Knex} knex + * @return {*} {Promise} + */ +export async function up(knex: Knex): Promise { + await knex.raw(` + ---------------------------------------------------------------------------------------- + -- Create tables + ---------------------------------------------------------------------------------------- + set search_path=biohub,public; + + -- populate feature_property_type table + insert into feature_property_type (name, description, record_effective_date) values ('string', 'A text type', now()); + insert into feature_property_type (name, description, record_effective_date) values ('number', 'A number type', now()); + insert into feature_property_type (name, description, record_effective_date) values ('boolean', 'A boolean type', now()); + insert into feature_property_type (name, description, record_effective_date) values ('object', 'An object type', now()); + insert into feature_property_type (name, description, record_effective_date) values ('array', 'An array type', now()); + insert into feature_property_type (name, description, record_effective_date) values ('datetime', 'A datetime type (ISO 8601)', now()); + insert into feature_property_type (name, description, record_effective_date) values ('spatial', 'A spatial type', now()); + insert into feature_property_type (name, description, record_effective_date) values ('taxonomy', 'A taxonomic type', now()); + + -- populate feature_property table + insert into feature_property (name, display_name, description, feature_property_type_id, parent_feature_property_id, record_effective_date) values ('name', 
'Name', 'The name of the record', (select feature_property_type_id from feature_property_type where name = 'string'), null, now()); + insert into feature_property (name, display_name, description, feature_property_type_id, parent_feature_property_id, record_effective_date) values ('description', 'Description', 'The description of the record', (select feature_property_type_id from feature_property_type where name = 'string'), null, now()); + insert into feature_property (name, display_name, description, feature_property_type_id, parent_feature_property_id, record_effective_date) values ('taxonomy', 'Taxonomy', 'The taxonomy associated to the record', (select feature_property_type_id from feature_property_type where name = 'taxonomy'), null, now()); + insert into feature_property (name, display_name, description, feature_property_type_id, parent_feature_property_id, record_effective_date) values ('date_range', 'Date Range', 'A date range', (select feature_property_type_id from feature_property_type where name = 'object'), null, now()); + insert into feature_property (name, display_name, description, feature_property_type_id, parent_feature_property_id, record_effective_date) values ('start_date', 'Start Date', 'The start date of the record', (select feature_property_type_id from feature_property_type where name = 'datetime'), (select feature_property_id from feature_property where name = 'date_range'), now()); + insert into feature_property (name, display_name, description, feature_property_type_id, parent_feature_property_id, record_effective_date) values ('end_date', 'End Date', 'The end date of the record', (select feature_property_type_id from feature_property_type where name = 'datetime'), (select feature_property_id from feature_property where name = 'date_range'), now()); + insert into feature_property (name, display_name, description, feature_property_type_id, parent_feature_property_id, record_effective_date) values ('geometry', 'Geometry', 'The location of 
the record', (select feature_property_type_id from feature_property_type where name = 'spatial'), null, now()); + insert into feature_property (name, display_name, description, feature_property_type_id, parent_feature_property_id, record_effective_date) values ('count', 'Count', 'The count of the record', (select feature_property_type_id from feature_property_type where name = 'number'), null, now()); + + -- populate feature_type table + insert into feature_type (name, display_name, description, record_effective_date) values ('dataset', 'Dataset', 'A related collection of data (ie: survey)', now()); + insert into feature_type (name, display_name, description, record_effective_date) values ('sample_site', 'Sample Site', 'A location at which data was collected', now()); + insert into feature_type (name, display_name, description, record_effective_date) values ('sample_method', 'Sample Method', 'A method used to collect data', now()); + insert into feature_type (name, display_name, description, record_effective_date) values ('sample_period', 'Sample Period', 'A datetime period in which data was collected', now()); + insert into feature_type (name, display_name, description, record_effective_date) values ('observation', 'Observation', 'An observation record', now()); + insert into feature_type (name, display_name, description, record_effective_date) values ('animal', 'Animal', 'An individual animal record', now()); + insert into feature_type (name, display_name, description, record_effective_date) values ('telemetry', 'Telemetry ', 'A telemetry record', now()); + + -- populate feature_type_property table + -- feature_type: dataset + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'dataset'), (select feature_property_id from feature_property where name = 'name'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, 
record_effective_date) values ((select feature_type_id from feature_type where name = 'dataset'), (select feature_property_id from feature_property where name = 'description'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'dataset'), (select feature_property_id from feature_property where name = 'taxonomy'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'dataset'), (select feature_property_id from feature_property where name = 'date_range'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'dataset'), (select feature_property_id from feature_property where name = 'start_date'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'dataset'), (select feature_property_id from feature_property where name = 'end_date'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'dataset'), (select feature_property_id from feature_property where name = 'geometry'), now()); + + -- feature_type: sample_site + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'sample_site'), (select feature_property_id from feature_property where name = 'name'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'sample_site'), (select feature_property_id from feature_property where 
name = 'description'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'sample_site'), (select feature_property_id from feature_property where name = 'geometry'), now()); + + -- feature_type: sample_method + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'sample_method'), (select feature_property_id from feature_property where name = 'name'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'sample_method'), (select feature_property_id from feature_property where name = 'description'), now()); + + -- feature_type: sample_period + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'sample_period'), (select feature_property_id from feature_property where name = 'date_range'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'sample_period'), (select feature_property_id from feature_property where name = 'start_date'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'sample_period'), (select feature_property_id from feature_property where name = 'end_date'), now()); + + -- feature_type: observation + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'observation'), (select feature_property_id from feature_property where name = 'taxonomy'), now()); + insert into 
feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'observation'), (select feature_property_id from feature_property where name = 'date_range'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'observation'), (select feature_property_id from feature_property where name = 'start_date'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'observation'), (select feature_property_id from feature_property where name = 'end_date'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'observation'), (select feature_property_id from feature_property where name = 'geometry'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'observation'), (select feature_property_id from feature_property where name = 'count'), now()); + + -- feature_type: animal + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'animal'), (select feature_property_id from feature_property where name = 'taxonomy'), now()); + + -- feature_type: telemetry + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'telemetry'), (select feature_property_id from feature_property where name = 'date_range'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id 
from feature_type where name = 'telemetry'), (select feature_property_id from feature_property where name = 'start_date'), now()); + insert into feature_type_property (feature_type_id, feature_property_id, record_effective_date) values ((select feature_type_id from feature_type where name = 'telemetry'), (select feature_property_id from feature_property where name = 'end_date'), now()); + `); +} + +export async function down(knex: Knex): Promise<void> { + await knex.raw(``); +} diff --git a/database/src/seeds/01_db_system_users.ts b/database/src/seeds/01_db_system_users.ts index cf37eb103..34cf5e6a7 100644 --- a/database/src/seeds/01_db_system_users.ts +++ b/database/src/seeds/01_db_system_users.ts @@ -1,6 +1,5 @@ import { Knex } from 'knex'; -const DB_SCHEMA = process.env.DB_SCHEMA; const DB_ADMIN = process.env.DB_ADMIN; export enum SYSTEM_IDENTITY_SOURCE { @@ -86,8 +85,8 @@ const systemUsers = [ */ export async function seed(knex: Knex): Promise<void> { await knex.raw(` - set schema '${DB_SCHEMA}'; - set search_path = ${DB_SCHEMA}; + set schema 'biohub'; + set search_path = 'biohub'; `); for (const systemUser of systemUsers) { diff --git a/database/src/seeds/02_security_transform_eml_dwc_occurrences.ts b/database/src/seeds/02_security_transform_eml_dwc_occurrences.ts index db3d67eed..1b94dfce5 100644 --- a/database/src/seeds/02_security_transform_eml_dwc_occurrences.ts +++ b/database/src/seeds/02_security_transform_eml_dwc_occurrences.ts @@ -1,8 +1,5 @@ import { Knex } from 'knex'; -const DB_SCHEMA = process.env.DB_SCHEMA; -const DB_SCHEMA_DAPI_V1 = process.env.DB_SCHEMA_DAPI_V1; - /** * Add spatial transform * @@ -12,8 +9,8 @@ const DB_SCHEMA_DAPI_V1 = process.env.DB_SCHEMA_DAPI_V1; */ export async function seed(knex: Knex): Promise<void> { await knex.raw(` - SET SCHEMA '${DB_SCHEMA}'; - SET SEARCH_PATH = ${DB_SCHEMA}, ${DB_SCHEMA_DAPI_V1}; + SET SCHEMA 'biohub'; + SET SEARCH_PATH = 'biohub'; `); const response = await knex.raw(checkTransformExists()); diff --git 
a/database/src/seeds/03_dwc_spatial_transform.ts b/database/src/seeds/03_dwc_spatial_transform.ts index a3f429ca6..fe20ee917 100644 --- a/database/src/seeds/03_dwc_spatial_transform.ts +++ b/database/src/seeds/03_dwc_spatial_transform.ts @@ -1,8 +1,5 @@ import { Knex } from 'knex'; -const DB_SCHEMA = process.env.DB_SCHEMA; -const DB_SCHEMA_DAPI_V1 = process.env.DB_SCHEMA_DAPI_V1; - /** * Add spatial transform * @@ -12,8 +9,8 @@ const DB_SCHEMA_DAPI_V1 = process.env.DB_SCHEMA_DAPI_V1; */ export async function seed(knex: Knex): Promise<void> { await knex.raw(` - SET SCHEMA '${DB_SCHEMA}'; - SET SEARCH_PATH = ${DB_SCHEMA}, ${DB_SCHEMA_DAPI_V1}; + SET SCHEMA 'biohub'; + SET SEARCH_PATH = 'biohub'; `); const response = await knex.raw(checkTransformExists()); diff --git a/database/src/seeds/04_mock_test_data.ts b/database/src/seeds/04_mock_test_data.ts new file mode 100644 index 000000000..14280d405 --- /dev/null +++ b/database/src/seeds/04_mock_test_data.ts @@ -0,0 +1,387 @@ +import { faker } from '@faker-js/faker'; +// @ts-ignore ignore error over missing geojson-random declaration (.d.ts) file +import random from 'geojson-random'; +import { Knex } from 'knex'; + +// Disable mock data seeding by default, unless manually enabled +const ENABLED = false; + +/** + * Sample query for performance testing. 
+ * + * -- Select feature_submissions on multiple conditions (AND) + * select * from submission_feature + * where feature_type_id = 1 and submission_feature_id in ( + * select distinct t1.submission_feature_id + * FROM search_string t1 + * WHERE EXISTS ( + * select 1 from search_string t3 where t3.submission_feature_id = t1.submission_feature_id and t3.value like '%cor%' + * ) and exists ( + * select 1 from search_string t4 where t4.submission_feature_id = t1.submission_feature_id and t4.value like '%arx%' + * ) and exists ( + * select 1 from search_number t5 where t5.submission_feature_id = t1.submission_feature_id and t5.value > 40 and t5.value < 50 + * ) and exists ( + * select 1 from search_taxonomy t6 where t6.submission_feature_id = t1.submission_feature_id and t6.value > 50000 and t6.value < 50100 + * ) and exists ( + * select 1 from search_datetime t7 where t7.submission_feature_id = t1.submission_feature_id and t7.value > '2023-08-01' and t7.value < '2024-04-01' and t7.feature_property_id = (select feature_property_id from feature_property where name = 'start_date') + * ) and exists ( + * select 1 from search_datetime t7 where t7.submission_feature_id = t1.submission_feature_id and t7.value > '2023-08-01' and t7.value < '2024-04-01' and t7.feature_property_id = (select feature_property_id from feature_property where name = 'end_date') + * ) and exists ( + * select 1 from search_spatial t8 where t8.submission_feature_id = t1.submission_feature_id and public.ST_INTERSECTS(t8.value, 
public.ST_GeomFromGeoJSON('{"coordinates":[[[-128.12596524778567,50.90095573861839],[-128.6951954392062,50.75063500834236],[-127.71373499792975,49.63640480052965],[-125.38308025753057,48.53083459202276],[-123.3647465830768,48.15806226354249],[-122.94623399379441,48.36504151433127],[-123.37439502763095,49.13209156231335],[-124.66835857611437,49.81654191782255],[-126.6572708981094,50.607171392416745],[-127.89342678974776,50.9888374217299],[-128.12596524778567,50.90095573861839]]],"type":"Polygon"}')) + * ) + * ); + */ + +/** + * Inserts mock submission/feature data, geared towards performance testing. + * + * @export + * @param {Knex} knex + * @return {*} {Promise<void>} + */ +export async function seed(knex: Knex): Promise<void> { + if (!ENABLED) { + return knex.raw(`SELECT null;`); // dummy query to appease knex + } + + await knex.raw(` + SET SCHEMA 'biohub'; + SET SEARCH_PATH = 'biohub','public'; + `); + + const numSubmissions = 10000; // Number of submissions (ie: surveys) + + for (let i = 0; i < numSubmissions; i++) { + await insertRecord(knex); + } +} + +/** + * Insert a single submission record, a single dataset record, and 10 sample site records, each with 2 animal records and 20 observation records. 
+ * + * @param {Knex} knex + */ +const insertRecord = async (knex: Knex) => { + // Submission (1) + const submission_id = await insertSubmissionRecord(knex); + + // Dataset (1) + const parent_submission_feature_id1 = await insertDatasetRecord(knex, { submission_id }); + + // Sample Sites (10) + for (let i = 0; i < 10; i++) { + const parent_submission_feature_id2 = await insertSampleSiteRecord(knex, { + submission_id, + parent_submission_feature_id: parent_submission_feature_id1 + }); + + // Animals (2 per sample site) + for (let i = 0; i < 2; i++) { + await insertAnimalRecord(knex, { submission_id, parent_submission_feature_id: parent_submission_feature_id2 }); + } + + // Observations (20 per sample site) + for (let i = 0; i < 20; i++) { + await insertObservationRecord(knex, { + submission_id, + parent_submission_feature_id: parent_submission_feature_id2 + }); + } + } +}; + +const insertSubmissionRecord = async (knex: Knex): Promise<number> => { + const response = await knex.raw(`${insertSubmission()}`); + const submission_id = response.rows[0].submission_id; + + return submission_id; +}; + +const insertDatasetRecord = async (knex: Knex, options: { submission_id: number }): Promise<number> => { + const response = await knex.raw( + `${insertSubmissionFeature({ + submission_id: options.submission_id, + parent_submission_feature_id: null, + feature_type: 'dataset' + })}` + ); + const submission_feature_id = response.rows[0].submission_feature_id; + + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + + await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); + await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); + await knex.raw(`${insertSearchNumber({ 
submission_feature_id })}`); + await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); + + await knex.raw(`${insertSearchTaxonomy({ submission_feature_id })}`); + await knex.raw(`${insertSearchTaxonomy({ submission_feature_id })}`); + await knex.raw(`${insertSearchTaxonomy({ submission_feature_id })}`); + + await knex.raw(`${insertSearchStartDatetime({ submission_feature_id })}`); + await knex.raw(`${insertSearchEndDatetime({ submission_feature_id })}`); + + await knex.raw(`${insertSpatialPolygon({ submission_feature_id })}`); + + return submission_feature_id; +}; + +const insertSampleSiteRecord = async ( + knex: Knex, + options: { submission_id: number; parent_submission_feature_id: number } +): Promise<number> => { + const response = await knex.raw( + `${insertSubmissionFeature({ + submission_id: options.submission_id, + parent_submission_feature_id: options.parent_submission_feature_id, + feature_type: 'sample_site' + })}` + ); + const submission_feature_id = response.rows[0].submission_feature_id; + + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + + await knex.raw(`${insertSpatialPolygon({ submission_feature_id })}`); + + return submission_feature_id; +}; + +const insertObservationRecord = async ( + knex: Knex, + options: { submission_id: number; parent_submission_feature_id: number } +): Promise<number> => { + const response = await knex.raw( + `${insertSubmissionFeature({ + submission_id: options.submission_id, + parent_submission_feature_id: options.parent_submission_feature_id, + feature_type: 'observation' + })}` + ); + const submission_feature_id = response.rows[0].submission_feature_id; + + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + 
await knex.raw(`${insertSearchString({ submission_feature_id })}`); + + await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); + await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); + await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); + await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); + + await knex.raw(`${insertSearchTaxonomy({ submission_feature_id })}`); + + await knex.raw(`${insertSearchStartDatetime({ submission_feature_id })}`); + await knex.raw(`${insertSearchEndDatetime({ submission_feature_id })}`); + + await knex.raw(`${insertSpatialPoint({ submission_feature_id })}`); + + return submission_feature_id; +}; + +const insertAnimalRecord = async ( + knex: Knex, + options: { submission_id: number; parent_submission_feature_id: number } +): Promise<number> => { + const response = await knex.raw( + `${insertSubmissionFeature({ + submission_id: options.submission_id, + parent_submission_feature_id: options.parent_submission_feature_id, + feature_type: 'animal' + })}` + ); + const submission_feature_id = response.rows[0].submission_feature_id; + + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + await knex.raw(`${insertSearchString({ submission_feature_id })}`); + + await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); + await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); + await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); + await knex.raw(`${insertSearchNumber({ submission_feature_id })}`); + + await knex.raw(`${insertSearchTaxonomy({ submission_feature_id })}`); + + await knex.raw(`${insertSearchStartDatetime({ submission_feature_id })}`); + await knex.raw(`${insertSearchEndDatetime({ submission_feature_id })}`); + + await 
knex.raw(`${insertSpatialPoint({ submission_feature_id })}`); + + return submission_feature_id; +}; + +const insertSubmission = () => ` + INSERT INTO submission + ( + source_transform_id, + uuid + ) + values + ( + 1, + public.gen_random_uuid() + ) + RETURNING submission_id; +`; + +const insertSubmissionFeature = (options: { + submission_id: number; + parent_submission_feature_id: number | null; + feature_type: 'dataset' | 'sample_site' | 'observation' | 'animal'; +}) => ` + INSERT INTO submission_feature + ( + submission_id, + parent_submission_feature_id, + feature_type_id, + data, + record_effective_date + ) + values + ( + ${options.submission_id}, + ${options.parent_submission_feature_id}, + (select feature_type_id from feature_type where name = '${options.feature_type}'), + '{ + "name": "${faker.lorem.words(3)}" + }', + now() + ) + RETURNING submission_feature_id; +`; + +const insertSearchString = (options: { submission_feature_id: number }) => ` + INSERT INTO search_string + ( + submission_feature_id, + feature_property_id, + value + ) + values + ( + ${options.submission_feature_id}, + (select feature_property_id from feature_property where name = 'name'), + $$${faker.lorem.words(3)}$$ + ); +`; + +const insertSearchNumber = (options: { submission_feature_id: number }) => ` + INSERT INTO search_number + ( + submission_feature_id, + feature_property_id, + value + ) + values + ( + ${options.submission_feature_id}, + (select feature_property_id from feature_property where name = 'count'), + $$${faker.number.int({ min: 0, max: 100 })}$$ + ); +`; + +const insertSearchTaxonomy = (options: { submission_feature_id: number }) => ` + INSERT INTO search_taxonomy + ( + submission_feature_id, + feature_property_id, + value + ) + values + ( + ${options.submission_feature_id}, + (select feature_property_id from feature_property where name = 'taxonomy'), + $$${faker.number.int({ min: 10000, max: 99999 })}$$ + ); +`; + +const insertSearchStartDatetime = (options: { 
submission_feature_id: number }) => ` + INSERT INTO search_datetime + ( + submission_feature_id, + feature_property_id, + value + ) + values + ( + ${options.submission_feature_id}, + (select feature_property_id from feature_property where name = 'start_date'), + $$${faker.date.past().toISOString()}$$ + ); +`; + +const insertSearchEndDatetime = (options: { submission_feature_id: number }) => ` + INSERT INTO search_datetime + ( + submission_feature_id, + feature_property_id, + value + ) + values + ( + ${options.submission_feature_id}, + (select feature_property_id from feature_property where name = 'end_date'), + $$${faker.date.future().toISOString()}$$ + ); +`; + +const insertSpatialPolygon = (options: { submission_feature_id: number }) => + ` + INSERT INTO search_spatial + ( + submission_feature_id, + feature_property_id, + value + ) + values + ( + ${options.submission_feature_id}, + (select feature_property_id from feature_property where name = 'geometry'), + public.ST_GeomFromGeoJSON( + '${JSON.stringify( + random.polygon( + 1, // number of features in feature collection + randomIntFromInterval(4, 30), // number of coordinates + 1, // degrees freedom + [-135.878906, 48.617424, -114.433594, 60.664785] // bbox constraint + )['features'][0]['geometry'] + )}' + ) + ); +`; + +const insertSpatialPoint = (options: { submission_feature_id: number }) => + ` + INSERT INTO search_spatial + ( + submission_feature_id, + feature_property_id, + value + ) + values + ( + ${options.submission_feature_id}, + (select feature_property_id from feature_property where name = 'geometry'), + public.ST_GeomFromGeoJSON( + '${JSON.stringify( + random.point( + 1, // number of features in feature collection + [-135.878906, 48.617424, -114.433594, 60.664785] // bbox constraint + )['features'][0]['geometry'] + )}' + ) + ); +`; + +const randomIntFromInterval = (min: number, max: number) => { + return Math.floor(Math.random() * (max - min + 1) + min); +}; diff --git a/docker-compose.yml 
b/docker-compose.yml index 04eeededd..e10f3d9f9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -41,6 +41,7 @@ services: - ${API_PORT}:${API_PORT} environment: - NODE_ENV=${NODE_ENV} + - NODE_OPTIONS=${API_NODE_OPTIONS} - ELASTICSEARCH_URL=${ELASTICSEARCH_URL} - ELASTICSEARCH_EML_INDEX=${ELASTICSEARCH_EML_INDEX} - ELASTICSEARCH_TAXONOMY_INDEX=${ELASTICSEARCH_TAXONOMY_INDEX} @@ -148,6 +149,7 @@ services: - ${APP_PORT}:${APP_PORT} environment: - NODE_ENV=${NODE_ENV} + - NODE_OPTIONS=${APP_NODE_OPTIONS} - REACT_APP_NODE_ENV=${NODE_ENV} - PORT=${APP_PORT} - REACT_APP_API_HOST=${API_HOST} @@ -181,7 +183,6 @@ services: - DB_SCHEMA=${DB_SCHEMA} - DB_USER_API=${DB_USER_API} - DB_USER_API_PASS=${DB_USER_API_PASS} - - DB_SCHEMA_DAPI_V1=${DB_SCHEMA_DAPI_V1} volumes: - /opt/app-root/src/node_modules # prevents local node_modules overriding container node_modules networks: @@ -208,7 +209,6 @@ services: - DB_SCHEMA=${DB_SCHEMA} - DB_USER_API=${DB_USER_API} - DB_USER_API_PASS=${DB_USER_API_PASS} - - DB_SCHEMA_DAPI_V1=${DB_SCHEMA_DAPI_V1} volumes: - /opt/app-root/src/node_modules # prevents local node_modules overriding container node_modules networks: @@ -235,7 +235,6 @@ services: - DB_SCHEMA=${DB_SCHEMA} - DB_USER_API=${DB_USER_API} - DB_USER_API_PASS=${DB_USER_API_PASS} - - DB_SCHEMA_DAPI_V1=${DB_SCHEMA_DAPI_V1} volumes: - /opt/app-root/src/node_modules # prevents local node_modules overriding container node_modules networks: diff --git a/env_config/env.docker b/env_config/env.docker index a2131e854..bcaecad14 100644 --- a/env_config/env.docker +++ b/env_config/env.docker @@ -21,6 +21,10 @@ # ------------------------------------------------------------------------------ NODE_ENV=local +# Dictates the max size of the heap (Mb). Should not be greater than 75% of total available memory. The default seems to be 2Gb. 
+API_NODE_OPTIONS=--max-old-space-size=4096 +APP_NODE_OPTIONS=--max-old-space-size=4096 + # ------------------------------------------------------------------------------ # App # ------------------------------------------------------------------------------ @@ -54,7 +58,6 @@ DB_USER_API_PASS=postgres DB_PORT=5432 DB_DATABASE=biohubbc DB_SCHEMA=biohub -DB_SCHEMA_DAPI_V1=biohub_dapi_v1 DB_TZ=America/Vancouver # ------------------------------------------------------------------------------