diff --git a/scripts/configure.js b/scripts/configure.js
deleted file mode 100755
index aedef3c..0000000
--- a/scripts/configure.js
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env node
-
-import { parseArgs } from "node:util";
-import * as fs from "fs";
-import * as path from "path";
-import * as utils from "./utils.js";
-
-const args = parseArgs({
-    options: {
-        schema: {
-            type: "string",
-        },
-        file_name: {
-            type: "string",
-        },
-        db_name: {
-            type: "string",
-        }
-    }
-});
-
-const schema = utils.optional(args, "schema");
-const tokenizable = new Set;
-if (schema != null) {
-    function traverse_schema(x, path) {
-        if (x.type == "object") {
-            if ("properties" in x) {
-                for (const [k, v] of Object.entries(x.properties)) {
-                    traverse_schema(v, (path == null ? k : path + "." + k));
-                }
-            }
-        } else if (x.type == "array") {
-            if ("items" in x) {
-                traverse_schema(x.items, path);
-            }
-        } else if (x.type == "string") {
-            if ("_attributes" in x) {
-                if (x._attributes.indexOf("free_text") >= 0) {
-                    tokenizable.add(path);
-                }
-            }
-        }
-    }
-
-    const schema_doc = fs.readFileSync(schema, { encoding: "utf8" });
-    const loaded_schema = JSON.parse(schema_doc);
-    traverse_schema(loaded_schema);
-}
-
-console.log(JSON.stringify({
-    db_name: utils.required(args, "db_name"),
-    file_name: utils.required(args, "file_name"),
-    tokenizable: Array.from(tokenizable)
-}))
diff --git a/scripts/fresh.js b/scripts/fresh.js
index ff0802b..5a84aa9 100755
--- a/scripts/fresh.js
+++ b/scripts/fresh.js
@@ -25,7 +25,7 @@ const args = parseArgs({
 });
 
 const dir = utils.required(args, "dir");
-const { db_paths, db_tokenizable } = utils.parseConfigurations(utils.required(args, "config"), dir);
+const db_paths = utils.parseConfigurations(utils.required(args, "config"), dir);
 const { list_projects, list_assets, list_versions, find_latest, read_summary, read_metadata } = utils.chooseSourceFunctions(utils.optional(args, "registry"), utils.optional(args, "gypsum"));
 
 // Creating the timestamp here, just so that if there are any operations
@@ -34,4 +34,4 @@ const { list_projects, list_assets, list_versions, find_latest, read_summary, re
 // just (re)aligning with whatever's in the bucket.
 fs.writeFileSync(path.join(dir, "modified"), String((new Date).getTime()))
 
-await freshHandler(db_paths, list_projects, list_assets, list_versions, find_latest, read_summary, read_metadata, db_tokenizable);
+await freshHandler(db_paths, list_projects, list_assets, list_versions, find_latest, read_summary, read_metadata);
diff --git a/scripts/manual.js b/scripts/manual.js
index e9e5c1f..7e8457f 100755
--- a/scripts/manual.js
+++ b/scripts/manual.js
@@ -33,7 +33,7 @@ const args = parseArgs({
     }
 });
 
-const { db_paths, db_tokenizable } = utils.parseConfigurations(utils.required(args, "config"), utils.required(args, "dir"));
+const db_paths = utils.parseConfigurations(utils.required(args, "config"), utils.required(args, "dir"));
 const { list_projects, list_assets, list_versions, find_latest, read_summary, read_metadata } = utils.chooseSourceFunctions(utils.optional(args, "registry"), utils.optional(args, "gypsum"));
 
 await manualHandler(
@@ -46,5 +46,4 @@ await manualHandler(
     find_latest,
     read_summary,
     read_metadata,
-    db_tokenizable
 );
diff --git a/scripts/update.js b/scripts/update.js
index 1e080cc..dae153c 100755
--- a/scripts/update.js
+++ b/scripts/update.js
@@ -25,12 +25,12 @@ const args = parseArgs({
 });
 
 const dir = utils.required(args, "dir");
-const { db_paths, db_tokenizable } = utils.parseConfigurations(utils.required(args, "config"), dir);
+const db_paths = utils.parseConfigurations(utils.required(args, "config"), dir);
 const { list_logs, read_log, read_metadata, find_latest } = utils.chooseSourceFunctions(utils.optional(args, "registry"), utils.optional(args, "gypsum"));
 
 let lastmod_path = path.join(dir, "modified");
 let lastmod = new Date(Number(fs.readFileSync(lastmod_path)));
-let all_logs = await updateHandler(db_paths, lastmod, list_logs, read_log, read_metadata, find_latest, db_tokenizable);
+let all_logs = await updateHandler(db_paths, lastmod, list_logs, read_log, read_metadata, find_latest);
 
 // Storing the timestamp of the last processed job.
 if (all_logs.length) {
diff --git a/scripts/utils.js b/scripts/utils.js
index 06b6415..484c005 100644
--- a/scripts/utils.js
+++ b/scripts/utils.js
@@ -5,13 +5,11 @@ import * as path from "path";
 
 export function parseConfigurations(configs, dir) {
     const db_paths = {};
-    const db_tokenizable = {};
     for (const cpath of configs) {
-        let config = JSON.parse(fs.readFileSync(cpath, { encoding: "utf8" }));
-        db_paths[config.file_name] = path.join(dir, config.db_name);
-        db_tokenizable[config.file_name] = new Set(config.tokenizable);
+        let i = cpath.indexOf(",");
+        db_paths[cpath.slice(0, i)] = path.join(dir, cpath.slice(i + 1));
     }
-    return { db_paths, db_tokenizable };
+    return db_paths;
 }
 
 export function chooseSourceFunctions(registry, gypsum_url) {
diff --git a/src/handlers/freshHandler.js b/src/handlers/freshHandler.js
index 5a4b002..7f9d444 100644
--- a/src/handlers/freshHandler.js
+++ b/src/handlers/freshHandler.js
@@ -3,7 +3,7 @@ import { addVersion } from "../sqlite/addVersion.js";
 import { createTables } from "../sqlite/createTables.js";
 import Database from "better-sqlite3"
 
-export async function freshHandler(db_paths, list_projects, list_assets, list_versions, find_latest, read_summary, read_metadata, db_tokenizable) {
+export async function freshHandler(db_paths, list_projects, list_assets, list_versions, find_latest, read_summary, read_metadata) {
     const db_handles = {};
     for (const [k, v] of Object.entries(db_paths)) {
         if (fs.existsSync(v)) {
@@ -17,7 +17,7 @@ export async function freshHandler(db_paths, list_projects, list_assets, list_ve
     const all_projects = await list_projects();
     let all_outcomes = [];
    for (const project of all_projects) {
-        let projprom = internal_freshProject(db_handles, project, list_assets, list_versions, find_latest, read_summary, read_metadata, db_tokenizable);
+        let projprom = internal_freshProject(db_handles, project, list_assets, list_versions, find_latest, read_summary, read_metadata);
         all_outcomes.push(projprom);
     }
 
@@ -32,11 +32,11 @@ export async function freshHandler(db_paths, list_projects, list_assets, list_ve
 }
 
 // Only exported for the purpose of re-use in manualHandler.js.
-export async function internal_freshProject(db_handles, project, list_assets, list_versions, find_latest, read_summary, read_metadata, db_tokenizable) {
+export async function internal_freshProject(db_handles, project, list_assets, list_versions, find_latest, read_summary, read_metadata) {
     const all_assets = await list_assets(project);
     let all_outcomes = [];
     for (const asset of all_assets) {
-        let assprom = internal_freshAsset(db_handles, project, asset, list_versions, find_latest, read_summary, read_metadata, db_tokenizable);
+        let assprom = internal_freshAsset(db_handles, project, asset, list_versions, find_latest, read_summary, read_metadata);
         all_outcomes.push(assprom);
     }
 
@@ -49,7 +49,7 @@ export async function internal_freshProject(db_handles, project, list_assets, li
     }
 }
 
-export async function internal_freshAsset(db_handles, project, asset, list_versions, find_latest, read_summary, read_metadata, db_tokenizable) {
+export async function internal_freshAsset(db_handles, project, asset, list_versions, find_latest, read_summary, read_metadata) {
     const latest = await find_latest(project, asset);
     if (latest == null) { // short-circuit if latest=null, as that means that there are no non-probational versions.
         return;
@@ -58,7 +58,7 @@ export async function internal_freshAsset(db_handles, project, asset, list_versi
     const all_versions = await list_versions(project, asset);
     let all_outcomes = [];
     for (const version of all_versions) {
-        let verprom = internal_freshVersion(db_handles, project, asset, version, latest, read_summary, read_metadata, db_tokenizable);
+        let verprom = internal_freshVersion(db_handles, project, asset, version, latest, read_summary, read_metadata);
         all_outcomes.push(verprom);
     }
 
@@ -71,7 +71,7 @@ export async function internal_freshAsset(db_handles, project, asset, list_versi
     }
 }
 
-export async function internal_freshVersion(db_handles, project, asset, version, latest, read_summary, read_metadata, db_tokenizable) {
+export async function internal_freshVersion(db_handles, project, asset, version, latest, read_summary, read_metadata) {
     const summ = await read_summary(project, asset, version);
     if ("on_probation" in summ && summ.on_probation) {
         return;
@@ -79,7 +79,7 @@ export async function internal_freshVersion(db_handles, project, asset, version,
     const output = await read_metadata(project, asset, version, Object.keys(db_handles));
     for (const [e, db] of Object.entries(db_handles)) {
         try {
-            addVersion(db, project, asset, version, (latest == version), output[e], db_tokenizable[e]);
+            addVersion(db, project, asset, version, (latest == version), output[e]);
         } catch (err) {
             throw new Error("failed to add to database '" + e + "'", { cause: err });
         }
diff --git a/src/handlers/manualHandler.js b/src/handlers/manualHandler.js
index 93d5a9f..ba1a98a 100644
--- a/src/handlers/manualHandler.js
+++ b/src/handlers/manualHandler.js
@@ -4,7 +4,7 @@ import { deleteProject } from "../sqlite/deleteProject.js";
 import * as fresh from "./freshHandler.js";
 import Database from "better-sqlite3"
 
-export async function manualHandler(db_paths, project, asset, version, list_assets, list_versions, find_latest, read_summary, read_metadata, db_tokenizable) {
+export async function manualHandler(db_paths, project, asset, version, list_assets, list_versions, find_latest, read_summary, read_metadata) {
     const db_handles = {};
     for (const [k, v] of Object.entries(db_paths)) {
         db_handles[k] = Database(v);
@@ -14,13 +14,13 @@ export async function manualHandler(db_paths, project, asset, version, list_asse
         for (const db of Object.values(db_handles)) {
             deleteProject(db, project);
         }
-        await fresh.internal_freshProject(db_handles, project, list_assets, list_versions, find_latest, read_summary, read_metadata, db_tokenizable);
+        await fresh.internal_freshProject(db_handles, project, list_assets, list_versions, find_latest, read_summary, read_metadata);
 
     } else if (version == null) {
         for (const db of Object.values(db_handles)) {
             deleteAsset(db, project, asset);
         }
-        await fresh.internal_freshAsset(db_handles, project, asset, list_versions, find_latest, read_summary, read_metadata, db_tokenizable);
+        await fresh.internal_freshAsset(db_handles, project, asset, list_versions, find_latest, read_summary, read_metadata);
 
     } else {
         for (const db of Object.values(db_handles)) {
@@ -28,7 +28,7 @@ export async function manualHandler(db_paths, project, asset, version, list_asse
         }
         const latest = find_latest(project, asset);
         if (latest != null) { // short-circuit if latest = null, as this implies that there are no (non-probational) versions.
-            await fresh.internal_freshVersion(db_handles, project, asset, version, latest, read_summary, read_metadata, db_tokenizable);
+            await fresh.internal_freshVersion(db_handles, project, asset, version, latest, read_summary, read_metadata);
         }
     }
 }
diff --git a/src/handlers/updateHandler.js b/src/handlers/updateHandler.js
index 88623a5..9aa6fdb 100644
--- a/src/handlers/updateHandler.js
+++ b/src/handlers/updateHandler.js
@@ -85,7 +85,7 @@ export async function readLogs(last_modified, list_logs, read_log) {
     return logs;
 }
 
-export async function updateHandler(db_paths, last_modified, list_logs, read_log, read_metadata, find_latest, db_tokenizable) {
+export async function updateHandler(db_paths, last_modified, list_logs, read_log, read_metadata, find_latest) {
     const db_handles = {};
     for (const [k, v] of Object.entries(db_paths)) {
         db_handles[k] = Database(v);
@@ -104,7 +104,7 @@ export async function updateHandler(db_paths, last_modified, list_logs, read_log
             const version = safe_extract(parameters, "version");
             let output = await read_metadata(project, asset, version, to_extract);
             for (const [e, db] of Object.entries(db_handles)) {
-                addVersion(db, project, asset, version, is_latest(parameters), output[e], db_tokenizable[e]);
+                addVersion(db, project, asset, version, is_latest(parameters), output[e]);
             }
 
         } else if (type == "delete-version") {
diff --git a/src/sqlite/addVersion.js b/src/sqlite/addVersion.js
index 29aa85d..1c945a1 100644
--- a/src/sqlite/addVersion.js
+++ b/src/sqlite/addVersion.js
@@ -1,6 +1,6 @@
 import { splitIntoTokens } from "./splitIntoTokens.js";
 
-export function addVersion(db, project, asset, version, latest, metadata, tokenizable) {
+export function addVersion(db, project, asset, version, latest, metadata) {
     const trans = db.transaction(() => {
         db.prepare("DELETE FROM versions WHERE project = ? AND asset = ? AND VERSION = ?").run(project, asset, version);
         if (latest) {
@@ -22,7 +22,7 @@ export function addVersion(db, project, asset, version, latest, metadata, tokeni
         for (const [p, m] of Object.entries(metadata)) {
             let pinfo = db.prepare("INSERT INTO paths(vid, path, metadata) VALUES(?, ?, jsonb(?)) RETURNING pid").get(vid, p, JSON.stringify(m));
             let pid = pinfo.pid;
-            traverse_metadata(db, pid, m, null, insert_token, tokenizable);
+            traverse_metadata(db, pid, m, null, insert_token);
         }
     });
 
@@ -30,24 +30,22 @@ export function addVersion(db, project, asset, version, latest, metadata, tokeni
     return;
 }
 
-function traverse_metadata(db, pid, metadata, property, insert_token, tokenizable) {
+function traverse_metadata(db, pid, metadata, property, insert_token) {
     if (metadata instanceof Array) {
         for (const v of metadata) {
-            traverse_metadata(db, pid, v, property, insert_token, tokenizable);
+            traverse_metadata(db, pid, v, property, insert_token);
         }
     } else if (metadata instanceof Object) {
         for (const [k, v] of Object.entries(metadata)) {
             let newname = (property == null ? k : property + "." + k);
-            traverse_metadata(db, pid, v, newname, insert_token, tokenizable);
+            traverse_metadata(db, pid, v, newname, insert_token);
         }
     } else {
-        if (typeof metadata == "string" && tokenizable.has(property)) {
+        if (typeof metadata == "string") {
             let tokens = splitIntoTokens(metadata);
             for (const t of tokens) {
                 insert_token(pid, property, t);
             }
-        } else {
-            insert_token(pid, property, String(metadata));
         }
     }
 }
diff --git a/tests/handlers/freshHandler.test.js b/tests/handlers/freshHandler.test.js
index 9cf532f..9f75cc0 100644
--- a/tests/handlers/freshHandler.test.js
+++ b/tests/handlers/freshHandler.test.js
@@ -6,10 +6,8 @@ import Database from "better-sqlite3";
 test("freshHandler works correctly without probation", async () => {
     const testdir = utils.setupTestDirectory("freshHandler");
     let all_paths = {};
-    let all_tokenizable = {};
     for (const p of [ "_meta", "_alt" ]) {
         all_paths[p] = path.join(testdir, "test" + p + ".sqlite3")
-        all_tokenizable[p] = new Set(["description", "motto"]);
     }
 
     await freshHandler(
@@ -54,8 +52,7 @@ test("freshHandler works correctly without probation", async () => {
                 "_alt": { "thingy.csv": utils.mockMetadata["macrophage"] }
             }
         }
-        },
-        all_tokenizable
+        }
     );
 
     // Check that all versions are added, along with their metadata entries.
@@ -69,7 +66,7 @@
         expect(vpayload.map(x => x.version)).toEqual(["bar1", "bar2", "v1", "v1"]);
         expect(vpayload.map(x => x.latest)).toEqual([0, 1, 1, 1]);
 
-        let tpayload = utils.scanForToken(db, 'Donato');
+        let tpayload = utils.scanForToken(db, 'donato');
         if (x == "_meta") {
             expect(tpayload.length).toBeGreaterThan(0);
         } else {
@@ -90,10 +87,8 @@
 test("freshHandler works correctly with probation", async () => {
     const testdir = utils.setupTestDirectory("freshHandler");
     let all_paths = {};
-    let all_tokenizable = {};
     for (const p of [ "_meta", "_alt" ]) {
         all_paths[p] = path.join(testdir, "test" + p + ".sqlite3")
-        all_tokenizable[p] = new Set(["description", "motto"]);
     }
 
     await freshHandler(
@@ -132,8 +127,7 @@ test("freshHandler works correctly with probation", async () => {
             "_meta": { "AAA.json": utils.mockMetadata["marcille"] },
             "_alt": { "BBB/CCC.txt": utils.mockMetadata["chicken"] }
         }
-        },
-        all_tokenizable
+        }
     );
 
     // Check that all versions are added, along with their metadata entries.
@@ -147,7 +141,7 @@
         expect(vpayload[0].version).toBe("bar1");
         expect(vpayload[0].latest).toBe(1);
 
-        let tpayload = utils.scanForToken(db, 'Donato');
+        let tpayload = utils.scanForToken(db, 'donato');
         if (x == "_meta") {
             expect(tpayload.length).toBeGreaterThan(0);
         } else {
diff --git a/tests/handlers/manualHandler.test.js b/tests/handlers/manualHandler.test.js
index 637c70a..31f79e0 100644
--- a/tests/handlers/manualHandler.test.js
+++ b/tests/handlers/manualHandler.test.js
@@ -7,14 +7,12 @@ import Database from "better-sqlite3";
 test("manualHandler works correctly", async () => {
     const testdir = utils.setupTestDirectory("manualHandler");
     let all_paths = {};
-    let all_tokenizable = {};
     for (const p of [ "_meta", "_alt" ]) {
         let opath = path.join(testdir, "test" + p + ".sqlite3")
         let db = Database(opath);
         createTables(db);
         db.close();
         all_paths[p] = opath;
-        all_tokenizable[p] = new Set(["description", "motto"]);
     }
 
     // Set up the various functions.
@@ -60,7 +58,7 @@ test("manualHandler works correctly", async () => {
     };
 
     // Refreshing a single version.
-    await manualHandler(all_paths, "test", "foo", "bar1", listAssets, listVersions, findLatest, readSummary, readMetadata, all_tokenizable);
+    await manualHandler(all_paths, "test", "foo", "bar1", listAssets, listVersions, findLatest, readSummary, readMetadata);
 
     for (const [x, p] of Object.entries(all_paths)) {
         const db = Database(p);
@@ -72,7 +70,7 @@ test("manualHandler works correctly", async () => {
         expect(vpayload[0].version).toEqual("bar1");
         expect(vpayload[0].latest).toEqual(0);
 
-        let tpayload = utils.scanForToken(db, 'Donato');
+        let tpayload = utils.scanForToken(db, 'donato');
         if (x == "_meta") {
             expect(tpayload.length).toBeGreaterThan(0);
         } else {
@@ -90,7 +88,7 @@ test("manualHandler works correctly", async () => {
     }
 
     // Refreshing a single asset.
-    await manualHandler(all_paths, "test", "foo", null, listAssets, listVersions, findLatest, readSummary, readMetadata, all_tokenizable);
+    await manualHandler(all_paths, "test", "foo", null, listAssets, listVersions, findLatest, readSummary, readMetadata);
 
     for (const [x, p] of Object.entries(all_paths)) {
         const db = Database(p);
@@ -110,7 +108,7 @@ test("manualHandler works correctly", async () => {
     }
 
     // Refreshing a single project.
-    await manualHandler(all_paths, "test", null, null, listAssets, listVersions, findLatest, readSummary, readMetadata, all_tokenizable);
+    await manualHandler(all_paths, "test", null, null, listAssets, listVersions, findLatest, readSummary, readMetadata);
 
     for (const [x, p] of Object.entries(all_paths)) {
         const db = Database(p);
diff --git a/tests/handlers/updateHandler.test.js b/tests/handlers/updateHandler.test.js
index 2d73328..96ef306 100644
--- a/tests/handlers/updateHandler.test.js
+++ b/tests/handlers/updateHandler.test.js
@@ -53,14 +53,12 @@ test("readLogs reports logs correctly", async () => {
 test("updateHandler adds versions correctly", async () => {
     const testdir = utils.setupTestDirectory("updateHandler");
     let all_paths = {};
-    let all_tokenizable = {};
     for (const p of [ "_meta", "_alt" ]) {
         let opath = path.join(testdir, "test" + p + ".sqlite3")
         let db = Database(opath);
         createTables(db);
         db.close();
         all_paths[p] = opath;
-        all_tokenizable[p] = new Set(["description", "motto"]);
     }
 
     const now = new Date;
@@ -96,8 +94,7 @@ test("updateHandler adds versions correctly", async () => {
         },
         (project, asset) => {
             throw new Error("I shouldn't be called here");
-        },
-        all_tokenizable
+        }
     );
 
     // Check that it sorts correctly.
@@ -143,7 +140,7 @@ test("updateHandler adds versions correctly", async () => {
         expect(tpayload[0].path).toBe("BBB/CCC.txt");
         expect(tpayload[0].field).toBe("variations");
 
-        tpayload = utils.scanForToken(db, '%stem cells%', { partial: true });
+        tpayload = utils.scanForToken(db, 'hemato%', { partial: true });
         expect(tpayload.length).toBe(1);
         expect(tpayload[0].path).toBe("thingy.csv");
         expect(tpayload[0].field).toBe("lineage.from.from.name");
@@ -156,7 +153,6 @@
 test("updateHandler deletes versions correctly", async () => {
     const testdir = utils.setupTestDirectory("updateHandler");
     let all_paths = {};
-    let all_tokenizable = {};
     for (const p of [ "_meta", "_alt" ]) {
         let opath = path.join(testdir, "test" + p + ".sqlite3")
         let db = Database(opath);
@@ -166,7 +162,6 @@ test("updateHandler deletes versions correctly", async () => {
         addVersion(db, "test", "foo", "v3", true, {}, new Set);
         db.close();
         all_paths[p] = opath;
-        all_tokenizable[p] = new Set;
     }
 
     const now = new Date;
@@ -185,8 +180,7 @@ test("updateHandler deletes versions correctly", async () => {
         },
         (project, asset) => {
            throw new Error("I shouldn't be called here");
-        },
-        all_tokenizable
+        }
     );
 
     for (const [x, p] of Object.entries(all_paths)) {
@@ -215,8 +209,7 @@ test("updateHandler deletes versions correctly", async () => {
         },
         (project, asset) => {
            return "v1";
-        },
-        all_tokenizable
+        }
     );
 
     for (const [x, p] of Object.entries(all_paths)) {
@@ -242,8 +235,7 @@ test("updateHandler deletes versions correctly", async () => {
         },
         (project, asset) => {
            return null;
-        },
-        all_tokenizable
+        }
     );
 
     for (const [x, p] of Object.entries(all_paths)) {
@@ -257,7 +249,6 @@
 test("updateHandler deletes assets correctly", async () => {
     const testdir = utils.setupTestDirectory("updateHandler");
     let all_paths = {};
-    let all_tokenizable = {};
     for (const p of [ "_meta", "_alt" ]) {
         let opath = path.join(testdir, "test" + p + ".sqlite3")
         let db = Database(opath);
@@ -266,7 +257,6 @@ test("updateHandler deletes assets correctly", async () => {
         addVersion(db, "test", "bar", "v1", true, {}, new Set);
         db.close();
         all_paths[p] = opath;
-        all_tokenizable[p] = new Set;
     }
 
     const now = new Date;
@@ -284,8 +274,7 @@ test("updateHandler deletes assets correctly", async () => {
         },
         (project, asset) => {
            throw new Error("I shouldn't be called here");
-        },
-        all_tokenizable
+        }
     );
 
     for (const [x, p] of Object.entries(all_paths)) {
@@ -301,7 +290,6 @@
 test("updateHandler deletes projects correctly", async () => {
     const testdir = utils.setupTestDirectory("updateHandler");
     let all_paths = {};
-    let all_tokenizable = {};
     for (const p of [ "_meta", "_alt" ]) {
         let opath = path.join(testdir, "test" + p + ".sqlite3")
         let db = Database(opath);
@@ -310,7 +298,6 @@ test("updateHandler deletes projects correctly", async () => {
         addVersion(db, "retest", "foo", "v1", true, {}, new Set);
         db.close();
         all_paths[p] = opath;
-        all_tokenizable[p] = new Set;
     }
 
     const now = new Date;
@@ -328,8 +315,7 @@ test("updateHandler deletes projects correctly", async () => {
         },
         (project, asset) => {
            throw new Error("I shouldn't be called here");
-        },
-        all_tokenizable
+        }
     );
 
     for (const [x, p] of Object.entries(all_paths)) {
diff --git a/tests/scripts/configure.test.js b/tests/scripts/configure.test.js
deleted file mode 100644
index ec17a31..0000000
--- a/tests/scripts/configure.test.js
+++ /dev/null
@@ -1,68 +0,0 @@
-import * as fs from "fs";
-import * as path from "path";
-import Database from "better-sqlite3";
-import * as utils from "../utils.js";
-import { execSync } from "child_process";
-
-test("configure script works correctly", () => {
-    const testdir = utils.setupTestDirectory("configure");
-    const schema_path = path.join(testdir, "schema.json");
-    fs.writeFileSync(
-        schema_path,
-        `{
-    "type": "object",
-    "properties": {
-        "foo": {
-            "type": "string"
-        },
-        "bar": {
-            "type": "number"
-        },
-        "whee": {
-            "type": "string",
-            "_attributes": [ "free_text" ]
-        },
-        "blah": {
-            "type": "array",
-            "items": {
-                "type": "number"
-            }
-        },
-        "stuff": {
-            "type": "array",
-            "items": {
-                "type": "string",
-                "_attributes": [ "free_text" ]
-            }
-        },
-        "other": {
-            "type": "object",
-            "properties": {
-                "blah": {
-                    "type": "string"
-                },
-                "foobar": {
-                    "type": "string",
-                    "_attributes": [ "free_text" ]
-                }
-            }
-        }
-    }
-}`);
-
-    let output = execSync(`./scripts/configure.js --schema ${schema_path} --file_name falin --db_name marcille`);
-    const dec = new TextDecoder;
-    let parsed = JSON.parse(dec.decode(output));
-    expect(parsed.file_name).toBe("falin");
-    expect(parsed.db_name).toBe("marcille");
-
-    parsed.tokenizable.sort();
-    const expected = [ "stuff", "whee", "other.foobar" ];
-    expected.sort();
-    expect(parsed.tokenizable).toEqual(expected);
-
-    // Schema is actually optional.
-    output = execSync(`./scripts/configure.js --file_name falin --db_name marcille`);
-    parsed = JSON.parse(dec.decode(output));
-    expect(parsed.tokenizable).toEqual([]);
-})
diff --git a/tests/scripts/utils.js b/tests/scripts/utils.js
index 2204281..97b3bc5 100644
--- a/tests/scripts/utils.js
+++ b/tests/scripts/utils.js
@@ -23,26 +23,11 @@ export function mockEnvironment(name) {
     fs.mkdirSync(path.join(regdir, "foo", "bar", "v2", "blah", "sub"));
     fs.writeFileSync(path.join(regdir, "foo", "bar", "v2", "blah", "sub", "other.json"), JSON.stringify(utils.mockMetadata["macrophage"]));
 
-    const confdir = path.join(testdir, "configs");
-    fs.mkdirSync(confdir);
-    const conf1 = path.join(confdir, "1.json");
-    fs.writeFileSync(conf1, JSON.stringify({
-        file_name: "stuff.json",
-        db_name: "stuff.sqlite3",
-        tokenizable: [ "description" ]
-    }));
-    const conf2 = path.join(confdir, "2.json");
-    fs.writeFileSync(conf2, JSON.stringify({
-        file_name: "other.json",
-        db_name: "other.sqlite3",
-        tokenizable: [ "motto" ]
-    }));
-
     const inddir = path.join(testdir, "indices");
     fs.mkdirSync(inddir);
 
     return {
         registry: regdir,
-        configs: [conf1, conf2],
+        configs: ["stuff.json,stuff.sqlite3", "other.json,other.sqlite3"],
         indices: inddir,
     };
 }
diff --git a/tests/sqlite/addVersion.test.js b/tests/sqlite/addVersion.test.js
index 396ada3..64cdc5f 100644
--- a/tests/sqlite/addVersion.test.js
+++ b/tests/sqlite/addVersion.test.js
@@ -11,7 +11,7 @@ test("Basic addition of a new version", () => {
     createTables(db);
 
     const meta = { "a.txt": utils.mockMetadata["chicken"], "b/c.txt": utils.mockMetadata["marcille"] };
-    addVersion(db, "foo", "bar", "whee", true, meta, new Set);
+    addVersion(db, "foo", "bar", "whee", true, meta);
 
     // Checking that all the pieces were added.
     const vpayload = db.prepare("SELECT * FROM versions").all();
@@ -34,14 +34,12 @@ test("Basic addition of a new version", () => {
     expect(JSON.parse(ppayload[1].metadata).first_name).toBe("Marcille");
 
     let tpayload = utils.scanForToken(db, "chicken");
-    expect(tpayload.length).toBe(1);
-    expect(tpayload[0].path).toBe("a.txt");
-    expect(tpayload[0].field).toBe("ingredients.meat");
-
-    tpayload = utils.scanForToken(db, "Marcille");
-    expect(tpayload.length).toBe(1);
-    expect(tpayload[0].path).toBe("b/c.txt");
-    expect(tpayload[0].field).toBe("first_name");
+    expect(tpayload.length).toBeGreaterThan(0);
+    expect(tpayload.some(x => x.path == "a.txt" && x.field == "title")).toBe(true);
+
+    tpayload = utils.scanForToken(db, "marcille");
+    expect(tpayload.length).toBeGreaterThan(0);
+    expect(tpayload.some(x => x.path == "b/c.txt" && x.field == "first_name")).toBe(true);
 })
 
 test("New version can be added multiple times", () => {
@@ -51,21 +49,21 @@ test("New version can be added multiple times", () => {
     createTables(db);
 
     let meta = { "a.txt": utils.mockMetadata["chicken"] };
-    addVersion(db, "foo", "bar", "whee", true, meta, new Set);
+    addVersion(db, "foo", "bar", "whee", true, meta);
 
     let tpayload1 = utils.scanForToken(db, "cream");
-    expect(tpayload1.length).toBe(1);
-    let tpayload2 = utils.scanForToken(db, "weird food");
+    expect(tpayload1.length).toBeGreaterThan(0);
+    let tpayload2 = utils.scanForToken(db, "weird");
     expect(tpayload2.length).toBe(0);
 
     // Second addition deletes all existing entries in an cascading manner.
     meta = { "aa.txt": utils.mockMetadata["marcille"] };
-    addVersion(db, "foo", "bar", "whee", true, meta, new Set);
+    addVersion(db, "foo", "bar", "whee", true, meta);
 
     tpayload1 = utils.scanForToken(db, "cream");
     expect(tpayload1.length).toBe(0);
-    tpayload2 = utils.scanForToken(db, "weird food");
-    expect(tpayload2.length).toBe(1);
+    tpayload2 = utils.scanForToken(db, "weird");
+    expect(tpayload2.length).toBeGreaterThan(0);
 })
 
 test("Version addition updates the latest version", () => {
@@ -74,8 +72,8 @@
     let db = Database(opath);
     createTables(db);
 
-    addVersion(db, "foo", "bar", "gastly", true, {}, new Set);
-    addVersion(db, "foo", "bar", "haunter", true, {}, new Set);
+    addVersion(db, "foo", "bar", "gastly", true, {});
+    addVersion(db, "foo", "bar", "haunter", true, {});
 
     let vpayload = db.prepare("SELECT * FROM versions WHERE latest = 1").all();
     expect(vpayload.length).toBe(1);
@@ -92,17 +90,14 @@ test("Version addition responds to tokenization", () => {
     let db = Database(opath);
     createTables(db);
 
-    let tokable = new Set(["description"]);
-    addVersion(db, "foo", "bar", "gastly", true, { "recipe.json": utils.mockMetadata["chicken"] }, tokable);
-    addVersion(db, "foo", "bar", "haunter", true, { "best_girl.txt": utils.mockMetadata["marcille"] }, tokable);
+    addVersion(db, "foo", "bar", "gastly", true, { "recipe.json": utils.mockMetadata["chicken"] });
+    addVersion(db, "foo", "bar", "haunter", true, { "best_girl.txt": utils.mockMetadata["marcille"] });
 
     let tpayload1 = utils.scanForToken(db, "creamy");
-    expect(tpayload1.length).toBe(1);
-    expect(tpayload1[0].field).toBe("description");
-    expect(tpayload1[0].path).toBe("recipe.json");
+    expect(tpayload1.length).toBeGreaterThan(0);
+    expect(tpayload1.some(x => x.field == "description" && x.path == "recipe.json")).toBe(true);
 
     let tpayload2 = utils.scanForToken(db, "laios");
-    expect(tpayload2.length).toBe(1);
-    expect(tpayload2[0].field).toBe("description");
-    expect(tpayload2[0].path).toBe("best_girl.txt");
+    expect(tpayload2.length).toBeGreaterThan(0);
+    expect(tpayload2.some(x => x.field == "description" && x.path == "best_girl.txt")).toBe(true);
 })
diff --git a/tests/sqlite/deleteAsset.test.js b/tests/sqlite/deleteAsset.test.js
index b13281b..64a144d 100644
--- a/tests/sqlite/deleteAsset.test.js
+++ b/tests/sqlite/deleteAsset.test.js
@@ -11,20 +11,20 @@ test("Assets can be deleted", () => {
     let db = Database(opath);
     createTables(db);
 
-    addVersion(db, "foo", "bar", "whee", true, { "a.txt": utils.mockMetadata["marcille"] }, new Set);
-    addVersion(db, "foo", "bar", "whee2", true, { "b.txt": utils.mockMetadata["marcille"] }, new Set);
-    addVersion(db, "foo", "stuff", "whee", true, { "a.txt": utils.mockMetadata["chicken"] }, new Set);
+    addVersion(db, "foo", "bar", "whee", true, { "a.txt": utils.mockMetadata["marcille"] });
+    addVersion(db, "foo", "bar", "whee2", true, { "b.txt": utils.mockMetadata["marcille"] });
+    addVersion(db, "foo", "stuff", "whee", true, { "a.txt": utils.mockMetadata["chicken"] });
 
     let tpayload1 = utils.scanForToken(db, "chicken");
-    expect(tpayload1.length).toBe(1);
-    let tpayload2 = utils.scanForToken(db, "Donato");
-    expect(tpayload2.length).toBe(2);
+    expect(tpayload1.length).toBeGreaterThan(0);
+    let tpayload2 = utils.scanForToken(db, "donato");
+    expect(tpayload2.length).toBeGreaterThan(0);
 
     // Deletion cascades to all other tables.
     deleteAsset(db, "foo", "bar");
 
     tpayload1 = utils.scanForToken(db, "chicken");
-    expect(tpayload1.length).toBe(1);
-    tpayload2 = utils.scanForToken(db, "Donato");
+    expect(tpayload1.length).toBeGreaterThan(1);
+    tpayload2 = utils.scanForToken(db, "donato");
     expect(tpayload2.length).toBe(0);
 })
diff --git a/tests/sqlite/deleteProject.test.js b/tests/sqlite/deleteProject.test.js
index db2a7f4..8ebf7c7 100644
--- a/tests/sqlite/deleteProject.test.js
+++ b/tests/sqlite/deleteProject.test.js
@@ -11,20 +11,20 @@ test("Projects can be deleted", () => {
     let db = Database(opath);
     createTables(db);
 
-    addVersion(db, "foo", "bar", "whee", true, { "a.txt": utils.mockMetadata["marcille"] }, new Set);
-    addVersion(db, "foo", "bar2", "whee", true, { "b.txt": utils.mockMetadata["marcille"] }, new Set);
-    addVersion(db, "foo2", "stuff", "whee", true, { "a.txt": utils.mockMetadata["chicken"] }, new Set);
+    addVersion(db, "foo", "bar", "whee", true, { "a.txt": utils.mockMetadata["marcille"] });
+    addVersion(db, "foo", "bar2", "whee", true, { "b.txt": utils.mockMetadata["marcille"] });
+    addVersion(db, "foo2", "stuff", "whee", true, { "a.txt": utils.mockMetadata["chicken"] });
 
     let tpayload1 = utils.scanForToken(db, 'chicken');
-    expect(tpayload1.length).toBe(1);
-    let tpayload2 = utils.scanForToken(db, 'Donato');
-    expect(tpayload2.length).toBe(2);
+    expect(tpayload1.length).toBeGreaterThan(0);
+    let tpayload2 = utils.scanForToken(db, 'donato');
+    expect(tpayload2.length).toBeGreaterThan(0);
 
     // Deletion cascades to all other tables.
     deleteProject(db, "foo");
 
     tpayload1 = utils.scanForToken(db, 'chicken');
-    expect(tpayload1.length).toBe(1);
-    tpayload2 = utils.scanForToken(db, 'Donato');
+    expect(tpayload1.length).toBeGreaterThan(0);
+    tpayload2 = utils.scanForToken(db, 'donato');
     expect(tpayload2.length).toBe(0);
 })
diff --git a/tests/sqlite/deleteVersion.test.js b/tests/sqlite/deleteVersion.test.js
index 49d7937..3283b85 100644
--- a/tests/sqlite/deleteVersion.test.js
+++ b/tests/sqlite/deleteVersion.test.js
@@ -11,19 +11,19 @@ test("Versions can be deleted", () => {
     let db = Database(opath);
     createTables(db);
 
-    addVersion(db, "foo", "bar", "whee", true, { "a.txt": utils.mockMetadata["marcille"] }, new Set);
-    addVersion(db, "foo", "bar", "whee2", true, { "a.txt": utils.mockMetadata["chicken"] }, new Set);
+    addVersion(db, "foo", "bar", "whee", true, { "a.txt": utils.mockMetadata["marcille"] });
+    addVersion(db, "foo", "bar", "whee2", true, { "a.txt": utils.mockMetadata["chicken"] });
 
     let tpayload1 = utils.scanForToken(db, 'chicken');
-    expect(tpayload1.length).toBe(1);
-    let tpayload2 = utils.scanForToken(db, 'Marcille');
-    expect(tpayload2.length).toBe(1);
+    expect(tpayload1.length).toBeGreaterThan(0);
+    let tpayload2 = utils.scanForToken(db, 'marcille');
+    expect(tpayload2.length).toBeGreaterThan(0);
 
     // Deletion cascades to all other tables.
     deleteVersion(db, "foo", "bar", "whee");
 
     tpayload1 = utils.scanForToken(db, 'chicken');
-    expect(tpayload1.length).toBe(1);
-    tpayload2 = utils.scanForToken(db, 'Marcille');
+    expect(tpayload1.length).toBeGreaterThan(0);
+    tpayload2 = utils.scanForToken(db, 'marcille');
     expect(tpayload2.length).toBe(0);
 })
diff --git a/tests/sqlite/setLatest.test.js b/tests/sqlite/setLatest.test.js
index 2bb9669..3d0128c 100644
--- a/tests/sqlite/setLatest.test.js
+++ b/tests/sqlite/setLatest.test.js
@@ -11,18 +11,18 @@ test("We can manually set a different latest version", () => {
     let db = Database(opath);
     createTables(db);
 
-    addVersion(db, "foo", "bar", "whee", true, { "a.txt": utils.mockMetadata["chicken"] }, new Set);
-    addVersion(db, "foo", "bar", "whee2", true, { "a.txt": utils.mockMetadata["marcille"] }, new Set);
+    addVersion(db, "foo", "bar", "whee", true, { "a.txt": utils.mockMetadata["chicken"] });
+    addVersion(db, "foo", "bar", "whee2", true, { "a.txt": utils.mockMetadata["marcille"] });
 
     let tpayload1 = utils.scanForToken(db, "chicken", { latest: true });
     expect(tpayload1.length).toBe(0);
-    let tpayload2 = utils.scanForToken(db, "Marcille", { latest: true });
-    expect(tpayload2.length).toBe(1);
+    let tpayload2 = utils.scanForToken(db, "marcille", { latest: true });
+    expect(tpayload2.length).toBeGreaterThan(0);
 
     setLatest(db, "foo", "bar", "whee");
 
     tpayload1 = utils.scanForToken(db, "chicken", { latest: true });
-    expect(tpayload1.length).toBe(1);
-    tpayload2 = utils.scanForToken(db, "Marcille", { latest: true });
+    expect(tpayload1.length).toBeGreaterThan(0);
+    tpayload2 = utils.scanForToken(db, "marcille", { latest: true });
     expect(tpayload2.length).toBe(0);
 })
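For reference, after this patch each --config entry is a plain "file_name,db_name" pair split on its first comma (the JSON configs emitted by the deleted configure.js are gone), and string properties are tokenized unconditionally. A minimal standalone sketch of the pairing convention, reusing the pair strings from tests/scripts/utils.js; the helper name and output directory below are illustrative, not part of the patch:

import * as path from "path";

// Map "<metadata file name>,<database name>" pairs to on-disk database paths,
// mirroring the new parseConfigurations() in scripts/utils.js.
function pairConfigs(configs, dir) {
    const db_paths = {};
    for (const cpath of configs) {
        const i = cpath.indexOf(",");   // split on the first comma only
        db_paths[cpath.slice(0, i)] = path.join(dir, cpath.slice(i + 1));
    }
    return db_paths;
}

// Hypothetical usage; "indices" stands in for the --dir argument.
const db_paths = pairConfigs(["stuff.json,stuff.sqlite3", "other.json,other.sqlite3"], "indices");
// => { "stuff.json": "indices/stuff.sqlite3", "other.json": "indices/other.sqlite3" }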