diff --git a/src/lib/components/CopyToClipBoardBtn.svelte b/src/lib/components/CopyToClipBoardBtn.svelte
index 53f4c122c66..bafb71c9a58 100644
--- a/src/lib/components/CopyToClipBoardBtn.svelte
+++ b/src/lib/components/CopyToClipBoardBtn.svelte
@@ -43,7 +43,7 @@
>
-
+
diff --git a/src/lib/components/DisclaimerModal.svelte b/src/lib/components/DisclaimerModal.svelte
index 590bb088b3c..f0a18abd1dd 100644
--- a/src/lib/components/DisclaimerModal.svelte
+++ b/src/lib/components/DisclaimerModal.svelte
@@ -1,11 +1,7 @@
-
+
- {PUBLIC_APP_NAME}
+ {envPublic.PUBLIC_APP_NAME}
Settings
- {#if PUBLIC_APP_NAME === "HuggingChat"}
+ {#if envPublic.PUBLIC_APP_NAME === "HuggingChat"}
0 ||
assistant?.dynamicPrompt;
- const prefix = PUBLIC_SHARE_PREFIX || `${PUBLIC_ORIGIN || $page.url.origin}${base}`;
+ const prefix =
+ envPublic.PUBLIC_SHARE_PREFIX || `${envPublic.PUBLIC_ORIGIN || $page.url.origin}${base}`;
$: shareUrl = `${prefix}/assistant/${assistant?._id}`;
diff --git a/src/lib/components/chat/ChatIntroduction.svelte b/src/lib/components/chat/ChatIntroduction.svelte
index eb0a11b0537..81fb51b09fa 100644
--- a/src/lib/components/chat/ChatIntroduction.svelte
+++ b/src/lib/components/chat/ChatIntroduction.svelte
@@ -1,7 +1,5 @@
-{#if PUBLIC_APP_ASSETS === "chatui"}
+{#if envPublic.PUBLIC_APP_ASSETS === "chatui"}
{/if}
diff --git a/src/lib/migrations/migrations.ts b/src/lib/migrations/migrations.ts
index 2615f170b5f..2331644d8b8 100644
--- a/src/lib/migrations/migrations.ts
+++ b/src/lib/migrations/migrations.ts
@@ -1,7 +1,8 @@
-import { client, collections } from "$lib/server/database";
+import { Database } from "$lib/server/database";
import { migrations } from "./routines";
import { acquireLock, releaseLock, isDBLocked, refreshLock } from "./lock";
import { isHuggingChat } from "$lib/utils/isHuggingChat";
+import { logger } from "$lib/server/logger";
const LOCK_KEY = "migrations";
@@ -12,18 +13,21 @@ export async function checkAndRunMigrations() {
}
// check if all migrations have already been run
- const migrationResults = await collections.migrationResults.find().toArray();
+ const migrationResults = await Database.getInstance()
+ .getCollections()
+ .migrationResults.find()
+ .toArray();
- console.log("[MIGRATIONS] Begin check...");
+ logger.info("[MIGRATIONS] Begin check...");
// connect to the database
- const connectedClient = await client.connect();
+ const connectedClient = await Database.getInstance().getClient().connect();
const lockId = await acquireLock(LOCK_KEY);
if (!lockId) {
// another instance already has the lock, so we exit early
- console.log(
+ logger.info(
"[MIGRATIONS] Another instance already has the lock. Waiting for DB to be unlocked."
);
@@ -50,65 +54,69 @@ export async function checkAndRunMigrations() {
// check if the migration has already been applied
if (!shouldRun) {
- console.log(`[MIGRATIONS] "${migration.name}" already applied. Skipping...`);
+ logger.info(`[MIGRATIONS] "${migration.name}" already applied. Skipping...`);
} else {
// check the modifiers to see if some cases match
if (
(migration.runForHuggingChat === "only" && !isHuggingChat) ||
(migration.runForHuggingChat === "never" && isHuggingChat)
) {
- console.log(
+ logger.info(
`[MIGRATIONS] "${migration.name}" should not be applied for this run. Skipping...`
);
continue;
}
// otherwise all is good and we can run the migration
- console.log(
+ logger.info(
`[MIGRATIONS] "${migration.name}" ${
migration.runEveryTime ? "should run every time" : "not applied yet"
}. Applying...`
);
- await collections.migrationResults.updateOne(
- { _id: migration._id },
- {
- $set: {
- name: migration.name,
- status: "ongoing",
+ await Database.getInstance()
+ .getCollections()
+ .migrationResults.updateOne(
+ { _id: migration._id },
+ {
+ $set: {
+ name: migration.name,
+ status: "ongoing",
+ },
},
- },
- { upsert: true }
- );
+ { upsert: true }
+ );
const session = connectedClient.startSession();
let result = false;
try {
await session.withTransaction(async () => {
- result = await migration.up(connectedClient);
+ result = await migration.up(Database.getInstance());
});
} catch (e) {
- console.log(`[MIGRATION[] "${migration.name}" failed!`);
- console.error(e);
+ logger.info(`[MIGRATIONS] "${migration.name}" failed!`);
+ logger.error(e);
} finally {
await session.endSession();
}
- await collections.migrationResults.updateOne(
- { _id: migration._id },
- {
- $set: {
- name: migration.name,
- status: result ? "success" : "failure",
+ await Database.getInstance()
+ .getCollections()
+ .migrationResults.updateOne(
+ { _id: migration._id },
+ {
+ $set: {
+ name: migration.name,
+ status: result ? "success" : "failure",
+ },
},
- },
- { upsert: true }
- );
+ { upsert: true }
+ );
}
}
- console.log("[MIGRATIONS] All migrations applied. Releasing lock");
+ logger.info("[MIGRATIONS] All migrations applied. Releasing lock");
clearInterval(refreshInterval);
await releaseLock(LOCK_KEY, lockId);
diff --git a/src/lib/migrations/routines/01-update-search-assistants.ts b/src/lib/migrations/routines/01-update-search-assistants.ts
index 9f12b27d3fb..52c8b2f6c99 100644
--- a/src/lib/migrations/routines/01-update-search-assistants.ts
+++ b/src/lib/migrations/routines/01-update-search-assistants.ts
@@ -1,5 +1,5 @@
import type { Migration } from ".";
-import { getCollections } from "$lib/server/database";
+import { collections } from "$lib/server/database";
import { ObjectId, type AnyBulkWriteOperation } from "mongodb";
import type { Assistant } from "$lib/types/Assistant";
import { generateSearchTokens } from "$lib/utils/searchTokens";
@@ -7,8 +7,8 @@ import { generateSearchTokens } from "$lib/utils/searchTokens";
const migration: Migration = {
_id: new ObjectId("5f9f3e3e3e3e3e3e3e3e3e3e"),
name: "Update search assistants",
- up: async (client) => {
- const { assistants } = getCollections(client);
+ up: async () => {
+ const { assistants } = collections;
let ops: AnyBulkWriteOperation[] = [];
for await (const assistant of assistants
@@ -40,8 +40,8 @@ const migration: Migration = {
return true;
},
- down: async (client) => {
- const { assistants } = getCollections(client);
+ down: async () => {
+ const { assistants } = collections;
await assistants.updateMany({}, { $unset: { searchTokens: "" } });
return true;
},
diff --git a/src/lib/migrations/routines/02-update-assistants-models.ts b/src/lib/migrations/routines/02-update-assistants-models.ts
index 73655a88f84..f7f0c9dd454 100644
--- a/src/lib/migrations/routines/02-update-assistants-models.ts
+++ b/src/lib/migrations/routines/02-update-assistants-models.ts
@@ -1,14 +1,14 @@
import type { Migration } from ".";
-import { getCollections } from "$lib/server/database";
+import { collections } from "$lib/server/database";
import { ObjectId } from "mongodb";
const updateAssistantsModels: Migration = {
_id: new ObjectId("5f9f3f3f3f3f3f3f3f3f3f3f"),
name: "Update deprecated models in assistants with the default model",
- up: async (client) => {
+ up: async () => {
const models = (await import("$lib/server/models")).models;
- const { assistants } = getCollections(client);
+ const { assistants } = collections;
const modelIds = models.map((el) => el.id); // string[]
const defaultModelId = models[0].id;
diff --git a/src/lib/migrations/routines/index.ts b/src/lib/migrations/routines/index.ts
index 96a6a07ab3c..0d6eafa8f04 100644
--- a/src/lib/migrations/routines/index.ts
+++ b/src/lib/migrations/routines/index.ts
@@ -1,13 +1,14 @@
-import type { MongoClient, ObjectId } from "mongodb";
+import type { ObjectId } from "mongodb";
import updateSearchAssistant from "./01-update-search-assistants";
import updateAssistantsModels from "./02-update-assistants-models";
+import type { Database } from "$lib/server/database";
export interface Migration {
_id: ObjectId;
name: string;
- up: (client: MongoClient) => Promise<boolean>;
- down?: (client: MongoClient) => Promise<boolean>;
+ up: (client: Database) => Promise<boolean>;
+ down?: (client: Database) => Promise<boolean>;
runForFreshInstall?: "only" | "never"; // leave unspecified to run for both
runForHuggingChat?: "only" | "never"; // leave unspecified to run for both
runEveryTime?: boolean;
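
For reference, a minimal sketch (not part of the diff) of what a routine looks like against the new interface; the id, name and index call are made up, real routines live in src/lib/migrations/routines/:

// Illustrative only: shape of a routine under the new Migration interface.
import { ObjectId } from "mongodb";
import { collections } from "$lib/server/database";
import type { Migration } from ".";

const example: Migration = {
	_id: new ObjectId("000000000000000000000000"), // placeholder id
	name: "Example migration",
	up: async () => {
		// A Database instance is passed as the first argument when a client
		// handle is needed (e.g. transactions); simple routines can ignore it
		// and use the shared `collections` export directly.
		await collections.semaphores.createIndex({ key: 1 });
		return true;
	},
	runEveryTime: false,
};

export default example;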
diff --git a/src/lib/server/abortedGenerations.ts b/src/lib/server/abortedGenerations.ts
index 575cf637bfe..548809d6b6c 100644
--- a/src/lib/server/abortedGenerations.ts
+++ b/src/lib/server/abortedGenerations.ts
@@ -1,29 +1,42 @@
// Shouldn't be needed if we dove into sveltekit internals, see https://github.com/huggingface/chat-ui/pull/88#issuecomment-1523173850
-import { setTimeout } from "node:timers/promises";
-import { collections } from "./database";
+import { logger } from "$lib/server/logger";
+import { collections } from "$lib/server/database";
-let closed = false;
-process.on("SIGINT", () => {
- closed = true;
-});
+export class AbortedGenerations {
+ private static instance: AbortedGenerations;
-export let abortedGenerations: Map<string, Date> = new Map();
+ private abortedGenerations: Map<string, Date> = new Map();
-async function maintainAbortedGenerations() {
- while (!closed) {
- await setTimeout(1000);
+ private constructor() {
+ const interval = setInterval(this.updateList, 1000);
+ process.on("SIGINT", () => {
+ clearInterval(interval);
+ });
+ }
+
+ public static getInstance(): AbortedGenerations {
+ if (!AbortedGenerations.instance) {
+ AbortedGenerations.instance = new AbortedGenerations();
+ }
+
+ return AbortedGenerations.instance;
+ }
+
+ public getList(): Map<string, Date> {
+ return this.abortedGenerations;
+ }
+
+ private async updateList() {
try {
const aborts = await collections.abortedGenerations.find({}).sort({ createdAt: 1 }).toArray();
- abortedGenerations = new Map(
+ this.abortedGenerations = new Map(
aborts.map(({ conversationId, createdAt }) => [conversationId.toString(), createdAt])
);
} catch (err) {
- console.error(err);
+ logger.error(err);
}
}
}
-
-maintainAbortedGenerations();
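
A minimal sketch (not part of the diff) of how a caller reads aborted generations now that the module-level `abortedGenerations` map and the polling loop are gone; `conversationId` is a placeholder:

// Illustrative only: consuming the refactored singleton.
import type { ObjectId } from "mongodb";
import { AbortedGenerations } from "$lib/server/abortedGenerations";

declare const conversationId: ObjectId; // placeholder for a real conversation id

const abortedAt = AbortedGenerations.getInstance().getList().get(conversationId.toString());
if (abortedAt) {
	// the generation for this conversation was aborted at `abortedAt` (a Date)
}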
diff --git a/src/lib/server/auth.ts b/src/lib/server/auth.ts
index 96e6ec8f345..94eacdb476b 100644
--- a/src/lib/server/auth.ts
+++ b/src/lib/server/auth.ts
@@ -1,22 +1,13 @@
import { Issuer, BaseClient, type UserinfoResponse, TokenSet, custom } from "openid-client";
import { addHours, addWeeks } from "date-fns";
-import {
- COOKIE_NAME,
- OPENID_CLIENT_ID,
- OPENID_CLIENT_SECRET,
- OPENID_PROVIDER_URL,
- OPENID_SCOPES,
- OPENID_NAME_CLAIM,
- OPENID_TOLERANCE,
- OPENID_RESOURCE,
- OPENID_CONFIG,
-} from "$env/static/private";
+import { env } from "$env/dynamic/private";
import { sha256 } from "$lib/utils/sha256";
import { z } from "zod";
import { dev } from "$app/environment";
import type { Cookies } from "@sveltejs/kit";
-import { collections } from "./database";
+import { collections } from "$lib/server/database";
import JSON5 from "json5";
+import { logger } from "$lib/server/logger";
export interface OIDCSettings {
redirectURI: string;
@@ -35,27 +26,27 @@ const stringWithDefault = (value: string) =>
export const OIDConfig = z
.object({
- CLIENT_ID: stringWithDefault(OPENID_CLIENT_ID),
- CLIENT_SECRET: stringWithDefault(OPENID_CLIENT_SECRET),
- PROVIDER_URL: stringWithDefault(OPENID_PROVIDER_URL),
- SCOPES: stringWithDefault(OPENID_SCOPES),
- NAME_CLAIM: stringWithDefault(OPENID_NAME_CLAIM).refine(
+ CLIENT_ID: stringWithDefault(env.OPENID_CLIENT_ID),
+ CLIENT_SECRET: stringWithDefault(env.OPENID_CLIENT_SECRET),
+ PROVIDER_URL: stringWithDefault(env.OPENID_PROVIDER_URL),
+ SCOPES: stringWithDefault(env.OPENID_SCOPES),
+ NAME_CLAIM: stringWithDefault(env.OPENID_NAME_CLAIM).refine(
(el) => !["preferred_username", "email", "picture", "sub"].includes(el),
{ message: "nameClaim cannot be one of the restricted keys." }
),
- TOLERANCE: stringWithDefault(OPENID_TOLERANCE),
- RESOURCE: stringWithDefault(OPENID_RESOURCE),
+ TOLERANCE: stringWithDefault(env.OPENID_TOLERANCE),
+ RESOURCE: stringWithDefault(env.OPENID_RESOURCE),
})
- .parse(JSON5.parse(OPENID_CONFIG));
+ .parse(JSON5.parse(env.OPENID_CONFIG));
export const requiresUser = !!OIDConfig.CLIENT_ID && !!OIDConfig.CLIENT_SECRET;
export function refreshSessionCookie(cookies: Cookies, sessionId: string) {
- cookies.set(COOKIE_NAME, sessionId, {
+ cookies.set(env.COOKIE_NAME, sessionId, {
path: "/",
// So that it works inside the space's iframe
- sameSite: dev ? "lax" : "none",
- secure: !dev,
+ sameSite: dev || env.ALLOW_INSECURE_COOKIES === "true" ? "lax" : "none",
+ secure: !dev && !(env.ALLOW_INSECURE_COOKIES === "true"),
httpOnly: true,
expires: addWeeks(new Date(), 2),
});
@@ -150,7 +141,7 @@ export async function validateAndParseCsrfToken(
return { redirectUrl: data.redirectUrl };
}
} catch (e) {
- console.error(e);
+ logger.error(e);
}
return null;
}
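
For clarity, a small illustrative sketch (not part of the diff) of the cookie attributes `refreshSessionCookie` now produces, where `allowInsecure` stands for `ALLOW_INSECURE_COOKIES === "true"`:

// Illustrative only: the sameSite/secure pair produced by the new logic.
const cookieAttributes = (dev: boolean, allowInsecure: boolean) => ({
	sameSite: dev || allowInsecure ? ("lax" as const) : ("none" as const),
	secure: !dev && !allowInsecure,
});

cookieAttributes(true, false);  // { sameSite: "lax",  secure: false } – local dev
cookieAttributes(false, true);  // { sameSite: "lax",  secure: false } – ALLOW_INSECURE_COOKIES="true"
cookieAttributes(false, false); // { sameSite: "none", secure: true }  – default, works inside the Space iframe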
diff --git a/src/lib/server/database.ts b/src/lib/server/database.ts
index fa6d2b8f0ff..4a8302ce9ca 100644
--- a/src/lib/server/database.ts
+++ b/src/lib/server/database.ts
@@ -1,4 +1,4 @@
-import { MONGODB_URL, MONGODB_DB_NAME, MONGODB_DIRECT_CONNECTION } from "$env/static/private";
+import { env } from "$env/dynamic/private";
import { GridFSBucket, MongoClient } from "mongodb";
import type { Conversation } from "$lib/types/Conversation";
import type { SharedConversation } from "$lib/types/SharedConversation";
@@ -13,149 +13,197 @@ import type { ConversationStats } from "$lib/types/ConversationStats";
import type { MigrationResult } from "$lib/types/MigrationResult";
import type { Semaphore } from "$lib/types/Semaphore";
import type { AssistantStats } from "$lib/types/AssistantStats";
+import { logger } from "$lib/server/logger";
+import { building } from "$app/environment";
-if (!MONGODB_URL) {
- throw new Error(
- "Please specify the MONGODB_URL environment variable inside .env.local. Set it to mongodb://localhost:27017 if you are running MongoDB locally, or to a MongoDB Atlas free instance for example."
- );
-}
export const CONVERSATION_STATS_COLLECTION = "conversations.stats";
-const client = new MongoClient(MONGODB_URL, {
- directConnection: MONGODB_DIRECT_CONNECTION === "true",
-});
-
-export const connectPromise = client.connect().catch(console.error);
-
-export function getCollections(mongoClient: MongoClient) {
- const db = mongoClient.db(MONGODB_DB_NAME + (import.meta.env.MODE === "test" ? "-test" : ""));
-
- const conversations = db.collection("conversations");
- const conversationStats = db.collection(CONVERSATION_STATS_COLLECTION);
- const assistants = db.collection("assistants");
- const assistantStats = db.collection("assistants.stats");
- const reports = db.collection("reports");
- const sharedConversations = db.collection("sharedConversations");
- const abortedGenerations = db.collection("abortedGenerations");
- const settings = db.collection("settings");
- const users = db.collection("users");
- const sessions = db.collection("sessions");
- const messageEvents = db.collection("messageEvents");
- const bucket = new GridFSBucket(db, { bucketName: "files" });
- const migrationResults = db.collection("migrationResults");
- const semaphores = db.collection("semaphores");
-
- return {
- conversations,
- conversationStats,
- assistants,
- assistantStats,
- reports,
- sharedConversations,
- abortedGenerations,
- settings,
- users,
- sessions,
- messageEvents,
- bucket,
- migrationResults,
- semaphores,
- };
-}
-const db = client.db(MONGODB_DB_NAME + (import.meta.env.MODE === "test" ? "-test" : ""));
-
-const collections = getCollections(client);
-
-const {
- conversations,
- conversationStats,
- assistants,
- assistantStats,
- reports,
- sharedConversations,
- abortedGenerations,
- settings,
- users,
- sessions,
- messageEvents,
- semaphores,
-} = collections;
-
-export { client, db, collections };
-
-client.on("open", () => {
- conversations
- .createIndex(
- { sessionId: 1, updatedAt: -1 },
- { partialFilterExpression: { sessionId: { $exists: true } } }
- )
- .catch(console.error);
- conversations
- .createIndex(
- { userId: 1, updatedAt: -1 },
- { partialFilterExpression: { userId: { $exists: true } } }
- )
- .catch(console.error);
- conversations
- .createIndex(
- { "message.id": 1, "message.ancestors": 1 },
- { partialFilterExpression: { userId: { $exists: true } } }
- )
- .catch(console.error);
- // To do stats on conversations
- conversations.createIndex({ updatedAt: 1 }).catch(console.error);
- // Not strictly necessary, could use _id, but more convenient. Also for stats
- conversations.createIndex({ createdAt: 1 }).catch(console.error);
- // To do stats on conversation messages
- conversations.createIndex({ "messages.createdAt": 1 }, { sparse: true }).catch(console.error);
- // Unique index for stats
- conversationStats
- .createIndex(
- {
+export class Database {
+ private client: MongoClient;
+
+ private static instance: Database;
+
+ private constructor() {
+ if (!env.MONGODB_URL) {
+ throw new Error(
+ "Please specify the MONGODB_URL environment variable inside .env.local. Set it to mongodb://localhost:27017 if you are running MongoDB locally, or to a MongoDB Atlas free instance for example."
+ );
+ }
+
+ this.client = new MongoClient(env.MONGODB_URL, {
+ directConnection: env.MONGODB_DIRECT_CONNECTION === "true",
+ });
+
+ this.client.connect().catch((err) => {
+ logger.error("Connection error", err);
+ process.exit(1);
+ });
+ this.client.db(env.MONGODB_DB_NAME + (import.meta.env.MODE === "test" ? "-test" : ""));
+ this.client.on("open", () => this.initDatabase());
+
+ // Disconnect DB on process kill
+ process.on("SIGINT", async () => {
+ await this.client.close(true);
+
+ // https://github.com/sveltejs/kit/issues/9540
+ setTimeout(() => {
+ process.exit(0);
+ }, 100);
+ });
+ }
+
+ public static getInstance(): Database {
+ if (!Database.instance) {
+ Database.instance = new Database();
+ }
+
+ return Database.instance;
+ }
+
+ /**
+ * Return mongoClient
+ */
+ public getClient(): MongoClient {
+ return this.client;
+ }
+
+ /**
+ * Return map of database's collections
+ */
+ public getCollections() {
+ const db = this.client.db(
+ env.MONGODB_DB_NAME + (import.meta.env.MODE === "test" ? "-test" : "")
+ );
+
+ const conversations = db.collection("conversations");
+ const conversationStats = db.collection(CONVERSATION_STATS_COLLECTION);
+ const assistants = db.collection("assistants");
+ const assistantStats = db.collection("assistants.stats");
+ const reports = db.collection("reports");
+ const sharedConversations = db.collection("sharedConversations");
+ const abortedGenerations = db.collection("abortedGenerations");
+ const settings = db.collection("settings");
+ const users = db.collection("users");
+ const sessions = db.collection("sessions");
+ const messageEvents = db.collection("messageEvents");
+ const bucket = new GridFSBucket(db, { bucketName: "files" });
+ const migrationResults = db.collection("migrationResults");
+ const semaphores = db.collection("semaphores");
+
+ return {
+ conversations,
+ conversationStats,
+ assistants,
+ assistantStats,
+ reports,
+ sharedConversations,
+ abortedGenerations,
+ settings,
+ users,
+ sessions,
+ messageEvents,
+ bucket,
+ migrationResults,
+ semaphores,
+ };
+ }
+
+ /**
+ * Init database once connected: Index creation
+ * @private
+ */
+ private initDatabase() {
+ const {
+ conversations,
+ conversationStats,
+ assistants,
+ assistantStats,
+ reports,
+ sharedConversations,
+ abortedGenerations,
+ settings,
+ users,
+ sessions,
+ messageEvents,
+ semaphores,
+ } = this.getCollections();
+
+ conversations
+ .createIndex(
+ { sessionId: 1, updatedAt: -1 },
+ { partialFilterExpression: { sessionId: { $exists: true } } }
+ )
+ .catch(logger.error);
+ conversations
+ .createIndex(
+ { userId: 1, updatedAt: -1 },
+ { partialFilterExpression: { userId: { $exists: true } } }
+ )
+ .catch(logger.error);
+ conversations
+ .createIndex(
+ { "message.id": 1, "message.ancestors": 1 },
+ { partialFilterExpression: { userId: { $exists: true } } }
+ )
+ .catch(logger.error);
+ // To do stats on conversations
+ conversations.createIndex({ updatedAt: 1 }).catch(logger.error);
+ // Not strictly necessary, could use _id, but more convenient. Also for stats
+ conversations.createIndex({ createdAt: 1 }).catch(logger.error);
+ // To do stats on conversation messages
+ conversations.createIndex({ "messages.createdAt": 1 }, { sparse: true }).catch(logger.error);
+ // Unique index for stats
+ conversationStats
+ .createIndex(
+ {
+ type: 1,
+ "date.field": 1,
+ "date.span": 1,
+ "date.at": 1,
+ distinct: 1,
+ },
+ { unique: true }
+ )
+ .catch(logger.error);
+ // Allow easy check of last computed stat for given type/dateField
+ conversationStats
+ .createIndex({
type: 1,
"date.field": 1,
- "date.span": 1,
"date.at": 1,
- distinct: 1,
- },
- { unique: true }
- )
- .catch(console.error);
- // Allow easy check of last computed stat for given type/dateField
- conversationStats
- .createIndex({
- type: 1,
- "date.field": 1,
- "date.at": 1,
- })
- .catch(console.error);
- abortedGenerations.createIndex({ updatedAt: 1 }, { expireAfterSeconds: 30 }).catch(console.error);
- abortedGenerations.createIndex({ conversationId: 1 }, { unique: true }).catch(console.error);
- sharedConversations.createIndex({ hash: 1 }, { unique: true }).catch(console.error);
- settings.createIndex({ sessionId: 1 }, { unique: true, sparse: true }).catch(console.error);
- settings.createIndex({ userId: 1 }, { unique: true, sparse: true }).catch(console.error);
- settings.createIndex({ assistants: 1 }).catch(console.error);
- users.createIndex({ hfUserId: 1 }, { unique: true }).catch(console.error);
- users.createIndex({ sessionId: 1 }, { unique: true, sparse: true }).catch(console.error);
- // No unicity because due to renames & outdated info from oauth provider, there may be the same username on different users
- users.createIndex({ username: 1 }).catch(console.error);
- messageEvents.createIndex({ createdAt: 1 }, { expireAfterSeconds: 60 }).catch(console.error);
- sessions.createIndex({ expiresAt: 1 }, { expireAfterSeconds: 0 }).catch(console.error);
- sessions.createIndex({ sessionId: 1 }, { unique: true }).catch(console.error);
- assistants.createIndex({ createdById: 1, userCount: -1 }).catch(console.error);
- assistants.createIndex({ userCount: 1 }).catch(console.error);
- assistants.createIndex({ featured: 1, userCount: -1 }).catch(console.error);
- assistants.createIndex({ modelId: 1, userCount: -1 }).catch(console.error);
- assistants.createIndex({ searchTokens: 1 }).catch(console.error);
- assistants.createIndex({ last24HoursCount: 1 }).catch(console.error);
- assistantStats
- // Order of keys is important for the queries
- .createIndex({ "date.span": 1, "date.at": 1, assistantId: 1 }, { unique: true })
- .catch(console.error);
- reports.createIndex({ assistantId: 1 }).catch(console.error);
- reports.createIndex({ createdBy: 1, assistantId: 1 }).catch(console.error);
-
- // Unique index for semaphore and migration results
- semaphores.createIndex({ key: 1 }, { unique: true }).catch(console.error);
- semaphores.createIndex({ createdAt: 1 }, { expireAfterSeconds: 60 }).catch(console.error);
-});
+ })
+ .catch(logger.error);
+ abortedGenerations
+ .createIndex({ updatedAt: 1 }, { expireAfterSeconds: 30 })
+ .catch(logger.error);
+ abortedGenerations.createIndex({ conversationId: 1 }, { unique: true }).catch(logger.error);
+ sharedConversations.createIndex({ hash: 1 }, { unique: true }).catch(logger.error);
+ settings.createIndex({ sessionId: 1 }, { unique: true, sparse: true }).catch(logger.error);
+ settings.createIndex({ userId: 1 }, { unique: true, sparse: true }).catch(logger.error);
+ settings.createIndex({ assistants: 1 }).catch(logger.error);
+ users.createIndex({ hfUserId: 1 }, { unique: true }).catch(logger.error);
+ users.createIndex({ sessionId: 1 }, { unique: true, sparse: true }).catch(logger.error);
+ // No unicity because due to renames & outdated info from oauth provider, there may be the same username on different users
+ users.createIndex({ username: 1 }).catch(logger.error);
+ messageEvents.createIndex({ createdAt: 1 }, { expireAfterSeconds: 60 }).catch(logger.error);
+ sessions.createIndex({ expiresAt: 1 }, { expireAfterSeconds: 0 }).catch(logger.error);
+ sessions.createIndex({ sessionId: 1 }, { unique: true }).catch(logger.error);
+ assistants.createIndex({ createdById: 1, userCount: -1 }).catch(logger.error);
+ assistants.createIndex({ userCount: 1 }).catch(logger.error);
+ assistants.createIndex({ featured: 1, userCount: -1 }).catch(logger.error);
+ assistants.createIndex({ modelId: 1, userCount: -1 }).catch(logger.error);
+ assistants.createIndex({ searchTokens: 1 }).catch(logger.error);
+ assistants.createIndex({ last24HoursCount: 1 }).catch(logger.error);
+ assistantStats
+ // Order of keys is important for the queries
+ .createIndex({ "date.span": 1, "date.at": 1, assistantId: 1 }, { unique: true })
+ .catch(logger.error);
+ reports.createIndex({ assistantId: 1 }).catch(logger.error);
+ reports.createIndex({ createdBy: 1, assistantId: 1 }).catch(logger.error);
+
+ // Unique index for semaphore and migration results
+ semaphores.createIndex({ key: 1 }, { unique: true }).catch(logger.error);
+ semaphores.createIndex({ createdAt: 1 }, { expireAfterSeconds: 60 }).catch(logger.error);
+ }
+}
+
+export const collections = building
+ ? ({} as unknown as ReturnType<typeof Database.prototype.getCollections>)
+ : Database.getInstance().getCollections();
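
A short usage sketch (not part of the diff): call sites keep the old `collections` ergonomics via the re-export, while code that needs the MongoClient goes through the singleton; the `sessionId` filter below is a placeholder:

// Illustrative usage of the refactored module.
import { Database, collections } from "$lib/server/database";

// Most call sites keep the previous ergonomics through the re-exported `collections`:
const settings = await collections.settings.findOne({ sessionId: "placeholder-session-id" });

// Code that needs the MongoClient itself (transactions, migrations) uses the singleton:
const session = Database.getInstance().getClient().startSession();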
diff --git a/src/lib/server/embeddingEndpoints/hfApi/embeddingHfApi.ts b/src/lib/server/embeddingEndpoints/hfApi/embeddingHfApi.ts
index 162e8964c00..86f84ac19b8 100644
--- a/src/lib/server/embeddingEndpoints/hfApi/embeddingHfApi.ts
+++ b/src/lib/server/embeddingEndpoints/hfApi/embeddingHfApi.ts
@@ -1,7 +1,8 @@
import { z } from "zod";
import type { EmbeddingEndpoint, Embedding } from "../embeddingEndpoints";
import { chunk } from "$lib/utils/chunk";
-import { HF_TOKEN } from "$env/static/private";
+import { env } from "$env/dynamic/private";
+import { logger } from "$lib/server/logger";
export const embeddingEndpointHfApiSchema = z.object({
weight: z.number().int().positive().default(1),
@@ -10,7 +11,7 @@ export const embeddingEndpointHfApiSchema = z.object({
authorization: z
.string()
.optional()
- .transform((v) => (!v && HF_TOKEN ? "Bearer " + HF_TOKEN : v)), // if the header is not set but HF_TOKEN is, use it as the authorization header
+ .transform((v) => (!v && env.HF_TOKEN ? "Bearer " + env.HF_TOKEN : v)), // if the header is not set but HF_TOKEN is, use it as the authorization header
});
export async function embeddingEndpointHfApi(
@@ -35,8 +36,8 @@ export async function embeddingEndpointHfApi(
});
if (!response.ok) {
- console.log(await response.text());
- console.error("Failed to get embeddings from Hugging Face API", response);
+ logger.error(await response.text());
+ logger.error("Failed to get embeddings from Hugging Face API", response);
return [];
}
diff --git a/src/lib/server/embeddingEndpoints/openai/embeddingEndpoints.ts b/src/lib/server/embeddingEndpoints/openai/embeddingEndpoints.ts
index 89d7900bb28..527a9732498 100644
--- a/src/lib/server/embeddingEndpoints/openai/embeddingEndpoints.ts
+++ b/src/lib/server/embeddingEndpoints/openai/embeddingEndpoints.ts
@@ -1,14 +1,14 @@
import { z } from "zod";
import type { EmbeddingEndpoint, Embedding } from "../embeddingEndpoints";
import { chunk } from "$lib/utils/chunk";
-import { OPENAI_API_KEY } from "$env/static/private";
+import { env } from "$env/dynamic/private";
export const embeddingEndpointOpenAIParametersSchema = z.object({
weight: z.number().int().positive().default(1),
model: z.any(),
type: z.literal("openai"),
url: z.string().url().default("https://api.openai.com/v1/embeddings"),
- apiKey: z.string().default(OPENAI_API_KEY),
+ apiKey: z.string().default(env.OPENAI_API_KEY),
});
export async function embeddingEndpointOpenAI(
diff --git a/src/lib/server/embeddingEndpoints/tei/embeddingEndpoints.ts b/src/lib/server/embeddingEndpoints/tei/embeddingEndpoints.ts
index 0d2c7ae1336..c999ceba7da 100644
--- a/src/lib/server/embeddingEndpoints/tei/embeddingEndpoints.ts
+++ b/src/lib/server/embeddingEndpoints/tei/embeddingEndpoints.ts
@@ -1,7 +1,8 @@
import { z } from "zod";
import type { EmbeddingEndpoint, Embedding } from "../embeddingEndpoints";
import { chunk } from "$lib/utils/chunk";
-import { HF_TOKEN } from "$env/static/private";
+import { env } from "$env/dynamic/private";
+import { logger } from "$lib/server/logger";
export const embeddingEndpointTeiParametersSchema = z.object({
weight: z.number().int().positive().default(1),
@@ -11,7 +12,7 @@ export const embeddingEndpointTeiParametersSchema = z.object({
authorization: z
.string()
.optional()
- .transform((v) => (!v && HF_TOKEN ? "Bearer " + HF_TOKEN : v)), // if the header is not set but HF_TOKEN is, use it as the authorization header
+ .transform((v) => (!v && env.HF_TOKEN ? "Bearer " + env.HF_TOKEN : v)), // if the header is not set but HF_TOKEN is, use it as the authorization header
});
const getModelInfoByUrl = async (url: string, authorization?: string) => {
@@ -29,7 +30,7 @@ const getModelInfoByUrl = async (url: string, authorization?: string) => {
const json = await response.json();
return { max_client_batch_size: 32, max_batch_tokens: 16384, ...json };
} catch {
- console.log("Could not get info from TEI embedding endpoint. Using defaults.");
+ logger.debug("Could not get info from TEI embedding endpoint. Using defaults.");
return { max_client_batch_size: 32, max_batch_tokens: 16384 };
}
};
diff --git a/src/lib/server/embeddingModels.ts b/src/lib/server/embeddingModels.ts
index 96f3795bd93..67ad8fe5b1e 100644
--- a/src/lib/server/embeddingModels.ts
+++ b/src/lib/server/embeddingModels.ts
@@ -1,4 +1,4 @@
-import { TEXT_EMBEDDING_MODELS } from "$env/static/private";
+import { env } from "$env/dynamic/private";
import { z } from "zod";
import { sum } from "$lib/utils/sum";
@@ -29,7 +29,7 @@ const modelConfig = z.object({
// Default embedding model for backward compatibility
const rawEmbeddingModelJSON =
- TEXT_EMBEDDING_MODELS ||
+ env.TEXT_EMBEDDING_MODELS ||
`[
{
"name": "Xenova/gte-small",
diff --git a/src/lib/server/endpoints/anthropic/endpointAnthropic.ts b/src/lib/server/endpoints/anthropic/endpointAnthropic.ts
index e3ef2eb51fa..4353c6b11a5 100644
--- a/src/lib/server/endpoints/anthropic/endpointAnthropic.ts
+++ b/src/lib/server/endpoints/anthropic/endpointAnthropic.ts
@@ -1,5 +1,5 @@
import { z } from "zod";
-import { ANTHROPIC_API_KEY } from "$env/static/private";
+import { env } from "$env/dynamic/private";
import type { Endpoint } from "../endpoints";
import type { TextGenerationStreamOutput } from "@huggingface/inference";
@@ -8,7 +8,7 @@ export const endpointAnthropicParametersSchema = z.object({
model: z.any(),
type: z.literal("anthropic"),
baseURL: z.string().url().default("https://api.anthropic.com"),
- apiKey: z.string().default(ANTHROPIC_API_KEY ?? "sk-"),
+ apiKey: z.string().default(env.ANTHROPIC_API_KEY ?? "sk-"),
defaultHeaders: z.record(z.string()).optional(),
defaultQuery: z.record(z.string()).optional(),
});
diff --git a/src/lib/server/endpoints/cloudflare/endpointCloudflare.ts b/src/lib/server/endpoints/cloudflare/endpointCloudflare.ts
index 3a1be4d5175..f09d2723adb 100644
--- a/src/lib/server/endpoints/cloudflare/endpointCloudflare.ts
+++ b/src/lib/server/endpoints/cloudflare/endpointCloudflare.ts
@@ -1,14 +1,15 @@
import { z } from "zod";
import type { Endpoint } from "../endpoints";
import type { TextGenerationStreamOutput } from "@huggingface/inference";
-import { CLOUDFLARE_ACCOUNT_ID, CLOUDFLARE_API_TOKEN } from "$env/static/private";
+import { env } from "$env/dynamic/private";
+import { logger } from "$lib/server/logger";
export const endpointCloudflareParametersSchema = z.object({
weight: z.number().int().positive().default(1),
model: z.any(),
type: z.literal("cloudflare"),
- accountId: z.string().default(CLOUDFLARE_ACCOUNT_ID),
- apiToken: z.string().default(CLOUDFLARE_API_TOKEN),
+ accountId: z.string().default(env.CLOUDFLARE_ACCOUNT_ID),
+ apiToken: z.string().default(env.CLOUDFLARE_API_TOKEN),
});
export async function endpointCloudflare(
@@ -104,8 +105,8 @@ export async function endpointCloudflare(
try {
data = JSON.parse(jsonString);
} catch (e) {
- console.error("Failed to parse JSON", e);
- console.error("Problematic JSON string:", jsonString);
+ logger.error("Failed to parse JSON", e);
+ logger.error("Problematic JSON string:", jsonString);
continue; // Skip this iteration and try the next chunk
}
diff --git a/src/lib/server/endpoints/cohere/endpointCohere.ts b/src/lib/server/endpoints/cohere/endpointCohere.ts
index 524152fb991..f1c5562fa02 100644
--- a/src/lib/server/endpoints/cohere/endpointCohere.ts
+++ b/src/lib/server/endpoints/cohere/endpointCohere.ts
@@ -1,5 +1,5 @@
import { z } from "zod";
-import { COHERE_API_TOKEN } from "$env/static/private";
+import { env } from "$env/dynamic/private";
import type { Endpoint } from "../endpoints";
import type { TextGenerationStreamOutput } from "@huggingface/inference";
import type { Cohere, CohereClient } from "cohere-ai";
@@ -9,7 +9,7 @@ export const endpointCohereParametersSchema = z.object({
weight: z.number().int().positive().default(1),
model: z.any(),
type: z.literal("cohere"),
- apiKey: z.string().default(COHERE_API_TOKEN),
+ apiKey: z.string().default(env.COHERE_API_TOKEN),
raw: z.boolean().default(false),
});
diff --git a/src/lib/server/endpoints/langserve/endpointLangserve.ts b/src/lib/server/endpoints/langserve/endpointLangserve.ts
index 2c5a475c967..364765c478d 100644
--- a/src/lib/server/endpoints/langserve/endpointLangserve.ts
+++ b/src/lib/server/endpoints/langserve/endpointLangserve.ts
@@ -2,6 +2,7 @@ import { buildPrompt } from "$lib/buildPrompt";
import { z } from "zod";
import type { Endpoint } from "../endpoints";
import type { TextGenerationStreamOutput } from "@huggingface/inference";
+import { logger } from "$lib/server/logger";
export const endpointLangserveParametersSchema = z.object({
weight: z.number().int().positive().default(1),
@@ -99,8 +100,8 @@ export function endpointLangserve(
try {
data = JSON.parse(jsonString);
} catch (e) {
- console.error("Failed to parse JSON", e);
- console.error("Problematic JSON string:", jsonString);
+ logger.error("Failed to parse JSON", e);
+ logger.error("Problematic JSON string:", jsonString);
continue; // Skip this iteration and try the next chunk
}
// Assuming content within data is a plain string
diff --git a/src/lib/server/endpoints/llamacpp/endpointLlamacpp.ts b/src/lib/server/endpoints/llamacpp/endpointLlamacpp.ts
index ffd9fa2c495..b2b8d1478c2 100644
--- a/src/lib/server/endpoints/llamacpp/endpointLlamacpp.ts
+++ b/src/lib/server/endpoints/llamacpp/endpointLlamacpp.ts
@@ -1,8 +1,9 @@
-import { HF_ACCESS_TOKEN, HF_TOKEN } from "$env/static/private";
+import { env } from "$env/dynamic/private";
import { buildPrompt } from "$lib/buildPrompt";
import type { TextGenerationStreamOutput } from "@huggingface/inference";
import type { Endpoint } from "../endpoints";
import { z } from "zod";
+import { logger } from "$lib/server/logger";
export const endpointLlamacppParametersSchema = z.object({
weight: z.number().int().positive().default(1),
@@ -12,7 +13,7 @@ export const endpointLlamacppParametersSchema = z.object({
accessToken: z
.string()
.min(1)
- .default(HF_TOKEN ?? HF_ACCESS_TOKEN),
+ .default(env.HF_TOKEN ?? env.HF_ACCESS_TOKEN),
});
export function endpointLlamacpp(
@@ -93,8 +94,8 @@ export function endpointLlamacpp(
try {
data = JSON.parse(jsonString);
} catch (e) {
- console.error("Failed to parse JSON", e);
- console.error("Problematic JSON string:", jsonString);
+ logger.error("Failed to parse JSON", e);
+ logger.error("Problematic JSON string:", jsonString);
continue; // Skip this iteration and try the next chunk
}
diff --git a/src/lib/server/endpoints/openai/endpointOai.ts b/src/lib/server/endpoints/openai/endpointOai.ts
index 8bd28540dd4..945921b1b9a 100644
--- a/src/lib/server/endpoints/openai/endpointOai.ts
+++ b/src/lib/server/endpoints/openai/endpointOai.ts
@@ -2,7 +2,7 @@ import { z } from "zod";
import { openAICompletionToTextGenerationStream } from "./openAICompletionToTextGenerationStream";
import { openAIChatToTextGenerationStream } from "./openAIChatToTextGenerationStream";
import { buildPrompt } from "$lib/buildPrompt";
-import { OPENAI_API_KEY } from "$env/static/private";
+import { env } from "$env/dynamic/private";
import type { Endpoint } from "../endpoints";
export const endpointOAIParametersSchema = z.object({
@@ -10,7 +10,7 @@ export const endpointOAIParametersSchema = z.object({
model: z.any(),
type: z.literal("openai"),
baseURL: z.string().url().default("https://api.openai.com/v1"),
- apiKey: z.string().default(OPENAI_API_KEY ?? "sk-"),
+ apiKey: z.string().default(env.OPENAI_API_KEY ?? "sk-"),
completion: z
.union([z.literal("completions"), z.literal("chat_completions")])
.default("chat_completions"),
diff --git a/src/lib/server/endpoints/tgi/endpointTgi.ts b/src/lib/server/endpoints/tgi/endpointTgi.ts
index 131d628ae21..aed06739722 100644
--- a/src/lib/server/endpoints/tgi/endpointTgi.ts
+++ b/src/lib/server/endpoints/tgi/endpointTgi.ts
@@ -1,4 +1,4 @@
-import { HF_ACCESS_TOKEN, HF_TOKEN } from "$env/static/private";
+import { env } from "$env/dynamic/private";
import { buildPrompt } from "$lib/buildPrompt";
import { textGenerationStream } from "@huggingface/inference";
import type { Endpoint } from "../endpoints";
@@ -9,7 +9,7 @@ export const endpointTgiParametersSchema = z.object({
model: z.any(),
type: z.literal("tgi"),
url: z.string().url(),
- accessToken: z.string().default(HF_TOKEN ?? HF_ACCESS_TOKEN),
+ accessToken: z.string().default(env.HF_TOKEN ?? env.HF_ACCESS_TOKEN),
authorization: z.string().optional(),
});
diff --git a/src/lib/server/files/downloadFile.ts b/src/lib/server/files/downloadFile.ts
index 4d2bddb1c30..91b430fc5d8 100644
--- a/src/lib/server/files/downloadFile.ts
+++ b/src/lib/server/files/downloadFile.ts
@@ -1,5 +1,5 @@
import { error } from "@sveltejs/kit";
-import { collections } from "../database";
+import { collections } from "$lib/server/database";
import type { Conversation } from "$lib/types/Conversation";
import type { SharedConversation } from "$lib/types/SharedConversation";
diff --git a/src/lib/server/files/uploadFile.ts b/src/lib/server/files/uploadFile.ts
index 1c4a59b6f44..34452245741 100644
--- a/src/lib/server/files/uploadFile.ts
+++ b/src/lib/server/files/uploadFile.ts
@@ -1,6 +1,6 @@
import type { Conversation } from "$lib/types/Conversation";
import { sha256 } from "$lib/utils/sha256";
-import { collections } from "../database";
+import { collections } from "$lib/server/database";
export async function uploadFile(file: Blob, conv: Conversation): Promise {
const sha = await sha256(await file.text());
diff --git a/src/lib/server/logger.ts b/src/lib/server/logger.ts
new file mode 100644
index 00000000000..b01b7692e3a
--- /dev/null
+++ b/src/lib/server/logger.ts
@@ -0,0 +1,18 @@
+import pino from "pino";
+import { dev } from "$app/environment";
+import { env } from "$env/dynamic/private";
+
+let options: pino.LoggerOptions = {};
+
+if (dev) {
+ options = {
+ transport: {
+ target: "pino-pretty",
+ options: {
+ colorize: true,
+ },
+ },
+ };
+}
+
+export const logger = pino({ ...options, level: env.LOG_LEVEL ?? "info" });
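
Usage stays a drop-in replacement for `console.*`; both calls below appear elsewhere in this diff:

// Illustrative only: the shared pino instance replaces console.* across the
// server code. Level comes from LOG_LEVEL (default "info"); in dev the output
// goes through pino-pretty.
import { logger } from "$lib/server/logger";

logger.info("[MIGRATIONS] Begin check...");
logger.debug("Could not get info from TEI embedding endpoint. Using defaults.");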
diff --git a/src/lib/server/metrics.ts b/src/lib/server/metrics.ts
new file mode 100644
index 00000000000..a4728cc78ba
--- /dev/null
+++ b/src/lib/server/metrics.ts
@@ -0,0 +1,43 @@
+import { collectDefaultMetrics, Registry } from "prom-client";
+import express from "express";
+import { logger } from "$lib/server/logger";
+import { env } from "$env/dynamic/private";
+
+export class MetricsServer {
+ private static instance: MetricsServer;
+
+ private constructor() {
+ const app = express();
+ const port = env.METRICS_PORT || "5565";
+
+ const server = app.listen(port, () => {
+ logger.info(`Metrics server listening on port ${port}`);
+ });
+
+ const register = new Registry();
+ collectDefaultMetrics({ register });
+
+ app.get("/metrics", (req, res) => {
+ register.metrics().then((metrics) => {
+ res.set("Content-Type", "text/plain");
+ res.send(metrics);
+ });
+ });
+
+ process.on("SIGINT", async () => {
+ logger.info("Sigint received, disconnect metrics server ...");
+ server.close(() => {
+ logger.info("Server stopped ...");
+ });
+ process.exit();
+ });
+ }
+
+ public static getInstance(): MetricsServer {
+ if (!MetricsServer.instance) {
+ MetricsServer.instance = new MetricsServer();
+ }
+
+ return MetricsServer.instance;
+ }
+}
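
A minimal sketch of starting and scraping the metrics server; where `getInstance()` is actually called is not shown in this diff, so the call site is an assumption:

// Illustrative only: call site is an assumption, not part of this diff.
import { MetricsServer } from "$lib/server/metrics";

MetricsServer.getInstance(); // starts an Express app on METRICS_PORT (default 5565)

// Then scrape the prom-client default metrics:
//   curl http://localhost:5565/metrics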
diff --git a/src/lib/server/models.ts b/src/lib/server/models.ts
index 20bd58645d0..7cdbaf5f5bb 100644
--- a/src/lib/server/models.ts
+++ b/src/lib/server/models.ts
@@ -1,11 +1,4 @@
-import {
- HF_TOKEN,
- HF_API_ROOT,
- MODELS,
- OLD_MODELS,
- TASK_MODEL,
- HF_ACCESS_TOKEN,
-} from "$env/static/private";
+import { env } from "$env/dynamic/private";
import type { ChatTemplateInput } from "$lib/types/Template";
import { compileTemplate } from "$lib/utils/template";
import { z } from "zod";
@@ -18,6 +11,7 @@ import type { PreTrainedTokenizer } from "@xenova/transformers";
import JSON5 from "json5";
import { getTokenizer } from "$lib/utils/getTokenizer";
+import { logger } from "$lib/server/logger";
type Optional<T, K extends keyof T> = Pick<Partial<T>, K> & Omit<T, K>;
@@ -71,7 +65,7 @@ const modelConfig = z.object({
embeddingModel: validateEmbeddingModelByName(embeddingModels).optional(),
});
-const modelsRaw = z.array(modelConfig).parse(JSON5.parse(MODELS));
+const modelsRaw = z.array(modelConfig).parse(JSON5.parse(env.MODELS));
async function getChatPromptRender(
m: z.infer<typeof modelConfig>
@@ -91,7 +85,7 @@ async function getChatPromptRender(
try {
tokenizer = await getTokenizer(m.tokenizer);
} catch (e) {
- console.error(
+ logger.error(
"Failed to load tokenizer for model " +
m.name +
" consider setting chatPromptTemplate manually or making sure the model is available on the hub. Error: " +
@@ -146,8 +140,8 @@ const addEndpoint = (m: Awaited<ReturnType<typeof processModel>>) => ({
if (!m.endpoints) {
return endpointTgi({
type: "tgi",
- url: `${HF_API_ROOT}/${m.name}`,
- accessToken: HF_TOKEN ?? HF_ACCESS_TOKEN,
+ url: `${env.HF_API_ROOT}/${m.name}`,
+ accessToken: env.HF_TOKEN ?? env.HF_ACCESS_TOKEN,
weight: 1,
model: m,
});
@@ -198,7 +192,7 @@ export const models = await Promise.all(modelsRaw.map((e) => processModel(e).the
export const defaultModel = models[0];
// Models that have been deprecated
-export const oldModels = OLD_MODELS
+export const oldModels = env.OLD_MODELS
? z
.array(
z.object({
@@ -207,7 +201,7 @@ export const oldModels = OLD_MODELS
displayName: z.string().min(1).optional(),
})
)
- .parse(JSON5.parse(OLD_MODELS))
+ .parse(JSON5.parse(env.OLD_MODELS))
.map((m) => ({ ...m, id: m.id || m.name, displayName: m.displayName || m.name }))
: [];
@@ -218,9 +212,9 @@ export const validateModel = (_models: BackendModel[]) => {
// if `TASK_MODEL` is string & name of a model in `MODELS`, then we use `MODELS[TASK_MODEL]`, else we try to parse `TASK_MODEL` as a model config itself
-export const smallModel = TASK_MODEL
- ? (models.find((m) => m.name === TASK_MODEL) ||
- (await processModel(modelConfig.parse(JSON5.parse(TASK_MODEL))).then((m) =>
+export const smallModel = env.TASK_MODEL
+ ? (models.find((m) => m.name === env.TASK_MODEL) ||
+ (await processModel(modelConfig.parse(JSON5.parse(env.TASK_MODEL))).then((m) =>
addEndpoint(m)
))) ??
defaultModel
diff --git a/src/lib/server/preprocessMessages.ts b/src/lib/server/preprocessMessages.ts
index 53768fa6f62..9cdcaae0487 100644
--- a/src/lib/server/preprocessMessages.ts
+++ b/src/lib/server/preprocessMessages.ts
@@ -2,6 +2,7 @@ import type { Conversation } from "$lib/types/Conversation";
import type { Message } from "$lib/types/Message";
import { format } from "date-fns";
import { downloadFile } from "./files/downloadFile";
+import { logger } from "$lib/server/logger";
export async function preprocessMessages(
messages: Message[],
@@ -44,7 +45,7 @@ Answer the question: ${lastQuestion}`;
const b64 = image.toString("base64");
return `![](data:${mime};base64,${b64})})`;
} catch (e) {
- console.error(e);
+ logger.error(e);
}
})
);
diff --git a/src/lib/server/summarize.ts b/src/lib/server/summarize.ts
index 264963614f1..4cef6174dc9 100644
--- a/src/lib/server/summarize.ts
+++ b/src/lib/server/summarize.ts
@@ -1,9 +1,10 @@
-import { LLM_SUMMERIZATION } from "$env/static/private";
+import { env } from "$env/dynamic/private";
import { generateFromDefaultEndpoint } from "$lib/server/generateFromDefaultEndpoint";
import type { Message } from "$lib/types/Message";
+import { logger } from "$lib/server/logger";
export async function summarize(prompt: string) {
- if (!LLM_SUMMERIZATION) {
+ if (!env.LLM_SUMMERIZATION) {
return prompt.split(/\s+/g).slice(0, 5).join(" ");
}
@@ -41,7 +42,7 @@ export async function summarize(prompt: string) {
return summary;
})
.catch((e) => {
- console.error(e);
+ logger.error(e);
return null;
});
}
diff --git a/src/lib/server/usageLimits.ts b/src/lib/server/usageLimits.ts
index 0323e83fb50..e1f2390388a 100644
--- a/src/lib/server/usageLimits.ts
+++ b/src/lib/server/usageLimits.ts
@@ -1,5 +1,5 @@
import { z } from "zod";
-import { USAGE_LIMITS, RATE_LIMIT } from "$env/static/private";
+import { env } from "$env/dynamic/private";
import JSON5 from "json5";
// RATE_LIMIT is the legacy way to define messages per minute limit
@@ -12,7 +12,7 @@ export const usageLimitsSchema = z
messagesPerMinute: z
.preprocess((val) => {
if (val === undefined) {
- return RATE_LIMIT;
+ return env.RATE_LIMIT;
}
return val;
}, z.coerce.number().optional())
@@ -20,4 +20,4 @@ export const usageLimitsSchema = z
})
.optional();
-export const usageLimits = usageLimitsSchema.parse(JSON5.parse(USAGE_LIMITS));
+export const usageLimits = usageLimitsSchema.parse(JSON5.parse(env.USAGE_LIMITS));
diff --git a/src/lib/server/websearch/runWebSearch.ts b/src/lib/server/websearch/runWebSearch.ts
index 8eba2ddd627..1d06a39c5a8 100644
--- a/src/lib/server/websearch/runWebSearch.ts
+++ b/src/lib/server/websearch/runWebSearch.ts
@@ -5,7 +5,7 @@ import { chunk } from "$lib/utils/chunk";
import { findSimilarSentences } from "$lib/server/sentenceSimilarity";
import { getWebSearchProvider } from "./searchWeb";
import { defaultEmbeddingModel, embeddingModels } from "$lib/server/embeddingModels";
-import { WEBSEARCH_ALLOWLIST, WEBSEARCH_BLOCKLIST, ENABLE_LOCAL_FETCH } from "$env/static/private";
+import { env } from "$env/dynamic/private";
import type { Conversation } from "$lib/types/Conversation";
import type { MessageUpdate } from "$lib/types/MessageUpdate";
@@ -22,8 +22,8 @@ const MAX_N_PAGES_EMBED = 5 as const;
const listSchema = z.array(z.string()).default([]);
-const allowList = listSchema.parse(JSON5.parse(WEBSEARCH_ALLOWLIST));
-const blockList = listSchema.parse(JSON5.parse(WEBSEARCH_BLOCKLIST));
+const allowList = listSchema.parse(JSON5.parse(env.WEBSEARCH_ALLOWLIST));
+const blockList = listSchema.parse(JSON5.parse(env.WEBSEARCH_BLOCKLIST));
export async function runWebSearch(
conv: Conversation,
@@ -52,7 +52,7 @@ export async function runWebSearch(
let linksToUse = [...ragSettings.allowedLinks];
- if (ENABLE_LOCAL_FETCH !== "true") {
+ if (env.ENABLE_LOCAL_FETCH !== "true") {
const localLinks = await Promise.all(
linksToUse.map(async (link) => {
try {
diff --git a/src/lib/server/websearch/searchSearxng.ts b/src/lib/server/websearch/searchSearxng.ts
index a432003cb16..9507d536969 100644
--- a/src/lib/server/websearch/searchSearxng.ts
+++ b/src/lib/server/websearch/searchSearxng.ts
@@ -1,11 +1,12 @@
-import { SEARXNG_QUERY_URL } from "$env/static/private";
+import { env } from "$env/dynamic/private";
+import { logger } from "$lib/server/logger";
export async function searchSearxng(query: string) {
const abortController = new AbortController();
setTimeout(() => abortController.abort(), 10000);
// Insert the query into the URL template
- let url = SEARXNG_QUERY_URL.replace("<query>", query);
+ let url = env.SEARXNG_QUERY_URL.replace("<query>", query);
// Check if "&format=json" already exists in the URL
if (!url.includes("&format=json")) {
@@ -18,7 +19,7 @@ export async function searchSearxng(query: string) {
})
.then((response) => response.json() as Promise<{ results: { url: string }[] }>)
.catch((error) => {
- console.error("Failed to fetch or parse JSON", error);
+ logger.error("Failed to fetch or parse JSON", error);
throw new Error("Failed to fetch or parse JSON");
});
diff --git a/src/lib/server/websearch/searchWeb.ts b/src/lib/server/websearch/searchWeb.ts
index 94021e5c014..724be1227a7 100644
--- a/src/lib/server/websearch/searchWeb.ts
+++ b/src/lib/server/websearch/searchWeb.ts
@@ -1,13 +1,6 @@
import type { YouWebSearch } from "../../types/WebSearch";
import { WebSearchProvider } from "../../types/WebSearch";
-import {
- SERPAPI_KEY,
- SERPER_API_KEY,
- SERPSTACK_API_KEY,
- USE_LOCAL_WEBSEARCH,
- SEARXNG_QUERY_URL,
- YDC_API_KEY,
-} from "$env/static/private";
+import { env } from "$env/dynamic/private";
import { getJson } from "serpapi";
import type { GoogleParameters } from "serpapi";
import { searchWebLocal } from "./searchWebLocal";
@@ -15,9 +8,9 @@ import { searchSearxng } from "./searchSearxng";
// get which SERP api is providing web results
export function getWebSearchProvider() {
- if (YDC_API_KEY) {
+ if (env.YDC_API_KEY) {
return WebSearchProvider.YOU;
- } else if (SEARXNG_QUERY_URL) {
+ } else if (env.SEARXNG_QUERY_URL) {
return WebSearchProvider.SEARXNG;
} else {
return WebSearchProvider.GOOGLE;
@@ -26,22 +19,22 @@ export function getWebSearchProvider() {
// Show result as JSON
export async function searchWeb(query: string) {
- if (USE_LOCAL_WEBSEARCH) {
+ if (env.USE_LOCAL_WEBSEARCH) {
return await searchWebLocal(query);
}
- if (SEARXNG_QUERY_URL) {
+ if (env.SEARXNG_QUERY_URL) {
return await searchSearxng(query);
}
- if (SERPER_API_KEY) {
+ if (env.SERPER_API_KEY) {
return await searchWebSerper(query);
}
- if (YDC_API_KEY) {
+ if (env.YDC_API_KEY) {
return await searchWebYouApi(query);
}
- if (SERPAPI_KEY) {
+ if (env.SERPAPI_KEY) {
return await searchWebSerpApi(query);
}
- if (SERPSTACK_API_KEY) {
+ if (env.SERPSTACK_API_KEY) {
return await searchSerpStack(query);
}
throw new Error("No You.com or Serper.dev or SerpAPI key found");
@@ -58,7 +51,7 @@ export async function searchWebSerper(query: string) {
method: "POST",
body: JSON.stringify(params),
headers: {
- "x-api-key": SERPER_API_KEY,
+ "x-api-key": env.SERPER_API_KEY,
"Content-type": "application/json; charset=UTF-8",
},
});
@@ -84,7 +77,7 @@ export async function searchWebSerpApi(query: string) {
hl: "en",
gl: "us",
google_domain: "google.com",
- api_key: SERPAPI_KEY,
+ api_key: env.SERPAPI_KEY,
} satisfies GoogleParameters;
// Show result as JSON
@@ -97,7 +90,7 @@ export async function searchWebYouApi(query: string) {
const response = await fetch(`https://api.ydc-index.io/search?query=${query}`, {
method: "GET",
headers: {
- "X-API-Key": YDC_API_KEY,
+ "X-API-Key": env.YDC_API_KEY,
"Content-type": "application/json; charset=UTF-8",
},
});
@@ -123,7 +116,7 @@ export async function searchWebYouApi(query: string) {
export async function searchSerpStack(query: string) {
const response = await fetch(
- `http://api.serpstack.com/search?access_key=${SERPSTACK_API_KEY}&query=${query}&hl=en&gl=us`,
+ `http://api.serpstack.com/search?access_key=${env.SERPSTACK_API_KEY}&query=${query}&hl=en&gl=us`,
{
method: "GET",
headers: {
diff --git a/src/lib/utils/getShareUrl.ts b/src/lib/utils/getShareUrl.ts
index ef4259f6ad3..5278ab6fd6e 100644
--- a/src/lib/utils/getShareUrl.ts
+++ b/src/lib/utils/getShareUrl.ts
@@ -1,6 +1,8 @@
import { base } from "$app/paths";
-import { PUBLIC_ORIGIN, PUBLIC_SHARE_PREFIX } from "$env/static/public";
+import { env as envPublic } from "$env/dynamic/public";
export function getShareUrl(url: URL, shareId: string): string {
- return `${PUBLIC_SHARE_PREFIX || `${PUBLIC_ORIGIN || url.origin}${base}`}/r/${shareId}`;
+ return `${
+ envPublic.PUBLIC_SHARE_PREFIX || `${envPublic.PUBLIC_ORIGIN || url.origin}${base}`
+ }/r/${shareId}`;
}
diff --git a/src/lib/utils/isHuggingChat.ts b/src/lib/utils/isHuggingChat.ts
index fbcbefbc546..df1ad80039e 100644
--- a/src/lib/utils/isHuggingChat.ts
+++ b/src/lib/utils/isHuggingChat.ts
@@ -1,3 +1,3 @@
-import { PUBLIC_APP_ASSETS } from "$env/static/public";
+import { env as envPublic } from "$env/dynamic/public";
-export const isHuggingChat = PUBLIC_APP_ASSETS === "huggingchat";
+export const isHuggingChat = envPublic.PUBLIC_APP_ASSETS === "huggingchat";
diff --git a/src/routes/+error.svelte b/src/routes/+error.svelte
index 6836376aa41..376412a7ffd 100644
--- a/src/routes/+error.svelte
+++ b/src/routes/+error.svelte
@@ -11,5 +11,10 @@
{$page.status}
{$page.error?.message}
+ {#if $page.error?.errorId}
+
+ {$page.error
+ .errorId}
+ {/if}
diff --git a/src/routes/+layout.server.ts b/src/routes/+layout.server.ts
index c353bc4f47c..a79c470f688 100644
--- a/src/routes/+layout.server.ts
+++ b/src/routes/+layout.server.ts
@@ -5,17 +5,7 @@ import { UrlDependency } from "$lib/types/UrlDependency";
import { defaultModel, models, oldModels, validateModel } from "$lib/server/models";
import { authCondition, requiresUser } from "$lib/server/auth";
import { DEFAULT_SETTINGS } from "$lib/types/Settings";
-import {
- SERPAPI_KEY,
- SERPER_API_KEY,
- SERPSTACK_API_KEY,
- MESSAGES_BEFORE_LOGIN,
- YDC_API_KEY,
- USE_LOCAL_WEBSEARCH,
- SEARXNG_QUERY_URL,
- ENABLE_ASSISTANTS,
- ENABLE_ASSISTANTS_RAG,
-} from "$env/static/private";
+import { env } from "$env/dynamic/private";
import { ObjectId } from "mongodb";
import type { ConvSidebar } from "$lib/types/ConvSidebar";
@@ -47,7 +37,7 @@ export const load: LayoutServerLoad = async ({ locals, depends }) => {
});
}
- const enableAssistants = ENABLE_ASSISTANTS === "true";
+ const enableAssistants = env.ENABLE_ASSISTANTS === "true";
const assistantActive = !models.map(({ id }) => id).includes(settings?.activeModel ?? "");
@@ -87,7 +77,7 @@ export const load: LayoutServerLoad = async ({ locals, depends }) => {
const assistants = await collections.assistants.find({ _id: { $in: assistantIds } }).toArray();
- const messagesBeforeLogin = MESSAGES_BEFORE_LOGIN ? parseInt(MESSAGES_BEFORE_LOGIN) : 0;
+ const messagesBeforeLogin = env.MESSAGES_BEFORE_LOGIN ? parseInt(env.MESSAGES_BEFORE_LOGIN) : 0;
let loginRequired = false;
@@ -136,12 +126,12 @@ export const load: LayoutServerLoad = async ({ locals, depends }) => {
}) satisfies ConvSidebar[],
settings: {
searchEnabled: !!(
- SERPAPI_KEY ||
- SERPER_API_KEY ||
- SERPSTACK_API_KEY ||
- YDC_API_KEY ||
- USE_LOCAL_WEBSEARCH ||
- SEARXNG_QUERY_URL
+ env.SERPAPI_KEY ||
+ env.SERPER_API_KEY ||
+ env.SERPSTACK_API_KEY ||
+ env.YDC_API_KEY ||
+ env.USE_LOCAL_WEBSEARCH ||
+ env.SEARXNG_QUERY_URL
),
ethicsModalAccepted: !!settings?.ethicsModalAcceptedAt,
ethicsModalAcceptedAt: settings?.ethicsModalAcceptedAt ?? null,
@@ -188,7 +178,7 @@ export const load: LayoutServerLoad = async ({ locals, depends }) => {
},
assistant,
enableAssistants,
- enableAssistantsRAG: ENABLE_ASSISTANTS_RAG === "true",
+ enableAssistantsRAG: env.ENABLE_ASSISTANTS_RAG === "true",
loginRequired,
loginEnabled: requiresUser,
guestMode: requiresUser && messagesBeforeLogin > 0,
diff --git a/src/routes/+layout.svelte b/src/routes/+layout.svelte
index bcfa7c05f37..18f9a884b26 100644
--- a/src/routes/+layout.svelte
+++ b/src/routes/+layout.svelte
@@ -7,13 +7,7 @@
import { page } from "$app/stores";
import { browser } from "$app/environment";
- import {
- PUBLIC_APPLE_APP_ID,
- PUBLIC_APP_DESCRIPTION,
- PUBLIC_ORIGIN,
- PUBLIC_PLAUSIBLE_SCRIPT_URL,
- } from "$env/static/public";
- import { PUBLIC_APP_ASSETS, PUBLIC_APP_NAME } from "$env/static/public";
+ import { env as envPublic } from "$env/dynamic/public";
import { error } from "$lib/stores/errors";
import { createSettingsStore } from "$lib/stores/settings";
@@ -134,7 +128,7 @@
- {PUBLIC_APP_NAME}
+ {envPublic.PUBLIC_APP_NAME}
@@ -142,44 +136,49 @@
{#if !$page.url.pathname.includes("/assistant/") && $page.route.id !== "/assistants" && !$page.url.pathname.includes("/models/")}
-
+
-
+
-
+
{/if}
- {#if PUBLIC_PLAUSIBLE_SCRIPT_URL && PUBLIC_ORIGIN}
+ {#if envPublic.PUBLIC_PLAUSIBLE_SCRIPT_URL && envPublic.PUBLIC_ORIGIN}
{/if}
- {#if PUBLIC_APPLE_APP_ID}
-
+ {#if envPublic.PUBLIC_APPLE_APP_ID}
+
{/if}
diff --git a/src/routes/+page.svelte b/src/routes/+page.svelte
index d0410802aa4..abedc1c8862 100644
--- a/src/routes/+page.svelte
+++ b/src/routes/+page.svelte
@@ -1,7 +1,7 @@
- {PUBLIC_APP_NAME}
+ {envPublic.PUBLIC_APP_NAME}
" -H "Content-Type: application/json" -d '{"model": "OpenAssistant/oasst-sft-6-llama-30b-xor"}'
export async function POST({ request }) {
- if (!PARQUET_EXPORT_DATASET || !PARQUET_EXPORT_HF_TOKEN) {
+ if (!env.PARQUET_EXPORT_DATASET || !env.PARQUET_EXPORT_HF_TOKEN) {
throw error(500, "Parquet export is not configured.");
}
@@ -41,7 +42,7 @@ export async function POST({ request }) {
const writer = await parquet.ParquetWriter.openFile(schema, fileName);
let count = 0;
- console.log("Exporting conversations for model", model);
+ logger.info("Exporting conversations for model", model);
for await (const conversation of collections.settings.aggregate<{
title: string;
@@ -88,11 +89,11 @@ export async function POST({ request }) {
++count;
if (count % 1_000 === 0) {
- console.log("Exported", count, "conversations");
+ logger.info("Exported", count, "conversations");
}
}
- console.log("exporting convos with userId");
+ logger.info("exporting convos with userId");
for await (const conversation of collections.settings.aggregate<{
title: string;
@@ -133,24 +134,24 @@ export async function POST({ request }) {
++count;
if (count % 1_000 === 0) {
- console.log("Exported", count, "conversations");
+ logger.info("Exported", count, "conversations");
}
}
await writer.close();
- console.log("Uploading", fileName, "to Hugging Face Hub");
+ logger.info("Uploading", fileName, "to Hugging Face Hub");
await uploadFile({
file: pathToFileURL(fileName) as URL,
- credentials: { accessToken: PARQUET_EXPORT_HF_TOKEN },
+ credentials: { accessToken: env.PARQUET_EXPORT_HF_TOKEN },
repo: {
type: "dataset",
- name: PARQUET_EXPORT_DATASET,
+ name: env.PARQUET_EXPORT_DATASET,
},
});
- console.log("Upload done");
+ logger.info("Upload done");
await unlink(fileName);
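
Throughout these server routes `console.log` is swapped for a shared `logger`. The logger module itself is not part of this excerpt; a hypothetical sketch of what `$lib/server/logger` could look like, assuming a pino-style API (the actual implementation may differ):

```ts
// Hypothetical sketch of $lib/server/logger — not shown in this diff.
// Assumes pino; swap in whichever structured logger the project actually uses.
import pino from "pino";

export const logger = pino({
  // LOG_LEVEL is an assumed variable name, used here only for illustration
  level: process.env.LOG_LEVEL ?? "info",
});
```

Centralising on one module gives level filtering and structured output, and makes call sites such as `logger.info("Upload done")` easy to redirect to a different sink later.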
diff --git a/src/routes/admin/stats/compute/+server.ts b/src/routes/admin/stats/compute/+server.ts
index 5e73c36c26a..d8d7f0ec605 100644
--- a/src/routes/admin/stats/compute/+server.ts
+++ b/src/routes/admin/stats/compute/+server.ts
@@ -1,15 +1,16 @@
import { json } from "@sveltejs/kit";
import type { ConversationStats } from "$lib/types/ConversationStats";
-import { CONVERSATION_STATS_COLLECTION, collections } from "$lib/server/database.js";
+import { CONVERSATION_STATS_COLLECTION, collections } from "$lib/server/database";
+import { logger } from "$lib/server/logger";
// Trigger like this:
// curl -X POST "http://localhost:5173/chat/admin/stats/compute" -H "Authorization: Bearer "
export async function POST() {
for (const span of ["day", "week", "month"] as const) {
- computeStats({ dateField: "updatedAt", type: "conversation", span }).catch(console.error);
- computeStats({ dateField: "createdAt", type: "conversation", span }).catch(console.error);
- computeStats({ dateField: "createdAt", type: "message", span }).catch(console.error);
+ computeStats({ dateField: "updatedAt", type: "conversation", span }).catch(logger.error);
+ computeStats({ dateField: "createdAt", type: "conversation", span }).catch(logger.error);
+ computeStats({ dateField: "createdAt", type: "message", span }).catch(logger.error);
}
return json({}, { status: 202 });
@@ -29,7 +30,7 @@ async function computeStats(params: {
// In those cases we need to compute the stats from before the last month as everything is one aggregation
const minDate = lastComputed ? lastComputed.date.at : new Date(0);
- console.log("Computing stats for", params.type, params.span, params.dateField, "from", minDate);
+ logger.info("Computing stats for", params.type, params.span, params.dateField, "from", minDate);
const dateField = params.type === "message" ? "messages." + params.dateField : params.dateField;
@@ -213,5 +214,5 @@ async function computeStats(params: {
await collections.conversations.aggregate(pipeline, { allowDiskUse: true }).next();
- console.log("Computed stats for", params.type, params.span, params.dateField);
+ logger.info("Computed stats for", params.type, params.span, params.dateField);
}
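
The stats endpoint kicks the three spans off without awaiting them and immediately answers `202 Accepted`; failures surface only through the logger. A condensed sketch of that fire-and-forget shape (the aggregation body is elided):

```ts
// Condensed sketch of the fire-and-forget pattern in admin/stats/compute/+server.ts.
// The real computeStats body (the Mongo aggregation pipeline) is elided.
import { json } from "@sveltejs/kit";
import { logger } from "$lib/server/logger";

async function computeStats(span: "day" | "week" | "month"): Promise<void> {
  // ...heavy aggregation work, as in the diff above...
}

export async function POST() {
  for (const span of ["day", "week", "month"] as const) {
    // intentionally not awaited: errors are logged, never surfaced to the HTTP caller
    computeStats(span).catch((err) => logger.error(err));
  }
  return json({}, { status: 202 });
}
```

The arrow wrapper (rather than passing `logger.error` directly, as the diff does) keeps `this` bound in case the logger's methods rely on it; whether that matters depends on the logging library chosen.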
diff --git a/src/routes/api/assistants/+server.ts b/src/routes/api/assistants/+server.ts
index 8b99a680bdf..ac588676cbc 100644
--- a/src/routes/api/assistants/+server.ts
+++ b/src/routes/api/assistants/+server.ts
@@ -1,9 +1,9 @@
-import { collections } from "$lib/server/database.js";
+import { collections } from "$lib/server/database";
import type { Assistant } from "$lib/types/Assistant";
import type { User } from "$lib/types/User";
import { generateQueryTokens } from "$lib/utils/searchTokens.js";
import type { Filter } from "mongodb";
-import { REQUIRE_FEATURED_ASSISTANTS } from "$env/static/private";
+import { env } from "$env/dynamic/private";
const NUM_PER_PAGE = 24;
@@ -27,11 +27,11 @@ export async function GET({ url, locals }) {
// if there is no user, we show community assistants, so only show featured assistants
const shouldBeFeatured =
- REQUIRE_FEATURED_ASSISTANTS === "true" && !user ? { featured: true } : {};
+ env.REQUIRE_FEATURED_ASSISTANTS === "true" && !user ? { featured: true } : {};
// if the user queried is not the current user, only show "public" assistants that have been shared before
const shouldHaveBeenShared =
- REQUIRE_FEATURED_ASSISTANTS === "true" && !createdByCurrentUser
+ env.REQUIRE_FEATURED_ASSISTANTS === "true" && !createdByCurrentUser
? { userCount: { $gt: 1 } }
: {};
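
The featured/shared gating in the assistants API reduces to two conditionally-empty filter fragments that are spread into one Mongo query. A sketch of just that composition; the function name and `hasUserFilter` parameter are illustrative, and the endpoint's other clauses (search tokens, user scoping, paging) are elided:

```ts
// Sketch of how the REQUIRE_FEATURED_ASSISTANTS gating composes into a single filter.
import type { Filter } from "mongodb";
import type { Assistant } from "$lib/types/Assistant";
import { env } from "$env/dynamic/private";

function assistantVisibilityFilter(hasUserFilter: boolean, createdByCurrentUser: boolean) {
  // anonymous community listing: only featured assistants
  const shouldBeFeatured =
    env.REQUIRE_FEATURED_ASSISTANTS === "true" && !hasUserFilter ? { featured: true } : {};
  // someone else's assistants: only ones that have actually been shared (more than one user)
  const shouldHaveBeenShared =
    env.REQUIRE_FEATURED_ASSISTANTS === "true" && !createdByCurrentUser
      ? { userCount: { $gt: 1 } }
      : {};

  const filter: Filter<Assistant> = { ...shouldBeFeatured, ...shouldHaveBeenShared };
  return filter;
}
```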
diff --git a/src/routes/assistant/[assistantId]/+page.server.ts b/src/routes/assistant/[assistantId]/+page.server.ts
index ac14877dbc4..fddb181b4b8 100644
--- a/src/routes/assistant/[assistantId]/+page.server.ts
+++ b/src/routes/assistant/[assistantId]/+page.server.ts
@@ -1,5 +1,5 @@
import { base } from "$app/paths";
-import { collections } from "$lib/server/database.js";
+import { collections } from "$lib/server/database";
import { redirect } from "@sveltejs/kit";
import { ObjectId } from "mongodb";
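
Only the import path changes here, but for context, a hypothetical sketch of the load shape such a page server file typically has in this codebase (the real body is not in this excerpt and may differ): look the assistant up by id and bounce back to the root if the id is malformed or nothing is found.

```ts
// Hypothetical sketch — the diff above only touches an import line; the actual
// load function is not shown in this excerpt.
import { base } from "$app/paths";
import { collections } from "$lib/server/database";
import { redirect } from "@sveltejs/kit";
import { ObjectId } from "mongodb";
import type { PageServerLoad } from "./$types";

export const load: PageServerLoad = async ({ params }) => {
  if (!ObjectId.isValid(params.assistantId)) {
    throw redirect(302, `${base}/`);
  }

  const assistant = await collections.assistants.findOne({
    _id: new ObjectId(params.assistantId),
  });

  if (!assistant) {
    throw redirect(302, `${base}/`);
  }

  // serialize for the client, matching how the routes above return Mongo documents
  return { assistant: JSON.parse(JSON.stringify(assistant)) };
};
```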
diff --git a/src/routes/assistant/[assistantId]/+page.svelte b/src/routes/assistant/[assistantId]/+page.svelte
index 16608d07ef2..f19b879023b 100644
--- a/src/routes/assistant/[assistantId]/+page.svelte
+++ b/src/routes/assistant/[assistantId]/+page.svelte
@@ -6,7 +6,7 @@
import { useSettingsStore } from "$lib/stores/settings";
import type { PageData } from "./$types";
import { applyAction, enhance } from "$app/forms";
- import { PUBLIC_APP_NAME, PUBLIC_ORIGIN } from "$env/static/public";
+ import { env as envPublic } from "$env/dynamic/public";
import { page } from "$app/stores";
import IconGear from "~icons/bi/gear-fill";
@@ -24,15 +24,16 @@
-
+
diff --git a/src/routes/assistants/+page.server.ts b/src/routes/assistants/+page.server.ts
index a30c964a8d4..0cf3663f704 100644
--- a/src/routes/assistants/+page.server.ts
+++ b/src/routes/assistants/+page.server.ts
@@ -1,6 +1,6 @@
import { base } from "$app/paths";
-import { ENABLE_ASSISTANTS, REQUIRE_FEATURED_ASSISTANTS } from "$env/static/private";
-import { collections } from "$lib/server/database.js";
+import { env } from "$env/dynamic/private";
+import { Database, collections } from "$lib/server/database.js";
import { SortKey, type Assistant } from "$lib/types/Assistant";
import type { User } from "$lib/types/User";
import { generateQueryTokens } from "$lib/utils/searchTokens.js";
@@ -10,7 +10,7 @@ import type { Filter } from "mongodb";
const NUM_PER_PAGE = 24;
export const load = async ({ url, locals }) => {
- if (!ENABLE_ASSISTANTS) {
+ if (!env.ENABLE_ASSISTANTS) {
throw redirect(302, `${base}/`);
}
@@ -18,7 +18,7 @@ export const load = async ({ url, locals }) => {
const pageIndex = parseInt(url.searchParams.get("p") ?? "0");
const username = url.searchParams.get("user");
const query = url.searchParams.get("q")?.trim() ?? null;
- const sort = url.searchParams.get("sort")?.trim() ?? SortKey.POPULAR;
+ const sort = url.searchParams.get("sort")?.trim() ?? SortKey.TRENDING;
const createdByCurrentUser = locals.user?.username && locals.user.username === username;
let user: Pick | null = null;
@@ -34,11 +34,11 @@ export const load = async ({ url, locals }) => {
// if there is no user, we show community assistants, so only show featured assistants
const shouldBeFeatured =
- REQUIRE_FEATURED_ASSISTANTS === "true" && !user ? { featured: true } : {};
+ env.REQUIRE_FEATURED_ASSISTANTS === "true" && !user ? { featured: true } : {};
// if the user queried is not the current user, only show "public" assistants that have been shared before
const shouldHaveBeenShared =
- REQUIRE_FEATURED_ASSISTANTS === "true" && !createdByCurrentUser
+ env.REQUIRE_FEATURED_ASSISTANTS === "true" && !createdByCurrentUser
? { userCount: { $gt: 1 } }
: {};
@@ -50,8 +50,9 @@ export const load = async ({ url, locals }) => {
...shouldBeFeatured,
...shouldHaveBeenShared,
};
- const assistants = await collections.assistants
- .find(filter)
+ const assistants = await Database.getInstance()
+ .getCollections()
+ .assistants.find(filter)
.skip(NUM_PER_PAGE * pageIndex)
.sort({
...(sort === SortKey.TRENDING && { last24HoursCount: -1 }),
@@ -60,7 +61,9 @@ export const load = async ({ url, locals }) => {
.limit(NUM_PER_PAGE)
.toArray();
- const numTotalItems = await collections.assistants.countDocuments(filter);
+ const numTotalItems = await Database.getInstance()
+ .getCollections()
+ .assistants.countDocuments(filter);
return {
assistants: JSON.parse(JSON.stringify(assistants)) as Array,
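
Two things change in this load function: the default sort becomes `SortKey.TRENDING`, and collection access goes through `Database.getInstance().getCollections()` instead of the module-level `collections` export. The singleton itself is outside this excerpt; a hypothetical sketch of the shape implied by the call sites, with the connection-string variable name assumed:

```ts
// Hypothetical sketch of the Database singleton implied by the call sites above.
// The real class lives in $lib/server/database and is not shown in this diff;
// MONGODB_URL is an assumed variable name.
import { MongoClient } from "mongodb";
import { env } from "$env/dynamic/private";
import type { Assistant } from "$lib/types/Assistant";

export class Database {
  private static instance: Database;
  private client: MongoClient;

  private constructor() {
    this.client = new MongoClient(env.MONGODB_URL ?? "mongodb://localhost:27017");
  }

  public static getInstance(): Database {
    if (!Database.instance) {
      Database.instance = new Database();
    }
    return Database.instance;
  }

  public getClient(): MongoClient {
    return this.client;
  }

  public getCollections() {
    const db = this.client.db();
    return {
      assistants: db.collection<Assistant>("assistants"),
      // ...remaining collections elided
    };
  }
}
```

Deferring construction to the first `getInstance()` call fits the dynamic-env migration: nothing reads the connection string at module-import time, so the same build can presumably be pointed at different databases purely through the environment.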
diff --git a/src/routes/assistants/+page.svelte b/src/routes/assistants/+page.svelte
index b27524aacc3..d2f90b6323c 100644
--- a/src/routes/assistants/+page.svelte
+++ b/src/routes/assistants/+page.svelte
@@ -1,7 +1,7 @@
-
+
-
+
diff --git a/src/routes/models/[...model]/thumbnail.png/ModelThumbnail.svelte b/src/routes/models/[...model]/thumbnail.png/ModelThumbnail.svelte
index b76e362af73..86af4085e1f 100644
--- a/src/routes/models/[...model]/thumbnail.png/ModelThumbnail.svelte
+++ b/src/routes/models/[...model]/thumbnail.png/ModelThumbnail.svelte
@@ -1,5 +1,5 @@