From f39638fe0212e4ba50cf03717a94a3b4ece05fef Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Sat, 11 Oct 2025 17:59:27 +0530 Subject: [PATCH 01/53] feat(app): initialize new model management --- src/lib/models/providers/baseProvider.ts | 20 +++ src/lib/models/providers/openai.ts | 207 +++++++++++++++++++++++ src/lib/models/registry.ts | 33 ++++ src/lib/models/types.ts | 16 ++ 4 files changed, 276 insertions(+) create mode 100644 src/lib/models/providers/baseProvider.ts create mode 100644 src/lib/models/providers/openai.ts create mode 100644 src/lib/models/registry.ts create mode 100644 src/lib/models/types.ts diff --git a/src/lib/models/providers/baseProvider.ts b/src/lib/models/providers/baseProvider.ts new file mode 100644 index 0000000..1d7ef47 --- /dev/null +++ b/src/lib/models/providers/baseProvider.ts @@ -0,0 +1,20 @@ +import { Embeddings } from '@langchain/core/embeddings'; +import { BaseChatModel } from '@langchain/core/language_models/chat_models'; +import { Model, ModelList, ProviderMetadata } from '../types'; +import { UIConfigField } from '@/lib/config/types'; + +abstract class BaseModelProvider { + constructor(protected config: CONFIG) {} + abstract getDefaultModels(): Promise; + abstract getModelList(): Promise; + abstract loadChatModel(modelName: string): Promise; + abstract loadEmbeddingModel(modelName: string): Promise; + static getProviderConfigFields(): UIConfigField[] { + throw new Error('Method not implemented.'); + } + static getProviderMetadata(): ProviderMetadata { + throw new Error('Method not Implemented.'); + } +} + +export default BaseModelProvider; diff --git a/src/lib/models/providers/openai.ts b/src/lib/models/providers/openai.ts new file mode 100644 index 0000000..5569b97 --- /dev/null +++ b/src/lib/models/providers/openai.ts @@ -0,0 +1,207 @@ +import { BaseChatModel } from '@langchain/core/language_models/chat_models'; +import { Model, ModelList, ProviderMetadata } from '../types'; 
+import BaseModelProvider from './baseProvider'; +import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai'; +import { Embeddings } from '@langchain/core/embeddings'; +import { UIConfigField } from '@/lib/config/types'; + +interface OpenAIConfig { + name: string; + apiKey: string; + baseURL: string; +} + +const defaultChatModels: Model[] = [ + { + name: 'GPT-3.5 Turbo', + key: 'gpt-3.5-turbo', + }, + { + name: 'GPT-4', + key: 'gpt-4', + }, + { + name: 'GPT-4 turbo', + key: 'gpt-4-turbo', + }, + { + name: 'GPT-4 omni', + key: 'gpt-4o', + }, + { + name: 'GPT-4o (2024-05-13)', + key: 'gpt-4o-2024-05-13', + }, + { + name: 'GPT-4 omni mini', + key: 'gpt-4o-mini', + }, + { + name: 'GPT 4.1 nano', + key: 'gpt-4.1-nano', + }, + { + name: 'GPT 4.1 mini', + key: 'gpt-4.1-mini', + }, + { + name: 'GPT 4.1', + key: 'gpt-4.1', + }, + { + name: 'GPT 5 nano', + key: 'gpt-5-nano', + }, + { + name: 'GPT 5', + key: 'gpt-5', + }, + { + name: 'GPT 5 Mini', + key: 'gpt-5-mini', + }, + { + name: 'o1', + key: 'o1', + }, + { + name: 'o3', + key: 'o3', + }, + { + name: 'o3 Mini', + key: 'o3-mini', + }, + { + name: 'o4 Mini', + key: 'o4-mini', + }, +]; + +const defaultEmbeddingModels: Model[] = [ + { + name: 'Text Embedding 3 Small', + key: 'text-embedding-3-small', + }, + { + name: 'Text Embedding 3 Large', + key: 'text-embedding-3-large', + }, +]; + +const providerConfigFields: UIConfigField[] = [ + /* { + type: 'string', + name: 'Name (Optional)', + key: 'name', + description: 'An optional name for this provider configuration', + required: false, + placeholder: 'Provider Name', + scope: 'server', + }, */ /* FOR NAME DIRECTLY CREATE INPUT IN FRONTEND */ + { + type: 'password', + name: 'API Key', + key: 'apiKey', + description: 'Your OpenAI API key', + required: true, + placeholder: 'OpenAI API Key', + env: 'OPENAI_API_KEY', + scope: 'server', + }, + { + type: 'string', + name: 'Base URL', + key: 'baseURL', + description: 'The base URL for the OpenAI API', + required: true, + 
placeholder: 'OpenAI Base URL', + default: 'https://api.openai.com/v1', + env: 'OPENAI_BASE_URL', + scope: 'server', + }, +]; + +class OpenAIProvider extends BaseModelProvider { + constructor(config: OpenAIConfig) { + super(config); + } + + async getDefaultModels(): Promise { + if (this.config.baseURL === 'https://api.openai.com/v1') { + return { + embedding: defaultEmbeddingModels, + chat: defaultChatModels, + }; + } + + return { + embedding: [], + chat: [], + }; + } + + async getModelList(): Promise { + /* Todo: IMPLEMENT MODEL READING FROM CONFIG FILE */ + const defaultModels = await this.getDefaultModels(); + + return { + embedding: [...defaultModels.embedding], + chat: [...defaultModels.chat], + }; + } + + async loadChatModel(key: string): Promise { + const modelList = await this.getModelList(); + + const exists = modelList.chat.filter((m) => m.key === key); + + if (!exists) { + throw new Error( + 'Error Loading OpenAI Chat Model. Invalid Model Selected', + ); + } + + return new ChatOpenAI({ + apiKey: this.config.apiKey, + temperature: 0.7, + model: key, + configuration: { + baseURL: this.config.baseURL, + }, + }); + } + + async loadEmbeddingModel(key: string): Promise { + const modelList = await this.getModelList(); + + const exists = modelList.chat.filter((m) => m.key === key); + + if (!exists) { + throw new Error( + 'Error Loading OpenAI Embedding Model. 
Invalid Model Selected.', + ); + } + + return new OpenAIEmbeddings({ + apiKey: this.config.apiKey, + model: key, + configuration: { + baseURL: this.config.baseURL, + }, + }); + } + + static getProviderConfigFields(): UIConfigField[] { + return providerConfigFields; + } + + static getProviderMetadata(): ProviderMetadata { + return { + key: 'openai', + name: 'OpenAI', + }; + } +} + +export default OpenAIProvider; diff --git a/src/lib/models/registry.ts b/src/lib/models/registry.ts new file mode 100644 index 0000000..5538e9e --- /dev/null +++ b/src/lib/models/registry.ts @@ -0,0 +1,33 @@ +import { ModelProviderUISection, UIConfigField } from '../config/types'; +import { ProviderMetadata } from './types'; +import BaseModelProvider from './providers/baseProvider'; +import OpenAIProvider from './providers/openai'; + +interface ProviderClass { + new (config: T): BaseModelProvider; + getProviderConfigFields(): UIConfigField[]; + getProviderMetadata(): ProviderMetadata; +} + +const providers: Record> = { + openai: OpenAIProvider, +}; + +class ModelRegistry { + constructor() {} + + getUIConfigSection(): ModelProviderUISection[] { + return Object.entries(providers).map(([k, p]) => { + const configFields = p.getProviderConfigFields(); + const metadata = p.getProviderMetadata(); + + return { + fields: configFields, + key: k, + name: metadata.name, + }; + }); + } +} + +export default ModelRegistry; diff --git a/src/lib/models/types.ts b/src/lib/models/types.ts new file mode 100644 index 0000000..e85ad6b --- /dev/null +++ b/src/lib/models/types.ts @@ -0,0 +1,16 @@ +type Model = { + name: string; + key: string; +}; + +type ModelList = { + embedding: Model[]; + chat: Model[]; +}; + +type ProviderMetadata = { + name: string; + key: string; +}; + +export type { Model, ModelList, ProviderMetadata }; From f1e6aa9c1a6e0d03be8b85df1ecc2d3c7eacfc38 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Sat, 11 Oct 2025 17:59:45 +0530 Subject: 
[PATCH 02/53] feat(app): add serverUtils, create `hashObj` util --- src/lib/serverUtils.ts | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 src/lib/serverUtils.ts diff --git a/src/lib/serverUtils.ts b/src/lib/serverUtils.ts new file mode 100644 index 0000000..9dca22f --- /dev/null +++ b/src/lib/serverUtils.ts @@ -0,0 +1,7 @@ +import crypto from 'crypto'; + +export const hashObj = (obj: { [key: string]: any }) => { + const json = JSON.stringify(obj, Object.keys(obj).sort()); + const hash = crypto.createHash('sha256').update(json).digest('hex'); + return hash; +}; From 3003d44544d03ed3a93d87096fa1e0bbae2d7849 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Sat, 11 Oct 2025 18:00:06 +0530 Subject: [PATCH 03/53] feat(app): initialize new config management --- src/lib/config/index.ts | 137 ++++++++++++++++++++++++++++++++++++++++ src/lib/config/types.ts | 82 ++++++++++++++++++++++++ 2 files changed, 219 insertions(+) create mode 100644 src/lib/config/index.ts create mode 100644 src/lib/config/types.ts diff --git a/src/lib/config/index.ts b/src/lib/config/index.ts new file mode 100644 index 0000000..e8c3b5d --- /dev/null +++ b/src/lib/config/index.ts @@ -0,0 +1,137 @@ +import path from 'node:path'; +import fs from 'fs'; +import { Config, ConfigModelProvider, EnvMap, UIConfigSections } from './types'; +import ModelRegistry from '../models/registry'; +import { hashObj } from '../serverUtils'; + +class ConfigManager { + configPath: string = path.join( + process.env.DATA_DIR || process.cwd(), + '/data/config.json', + ); + configVersion = 1; + currentConfig: Config = { + version: this.configVersion, + general: {}, + modelProviders: [], + }; + uiConfigSections: UIConfigSections = { + general: [], + modelProviders: [], + }; + modelRegistry = new ModelRegistry(); + + constructor() { + this.initialize(); + } + + private initialize() { + this.initializeConfig(); + this.initializeFromEnv(); + } + + private saveConfig() 
{ + fs.writeFileSync( + this.configPath, + JSON.stringify(this.currentConfig, null, 2), + ); + } + + private initializeConfig() { + const exists = fs.existsSync(this.configPath); + if (!exists) { + fs.writeFileSync( + this.configPath, + JSON.stringify(this.currentConfig, null, 2), + ); + } else { + try { + this.currentConfig = JSON.parse( + fs.readFileSync(this.configPath, 'utf-8'), + ); + } catch (err) { + if (err instanceof SyntaxError) { + console.error( + `Error parsing config file at ${this.configPath}:`, + err, + ); + console.log( + 'Loading default config and overwriting the existing file.', + ); + fs.writeFileSync( + this.configPath, + JSON.stringify(this.currentConfig, null, 2), + ); + return; + } else { + console.log('Unknown error reading config file:', err); + } + } + + this.currentConfig = this.migrateConfigNeeded(this.currentConfig); + } + } + + private migrateConfigNeeded(config: Config): Config { + /* TODO: Add migrations */ + return config; + } + + private initializeFromEnv() { + const providerConfigSections = this.modelRegistry.getUIConfigSection(); + + this.uiConfigSections.modelProviders = providerConfigSections; + + const newProviders: ConfigModelProvider[] = []; + + providerConfigSections.forEach((provider) => { + const newProvider: ConfigModelProvider & { required?: string[] } = { + id: crypto.randomUUID(), + name: `${provider.name} ${Math.floor(Math.random() * 1000)}`, + type: provider.key, + chatModels: [], + embeddingModels: [], + config: {}, + required: [], + hash: '', + }; + + provider.fields.forEach((field) => { + newProvider.config[field.key] = + process.env[field.env!] 
|| + field.default || + ''; /* Env var must exist for providers */ + + if (field.required) newProvider.required?.push(field.key); + }); + + let configured = true; + + newProvider.required?.forEach((r) => { + if (!newProvider.config[r]) { + configured = false; + } + }); + + if (configured) { + const hash = hashObj(newProvider.config); + newProvider.hash = hash; + delete newProvider.required; + + const exists = this.currentConfig.modelProviders.find( + (p) => p.hash === hash, + ); + + if (!exists) { + newProviders.push(newProvider); + } + } + }); + + this.currentConfig.modelProviders.push(...newProviders); + + this.saveConfig(); + } +} + +new ConfigManager(); diff --git a/src/lib/config/types.ts b/src/lib/config/types.ts new file mode 100644 index 0000000..9cd89b9 --- /dev/null +++ b/src/lib/config/types.ts @@ -0,0 +1,82 @@ +type BaseUIConfigField = { + name: string; + key: string; + required: boolean; + description: string; + scope: 'client' | 'server'; + env?: string; +}; + +type StringUIConfigField = BaseUIConfigField & { + type: 'string'; + placeholder?: string; + default?: string; +}; + +type SelectUIConfigFieldOptions = { + name: string; + key: string; + value: string; +}; + +type SelectUIConfigField = BaseUIConfigField & { + type: 'select'; + default?: string; + options: SelectUIConfigFieldOptions[]; +}; + +type PasswordUIConfigField = BaseUIConfigField & { + type: 'password'; + placeholder?: string; + default?: string; +}; + +type UIConfigField = + | StringUIConfigField + | SelectUIConfigField + | PasswordUIConfigField; + +type ConfigModelProvider = { + id: string; + name: string; + type: string; + chatModels: string[]; + embeddingModels: string[]; + config: { [key: string]: any }; + hash: string; +}; + +type Config = { + version: number; + general: { + [key: string]: any; + }; + modelProviders: ConfigModelProvider[]; +}; + +type EnvMap = { + [key: string]: { + fieldKey: string; + providerKey: string; + }; +}; + +type ModelProviderUISection = { + name: 
string; + key: string; + fields: UIConfigField[]; +}; + +type UIConfigSections = { + general: UIConfigField[]; + modelProviders: ModelProviderUISection[]; +}; + +export type { + UIConfigField, + Config, + EnvMap, + UIConfigSections, + ModelProviderUISection, + ConfigModelProvider, +}; From 387da5dbdd00992a6b7ca8df35cbac3380fdb3cd Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Sat, 11 Oct 2025 18:00:31 +0530 Subject: [PATCH 04/53] feat(instrumentation): run config migrations --- src/instrumentation.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/instrumentation.ts b/src/instrumentation.ts index cd85715..cff0d44 100644 --- a/src/instrumentation.ts +++ b/src/instrumentation.ts @@ -7,5 +7,7 @@ export const register = async () => { } catch (error) { console.error('Failed to run database migrations:', error); } + + await import('./lib/config/index'); } }; From e7fbab12edb53f573421cd6a87089b56b2c998cd Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Mon, 13 Oct 2025 21:58:30 +0530 Subject: [PATCH 05/53] feat(config): add `getConfig` method --- src/lib/config/index.ts | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/src/lib/config/index.ts b/src/lib/config/index.ts index e8c3b5d..79be5a7 100644 --- a/src/lib/config/index.ts +++ b/src/lib/config/index.ts @@ -68,11 +68,11 @@ class ConfigManager { } } - this.currentConfig = this.migrateConfigNeeded(this.currentConfig); + this.currentConfig = this.migrateConfig(this.currentConfig); } } - private migrateConfigNeeded(config: Config): Config { + private migrateConfig(config: Config): Config { /* TODO: Add migrations */ return config; } @@ -132,6 +132,22 @@ class ConfigManager { this.saveConfig(); } + + public getConfig(key: string, defaultValue?: any): any { + const nested = key.split('.'); + let obj: any = this.currentConfig; + + for (let i = 0; i < nested.length; i++) { + const 
part = nested[i]; + if (obj == null) return defaultValue; + + obj = obj[part]; + } + + return obj === undefined ? defaultValue : obj; + } } -new ConfigManager(); +const configManager = new ConfigManager(); + +export default configManager \ No newline at end of file From e45a9af9ffbd3fe3a09defc66277cd7d6879e0b2 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Mon, 13 Oct 2025 22:01:17 +0530 Subject: [PATCH 06/53] feat(config): add client side and server side config registries --- src/lib/config/clientRegistry.ts | 13 +++++++++++++ src/lib/config/serverRegistry.ts | 1 + 2 files changed, 14 insertions(+) create mode 100644 src/lib/config/clientRegistry.ts create mode 100644 src/lib/config/serverRegistry.ts diff --git a/src/lib/config/clientRegistry.ts b/src/lib/config/clientRegistry.ts new file mode 100644 index 0000000..8a63bd7 --- /dev/null +++ b/src/lib/config/clientRegistry.ts @@ -0,0 +1,13 @@ +"use client" + +const getClientConfig = (key: string, defaultVal?: any) => { + return localStorage.getItem(key) ?? defaultVal ?? 
undefined +} + +export const getTheme = () => getClientConfig('theme', 'dark') + +export const getAutoImageSearch = () => Boolean(getClientConfig('autoImageSearch', 'true')) + +export const getAutoVideoSearch = () => Boolean(getClientConfig('autoVideoSearch', 'true')) + +export const getSystemInstructions = () => getClientConfig('systemInstructions', '') \ No newline at end of file diff --git a/src/lib/config/serverRegistry.ts b/src/lib/config/serverRegistry.ts new file mode 100644 index 0000000..6cdbbd6 --- /dev/null +++ b/src/lib/config/serverRegistry.ts @@ -0,0 +1 @@ +/* TODO: add server opts */ \ No newline at end of file From 999553877dd48289d520292d50be4891945e4df8 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Tue, 14 Oct 2025 10:05:31 +0530 Subject: [PATCH 07/53] feat(app): lint & beautify --- src/lib/config/clientRegistry.ts | 17 +++--- src/lib/config/index.ts | 2 +- src/lib/config/serverRegistry.ts | 2 +- tailwind.config.ts | 5 +- yarn.lock | 98 +++++++++++++++++++++++++++++++- 5 files changed, 113 insertions(+), 11 deletions(-) diff --git a/src/lib/config/clientRegistry.ts b/src/lib/config/clientRegistry.ts index 8a63bd7..6d0c0d1 100644 --- a/src/lib/config/clientRegistry.ts +++ b/src/lib/config/clientRegistry.ts @@ -1,13 +1,16 @@ -"use client" +'use client'; const getClientConfig = (key: string, defaultVal?: any) => { - return localStorage.getItem(key) ?? defaultVal ?? undefined -} + return localStorage.getItem(key) ?? defaultVal ?? 
undefined; +}; -export const getTheme = () => getClientConfig('theme', 'dark') +export const getTheme = () => getClientConfig('theme', 'dark'); -export const getAutoImageSearch = () => Boolean(getClientConfig('autoImageSearch', 'true')) +export const getAutoImageSearch = () => + Boolean(getClientConfig('autoImageSearch', 'true')); -export const getAutoVideoSearch = () => Boolean(getClientConfig('autoVideoSearch', 'true')) +export const getAutoVideoSearch = () => + Boolean(getClientConfig('autoVideoSearch', 'true')); -export const getSystemInstructions = () => getClientConfig('systemInstructions', '') \ No newline at end of file +export const getSystemInstructions = () => + getClientConfig('systemInstructions', ''); diff --git a/src/lib/config/index.ts b/src/lib/config/index.ts index 79be5a7..ccb9c1e 100644 --- a/src/lib/config/index.ts +++ b/src/lib/config/index.ts @@ -150,4 +150,4 @@ class ConfigManager { const configManager = new ConfigManager(); -export default configManager \ No newline at end of file +export default configManager; diff --git a/src/lib/config/serverRegistry.ts b/src/lib/config/serverRegistry.ts index 6cdbbd6..a8df739 100644 --- a/src/lib/config/serverRegistry.ts +++ b/src/lib/config/serverRegistry.ts @@ -1 +1 @@ -/* TODO: add server opts */ \ No newline at end of file +/* TODO: add server opts */ diff --git a/tailwind.config.ts b/tailwind.config.ts index 1b1a08b..2dc02e0 100644 --- a/tailwind.config.ts +++ b/tailwind.config.ts @@ -49,6 +49,9 @@ const config: Config = { }, }, }, - plugins: [require('@tailwindcss/typography'), require('@headlessui/tailwindcss')({ prefix: 'headless' })], + plugins: [ + require('@tailwindcss/typography'), + require('@headlessui/tailwindcss')({ prefix: 'headless' }), + ], }; export default config; diff --git a/yarn.lock b/yarn.lock index ab38dd5..3aa8070 100644 --- a/yarn.lock +++ b/yarn.lock @@ -39,6 +39,13 @@ resolved 
"https://registry.yarnpkg.com/@colors/colors/-/colors-1.6.0.tgz#ec6cd237440700bc23ca23087f513c75508958b0" integrity sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA== +"@cspotcode/source-map-support@^0.8.0": + version "0.8.1" + resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" + integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== + dependencies: + "@jridgewell/trace-mapping" "0.3.9" + "@dabh/diagnostics@^2.0.2": version "2.0.3" resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.3.tgz#7f7e97ee9a725dffc7808d93668cc984e1dc477a" @@ -580,7 +587,7 @@ "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping" "^0.3.24" -"@jridgewell/resolve-uri@^3.1.0": +"@jridgewell/resolve-uri@^3.0.3", "@jridgewell/resolve-uri@^3.1.0": version "3.1.2" resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== @@ -595,6 +602,14 @@ resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== +"@jridgewell/trace-mapping@0.3.9": + version "0.3.9" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" + integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping@^0.3.24": version "0.3.25" resolved 
"https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0" @@ -932,6 +947,26 @@ resolved "https://registry.yarnpkg.com/@tanstack/virtual-core/-/virtual-core-3.10.9.tgz#55710c92b311fdaa8d8c66682a0dbdd684bc77c4" integrity sha512-kBknKOKzmeR7lN+vSadaKWXaLS0SZZG+oqpQ/k80Q6g9REn6zRHS/ZYdrIzHnpHgy/eWs00SujveUN/GJT2qTw== +"@tsconfig/node10@^1.0.7": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.11.tgz#6ee46400685f130e278128c7b38b7e031ff5b2f2" + integrity sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw== + +"@tsconfig/node12@^1.0.7": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== + +"@tsconfig/node14@^1.0.0": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== + +"@tsconfig/node16@^1.0.2": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9" + integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA== + "@types/better-sqlite3@^7.6.12": version "7.6.12" resolved "https://registry.yarnpkg.com/@types/better-sqlite3/-/better-sqlite3-7.6.12.tgz#e5712d46d71097dcc2775c0b068072eadc15deb7" @@ -1124,6 +1159,18 @@ acorn-jsx@^5.3.2: resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== +acorn-walk@^8.1.1: + version "8.3.4" + resolved 
"https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.4.tgz#794dd169c3977edf4ba4ea47583587c5866236b7" + integrity sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g== + dependencies: + acorn "^8.11.0" + +acorn@^8.11.0, acorn@^8.4.1: + version "8.15.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.15.0.tgz#a360898bc415edaac46c8241f6383975b930b816" + integrity sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg== + acorn@^8.9.0: version "8.11.3" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a" @@ -1186,6 +1233,11 @@ anymatch@~3.1.2: normalize-path "^3.0.0" picomatch "^2.0.4" +arg@^4.1.0: + version "4.1.3" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" + integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== + arg@^5.0.2: version "5.0.2" resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" @@ -1744,6 +1796,11 @@ core-util-is@~1.0.0: resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== +create-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== + cross-fetch@^3.1.5: version "3.2.0" resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.2.0.tgz#34e9192f53bc757d6614304d9e5e6fb4edb782e3" @@ -1888,6 +1945,11 @@ didyoumean@^1.2.2: resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" integrity 
sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== +diff@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== + dingbat-to-unicode@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/dingbat-to-unicode/-/dingbat-to-unicode-1.0.1.tgz#5091dd673241453e6b5865e26e5a4452cdef5c83" @@ -3515,6 +3577,11 @@ lucide-react@^0.363.0: resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.363.0.tgz#2bb1f9d09b830dda86f5118fcd097f87247fe0e3" integrity sha512-AlsfPCsXQyQx7wwsIgzcKOL9LwC498LIMAo+c0Es5PkHJa33xwmYAkkSoKoJWWWSYQEStqu58/jT4tL2gi32uQ== +make-error@^1.1.1: + version "1.3.6" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== + mammoth@^1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/mammoth/-/mammoth-1.9.1.tgz#b544c26747a412b5b00a11aa80477c6796860eaf" @@ -4976,6 +5043,25 @@ ts-interface-checker@^0.1.9: resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699" integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== +ts-node@^10.9.2: + version "10.9.2" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.2.tgz#70f021c9e185bccdca820e26dc413805c101c71f" + integrity sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ== + dependencies: + "@cspotcode/source-map-support" "^0.8.0" + "@tsconfig/node10" "^1.0.7" + "@tsconfig/node12" "^1.0.7" + "@tsconfig/node14" "^1.0.0" + "@tsconfig/node16" "^1.0.2" + acorn "^8.4.1" + acorn-walk "^8.1.1" + arg "^4.1.0" + create-require 
"^1.1.0" + diff "^4.0.1" + make-error "^1.1.1" + v8-compile-cache-lib "^3.0.1" + yn "3.1.1" + tsconfig-paths@^3.15.0: version "3.15.0" resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz#5299ec605e55b1abb23ec939ef15edaf483070d4" @@ -5143,6 +5229,11 @@ uuid@^9.0.1: resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30" integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA== +v8-compile-cache-lib@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" + integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== + validate.io-array@^1.0.3, validate.io-array@^1.0.5: version "1.0.6" resolved "https://registry.yarnpkg.com/validate.io-array/-/validate.io-array-1.0.6.tgz#5b5a2cafd8f8b85abb2f886ba153f2d93a27774d" @@ -5360,6 +5451,11 @@ yet-another-react-lightbox@^3.17.2: resolved "https://registry.yarnpkg.com/yet-another-react-lightbox/-/yet-another-react-lightbox-3.17.2.tgz#00474b83189ec4d81302792211ca31ffb808554c" integrity sha512-fM+Br5nR2kt/oBAOHDqVdUmogiHRLCt4iuIJHPS9Q+ME+h+ciME6vEpLt3IPgGU8whib1agEyZBgWJOAKjgadQ== +yn@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== + yocto-queue@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" From 64c4514cad782e5b051e7ff8fa4ad3e6c28b1bb3 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Tue, 14 Oct 2025 13:03:17 +0530 Subject: [PATCH 08/53] feat(models): add `id` & `name` fields, move `getUIConfigSection` to seperate file --- 
src/lib/models/providers/baseProvider.ts | 27 +++++++++- src/lib/models/providers/index.ts | 21 ++++++++ src/lib/models/providers/openai.ts | 28 +++++----- src/lib/models/registry.ts | 68 +++++++++++++++--------- 4 files changed, 107 insertions(+), 37 deletions(-) create mode 100644 src/lib/models/providers/index.ts diff --git a/src/lib/models/providers/baseProvider.ts b/src/lib/models/providers/baseProvider.ts index 1d7ef47..980a2b2 100644 --- a/src/lib/models/providers/baseProvider.ts +++ b/src/lib/models/providers/baseProvider.ts @@ -4,7 +4,11 @@ import { Model, ModelList, ProviderMetadata } from '../types'; import { UIConfigField } from '@/lib/config/types'; abstract class BaseModelProvider { - constructor(protected config: CONFIG) {} + constructor( + protected id: string, + protected name: string, + protected config: CONFIG, + ) {} abstract getDefaultModels(): Promise; abstract getModelList(): Promise; abstract loadChatModel(modelName: string): Promise; @@ -15,6 +19,27 @@ abstract class BaseModelProvider { static getProviderMetadata(): ProviderMetadata { throw new Error('Method not Implemented.'); } + static parseAndValidate(raw: any): any { + /* Static methods can't access class type parameters */ + throw new Error('Method not Implemented.'); + } } +export type ProviderConstructor = { + new (id: string, name: string, config: CONFIG): BaseModelProvider; + parseAndValidate(raw: any): CONFIG; + getProviderConfigFields: () => UIConfigField[]; + getProviderMetadata: () => ProviderMetadata; +}; + +export const createProviderInstance =
<CONFIG, P extends ProviderConstructor<CONFIG>,
>( + Provider: P, + id: string, + name: string, + rawConfig: unknown, +): InstanceType
<P>
=> { + const cfg = Provider.parseAndValidate(rawConfig); + return new Provider(id, name, cfg) as InstanceType
<P>
; +}; + export default BaseModelProvider; diff --git a/src/lib/models/providers/index.ts b/src/lib/models/providers/index.ts new file mode 100644 index 0000000..b893400 --- /dev/null +++ b/src/lib/models/providers/index.ts @@ -0,0 +1,21 @@ +import { ModelProviderUISection } from '@/lib/config/types'; +import { ProviderConstructor } from './baseProvider'; +import OpenAIProvider from './openai'; + +export const providers: Record> = { + openai: OpenAIProvider, +}; + +export const getModelProvidersUIConfigSection = + (): ModelProviderUISection[] => { + return Object.entries(providers).map(([k, p]) => { + const configFields = p.getProviderConfigFields(); + const metadata = p.getProviderMetadata(); + + return { + fields: configFields, + key: k, + name: metadata.name, + }; + }); + }; diff --git a/src/lib/models/providers/openai.ts b/src/lib/models/providers/openai.ts index 5569b97..fee3b52 100644 --- a/src/lib/models/providers/openai.ts +++ b/src/lib/models/providers/openai.ts @@ -6,7 +6,6 @@ import { Embeddings } from '@langchain/core/embeddings'; import { UIConfigField } from '@/lib/config/types'; interface OpenAIConfig { - name: string; apiKey: string; baseURL: string; } @@ -90,15 +89,6 @@ const defaultEmbeddingModels: Model[] = [ ]; const providerConfigFields: UIConfigField[] = [ - /* { - type: 'string', - name: 'Name (Optional)', - key: 'name', - description: 'An optional name for this provider configuration', - required: false, - placeholder: 'Provider Name', - scope: 'server', - }, */ /* FOR NAME DIRECTLY CREATE INPUT IN FRONTEND */ { type: 'password', name: 'API Key', @@ -123,8 +113,8 @@ const providerConfigFields: UIConfigField[] = [ ]; class OpenAIProvider extends BaseModelProvider { - constructor(config: OpenAIConfig) { - super(config); + constructor(id: string, name: string, config: OpenAIConfig) { + super(id, name, config); } async getDefaultModels(): Promise { @@ -192,6 +182,20 @@ class OpenAIProvider extends BaseModelProvider { }); } + static 
parseAndValidate(raw: any): OpenAIConfig { + if (!raw || typeof raw !== 'object') + throw new Error('Invalid config provided. Expected object'); + if (!raw.apiKey || !raw.baseURL) + throw new Error( + 'Invalid config provided. API key and base URL must be provided', + ); + + return { + apiKey: String(raw.apiKey), + baseURL: String(raw.baseURL), + }; + } + static getProviderConfigFields(): UIConfigField[] { return providerConfigFields; } diff --git a/src/lib/models/registry.ts b/src/lib/models/registry.ts index 5538e9e..2d4de6f 100644 --- a/src/lib/models/registry.ts +++ b/src/lib/models/registry.ts @@ -1,33 +1,53 @@ -import { ModelProviderUISection, UIConfigField } from '../config/types'; -import { ProviderMetadata } from './types'; -import BaseModelProvider from './providers/baseProvider'; -import OpenAIProvider from './providers/openai'; - -interface ProviderClass { - new (config: T): BaseModelProvider; - getProviderConfigFields(): UIConfigField[]; - getProviderMetadata(): ProviderMetadata; -} - -const providers: Record> = { - openai: OpenAIProvider, -}; +import { ConfigModelProvider } from '../config/types'; +import BaseModelProvider, { + createProviderInstance, +} from './providers/baseProvider'; +import { getConfiguredModelProviders } from '../config/serverRegistry'; +import { providers } from './providers'; +import { ModelList } from './types'; class ModelRegistry { - constructor() {} + activeProviders: (ConfigModelProvider & { + provider: BaseModelProvider; + })[] = []; - getUIConfigSection(): ModelProviderUISection[] { - return Object.entries(providers).map(([k, p]) => { - const configFields = p.getProviderConfigFields(); - const metadata = p.getProviderMetadata(); + constructor() { + this.initializeActiveProviders(); + } - return { - fields: configFields, - key: k, - name: metadata.name, - }; + private initializeActiveProviders() { + const configuredProviders = getConfiguredModelProviders(); + + configuredProviders.forEach((p) => { + try { + const provider 
= providers[p.type]; + if (!provider) throw new Error('Invalid provider type'); + + this.activeProviders.push({ + ...p, + provider: createProviderInstance(provider, p.id, p.name, p.config), + }); + } catch (err) { + console.error( + `Failed to initialize provider. Type: ${p.type}, ID: ${p.id}, Config: ${JSON.stringify(p.config)}, Error: ${err}`, + ); + } }); } + + async getActiveModels() { + const models: ModelList[] = []; + + await Promise.all( + this.activeProviders.map(async (p) => { + const m = await p.provider.getModelList(); + + models.push(m); + }), + ); + + return models; + } } export default ModelRegistry; From 77743949c75e9812f096777b94c1121adcf06514 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Tue, 14 Oct 2025 13:04:06 +0530 Subject: [PATCH 09/53] feat(configManager): fix circular import issues --- src/lib/config/index.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/lib/config/index.ts b/src/lib/config/index.ts index ccb9c1e..5bed79b 100644 --- a/src/lib/config/index.ts +++ b/src/lib/config/index.ts @@ -1,8 +1,8 @@ import path from 'node:path'; import fs from 'fs'; -import { Config, ConfigModelProvider, EnvMap, UIConfigSections } from './types'; -import ModelRegistry from '../models/registry'; +import { Config, ConfigModelProvider, UIConfigSections } from './types'; import { hashObj } from '../serverUtils'; +import { getModelProvidersUIConfigSection } from '../models/providers'; class ConfigManager { configPath: string = path.join( @@ -19,7 +19,6 @@ class ConfigManager { general: [], modelProviders: [], }; - modelRegistry = new ModelRegistry(); constructor() { this.initialize(); @@ -78,7 +77,7 @@ class ConfigManager { } private initializeFromEnv() { - const providerConfigSections = this.modelRegistry.getUIConfigSection(); + const providerConfigSections = getModelProvidersUIConfigSection(); this.uiConfigSections.modelProviders = providerConfigSections; From 
87226957f1399c5ce9d0322595aec1c44c20244b Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Tue, 14 Oct 2025 13:04:26 +0530 Subject: [PATCH 10/53] feat(serverRegistry): add server-side config opts --- src/lib/config/serverRegistry.ts | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/src/lib/config/serverRegistry.ts b/src/lib/config/serverRegistry.ts index a8df739..c21ce24 100644 --- a/src/lib/config/serverRegistry.ts +++ b/src/lib/config/serverRegistry.ts @@ -1 +1,12 @@ -/* TODO: add server opts */ +import configManager from './index'; +import { ConfigModelProvider } from './types'; + +export const getConfiguredModelProviders = (): ConfigModelProvider[] => { + return configManager.getConfig('modelProviders', []); +}; + +export const getConfiguredModelProviderById = ( + id: string, +): ConfigModelProvider | undefined => { + return getConfiguredModelProviders().find((p) => p.id === id) ?? undefined; +}; From a375de73cca02a201f00bf0d585ba7e87908e8ce Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Tue, 14 Oct 2025 13:05:19 +0530 Subject: [PATCH 11/53] feat(openAiProvider): load models from config as well --- src/lib/config/types.ts | 6 ++++-- src/lib/models/providers/openai.ts | 10 +++++++--- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/src/lib/config/types.ts b/src/lib/config/types.ts index 9cd89b9..ba5113c 100644 --- a/src/lib/config/types.ts +++ b/src/lib/config/types.ts @@ -1,3 +1,5 @@ +import { Model } from '../models/types'; + type BaseUIConfigField = { name: string; key: string; @@ -40,8 +42,8 @@ type ConfigModelProvider = { id: string; name: string; type: string; - chatModels: string[]; - embeddingModels: string[]; + chatModels: Model[]; + embeddingModels: Model[]; config: { [key: string]: any }; hash: string; }; diff --git a/src/lib/models/providers/openai.ts b/src/lib/models/providers/openai.ts index fee3b52..7e56b5a 
100644 --- a/src/lib/models/providers/openai.ts +++ b/src/lib/models/providers/openai.ts @@ -4,6 +4,7 @@ import BaseModelProvider from './baseProvider'; import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai'; import { Embeddings } from '@langchain/core/embeddings'; import { UIConfigField } from '@/lib/config/types'; +import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry'; interface OpenAIConfig { apiKey: string; @@ -132,12 +133,15 @@ class OpenAIProvider extends BaseModelProvider { } async getModelList(): Promise { - /* Todo: IMPLEMENT MODEL READING FROM CONFIG FILE */ const defaultModels = await this.getDefaultModels(); + const configProvider = getConfiguredModelProviderById(this.id)!; return { - embedding: [...defaultModels.embedding], - chat: [...defaultModels.chat], + embedding: [ + ...defaultModels.embedding, + ...configProvider.embeddingModels, + ], + chat: [...defaultModels.chat, ...configProvider.chatModels], }; } From c02e535f4c381f773874df2bb983041816f67e67 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Tue, 14 Oct 2025 13:05:56 +0530 Subject: [PATCH 12/53] feat(openAiProvider): use `find` instead of `filter` --- src/lib/models/providers/openai.ts | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/lib/models/providers/openai.ts b/src/lib/models/providers/openai.ts index 7e56b5a..6055b34 100644 --- a/src/lib/models/providers/openai.ts +++ b/src/lib/models/providers/openai.ts @@ -148,7 +148,7 @@ class OpenAIProvider extends BaseModelProvider { async loadChatModel(key: string): Promise { const modelList = await this.getModelList(); - const exists = modelList.chat.filter((m) => m.key === key); + const exists = modelList.chat.find((m) => m.key === key); if (!exists) { throw new Error( @@ -168,8 +168,7 @@ class OpenAIProvider extends BaseModelProvider { async loadEmbeddingModel(key: string): Promise { const modelList = await this.getModelList(); - - const 
exists = modelList.chat.filter((m) => m.key === key); + const exists = modelList.embedding.find((m) => m.key === key); if (!exists) { throw new Error( From 6d615283472f6b3ae42a522a89209ea030f307c8 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Tue, 14 Oct 2025 13:33:14 +0530 Subject: [PATCH 13/53] feat(configManager): add update & removal methods --- src/lib/config/index.ts | 48 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/src/lib/config/index.ts b/src/lib/config/index.ts index 5bed79b..5314df3 100644 --- a/src/lib/config/index.ts +++ b/src/lib/config/index.ts @@ -145,6 +145,54 @@ class ConfigManager { return obj === undefined ? defaultValue : obj; } + + public updateConfig(key: string, val: any) { + const parts = key.split('.'); + if (parts.length === 0) return; + + let target: any = this.currentConfig; + for (let i = 0; i < parts.length - 1; i++) { + const part = parts[i]; + if (target[part] === null || typeof target[part] !== 'object') { + target[part] = {}; + } + + target = target[part]; + } + + const finalKey = parts[parts.length - 1]; + target[finalKey] = val; + + this.saveConfig(); + } + + public addModelProvider(type: string, name: string, config: any) { + const newModelProvider: ConfigModelProvider = { + id: crypto.randomUUID(), + name, + type, + config, + chatModels: [], + embeddingModels: [], + hash: hashObj(config), + }; + + this.currentConfig.modelProviders.push(newModelProvider); + this.saveConfig(); + } + + public removeModelProvider(id: string) { + const index = this.currentConfig.modelProviders.findIndex( + (p) => p.id === id, + ); + + if (index === -1) return; + + this.currentConfig.modelProviders = + this.currentConfig.modelProviders.filter((p) => p.id !== id); + + this.saveConfig(); + } } const configManager = new ConfigManager(); From 4ee317336810d26905cecddc6e5e3a86a1e22abd Mon Sep 17 00:00:00 2001 From: ItzCrazyKns 
<95534749+ItzCrazyKns@users.noreply.github.com> Date: Wed, 15 Oct 2025 09:04:48 +0530 Subject: [PATCH 14/53] feat(config): add setupComplete --- src/lib/config/index.ts | 13 +++++++++++++ src/lib/config/types.ts | 1 + 2 files changed, 14 insertions(+) diff --git a/src/lib/config/index.ts b/src/lib/config/index.ts index 5314df3..1fbea38 100644 --- a/src/lib/config/index.ts +++ b/src/lib/config/index.ts @@ -12,6 +12,7 @@ class ConfigManager { configVersion = 1; currentConfig: Config = { version: this.configVersion, + setupComplete: false, general: {}, modelProviders: [], }; @@ -193,6 +194,18 @@ class ConfigManager { this.saveConfig(); } + + public isSetupComplete() { + return this.currentConfig.setupComplete + } + + public markSetupComplete() { + if (!this.currentConfig.setupComplete) { + this.currentConfig.setupComplete = true + } + + this.saveConfig() + } } const configManager = new ConfigManager(); diff --git a/src/lib/config/types.ts b/src/lib/config/types.ts index ba5113c..34fdcb9 100644 --- a/src/lib/config/types.ts +++ b/src/lib/config/types.ts @@ -50,6 +50,7 @@ type ConfigModelProvider = { type Config = { version: number; + setupComplete: boolean; general: { [key: string]: any; }; From cc5eea17e4779c702cf23072e29e20bd61b257f3 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Wed, 15 Oct 2025 12:53:05 +0530 Subject: [PATCH 15/53] feat(app): remove old providers & registry --- src/lib/config.ts | 158 --------------------------- src/lib/providers/aimlapi.ts | 94 ----------------- src/lib/providers/anthropic.ts | 78 -------------- src/lib/providers/deepseek.ts | 49 --------- src/lib/providers/gemini.ts | 114 -------------------- src/lib/providers/groq.ts | 44 -------- src/lib/providers/index.ts | 170 ------------------------------ src/lib/providers/lemonade.ts | 94 ----------------- src/lib/providers/lmstudio.ts | 100 ------------------ src/lib/providers/ollama.ts | 86 --------------- src/lib/providers/openai.ts | 
159 ---------------------------- src/lib/providers/transformers.ts | 36 ------- 12 files changed, 1182 deletions(-) delete mode 100644 src/lib/config.ts delete mode 100644 src/lib/providers/aimlapi.ts delete mode 100644 src/lib/providers/anthropic.ts delete mode 100644 src/lib/providers/deepseek.ts delete mode 100644 src/lib/providers/gemini.ts delete mode 100644 src/lib/providers/groq.ts delete mode 100644 src/lib/providers/index.ts delete mode 100644 src/lib/providers/lemonade.ts delete mode 100644 src/lib/providers/lmstudio.ts delete mode 100644 src/lib/providers/ollama.ts delete mode 100644 src/lib/providers/openai.ts delete mode 100644 src/lib/providers/transformers.ts diff --git a/src/lib/config.ts b/src/lib/config.ts deleted file mode 100644 index b79ec94..0000000 --- a/src/lib/config.ts +++ /dev/null @@ -1,158 +0,0 @@ -import toml from '@iarna/toml'; - -// Use dynamic imports for Node.js modules to prevent client-side errors -let fs: any; -let path: any; -if (typeof window === 'undefined') { - // We're on the server - fs = require('fs'); - path = require('path'); -} - -const configFileName = 'config.toml'; - -interface Config { - GENERAL: { - SIMILARITY_MEASURE: string; - KEEP_ALIVE: string; - }; - MODELS: { - OPENAI: { - API_KEY: string; - }; - GROQ: { - API_KEY: string; - }; - ANTHROPIC: { - API_KEY: string; - }; - GEMINI: { - API_KEY: string; - }; - OLLAMA: { - API_URL: string; - API_KEY: string; - }; - DEEPSEEK: { - API_KEY: string; - }; - AIMLAPI: { - API_KEY: string; - }; - LM_STUDIO: { - API_URL: string; - }; - LEMONADE: { - API_URL: string; - API_KEY: string; - }; - CUSTOM_OPENAI: { - API_URL: string; - API_KEY: string; - MODEL_NAME: string; - }; - }; - API_ENDPOINTS: { - SEARXNG: string; - }; -} - -type RecursivePartial = { - [P in keyof T]?: RecursivePartial; -}; - -const loadConfig = () => { - // Server-side only - if (typeof window === 'undefined') { - return toml.parse( - fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'), 
- ) as any as Config; - } - - // Client-side fallback - settings will be loaded via API - return {} as Config; -}; - -export const getSimilarityMeasure = () => - loadConfig().GENERAL.SIMILARITY_MEASURE; - -export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE; - -export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY; - -export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY; - -export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY; - -export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY; - -export const getSearxngApiEndpoint = () => - process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG; - -export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL; - -export const getOllamaApiKey = () => loadConfig().MODELS.OLLAMA.API_KEY; - -export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY; - -export const getAimlApiKey = () => loadConfig().MODELS.AIMLAPI.API_KEY; - -export const getCustomOpenaiApiKey = () => - loadConfig().MODELS.CUSTOM_OPENAI.API_KEY; - -export const getCustomOpenaiApiUrl = () => - loadConfig().MODELS.CUSTOM_OPENAI.API_URL; - -export const getCustomOpenaiModelName = () => - loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME; - -export const getLMStudioApiEndpoint = () => - loadConfig().MODELS.LM_STUDIO.API_URL; - -export const getLemonadeApiEndpoint = () => - loadConfig().MODELS.LEMONADE.API_URL; - -export const getLemonadeApiKey = () => loadConfig().MODELS.LEMONADE.API_KEY; - -const mergeConfigs = (current: any, update: any): any => { - if (update === null || update === undefined) { - return current; - } - - if (typeof current !== 'object' || current === null) { - return update; - } - - const result = { ...current }; - - for (const key in update) { - if (Object.prototype.hasOwnProperty.call(update, key)) { - const updateValue = update[key]; - - if ( - typeof updateValue === 'object' && - updateValue !== null && - typeof 
result[key] === 'object' && - result[key] !== null - ) { - result[key] = mergeConfigs(result[key], updateValue); - } else if (updateValue !== undefined) { - result[key] = updateValue; - } - } - } - - return result; -}; - -export const updateConfig = (config: RecursivePartial) => { - // Server-side only - if (typeof window === 'undefined') { - const currentConfig = loadConfig(); - const mergedConfig = mergeConfigs(currentConfig, config); - fs.writeFileSync( - path.join(path.join(process.cwd(), `${configFileName}`)), - toml.stringify(mergedConfig), - ); - } -}; diff --git a/src/lib/providers/aimlapi.ts b/src/lib/providers/aimlapi.ts deleted file mode 100644 index 9c982fe..0000000 --- a/src/lib/providers/aimlapi.ts +++ /dev/null @@ -1,94 +0,0 @@ -import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai'; -import { getAimlApiKey } from '../config'; -import { ChatModel, EmbeddingModel } from '.'; -import { BaseChatModel } from '@langchain/core/language_models/chat_models'; -import { Embeddings } from '@langchain/core/embeddings'; -import axios from 'axios'; - -export const PROVIDER_INFO = { - key: 'aimlapi', - displayName: 'AI/ML API', -}; - -interface AimlApiModel { - id: string; - name?: string; - type?: string; -} - -const API_URL = 'https://api.aimlapi.com'; - -export const loadAimlApiChatModels = async () => { - const apiKey = getAimlApiKey(); - - if (!apiKey) return {}; - - try { - const response = await axios.get(`${API_URL}/models`, { - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${apiKey}`, - }, - }); - - const chatModels: Record = {}; - - response.data.data.forEach((model: AimlApiModel) => { - if (model.type === 'chat-completion') { - chatModels[model.id] = { - displayName: model.name || model.id, - model: new ChatOpenAI({ - apiKey: apiKey, - modelName: model.id, - temperature: 0.7, - configuration: { - baseURL: API_URL, - }, - }) as unknown as BaseChatModel, - }; - } - }); - - return chatModels; - } catch (err) { - 
console.error(`Error loading AI/ML API models: ${err}`); - return {}; - } -}; - -export const loadAimlApiEmbeddingModels = async () => { - const apiKey = getAimlApiKey(); - - if (!apiKey) return {}; - - try { - const response = await axios.get(`${API_URL}/models`, { - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${apiKey}`, - }, - }); - - const embeddingModels: Record = {}; - - response.data.data.forEach((model: AimlApiModel) => { - if (model.type === 'embedding') { - embeddingModels[model.id] = { - displayName: model.name || model.id, - model: new OpenAIEmbeddings({ - apiKey: apiKey, - modelName: model.id, - configuration: { - baseURL: API_URL, - }, - }) as unknown as Embeddings, - }; - } - }); - - return embeddingModels; - } catch (err) { - console.error(`Error loading AI/ML API embeddings models: ${err}`); - return {}; - } -}; diff --git a/src/lib/providers/anthropic.ts b/src/lib/providers/anthropic.ts deleted file mode 100644 index 6af2115..0000000 --- a/src/lib/providers/anthropic.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { ChatAnthropic } from '@langchain/anthropic'; -import { ChatModel } from '.'; -import { getAnthropicApiKey } from '../config'; - -export const PROVIDER_INFO = { - key: 'anthropic', - displayName: 'Anthropic', -}; -import { BaseChatModel } from '@langchain/core/language_models/chat_models'; - -const anthropicChatModels: Record[] = [ - { - displayName: 'Claude 4.1 Opus', - key: 'claude-opus-4-1-20250805', - }, - { - displayName: 'Claude 4 Opus', - key: 'claude-opus-4-20250514', - }, - { - displayName: 'Claude 4 Sonnet', - key: 'claude-sonnet-4-20250514', - }, - { - displayName: 'Claude 3.7 Sonnet', - key: 'claude-3-7-sonnet-20250219', - }, - { - displayName: 'Claude 3.5 Haiku', - key: 'claude-3-5-haiku-20241022', - }, - { - displayName: 'Claude 3.5 Sonnet v2', - key: 'claude-3-5-sonnet-20241022', - }, - { - displayName: 'Claude 3.5 Sonnet', - key: 'claude-3-5-sonnet-20240620', - }, - { - displayName: 'Claude 3 
Opus', - key: 'claude-3-opus-20240229', - }, - { - displayName: 'Claude 3 Sonnet', - key: 'claude-3-sonnet-20240229', - }, - { - displayName: 'Claude 3 Haiku', - key: 'claude-3-haiku-20240307', - }, -]; - -export const loadAnthropicChatModels = async () => { - const anthropicApiKey = getAnthropicApiKey(); - - if (!anthropicApiKey) return {}; - - try { - const chatModels: Record = {}; - - anthropicChatModels.forEach((model) => { - chatModels[model.key] = { - displayName: model.displayName, - model: new ChatAnthropic({ - apiKey: anthropicApiKey, - modelName: model.key, - temperature: 0.7, - }) as unknown as BaseChatModel, - }; - }); - - return chatModels; - } catch (err) { - console.error(`Error loading Anthropic models: ${err}`); - return {}; - } -}; diff --git a/src/lib/providers/deepseek.ts b/src/lib/providers/deepseek.ts deleted file mode 100644 index 9c9ef5a..0000000 --- a/src/lib/providers/deepseek.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { ChatOpenAI } from '@langchain/openai'; -import { getDeepseekApiKey } from '../config'; -import { ChatModel } from '.'; -import { BaseChatModel } from '@langchain/core/language_models/chat_models'; - -export const PROVIDER_INFO = { - key: 'deepseek', - displayName: 'Deepseek AI', -}; - -const deepseekChatModels: Record[] = [ - { - displayName: 'Deepseek Chat (Deepseek V3)', - key: 'deepseek-chat', - }, - { - displayName: 'Deepseek Reasoner (Deepseek R1)', - key: 'deepseek-reasoner', - }, -]; - -export const loadDeepseekChatModels = async () => { - const deepseekApiKey = getDeepseekApiKey(); - - if (!deepseekApiKey) return {}; - - try { - const chatModels: Record = {}; - - deepseekChatModels.forEach((model) => { - chatModels[model.key] = { - displayName: model.displayName, - model: new ChatOpenAI({ - apiKey: deepseekApiKey, - modelName: model.key, - temperature: 0.7, - configuration: { - baseURL: 'https://api.deepseek.com', - }, - }) as unknown as BaseChatModel, - }; - }); - - return chatModels; - } catch (err) { - 
console.error(`Error loading Deepseek models: ${err}`); - return {}; - } -}; diff --git a/src/lib/providers/gemini.ts b/src/lib/providers/gemini.ts deleted file mode 100644 index 418e0a4..0000000 --- a/src/lib/providers/gemini.ts +++ /dev/null @@ -1,114 +0,0 @@ -import { - ChatGoogleGenerativeAI, - GoogleGenerativeAIEmbeddings, -} from '@langchain/google-genai'; -import { getGeminiApiKey } from '../config'; -import { ChatModel, EmbeddingModel } from '.'; - -export const PROVIDER_INFO = { - key: 'gemini', - displayName: 'Google Gemini', -}; -import { BaseChatModel } from '@langchain/core/language_models/chat_models'; -import { Embeddings } from '@langchain/core/embeddings'; - -const geminiChatModels: Record[] = [ - { - displayName: 'Gemini 2.5 Flash', - key: 'gemini-2.5-flash', - }, - { - displayName: 'Gemini 2.5 Flash-Lite', - key: 'gemini-2.5-flash-lite', - }, - { - displayName: 'Gemini 2.5 Pro', - key: 'gemini-2.5-pro', - }, - { - displayName: 'Gemini 2.0 Flash', - key: 'gemini-2.0-flash', - }, - { - displayName: 'Gemini 2.0 Flash-Lite', - key: 'gemini-2.0-flash-lite', - }, - { - displayName: 'Gemini 2.0 Flash Thinking Experimental', - key: 'gemini-2.0-flash-thinking-exp-01-21', - }, - { - displayName: 'Gemini 1.5 Flash', - key: 'gemini-1.5-flash', - }, - { - displayName: 'Gemini 1.5 Flash-8B', - key: 'gemini-1.5-flash-8b', - }, - { - displayName: 'Gemini 1.5 Pro', - key: 'gemini-1.5-pro', - }, -]; - -const geminiEmbeddingModels: Record[] = [ - { - displayName: 'Text Embedding 004', - key: 'models/text-embedding-004', - }, - { - displayName: 'Embedding 001', - key: 'models/embedding-001', - }, -]; - -export const loadGeminiChatModels = async () => { - const geminiApiKey = getGeminiApiKey(); - - if (!geminiApiKey) return {}; - - try { - const chatModels: Record = {}; - - geminiChatModels.forEach((model) => { - chatModels[model.key] = { - displayName: model.displayName, - model: new ChatGoogleGenerativeAI({ - apiKey: geminiApiKey, - model: model.key, - temperature: 
0.7, - }) as unknown as BaseChatModel, - }; - }); - - return chatModels; - } catch (err) { - console.error(`Error loading Gemini models: ${err}`); - return {}; - } -}; - -export const loadGeminiEmbeddingModels = async () => { - const geminiApiKey = getGeminiApiKey(); - - if (!geminiApiKey) return {}; - - try { - const embeddingModels: Record = {}; - - geminiEmbeddingModels.forEach((model) => { - embeddingModels[model.key] = { - displayName: model.displayName, - model: new GoogleGenerativeAIEmbeddings({ - apiKey: geminiApiKey, - modelName: model.key, - }) as unknown as Embeddings, - }; - }); - - return embeddingModels; - } catch (err) { - console.error(`Error loading Gemini embeddings models: ${err}`); - return {}; - } -}; diff --git a/src/lib/providers/groq.ts b/src/lib/providers/groq.ts deleted file mode 100644 index 4e7db51..0000000 --- a/src/lib/providers/groq.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { ChatGroq } from '@langchain/groq'; -import { getGroqApiKey } from '../config'; -import { ChatModel } from '.'; - -export const PROVIDER_INFO = { - key: 'groq', - displayName: 'Groq', -}; - -import { BaseChatModel } from '@langchain/core/language_models/chat_models'; - -export const loadGroqChatModels = async () => { - const groqApiKey = getGroqApiKey(); - if (!groqApiKey) return {}; - - try { - const res = await fetch('https://api.groq.com/openai/v1/models', { - method: 'GET', - headers: { - Authorization: `bearer ${groqApiKey}`, - 'Content-Type': 'application/json', - }, - }); - - const groqChatModels = (await res.json()).data; - const chatModels: Record = {}; - - groqChatModels.forEach((model: any) => { - chatModels[model.id] = { - displayName: model.id, - model: new ChatGroq({ - apiKey: groqApiKey, - model: model.id, - temperature: 0.7, - }) as unknown as BaseChatModel, - }; - }); - - return chatModels; - } catch (err) { - console.error(`Error loading Groq models: ${err}`); - return {}; - } -}; diff --git a/src/lib/providers/index.ts 
b/src/lib/providers/index.ts deleted file mode 100644 index d4e4248..0000000 --- a/src/lib/providers/index.ts +++ /dev/null @@ -1,170 +0,0 @@ -import { Embeddings } from '@langchain/core/embeddings'; -import { BaseChatModel } from '@langchain/core/language_models/chat_models'; -import { - loadOpenAIChatModels, - loadOpenAIEmbeddingModels, - PROVIDER_INFO as OpenAIInfo, - PROVIDER_INFO, -} from './openai'; -import { - getCustomOpenaiApiKey, - getCustomOpenaiApiUrl, - getCustomOpenaiModelName, -} from '../config'; -import { ChatOpenAI } from '@langchain/openai'; -import { - loadOllamaChatModels, - loadOllamaEmbeddingModels, - PROVIDER_INFO as OllamaInfo, -} from './ollama'; -import { loadGroqChatModels, PROVIDER_INFO as GroqInfo } from './groq'; -import { - loadAnthropicChatModels, - PROVIDER_INFO as AnthropicInfo, -} from './anthropic'; -import { - loadGeminiChatModels, - loadGeminiEmbeddingModels, - PROVIDER_INFO as GeminiInfo, -} from './gemini'; -import { - loadTransformersEmbeddingsModels, - PROVIDER_INFO as TransformersInfo, -} from './transformers'; -import { - loadDeepseekChatModels, - PROVIDER_INFO as DeepseekInfo, -} from './deepseek'; -import { - loadAimlApiChatModels, - loadAimlApiEmbeddingModels, - PROVIDER_INFO as AimlApiInfo, -} from './aimlapi'; -import { - loadLMStudioChatModels, - loadLMStudioEmbeddingsModels, - PROVIDER_INFO as LMStudioInfo, -} from './lmstudio'; -import { - loadLemonadeChatModels, - loadLemonadeEmbeddingModels, - PROVIDER_INFO as LemonadeInfo, -} from './lemonade'; - -export const PROVIDER_METADATA = { - openai: OpenAIInfo, - ollama: OllamaInfo, - groq: GroqInfo, - anthropic: AnthropicInfo, - gemini: GeminiInfo, - transformers: TransformersInfo, - deepseek: DeepseekInfo, - aimlapi: AimlApiInfo, - lmstudio: LMStudioInfo, - lemonade: LemonadeInfo, - custom_openai: { - key: 'custom_openai', - displayName: 'Custom OpenAI', - }, -}; - -export interface ChatModel { - displayName: string; - model: BaseChatModel; -} - -export interface 
EmbeddingModel { - displayName: string; - model: Embeddings; -} - -export const chatModelProviders: Record< - string, - () => Promise> -> = { - openai: loadOpenAIChatModels, - ollama: loadOllamaChatModels, - groq: loadGroqChatModels, - anthropic: loadAnthropicChatModels, - gemini: loadGeminiChatModels, - deepseek: loadDeepseekChatModels, - aimlapi: loadAimlApiChatModels, - lmstudio: loadLMStudioChatModels, - lemonade: loadLemonadeChatModels, -}; - -export const embeddingModelProviders: Record< - string, - () => Promise> -> = { - openai: loadOpenAIEmbeddingModels, - ollama: loadOllamaEmbeddingModels, - gemini: loadGeminiEmbeddingModels, - transformers: loadTransformersEmbeddingsModels, - aimlapi: loadAimlApiEmbeddingModels, - lmstudio: loadLMStudioEmbeddingsModels, - lemonade: loadLemonadeEmbeddingModels, -}; - -export const getAvailableChatModelProviders = async () => { - const models: Record> = {}; - - for (const provider in chatModelProviders) { - const providerModels = await chatModelProviders[provider](); - if (Object.keys(providerModels).length > 0) { - models[provider] = providerModels; - } - } - - const customOpenAiApiKey = getCustomOpenaiApiKey(); - const customOpenAiApiUrl = getCustomOpenaiApiUrl(); - const customOpenAiModelName = getCustomOpenaiModelName(); - - models['custom_openai'] = { - ...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName - ? { - [customOpenAiModelName]: { - displayName: customOpenAiModelName, - model: new ChatOpenAI({ - apiKey: customOpenAiApiKey, - modelName: customOpenAiModelName, - ...(() => { - const temperatureRestrictedModels = [ - 'gpt-5-nano', - 'gpt-5', - 'gpt-5-mini', - 'o1', - 'o3', - 'o3-mini', - 'o4-mini', - ]; - const isTemperatureRestricted = - temperatureRestrictedModels.some((restrictedModel) => - customOpenAiModelName.includes(restrictedModel), - ); - return isTemperatureRestricted ? 
{} : { temperature: 0.7 }; - })(), - configuration: { - baseURL: customOpenAiApiUrl, - }, - }) as unknown as BaseChatModel, - }, - } - : {}), - }; - - return models; -}; - -export const getAvailableEmbeddingModelProviders = async () => { - const models: Record> = {}; - - for (const provider in embeddingModelProviders) { - const providerModels = await embeddingModelProviders[provider](); - if (Object.keys(providerModels).length > 0) { - models[provider] = providerModels; - } - } - - return models; -}; diff --git a/src/lib/providers/lemonade.ts b/src/lib/providers/lemonade.ts deleted file mode 100644 index d87e678..0000000 --- a/src/lib/providers/lemonade.ts +++ /dev/null @@ -1,94 +0,0 @@ -import axios from 'axios'; -import { getLemonadeApiEndpoint, getLemonadeApiKey } from '../config'; -import { ChatModel, EmbeddingModel } from '.'; - -export const PROVIDER_INFO = { - key: 'lemonade', - displayName: 'Lemonade', -}; - -import { ChatOpenAI } from '@langchain/openai'; -import { OpenAIEmbeddings } from '@langchain/openai'; - -export const loadLemonadeChatModels = async () => { - const lemonadeApiEndpoint = getLemonadeApiEndpoint(); - const lemonadeApiKey = getLemonadeApiKey(); - - if (!lemonadeApiEndpoint) return {}; - - try { - const res = await axios.get(`${lemonadeApiEndpoint}/api/v1/models`, { - headers: { - 'Content-Type': 'application/json', - ...(lemonadeApiKey - ? 
{ Authorization: `Bearer ${lemonadeApiKey}` } - : {}), - }, - }); - - const { data: models } = res.data; - - const chatModels: Record = {}; - - models.forEach((model: any) => { - chatModels[model.id] = { - displayName: model.id, - model: new ChatOpenAI({ - apiKey: lemonadeApiKey || 'lemonade-key', - modelName: model.id, - temperature: 0.7, - configuration: { - baseURL: `${lemonadeApiEndpoint}/api/v1`, - }, - }), - }; - }); - - return chatModels; - } catch (err) { - console.error(`Error loading Lemonade models: ${err}`); - return {}; - } -}; - -export const loadLemonadeEmbeddingModels = async () => { - const lemonadeApiEndpoint = getLemonadeApiEndpoint(); - const lemonadeApiKey = getLemonadeApiKey(); - - if (!lemonadeApiEndpoint) return {}; - - try { - const res = await axios.get(`${lemonadeApiEndpoint}/api/v1/models`, { - headers: { - 'Content-Type': 'application/json', - ...(lemonadeApiKey - ? { Authorization: `Bearer ${lemonadeApiKey}` } - : {}), - }, - }); - - const { data: models } = res.data; - - const embeddingModels: Record = {}; - - // Filter models that support embeddings (if Lemonade provides this info) - // For now, we'll assume all models can be used for embeddings - models.forEach((model: any) => { - embeddingModels[model.id] = { - displayName: model.id, - model: new OpenAIEmbeddings({ - apiKey: lemonadeApiKey || 'lemonade-key', - modelName: model.id, - configuration: { - baseURL: `${lemonadeApiEndpoint}/api/v1`, - }, - }), - }; - }); - - return embeddingModels; - } catch (err) { - console.error(`Error loading Lemonade embedding models: ${err}`); - return {}; - } -}; diff --git a/src/lib/providers/lmstudio.ts b/src/lib/providers/lmstudio.ts deleted file mode 100644 index f79c0aa..0000000 --- a/src/lib/providers/lmstudio.ts +++ /dev/null @@ -1,100 +0,0 @@ -import { getKeepAlive, getLMStudioApiEndpoint } from '../config'; -import axios from 'axios'; -import { ChatModel, EmbeddingModel } from '.'; - -export const PROVIDER_INFO = { - key: 'lmstudio', - 
displayName: 'LM Studio', -}; -import { ChatOpenAI } from '@langchain/openai'; -import { OpenAIEmbeddings } from '@langchain/openai'; -import { BaseChatModel } from '@langchain/core/language_models/chat_models'; -import { Embeddings } from '@langchain/core/embeddings'; - -interface LMStudioModel { - id: string; - name?: string; -} - -const ensureV1Endpoint = (endpoint: string): string => - endpoint.endsWith('/v1') ? endpoint : `${endpoint}/v1`; - -const checkServerAvailability = async (endpoint: string): Promise => { - try { - await axios.get(`${ensureV1Endpoint(endpoint)}/models`, { - headers: { 'Content-Type': 'application/json' }, - }); - return true; - } catch { - return false; - } -}; - -export const loadLMStudioChatModels = async () => { - const endpoint = getLMStudioApiEndpoint(); - - if (!endpoint) return {}; - if (!(await checkServerAvailability(endpoint))) return {}; - - try { - const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, { - headers: { 'Content-Type': 'application/json' }, - }); - - const chatModels: Record = {}; - - response.data.data.forEach((model: LMStudioModel) => { - chatModels[model.id] = { - displayName: model.name || model.id, - model: new ChatOpenAI({ - apiKey: 'lm-studio', - configuration: { - baseURL: ensureV1Endpoint(endpoint), - }, - modelName: model.id, - temperature: 0.7, - streaming: true, - maxRetries: 3, - }) as unknown as BaseChatModel, - }; - }); - - return chatModels; - } catch (err) { - console.error(`Error loading LM Studio models: ${err}`); - return {}; - } -}; - -export const loadLMStudioEmbeddingsModels = async () => { - const endpoint = getLMStudioApiEndpoint(); - - if (!endpoint) return {}; - if (!(await checkServerAvailability(endpoint))) return {}; - - try { - const response = await axios.get(`${ensureV1Endpoint(endpoint)}/models`, { - headers: { 'Content-Type': 'application/json' }, - }); - - const embeddingsModels: Record = {}; - - response.data.data.forEach((model: LMStudioModel) => { - 
embeddingsModels[model.id] = { - displayName: model.name || model.id, - model: new OpenAIEmbeddings({ - apiKey: 'lm-studio', - configuration: { - baseURL: ensureV1Endpoint(endpoint), - }, - modelName: model.id, - }) as unknown as Embeddings, - }; - }); - - return embeddingsModels; - } catch (err) { - console.error(`Error loading LM Studio embeddings model: ${err}`); - return {}; - } -}; diff --git a/src/lib/providers/ollama.ts b/src/lib/providers/ollama.ts deleted file mode 100644 index cb0b848..0000000 --- a/src/lib/providers/ollama.ts +++ /dev/null @@ -1,86 +0,0 @@ -import axios from 'axios'; -import { getKeepAlive, getOllamaApiEndpoint, getOllamaApiKey } from '../config'; -import { ChatModel, EmbeddingModel } from '.'; - -export const PROVIDER_INFO = { - key: 'ollama', - displayName: 'Ollama', -}; -import { ChatOllama } from '@langchain/ollama'; -import { OllamaEmbeddings } from '@langchain/ollama'; - -export const loadOllamaChatModels = async () => { - const ollamaApiEndpoint = getOllamaApiEndpoint(); - const ollamaApiKey = getOllamaApiKey(); - - if (!ollamaApiEndpoint) return {}; - - try { - const res = await axios.get(`${ollamaApiEndpoint}/api/tags`, { - headers: { - 'Content-Type': 'application/json', - }, - }); - - const { models } = res.data; - - const chatModels: Record = {}; - - models.forEach((model: any) => { - chatModels[model.model] = { - displayName: model.name, - model: new ChatOllama({ - baseUrl: ollamaApiEndpoint, - model: model.model, - temperature: 0.7, - keepAlive: getKeepAlive(), - ...(ollamaApiKey - ? 
{ headers: { Authorization: `Bearer ${ollamaApiKey}` } } - : {}), - }), - }; - }); - - return chatModels; - } catch (err) { - console.error(`Error loading Ollama models: ${err}`); - return {}; - } -}; - -export const loadOllamaEmbeddingModels = async () => { - const ollamaApiEndpoint = getOllamaApiEndpoint(); - const ollamaApiKey = getOllamaApiKey(); - - if (!ollamaApiEndpoint) return {}; - - try { - const res = await axios.get(`${ollamaApiEndpoint}/api/tags`, { - headers: { - 'Content-Type': 'application/json', - }, - }); - - const { models } = res.data; - - const embeddingModels: Record = {}; - - models.forEach((model: any) => { - embeddingModels[model.model] = { - displayName: model.name, - model: new OllamaEmbeddings({ - baseUrl: ollamaApiEndpoint, - model: model.model, - ...(ollamaApiKey - ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } } - : {}), - }), - }; - }); - - return embeddingModels; - } catch (err) { - console.error(`Error loading Ollama embeddings models: ${err}`); - return {}; - } -}; diff --git a/src/lib/providers/openai.ts b/src/lib/providers/openai.ts deleted file mode 100644 index 0c5379a..0000000 --- a/src/lib/providers/openai.ts +++ /dev/null @@ -1,159 +0,0 @@ -import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai'; -import { getOpenaiApiKey } from '../config'; -import { ChatModel, EmbeddingModel } from '.'; - -export const PROVIDER_INFO = { - key: 'openai', - displayName: 'OpenAI', -}; -import { BaseChatModel } from '@langchain/core/language_models/chat_models'; -import { Embeddings } from '@langchain/core/embeddings'; - -const openaiChatModels: Record[] = [ - { - displayName: 'GPT-3.5 Turbo', - key: 'gpt-3.5-turbo', - }, - { - displayName: 'GPT-4', - key: 'gpt-4', - }, - { - displayName: 'GPT-4 turbo', - key: 'gpt-4-turbo', - }, - { - displayName: 'GPT-4 omni', - key: 'gpt-4o', - }, - { - displayName: 'GPT-4o (2024-05-13)', - key: 'gpt-4o-2024-05-13', - }, - { - displayName: 'GPT-4 omni mini', - key: 'gpt-4o-mini', - }, - 
{ - displayName: 'GPT 4.1 nano', - key: 'gpt-4.1-nano', - }, - { - displayName: 'GPT 4.1 mini', - key: 'gpt-4.1-mini', - }, - { - displayName: 'GPT 4.1', - key: 'gpt-4.1', - }, - { - displayName: 'GPT 5 nano', - key: 'gpt-5-nano', - }, - { - displayName: 'GPT 5', - key: 'gpt-5', - }, - { - displayName: 'GPT 5 Mini', - key: 'gpt-5-mini', - }, - { - displayName: 'o1', - key: 'o1', - }, - { - displayName: 'o3', - key: 'o3', - }, - { - displayName: 'o3 Mini', - key: 'o3-mini', - }, - { - displayName: 'o4 Mini', - key: 'o4-mini', - }, -]; - -const openaiEmbeddingModels: Record[] = [ - { - displayName: 'Text Embedding 3 Small', - key: 'text-embedding-3-small', - }, - { - displayName: 'Text Embedding 3 Large', - key: 'text-embedding-3-large', - }, -]; - -export const loadOpenAIChatModels = async () => { - const openaiApiKey = getOpenaiApiKey(); - - if (!openaiApiKey) return {}; - - try { - const chatModels: Record = {}; - - openaiChatModels.forEach((model) => { - // Models that only support temperature = 1 - const temperatureRestrictedModels = [ - 'gpt-5-nano', - 'gpt-5', - 'gpt-5-mini', - 'o1', - 'o3', - 'o3-mini', - 'o4-mini', - ]; - const isTemperatureRestricted = temperatureRestrictedModels.some( - (restrictedModel) => model.key.includes(restrictedModel), - ); - - const modelConfig: any = { - apiKey: openaiApiKey, - modelName: model.key, - }; - - // Only add temperature if the model supports it - if (!isTemperatureRestricted) { - modelConfig.temperature = 0.7; - } - - chatModels[model.key] = { - displayName: model.displayName, - model: new ChatOpenAI(modelConfig) as unknown as BaseChatModel, - }; - }); - - return chatModels; - } catch (err) { - console.error(`Error loading OpenAI models: ${err}`); - return {}; - } -}; - -export const loadOpenAIEmbeddingModels = async () => { - const openaiApiKey = getOpenaiApiKey(); - - if (!openaiApiKey) return {}; - - try { - const embeddingModels: Record = {}; - - openaiEmbeddingModels.forEach((model) => { - 
embeddingModels[model.key] = { - displayName: model.displayName, - model: new OpenAIEmbeddings({ - apiKey: openaiApiKey, - modelName: model.key, - }) as unknown as Embeddings, - }; - }); - - return embeddingModels; - } catch (err) { - console.error(`Error loading OpenAI embeddings models: ${err}`); - return {}; - } -}; diff --git a/src/lib/providers/transformers.ts b/src/lib/providers/transformers.ts deleted file mode 100644 index 3098d9f..0000000 --- a/src/lib/providers/transformers.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { HuggingFaceTransformersEmbeddings } from '../huggingfaceTransformer'; - -export const PROVIDER_INFO = { - key: 'transformers', - displayName: 'Hugging Face', -}; - -export const loadTransformersEmbeddingsModels = async () => { - try { - const embeddingModels = { - 'xenova-bge-small-en-v1.5': { - displayName: 'BGE Small', - model: new HuggingFaceTransformersEmbeddings({ - modelName: 'Xenova/bge-small-en-v1.5', - }), - }, - 'xenova-gte-small': { - displayName: 'GTE Small', - model: new HuggingFaceTransformersEmbeddings({ - modelName: 'Xenova/gte-small', - }), - }, - 'xenova-bert-base-multilingual-uncased': { - displayName: 'Bert Multilingual', - model: new HuggingFaceTransformersEmbeddings({ - modelName: 'Xenova/bert-base-multilingual-uncased', - }), - }, - }; - - return embeddingModels; - } catch (err) { - console.error(`Error loading Transformers embeddings model: ${err}`); - return {}; - } -}; From 30fb1e312b034cdba5a6ebb580c03e54afbd71ae Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Wed, 15 Oct 2025 12:53:36 +0530 Subject: [PATCH 16/53] feat(modelRegistry): add `MinimalProvider` type --- src/lib/models/registry.ts | 15 ++++++++++----- src/lib/models/types.ts | 9 ++++++++- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/src/lib/models/registry.ts b/src/lib/models/registry.ts index 2d4de6f..a434dcc 100644 --- a/src/lib/models/registry.ts +++ b/src/lib/models/registry.ts @@ -4,7 
+4,7 @@ import BaseModelProvider, { } from './providers/baseProvider'; import { getConfiguredModelProviders } from '../config/serverRegistry'; import { providers } from './providers'; -import { ModelList } from './types'; +import { MinimalProvider, Model } from './types'; class ModelRegistry { activeProviders: (ConfigModelProvider & { @@ -35,18 +35,23 @@ class ModelRegistry { }); } - async getActiveModels() { - const models: ModelList[] = []; + async getActiveProviders() { + const providers: MinimalProvider[] = []; await Promise.all( this.activeProviders.map(async (p) => { const m = await p.provider.getModelList(); - models.push(m); + providers.push({ + id: p.id, + name: p.name, + chatModels: m.chat, + embeddingModels: m.embedding, + }); }), ); - return models; + return providers; } } diff --git a/src/lib/models/types.ts b/src/lib/models/types.ts index e85ad6b..c91b241 100644 --- a/src/lib/models/types.ts +++ b/src/lib/models/types.ts @@ -13,4 +13,11 @@ type ProviderMetadata = { key: string; }; -export type { Model, ModelList, ProviderMetadata }; +type MinimalProvider = { + id: string; + name: string; + chatModels: Model[]; + embeddingModels: Model[]; +}; + +export type { Model, ModelList, ProviderMetadata, MinimalProvider }; From 410201b1171b060e17cb753758af821d225d1eca Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Wed, 15 Oct 2025 12:54:54 +0530 Subject: [PATCH 17/53] feat(api/models): rename to `providers`, use new model registry --- src/app/api/models/route.ts | 47 ---------------------------------- src/app/api/providers/route.ts | 28 ++++++++++++++++++++ 2 files changed, 28 insertions(+), 47 deletions(-) delete mode 100644 src/app/api/models/route.ts create mode 100644 src/app/api/providers/route.ts diff --git a/src/app/api/models/route.ts b/src/app/api/models/route.ts deleted file mode 100644 index 04a6949..0000000 --- a/src/app/api/models/route.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { - 
getAvailableChatModelProviders, - getAvailableEmbeddingModelProviders, -} from '@/lib/providers'; - -export const GET = async (req: Request) => { - try { - const [chatModelProviders, embeddingModelProviders] = await Promise.all([ - getAvailableChatModelProviders(), - getAvailableEmbeddingModelProviders(), - ]); - - Object.keys(chatModelProviders).forEach((provider) => { - Object.keys(chatModelProviders[provider]).forEach((model) => { - delete (chatModelProviders[provider][model] as { model?: unknown }) - .model; - }); - }); - - Object.keys(embeddingModelProviders).forEach((provider) => { - Object.keys(embeddingModelProviders[provider]).forEach((model) => { - delete (embeddingModelProviders[provider][model] as { model?: unknown }) - .model; - }); - }); - - return Response.json( - { - chatModelProviders, - embeddingModelProviders, - }, - { - status: 200, - }, - ); - } catch (err) { - console.error('An error occurred while fetching models', err); - return Response.json( - { - message: 'An error has occurred.', - }, - { - status: 500, - }, - ); - } -}; diff --git a/src/app/api/providers/route.ts b/src/app/api/providers/route.ts new file mode 100644 index 0000000..1a80957 --- /dev/null +++ b/src/app/api/providers/route.ts @@ -0,0 +1,28 @@ +import ModelRegistry from '@/lib/models/registry'; + +export const GET = async (req: Request) => { + try { + const registry = new ModelRegistry(); + + const activeProviders = await registry.getActiveProviders(); + + return Response.json( + { + providers: activeProviders, + }, + { + status: 200, + }, + ); + } catch (err) { + console.error('An error occurred while fetching providers', err); + return Response.json( + { + message: 'An error has occurred.', + }, + { + status: 500, + }, + ); + } +}; From 626cb646e22b186e9bdb7073801963739ebacfe8 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Wed, 15 Oct 2025 12:55:22 +0530 Subject: [PATCH 18/53] feat(chat-hook): use new providers endpoint --- 
src/lib/hooks/useChat.tsx | 219 ++++++++++++++------------------------ 1 file changed, 79 insertions(+), 140 deletions(-) diff --git a/src/lib/hooks/useChat.tsx b/src/lib/hooks/useChat.tsx index 479fc79..04b17b5 100644 --- a/src/lib/hooks/useChat.tsx +++ b/src/lib/hooks/useChat.tsx @@ -20,6 +20,7 @@ import crypto from 'crypto'; import { useSearchParams } from 'next/navigation'; import { toast } from 'sonner'; import { getSuggestions } from '../actions'; +import { MinimalProvider } from '../models/types'; export type Section = { userMessage: UserMessage; @@ -66,13 +67,13 @@ export interface File { } interface ChatModelProvider { - name: string; - provider: string; + key: string; + providerId: string; } interface EmbeddingModelProvider { - name: string; - provider: string; + key: string; + providerId: string; } const checkConfig = async ( @@ -82,10 +83,12 @@ const checkConfig = async ( setHasError: (hasError: boolean) => void, ) => { try { - let chatModel = localStorage.getItem('chatModel'); - let chatModelProvider = localStorage.getItem('chatModelProvider'); - let embeddingModel = localStorage.getItem('embeddingModel'); - let embeddingModelProvider = localStorage.getItem('embeddingModelProvider'); + let chatModelKey = localStorage.getItem('chatModelKey'); + let chatModelProviderId = localStorage.getItem('chatModelProviderId'); + let embeddingModelKey = localStorage.getItem('embeddingModelKey'); + let embeddingModelProviderId = localStorage.getItem( + 'embeddingModelProviderId', + ); const autoImageSearch = localStorage.getItem('autoImageSearch'); const autoVideoSearch = localStorage.getItem('autoVideoSearch'); @@ -98,145 +101,81 @@ const checkConfig = async ( localStorage.setItem('autoVideoSearch', 'false'); } - const providers = await fetch(`/api/models`, { + const res = await fetch(`/api/providers`, { headers: { 'Content-Type': 'application/json', }, - }).then(async (res) => { - if (!res.ok) - throw new Error( - `Failed to fetch models: ${res.status} 
${res.statusText}`, - ); - return res.json(); }); - if ( - !chatModel || - !chatModelProvider || - !embeddingModel || - !embeddingModelProvider - ) { - if (!chatModel || !chatModelProvider) { - const chatModelProviders = providers.chatModelProviders; - const chatModelProvidersKeys = Object.keys(chatModelProviders); - - if (!chatModelProviders || chatModelProvidersKeys.length === 0) { - return toast.error('No chat models available'); - } else { - chatModelProvider = - chatModelProvidersKeys.find( - (provider) => - Object.keys(chatModelProviders[provider]).length > 0, - ) || chatModelProvidersKeys[0]; - } - - if ( - chatModelProvider === 'custom_openai' && - Object.keys(chatModelProviders[chatModelProvider]).length === 0 - ) { - toast.error( - "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.", - ); - return setHasError(true); - } - - chatModel = Object.keys(chatModelProviders[chatModelProvider])[0]; - } - - if (!embeddingModel || !embeddingModelProvider) { - const embeddingModelProviders = providers.embeddingModelProviders; - - if ( - !embeddingModelProviders || - Object.keys(embeddingModelProviders).length === 0 - ) - return toast.error('No embedding models available'); - - embeddingModelProvider = Object.keys(embeddingModelProviders)[0]; - embeddingModel = Object.keys( - embeddingModelProviders[embeddingModelProvider], - )[0]; - } - - localStorage.setItem('chatModel', chatModel!); - localStorage.setItem('chatModelProvider', chatModelProvider); - localStorage.setItem('embeddingModel', embeddingModel!); - localStorage.setItem('embeddingModelProvider', embeddingModelProvider); - } else { - const chatModelProviders = providers.chatModelProviders; - const embeddingModelProviders = providers.embeddingModelProviders; - - if ( - Object.keys(chatModelProviders).length > 0 && - (!chatModelProviders[chatModelProvider] || - Object.keys(chatModelProviders[chatModelProvider]).length === 0) - ) { - const 
chatModelProvidersKeys = Object.keys(chatModelProviders); - chatModelProvider = - chatModelProvidersKeys.find( - (key) => Object.keys(chatModelProviders[key]).length > 0, - ) || chatModelProvidersKeys[0]; - - localStorage.setItem('chatModelProvider', chatModelProvider); - } - - if ( - chatModelProvider && - !chatModelProviders[chatModelProvider][chatModel] - ) { - if ( - chatModelProvider === 'custom_openai' && - Object.keys(chatModelProviders[chatModelProvider]).length === 0 - ) { - toast.error( - "Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.", - ); - return setHasError(true); - } - - chatModel = Object.keys( - chatModelProviders[ - Object.keys(chatModelProviders[chatModelProvider]).length > 0 - ? chatModelProvider - : Object.keys(chatModelProviders)[0] - ], - )[0]; - - localStorage.setItem('chatModel', chatModel); - } - - if ( - Object.keys(embeddingModelProviders).length > 0 && - !embeddingModelProviders[embeddingModelProvider] - ) { - embeddingModelProvider = Object.keys(embeddingModelProviders)[0]; - localStorage.setItem('embeddingModelProvider', embeddingModelProvider); - } - - if ( - embeddingModelProvider && - !embeddingModelProviders[embeddingModelProvider][embeddingModel] - ) { - embeddingModel = Object.keys( - embeddingModelProviders[embeddingModelProvider], - )[0]; - localStorage.setItem('embeddingModel', embeddingModel); - } + if (!res.ok) { + throw new Error( + `Provider fetching failed with status code ${res.status}`, + ); } + const data = await res.json(); + const providers: MinimalProvider[] = data.providers; + + if (providers.length === 0) { + throw new Error( + 'No chat model providers found, please configure them in the settings page.', + ); + } + + const chatModelProvider = + providers.find((p) => p.id === chatModelProviderId) ?? 
+ providers.find((p) => p.chatModels.length > 0); + + if (!chatModelProvider) { + throw new Error( + 'No chat models found, please configure them in the settings page.', + ); + } + + chatModelProviderId = chatModelProvider.id; + + const chatModel = + chatModelProvider.chatModels.find((m) => m.key === chatModelKey) ?? + chatModelProvider.chatModels[0]; + chatModelKey = chatModel.key; + + const embeddingModelProvider = + providers.find((p) => p.id === embeddingModelProviderId) ?? + providers.find((p) => p.embeddingModels.length > 0); + + if (!embeddingModelProvider) { + throw new Error( + 'No embedding models found, please configure them in the settings page.', + ); + } + + embeddingModelProviderId = embeddingModelProvider.id; + + const embeddingModel = + embeddingModelProvider.embeddingModels.find( + (m) => m.key === embeddingModelKey, + ) ?? embeddingModelProvider.embeddingModels[0]; + embeddingModelKey = embeddingModel.key; + + localStorage.setItem('chatModelKey', chatModelKey); + localStorage.setItem('chatModelProviderId', chatModelProviderId); + localStorage.setItem('embeddingModelKey', embeddingModelKey); + localStorage.setItem('embeddingModelProviderId', embeddingModelProviderId); + setChatModelProvider({ - name: chatModel!, - provider: chatModelProvider, + key: chatModelKey, + providerId: chatModelProviderId, }); setEmbeddingModelProvider({ - name: embeddingModel!, - provider: embeddingModelProvider, + key: embeddingModelKey, 
+ providerId: '', }); const [isConfigReady, setIsConfigReady] = useState(false); @@ -742,12 +681,12 @@ export const ChatProvider = ({ ? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex) : chatHistory, chatModel: { - name: chatModelProvider.name, - provider: chatModelProvider.provider, + key: chatModelProvider.key, + providerId: chatModelProvider.providerId, }, embeddingModel: { - name: embeddingModelProvider.name, - provider: embeddingModelProvider.provider, + key: embeddingModelProvider.key, + providerId: embeddingModelProvider.providerId, }, systemInstructions: localStorage.getItem('systemInstructions'), }), From b67ca79e2ad2abc431c6a4d7b7a171e738c96f1c Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Wed, 15 Oct 2025 13:02:08 +0530 Subject: [PATCH 19/53] feat(config): add `searxngURL` --- src/lib/config/serverRegistry.ts | 2 ++ src/lib/config/types.ts | 4 ++++ src/lib/searxng.ts | 4 ++-- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/lib/config/serverRegistry.ts b/src/lib/config/serverRegistry.ts index c21ce24..9214e29 100644 --- a/src/lib/config/serverRegistry.ts +++ b/src/lib/config/serverRegistry.ts @@ -10,3 +10,5 @@ export const getConfiguredModelProviderById = ( ): ConfigModelProvider | undefined => { return getConfiguredModelProviders().find((p) => p.id === id) ?? 
undefined; }; + +export const getSearxngURL = () => configManager.getConfig('search.searxngURL', '') diff --git a/src/lib/config/types.ts b/src/lib/config/types.ts index 34fdcb9..f4da00b 100644 --- a/src/lib/config/types.ts +++ b/src/lib/config/types.ts @@ -55,6 +55,9 @@ type Config = { [key: string]: any; }; modelProviders: ConfigModelProvider[]; + search: { + [key: string]: any + } }; type EnvMap = { @@ -73,6 +76,7 @@ type ModelProviderUISection = { type UIConfigSections = { general: UIConfigField[]; modelProviders: ModelProviderUISection[]; + search: UIConfigField[]; }; export type { diff --git a/src/lib/searxng.ts b/src/lib/searxng.ts index ae19db2..cca41b2 100644 --- a/src/lib/searxng.ts +++ b/src/lib/searxng.ts @@ -1,5 +1,5 @@ import axios from 'axios'; -import { getSearxngApiEndpoint } from './config'; +import { getSearxngURL } from './config/serverRegistry'; interface SearxngSearchOptions { categories?: string[]; @@ -23,7 +23,7 @@ export const searchSearxng = async ( query: string, opts?: SearxngSearchOptions, ) => { - const searxngURL = getSearxngApiEndpoint(); + const searxngURL = getSearxngURL(); const url = new URL(`${searxngURL}/search?format=json`); url.searchParams.append('q', query); From 0d2cd4bb1ebea5b4e53e420eb6ff80cb6f5dff13 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Thu, 16 Oct 2025 17:53:31 +0530 Subject: [PATCH 20/53] feat(app): remove `compute-dot`, make cosine default --- package.json | 2 +- src/lib/types/compute-dot.d.ts | 5 ----- src/lib/utils/computeSimilarity.ts | 12 +----------- yarn.lock | 21 +++++++++++++++++++++ 4 files changed, 23 insertions(+), 17 deletions(-) delete mode 100644 src/lib/types/compute-dot.d.ts diff --git a/package.json b/package.json index 53c899e..abbbee7 100644 --- a/package.json +++ b/package.json @@ -29,8 +29,8 @@ "better-sqlite3": "^11.9.1", "clsx": "^2.1.0", "compute-cosine-similarity": "^1.1.0", - "compute-dot": "^1.1.0", "drizzle-orm": "^0.40.1", + 
"framer-motion": "^12.23.24", "html-to-text": "^9.0.5", "jspdf": "^3.0.1", "langchain": "^0.3.30", diff --git a/src/lib/types/compute-dot.d.ts b/src/lib/types/compute-dot.d.ts deleted file mode 100644 index b671c64..0000000 --- a/src/lib/types/compute-dot.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -declare function computeDot(vectorA: number[], vectorB: number[]): number; - -declare module 'compute-dot' { - export default computeDot; -} diff --git a/src/lib/utils/computeSimilarity.ts b/src/lib/utils/computeSimilarity.ts index a635577..eda2a59 100644 --- a/src/lib/utils/computeSimilarity.ts +++ b/src/lib/utils/computeSimilarity.ts @@ -1,17 +1,7 @@ -import dot from 'compute-dot'; import cosineSimilarity from 'compute-cosine-similarity'; -import { getSimilarityMeasure } from '../config'; const computeSimilarity = (x: number[], y: number[]): number => { - const similarityMeasure = getSimilarityMeasure(); - - if (similarityMeasure === 'cosine') { - return cosineSimilarity(x, y) as number; - } else if (similarityMeasure === 'dot') { - return dot(x, y); - } - - throw new Error('Invalid similarity measure'); + return cosineSimilarity(x, y) as number; }; export default computeSimilarity; diff --git a/yarn.lock b/yarn.lock index 3aa8070..0c14778 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2709,6 +2709,15 @@ fraction.js@^4.3.7: resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.7.tgz#06ca0085157e42fda7f9e726e79fefc4068840f7" integrity sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew== +framer-motion@^12.23.24: + version "12.23.24" + resolved "https://registry.yarnpkg.com/framer-motion/-/framer-motion-12.23.24.tgz#4895b67e880bd2b1089e61fbaa32ae802fc24b8c" + integrity sha512-HMi5HRoRCTou+3fb3h9oTLyJGBxHfW+HnNE25tAXOvVx/IvwMHK0cx7IR4a2ZU6sh3IX1Z+4ts32PcYBOqka8w== + dependencies: + motion-dom "^12.23.23" + motion-utils "^12.23.6" + tslib "^2.4.0" + fs-constants@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" @@ -3674,6 +3683,18 @@ mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3: resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== +motion-dom@^12.23.23: + version "12.23.23" + resolved "https://registry.yarnpkg.com/motion-dom/-/motion-dom-12.23.23.tgz#8f874333ea1a04ee3a89eb928f518b463d589e0e" + integrity sha512-n5yolOs0TQQBRUFImrRfs/+6X4p3Q4n1dUEqt/H58Vx7OW6RF+foWEgmTVDhIWJIMXOuNNL0apKH2S16en9eiA== + dependencies: + motion-utils "^12.23.6" + +motion-utils@^12.23.6: + version "12.23.6" + resolved "https://registry.yarnpkg.com/motion-utils/-/motion-utils-12.23.6.tgz#fafef80b4ea85122dd0d6c599a0c63d72881f312" + integrity sha512-eAWoPgr4eFEOFfg2WjIsMoqJTW6Z8MTUCgn/GZ3VRpClWBdnbjryiA3ZSNLyxCTmCQx4RmYX6jX1iWHbenUPNQ== + ms@2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" From 36fdb6491d597286d8f4fd0da8c604e697722150 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Thu, 16 Oct 2025 17:56:14 +0530 Subject: [PATCH 21/53] feat(model-types): add `ModelWithProvider` type --- src/lib/models/types.ts | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/src/lib/models/types.ts b/src/lib/models/types.ts index c91b241..fdd5df2 100644 --- a/src/lib/models/types.ts +++ b/src/lib/models/types.ts @@ -20,4 +20,15 @@ type MinimalProvider = { embeddingModels: Model[]; }; -export type { Model, ModelList, ProviderMetadata, MinimalProvider }; +type ModelWithProvider = { + key: string; + providerId: string; +}; + +export type { + Model, + ModelList, + ProviderMetadata, + MinimalProvider, + ModelWithProvider, +}; From 9219593ee11818adc21bdf8b14413eb2a66507fc Mon Sep 17 
00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Thu, 16 Oct 2025 17:56:57 +0530 Subject: [PATCH 22/53] feat(model-registry): add loading method --- src/lib/models/registry.ts | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/src/lib/models/registry.ts b/src/lib/models/registry.ts index a434dcc..a2a7aa8 100644 --- a/src/lib/models/registry.ts +++ b/src/lib/models/registry.ts @@ -53,6 +53,26 @@ class ModelRegistry { return providers; } + + async loadChatModel(providerId: string, modelName: string) { + const provider = this.activeProviders.find((p) => p.id === providerId); + + if (!provider) throw new Error('Invalid provider id'); + + const model = await provider.provider.loadChatModel(modelName); + + return model; + } + + async loadEmbeddingModel(providerId: string, modelName: string) { + const provider = this.activeProviders.find((p) => p.id === providerId); + + if (!provider) throw new Error('Invalid provider id'); + + const model = await provider.provider.loadEmbeddingModel(modelName); + + return model; + } } export default ModelRegistry; From 9706079ed4fea3489af132e6b581067e7a14c507 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Thu, 16 Oct 2025 17:57:30 +0530 Subject: [PATCH 23/53] feat(config): add `searxngURL` --- src/lib/config/index.ts | 35 ++++++++++++++++++++++++++++++++--- 1 file changed, 32 insertions(+), 3 deletions(-) diff --git a/src/lib/config/index.ts b/src/lib/config/index.ts index 1fbea38..e3c3713 100644 --- a/src/lib/config/index.ts +++ b/src/lib/config/index.ts @@ -15,10 +15,26 @@ class ConfigManager { setupComplete: false, general: {}, modelProviders: [], + search: { + searxngURL: '', + }, }; uiConfigSections: UIConfigSections = { general: [], modelProviders: [], + search: [ + { + name: 'SearXNG URL', + key: 'searxngURL', + type: 'string', + required: false, + description: 'The URL of your SearXNG instance', + placeholder: 
'http://localhost:4000', + default: '', + scope: 'server', + env: 'SEARXNG_API_URL', + }, + ], }; constructor() { @@ -78,6 +94,7 @@ class ConfigManager { } private initializeFromEnv() { + /* providers section*/ const providerConfigSections = getModelProvidersUIConfigSection(); this.uiConfigSections.modelProviders = providerConfigSections; @@ -130,6 +147,14 @@ class ConfigManager { this.currentConfig.modelProviders.push(...newProviders); + /* search section */ + this.uiConfigSections.search.forEach((f) => { + if (f.env && !this.currentConfig.search[f.key]) { + this.currentConfig.search[f.key] = + process.env[f.env] ?? f.default ?? ''; + } + }); + this.saveConfig(); } @@ -196,15 +221,19 @@ class ConfigManager { } public isSetupComplete() { - return this.currentConfig.setupComplete + return this.currentConfig.setupComplete; } public markSetupComplete() { if (!this.currentConfig.setupComplete) { - this.currentConfig.setupComplete = true + this.currentConfig.setupComplete = true; } - this.saveConfig() + this.saveConfig(); + } + + public getUIConfigSections(): UIConfigSections { + return this.uiConfigSections; } } From 768578951c00166f55b35f6d29081d58f51ef5c7 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Thu, 16 Oct 2025 17:58:13 +0530 Subject: [PATCH 24/53] feat(chat-route): use new model registry --- src/app/api/chat/route.ts | 101 +++++++++++++------------------------- 1 file changed, 34 insertions(+), 67 deletions(-) diff --git a/src/app/api/chat/route.ts b/src/app/api/chat/route.ts index 7329299..bab34fa 100644 --- a/src/app/api/chat/route.ts +++ b/src/app/api/chat/route.ts @@ -1,23 +1,14 @@ import crypto from 'crypto'; import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages'; import { EventEmitter } from 'stream'; -import { - getAvailableChatModelProviders, - getAvailableEmbeddingModelProviders, -} from '@/lib/providers'; import db from '@/lib/db'; import { chats, messages as messagesSchema } 
from '@/lib/db/schema'; import { and, eq, gt } from 'drizzle-orm'; import { getFileDetails } from '@/lib/utils/files'; -import { BaseChatModel } from '@langchain/core/language_models/chat_models'; -import { ChatOpenAI } from '@langchain/openai'; -import { - getCustomOpenaiApiKey, - getCustomOpenaiApiUrl, - getCustomOpenaiModelName, -} from '@/lib/config'; import { searchHandlers } from '@/lib/search'; import { z } from 'zod'; +import ModelRegistry from '@/lib/models/registry'; +import { ModelWithProvider } from '@/lib/models/types'; export const runtime = 'nodejs'; export const dynamic = 'force-dynamic'; @@ -28,14 +19,30 @@ const messageSchema = z.object({ content: z.string().min(1, 'Message content is required'), }); -const chatModelSchema = z.object({ - provider: z.string().optional(), - name: z.string().optional(), +const chatModelSchema: z.ZodType = z.object({ + providerId: z.string({ + errorMap: () => ({ + message: 'Chat model provider id must be provided', + }), + }), + key: z.string({ + errorMap: () => ({ + message: 'Chat model key must be provided', + }), + }), }); -const embeddingModelSchema = z.object({ - provider: z.string().optional(), - name: z.string().optional(), +const embeddingModelSchema: z.ZodType = z.object({ + providerId: z.string({ + errorMap: () => ({ + message: 'Embedding model provider id must be provided', + }), + }), + key: z.string({ + errorMap: () => ({ + message: 'Embedding model key must be provided', + }), + }), }); const bodySchema = z.object({ @@ -57,8 +64,8 @@ const bodySchema = z.object({ .optional() .default([]), files: z.array(z.string()).optional().default([]), - chatModel: chatModelSchema.optional().default({}), - embeddingModel: embeddingModelSchema.optional().default({}), + chatModel: chatModelSchema, + embeddingModel: embeddingModelSchema, systemInstructions: z.string().nullable().optional().default(''), }); @@ -248,56 +255,16 @@ export const POST = async (req: Request) => { ); } - const [chatModelProviders, 
embeddingModelProviders] = await Promise.all([ - getAvailableChatModelProviders(), - getAvailableEmbeddingModelProviders(), + const registry = new ModelRegistry(); + + const [llm, embedding] = await Promise.all([ + registry.loadChatModel(body.chatModel.providerId, body.chatModel.key), + registry.loadEmbeddingModel( + body.embeddingModel.providerId, + body.embeddingModel.key, + ), ]); - const chatModelProvider = - chatModelProviders[ - body.chatModel?.provider || Object.keys(chatModelProviders)[0] - ]; - const chatModel = - chatModelProvider[ - body.chatModel?.name || Object.keys(chatModelProvider)[0] - ]; - - const embeddingProvider = - embeddingModelProviders[ - body.embeddingModel?.provider || Object.keys(embeddingModelProviders)[0] - ]; - const embeddingModel = - embeddingProvider[ - body.embeddingModel?.name || Object.keys(embeddingProvider)[0] - ]; - - let llm: BaseChatModel | undefined; - let embedding = embeddingModel.model; - - if (body.chatModel?.provider === 'custom_openai') { - llm = new ChatOpenAI({ - apiKey: getCustomOpenaiApiKey(), - modelName: getCustomOpenaiModelName(), - temperature: 0.7, - configuration: { - baseURL: getCustomOpenaiApiUrl(), - }, - }) as unknown as BaseChatModel; - } else if (chatModelProvider && chatModel) { - llm = chatModel.model; - } - - if (!llm) { - return Response.json({ error: 'Invalid chat model' }, { status: 400 }); - } - - if (!embedding) { - return Response.json( - { error: 'Invalid embedding model' }, - { status: 400 }, - ); - } - const humanMessageId = message.messageId ?? 
crypto.randomBytes(7).toString('hex'); From 0ff1be47bffcd366a96cb82391b2f0f0b46ab328 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Thu, 16 Oct 2025 18:01:25 +0530 Subject: [PATCH 25/53] feat(routes): use new model registry --- src/app/api/images/route.ts | 50 +++++--------------------------- src/app/api/suggestions/route.ts | 49 +++++-------------------------- src/app/api/videos/route.ts | 50 +++++--------------------------- src/components/SearchImages.tsx | 17 ++++------- src/components/SearchVideos.tsx | 17 ++++------- src/lib/actions.ts | 15 +++------- 6 files changed, 40 insertions(+), 158 deletions(-) diff --git a/src/app/api/images/route.ts b/src/app/api/images/route.ts index e02854d..d3416ca 100644 --- a/src/app/api/images/route.ts +++ b/src/app/api/images/route.ts @@ -1,23 +1,12 @@ import handleImageSearch from '@/lib/chains/imageSearchAgent'; -import { - getCustomOpenaiApiKey, - getCustomOpenaiApiUrl, - getCustomOpenaiModelName, -} from '@/lib/config'; -import { getAvailableChatModelProviders } from '@/lib/providers'; -import { BaseChatModel } from '@langchain/core/language_models/chat_models'; +import ModelRegistry from '@/lib/models/registry'; +import { ModelWithProvider } from '@/lib/models/types'; import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages'; -import { ChatOpenAI } from '@langchain/openai'; - -interface ChatModel { - provider: string; - model: string; -} interface ImageSearchBody { query: string; chatHistory: any[]; - chatModel?: ChatModel; + chatModel: ModelWithProvider; } export const POST = async (req: Request) => { @@ -34,35 +23,12 @@ export const POST = async (req: Request) => { }) .filter((msg) => msg !== undefined) as BaseMessage[]; - const chatModelProviders = await getAvailableChatModelProviders(); + const registry = new ModelRegistry(); - const chatModelProvider = - chatModelProviders[ - body.chatModel?.provider || Object.keys(chatModelProviders)[0] - ]; - 
const chatModel = - chatModelProvider[ - body.chatModel?.model || Object.keys(chatModelProvider)[0] - ]; - - let llm: BaseChatModel | undefined; - - if (body.chatModel?.provider === 'custom_openai') { - llm = new ChatOpenAI({ - apiKey: getCustomOpenaiApiKey(), - modelName: getCustomOpenaiModelName(), - temperature: 0.7, - configuration: { - baseURL: getCustomOpenaiApiUrl(), - }, - }) as unknown as BaseChatModel; - } else if (chatModelProvider && chatModel) { - llm = chatModel.model; - } - - if (!llm) { - return Response.json({ error: 'Invalid chat model' }, { status: 400 }); - } + const llm = await registry.loadChatModel( + body.chatModel.providerId, + body.chatModel.key, + ); const images = await handleImageSearch( { diff --git a/src/app/api/suggestions/route.ts b/src/app/api/suggestions/route.ts index 99179d2..d8312cf 100644 --- a/src/app/api/suggestions/route.ts +++ b/src/app/api/suggestions/route.ts @@ -1,22 +1,12 @@ import generateSuggestions from '@/lib/chains/suggestionGeneratorAgent'; -import { - getCustomOpenaiApiKey, - getCustomOpenaiApiUrl, - getCustomOpenaiModelName, -} from '@/lib/config'; -import { getAvailableChatModelProviders } from '@/lib/providers'; +import ModelRegistry from '@/lib/models/registry'; +import { ModelWithProvider } from '@/lib/models/types'; import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages'; -import { ChatOpenAI } from '@langchain/openai'; - -interface ChatModel { - provider: string; - model: string; -} interface SuggestionsGenerationBody { chatHistory: any[]; - chatModel?: ChatModel; + chatModel: ModelWithProvider; } export const POST = async (req: Request) => { @@ -33,35 +23,12 @@ export const POST = async (req: Request) => { }) .filter((msg) => msg !== undefined) as BaseMessage[]; - const chatModelProviders = await getAvailableChatModelProviders(); + const registry = new ModelRegistry(); - const chatModelProvider = - 
chatModelProviders[ - body.chatModel?.provider || Object.keys(chatModelProviders)[0] - ]; - const chatModel = - chatModelProvider[ - body.chatModel?.model || Object.keys(chatModelProvider)[0] - ]; - - let llm: BaseChatModel | undefined; - - if (body.chatModel?.provider === 'custom_openai') { - llm = new ChatOpenAI({ - apiKey: getCustomOpenaiApiKey(), - modelName: getCustomOpenaiModelName(), - temperature: 0.7, - configuration: { - baseURL: getCustomOpenaiApiUrl(), - }, - }) as unknown as BaseChatModel; - } else if (chatModelProvider && chatModel) { - llm = chatModel.model; - } - - if (!llm) { - return Response.json({ error: 'Invalid chat model' }, { status: 400 }); - } + const llm = await registry.loadChatModel( + body.chatModel.providerId, + body.chatModel.key, + ); const suggestions = await generateSuggestions( { diff --git a/src/app/api/videos/route.ts b/src/app/api/videos/route.ts index 7e8288b..02e5909 100644 --- a/src/app/api/videos/route.ts +++ b/src/app/api/videos/route.ts @@ -1,23 +1,12 @@ import handleVideoSearch from '@/lib/chains/videoSearchAgent'; -import { - getCustomOpenaiApiKey, - getCustomOpenaiApiUrl, - getCustomOpenaiModelName, -} from '@/lib/config'; -import { getAvailableChatModelProviders } from '@/lib/providers'; -import { BaseChatModel } from '@langchain/core/language_models/chat_models'; +import ModelRegistry from '@/lib/models/registry'; +import { ModelWithProvider } from '@/lib/models/types'; import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages'; -import { ChatOpenAI } from '@langchain/openai'; - -interface ChatModel { - provider: string; - model: string; -} interface VideoSearchBody { query: string; chatHistory: any[]; - chatModel?: ChatModel; + chatModel: ModelWithProvider; } export const POST = async (req: Request) => { @@ -34,35 +23,12 @@ export const POST = async (req: Request) => { }) .filter((msg) => msg !== undefined) as BaseMessage[]; - const chatModelProviders = await getAvailableChatModelProviders(); + 
const registry = new ModelRegistry(); - const chatModelProvider = - chatModelProviders[ - body.chatModel?.provider || Object.keys(chatModelProviders)[0] - ]; - const chatModel = - chatModelProvider[ - body.chatModel?.model || Object.keys(chatModelProvider)[0] - ]; - - let llm: BaseChatModel | undefined; - - if (body.chatModel?.provider === 'custom_openai') { - llm = new ChatOpenAI({ - apiKey: getCustomOpenaiApiKey(), - modelName: getCustomOpenaiModelName(), - temperature: 0.7, - configuration: { - baseURL: getCustomOpenaiApiUrl(), - }, - }) as unknown as BaseChatModel; - } else if (chatModelProvider && chatModel) { - llm = chatModel.model; - } - - if (!llm) { - return Response.json({ error: 'Invalid chat model' }, { status: 400 }); - } + const llm = await registry.loadChatModel( + body.chatModel.providerId, + body.chatModel.key, + ); const videos = await handleVideoSearch( { diff --git a/src/components/SearchImages.tsx b/src/components/SearchImages.tsx index 08c16ee..ca4d477 100644 --- a/src/components/SearchImages.tsx +++ b/src/components/SearchImages.tsx @@ -33,11 +33,10 @@ const SearchImages = ({ onClick={async () => { setLoading(true); - const chatModelProvider = localStorage.getItem('chatModelProvider'); - const chatModel = localStorage.getItem('chatModel'); - - const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL'); - const customOpenAIKey = localStorage.getItem('openAIApiKey'); + const chatModelProvider = localStorage.getItem( + 'chatModelProviderId', + ); + const chatModel = localStorage.getItem('chatModelKey'); const res = await fetch(`/api/images`, { method: 'POST', @@ -48,12 +47,8 @@ const SearchImages = ({ query: query, chatHistory: chatHistory, chatModel: { - provider: chatModelProvider, - model: chatModel, - ...(chatModelProvider === 'custom_openai' && { - customOpenAIBaseURL: customOpenAIBaseURL, - customOpenAIKey: customOpenAIKey, - }), + providerId: chatModelProvider, + key: chatModel, }, }), }); diff --git 
a/src/components/SearchVideos.tsx b/src/components/SearchVideos.tsx index a09a0d2..4084383 100644 --- a/src/components/SearchVideos.tsx +++ b/src/components/SearchVideos.tsx @@ -48,11 +48,10 @@ const Searchvideos = ({ onClick={async () => { setLoading(true); - const chatModelProvider = localStorage.getItem('chatModelProvider'); - const chatModel = localStorage.getItem('chatModel'); - - const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL'); - const customOpenAIKey = localStorage.getItem('openAIApiKey'); + const chatModelProvider = localStorage.getItem( + 'chatModelProviderId', + ); + const chatModel = localStorage.getItem('chatModelKey'); const res = await fetch(`/api/videos`, { method: 'POST', @@ -63,12 +62,8 @@ const Searchvideos = ({ query: query, chatHistory: chatHistory, chatModel: { - provider: chatModelProvider, - model: chatModel, - ...(chatModelProvider === 'custom_openai' && { - customOpenAIBaseURL: customOpenAIBaseURL, - customOpenAIKey: customOpenAIKey, - }), + providerId: chatModelProvider, + key: chatModel, }, }), }); diff --git a/src/lib/actions.ts b/src/lib/actions.ts index 93d0b38..cb75d88 100644 --- a/src/lib/actions.ts +++ b/src/lib/actions.ts @@ -1,11 +1,8 @@ import { Message } from '@/components/ChatWindow'; export const getSuggestions = async (chatHistory: Message[]) => { - const chatModel = localStorage.getItem('chatModel'); - const chatModelProvider = localStorage.getItem('chatModelProvider'); - - const customOpenAIKey = localStorage.getItem('openAIApiKey'); - const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL'); + const chatModel = localStorage.getItem('chatModelKey'); + const chatModelProvider = localStorage.getItem('chatModelProviderId'); const res = await fetch(`/api/suggestions`, { method: 'POST', @@ -15,12 +12,8 @@ export const getSuggestions = async (chatHistory: Message[]) => { body: JSON.stringify({ chatHistory: chatHistory, chatModel: { - provider: chatModelProvider, - model: chatModel, - 
...(chatModelProvider === 'custom_openai' && { - customOpenAIKey, - customOpenAIBaseURL, - }), + providerId: chatModelProvider, + key: chatModel, }, }), }); From 0c7566bb8701f43f8d303cd8f672dba42cb30692 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Thu, 16 Oct 2025 18:03:40 +0530 Subject: [PATCH 26/53] feat(sidebar): fix colors on smaller devices --- src/components/Sidebar.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/components/Sidebar.tsx b/src/components/Sidebar.tsx index f25925d..6c300da 100644 --- a/src/components/Sidebar.tsx +++ b/src/components/Sidebar.tsx @@ -70,7 +70,7 @@ const Sidebar = ({ children }: { children: React.ReactNode }) => { -

+
{navLinks.map((link, i) => ( Date: Thu, 16 Oct 2025 20:42:04 +0530 Subject: [PATCH 27/53] feat(config-route): use new config manager & model registry --- src/app/api/config/route.ts | 145 ++++++------------------------------ 1 file changed, 22 insertions(+), 123 deletions(-) diff --git a/src/app/api/config/route.ts b/src/app/api/config/route.ts index 5f66fdf..184155e 100644 --- a/src/app/api/config/route.ts +++ b/src/app/api/config/route.ts @@ -1,134 +1,33 @@ -import { - getAnthropicApiKey, - getCustomOpenaiApiKey, - getCustomOpenaiApiUrl, - getCustomOpenaiModelName, - getGeminiApiKey, - getGroqApiKey, - getOllamaApiEndpoint, - getOpenaiApiKey, - getDeepseekApiKey, - getAimlApiKey, - getLMStudioApiEndpoint, - getLemonadeApiEndpoint, - getLemonadeApiKey, - updateConfig, - getOllamaApiKey, -} from '@/lib/config'; -import { - getAvailableChatModelProviders, - getAvailableEmbeddingModelProviders, -} from '@/lib/providers'; +import configManager from '@/lib/config'; +import ModelRegistry from '@/lib/models/registry'; +import { NextRequest, NextResponse } from 'next/server'; -export const GET = async (req: Request) => { +export const GET = async (req: NextRequest) => { try { - const config: Record = {}; + const values = configManager.currentConfig; + const fields = configManager.getUIConfigSections(); - const [chatModelProviders, embeddingModelProviders] = await Promise.all([ - getAvailableChatModelProviders(), - getAvailableEmbeddingModelProviders(), - ]); + const modelRegistry = new ModelRegistry(); + const modelProviders = await modelRegistry.getActiveProviders(); - config['chatModelProviders'] = {}; - config['embeddingModelProviders'] = {}; + values.modelProviders = values.modelProviders.map((mp) => { + const activeProvider = modelProviders.find((p) => p.id === mp.id) - for (const provider in chatModelProviders) { - config['chatModelProviders'][provider] = Object.keys( - chatModelProviders[provider], - ).map((model) => { - return { - name: model, - displayName: 
chatModelProviders[provider][model].displayName, - }; - }); - } + return { + ...mp, + chatModels: activeProvider?.chatModels ?? mp.chatModels, + embeddingModels: activeProvider?.embeddingModels ?? mp.embeddingModels + } + }) - for (const provider in embeddingModelProviders) { - config['embeddingModelProviders'][provider] = Object.keys( - embeddingModelProviders[provider], - ).map((model) => { - return { - name: model, - displayName: embeddingModelProviders[provider][model].displayName, - }; - }); - } - - config['openaiApiKey'] = getOpenaiApiKey(); - config['ollamaApiUrl'] = getOllamaApiEndpoint(); - config['ollamaApiKey'] = getOllamaApiKey(); - config['lmStudioApiUrl'] = getLMStudioApiEndpoint(); - config['lemonadeApiUrl'] = getLemonadeApiEndpoint(); - config['lemonadeApiKey'] = getLemonadeApiKey(); - config['anthropicApiKey'] = getAnthropicApiKey(); - config['groqApiKey'] = getGroqApiKey(); - config['geminiApiKey'] = getGeminiApiKey(); - config['deepseekApiKey'] = getDeepseekApiKey(); - config['aimlApiKey'] = getAimlApiKey(); - config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl(); - config['customOpenaiApiKey'] = getCustomOpenaiApiKey(); - config['customOpenaiModelName'] = getCustomOpenaiModelName(); - - return Response.json({ ...config }, { status: 200 }); + return NextResponse.json({ + values, + fields, + }) } catch (err) { - console.error('An error occurred while getting config:', err); + console.error('Error in getting config: ', err); return Response.json( - { message: 'An error occurred while getting config' }, - { status: 500 }, - ); - } -}; - -export const POST = async (req: Request) => { - try { - const config = await req.json(); - - const updatedConfig = { - MODELS: { - OPENAI: { - API_KEY: config.openaiApiKey, - }, - GROQ: { - API_KEY: config.groqApiKey, - }, - ANTHROPIC: { - API_KEY: config.anthropicApiKey, - }, - GEMINI: { - API_KEY: config.geminiApiKey, - }, - OLLAMA: { - API_URL: config.ollamaApiUrl, - API_KEY: config.ollamaApiKey, - }, - 
DEEPSEEK: { - API_KEY: config.deepseekApiKey, - }, - AIMLAPI: { - API_KEY: config.aimlApiKey, - }, - LM_STUDIO: { - API_URL: config.lmStudioApiUrl, - }, - LEMONADE: { - API_URL: config.lemonadeApiUrl, - API_KEY: config.lemonadeApiKey, - }, - CUSTOM_OPENAI: { - API_URL: config.customOpenaiApiUrl, - API_KEY: config.customOpenaiApiKey, - MODEL_NAME: config.customOpenaiModelName, - }, - }, - }; - - updateConfig(updatedConfig); - - return Response.json({ message: 'Config updated' }, { status: 200 }); - } catch (err) { - console.error('An error occurred while updating config:', err); - return Response.json( - { message: 'An error occurred while updating config' }, + { message: 'An error has occurred.' }, { status: 500 }, ); } From 861d50674ace0da0e1777bd2a63578783447d89c Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Fri, 17 Oct 2025 14:35:49 +0530 Subject: [PATCH 28/53] feat(theme): fix colors --- tailwind.config.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tailwind.config.ts b/tailwind.config.ts index 2dc02e0..ea2865c 100644 --- a/tailwind.config.ts +++ b/tailwind.config.ts @@ -11,8 +11,8 @@ const themeDark = (colors: DefaultColors) => ({ const themeLight = (colors: DefaultColors) => ({ 50: '#ffffff', 100: '#f6f8fa', - 200: '#d0d7de', - 300: '#afb8c1', + 200: '#e8edf1', + 300: '#d0d7de', }); const config: Config = { From ac7cfac78463546e3e722080f568939a061a59fb Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Fri, 17 Oct 2025 14:36:04 +0530 Subject: [PATCH 29/53] feat(config): add theme --- src/lib/config/index.ts | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/src/lib/config/index.ts b/src/lib/config/index.ts index e3c3713..159ee56 100644 --- a/src/lib/config/index.ts +++ b/src/lib/config/index.ts @@ -20,7 +20,27 @@ class ConfigManager { }, }; uiConfigSections: UIConfigSections = { - general: [], + 
general: [ + { + name: 'Theme', + key: 'theme', + type: 'select', + options: [ + { + name: 'Light', + value: 'light', + }, + { + name: 'Dark', + value: 'dark', + }, + ], + required: false, + description: 'Choose between light and dark layouts for the app.', + default: 'dark', + scope: 'client', + }, + ], modelProviders: [], search: [ { From ca8b74b69554c84560cb1d88ab7da0ac772e4b8d Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Fri, 17 Oct 2025 14:37:17 +0530 Subject: [PATCH 30/53] feat(components): add loader --- src/components/ChatWindow.tsx | 18 ++---------------- src/components/ui/Loader.tsx | 22 ++++++++++++++++++++++ 2 files changed, 24 insertions(+), 16 deletions(-) create mode 100644 src/components/ui/Loader.tsx diff --git a/src/components/ChatWindow.tsx b/src/components/ChatWindow.tsx index 2078512..5c0a3e8 100644 --- a/src/components/ChatWindow.tsx +++ b/src/components/ChatWindow.tsx @@ -8,6 +8,7 @@ import { Settings } from 'lucide-react'; import Link from 'next/link'; import NextError from 'next/error'; import { useChat } from '@/lib/hooks/useChat'; +import Loader from './ui/Loader'; export interface BaseMessage { chatId: string; @@ -85,22 +86,7 @@ const ChatWindow = () => { ) ) : (
- +
); }; diff --git a/src/components/ui/Loader.tsx b/src/components/ui/Loader.tsx new file mode 100644 index 0000000..0bf35e7 --- /dev/null +++ b/src/components/ui/Loader.tsx @@ -0,0 +1,22 @@ +const Loader = () => { + return ( + + ); +}; + +export default Loader; From 09dd8dba5a880c296b4d0da46bdff6a3e6324b48 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Fri, 17 Oct 2025 14:38:05 +0530 Subject: [PATCH 31/53] feat(config-route): add POST handler --- src/app/api/config/route.ts | 49 +++++++++++++++++++++++++++++++++---- 1 file changed, 44 insertions(+), 5 deletions(-) diff --git a/src/app/api/config/route.ts b/src/app/api/config/route.ts index 184155e..9a45e8e 100644 --- a/src/app/api/config/route.ts +++ b/src/app/api/config/route.ts @@ -2,6 +2,11 @@ import configManager from '@/lib/config'; import ModelRegistry from '@/lib/models/registry'; import { NextRequest, NextResponse } from 'next/server'; +type SaveConfigBody = { + key: string; + value: string; +}; + export const GET = async (req: NextRequest) => { try { const values = configManager.currentConfig; @@ -11,19 +16,53 @@ export const GET = async (req: NextRequest) => { const modelProviders = await modelRegistry.getActiveProviders(); values.modelProviders = values.modelProviders.map((mp) => { - const activeProvider = modelProviders.find((p) => p.id === mp.id) + const activeProvider = modelProviders.find((p) => p.id === mp.id); return { ...mp, chatModels: activeProvider?.chatModels ?? mp.chatModels, - embeddingModels: activeProvider?.embeddingModels ?? mp.embeddingModels - } - }) + embeddingModels: activeProvider?.embeddingModels ?? mp.embeddingModels, + }; + }); return NextResponse.json({ values, fields, - }) + }); + } catch (err) { + console.error('Error in getting config: ', err); + return Response.json( + { message: 'An error has occurred.' 
}, + { status: 500 }, + ); + } +}; + +export const POST = async (req: NextRequest) => { + try { + const body: SaveConfigBody = await req.json(); + + if (!body.key || !body.value) { + return Response.json( + { + message: 'Key and value are required.', + }, + { + status: 400, + }, + ); + } + + configManager.updateConfig(body.key, body.value); + + return Response.json( + { + message: 'Config updated successfully.', + }, + { + status: 200, + }, + ); } catch (err) { console.error('Error in getting config: ', err); return Response.json( From e02b9a5efc34c47194384e35f5ca367ce8fb1c43 Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Fri, 17 Oct 2025 14:38:36 +0530 Subject: [PATCH 32/53] feat(settings-page): remove page --- src/app/settings/page.tsx | 1007 ------------------------------------- 1 file changed, 1007 deletions(-) delete mode 100644 src/app/settings/page.tsx diff --git a/src/app/settings/page.tsx b/src/app/settings/page.tsx deleted file mode 100644 index 1af53f9..0000000 --- a/src/app/settings/page.tsx +++ /dev/null @@ -1,1007 +0,0 @@ -'use client'; - -import { Settings as SettingsIcon, ArrowLeft, Loader2 } from 'lucide-react'; -import { useEffect, useState } from 'react'; -import { cn } from '@/lib/utils'; -import { Switch } from '@headlessui/react'; -import ThemeSwitcher from '@/components/theme/Switcher'; -import { ImagesIcon, VideoIcon } from 'lucide-react'; -import Link from 'next/link'; -import { PROVIDER_METADATA } from '@/lib/providers'; - -interface SettingsType { - chatModelProviders: { - [key: string]: [Record]; - }; - embeddingModelProviders: { - [key: string]: [Record]; - }; - openaiApiKey: string; - groqApiKey: string; - anthropicApiKey: string; - geminiApiKey: string; - ollamaApiUrl: string; - ollamaApiKey: string; - lmStudioApiUrl: string; - lemonadeApiUrl: string; - lemonadeApiKey: string; - deepseekApiKey: string; - aimlApiKey: string; - customOpenaiApiKey: string; - customOpenaiApiUrl: string; - 
customOpenaiModelName: string; -} - -interface InputProps extends React.InputHTMLAttributes { - isSaving?: boolean; - onSave?: (value: string) => void; -} - -const Input = ({ className, isSaving, onSave, ...restProps }: InputProps) => { - return ( -
- onSave?.(e.target.value)} - /> - {isSaving && ( -
- -
- )} -
- ); -}; - -interface TextareaProps extends React.InputHTMLAttributes { - isSaving?: boolean; - onSave?: (value: string) => void; -} - -const Textarea = ({ - className, - isSaving, - onSave, - ...restProps -}: TextareaProps) => { - return ( -
-