diff --git a/src/app/settings/page.tsx b/src/app/settings/page.tsx
index 8eee9a4..919304b 100644
--- a/src/app/settings/page.tsx
+++ b/src/app/settings/page.tsx
@@ -7,6 +7,7 @@ import { Switch } from '@headlessui/react';
 import ThemeSwitcher from '@/components/theme/Switcher';
 import { ImagesIcon, VideoIcon } from 'lucide-react';
 import Link from 'next/link';
+import { PROVIDER_METADATA } from '@/lib/providers';
 
 interface SettingsType {
   chatModelProviders: {
@@ -547,9 +548,8 @@ const Page = () => {
                 options={Object.keys(config.chatModelProviders).map(
                   (provider) => ({
                     value: provider,
-                    label:
-                      provider.charAt(0).toUpperCase() +
-                      provider.slice(1),
+                    label: (PROVIDER_METADATA as any)[provider]?.displayName ||
+                      provider.charAt(0).toUpperCase() + provider.slice(1),
                   }),
                 )}
               />
@@ -689,9 +689,8 @@ const Page = () => {
                 options={Object.keys(config.embeddingModelProviders).map(
                   (provider) => ({
                     value: provider,
-                    label:
-                      provider.charAt(0).toUpperCase() +
-                      provider.slice(1),
+                    label: (PROVIDER_METADATA as any)[provider]?.displayName ||
+                      provider.charAt(0).toUpperCase() + provider.slice(1),
                   }),
                 )}
               />
diff --git a/src/lib/config.ts b/src/lib/config.ts
index 7c6d495..e3f2680 100644
--- a/src/lib/config.ts
+++ b/src/lib/config.ts
@@ -1,7 +1,14 @@
-import fs from 'fs';
-import path from 'path';
 import toml from '@iarna/toml';
 
+// Use dynamic imports for Node.js modules to prevent client-side errors
+let fs: any;
+let path: any;
+if (typeof window === 'undefined') {
+  // We're on the server
+  fs = require('fs');
+  path = require('path');
+}
+
 const configFileName = 'config.toml';
 
 interface Config {
@@ -46,10 +53,17 @@ type RecursivePartial<T> = {
   [P in keyof T]?: RecursivePartial<T[P]>;
 };
 
-const loadConfig = () =>
-  toml.parse(
-    fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
-  ) as any as Config;
+const loadConfig = () => {
+  // Server-side only
+  if (typeof window === 'undefined') {
+    return toml.parse(
+      fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
+    ) as any as Config;
+  }
+
+  // Client-side fallback - settings will be loaded via API
+  return {} as Config;
+};
 
 export const getSimilarityMeasure = () =>
   loadConfig().GENERAL.SIMILARITY_MEASURE;
@@ -114,10 +128,13 @@ const mergeConfigs = (current: any, update: any): any => {
 };
 
 export const updateConfig = (config: RecursivePartial<Config>) => {
-  const currentConfig = loadConfig();
-  const mergedConfig = mergeConfigs(currentConfig, config);
-  fs.writeFileSync(
-    path.join(path.join(process.cwd(), `${configFileName}`)),
-    toml.stringify(mergedConfig),
-  );
+  // Server-side only
+  if (typeof window === 'undefined') {
+    const currentConfig = loadConfig();
+    const mergedConfig = mergeConfigs(currentConfig, config);
+    fs.writeFileSync(
+      path.join(path.join(process.cwd(), `${configFileName}`)),
+      toml.stringify(mergedConfig),
+    );
+  }
 };
diff --git a/src/lib/providers/anthropic.ts b/src/lib/providers/anthropic.ts
index 7ecde4b..e434b32 100644
--- a/src/lib/providers/anthropic.ts
+++ b/src/lib/providers/anthropic.ts
@@ -1,6 +1,11 @@
 import { ChatAnthropic } from '@langchain/anthropic';
 import { ChatModel } from '.';
 import { getAnthropicApiKey } from '../config';
+
+export const PROVIDER_INFO = {
+  key: 'anthropic',
+  displayName: 'Anthropic'
+};
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 
 const anthropicChatModels: Record<string, string>[] = [
diff --git a/src/lib/providers/deepseek.ts b/src/lib/providers/deepseek.ts
index 88f02ec..b272801 100644
--- a/src/lib/providers/deepseek.ts
+++ b/src/lib/providers/deepseek.ts
@@ -3,6 +3,11 @@ import { getDeepseekApiKey } from '../config';
 import { ChatModel } from '.';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 
+export const PROVIDER_INFO = {
+  key: 'deepseek',
+  displayName: 'Deepseek AI'
+};
+
 const deepseekChatModels: Record<string, string>[] = [
   {
     displayName: 'Deepseek Chat (Deepseek V3)',
diff --git a/src/lib/providers/gemini.ts b/src/lib/providers/gemini.ts
index 2a88015..6af9fb2 100644
--- a/src/lib/providers/gemini.ts
+++ b/src/lib/providers/gemini.ts
@@ -4,6 +4,11 @@ import {
 } from '@langchain/google-genai';
 import { getGeminiApiKey } from '../config';
 import { ChatModel, EmbeddingModel } from '.';
+
+export const PROVIDER_INFO = {
+  key: 'gemini',
+  displayName: 'Google Gemini'
+};
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { Embeddings } from '@langchain/core/embeddings';
 
diff --git a/src/lib/providers/groq.ts b/src/lib/providers/groq.ts
index 85c75f4..62481d4 100644
--- a/src/lib/providers/groq.ts
+++ b/src/lib/providers/groq.ts
@@ -1,6 +1,11 @@
 import { ChatOpenAI } from '@langchain/openai';
 import { getGroqApiKey } from '../config';
 import { ChatModel } from '.';
+
+export const PROVIDER_INFO = {
+  key: 'groq',
+  displayName: 'Groq'
+};
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 
 const groqChatModels: Record<string, string>[] = [
diff --git a/src/lib/providers/index.ts b/src/lib/providers/index.ts
index 0a4a6db..073bd61 100644
--- a/src/lib/providers/index.ts
+++ b/src/lib/providers/index.ts
@@ -1,19 +1,34 @@
 import { Embeddings } from '@langchain/core/embeddings';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { loadOpenAIChatModels, loadOpenAIEmbeddingModels } from './openai';
+import { loadOpenAIChatModels, loadOpenAIEmbeddingModels, PROVIDER_INFO as OpenAIInfo } from './openai';
 import {
   getCustomOpenaiApiKey,
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '../config';
 import { ChatOpenAI } from '@langchain/openai';
-import { loadOllamaChatModels, loadOllamaEmbeddingModels } from './ollama';
-import { loadGroqChatModels } from './groq';
-import { loadAnthropicChatModels } from './anthropic';
-import { loadGeminiChatModels, loadGeminiEmbeddingModels } from './gemini';
-import { loadTransformersEmbeddingsModels } from './transformers';
-import { loadDeepseekChatModels } from './deepseek';
-import { loadLMStudioChatModels, loadLMStudioEmbeddingsModels } from './lmstudio';
+import { loadOllamaChatModels, loadOllamaEmbeddingModels, PROVIDER_INFO as OllamaInfo } from './ollama';
+import { loadGroqChatModels, PROVIDER_INFO as GroqInfo } from './groq';
+import { loadAnthropicChatModels, PROVIDER_INFO as AnthropicInfo } from './anthropic';
+import { loadGeminiChatModels, loadGeminiEmbeddingModels, PROVIDER_INFO as GeminiInfo } from './gemini';
+import { loadTransformersEmbeddingsModels, PROVIDER_INFO as TransformersInfo } from './transformers';
+import { loadDeepseekChatModels, PROVIDER_INFO as DeepseekInfo } from './deepseek';
+import { loadLMStudioChatModels, loadLMStudioEmbeddingsModels, PROVIDER_INFO as LMStudioInfo } from './lmstudio';
+
+export const PROVIDER_METADATA = {
+  openai: OpenAIInfo,
+  ollama: OllamaInfo,
+  groq: GroqInfo,
+  anthropic: AnthropicInfo,
+  gemini: GeminiInfo,
+  transformers: TransformersInfo,
+  deepseek: DeepseekInfo,
+  lmstudio: LMStudioInfo,
+  custom_openai: {
+    key: 'custom_openai',
+    displayName: 'Custom OpenAI'
+  }
+};
 
 export interface ChatModel {
   displayName: string;
diff --git a/src/lib/providers/lmstudio.ts b/src/lib/providers/lmstudio.ts
index fd8eb75..f7be638 100644
--- a/src/lib/providers/lmstudio.ts
+++ b/src/lib/providers/lmstudio.ts
@@ -1,6 +1,11 @@
 import { getKeepAlive, getLMStudioApiEndpoint } from '../config';
 import axios from 'axios';
 import { ChatModel, EmbeddingModel } from '.';
+
+export const PROVIDER_INFO = {
+  key: 'lmstudio',
+  displayName: 'LM Studio'
+};
 import { ChatOpenAI } from '@langchain/openai';
 import { OpenAIEmbeddings } from '@langchain/openai';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
diff --git a/src/lib/providers/ollama.ts b/src/lib/providers/ollama.ts
index 92e98e4..beab58f 100644
--- a/src/lib/providers/ollama.ts
+++ b/src/lib/providers/ollama.ts
@@ -1,6 +1,11 @@
 import axios from 'axios';
 import { getKeepAlive, getOllamaApiEndpoint } from '../config';
 import { ChatModel, EmbeddingModel } from '.';
+
+export const PROVIDER_INFO = {
+  key: 'ollama',
+  displayName: 'Ollama'
+};
 import { ChatOllama } from '@langchain/community/chat_models/ollama';
 import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
 
diff --git a/src/lib/providers/openai.ts b/src/lib/providers/openai.ts
index 01bacc6..36f7e29 100644
--- a/src/lib/providers/openai.ts
+++ b/src/lib/providers/openai.ts
@@ -1,6 +1,11 @@
 import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
 import { getOpenaiApiKey } from '../config';
 import { ChatModel, EmbeddingModel } from '.';
+
+export const PROVIDER_INFO = {
+  key: 'openai',
+  displayName: 'OpenAI'
+};
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { Embeddings } from '@langchain/core/embeddings';
diff --git a/src/lib/providers/transformers.ts b/src/lib/providers/transformers.ts
index a06dd12..fd7cb9e 100644
--- a/src/lib/providers/transformers.ts
+++ b/src/lib/providers/transformers.ts
@@ -1,5 +1,10 @@
 import { HuggingFaceTransformersEmbeddings } from '../huggingfaceTransformer';
 
+export const PROVIDER_INFO = {
+  key: 'transformers',
+  displayName: 'Hugging Face'
+};
+
 export const loadTransformersEmbeddingsModels = async () => {
   try {
     const embeddingModels = {
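
Net effect for the settings UI: provider labels now come from the PROVIDER_METADATA map that each provider file feeds via its PROVIDER_INFO export, with capitalization of the raw key kept only as a fallback for providers missing from the map. A minimal standalone sketch of that lookup (the providerLabel helper name is hypothetical, not part of this diff):

    import { PROVIDER_METADATA } from '@/lib/providers';

    // Hypothetical helper mirroring the label logic in settings/page.tsx:
    // prefer the provider's registered displayName, otherwise capitalize
    // the raw provider key.
    const providerLabel = (provider: string): string =>
      (PROVIDER_METADATA as any)[provider]?.displayName ||
      provider.charAt(0).toUpperCase() + provider.slice(1);

    providerLabel('lmstudio'); // 'LM Studio' (from PROVIDER_METADATA)
    providerLabel('foo');      // 'Foo' (capitalization fallback)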