diff --git a/sample.config.toml b/sample.config.toml
index 691b964..980e99d 100644
--- a/sample.config.toml
+++ b/sample.config.toml
@@ -22,5 +22,8 @@ MODEL_NAME = ""
[MODELS.OLLAMA]
API_URL = "" # Ollama API URL - http://host.docker.internal:11434
+[MODELS.DEEPSEEK]
+API_KEY = "" # Deepseek API key - https://platform.deepseek.com
+
[API_ENDPOINTS]
SEARXNG = "" # SearxNG API URL - http://localhost:32768
\ No newline at end of file
diff --git a/src/app/api/config/route.ts b/src/app/api/config/route.ts
index 871bb21..39c1f84 100644
--- a/src/app/api/config/route.ts
+++ b/src/app/api/config/route.ts
@@ -7,6 +7,7 @@ import {
getGroqApiKey,
getOllamaApiEndpoint,
getOpenaiApiKey,
+ getDeepseekApiKey,
updateConfig,
} from '@/lib/config';
import {
@@ -53,6 +54,7 @@ export const GET = async (req: Request) => {
config['anthropicApiKey'] = getAnthropicApiKey();
config['groqApiKey'] = getGroqApiKey();
config['geminiApiKey'] = getGeminiApiKey();
+ config['deepseekApiKey'] = getDeepseekApiKey();
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
config['customOpenaiModelName'] = getCustomOpenaiModelName();
@@ -88,6 +90,9 @@ export const POST = async (req: Request) => {
OLLAMA: {
API_URL: config.ollamaApiUrl,
},
+ DEEPSEEK: {
+ API_KEY: config.deepseekApiKey,
+ },
CUSTOM_OPENAI: {
API_URL: config.customOpenaiApiUrl,
API_KEY: config.customOpenaiApiKey,
diff --git a/src/app/settings/page.tsx b/src/app/settings/page.tsx
index 8e1c45a..8eee9a4 100644
--- a/src/app/settings/page.tsx
+++ b/src/app/settings/page.tsx
@@ -20,6 +20,7 @@ interface SettingsType {
anthropicApiKey: string;
geminiApiKey: string;
ollamaApiUrl: string;
+ deepseekApiKey: string;
customOpenaiApiKey: string;
customOpenaiApiUrl: string;
customOpenaiModelName: string;
@@ -838,6 +839,25 @@ const Page = () => {
onSave={(value) => saveConfig('geminiApiKey', value)}
/>
            </div>
+
+            <div className="flex flex-col space-y-1">
+              <p className="text-black/70 dark:text-white/70 text-sm">
+                Deepseek API Key
+              </p>
+              <Input
+                type="text"
+                placeholder="Deepseek API Key"
+                value={config.deepseekApiKey}
+                isSaving={savingStates['deepseekApiKey']}
+                onChange={(e) => {
+                  setConfig((prev) => ({
+                    ...prev!,
+                    deepseekApiKey: e.target.value,
+                  }));
+                }}
+                onSave={(value) => saveConfig('deepseekApiKey', value)}
+              />
+            </div>
          </div>
        </SettingsSection>
diff --git a/src/lib/config.ts b/src/lib/config.ts
index ef99eed..2831214 100644
--- a/src/lib/config.ts
+++ b/src/lib/config.ts
@@ -25,6 +25,9 @@ interface Config {
OLLAMA: {
API_URL: string;
};
+ DEEPSEEK: {
+ API_KEY: string;
+ };
CUSTOM_OPENAI: {
API_URL: string;
API_KEY: string;
@@ -63,6 +66,8 @@ export const getSearxngApiEndpoint = () =>
  loadConfig().API_ENDPOINTS.SEARXNG;

export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;

+export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
+
export const getCustomOpenaiApiKey = () =>
loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;
diff --git a/src/lib/providers/deepseek.ts b/src/lib/providers/deepseek.ts
new file mode 100644
index 0000000..88f02ec
--- /dev/null
+++ b/src/lib/providers/deepseek.ts
@@ -0,0 +1,51 @@
+import { ChatOpenAI } from '@langchain/openai';
+import { getDeepseekApiKey } from '../config';
+import { ChatModel } from '.';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+
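+// Model catalogue surfaced to the UI: `key` is the model id Deepseek's API
+// expects, `displayName` is the human-readable label.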
+const deepseekChatModels: Record<string, string>[] = [
+ {
+ displayName: 'Deepseek Chat (Deepseek V3)',
+ key: 'deepseek-chat',
+ },
+ {
+ displayName: 'Deepseek Reasoner (Deepseek R1)',
+ key: 'deepseek-reasoner',
+ },
+];
+
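+// Builds this provider's model map for the registry in providers/index.ts.
+// Returns an empty record when no API key is configured, so Deepseek is
+// simply omitted from the available providers.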
+export const loadDeepseekChatModels = async () => {
+ const deepseekApiKey = getDeepseekApiKey();
+
+ if (!deepseekApiKey) return {};
+
+ try {
+    const chatModels: Record<string, ChatModel> = {};
+
+ deepseekChatModels.forEach((model) => {
+ chatModels[model.key] = {
+ displayName: model.displayName,
+ model: new ChatOpenAI({
+ openAIApiKey: deepseekApiKey,
+ modelName: model.key,
+ temperature: 0.7,
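+          // Deepseek's API is OpenAI-compatible, so the stock ChatOpenAI
+          // client works once its base URL points at Deepseek.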
+ configuration: {
+ baseURL: 'https://api.deepseek.com',
+ },
+ }) as unknown as BaseChatModel,
+ };
+ });
+
+ return chatModels;
+ } catch (err) {
+ console.error(`Error loading Deepseek models: ${err}`);
+ return {};
+ }
+};
diff --git a/src/lib/providers/index.ts b/src/lib/providers/index.ts
index c32d0fa..eef212f 100644
--- a/src/lib/providers/index.ts
+++ b/src/lib/providers/index.ts
@@ -12,6 +12,7 @@ import { loadGroqChatModels } from './groq';
import { loadAnthropicChatModels } from './anthropic';
import { loadGeminiChatModels, loadGeminiEmbeddingModels } from './gemini';
import { loadTransformersEmbeddingsModels } from './transformers';
+import { loadDeepseekChatModels } from './deepseek';

export interface ChatModel {
displayName: string;
@@ -32,6 +33,7 @@ export const chatModelProviders: Record<
groq: loadGroqChatModels,
anthropic: loadAnthropicChatModels,
gemini: loadGeminiChatModels,
+ deepseek: loadDeepseekChatModels,
};

export const embeddingModelProviders: Record<