Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-11-03 20:28:14 +00:00)

Merge branch 'feat/deepseek-provider'
@@ -22,5 +22,8 @@ MODEL_NAME = ""
 [MODELS.OLLAMA]
 API_URL = "" # Ollama API URL - http://host.docker.internal:11434
 
+[MODELS.DEEPSEEK]
+API_KEY = ""
+
 [API_ENDPOINTS]
 SEARXNG = "" # SearxNG API URL - http://localhost:32768
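The new [MODELS.DEEPSEEK] table mirrors the other provider sections in the sample config: a single API_KEY that stays empty until the user fills it in. As a rough illustration only (not the project's loader code), the parsed MODELS table would take a shape like the following; the types and values here are stand-ins.

// Illustrative sketch: the shape the MODELS table takes once the sample config
// is parsed. Loader and values are stand-ins, not part of this commit.
interface ModelsSection {
  OLLAMA: { API_URL: string };
  DEEPSEEK: { API_KEY: string };
}

const models: ModelsSection = {
  OLLAMA: { API_URL: 'http://host.docker.internal:11434' },
  DEEPSEEK: { API_KEY: '' }, // left empty -> the Deepseek provider stays disabled
};

if (!models.DEEPSEEK.API_KEY) {
  console.log('No Deepseek key configured; provider will be skipped.');
}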
@@ -7,6 +7,7 @@ import {
   getGroqApiKey,
   getOllamaApiEndpoint,
   getOpenaiApiKey,
+  getDeepseekApiKey,
   updateConfig,
 } from '@/lib/config';
 import {
@@ -53,6 +54,7 @@ export const GET = async (req: Request) => {
     config['anthropicApiKey'] = getAnthropicApiKey();
     config['groqApiKey'] = getGroqApiKey();
     config['geminiApiKey'] = getGeminiApiKey();
+    config['deepseekApiKey'] = getDeepseekApiKey();
     config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
     config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
     config['customOpenaiModelName'] = getCustomOpenaiModelName();
@@ -88,6 +90,9 @@ export const POST = async (req: Request) => {
         OLLAMA: {
           API_URL: config.ollamaApiUrl,
         },
+        DEEPSEEK: {
+          API_KEY: config.deepseekApiKey,
+        },
         CUSTOM_OPENAI: {
           API_URL: config.customOpenaiApiUrl,
           API_KEY: config.customOpenaiApiKey,
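Taken together, these hunks wire the new key through the config API route: the GET handler exposes it as deepseekApiKey, and the POST handler writes it back under MODELS.DEEPSEEK.API_KEY. Below is a hedged client-side sketch of that round trip; the '/api/config' path and the partial POST body are assumptions for illustration (the real handler may expect the full settings payload).

// Hedged sketch of the client round trip, not part of the commit.
type ConfigResponse = { deepseekApiKey?: string } & Record<string, unknown>;

export async function readDeepseekKey(): Promise<string | undefined> {
  const res = await fetch('/api/config');
  const config = (await res.json()) as ConfigResponse;
  return config.deepseekApiKey; // filled by getDeepseekApiKey() in the GET handler
}

export async function saveDeepseekKey(key: string): Promise<void> {
  // Assumed partial update; the POST handler maps deepseekApiKey onto
  // MODELS.DEEPSEEK.API_KEY before persisting the config.
  await fetch('/api/config', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ deepseekApiKey: key }),
  });
}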
@@ -20,6 +20,7 @@ interface SettingsType {
   anthropicApiKey: string;
   geminiApiKey: string;
   ollamaApiUrl: string;
+  deepseekApiKey: string;
   customOpenaiApiKey: string;
   customOpenaiApiUrl: string;
   customOpenaiModelName: string;
@@ -838,6 +839,25 @@ const Page = () => {
                     onSave={(value) => saveConfig('geminiApiKey', value)}
                   />
                 </div>
+
+                <div className="flex flex-col space-y-1">
+                  <p className="text-black/70 dark:text-white/70 text-sm">
+                    Deepseek API Key
+                  </p>
+                  <Input
+                    type="text"
+                    placeholder="Deepseek API Key"
+                    value={config.deepseekApiKey}
+                    isSaving={savingStates['deepseekApiKey']}
+                    onChange={(e) => {
+                      setConfig((prev) => ({
+                        ...prev!,
+                        deepseekApiKey: e.target.value,
+                      }));
+                    }}
+                    onSave={(value) => saveConfig('deepseekApiKey', value)}
+                  />
+                </div>
               </div>
             </SettingsSection>
           </div>
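The new settings field follows the same controlled-input pattern as the existing keys: onChange updates local state immediately by spreading the previous settings object, and onSave pushes the single field through saveConfig. A stripped-down sketch of that pattern (illustrative names, no JSX) is below.

// Minimal sketch of the update pattern used by the new field; the type and the
// save function are simplified stand-ins for the page's real ones.
interface SettingsLike {
  deepseekApiKey: string;
  geminiApiKey: string;
}

// onChange: keep every other field, override only the one being edited.
const withDeepseekKey = (prev: SettingsLike, value: string): SettingsLike => ({
  ...prev,
  deepseekApiKey: value,
});

// onSave: persist a single key/value pair (illustrative signature only).
const saveConfigField = async (key: keyof SettingsLike, value: string) => {
  console.log(`saving ${key}=${value ? '<redacted>' : '<empty>'}`);
};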
@@ -25,6 +25,9 @@ interface Config {
     OLLAMA: {
       API_URL: string;
     };
+    DEEPSEEK: {
+      API_KEY: string;
+    };
     CUSTOM_OPENAI: {
       API_URL: string;
       API_KEY: string;
@@ -63,6 +66,8 @@ export const getSearxngApiEndpoint = () =>
 
 export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
 
+export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
+
 export const getCustomOpenaiApiKey = () =>
   loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;
 
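getDeepseekApiKey follows the same one-liner getter convention as the other providers: every secret is read through loadConfig(), so the TOML file stays the single source of truth. A small hedged sketch of how a caller might gate provider setup on that getter (the helper name is illustrative, not part of the diff):

// Illustrative helper, not part of the commit: treat an empty string from
// getDeepseekApiKey() as "provider not configured".
import { getDeepseekApiKey } from '@/lib/config';

export const isDeepseekConfigured = (): boolean =>
  getDeepseekApiKey().trim().length > 0;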
							
								
								
									
src/lib/providers/deepseek.ts (new file, 44 lines)
@@ -0,0 +1,44 @@
+import { ChatOpenAI } from '@langchain/openai';
+import { getDeepseekApiKey } from '../config';
+import { ChatModel } from '.';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+
+const deepseekChatModels: Record<string, string>[] = [
+  {
+    displayName: 'Deepseek Chat (Deepseek V3)',
+    key: 'deepseek-chat',
+  },
+  {
+    displayName: 'Deepseek Reasoner (Deepseek R1)',
+    key: 'deepseek-reasoner',
+  },
+];
+
+export const loadDeepseekChatModels = async () => {
+  const deepseekApiKey = getDeepseekApiKey();
+
+  if (!deepseekApiKey) return {};
+
+  try {
+    const chatModels: Record<string, ChatModel> = {};
+
+    deepseekChatModels.forEach((model) => {
+      chatModels[model.key] = {
+        displayName: model.displayName,
+        model: new ChatOpenAI({
+          openAIApiKey: deepseekApiKey,
+          modelName: model.key,
+          temperature: 0.7,
+          configuration: {
+            baseURL: 'https://api.deepseek.com',
+          },
+        }) as unknown as BaseChatModel,
+      };
+    });
+
+    return chatModels;
+  } catch (err) {
+    console.error(`Error loading Deepseek models: ${err}`);
+    return {};
+  }
+};
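The provider file only registers models when a key is configured, and it reuses ChatOpenAI pointed at Deepseek's OpenAI-compatible endpoint. A hedged usage sketch follows (a throwaway script, not part of the commit); it assumes MODELS.DEEPSEEK.API_KEY has been set and sits next to the new module so the relative import resolves.

// Hedged usage sketch, not part of the commit. The 'deepseek-chat' key comes
// from deepseekChatModels above; nothing runs if no API key is configured.
import { loadDeepseekChatModels } from './deepseek';

const main = async () => {
  const models = await loadDeepseekChatModels();
  const chat = models['deepseek-chat'];

  if (!chat) {
    console.log('Deepseek is not configured; no models were registered.');
    return;
  }

  // BaseChatModel.invoke accepts a plain string prompt in LangChain JS.
  const reply = await chat.model.invoke('Reply with a single short sentence.');
  console.log(`${chat.displayName}:`, reply.content);
};

main().catch(console.error);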
@@ -12,6 +12,7 @@ import { loadGroqChatModels } from './groq';
 import { loadAnthropicChatModels } from './anthropic';
 import { loadGeminiChatModels, loadGeminiEmbeddingModels } from './gemini';
 import { loadTransformersEmbeddingsModels } from './transformers';
+import { loadDeepseekChatModels } from './deepseek';
 
 export interface ChatModel {
   displayName: string;
@@ -32,6 +33,7 @@ export const chatModelProviders: Record<
   groq: loadGroqChatModels,
   anthropic: loadAnthropicChatModels,
   gemini: loadGeminiChatModels,
+  deepseek: loadDeepseekChatModels,
 };
 
 export const embeddingModelProviders: Record<
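With the registry entry in place, anything that iterates chatModelProviders picks up Deepseek automatically. A hedged sketch of that consumption pattern is below; the aggregator function and the assumed loader return type are illustrations, not taken from the diff.

// Hedged sketch, not project code: walk every registered provider loader and
// keep only the providers that returned at least one configured model.
import { chatModelProviders, type ChatModel } from './index';

export const collectChatModels = async (): Promise<
  Record<string, Record<string, ChatModel>>
> => {
  const available: Record<string, Record<string, ChatModel>> = {};

  for (const [provider, loadModels] of Object.entries(chatModelProviders)) {
    const models = await loadModels();
    if (Object.keys(models).length > 0) {
      available[provider] = models; // e.g. available.deepseek['deepseek-chat']
    }
  }

  return available;
};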