Compare commits

...

4 Commits

Author         SHA1         Message                                          Date
Martin Chang   22387d65d6   Merge 5d37d66858 into 46541e6c0c                 2025-02-03 10:47:09 +01:00
ItzCrazyKns    46541e6c0c   feat(package): update markdown-to-jsx version    2025-02-02 14:31:18 +05:30
marty1885      5d37d66858   fix: add missing file                            2024-12-06 16:43:01 +08:00
marty1885      b0fba3d5c7   feat: support deepinfra as a service provider    2024-12-06 16:35:41 +08:00
8 changed files with 106 additions and 5 deletions

View File

@@ -8,6 +8,7 @@ OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef
 GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
 ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef
 GEMINI = "" # Gemini API key - sk-1234567890abcdef1234567890abcdef
+DEEPINFRA = "" # DeepInfra API key - 1234567890abcdef1234567890abcdef
 
 [API_ENDPOINTS]
 SEARXNG = "http://localhost:32768" # SearxNG API URL

View File

@@ -15,6 +15,7 @@ interface Config {
     GROQ: string;
     ANTHROPIC: string;
     GEMINI: string;
+    DEEPINFRA: string;
   };
   API_ENDPOINTS: {
     SEARXNG: string;
@@ -46,6 +47,8 @@ export const getAnthropicApiKey = () => loadConfig().API_KEYS.ANTHROPIC;
 export const getGeminiApiKey = () => loadConfig().API_KEYS.GEMINI;
+export const getDeepInfraApiKey = () => loadConfig().API_KEYS.DEEPINFRA;
+
 export const getSearxngApiEndpoint = () =>
   process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;
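
These getters all delegate to loadConfig(), whose body is not part of this compare. A minimal sketch of what it could look like is shown below; the config.toml path and the @iarna/toml parser are assumptions, not code taken from the repository:

// Hypothetical sketch only; the real loadConfig() is outside this diff.
import fs from 'fs';
import path from 'path';
import toml from '@iarna/toml';

interface Config {
  API_KEYS: { GROQ: string; ANTHROPIC: string; GEMINI: string; DEEPINFRA: string };
  API_ENDPOINTS: { SEARXNG: string };
}

// Assumed runtime location of the TOML file shown in the first hunk of this compare.
const configPath = path.join(process.cwd(), 'config.toml');

const loadConfig = (): Config =>
  toml.parse(fs.readFileSync(configPath, 'utf-8')) as unknown as Config;

export const getDeepInfraApiKey = () => loadConfig().API_KEYS.DEEPINFRA;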

View File

@@ -0,0 +1,76 @@
+import { DeepInfraEmbeddings } from '@langchain/community/embeddings/deepinfra';
+import { ChatDeepInfra } from '@langchain/community/chat_models/deepinfra';
+import { getDeepInfraApiKey } from '../../config';
+import logger from '../../utils/logger';
+
+export const loadDeepInfraChatModels = async () => {
+  const deepinfraApiKey = getDeepInfraApiKey();
+
+  if (!deepinfraApiKey) return {};
+
+  try {
+    const chatModels = {
+      'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo': {
+        displayName: 'LLaMA 3.1 70B Turbo',
+        model: new ChatDeepInfra({
+          model: 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo',
+          temperature: 0.7,
+          apiKey: deepinfraApiKey,
+        }),
+      },
+      'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo': {
+        displayName: 'LLaMA 3.1 8B Turbo',
+        model: new ChatDeepInfra({
+          model: 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo',
+          temperature: 0.7,
+          apiKey: deepinfraApiKey,
+        }),
+      },
+      'meta-llama/Meta-Llama-3.1-70B-Instruct': {
+        displayName: 'LLaMA 3.1 70B',
+        model: new ChatDeepInfra({
+          model: 'meta-llama/Meta-Llama-3.1-70B-Instruct',
+          temperature: 0.7,
+          apiKey: deepinfraApiKey,
+        }),
+      },
+      'meta-llama/Meta-Llama-3.1-8B-Instruct': {
+        displayName: 'LLaMA 3.1 8B',
+        model: new ChatDeepInfra({
+          model: 'meta-llama/Meta-Llama-3.1-8B-Instruct',
+          temperature: 0.7,
+          apiKey: deepinfraApiKey,
+        }),
+      },
+    };
+
+    return chatModels;
+  } catch (err) {
+    logger.error(`Error loading DeepInfra models: ${err}`);
+    return {};
+  }
+};
+
+export const loadDeepInfraEmbeddingsModels = async () => {
+  const deepinfraApiKey = getDeepInfraApiKey();
+
+  if (!deepinfraApiKey) return {};
+
+  try {
+    const embeddingModels = {
+      'BAAI/bge-m3': {
+        displayName: 'BAAI/bge-m3',
+        model: new DeepInfraEmbeddings({
+          apiToken: deepinfraApiKey,
+          modelName: 'BAAI/bge-m3',
+        }),
+      },
+    };
+
+    return embeddingModels;
+  } catch (err) {
+    logger.error(`Error loading DeepInfra embeddings model: ${err}`);
+    return {};
+  }
+};
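
For reference, a minimal consumer of the loaders added above. The { displayName, model } registry shape comes from the file itself; the model selection and invoke() call are a hypothetical sketch, not part of this compare:

// Hypothetical usage sketch; not part of this PR.
import { loadDeepInfraChatModels } from './deepinfra';

const demo = async () => {
  const chatModels = await loadDeepInfraChatModels();

  // Each entry maps a DeepInfra model id to { displayName, model }.
  const entry = chatModels['meta-llama/Meta-Llama-3.1-8B-Instruct'];
  if (!entry) {
    console.log('DeepInfra is not configured (no DEEPINFRA key), loader returned {}.');
    return;
  }

  // ChatDeepInfra implements the standard LangChain chat model interface.
  const response = await entry.model.invoke('Say hello in one short sentence.');
  console.log(`${entry.displayName}: ${response.content}`);
};

demo();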

View File

@@ -4,6 +4,7 @@ import { loadOpenAIChatModels, loadOpenAIEmbeddingsModels } from './openai';
 import { loadAnthropicChatModels } from './anthropic';
 import { loadTransformersEmbeddingsModels } from './transformers';
 import { loadGeminiChatModels, loadGeminiEmbeddingsModels } from './gemini';
+import { loadDeepInfraChatModels, loadDeepInfraEmbeddingsModels } from './deepinfra';
 
 const chatModelProviders = {
   openai: loadOpenAIChatModels,
@@ -11,6 +12,7 @@ const chatModelProviders = {
   ollama: loadOllamaChatModels,
   anthropic: loadAnthropicChatModels,
   gemini: loadGeminiChatModels,
+  deepinfra: loadDeepInfraChatModels,
 };
 
 const embeddingModelProviders = {
@@ -18,6 +20,7 @@ const embeddingModelProviders = {
   local: loadTransformersEmbeddingsModels,
   ollama: loadOllamaEmbeddingsModels,
   gemini: loadGeminiEmbeddingsModels,
+  deepinfra: loadDeepInfraEmbeddingsModels,
 };
 
 export const getAvailableChatModelProviders = async () => {
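
The body of getAvailableChatModelProviders is not included in this compare. The sketch below shows how such a registry is typically walked, under the assumption that each loader resolves to a map of models and returns {} when its API key is absent (as the DeepInfra loaders above do):

// Hypothetical aggregation sketch; the real implementation is outside this diff.
import { loadDeepInfraChatModels } from './deepinfra';

type ModelRecord = Record<string, { displayName: string; model: unknown }>;
type ProviderLoader = () => Promise<ModelRecord>;

const chatModelProviders: Record<string, ProviderLoader> = {
  deepinfra: loadDeepInfraChatModels,
  // ...the other providers registered above
};

export const getAvailableChatModelProviders = async () => {
  const models: Record<string, ModelRecord> = {};

  for (const [provider, loader] of Object.entries(chatModelProviders)) {
    const providerModels = await loader();

    // Providers whose API key is not configured return {} and are skipped.
    if (Object.keys(providerModels).length > 0) {
      models[provider] = providerModels;
    }
  }

  return models;
};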

View File

@@ -71,6 +71,7 @@ router.post('/', async (req, res) => {
       GROQ: config.groqApiKey,
       ANTHROPIC: config.anthropicApiKey,
       GEMINI: config.geminiApiKey,
+      DEEPINFRA: config.deepinfraApiKey,
     },
     API_ENDPOINTS: {
       OLLAMA: config.ollamaApiUrl,

View File

@@ -64,6 +64,7 @@ interface SettingsType {
   groqApiKey: string;
   anthropicApiKey: string;
   geminiApiKey: string;
+  deepinfraApiKey: string;
   ollamaApiUrl: string;
 }
@@ -493,6 +494,22 @@ const SettingsDialog = ({
                   }
                 />
               </div>
+              <div className="flex flex-col space-y-1">
+                <p className="text-black/70 dark:text-white/70 text-sm">
+                  DeepInfra API Key
+                </p>
+                <Input
+                  type="text"
+                  placeholder="DeepInfra API key"
+                  defaultValue={config.deepinfraApiKey}
+                  onChange={(e) =>
+                    setConfig({
+                      ...config,
+                      deepinfraApiKey: e.target.value,
+                    })
+                  }
+                />
+              </div>
             </div>
           )}
           {isLoading && (

View File

@@ -18,7 +18,7 @@
     "clsx": "^2.1.0",
     "langchain": "^0.1.30",
     "lucide-react": "^0.363.0",
-    "markdown-to-jsx": "^7.6.2",
+    "markdown-to-jsx": "^7.7.2",
     "next": "14.1.4",
     "next-themes": "^0.3.0",
     "react": "^18",

View File

@@ -2210,10 +2210,10 @@ lucide-react@^0.363.0:
   resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.363.0.tgz#2bb1f9d09b830dda86f5118fcd097f87247fe0e3"
   integrity sha512-AlsfPCsXQyQx7wwsIgzcKOL9LwC498LIMAo+c0Es5PkHJa33xwmYAkkSoKoJWWWSYQEStqu58/jT4tL2gi32uQ==
 
-markdown-to-jsx@^7.6.2:
-  version "7.6.2"
-  resolved "https://registry.yarnpkg.com/markdown-to-jsx/-/markdown-to-jsx-7.6.2.tgz#254cbf7d412a37073486c0a2dd52266d2191a793"
-  integrity sha512-gEcyiJXzBxmId2Y/kydLbD6KRNccDiUy/Src1cFGn3s2X0LZZ/hUiEc2VisFyA5kUE3SXclTCczjQiAuqKZiFQ==
+markdown-to-jsx@^7.7.2:
+  version "7.7.2"
+  resolved "https://registry.yarnpkg.com/markdown-to-jsx/-/markdown-to-jsx-7.7.2.tgz#59c1dd64f48b53719311ab140be3cd51cdabccd3"
+  integrity sha512-N3AKfYRvxNscvcIH6HDnDKILp4S8UWbebp+s92Y8SwIq0CuSbLW4Jgmrbjku3CWKjTQO0OyIMS6AhzqrwjEa3g==
 
 md5@^2.3.0:
   version "2.3.0"