diff --git a/src/app/api/config/route.ts b/src/app/api/config/route.ts
index 0c11b23..f117cce 100644
--- a/src/app/api/config/route.ts
+++ b/src/app/api/config/route.ts
@@ -11,6 +11,7 @@ import {
   getAimlApiKey,
   getLMStudioApiEndpoint,
   updateConfig,
+  getOllamaApiKey,
 } from '@/lib/config';
 import {
   getAvailableChatModelProviders,
@@ -53,6 +54,7 @@ export const GET = async (req: Request) => {
 
     config['openaiApiKey'] = getOpenaiApiKey();
     config['ollamaApiUrl'] = getOllamaApiEndpoint();
+    config['ollamaApiKey'] = getOllamaApiKey();
     config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
     config['anthropicApiKey'] = getAnthropicApiKey();
     config['groqApiKey'] = getGroqApiKey();
@@ -93,6 +95,7 @@ export const POST = async (req: Request) => {
       },
       OLLAMA: {
         API_URL: config.ollamaApiUrl,
+        API_KEY: config.ollamaApiKey,
       },
       DEEPSEEK: {
         API_KEY: config.deepseekApiKey,
diff --git a/src/app/settings/page.tsx b/src/app/settings/page.tsx
index 1b13c9c..6fb8255 100644
--- a/src/app/settings/page.tsx
+++ b/src/app/settings/page.tsx
@@ -21,6 +21,7 @@ interface SettingsType {
   anthropicApiKey: string;
   geminiApiKey: string;
   ollamaApiUrl: string;
+  ollamaApiKey: string;
   lmStudioApiUrl: string;
   deepseekApiKey: string;
   aimlApiKey: string;
@@ -818,6 +819,25 @@ const Page = () => {
                 />
               </div>
 
+              <div className="flex flex-col space-y-1">
+                <p className="text-black/70 dark:text-white/70 text-sm">
+                  Ollama API Key (Can be left blank)
+                </p>
+                <Input
+                  type="text"
+                  placeholder="Ollama API Key"
+                  value={config.ollamaApiKey}
+                  isSaving={savingStates['ollamaApiKey']}
+                  onChange={(e) => {
+                    setConfig((prev) => ({
+                      ...prev!,
+                      ollamaApiKey: e.target.value,
+                    }));
+                  }}
+                  onSave={(value) => saveConfig('ollamaApiKey', value)}
+                />
+              </div>
+
               <div className="flex flex-col space-y-1">
                 <p className="text-black/70 dark:text-white/70 text-sm">
                   GROQ API Key
diff --git a/src/lib/config.ts b/src/lib/config.ts
index d885e13..79d69dc 100644
--- a/src/lib/config.ts
+++ b/src/lib/config.ts
@@ -31,6 +31,7 @@ interface Config {
   };
   OLLAMA: {
     API_URL: string;
+    API_KEY: string;
   };
   DEEPSEEK: {
     API_KEY: string;
@@ -86,6 +87,8 @@ export const getSearxngApiEndpoint = () =>
 
 export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
 
+export const getOllamaApiKey = () => loadConfig().MODELS.OLLAMA.API_KEY;
+
 export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
 
 export const getAimlApiKey = () => loadConfig().MODELS.AIMLAPI.API_KEY;
diff --git a/src/lib/providers/ollama.ts b/src/lib/providers/ollama.ts
index d5c7899..cb0b848 100644
--- a/src/lib/providers/ollama.ts
+++ b/src/lib/providers/ollama.ts
@@ -1,5 +1,5 @@
 import axios from 'axios';
-import { getKeepAlive, getOllamaApiEndpoint } from '../config';
+import { getKeepAlive, getOllamaApiEndpoint, getOllamaApiKey } from '../config';
 import { ChatModel, EmbeddingModel } from '.';
 
 export const PROVIDER_INFO = {
@@ -11,6 +11,7 @@ import { OllamaEmbeddings } from '@langchain/ollama';
 
 export const loadOllamaChatModels = async () => {
   const ollamaApiEndpoint = getOllamaApiEndpoint();
+  const ollamaApiKey = getOllamaApiKey();
 
   if (!ollamaApiEndpoint) return {};
 
@@ -33,6 +34,9 @@ export const loadOllamaChatModels = async () => {
           model: model.model,
           temperature: 0.7,
           keepAlive: getKeepAlive(),
+          ...(ollamaApiKey
+            ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
+            : {}),
         }),
       };
     });
@@ -46,6 +50,7 @@ export const loadOllamaChatModels = async () => {
 
 export const loadOllamaEmbeddingModels = async () => {
   const ollamaApiEndpoint = getOllamaApiEndpoint();
+  const ollamaApiKey = getOllamaApiKey();
 
   if (!ollamaApiEndpoint) return {};
 
@@ -66,6 +71,9 @@ export const loadOllamaEmbeddingModels = async () => {
         model: new OllamaEmbeddings({
           baseUrl: ollamaApiEndpoint,
           model: model.model,
+          ...(ollamaApiKey
+            ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
+            : {}),
         }),
      };
     });
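Note for reviewers: the conditional spread keeps the no-auth path exactly as it was, since an unset `API_KEY` loads as a falsy empty string and the spread then contributes `{}`. Below is a minimal sketch of the resulting client construction, assuming a recent `@langchain/ollama` that accepts a `headers` option (which this diff relies on) and a hypothetical Ollama instance behind a Bearer-auth reverse proxy; the endpoint, model name, and env var are stand-ins for illustration, not part of the change:

```ts
import { ChatOllama } from '@langchain/ollama';

// Stand-in for getOllamaApiKey(); an unset config key behaves like ''.
const ollamaApiKey = process.env.OLLAMA_API_KEY ?? '';

const chat = new ChatOllama({
  baseUrl: 'http://localhost:11434', // stand-in for getOllamaApiEndpoint()
  model: 'llama3.1', // any model pulled on the Ollama host
  keepAlive: '5m',
  // Key set   -> spreads { headers: { Authorization: 'Bearer <key>' } }
  // Key empty -> spreads {}, so plain local setups see no change
  ...(ollamaApiKey
    ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
    : {}),
});
```

The new key is read from `MODELS.OLLAMA.API_KEY` in the config file (see `src/lib/config.ts`) and, as the settings UI notes, can be left blank for local instances that are not behind an auth proxy.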