feat(ollama): add ability to provide api key

This commit is contained in:
ItzCrazyKns
2025-08-20 20:32:13 +05:30
parent 0b15bfbe32
commit f680188905
4 changed files with 35 additions and 1 deletions

View File

@@ -11,6 +11,7 @@ import {
   getAimlApiKey,
   getLMStudioApiEndpoint,
   updateConfig,
+  getOllamaApiKey,
 } from '@/lib/config';
 import {
   getAvailableChatModelProviders,
@@ -53,6 +54,7 @@ export const GET = async (req: Request) => {
   config['openaiApiKey'] = getOpenaiApiKey();
   config['ollamaApiUrl'] = getOllamaApiEndpoint();
+  config['ollamaApiKey'] = getOllamaApiKey();
   config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
   config['anthropicApiKey'] = getAnthropicApiKey();
   config['groqApiKey'] = getGroqApiKey();
@@ -93,6 +95,7 @@ export const POST = async (req: Request) => {
       },
       OLLAMA: {
         API_URL: config.ollamaApiUrl,
+        API_KEY: config.ollamaApiKey,
       },
       DEEPSEEK: {
         API_KEY: config.deepseekApiKey,

View File

@@ -21,6 +21,7 @@ interface SettingsType {
   anthropicApiKey: string;
   geminiApiKey: string;
   ollamaApiUrl: string;
+  ollamaApiKey: string;
   lmStudioApiUrl: string;
   deepseekApiKey: string;
   aimlApiKey: string;
@@ -818,6 +819,25 @@ const Page = () => {
               />
             </div>
+            <div className="flex flex-col space-y-1">
+              <p className="text-black/70 dark:text-white/70 text-sm">
+                Ollama API Key (Can be left blank)
+              </p>
+              <Input
+                type="text"
+                placeholder="Ollama API Key"
+                value={config.ollamaApiKey}
+                isSaving={savingStates['ollamaApiKey']}
+                onChange={(e) => {
+                  setConfig((prev) => ({
+                    ...prev!,
+                    ollamaApiKey: e.target.value,
+                  }));
+                }}
+                onSave={(value) => saveConfig('ollamaApiKey', value)}
+              />
+            </div>
             <div className="flex flex-col space-y-1">
               <p className="text-black/70 dark:text-white/70 text-sm">
                 GROQ API Key

View File

@@ -31,6 +31,7 @@ interface Config {
   };
   OLLAMA: {
     API_URL: string;
+    API_KEY: string;
   };
   DEEPSEEK: {
     API_KEY: string;
@@ -86,6 +87,8 @@ export const getSearxngApiEndpoint = () =>
 export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
+export const getOllamaApiKey = () => loadConfig().MODELS.OLLAMA.API_KEY;
 export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
 export const getAimlApiKey = () => loadConfig().MODELS.AIMLAPI.API_KEY;

View File

@@ -1,5 +1,5 @@
 import axios from 'axios';
-import { getKeepAlive, getOllamaApiEndpoint } from '../config';
+import { getKeepAlive, getOllamaApiEndpoint, getOllamaApiKey } from '../config';
 import { ChatModel, EmbeddingModel } from '.';

 export const PROVIDER_INFO = {
@@ -11,6 +11,7 @@ import { OllamaEmbeddings } from '@langchain/ollama';
 export const loadOllamaChatModels = async () => {
   const ollamaApiEndpoint = getOllamaApiEndpoint();
+  const ollamaApiKey = getOllamaApiKey();

   if (!ollamaApiEndpoint) return {};
@@ -33,6 +34,9 @@ export const loadOllamaChatModels = async () => {
             model: model.model,
             temperature: 0.7,
             keepAlive: getKeepAlive(),
+            ...(ollamaApiKey
+              ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
+              : {}),
           }),
         };
       });
@@ -46,6 +50,7 @@ export const loadOllamaChatModels = async () => {
 export const loadOllamaEmbeddingModels = async () => {
   const ollamaApiEndpoint = getOllamaApiEndpoint();
+  const ollamaApiKey = getOllamaApiKey();

   if (!ollamaApiEndpoint) return {};
@@ -66,6 +71,9 @@ export const loadOllamaEmbeddingModels = async () => {
         model: new OllamaEmbeddings({
           baseUrl: ollamaApiEndpoint,
           model: model.model,
+          ...(ollamaApiKey
+            ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
+            : {}),
         }),
       };
     });