feat(ollama): add ability to provide api key

ItzCrazyKns
2025-08-20 20:32:13 +05:30
parent 0b15bfbe32
commit f680188905
4 changed files with 35 additions and 1 deletion

View File

@@ -11,6 +11,7 @@ import {
   getAimlApiKey,
   getLMStudioApiEndpoint,
   updateConfig,
+  getOllamaApiKey,
 } from '@/lib/config';
 import {
   getAvailableChatModelProviders,
@@ -53,6 +54,7 @@ export const GET = async (req: Request) => {
     config['openaiApiKey'] = getOpenaiApiKey();
     config['ollamaApiUrl'] = getOllamaApiEndpoint();
+    config['ollamaApiKey'] = getOllamaApiKey();
     config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
     config['anthropicApiKey'] = getAnthropicApiKey();
     config['groqApiKey'] = getGroqApiKey();
@@ -93,6 +95,7 @@ export const POST = async (req: Request) => {
       },
       OLLAMA: {
         API_URL: config.ollamaApiUrl,
+        API_KEY: config.ollamaApiKey,
       },
       DEEPSEEK: {
         API_KEY: config.deepseekApiKey,
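
For context (not part of the commit), a hedged sketch of how a client might round-trip the new field through the handlers above; the mount path (/api/config) and the flat request body are assumptions, since neither the file path nor the full handlers appear in this view.

// Hypothetical client helper; '/api/config' and the merged-body POST are assumptions.
async function updateOllamaApiKey(apiKey: string): Promise<void> {
  // Read the current settings exposed by the GET handler (now including ollamaApiKey).
  const current = await fetch('/api/config').then((res) => res.json());

  // Write them back with the new key; the POST handler maps ollamaApiKey to MODELS.OLLAMA.API_KEY.
  await fetch('/api/config', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ ...current, ollamaApiKey: apiKey }),
  });
}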

View File

@@ -21,6 +21,7 @@ interface SettingsType {
   anthropicApiKey: string;
   geminiApiKey: string;
   ollamaApiUrl: string;
+  ollamaApiKey: string;
   lmStudioApiUrl: string;
   deepseekApiKey: string;
   aimlApiKey: string;
@@ -818,6 +819,25 @@ const Page = () => {
               />
             </div>
+            <div className="flex flex-col space-y-1">
+              <p className="text-black/70 dark:text-white/70 text-sm">
+                Ollama API Key (Can be left blank)
+              </p>
+              <Input
+                type="text"
+                placeholder="Ollama API Key"
+                value={config.ollamaApiKey}
+                isSaving={savingStates['ollamaApiKey']}
+                onChange={(e) => {
+                  setConfig((prev) => ({
+                    ...prev!,
+                    ollamaApiKey: e.target.value,
+                  }));
+                }}
+                onSave={(value) => saveConfig('ollamaApiKey', value)}
+              />
+            </div>
             <div className="flex flex-col space-y-1">
               <p className="text-black/70 dark:text-white/70 text-sm">
                 GROQ API Key

View File

@@ -31,6 +31,7 @@ interface Config {
   };
   OLLAMA: {
     API_URL: string;
+    API_KEY: string;
   };
   DEEPSEEK: {
     API_KEY: string;
@@ -86,6 +87,8 @@ export const getSearxngApiEndpoint = () =>
 export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
+export const getOllamaApiKey = () => loadConfig().MODELS.OLLAMA.API_KEY;
 export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
 export const getAimlApiKey = () => loadConfig().MODELS.AIMLAPI.API_KEY;
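
A minimal sketch of the parsed config object the new getter expects. Only MODELS.OLLAMA.API_KEY is confirmed by the hunk above; loadConfig() itself and the placeholder values are assumptions.

// Illustrative only: the shape loadConfig() would need to return for the new getter.
const exampleConfig = {
  MODELS: {
    OLLAMA: {
      API_URL: 'http://localhost:11434', // assumed default Ollama endpoint
      API_KEY: '', // empty string when no key has been configured
    },
  },
};

// Same access pattern as the getter added above; an empty key is falsy,
// which is what the provider below relies on to skip the Authorization header.
const ollamaApiKey = exampleConfig.MODELS.OLLAMA.API_KEY;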

View File

@@ -1,5 +1,5 @@
 import axios from 'axios';
-import { getKeepAlive, getOllamaApiEndpoint } from '../config';
+import { getKeepAlive, getOllamaApiEndpoint, getOllamaApiKey } from '../config';
 import { ChatModel, EmbeddingModel } from '.';
 export const PROVIDER_INFO = {
@@ -11,6 +11,7 @@ import { OllamaEmbeddings } from '@langchain/ollama';
 export const loadOllamaChatModels = async () => {
   const ollamaApiEndpoint = getOllamaApiEndpoint();
+  const ollamaApiKey = getOllamaApiKey();
   if (!ollamaApiEndpoint) return {};
@@ -33,6 +34,9 @@ export const loadOllamaChatModels = async () => {
         model: model.model,
         temperature: 0.7,
         keepAlive: getKeepAlive(),
+        ...(ollamaApiKey
+          ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
+          : {}),
       }),
     };
   });
@@ -46,6 +50,7 @@ export const loadOllamaChatModels = async () => {
 export const loadOllamaEmbeddingModels = async () => {
   const ollamaApiEndpoint = getOllamaApiEndpoint();
+  const ollamaApiKey = getOllamaApiKey();
   if (!ollamaApiEndpoint) return {};
@@ -66,6 +71,9 @@ export const loadOllamaEmbeddingModels = async () => {
       model: new OllamaEmbeddings({
         baseUrl: ollamaApiEndpoint,
         model: model.model,
+        ...(ollamaApiKey
+          ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
+          : {}),
       }),
     };
   });
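
Both loaders share the same conditional spread; a standalone sketch of the pattern, reusing the OllamaEmbeddings options shown above (the wrapper function and placeholder values are illustrative, not part of the commit):

import { OllamaEmbeddings } from '@langchain/ollama';

// Attach an Authorization header only when a key is configured, so a blank
// key leaves requests to a local, unauthenticated Ollama instance untouched.
const buildEmbeddings = (baseUrl: string, model: string, apiKey?: string) =>
  new OllamaEmbeddings({
    baseUrl,
    model,
    ...(apiKey ? { headers: { Authorization: `Bearer ${apiKey}` } } : {}),
  });

// Example usage (placeholder values):
// const embeddings = buildEmbeddings('http://localhost:11434', 'nomic-embed-text', getOllamaApiKey());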