Add the ability to configure Ollama chat options and embeddings params via the config.toml file

This commit is contained in:
Owyn Richen
2025-02-05 13:33:17 -08:00
parent 46541e6c0c
commit ad0826111b
8 changed files with 1052 additions and 1388 deletions
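For context, a minimal sketch of what the new config.toml entries could look like, assuming the loader maps top-level TOML tables directly onto the Config fields added below. The table names come from the diff; the individual option keys and values are illustrative and depend on what ChatOllama and OllamaEmbeddings actually accept:

[OLLAMA_CHAT_OPTIONS]
# illustrative keys, spread into the ChatOllama constructor
numCtx = 8192
numPredict = 1024

[OLLAMA_EMBEDDINGS_PARAMS]
# illustrative key, spread into the OllamaEmbeddings constructor
keepAlive = "5m"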


@@ -20,6 +20,8 @@ interface Config {
SEARXNG: string;
OLLAMA: string;
};
OLLAMA_CHAT_OPTIONS: {};
OLLAMA_EMBEDDINGS_PARAMS: {};
}
type RecursivePartial<T> = {
@@ -51,6 +53,10 @@ export const getSearxngApiEndpoint = () =>
export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
export const getOllamaChatOptions = () => loadConfig().OLLAMA_CHAT_OPTIONS;
export const getOllamaEmbeddingsParams = () =>
loadConfig().OLLAMA_EMBEDDINGS_PARAMS;
export const updateConfig = (config: RecursivePartial<Config>) => {
const currentConfig = loadConfig();


@@ -1,11 +1,17 @@
import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
import { getKeepAlive, getOllamaApiEndpoint } from '../../config';
import {
getKeepAlive,
getOllamaApiEndpoint,
getOllamaChatOptions,
getOllamaEmbeddingsParams,
} from '../../config';
import logger from '../../utils/logger';
import { ChatOllama } from '@langchain/community/chat_models/ollama';
import axios from 'axios';
export const loadOllamaChatModels = async () => {
const ollamaEndpoint = getOllamaApiEndpoint();
const ollamaChatOptions = getOllamaChatOptions();
const keepAlive = getKeepAlive();
if (!ollamaEndpoint) return {};
@@ -23,10 +29,14 @@ export const loadOllamaChatModels = async () => {
acc[model.model] = {
displayName: model.name,
model: new ChatOllama({
baseUrl: ollamaEndpoint,
model: model.model,
temperature: 0.7,
keepAlive: keepAlive,
...ollamaChatOptions, // merge the options specified via config
...{
// values defined here take precedence over the config-supplied options
baseUrl: ollamaEndpoint,
model: model.model,
temperature: 0.7,
keepAlive: keepAlive,
},
}),
};
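The merge above relies on object-spread ordering: later properties win, so the hard-coded baseUrl, model, temperature, and keepAlive cannot be overridden by anything in OLLAMA_CHAT_OPTIONS, and spreading an undefined options object is a harmless no-op when the table is absent from config.toml. A standalone sketch of that behavior (names and values are illustrative, not project code); the same pattern is reused for OllamaEmbeddings below:

// Illustration only: object-spread merging as used above.
const fromConfig = {
  temperature: 0.2, // will be overridden below
  numCtx: 8192,     // survives the merge
};

const merged = {
  ...fromConfig, // config-supplied options first
  ...{
    // values listed last take precedence
    baseUrl: 'http://localhost:11434',
    model: 'llama3',
    temperature: 0.7,
  },
};

console.log(merged);
// => { temperature: 0.7, numCtx: 8192, baseUrl: 'http://localhost:11434', model: 'llama3' }

const noOptions = undefined;
const safe = { ...noOptions, model: 'llama3' }; // spreading undefined adds nothing

Note that because temperature sits inside the precedence object, it stays pinned at 0.7 regardless of what OLLAMA_CHAT_OPTIONS specifies.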
@@ -42,6 +52,7 @@ export const loadOllamaChatModels = async () => {
export const loadOllamaEmbeddingsModels = async () => {
const ollamaEndpoint = getOllamaApiEndpoint();
const ollamaEmbeddingParams = getOllamaEmbeddingsParams();
if (!ollamaEndpoint) return {};
@@ -58,8 +69,12 @@ export const loadOllamaEmbeddingsModels = async () => {
acc[model.model] = {
displayName: model.name,
model: new OllamaEmbeddings({
baseUrl: ollamaEndpoint,
model: model.model,
...ollamaEmbeddingParams, // merge the options specified via config
...{
// values defined here take precedence over the config-supplied options
baseUrl: ollamaEndpoint,
model: model.model,
},
}),
};


@@ -9,6 +9,8 @@ import {
getAnthropicApiKey,
getGeminiApiKey,
getOpenaiApiKey,
getOllamaChatOptions,
getOllamaEmbeddingsParams,
updateConfig,
} from '../config';
import logger from '../utils/logger';
@@ -54,6 +56,8 @@ router.get('/', async (_, res) => {
config['anthropicApiKey'] = getAnthropicApiKey();
config['groqApiKey'] = getGroqApiKey();
config['geminiApiKey'] = getGeminiApiKey();
config['ollamaChatOptions'] = getOllamaChatOptions();
config['ollamaEmbeddingsParams'] = getOllamaEmbeddingsParams();
res.status(200).json(config);
} catch (err: any) {
@@ -75,6 +79,8 @@ router.post('/', async (req, res) => {
API_ENDPOINTS: {
OLLAMA: config.ollamaApiUrl,
},
OLLAMA_CHAT_OPTIONS: config.ollamaChatOptions,
OLLAMA_EMBEDDINGS_PARAMS: config.ollamaEmbeddingsParams,
};
updateConfig(updatedConfig);
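
For completeness, a hedged sketch of how a client might push these options through the settings route; the field names mirror the GET and POST handlers above, while the host, port, and /api/config mount point are assumptions, and the option values are illustrative:

// Hypothetical client call; endpoint URL and option values are assumptions.
await fetch('http://localhost:3001/api/config', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    ollamaApiUrl: 'http://localhost:11434',
    ollamaChatOptions: { numCtx: 8192 },         // persisted as OLLAMA_CHAT_OPTIONS
    ollamaEmbeddingsParams: { keepAlive: '5m' }, // persisted as OLLAMA_EMBEDDINGS_PARAMS
  }),
});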