Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-06-16 23:08:41 +00:00)
feat(ollama): add keep_alive param
src/config.ts

@@ -8,6 +8,7 @@ interface Config {
   GENERAL: {
     PORT: number;
     SIMILARITY_MEASURE: string;
+    KEEP_ALIVE: string;
   };
   API_KEYS: {
     OPENAI: string;
@@ -34,6 +35,8 @@ export const getPort = () => loadConfig().GENERAL.PORT;
 export const getSimilarityMeasure = () =>
   loadConfig().GENERAL.SIMILARITY_MEASURE;
 
+export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;
+
 export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI;
 
 export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ;
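The new getter mirrors the existing config accessors. As a rough sketch of how the value travels from the config file to getKeepAlive (the TOML parser, file name, and loader shape below are assumptions for illustration; only the Config fields and the getKeepAlive line come from this commit):

    import fs from 'fs';
    import path from 'path';
    import toml from '@iarna/toml';

    interface Config {
      GENERAL: {
        PORT: number;
        SIMILARITY_MEASURE: string;
        KEEP_ALIVE: string; // e.g. "5m"; passed through to Ollama's keep_alive
      };
    }

    // Hypothetical loader: read config.toml from the working directory and
    // treat its tables as the Config shape above.
    const loadConfig = (): Config =>
      toml.parse(
        fs.readFileSync(path.join(process.cwd(), 'config.toml'), 'utf-8'),
      ) as unknown as Config;

    export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;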
src/lib/providers/ollama.ts

@@ -1,11 +1,12 @@
 import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
-import { getOllamaApiEndpoint } from '../../config';
+import { getKeepAlive, getOllamaApiEndpoint } from '../../config';
 import logger from '../../utils/logger';
 import { ChatOllama } from '@langchain/community/chat_models/ollama';
 
 export const loadOllamaChatModels = async () => {
   const ollamaEndpoint = getOllamaApiEndpoint();
+  const keepAlive = getKeepAlive();
 
   if (!ollamaEndpoint) return {};
 
   try {
@@ -24,6 +25,7 @@ export const loadOllamaChatModels = async () => {
           baseUrl: ollamaEndpoint,
           model: model.model,
           temperature: 0.7,
+          keepAlive: keepAlive
         }),
       };
 
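On the provider side, the configured value is forwarded to each ChatOllama instance. Ollama's keep_alive option controls how long a model stays loaded in memory after a request (duration strings such as "5m", or -1 to keep it loaded indefinitely). A minimal usage sketch of the resulting construction; the endpoint is Ollama's default local address and the model name is a placeholder, not values taken from this commit:

    import { ChatOllama } from '@langchain/community/chat_models/ollama';

    const chatModel = new ChatOllama({
      baseUrl: 'http://localhost:11434', // assumed default local Ollama endpoint
      model: 'llama3',                   // placeholder model name
      temperature: 0.7,
      keepAlive: '5m',                   // forwarded as Ollama's keep_alive option
    });

    // After a call such as chatModel.invoke('Hello'), Ollama keeps the model
    // resident for roughly five minutes before unloading it.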