From 07dc7d7649633031fac27aa3a484e74d7d75a5da Mon Sep 17 00:00:00 2001 From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com> Date: Sat, 15 Feb 2025 11:26:38 +0530 Subject: [PATCH] feat(config): update config & custom openai --- sample.config.toml | 27 ++++++++++---- src/config.ts | 92 ++++++++++++++++++++++++++++++++-------------- 2 files changed, 85 insertions(+), 34 deletions(-) diff --git a/sample.config.toml b/sample.config.toml index 50ba95d..686372c 100644 --- a/sample.config.toml +++ b/sample.config.toml @@ -3,12 +3,25 @@ PORT = 3001 # Port to run the server on SIMILARITY_MEASURE = "cosine" # "cosine" or "dot" KEEP_ALIVE = "5m" # How long to keep Ollama models loaded into memory. (Instead of using -1 use "-1m") -[API_KEYS] -OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef -GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef -ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef -GEMINI = "" # Gemini API key - sk-1234567890abcdef1234567890abcdef +[MODELS] +[MODELS.OPENAI] +API_KEY = "" + +[MODELS.GROQ] +API_KEY = "" + +[MODELS.ANTHROPIC] +API_KEY = "" + +[MODELS.GEMINI] +API_KEY = "" + +[MODELS.CUSTOM_OPENAI] +API_KEY = "" +API_URL = "" + +[MODELS.OLLAMA] +API_URL = "" # Ollama API URL - http://host.docker.internal:11434 [API_ENDPOINTS] -SEARXNG = "http://localhost:32768" # SearxNG API URL -OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434 \ No newline at end of file +SEARXNG = "http://localhost:32768" # SearxNG API URL \ No newline at end of file diff --git a/src/config.ts b/src/config.ts index 001c259..ab2a5db 100644 --- a/src/config.ts +++ b/src/config.ts @@ -10,15 +10,30 @@ interface Config { SIMILARITY_MEASURE: string; KEEP_ALIVE: string; }; - API_KEYS: { - OPENAI: string; - GROQ: string; - ANTHROPIC: string; - GEMINI: string; + MODELS: { + OPENAI: { + API_KEY: string; + }; + GROQ: { + API_KEY: string; + }; + ANTHROPIC: { + API_KEY: string; + }; + GEMINI: { + API_KEY: string; 
+ }; + OLLAMA: { + API_URL: string; + }; + CUSTOM_OPENAI: { + API_URL: string; + API_KEY: string; + MODEL_NAME: string; + }; }; API_ENDPOINTS: { SEARXNG: string; - OLLAMA: string; }; } @@ -38,42 +53,65 @@ export const getSimilarityMeasure = () => export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE; -export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI; +export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY; -export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ; +export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY; -export const getAnthropicApiKey = () => loadConfig().API_KEYS.ANTHROPIC; +export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY; -export const getGeminiApiKey = () => loadConfig().API_KEYS.GEMINI; +export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY; export const getSearxngApiEndpoint = () => process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG; -export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA; +export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL; -export const updateConfig = (config: RecursivePartial<Config>) => { - const currentConfig = loadConfig(); +export const getCustomOpenaiApiKey = () => + loadConfig().MODELS.CUSTOM_OPENAI.API_KEY; - for (const key in currentConfig) { - if (!config[key]) config[key] = {}; +export const getCustomOpenaiApiUrl = () => + loadConfig().MODELS.CUSTOM_OPENAI.API_URL; - if (typeof currentConfig[key] === 'object' && currentConfig[key] !== null) { - for (const nestedKey in currentConfig[key]) { - if ( - !config[key][nestedKey] && - currentConfig[key][nestedKey] && - config[key][nestedKey] !== '' - ) { - config[key][nestedKey] = currentConfig[key][nestedKey]; - } +export const getCustomOpenaiModelName = () => + loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME; + +const mergeConfigs = (current: any, update: any): any => { + if (update === null || update === 
undefined) { + return current; + } + + if (typeof current !== 'object' || current === null) { + return update; + } + + const result = { ...current }; + + for (const key in update) { + if (Object.prototype.hasOwnProperty.call(update, key)) { + const updateValue = update[key]; + + if ( + typeof updateValue === 'object' && + updateValue !== null && + typeof result[key] === 'object' && + result[key] !== null + ) { + result[key] = mergeConfigs(result[key], updateValue); + } else if (updateValue !== undefined) { + result[key] = updateValue; } - } else if (currentConfig[key] && config[key] !== '') { - config[key] = currentConfig[key]; } } + return result; +}; + +export const updateConfig = (config: RecursivePartial<Config>) => { + const currentConfig = loadConfig(); + const mergedConfig = mergeConfigs(currentConfig, config); + fs.writeFileSync( path.join(__dirname, `../${configFileName}`), - toml.stringify(config), + toml.stringify(mergedConfig), );