From 4cb20542a5db99e1220a5b3bea9ff6439af8e9d1 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com>
Date: Tue, 18 Mar 2025 10:33:32 +0530
Subject: [PATCH] feat(config): update file path, add post endpoint

---
 ui/app/api/config/route.ts | 102 ++++++++++++++++++++++++++++---------
 ui/app/settings/page.tsx   |   2 +-
 ui/lib/config.ts           |   6 +--
 3 files changed, 82 insertions(+), 28 deletions(-)

diff --git a/ui/app/api/config/route.ts b/ui/app/api/config/route.ts
index 7bd188e..46c71f5 100644
--- a/ui/app/api/config/route.ts
+++ b/ui/app/api/config/route.ts
@@ -1,55 +1,109 @@
-import { getAnthropicApiKey, getCustomOpenaiApiKey, getCustomOpenaiApiUrl, getCustomOpenaiModelName, getGeminiApiKey, getGroqApiKey, getOllamaApiEndpoint, getOpenaiApiKey } from "@/lib/config"
-import { getAvailableChatModelProviders, getAvailableEmbeddingModelProviders } from "@/lib/providers"
+import {
+  getAnthropicApiKey,
+  getCustomOpenaiApiKey,
+  getCustomOpenaiApiUrl,
+  getCustomOpenaiModelName,
+  getGeminiApiKey,
+  getGroqApiKey,
+  getOllamaApiEndpoint,
+  getOpenaiApiKey,
+  updateConfig,
+} from '@/lib/config';
+import {
+  getAvailableChatModelProviders,
+  getAvailableEmbeddingModelProviders,
+} from '@/lib/providers';
 
 export const GET = async (req: Request) => {
   try {
-    const config: Record<string, any> = {}
+    const config: Record<string, any> = {};
 
     const [chatModelProviders, embeddingModelProviders] = await Promise.all([
       getAvailableChatModelProviders(),
       getAvailableEmbeddingModelProviders(),
-    ])
+    ]);
 
-    config['chatModelProviders'] = {}
-    config['embeddingModelProviders'] = {}
+    config['chatModelProviders'] = {};
+    config['embeddingModelProviders'] = {};
 
     for (const provider in chatModelProviders) {
       config['chatModelProviders'][provider] = Object.keys(
         chatModelProviders[provider],
-      ).map(model => {
+      ).map((model) => {
         return {
           name: model,
           displayName: chatModelProviders[provider][model].displayName,
-        }
-      })
+        };
+      });
     }
 
     for (const provider in embeddingModelProviders) {
       config['embeddingModelProviders'][provider] = Object.keys(
         embeddingModelProviders[provider],
-      ).map(model => {
+      ).map((model) => {
         return {
           name: model,
           displayName: embeddingModelProviders[provider][model].displayName,
-        }
-      })
+        };
+      });
     }
 
-    config['openaiApiKey'] = getOpenaiApiKey()
-    config['ollamaApiUrl'] = getOllamaApiEndpoint()
-    config['anthropicApiKey'] = getAnthropicApiKey()
-    config['groqApiKey'] = getGroqApiKey()
-    config['geminiApiKey'] = getGeminiApiKey()
-    config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl()
-    config['customOpenaiApiKey'] = getCustomOpenaiApiKey()
-    config['customOpenaiModelName'] = getCustomOpenaiModelName()
+    config['openaiApiKey'] = getOpenaiApiKey();
+    config['ollamaApiUrl'] = getOllamaApiEndpoint();
+    config['anthropicApiKey'] = getAnthropicApiKey();
+    config['groqApiKey'] = getGroqApiKey();
+    config['geminiApiKey'] = getGeminiApiKey();
+    config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
+    config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
+    config['customOpenaiModelName'] = getCustomOpenaiModelName();
 
-    return Response.json({ ...config }, { status: 200 })
+    return Response.json({ ...config }, { status: 200 });
   } catch (err) {
-    console.error('An error ocurred while getting config:', err)
+    console.error('An error ocurred while getting config:', err);
     return Response.json(
       { message: 'An error ocurred while getting config' },
       { status: 500 },
-    )
+    );
   }
-}
+};
+
+export const POST = async (req: Request) => {
+  try {
+    const config = await req.json();
+
+    const updatedConfig = {
+      MODELS: {
+        OPENAI: {
+          API_KEY: config.openaiApiKey,
+        },
+        GROQ: {
+          API_KEY: config.groqApiKey,
+        },
+        ANTHROPIC: {
+          API_KEY: config.anthropicApiKey,
+        },
+        GEMINI: {
+          API_KEY: config.geminiApiKey,
+        },
+        OLLAMA: {
+          API_URL: config.ollamaApiUrl,
+        },
+        CUSTOM_OPENAI: {
+          API_URL: config.customOpenaiApiUrl,
+          API_KEY: config.customOpenaiApiKey,
+          MODEL_NAME: config.customOpenaiModelName,
+        },
+      },
+    };
+
+    updateConfig(updatedConfig);
+
+    return Response.json({ message: 'Config updated' }, { status: 200 });
+  } catch (err) {
+    console.error('An error occurred while updating config:', err);
+    return Response.json(
+      { message: 'An error occurred while updating config' },
+      { status: 500 },
+    );
+  }
+};
diff --git a/ui/app/settings/page.tsx b/ui/app/settings/page.tsx
index 3bd1f2d..bcb9b2b 100644
--- a/ui/app/settings/page.tsx
+++ b/ui/app/settings/page.tsx
@@ -188,7 +188,7 @@ const Page = () => {
     } as SettingsType;
 
     const response = await fetch(
-      `${process.env.NEXT_PUBLIC_API_URL}/config`,
+      `/api/config`,
       {
         method: 'POST',
         headers: {
diff --git a/ui/lib/config.ts b/ui/lib/config.ts
index 1e5148d..0a314cf 100644
--- a/ui/lib/config.ts
+++ b/ui/lib/config.ts
@@ -109,9 +109,9 @@ const mergeConfigs = (current: any, update: any): any => {
 export const updateConfig = (config: RecursivePartial<Config>) => {
   const currentConfig = loadConfig();
   const mergedConfig = mergeConfigs(currentConfig, config);
-
+  console.log(mergedConfig);
   fs.writeFileSync(
-    path.join(__dirname, `../${configFileName}`),
+    path.join(process.cwd(), `${configFileName}`),
     toml.stringify(mergedConfig),
   );
-};
\ No newline at end of file
+};
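
Usage sketch (illustrative, not part of the patch): with the settings page now posting
to the app-local route, any client code in the UI can persist config the same way. The
field names below mirror the keys the new POST handler reads (openaiApiKey, groqApiKey,
ollamaApiUrl, customOpenaiModelName, ...); the ConfigPayload type and saveConfig helper
are hypothetical names, not code from the repository.

    type ConfigPayload = {
      openaiApiKey?: string;
      groqApiKey?: string;
      anthropicApiKey?: string;
      geminiApiKey?: string;
      ollamaApiUrl?: string;
      customOpenaiApiUrl?: string;
      customOpenaiApiKey?: string;
      customOpenaiModelName?: string;
    };

    const saveConfig = async (values: ConfigPayload): Promise<void> => {
      // Relative URL: after this patch the route lives in the same Next.js app,
      // so no NEXT_PUBLIC_API_URL prefix is needed (see the settings/page.tsx hunk).
      const res = await fetch('/api/config', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(values),
      });

      if (!res.ok) {
        throw new Error(`Failed to update config: ${res.status}`);
      }
    };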
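
Path-resolution sketch (illustrative, not part of the patch): the ui/lib/config.ts hunk
stops resolving the TOML file relative to __dirname, which in a bundled Next.js server
typically points into the build output rather than the project root, and resolves it from
process.cwd() instead, i.e. the directory the server process is started from. The snippet
assumes configFileName is 'config.toml', matching the surrounding module.

    import path from 'path';

    const configFileName = 'config.toml'; // assumption: matches ui/lib/config.ts

    // Before this patch: relative to the compiled module's directory.
    const oldPath = path.join(__dirname, `../${configFileName}`);

    // After this patch: relative to the working directory of the Next.js process.
    const newPath = path.join(process.cwd(), configFileName);

    console.log({ oldPath, newPath });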