feat(config): update file path, add post endpoint

This commit is contained in:
ItzCrazyKns
2025-03-18 10:33:32 +05:30
parent 97f6196d9b
commit 4cb20542a5
3 changed files with 82 additions and 28 deletions

View File

@@ -1,55 +1,109 @@
import { getAnthropicApiKey, getCustomOpenaiApiKey, getCustomOpenaiApiUrl, getCustomOpenaiModelName, getGeminiApiKey, getGroqApiKey, getOllamaApiEndpoint, getOpenaiApiKey } from "@/lib/config" import {
import { getAvailableChatModelProviders, getAvailableEmbeddingModelProviders } from "@/lib/providers" getAnthropicApiKey,
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
getGeminiApiKey,
getGroqApiKey,
getOllamaApiEndpoint,
getOpenaiApiKey,
updateConfig,
} from '@/lib/config';
import {
getAvailableChatModelProviders,
getAvailableEmbeddingModelProviders,
} from '@/lib/providers';
export const GET = async (req: Request) => { export const GET = async (req: Request) => {
try { try {
const config: Record<string, any> = {} const config: Record<string, any> = {};
const [chatModelProviders, embeddingModelProviders] = await Promise.all([ const [chatModelProviders, embeddingModelProviders] = await Promise.all([
getAvailableChatModelProviders(), getAvailableChatModelProviders(),
getAvailableEmbeddingModelProviders(), getAvailableEmbeddingModelProviders(),
]) ]);
config['chatModelProviders'] = {} config['chatModelProviders'] = {};
config['embeddingModelProviders'] = {} config['embeddingModelProviders'] = {};
for (const provider in chatModelProviders) { for (const provider in chatModelProviders) {
config['chatModelProviders'][provider] = Object.keys( config['chatModelProviders'][provider] = Object.keys(
chatModelProviders[provider], chatModelProviders[provider],
).map(model => { ).map((model) => {
return { return {
name: model, name: model,
displayName: chatModelProviders[provider][model].displayName, displayName: chatModelProviders[provider][model].displayName,
} };
}) });
} }
for (const provider in embeddingModelProviders) { for (const provider in embeddingModelProviders) {
config['embeddingModelProviders'][provider] = Object.keys( config['embeddingModelProviders'][provider] = Object.keys(
embeddingModelProviders[provider], embeddingModelProviders[provider],
).map(model => { ).map((model) => {
return { return {
name: model, name: model,
displayName: embeddingModelProviders[provider][model].displayName, displayName: embeddingModelProviders[provider][model].displayName,
} };
}) });
} }
config['openaiApiKey'] = getOpenaiApiKey() config['openaiApiKey'] = getOpenaiApiKey();
config['ollamaApiUrl'] = getOllamaApiEndpoint() config['ollamaApiUrl'] = getOllamaApiEndpoint();
config['anthropicApiKey'] = getAnthropicApiKey() config['anthropicApiKey'] = getAnthropicApiKey();
config['groqApiKey'] = getGroqApiKey() config['groqApiKey'] = getGroqApiKey();
config['geminiApiKey'] = getGeminiApiKey() config['geminiApiKey'] = getGeminiApiKey();
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl() config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
config['customOpenaiApiKey'] = getCustomOpenaiApiKey() config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
config['customOpenaiModelName'] = getCustomOpenaiModelName() config['customOpenaiModelName'] = getCustomOpenaiModelName();
return Response.json({ ...config }, { status: 200 }) return Response.json({ ...config }, { status: 200 });
} catch (err) { } catch (err) {
console.error('An error ocurred while getting config:', err) console.error('An error ocurred while getting config:', err);
return Response.json( return Response.json(
{ message: 'An error ocurred while getting config' }, { message: 'An error ocurred while getting config' },
{ status: 500 }, { status: 500 },
) );
} }
};
export const POST = async (req: Request) => {
try {
const config = await req.json();
const updatedConfig = {
MODELS: {
OPENAI: {
API_KEY: config.openaiApiKey,
},
GROQ: {
API_KEY: config.groqApiKey,
},
ANTHROPIC: {
API_KEY: config.anthropicApiKey,
},
GEMINI: {
API_KEY: config.geminiApiKey,
},
OLLAMA: {
API_URL: config.ollamaApiUrl,
},
CUSTOM_OPENAI: {
API_URL: config.customOpenaiApiUrl,
API_KEY: config.customOpenaiApiKey,
MODEL_NAME: config.customOpenaiModelName,
},
},
};
updateConfig(updatedConfig);
return Response.json({ message: 'Config updated' }, { status: 200 });
} catch (err) {
console.error('An error ocurred while updating config:', err);
return Response.json(
{ message: 'An error ocurred while updating config' },
{ status: 500 },
);
} }
};

View File

@@ -188,7 +188,7 @@ const Page = () => {
} as SettingsType; } as SettingsType;
const response = await fetch( const response = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/config`, `/api/config`,
{ {
method: 'POST', method: 'POST',
headers: { headers: {

View File

@@ -109,9 +109,9 @@ const mergeConfigs = (current: any, update: any): any => {
export const updateConfig = (config: RecursivePartial<Config>) => { export const updateConfig = (config: RecursivePartial<Config>) => {
const currentConfig = loadConfig(); const currentConfig = loadConfig();
const mergedConfig = mergeConfigs(currentConfig, config); const mergedConfig = mergeConfigs(currentConfig, config);
console.log(mergedConfig);
fs.writeFileSync( fs.writeFileSync(
path.join(__dirname, `../${configFileName}`), path.join(path.join(process.cwd(), `${configFileName}`)),
toml.stringify(mergedConfig), toml.stringify(mergedConfig),
); );
}; };