Mirror of https://github.com/ItzCrazyKns/Perplexica.git, synced 2025-06-27 12:18:33 +00:00
feat(config): update file path, add post endpoint
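In short: the config route handler gains a POST endpoint that accepts provider API keys and URLs from the client and persists them via updateConfig, the settings page now posts to the app's own /api/config route instead of an external NEXT_PUBLIC_API_URL backend, and updateConfig resolves the TOML config file from process.cwd() rather than __dirname. The remaining changes are formatting (trailing semicolons, parenthesized arrow parameters).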
@@ -1,55 +1,109 @@
-import { getAnthropicApiKey, getCustomOpenaiApiKey, getCustomOpenaiApiUrl, getCustomOpenaiModelName, getGeminiApiKey, getGroqApiKey, getOllamaApiEndpoint, getOpenaiApiKey } from "@/lib/config"
-import { getAvailableChatModelProviders, getAvailableEmbeddingModelProviders } from "@/lib/providers"
+import {
+  getAnthropicApiKey,
+  getCustomOpenaiApiKey,
+  getCustomOpenaiApiUrl,
+  getCustomOpenaiModelName,
+  getGeminiApiKey,
+  getGroqApiKey,
+  getOllamaApiEndpoint,
+  getOpenaiApiKey,
+  updateConfig,
+} from '@/lib/config';
+import {
+  getAvailableChatModelProviders,
+  getAvailableEmbeddingModelProviders,
+} from '@/lib/providers';
 
 export const GET = async (req: Request) => {
   try {
-    const config: Record<string, any> = {}
+    const config: Record<string, any> = {};
 
     const [chatModelProviders, embeddingModelProviders] = await Promise.all([
       getAvailableChatModelProviders(),
       getAvailableEmbeddingModelProviders(),
-    ])
+    ]);
 
-    config['chatModelProviders'] = {}
-    config['embeddingModelProviders'] = {}
+    config['chatModelProviders'] = {};
+    config['embeddingModelProviders'] = {};
 
     for (const provider in chatModelProviders) {
       config['chatModelProviders'][provider] = Object.keys(
         chatModelProviders[provider],
-      ).map(model => {
+      ).map((model) => {
         return {
           name: model,
           displayName: chatModelProviders[provider][model].displayName,
-        }
-      })
+        };
+      });
     }
 
     for (const provider in embeddingModelProviders) {
       config['embeddingModelProviders'][provider] = Object.keys(
         embeddingModelProviders[provider],
-      ).map(model => {
+      ).map((model) => {
         return {
           name: model,
           displayName: embeddingModelProviders[provider][model].displayName,
-        }
-      })
+        };
+      });
     }
 
-    config['openaiApiKey'] = getOpenaiApiKey()
-    config['ollamaApiUrl'] = getOllamaApiEndpoint()
-    config['anthropicApiKey'] = getAnthropicApiKey()
-    config['groqApiKey'] = getGroqApiKey()
-    config['geminiApiKey'] = getGeminiApiKey()
-    config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl()
-    config['customOpenaiApiKey'] = getCustomOpenaiApiKey()
-    config['customOpenaiModelName'] = getCustomOpenaiModelName()
+    config['openaiApiKey'] = getOpenaiApiKey();
+    config['ollamaApiUrl'] = getOllamaApiEndpoint();
+    config['anthropicApiKey'] = getAnthropicApiKey();
+    config['groqApiKey'] = getGroqApiKey();
+    config['geminiApiKey'] = getGeminiApiKey();
+    config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
+    config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
+    config['customOpenaiModelName'] = getCustomOpenaiModelName();
 
-    return Response.json({ ...config }, { status: 200 })
+    return Response.json({ ...config }, { status: 200 });
   } catch (err) {
-    console.error('An error occurred while getting config:', err)
+    console.error('An error occurred while getting config:', err);
     return Response.json(
       { message: 'An error occurred while getting config' },
       { status: 500 },
-    )
+    );
   }
-}
+};
+
+export const POST = async (req: Request) => {
+  try {
+    const config = await req.json();
+
+    const updatedConfig = {
+      MODELS: {
+        OPENAI: {
+          API_KEY: config.openaiApiKey,
+        },
+        GROQ: {
+          API_KEY: config.groqApiKey,
+        },
+        ANTHROPIC: {
+          API_KEY: config.anthropicApiKey,
+        },
+        GEMINI: {
+          API_KEY: config.geminiApiKey,
+        },
+        OLLAMA: {
+          API_URL: config.ollamaApiUrl,
+        },
+        CUSTOM_OPENAI: {
+          API_URL: config.customOpenaiApiUrl,
+          API_KEY: config.customOpenaiApiKey,
+          MODEL_NAME: config.customOpenaiModelName,
+        },
+      },
+    };
+
+    updateConfig(updatedConfig);
+
+    return Response.json({ message: 'Config updated' }, { status: 200 });
+  } catch (err) {
+    console.error('An error occurred while updating config:', err);
+    return Response.json(
+      { message: 'An error occurred while updating config' },
+      { status: 500 },
+    );
+  }
+};
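For context, a minimal sketch of how a client might exercise the new POST endpoint. The field names come from the handler above; the values and the localhost base URL are placeholders, not part of the commit:

// Sketch: update the custom-OpenAI settings through the new POST endpoint.
async function saveCustomOpenaiSettings(): Promise<void> {
  const res = await fetch('http://localhost:3000/api/config', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      customOpenaiApiUrl: 'http://localhost:8080/v1', // placeholder values
      customOpenaiApiKey: 'sk-placeholder',
      customOpenaiModelName: 'my-model',
    }),
  });
  console.log(res.status); // 200 with { message: 'Config updated' } on success
}

Note that any field omitted from the request body arrives as undefined inside updatedConfig, so whether a partial payload is safe depends on how mergeConfigs treats undefined values.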
@@ -188,7 +188,7 @@ const Page = () => {
     } as SettingsType;
 
     const response = await fetch(
-      `${process.env.NEXT_PUBLIC_API_URL}/config`,
+      `/api/config`,
       {
         method: 'POST',
         headers: {
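The switch to a relative URL works because the route handler now lives in the same Next.js app as the settings page: the browser resolves /api/config against the current origin, so no NEXT_PUBLIC_API_URL environment variable is needed. A sketch of the matching read path against the colocated GET handler:

// Sketch: fetch the current settings from the app's own route handler.
async function loadSettings(): Promise<void> {
  const res = await fetch('/api/config');
  const settings = await res.json();
  console.log(settings.chatModelProviders); // provider -> [{ name, displayName }]
}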
@@ -109,9 +109,9 @@ const mergeConfigs = (current: any, update: any): any => {
 export const updateConfig = (config: RecursivePartial<Config>) => {
   const currentConfig = loadConfig();
   const mergedConfig = mergeConfigs(currentConfig, config);
 
   console.log(mergedConfig);
   fs.writeFileSync(
-    path.join(__dirname, `../${configFileName}`),
+    path.join(path.join(process.cwd(), `${configFileName}`)),
     toml.stringify(mergedConfig),
   );
 };
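Why the path change matters: in a compiled Next.js server, __dirname points into the build output (somewhere under .next/server), so `../${configFileName}` no longer lands on the project root, while process.cwd() is the directory the server was started from, which is where the TOML file lives. (The nested path.join in the new line is redundant but harmless.) A sketch of the difference, assuming configFileName is 'config.toml':

import path from 'path';

const configFileName = 'config.toml'; // assumed value

// Before: resolved relative to the compiled module's directory, which
// moves around between dev and production builds.
const oldPath = path.join(__dirname, `../${configFileName}`);

// After: resolved from the process working directory, stable across
// builds as long as the server is started from the project root.
const newPath = path.join(process.cwd(), configFileName);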