mirror of https://github.com/ItzCrazyKns/Perplexica.git
synced 2025-04-30 00:02:44 +00:00
feat(app): add GET config route
This commit is contained in:
55  ui/app/api/config/route.ts  Normal file
@@ -0,0 +1,55 @@
import { getAnthropicApiKey, getCustomOpenaiApiKey, getCustomOpenaiApiUrl, getCustomOpenaiModelName, getGeminiApiKey, getGroqApiKey, getOllamaApiEndpoint, getOpenaiApiKey } from "@/lib/config"
import { getAvailableChatModelProviders, getAvailableEmbeddingModelProviders } from "@/lib/providers"

export const GET = async (req: Request) => {
  try {
    const config: Record<string, any> = {}

    const [chatModelProviders, embeddingModelProviders] = await Promise.all([
      getAvailableChatModelProviders(),
      getAvailableEmbeddingModelProviders(),
    ])

    config['chatModelProviders'] = {}
    config['embeddingModelProviders'] = {}

    for (const provider in chatModelProviders) {
      config['chatModelProviders'][provider] = Object.keys(
        chatModelProviders[provider],
      ).map(model => {
        return {
          name: model,
          displayName: chatModelProviders[provider][model].displayName,
        }
      })
    }

    for (const provider in embeddingModelProviders) {
      config['embeddingModelProviders'][provider] = Object.keys(
        embeddingModelProviders[provider],
      ).map(model => {
        return {
          name: model,
          displayName: embeddingModelProviders[provider][model].displayName,
        }
      })
    }

    config['openaiApiKey'] = getOpenaiApiKey()
    config['ollamaApiUrl'] = getOllamaApiEndpoint()
    config['anthropicApiKey'] = getAnthropicApiKey()
    config['groqApiKey'] = getGroqApiKey()
    config['geminiApiKey'] = getGeminiApiKey()
    config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl()
    config['customOpenaiApiKey'] = getCustomOpenaiApiKey()
    config['customOpenaiModelName'] = getCustomOpenaiModelName()

    return Response.json({ ...config }, { status: 200 })
  } catch (err) {
    console.error('An error occurred while getting config:', err)
    return Response.json(
      { message: 'An error occurred while getting config' },
      { status: 500 },
    )
  }
}
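For reference, a rough sketch of the JSON shape the handler above returns, derived from the fields it assigns; the ConfigResponse and ModelInfo names are illustrative and not part of the commit:

// Approximate response shape of GET /api/config (illustrative names, not from the commit).
interface ModelInfo {
  name: string        // model key within the provider map
  displayName: string // human-readable label from the provider metadata
}

interface ConfigResponse {
  chatModelProviders: Record<string, ModelInfo[]>
  embeddingModelProviders: Record<string, ModelInfo[]>
  openaiApiKey: string
  ollamaApiUrl: string
  anthropicApiKey: string
  groqApiKey: string
  geminiApiKey: string
  customOpenaiApiUrl: string
  customOpenaiApiKey: string
  customOpenaiModelName: string
}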
@@ -116,7 +116,7 @@ const Page = () => {
   useEffect(() => {
     const fetchConfig = async () => {
       setIsLoading(true);
-      const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
+      const res = await fetch(`/api/config`, {
         headers: {
           'Content-Type': 'application/json',
         },
@@ -208,7 +208,7 @@ const Page = () => {
         key.toLowerCase().includes('api') ||
         key.toLowerCase().includes('url')
       ) {
-        const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
+        const res = await fetch(`/api/config`, {
          headers: {
            'Content-Type': 'application/json',
          },
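Switching the fetch URL from process.env.NEXT_PUBLIC_API_URL + '/config' to the relative /api/config path points the settings page at the Route Handler added above instead of an external backend URL. A minimal, assumed usage sketch of that call from client code (the loadConfig helper is hypothetical, not part of the commit):

// Hypothetical client-side helper; shows how the new relative route can be fetched.
const loadConfig = async (): Promise<Record<string, any>> => {
  const res = await fetch('/api/config', {
    headers: { 'Content-Type': 'application/json' },
  })
  if (!res.ok) {
    throw new Error(`Config request failed with status ${res.status}`)
  }
  return res.json()
}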