mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-10-31 03:18:16 +00:00)
	feat(app): remove backend
This commit is contained in:

src/app/api/suggestions/route.ts · 81 lines · Normal file (new)
@@ -0,0 +1,81 @@
import generateSuggestions from '@/lib/chains/suggestionGeneratorAgent';
import {
  getCustomOpenaiApiKey,
  getCustomOpenaiApiUrl,
  getCustomOpenaiModelName,
} from '@/lib/config';
import { getAvailableChatModelProviders } from '@/lib/providers';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
import { ChatOpenAI } from '@langchain/openai';

interface ChatModel {
  provider: string;
  model: string;
}

interface SuggestionsGenerationBody {
  chatHistory: any[];
  chatModel?: ChatModel;
}

export const POST = async (req: Request) => {
  try {
    const body: SuggestionsGenerationBody = await req.json();

    // Convert the plain chat history into LangChain message objects.
    const chatHistory = body.chatHistory
      .map((msg: any) => {
        if (msg.role === 'user') {
          return new HumanMessage(msg.content);
        } else if (msg.role === 'assistant') {
          return new AIMessage(msg.content);
        }
      })
      .filter((msg) => msg !== undefined) as BaseMessage[];

    const chatModelProviders = await getAvailableChatModelProviders();

    // Fall back to the first available provider/model when none is specified.
    const chatModelProvider =
      chatModelProviders[
        body.chatModel?.provider || Object.keys(chatModelProviders)[0]
      ];
    const chatModel =
      chatModelProvider[
        body.chatModel?.model || Object.keys(chatModelProvider)[0]
      ];

    let llm: BaseChatModel | undefined;

    if (body.chatModel?.provider === 'custom_openai') {
      llm = new ChatOpenAI({
        openAIApiKey: getCustomOpenaiApiKey(),
        modelName: getCustomOpenaiModelName(),
        temperature: 0.7,
        configuration: {
          baseURL: getCustomOpenaiApiUrl(),
        },
      }) as unknown as BaseChatModel;
    } else if (chatModelProvider && chatModel) {
      llm = chatModel.model;
    }

    if (!llm) {
      return Response.json({ error: 'Invalid chat model' }, { status: 400 });
    }

    const suggestions = await generateSuggestions(
      {
        chat_history: chatHistory,
      },
      llm,
    );

    return Response.json({ suggestions }, { status: 200 });
  } catch (err) {
    console.error(`An error occurred while generating suggestions: ${err}`);
    return Response.json(
      { message: 'An error occurred while generating suggestions' },
      { status: 500 },
    );
  }
};
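With the separate backend removed, this file exposes the suggestions endpoint directly from the Next.js app at /api/suggestions. Below is a minimal client-side sketch of calling it; the localhost URL and port are assumptions for illustration, the request body follows the SuggestionsGenerationBody shape above, and the suggestions array is assumed to contain strings.

// Hedged sketch: POST a chat history to the new App Router endpoint.
// The base URL is an assumption; chatModel is omitted, so the route
// falls back to the first available provider and model.
const res = await fetch('http://localhost:3000/api/suggestions', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    chatHistory: [
      { role: 'user', content: 'What is Perplexica?' },
      { role: 'assistant', content: 'Perplexica is an open-source AI-powered search engine.' },
    ],
  }),
});

if (!res.ok) {
  throw new Error(`Suggestion request failed with status ${res.status}`);
}

// The route returns { suggestions } on success; string[] is assumed here.
const { suggestions } = (await res.json()) as { suggestions: string[] };
console.log(suggestions);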