import handleVideoSearch from '@/lib/chains/videoSearchAgent';
import {
  getCustomOpenaiApiKey,
  getCustomOpenaiApiUrl,
  getCustomOpenaiModelName,
} from '@/lib/config';
import { getAvailableChatModelProviders } from '@/lib/providers';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { AIMessage, HumanMessage } from '@langchain/core/messages';
import { ChatOpenAI } from '@langchain/openai';

/** Identifies a chat model by provider key and model key. */
interface ChatModel {
  provider: string;
  model: string;
}

/** Request body accepted by the video-search endpoint. */
interface VideoSearchBody {
  query: string;
  // Assumed shape: [{ role: 'user' | 'assistant', content: string }, ...]
  // — TODO confirm against the client that posts to this route.
  chatHistory: any[];
  chatModel?: ChatModel;
}

/**
 * POST handler: runs a video search for `body.query` with the requested
 * (or first available) chat model, passing along prior chat history.
 *
 * Responses:
 * - 200 `{ videos }` on success
 * - 400 `{ error }` when no usable chat model can be resolved
 * - 500 `{ message }` on unexpected failures
 */
export const POST = async (req: Request) => {
  try {
    const body: VideoSearchBody = await req.json();

    // Convert raw history entries into LangChain messages; entries with
    // any other role map to undefined and are filtered out.
    const chatHistory = body.chatHistory
      .map((msg: any) => {
        if (msg.role === 'user') {
          return new HumanMessage(msg.content);
        } else if (msg.role === 'assistant') {
          return new AIMessage(msg.content);
        }
      })
      .filter((msg) => msg !== undefined);

    const chatModelProviders = await getAvailableChatModelProviders();

    // Resolve provider and model, falling back to the first available entry.
    // The optional chaining / `?? {}` guards against an unknown provider
    // name, which previously threw a TypeError inside Object.keys() and
    // surfaced as a 500 instead of the intended 400 "Invalid chat model".
    const providerKey =
      body.chatModel?.provider || Object.keys(chatModelProviders)[0];
    const chatModelProvider = chatModelProviders[providerKey];

    const modelKey =
      body.chatModel?.model || Object.keys(chatModelProvider ?? {})[0];
    const chatModel = chatModelProvider?.[modelKey];

    let llm: BaseChatModel | undefined;

    if (body.chatModel?.provider === 'custom_openai') {
      // OpenAI-compatible endpoint configured via application config.
      llm = new ChatOpenAI({
        openAIApiKey: getCustomOpenaiApiKey(),
        modelName: getCustomOpenaiModelName(),
        temperature: 0.7,
        configuration: {
          baseURL: getCustomOpenaiApiUrl(),
        },
      });
    } else if (chatModelProvider && chatModel) {
      llm = chatModel.model;
    }

    if (!llm) {
      return Response.json({ error: 'Invalid chat model' }, { status: 400 });
    }

    const videos = await handleVideoSearch(
      {
        chat_history: chatHistory,
        query: body.query,
      },
      llm,
    );

    return Response.json({ videos }, { status: 200 });
  } catch (err) {
    // Typo fix: "ocurred" -> "occurred" in both log and response message.
    console.error(`An error occurred while searching videos: ${err}`);
    return Response.json(
      { message: 'An error occurred while searching videos' },
      { status: 500 },
    );
  }
};