diff --git a/src/app/api/suggestions/route.ts b/src/app/api/suggestions/route.ts
index 2dc7248..0c70cba 100644
--- a/src/app/api/suggestions/route.ts
+++ b/src/app/api/suggestions/route.ts
@@ -19,19 +19,9 @@ export const POST = async (req: Request) => {
       body.chatModel.key,
     );
 
-    const chatHistory = body.chatHistory
-      .map((msg: any) => {
-        if (msg.role === 'user') {
-          return new HumanMessage(msg.content);
-        } else if (msg.role === 'assistant') {
-          return new AIMessage(msg.content);
-        }
-      })
-      .filter((msg) => msg !== undefined) as BaseMessage[];
-
     const suggestions = await generateSuggestions(
       {
-        chatHistory,
+        chatHistory: body.chatHistory,
       },
       llm,
     );
diff --git a/src/lib/agents/suggestions/index.ts b/src/lib/agents/suggestions/index.ts
index 03302ac..050eac7 100644
--- a/src/lib/agents/suggestions/index.ts
+++ b/src/lib/agents/suggestions/index.ts
@@ -1,32 +1,38 @@
-import ListLineOutputParser from '@/lib/outputParsers/listLineOutputParser';
-import { ChatPromptTemplate, PromptTemplate } from '@langchain/core/prompts';
 import formatChatHistoryAsString from '@/lib/utils/formatHistory';
-import { BaseMessage, HumanMessage, SystemMessage } from '@langchain/core/messages';
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { suggestionGeneratorPrompt } from '@/lib/prompts/suggestions';
+import { ChatTurnMessage } from '@/lib/types';
+import z from 'zod';
+import BaseLLM from '@/lib/models/base/llm';
 
 type SuggestionGeneratorInput = {
-  chatHistory: BaseMessage[];
+  chatHistory: ChatTurnMessage[];
 };
 
-const outputParser = new ListLineOutputParser({
-  key: 'suggestions',
+const schema = z.object({
+  suggestions: z
+    .array(z.string())
+    .describe('List of suggested questions or prompts'),
 });
 
 const generateSuggestions = async (
   input: SuggestionGeneratorInput,
-  llm: BaseChatModel,
+  llm: BaseLLM,
 ) => {
-  const chatPrompt = await ChatPromptTemplate.fromMessages([
-    new SystemMessage(suggestionGeneratorPrompt),
-    new HumanMessage(`${formatChatHistoryAsString(input.chatHistory)}`)
-  ]).formatMessages({})
+  const res = await llm.generateObject<z.infer<typeof schema>>({
+    messages: [
+      {
+        role: 'system',
+        content: suggestionGeneratorPrompt,
+      },
+      {
+        role: 'user',
+        content: `<conversation>\n${formatChatHistoryAsString(input.chatHistory)}\n</conversation>`,
+      },
+    ],
+    schema,
+  });
 
-  const res = await llm.invoke(chatPrompt)
-
-  const suggestions = await outputParser.invoke(res)
-
-  return suggestions
+  return res.suggestions;
 };
 
 export default generateSuggestions;
diff --git a/src/lib/prompts/suggestions/index.ts b/src/lib/prompts/suggestions/index.ts
index daa99d4..18922ba 100644
--- a/src/lib/prompts/suggestions/index.ts
+++ b/src/lib/prompts/suggestions/index.ts
@@ -3,13 +3,15 @@ You are an AI suggestion generator for an AI powered search engine. You will be
 You need to make sure the suggestions are relevant to the conversation and are helpful to the user. Keep a note that the user might use these suggestions to ask a chat model for more information.
 Make sure the suggestions are medium in length and are informative and relevant to the conversation.
 
-Provide these suggestions separated by newlines between the XML tags <suggestions> and </suggestions>. For example:
-
-<suggestions>
-Tell me more about SpaceX and their recent projects
-What is the latest news on SpaceX?
-Who is the CEO of SpaceX?
-</suggestions>
+Sample suggestions for a conversation about Elon Musk:
+{
+  "suggestions": [
+    "What are Elon Musk's plans for SpaceX in the next decade?",
+    "How has Tesla's stock performance been influenced by Elon Musk's leadership?",
+    "What are the key innovations introduced by Elon Musk in the electric vehicle industry?",
+    "How does Elon Musk's vision for renewable energy impact global sustainability efforts?"
+  ]
+}
 
 Today's date is ${new Date().toISOString()}
-`;
\ No newline at end of file
+`;