Perplexica/src/lib/agents/search/index.ts

import { ResearcherOutput, SearchAgentInput } from './types';
import SessionManager from '@/lib/session';
import { classify } from './classifier';
import Researcher from './researcher';
import { getWriterPrompt } from '@/lib/prompts/search/writer';
import { WidgetExecutor } from './widgets';

class SearchAgent {
  async searchAsync(session: SessionManager, input: SearchAgentInput) {
    // Classify the follow-up to decide which sources/widgets apply and
    // whether a search can be skipped entirely.
    const classification = await classify({
      chatHistory: input.chatHistory,
      enabledSources: input.config.sources,
      query: input.followUp,
      llm: input.config.llm,
    });

    // Run all applicable widgets in parallel and emit each widget block to
    // the client as soon as the batch resolves.
    const widgetPromise = WidgetExecutor.executeAll({
      classification,
      chatHistory: input.chatHistory,
      followUp: input.followUp,
      llm: input.config.llm,
    }).then((widgetOutputs) => {
      widgetOutputs.forEach((o) => {
        session.emitBlock({
          id: crypto.randomUUID(),
          type: 'widget',
          data: {
            widgetType: o.type,
            params: o.data,
          },
        });
      });
      return widgetOutputs;
    });

    // Only start research if the classifier did not mark the query as
    // answerable without a search.
    let searchPromise: Promise<ResearcherOutput> | null = null;
    if (!classification.classification.skipSearch) {
      const researcher = new Researcher();
      searchPromise = researcher.research(session, {
        chatHistory: input.chatHistory,
        followUp: input.followUp,
        classification: classification,
        config: input.config,
      });
    }

    // Wait for widgets and (optionally) research before writing the answer.
    const [widgetOutputs, searchResults] = await Promise.all([
      widgetPromise,
      searchPromise,
    ]);

    session.emit('data', {
      type: 'researchComplete',
    });

    // Build the citable search context and the non-citable widget context.
    const finalContext =
      searchResults?.searchFindings
        .map(
          (f, index) =>
            `<result index=${index} title=${f.metadata.title}>${f.content}</result>`,
        )
        .join('\n') || '';

    const widgetContext = widgetOutputs
      .map((o) => {
        return `<result>${o.llmContext}</result>`;
      })
      .join('\n-------------\n');

    const finalContextWithWidgets = `<search_results note="These are the search results and the assistant can cite these">\n${finalContext}\n</search_results>\n<widgets_result noteForAssistant="Its output is already shown to the user; the assistant can use this information to answer the query but must not CITE this as a source">\n${widgetContext}\n</widgets_result>`;

    // Stream the writer model's answer back to the client chunk by chunk.
    const writerPrompt = getWriterPrompt(finalContextWithWidgets);
    const answerStream = input.config.llm.streamText({
      messages: [
        {
          role: 'system',
          content: writerPrompt,
        },
        ...input.chatHistory,
        {
          role: 'user',
          content: input.followUp,
        },
      ],
    });

    let accumulatedText = '';
    for await (const chunk of answerStream) {
      // Accumulate the full response text while forwarding each chunk.
      accumulatedText += chunk.contentChunk;
      session.emit('data', {
        type: 'response',
        data: chunk.contentChunk,
      });
    }

    session.emit('end', {});
  }
}

export default SearchAgent;
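
For reference, a minimal usage sketch of this class follows. How SessionManager, the LLM handle, and the source list are actually constructed is not visible in this file, so `session` and `config` are passed in as placeholders rather than Perplexica's real wiring; only the SearchAgentInput shape (chatHistory, followUp, config) is taken from the code above, and SearchAgentInput may have additional fields not shown here.

import SearchAgent from '@/lib/agents/search';
import SessionManager from '@/lib/session';
import type { SearchAgentInput } from '@/lib/agents/search/types';

// Hypothetical caller: `session` and `config` (including config.llm) are
// assumed to be built elsewhere in the app and handed to this function.
async function answerFollowUp(
  session: SessionManager,
  config: SearchAgentInput['config'],
) {
  const input: SearchAgentInput = {
    chatHistory: [], // no prior turns in this example
    followUp: 'What is the weather in Berlin right now?',
    config,
  };

  // Blocks and response chunks are emitted through `session` as the agent runs.
  const agent = new SearchAgent();
  await agent.searchAsync(session, input);
}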