mirror of https://github.com/ItzCrazyKns/Perplexica.git
synced 2025-04-30 00:02:44 +00:00
Merge branch 'feat/deep-research' into master-deep-research
@@ -363,20 +363,18 @@ const ChatWindow = ({ id }: { id?: string }) => {
 
         if (data.type === 'sources') {
           sources = data.data;
-          if (!added) {
-            setMessages((prevMessages) => [
-              ...prevMessages,
-              {
-                content: '',
-                messageId: data.messageId,
-                chatId: chatId!,
-                role: 'assistant',
-                sources: sources,
-                createdAt: new Date(),
-              },
-            ]);
-            added = true;
-          }
+          setMessages((prevMessages) => [
+            ...prevMessages,
+            {
+              content: '',
+              messageId: data.messageId,
+              chatId: chatId!,
+              role: 'assistant',
+              sources: sources,
+              createdAt: new Date(),
+            },
+          ]);
+          added = true;
           setMessageAppeared(true);
         }
@@ -394,20 +392,20 @@ const ChatWindow = ({ id }: { id?: string }) => {
             },
           ]);
           added = true;
+          setMessageAppeared(true);
+        } else {
+          setMessages((prev) =>
+            prev.map((message) => {
+              if (message.messageId === data.messageId) {
+                return { ...message, content: message.content + data.data };
+              }
+
+              return message;
+            }),
+          );
         }
 
-        setMessages((prev) =>
-          prev.map((message) => {
-            if (message.messageId === data.messageId) {
-              return { ...message, content: message.content + data.data };
-            }
-
-            return message;
-          }),
-        );
-
         recievedMessage += data.data;
-        setMessageAppeared(true);
       }
 
       if (data.type === 'messageEnd') {
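
The two hunks above touch the ChatWindow component's message handler: a 'sources' event now creates the assistant message immediately, and streamed chunks are appended to it in the new else branch. A minimal, self-contained sketch of the immutable "append a chunk to the matching message" update used there; the Message type and appendChunk helper are illustrative stand-ins, not part of this commit:

    // Sketch only: mirrors the prev.map(...) update in the hunk above.
    // Message stands in for the app's real message type.
    type Message = { messageId: string; content: string };

    const appendChunk = (
      messages: Message[],
      messageId: string,
      chunk: string,
    ): Message[] =>
      messages.map((message) =>
        message.messageId === messageId
          ? { ...message, content: message.content + chunk }
          : message,
      );

    // e.g. inside a React state updater:
    // setMessages((prev) => appendChunk(prev, data.messageId, data.data));

The remaining hunks are in the MetaSearchAgent backend class.
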
@@ -6,11 +6,6 @@ import {
   MessagesPlaceholder,
   PromptTemplate,
 } from '@langchain/core/prompts';
-import {
-  RunnableLambda,
-  RunnableMap,
-  RunnableSequence,
-} from '@langchain/core/runnables';
 import { BaseMessage } from '@langchain/core/messages';
 import { StringOutputParser } from '@langchain/core/output_parsers';
 import LineListOutputParser from '../outputParsers/listLineOutputParser';
@@ -24,6 +19,7 @@ import computeSimilarity from '../utils/computeSimilarity';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import eventEmitter from 'events';
 import { StreamEvent } from '@langchain/core/tracers/log_stream';
+import { EventEmitter } from 'node:stream';
 
 export interface MetaSearchAgentType {
   searchAndAnswer: (
@@ -47,7 +43,7 @@ interface Config {
   activeEngines: string[];
 }
 
-type BasicChainInput = {
+type SearchInput = {
   chat_history: BaseMessage[];
   query: string;
 };
@@ -60,237 +56,242 @@ class MetaSearchAgent implements MetaSearchAgentType {
     this.config = config;
   }
 
-  private async createSearchRetrieverChain(llm: BaseChatModel) {
+  private async searchSources(
+    llm: BaseChatModel,
+    input: SearchInput,
+    emitter: EventEmitter,
+  ) {
     (llm as unknown as ChatOpenAI).temperature = 0;
 
-    return RunnableSequence.from([
-      PromptTemplate.fromTemplate(this.config.queryGeneratorPrompt),
-      llm,
-      this.strParser,
-      RunnableLambda.from(async (input: string) => {
+    const chatPrompt = PromptTemplate.fromTemplate(
+      this.config.queryGeneratorPrompt,
+    );
+
+    const processedChatPrompt = await chatPrompt.invoke({
+      chat_history: formatChatHistoryAsString(input.chat_history),
+      query: input.query,
+    });
+
+    const llmRes = await llm.invoke(processedChatPrompt);
+    const messageStr = await this.strParser.invoke(llmRes);
 
     const linksOutputParser = new LineListOutputParser({
       key: 'links',
     });
 
     const questionOutputParser = new LineOutputParser({
       key: 'question',
     });
 
-        const links = await linksOutputParser.parse(input);
-        let question = this.config.summarizer
-          ? await questionOutputParser.parse(input)
-          : input;
+    const links = await linksOutputParser.parse(messageStr);
+    let question = this.config.summarizer
+      ? await questionOutputParser.parse(messageStr)
+      : messageStr;
 
     if (question === 'not_needed') {
       return { query: '', docs: [] };
     }
 
     if (links.length > 0) {
       if (question.length === 0) {
         question = 'summarize';
       }
 
       let docs: Document[] = [];
 
       const linkDocs = await getDocumentsFromLinks({ links });
 
       const docGroups: Document[] = [];
 
       linkDocs.map((doc) => {
         const URLDocExists = docGroups.find(
           (d) =>
             d.metadata.url === doc.metadata.url && d.metadata.totalDocs < 10,
         );
 
         if (!URLDocExists) {
           docGroups.push({
             ...doc,
             metadata: {
               ...doc.metadata,
               totalDocs: 1,
             },
           });
         }
 
         const docIndex = docGroups.findIndex(
           (d) =>
             d.metadata.url === doc.metadata.url && d.metadata.totalDocs < 10,
         );
 
         if (docIndex !== -1) {
           docGroups[docIndex].pageContent =
             docGroups[docIndex].pageContent + `\n\n` + doc.pageContent;
           docGroups[docIndex].metadata.totalDocs += 1;
         }
       });
 
       await Promise.all(
         docGroups.map(async (doc) => {
           const res = await llm.invoke(`
             You are a web search summarizer, tasked with summarizing a piece of text retrieved from a web search. Your job is to summarize the
             text into a detailed, 2-4 paragraph explanation that captures the main ideas and provides a comprehensive answer to the query.
             If the query is \"summarize\", you should provide a detailed summary of the text. If the query is a specific question, you should answer it in the summary.
 
             - **Journalistic tone**: The summary should sound professional and journalistic, not too casual or vague.
             - **Thorough and detailed**: Ensure that every key point from the text is captured and that the summary directly answers the query.
             - **Not too lengthy, but detailed**: The summary should be informative but not excessively long. Focus on providing detailed information in a concise format.
 
             The text will be shared inside the \`text\` XML tag, and the query inside the \`query\` XML tag.
 
             <example>
             1. \`<text>
             Docker is a set of platform-as-a-service products that use OS-level virtualization to deliver software in packages called containers.
             It was first released in 2013 and is developed by Docker, Inc. Docker is designed to make it easier to create, deploy, and run applications
             by using containers.
             </text>
 
             <query>
             What is Docker and how does it work?
             </query>
 
             Response:
             Docker is a revolutionary platform-as-a-service product developed by Docker, Inc., that uses container technology to make application
             deployment more efficient. It allows developers to package their software with all necessary dependencies, making it easier to run in
             any environment. Released in 2013, Docker has transformed the way applications are built, deployed, and managed.
             \`
             2. \`<text>
             The theory of relativity, or simply relativity, encompasses two interrelated theories of Albert Einstein: special relativity and general
             relativity. However, the word "relativity" is sometimes used in reference to Galilean invariance. The term "theory of relativity" was based
             on the expression "relative theory" used by Max Planck in 1906. The theory of relativity usually encompasses two interrelated theories by
             Albert Einstein: special relativity and general relativity. Special relativity applies to all physical phenomena in the absence of gravity.
             General relativity explains the law of gravitation and its relation to other forces of nature. It applies to the cosmological and astrophysical
             realm, including astronomy.
             </text>
 
             <query>
             summarize
             </query>
 
             Response:
             The theory of relativity, developed by Albert Einstein, encompasses two main theories: special relativity and general relativity. Special
             relativity applies to all physical phenomena in the absence of gravity, while general relativity explains the law of gravitation and its
             relation to other forces of nature. The theory of relativity is based on the concept of "relative theory," as introduced by Max Planck in
             1906. It is a fundamental theory in physics that has revolutionized our understanding of the universe.
             \`
             </example>
 
             Everything below is the actual data you will be working with. Good luck!
 
             <query>
             ${question}
             </query>
 
             <text>
             ${doc.pageContent}
             </text>
 
             Make sure to answer the query in the summary.
           `);
 
           const document = new Document({
             pageContent: res.content as string,
             metadata: {
               title: doc.metadata.title,
               url: doc.metadata.url,
             },
           });
 
           docs.push(document);
         }),
       );
 
       return { query: question, docs: docs };
     } else {
       question = question.replace(/<think>.*?<\/think>/g, '');
 
       const res = await searchSearxng(question, {
         language: 'en',
         engines: this.config.activeEngines,
       });
 
       const documents = res.results.map(
         (result) =>
           new Document({
             pageContent:
               result.content ||
               (this.config.activeEngines.includes('youtube')
                 ? result.title
                 : '') /* Todo: Implement transcript grabbing using Youtubei (source: https://www.npmjs.com/package/youtubei) */,
             metadata: {
               title: result.title,
               url: result.url,
               ...(result.img_src && { img_src: result.img_src }),
             },
           }),
       );
 
       return { query: question, docs: documents };
     }
-      }),
-    ]);
   }
 
-  private async createAnsweringChain(
+  private async streamAnswer(
     llm: BaseChatModel,
     fileIds: string[],
     embeddings: Embeddings,
     optimizationMode: 'speed' | 'balanced' | 'quality',
     systemInstructions: string,
+    input: SearchInput,
+    emitter: EventEmitter,
   ) {
-    return RunnableSequence.from([
-      RunnableMap.from({
-        systemInstructions: () => systemInstructions,
-        query: (input: BasicChainInput) => input.query,
-        chat_history: (input: BasicChainInput) => input.chat_history,
-        date: () => new Date().toISOString(),
-        context: RunnableLambda.from(async (input: BasicChainInput) => {
-          const processedHistory = formatChatHistoryAsString(
-            input.chat_history,
-          );
+    const chatPrompt = ChatPromptTemplate.fromMessages([
+      ['system', this.config.responsePrompt],
+      new MessagesPlaceholder('chat_history'),
+      ['user', '{query}'],
+    ]);
 
-          let docs: Document[] | null = null;
-          let query = input.query;
+    let docs: Document[] | null = null;
+    let query = input.query;
 
-          if (this.config.searchWeb) {
-            const searchRetrieverChain =
-              await this.createSearchRetrieverChain(llm);
+    if (this.config.searchWeb) {
+      const searchResults = await this.searchSources(llm, input, emitter);
 
-            const searchRetrieverResult = await searchRetrieverChain.invoke({
-              chat_history: processedHistory,
-              query,
-            });
+      query = searchResults.query;
+      docs = searchResults.docs;
+    }
 
-            query = searchRetrieverResult.query;
-            docs = searchRetrieverResult.docs;
-          }
+    const sortedDocs = await this.rerankDocs(
+      query,
+      docs ?? [],
+      fileIds,
+      embeddings,
+      optimizationMode,
+    );
 
-          const sortedDocs = await this.rerankDocs(
-            query,
-            docs ?? [],
-            fileIds,
-            embeddings,
-            optimizationMode,
-          );
+    emitter.emit('data', JSON.stringify({ type: 'sources', data: sortedDocs }));
 
-          return sortedDocs;
-        })
-          .withConfig({
-            runName: 'FinalSourceRetriever',
-          })
-          .pipe(this.processDocs),
-      }),
-      ChatPromptTemplate.fromMessages([
-        ['system', this.config.responsePrompt],
-        new MessagesPlaceholder('chat_history'),
-        ['user', '{query}'],
-      ]),
-      llm,
-      this.strParser,
-    ]).withConfig({
-      runName: 'FinalResponseGenerator',
-    });
+    const context = this.processDocs(sortedDocs);
+
+    const formattedChatPrompt = await chatPrompt.invoke({
+      query: input.query,
+      chat_history: input.chat_history,
+      date: new Date().toISOString(),
+      context: context,
+      systemInstructions: systemInstructions,
+    });
+
+    const llmRes = await llm.stream(formattedChatPrompt);
+
+    for await (const data of llmRes) {
+      const messageStr = await this.strParser.invoke(data);
+
+      emitter.emit(
+        'data',
+        JSON.stringify({ type: 'response', data: messageStr }),
+      );
+    }
+
+    emitter.emit('end');
   }
 
   private async rerankDocs(
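
The hunk above replaces the LangChain RunnableSequence/RunnableMap pipeline with plain async methods (searchSources, streamAnswer) that write directly to an EventEmitter. A minimal, self-contained sketch of how a caller might consume that emitter; the 'data'/'end' event names and the { type: 'sources' | 'response' } payloads come from the emitter.emit calls in the diff, while the listen helper itself is illustrative:

    import { EventEmitter } from 'node:events';

    // Sketch only: event names and payload shapes mirror the emitter.emit calls above.
    const listen = (emitter: EventEmitter) => {
      emitter.on('data', (raw: string) => {
        const event = JSON.parse(raw) as { type: string; data: unknown };

        if (event.type === 'sources') {
          // Emitted once, with the reranked documents, before the answer streams.
          console.log('sources:', event.data);
        } else if (event.type === 'response') {
          // Streamed answer chunks.
          process.stdout.write(String(event.data));
        }
      });

      emitter.on('end', () => {
        // Fired after the LLM stream is exhausted.
        console.log();
      });
    };
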
@@ -431,39 +432,6 @@ class MetaSearchAgent implements MetaSearchAgentType {
       .join('\n');
   }
 
-  private async handleStream(
-    stream: AsyncGenerator<StreamEvent, any, any>,
-    emitter: eventEmitter,
-  ) {
-    for await (const event of stream) {
-      if (
-        event.event === 'on_chain_end' &&
-        event.name === 'FinalSourceRetriever'
-      ) {
-        ``;
-        emitter.emit(
-          'data',
-          JSON.stringify({ type: 'sources', data: event.data.output }),
-        );
-      }
-      if (
-        event.event === 'on_chain_stream' &&
-        event.name === 'FinalResponseGenerator'
-      ) {
-        emitter.emit(
-          'data',
-          JSON.stringify({ type: 'response', data: event.data.chunk }),
-        );
-      }
-      if (
-        event.event === 'on_chain_end' &&
-        event.name === 'FinalResponseGenerator'
-      ) {
-        emitter.emit('end');
-      }
-    }
-  }
-
   async searchAndAnswer(
     message: string,
     history: BaseMessage[],
@@ -475,26 +443,19 @@ class MetaSearchAgent implements MetaSearchAgentType {
   ) {
     const emitter = new eventEmitter();
 
-    const answeringChain = await this.createAnsweringChain(
+    this.streamAnswer(
       llm,
       fileIds,
       embeddings,
       optimizationMode,
       systemInstructions,
-    );
-
-    const stream = answeringChain.streamEvents(
       {
         chat_history: history,
         query: message,
       },
-      {
-        version: 'v1',
-      },
+      emitter,
     );
 
-    this.handleStream(stream, emitter);
-
     return emitter;
   }
 }
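
With this change, searchAndAnswer starts streamAnswer without awaiting it and returns the emitter right away, so callers can attach listeners before any events fire. A small self-contained sketch of that pattern; runSearch and produceAnswer are illustrative stand-ins, not the real methods:

    import { EventEmitter } from 'node:events';

    // Illustrative stand-in for streamAnswer: it only emits after yielding to the
    // event loop, the way the real method emits only after awaited search/LLM calls.
    const produceAnswer = async (emitter: EventEmitter) => {
      await new Promise((resolve) => setImmediate(resolve));

      emitter.emit('data', JSON.stringify({ type: 'response', data: 'Hello ' }));
      emitter.emit('data', JSON.stringify({ type: 'response', data: 'world' }));
      emitter.emit('end');
    };

    // Illustrative stand-in for searchAndAnswer: fire and forget, return the emitter first.
    const runSearch = (): EventEmitter => {
      const emitter = new EventEmitter();

      void produceAnswer(emitter);

      return emitter;
    };

    // Usage: listeners are attached before the first emit fires.
    const emitter = runSearch();
    emitter.on('data', (raw: string) => console.log('chunk:', JSON.parse(raw).data));
    emitter.on('end', () => console.log('done'));
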