Mirror of https://github.com/ItzCrazyKns/Perplexica.git, synced 2025-09-21 00:31:33 +00:00

Compare commits: 16 commits on feat/syste... (head commit 114a7aa09d)
Commits (SHA1):

- 114a7aa09d
- d0ba8c9038
- 934fb0a23b
- e226645bc7
- 5447530ece
- ed6d46a440
- 588e68e93e
- c4440327db
- 64e2d457cc
- bf705afc21
- 2e4433a6b3
- 8ecf3b4e99
- 09661ae11d
- a8d410bc2f
- b5ee8386e7
- 0fcd598ff7
@@ -33,6 +33,7 @@ The API accepts a JSON object in the request body, where you define the focus mo
     ["human", "Hi, how are you?"],
     ["assistant", "I am doing well, how can I help you today?"]
   ],
+  "systemInstructions": "Focus on providing technical details about Perplexica's architecture.",
   "stream": false
 }
 ```
@@ -63,6 +64,8 @@ The API accepts a JSON object in the request body, where you define the focus mo

 - **`query`** (string, required): The search query or question.

+- **`systemInstructions`** (string, optional): Custom instructions provided by the user to guide the AI's response. These instructions are treated as user preferences and have lower priority than the system's core instructions. For example, you can specify a particular writing style, format, or focus area.
+
 - **`history`** (array, optional): An array of message pairs representing the conversation history. Each pair consists of a role (either 'human' or 'assistant') and the message content. This allows the system to use the context of the conversation to refine results. Example:

   ```json
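For illustration, here is a minimal request sketch that exercises the new `systemInstructions` field. The endpoint path, port, and the `focusMode` value are assumptions based on this diff, not a verified copy of the full API docs:

```ts
// Hypothetical call against a locally running Perplexica instance.
// Only `systemInstructions` (and the history/stream shapes) come from the
// change above; the URL and focusMode value are assumed for the example.
const res = await fetch('http://localhost:3000/api/search', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    focusMode: 'webSearch',
    query: "How does Perplexica's search agent work?",
    history: [
      ['human', 'Hi, how are you?'],
      ['assistant', 'I am doing well, how can I help you today?'],
    ],
    // New optional field introduced in this change set:
    systemInstructions:
      "Focus on providing technical details about Perplexica's architecture.",
    stream: false,
  }),
});

console.log(await res.json());
```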
@@ -22,5 +22,8 @@ MODEL_NAME = ""
 [MODELS.OLLAMA]
 API_URL = "" # Ollama API URL - http://host.docker.internal:11434

+[MODELS.DEEPSEEK]
+API_KEY = ""
+
 [API_ENDPOINTS]
 SEARXNG = "" # SearxNG API URL - http://localhost:32768
@@ -7,6 +7,7 @@ import {
   getGroqApiKey,
   getOllamaApiEndpoint,
   getOpenaiApiKey,
+  getDeepseekApiKey,
   updateConfig,
 } from '@/lib/config';
 import {
@@ -53,6 +54,7 @@ export const GET = async (req: Request) => {
     config['anthropicApiKey'] = getAnthropicApiKey();
     config['groqApiKey'] = getGroqApiKey();
     config['geminiApiKey'] = getGeminiApiKey();
+    config['deepseekApiKey'] = getDeepseekApiKey();
     config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
     config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
     config['customOpenaiModelName'] = getCustomOpenaiModelName();
@@ -88,6 +90,9 @@ export const POST = async (req: Request) => {
       OLLAMA: {
         API_URL: config.ollamaApiUrl,
       },
+      DEEPSEEK: {
+        API_KEY: config.deepseekApiKey,
+      },
       CUSTOM_OPENAI: {
         API_URL: config.customOpenaiApiUrl,
         API_KEY: config.customOpenaiApiKey,
@@ -34,6 +34,7 @@ interface ChatRequestBody {
   query: string;
   history: Array<[string, string]>;
   stream?: boolean;
+  systemInstructions?: string;
 }

 export const POST = async (req: Request) => {
@@ -125,7 +126,7 @@ export const POST = async (req: Request) => {
     embeddings,
     body.optimizationMode,
     [],
-    "",
+    body.systemInstructions || '',
   );

   if (!body.stream) {
@@ -20,6 +20,7 @@ interface SettingsType {
   anthropicApiKey: string;
   geminiApiKey: string;
   ollamaApiUrl: string;
+  deepseekApiKey: string;
  customOpenaiApiKey: string;
  customOpenaiApiUrl: string;
  customOpenaiModelName: string;
@@ -838,6 +839,25 @@ const Page = () => {
                 onSave={(value) => saveConfig('geminiApiKey', value)}
               />
             </div>
+
+            <div className="flex flex-col space-y-1">
+              <p className="text-black/70 dark:text-white/70 text-sm">
+                Deepseek API Key
+              </p>
+              <Input
+                type="text"
+                placeholder="Deepseek API Key"
+                value={config.deepseekApiKey}
+                isSaving={savingStates['deepseekApiKey']}
+                onChange={(e) => {
+                  setConfig((prev) => ({
+                    ...prev!,
+                    deepseekApiKey: e.target.value,
+                  }));
+                }}
+                onSave={(value) => saveConfig('deepseekApiKey', value)}
+              />
+            </div>
           </div>
         </SettingsSection>
       </div>
@@ -363,7 +363,6 @@ const ChatWindow = ({ id }: { id?: string }) => {

       if (data.type === 'sources') {
         sources = data.data;
-        if (!added) {
         setMessages((prevMessages) => [
           ...prevMessages,
           {
@@ -376,7 +375,6 @@ const ChatWindow = ({ id }: { id?: string }) => {
           },
         ]);
         added = true;
-        }
         setMessageAppeared(true);
       }

@@ -394,8 +392,8 @@ const ChatWindow = ({ id }: { id?: string }) => {
           },
         ]);
         added = true;
-        }
-
+        setMessageAppeared(true);
+      } else {
         setMessages((prev) =>
           prev.map((message) => {
             if (message.messageId === data.messageId) {
@@ -405,9 +403,9 @@ const ChatWindow = ({ id }: { id?: string }) => {
             return message;
           }),
         );
+      }

       recievedMessage += data.data;
-      setMessageAppeared(true);
       }

       if (data.type === 'messageEnd') {
@@ -48,6 +48,7 @@ const MessageBox = ({
   const [speechMessage, setSpeechMessage] = useState(message.content);

   useEffect(() => {
+    const citationRegex = /\[([^\]]+)\]/g;
     const regex = /\[(\d+)\]/g;
     let processedMessage = message.content;

@@ -67,11 +68,33 @@ const MessageBox = ({
     ) {
       setParsedMessage(
         processedMessage.replace(
-          regex,
-          (_, number) =>
-            `<a href="${
-              message.sources?.[number - 1]?.metadata?.url
-            }" target="_blank" className="bg-light-secondary dark:bg-dark-secondary px-1 rounded ml-1 no-underline text-xs text-black/70 dark:text-white/70 relative">${number}</a>`,
+          citationRegex,
+          (_, capturedContent: string) => {
+            const numbers = capturedContent
+              .split(',')
+              .map((numStr) => numStr.trim());
+
+            const linksHtml = numbers
+              .map((numStr) => {
+                const number = parseInt(numStr);
+
+                if (isNaN(number) || number <= 0) {
+                  return `[${numStr}]`;
+                }
+
+                const source = message.sources?.[number - 1];
+                const url = source?.metadata?.url;
+
+                if (url) {
+                  return `<a href="${url}" target="_blank" className="bg-light-secondary dark:bg-dark-secondary px-1 rounded ml-1 no-underline text-xs text-black/70 dark:text-white/70 relative">${numStr}</a>`;
+                } else {
+                  return `[${numStr}]`;
+                }
+              })
+              .join('');
+
+            return linksHtml;
+          },
         ),
       );
       return;
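To make the rewritten `replace()` callback concrete, here is a self-contained sketch of the same logic applied to a hard-coded message; the `sources` array is a made-up stand-in for `message.sources`, and the anchor markup is trimmed down from the class string used above:

```ts
// Grouped citations such as [1, 2] are split on commas, each number is
// resolved against the sources array, and anything that cannot be resolved
// falls back to plain bracketed text.
const citationRegex = /\[([^\]]+)\]/g;

const sources = [
  { metadata: { url: 'https://example.com/reranking' } },
  { metadata: { url: 'https://example.com/searxng' } },
];

const content =
  'Documents are reranked [1, 2] before the answer is written [5].';

const parsed = content.replace(citationRegex, (_, capturedContent: string) => {
  return capturedContent
    .split(',')
    .map((numStr) => numStr.trim())
    .map((numStr) => {
      const number = parseInt(numStr);
      const url = sources[number - 1]?.metadata?.url;
      // Non-numeric, out-of-range, or unknown citations stay as-is.
      return isNaN(number) || number <= 0 || !url
        ? `[${numStr}]`
        : `<a href="${url}" target="_blank">${numStr}</a>`;
    })
    .join('');
});

console.log(parsed);
// Documents are reranked <a ...>1</a><a ...>2</a> before the answer is written [5].
```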
@@ -25,6 +25,9 @@ interface Config {
   OLLAMA: {
     API_URL: string;
   };
+  DEEPSEEK: {
+    API_KEY: string;
+  };
   CUSTOM_OPENAI: {
     API_URL: string;
     API_KEY: string;
@@ -63,6 +66,8 @@ export const getSearxngApiEndpoint = () =>

 export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;

+export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
+
 export const getCustomOpenaiApiKey = () =>
   loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;

@@ -1,6 +1,6 @@
 export const webSearchRetrieverPrompt = `
 You are an AI question rephraser. You will be given a conversation and a follow-up question, you will have to rephrase the follow up question so it is a standalone question and can be used by another LLM to search the web for information to answer it.
-If it is a smple writing task or a greeting (unless the greeting contains a question after it) like Hi, Hello, How are you, etc. than a question then you need to return \`not_needed\` as the response (This is because the LLM won't need to search the web for finding information on this topic).
+If it is a simple writing task or a greeting (unless the greeting contains a question after it) like Hi, Hello, How are you, etc. than a question then you need to return \`not_needed\` as the response (This is because the LLM won't need to search the web for finding information on this topic).
 If the user asks some question from some URL or wants you to summarize a PDF or a webpage (via URL) you need to return the links inside the \`links\` XML block and the question inside the \`question\` XML block. If the user wants to you to summarize the webpage or the PDF you need to return \`summarize\` inside the \`question\` XML block in place of a question and the link to summarize in the \`links\` XML block.
 You must always return the rephrased question inside the \`question\` XML block, if there are no links in the follow-up question then don't insert a \`links\` XML block in your response.

src/lib/providers/deepseek.ts (new file, 44 lines)
@@ -0,0 +1,44 @@
+import { ChatOpenAI } from '@langchain/openai';
+import { getDeepseekApiKey } from '../config';
+import { ChatModel } from '.';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+
+const deepseekChatModels: Record<string, string>[] = [
+  {
+    displayName: 'Deepseek Chat (Deepseek V3)',
+    key: 'deepseek-chat',
+  },
+  {
+    displayName: 'Deepseek Reasoner (Deepseek R1)',
+    key: 'deepseek-reasoner',
+  },
+];
+
+export const loadDeepseekChatModels = async () => {
+  const deepseekApiKey = getDeepseekApiKey();
+
+  if (!deepseekApiKey) return {};
+
+  try {
+    const chatModels: Record<string, ChatModel> = {};
+
+    deepseekChatModels.forEach((model) => {
+      chatModels[model.key] = {
+        displayName: model.displayName,
+        model: new ChatOpenAI({
+          openAIApiKey: deepseekApiKey,
+          modelName: model.key,
+          temperature: 0.7,
+          configuration: {
+            baseURL: 'https://api.deepseek.com',
+          },
+        }) as unknown as BaseChatModel,
+      };
+    });
+
+    return chatModels;
+  } catch (err) {
+    console.error(`Error loading Deepseek models: ${err}`);
+    return {};
+  }
+};
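A hedged usage sketch for the new provider module. The `HumanMessage` import and the direct `invoke()` call follow standard LangChain usage, but calling the loader outside the provider registry like this is illustrative only and not something this change set does:

```ts
// Illustrative only: load the Deepseek chat models and invoke one directly.
// loadDeepseekChatModels() returns {} when MODELS.DEEPSEEK.API_KEY is unset.
import { HumanMessage } from '@langchain/core/messages';
import { loadDeepseekChatModels } from './deepseek';

const models = await loadDeepseekChatModels();
const deepseekChat = models['deepseek-chat'];

if (deepseekChat) {
  const res = await deepseekChat.model.invoke([
    new HumanMessage('Summarize what Perplexica does in one sentence.'),
  ]);
  console.log(deepseekChat.displayName, res.content);
}
```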
@@ -12,6 +12,7 @@ import { loadGroqChatModels } from './groq';
 import { loadAnthropicChatModels } from './anthropic';
 import { loadGeminiChatModels, loadGeminiEmbeddingModels } from './gemini';
 import { loadTransformersEmbeddingsModels } from './transformers';
+import { loadDeepseekChatModels } from './deepseek';

 export interface ChatModel {
   displayName: string;
@@ -32,6 +33,7 @@ export const chatModelProviders: Record<
   groq: loadGroqChatModels,
   anthropic: loadAnthropicChatModels,
   gemini: loadGeminiChatModels,
+  deepseek: loadDeepseekChatModels,
 };

 export const embeddingModelProviders: Record<
@@ -6,11 +6,6 @@ import {
   MessagesPlaceholder,
   PromptTemplate,
 } from '@langchain/core/prompts';
-import {
-  RunnableLambda,
-  RunnableMap,
-  RunnableSequence,
-} from '@langchain/core/runnables';
 import { BaseMessage } from '@langchain/core/messages';
 import { StringOutputParser } from '@langchain/core/output_parsers';
 import LineListOutputParser from '../outputParsers/listLineOutputParser';
@@ -24,6 +19,7 @@ import computeSimilarity from '../utils/computeSimilarity';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import eventEmitter from 'events';
 import { StreamEvent } from '@langchain/core/tracers/log_stream';
+import { EventEmitter } from 'node:stream';

 export interface MetaSearchAgentType {
   searchAndAnswer: (
@@ -47,7 +43,7 @@ interface Config {
   activeEngines: string[];
 }

-type BasicChainInput = {
+type SearchInput = {
   chat_history: BaseMessage[];
   query: string;
 };
@@ -60,14 +56,25 @@ class MetaSearchAgent implements MetaSearchAgentType {
     this.config = config;
   }

-  private async createSearchRetrieverChain(llm: BaseChatModel) {
+  private async searchSources(
+    llm: BaseChatModel,
+    input: SearchInput,
+    emitter: EventEmitter,
+  ) {
     (llm as unknown as ChatOpenAI).temperature = 0;

-    return RunnableSequence.from([
-      PromptTemplate.fromTemplate(this.config.queryGeneratorPrompt),
-      llm,
-      this.strParser,
-      RunnableLambda.from(async (input: string) => {
+    const chatPrompt = PromptTemplate.fromTemplate(
+      this.config.queryGeneratorPrompt,
+    );
+
+    const processedChatPrompt = await chatPrompt.invoke({
+      chat_history: formatChatHistoryAsString(input.chat_history),
+      query: input.query,
+    });
+
+    const llmRes = await llm.invoke(processedChatPrompt);
+    const messageStr = await this.strParser.invoke(llmRes);
+
     const linksOutputParser = new LineListOutputParser({
       key: 'links',
     });
@@ -76,10 +83,10 @@ class MetaSearchAgent implements MetaSearchAgentType {
       key: 'question',
     });

-    const links = await linksOutputParser.parse(input);
+    const links = await linksOutputParser.parse(messageStr);
     let question = this.config.summarizer
-      ? await questionOutputParser.parse(input)
-      : input;
+      ? await questionOutputParser.parse(messageStr)
+      : messageStr;

     if (question === 'not_needed') {
       return { query: '', docs: [] };
@@ -99,8 +106,7 @@ class MetaSearchAgent implements MetaSearchAgentType {
         linkDocs.map((doc) => {
           const URLDocExists = docGroups.find(
             (d) =>
-              d.metadata.url === doc.metadata.url &&
-              d.metadata.totalDocs < 10,
+              d.metadata.url === doc.metadata.url && d.metadata.totalDocs < 10,
           );

           if (!URLDocExists) {
@@ -115,8 +121,7 @@ class MetaSearchAgent implements MetaSearchAgentType {

           const docIndex = docGroups.findIndex(
             (d) =>
-              d.metadata.url === doc.metadata.url &&
-              d.metadata.totalDocs < 10,
+              d.metadata.url === doc.metadata.url && d.metadata.totalDocs < 10,
           );

           if (docIndex !== -1) {
@@ -228,42 +233,31 @@ class MetaSearchAgent implements MetaSearchAgentType {

         return { query: question, docs: documents };
       }
-      }),
-    ]);
   }

-  private async createAnsweringChain(
+  private async streamAnswer(
     llm: BaseChatModel,
     fileIds: string[],
     embeddings: Embeddings,
     optimizationMode: 'speed' | 'balanced' | 'quality',
     systemInstructions: string,
+    input: SearchInput,
+    emitter: EventEmitter,
   ) {
-    return RunnableSequence.from([
-      RunnableMap.from({
-        systemInstructions: () => systemInstructions,
-        query: (input: BasicChainInput) => input.query,
-        chat_history: (input: BasicChainInput) => input.chat_history,
-        date: () => new Date().toISOString(),
-        context: RunnableLambda.from(async (input: BasicChainInput) => {
-          const processedHistory = formatChatHistoryAsString(
-            input.chat_history,
-          );
+    const chatPrompt = ChatPromptTemplate.fromMessages([
+      ['system', this.config.responsePrompt],
+      new MessagesPlaceholder('chat_history'),
+      ['user', '{query}'],
+    ]);

     let docs: Document[] | null = null;
     let query = input.query;

     if (this.config.searchWeb) {
-      const searchRetrieverChain =
-        await this.createSearchRetrieverChain(llm);
-
-      const searchRetrieverResult = await searchRetrieverChain.invoke({
-        chat_history: processedHistory,
-        query,
-      });
-
-      query = searchRetrieverResult.query;
-      docs = searchRetrieverResult.docs;
+      const searchResults = await this.searchSources(llm, input, emitter);
+      query = searchResults.query;
+      docs = searchResults.docs;
     }

     const sortedDocs = await this.rerankDocs(
@@ -274,23 +268,30 @@ class MetaSearchAgent implements MetaSearchAgentType {
       optimizationMode,
     );

-          return sortedDocs;
-        })
-          .withConfig({
-            runName: 'FinalSourceRetriever',
-          })
-          .pipe(this.processDocs),
-      }),
-      ChatPromptTemplate.fromMessages([
-        ['system', this.config.responsePrompt],
-        new MessagesPlaceholder('chat_history'),
-        ['user', '{query}'],
-      ]),
-      llm,
-      this.strParser,
-    ]).withConfig({
-      runName: 'FinalResponseGenerator',
+    emitter.emit('data', JSON.stringify({ type: 'sources', data: sortedDocs }));
+
+    const context = this.processDocs(sortedDocs);
+
+    const formattedChatPrompt = await chatPrompt.invoke({
+      query: input.query,
+      chat_history: input.chat_history,
+      date: new Date().toISOString(),
+      context: context,
+      systemInstructions: systemInstructions,
     });

+    const llmRes = await llm.stream(formattedChatPrompt);
+
+    for await (const data of llmRes) {
+      const messageStr = await this.strParser.invoke(data);
+
+      emitter.emit(
+        'data',
+        JSON.stringify({ type: 'response', data: messageStr }),
+      );
+    }
+
+    emitter.emit('end');
   }

   private async rerankDocs(
@@ -431,39 +432,6 @@ class MetaSearchAgent implements MetaSearchAgentType {
       .join('\n');
   }

-  private async handleStream(
-    stream: AsyncGenerator<StreamEvent, any, any>,
-    emitter: eventEmitter,
-  ) {
-    for await (const event of stream) {
-      if (
-        event.event === 'on_chain_end' &&
-        event.name === 'FinalSourceRetriever'
-      ) {
-        ``;
-        emitter.emit(
-          'data',
-          JSON.stringify({ type: 'sources', data: event.data.output }),
-        );
-      }
-      if (
-        event.event === 'on_chain_stream' &&
-        event.name === 'FinalResponseGenerator'
-      ) {
-        emitter.emit(
-          'data',
-          JSON.stringify({ type: 'response', data: event.data.chunk }),
-        );
-      }
-      if (
-        event.event === 'on_chain_end' &&
-        event.name === 'FinalResponseGenerator'
-      ) {
-        emitter.emit('end');
-      }
-    }
-  }
-
   async searchAndAnswer(
     message: string,
     history: BaseMessage[],
@@ -475,26 +443,19 @@ class MetaSearchAgent implements MetaSearchAgentType {
   ) {
     const emitter = new eventEmitter();

-    const answeringChain = await this.createAnsweringChain(
+    this.streamAnswer(
       llm,
       fileIds,
       embeddings,
       optimizationMode,
       systemInstructions,
-    );
-
-    const stream = answeringChain.streamEvents(
       {
         chat_history: history,
         query: message,
       },
-      {
-        version: 'v1',
-      },
+      emitter,
     );

-    this.handleStream(stream, emitter);
-
     return emitter;
   }
 }
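To show how the new emitter-driven flow is consumed, here is a hedged caller sketch. The import path for `MetaSearchAgentType`, the exact argument order (which mirrors the search-route call shown earlier), and the payload shapes parsed from the `'data'` events are assumptions drawn from this diff:

```ts
import { BaseMessage } from '@langchain/core/messages';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { Embeddings } from '@langchain/core/embeddings';
import type { MetaSearchAgentType } from './metaSearchAgent';

// Illustrative consumer of the EventEmitter returned by searchAndAnswer():
// 'data' events carry JSON strings of type 'sources' or 'response', and
// 'end' fires once streaming has finished.
export const runQuery = async (
  agent: MetaSearchAgentType,
  llm: BaseChatModel,
  embeddings: Embeddings,
  history: BaseMessage[],
) => {
  const emitter = await agent.searchAndAnswer(
    'What is Perplexica?',
    history,
    llm,
    embeddings,
    'balanced', // optimizationMode
    [], // fileIds
    '', // systemInstructions
  );

  emitter.on('data', (data: string) => {
    const parsed = JSON.parse(data);
    if (parsed.type === 'sources') {
      console.log(`received ${parsed.data.length} sources`);
    } else if (parsed.type === 'response') {
      process.stdout.write(parsed.data);
    }
  });

  emitter.on('end', () => console.log('\nstream finished'));
};
```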