Compare commits

5 Commits

Author       SHA1        Message                                          Date
ItzCrazyKns  da1123d84b  feat(groq): update model name                    2025-04-07 23:30:51 +05:30
ItzCrazyKns  627775c430  feat(groq): remove maverick (not being run yet)  2025-04-07 23:29:51 +05:30
ItzCrazyKns  245573efca  feat(groq): update model list                    2025-04-07 23:23:18 +05:30
ItzCrazyKns  a85f762c58  feat(package): bump version                      2025-04-07 10:27:04 +05:30
ItzCrazyKns  3ddcceda0a  feat(gemini-provider): update embedding models   2025-04-07 10:26:29 +05:30
5 changed files with 284 additions and 231 deletions

View File

@@ -1,6 +1,6 @@
 {
   "name": "perplexica-frontend",
-  "version": "1.10.1",
+  "version": "1.10.2",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {

View File

@@ -363,6 +363,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
       if (data.type === 'sources') {
         sources = data.data;
+        if (!added) {
           setMessages((prevMessages) => [
             ...prevMessages,
             {
@@ -375,6 +376,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
             },
           ]);
           added = true;
+        }
         setMessageAppeared(true);
       }
@@ -392,8 +394,8 @@ const ChatWindow = ({ id }: { id?: string }) => {
             },
           ]);
           added = true;
-          setMessageAppeared(true);
-        } else {
+        }
         setMessages((prev) =>
           prev.map((message) => {
             if (message.messageId === data.messageId) {
@@ -403,9 +405,9 @@ const ChatWindow = ({ id }: { id?: string }) => {
             return message;
           }),
         );
-        }
         recievedMessage += data.data;
+        setMessageAppeared(true);
       }
       if (data.type === 'messageEnd') {
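
Taken together, these hunks make the `added` flag the single guard for creating the assistant message, whether a 'sources' event or the first 'message' chunk arrives first, while chunk appends and `setMessageAppeared(true)` now run outside that guard. A minimal sketch of that create-once-then-append pattern, using simplified hypothetical types rather than the component's real React state hooks:

// Illustration only: simplified stand-ins for ChatWindow's state and types.
type AssistantMessage = { messageId: string; content: string; sources: unknown[] };

class StreamingMessageState {
  private added = false; // mirrors the `added` flag in the diff above
  messages: AssistantMessage[] = [];

  // A 'sources' event or the first 'message' chunk creates the assistant message exactly once.
  ensureMessage(messageId: string, sources: unknown[] = []): void {
    if (!this.added) {
      this.messages = [...this.messages, { messageId, content: '', sources }];
      this.added = true;
    }
  }

  // Every streamed chunk is appended to the already-created message.
  appendChunk(messageId: string, chunk: string): void {
    this.ensureMessage(messageId);
    this.messages = this.messages.map((m) =>
      m.messageId === messageId ? { ...m, content: m.content + chunk } : m,
    );
  }
}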

View File

@@ -40,8 +40,12 @@ const geminiChatModels: Record<string, string>[] = [
 const geminiEmbeddingModels: Record<string, string>[] = [
   {
-    displayName: 'Gemini Embedding',
-    key: 'gemini-embedding-exp',
+    displayName: 'Text Embedding 004',
+    key: 'models/text-embedding-004',
+  },
+  {
+    displayName: 'Embedding 001',
+    key: 'models/embedding-001',
   },
 ];
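
The Gemini provider keeps its embedding catalogue as plain `Record<string, string>[]` entries, so this change is purely data: two stable model keys replace the experimental one. A hedged sketch of how such records are typically turned into a keyed map for the rest of the app; the `loadGeminiEmbeddingModels` name, the `EmbeddingModel` shape, and the `makeClient` factory are illustrative assumptions, not taken from this diff:

// Hedged sketch: turning the record list above into a keyed map for a settings UI.
const geminiEmbeddingModels: Record<string, string>[] = [
  { displayName: 'Text Embedding 004', key: 'models/text-embedding-004' },
  { displayName: 'Embedding 001', key: 'models/embedding-001' },
];

type EmbeddingModel = { displayName: string; model: unknown };

const loadGeminiEmbeddingModels = (
  makeClient: (modelKey: string) => unknown, // hypothetical factory for the embedding client
): Record<string, EmbeddingModel> => {
  const models: Record<string, EmbeddingModel> = {};
  geminiEmbeddingModels.forEach((m) => {
    // The record's `key` doubles as the map key and the model id sent to the API.
    models[m.key] = { displayName: m.displayName, model: makeClient(m.key) };
  });
  return models;
};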

View File

@@ -72,6 +72,14 @@ const groqChatModels: Record<string, string>[] = [
     displayName: 'Llama 3.2 90B Vision Preview (Preview)',
     key: 'llama-3.2-90b-vision-preview',
   },
+  /* {
+    displayName: 'Llama 4 Maverick 17B 128E Instruct (Preview)',
+    key: 'meta-llama/llama-4-maverick-17b-128e-instruct',
+  }, */
+  {
+    displayName: 'Llama 4 Scout 17B 16E Instruct (Preview)',
+    key: 'meta-llama/llama-4-scout-17b-16e-instruct',
+  },
 ];
 
 export const loadGroqChatModels = async () => {

View File

@@ -6,6 +6,11 @@ import {
   MessagesPlaceholder,
   PromptTemplate,
 } from '@langchain/core/prompts';
+import {
+  RunnableLambda,
+  RunnableMap,
+  RunnableSequence,
+} from '@langchain/core/runnables';
 import { BaseMessage } from '@langchain/core/messages';
 import { StringOutputParser } from '@langchain/core/output_parsers';
 import LineListOutputParser from '../outputParsers/listLineOutputParser';
@@ -19,7 +24,6 @@ import computeSimilarity from '../utils/computeSimilarity';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import eventEmitter from 'events';
 import { StreamEvent } from '@langchain/core/tracers/log_stream';
-import { EventEmitter } from 'node:stream';
 
 export interface MetaSearchAgentType {
   searchAndAnswer: (
@@ -43,7 +47,7 @@ interface Config {
   activeEngines: string[];
 }
 
-type SearchInput = {
+type BasicChainInput = {
   chat_history: BaseMessage[];
   query: string;
 };
@@ -56,25 +60,14 @@ class MetaSearchAgent implements MetaSearchAgentType {
     this.config = config;
   }
 
-  private async searchSources(
-    llm: BaseChatModel,
-    input: SearchInput,
-    emitter: EventEmitter,
-  ) {
+  private async createSearchRetrieverChain(llm: BaseChatModel) {
     (llm as unknown as ChatOpenAI).temperature = 0;
 
-    const chatPrompt = PromptTemplate.fromTemplate(
-      this.config.queryGeneratorPrompt,
-    );
-
-    const processedChatPrompt = await chatPrompt.invoke({
-      chat_history: formatChatHistoryAsString(input.chat_history),
-      query: input.query,
-    });
-
-    const llmRes = await llm.invoke(processedChatPrompt);
-    const messageStr = await this.strParser.invoke(llmRes);
+    return RunnableSequence.from([
+      PromptTemplate.fromTemplate(this.config.queryGeneratorPrompt),
+      llm,
+      this.strParser,
+      RunnableLambda.from(async (input: string) => {
 
     const linksOutputParser = new LineListOutputParser({
       key: 'links',
     });
@@ -83,10 +76,10 @@ class MetaSearchAgent implements MetaSearchAgentType {
       key: 'question',
     });
 
-    const links = await linksOutputParser.parse(messageStr);
+    const links = await linksOutputParser.parse(input);
 
     let question = this.config.summarizer
-      ? await questionOutputParser.parse(messageStr)
-      : messageStr;
+      ? await questionOutputParser.parse(input)
+      : input;
 
     if (question === 'not_needed') {
       return { query: '', docs: [] };
@@ -106,7 +99,8 @@ class MetaSearchAgent implements MetaSearchAgentType {
         linkDocs.map((doc) => {
           const URLDocExists = docGroups.find(
             (d) =>
-              d.metadata.url === doc.metadata.url && d.metadata.totalDocs < 10,
+              d.metadata.url === doc.metadata.url &&
+              d.metadata.totalDocs < 10,
           );
 
           if (!URLDocExists) {
@@ -121,7 +115,8 @@ class MetaSearchAgent implements MetaSearchAgentType {
           const docIndex = docGroups.findIndex(
             (d) =>
-              d.metadata.url === doc.metadata.url && d.metadata.totalDocs < 10,
+              d.metadata.url === doc.metadata.url &&
+              d.metadata.totalDocs < 10,
           );
 
           if (docIndex !== -1) {
@@ -233,31 +228,42 @@ class MetaSearchAgent implements MetaSearchAgentType {
         return { query: question, docs: documents };
       }
+      }),
+    ]);
   }
 
-  private async streamAnswer(
+  private async createAnsweringChain(
     llm: BaseChatModel,
     fileIds: string[],
     embeddings: Embeddings,
     optimizationMode: 'speed' | 'balanced' | 'quality',
     systemInstructions: string,
-    input: SearchInput,
-    emitter: EventEmitter,
   ) {
-    const chatPrompt = ChatPromptTemplate.fromMessages([
-      ['system', this.config.responsePrompt],
-      new MessagesPlaceholder('chat_history'),
-      ['user', '{query}'],
-    ]);
+    return RunnableSequence.from([
+      RunnableMap.from({
+        systemInstructions: () => systemInstructions,
+        query: (input: BasicChainInput) => input.query,
+        chat_history: (input: BasicChainInput) => input.chat_history,
+        date: () => new Date().toISOString(),
+        context: RunnableLambda.from(async (input: BasicChainInput) => {
+          const processedHistory = formatChatHistoryAsString(
+            input.chat_history,
+          );
 
     let docs: Document[] | null = null;
     let query = input.query;
 
     if (this.config.searchWeb) {
-      const searchResults = await this.searchSources(llm, input, emitter);
-
-      query = searchResults.query;
-      docs = searchResults.docs;
+      const searchRetrieverChain =
+        await this.createSearchRetrieverChain(llm);
+
+      const searchRetrieverResult = await searchRetrieverChain.invoke({
+        chat_history: processedHistory,
+        query,
+      });
+
+      query = searchRetrieverResult.query;
+      docs = searchRetrieverResult.docs;
     }
 
     const sortedDocs = await this.rerankDocs(
@@ -268,30 +274,23 @@ class MetaSearchAgent implements MetaSearchAgentType {
       optimizationMode,
     );
 
-    emitter.emit('data', JSON.stringify({ type: 'sources', data: sortedDocs }));
-
-    const context = this.processDocs(sortedDocs);
-
-    const formattedChatPrompt = await chatPrompt.invoke({
-      query: input.query,
-      chat_history: input.chat_history,
-      date: new Date().toISOString(),
-      context: context,
-      systemInstructions: systemInstructions,
+          return sortedDocs;
+        })
+          .withConfig({
+            runName: 'FinalSourceRetriever',
+          })
+          .pipe(this.processDocs),
+      }),
+      ChatPromptTemplate.fromMessages([
+        ['system', this.config.responsePrompt],
+        new MessagesPlaceholder('chat_history'),
+        ['user', '{query}'],
+      ]),
+      llm,
+      this.strParser,
+    ]).withConfig({
+      runName: 'FinalResponseGenerator',
     });
-
-    const llmRes = await llm.stream(formattedChatPrompt);
-
-    for await (const data of llmRes) {
-      const messageStr = await this.strParser.invoke(data);
-
-      emitter.emit(
-        'data',
-        JSON.stringify({ type: 'response', data: messageStr }),
-      );
-    }
-
-    emitter.emit('end');
   }
 
   private async rerankDocs(
@@ -432,6 +431,39 @@ class MetaSearchAgent implements MetaSearchAgentType {
       .join('\n');
   }
 
+  private async handleStream(
+    stream: AsyncGenerator<StreamEvent, any, any>,
+    emitter: eventEmitter,
+  ) {
+    for await (const event of stream) {
+      if (
+        event.event === 'on_chain_end' &&
+        event.name === 'FinalSourceRetriever'
+      ) {
+        ``;
+        emitter.emit(
+          'data',
+          JSON.stringify({ type: 'sources', data: event.data.output }),
+        );
+      }
+      if (
+        event.event === 'on_chain_stream' &&
+        event.name === 'FinalResponseGenerator'
+      ) {
+        emitter.emit(
+          'data',
+          JSON.stringify({ type: 'response', data: event.data.chunk }),
+        );
+      }
+      if (
+        event.event === 'on_chain_end' &&
+        event.name === 'FinalResponseGenerator'
+      ) {
+        emitter.emit('end');
+      }
+    }
+  }
+
   async searchAndAnswer(
     message: string,
     history: BaseMessage[],
@@ -443,19 +475,26 @@
   ) {
     const emitter = new eventEmitter();
 
-    this.streamAnswer(
+    const answeringChain = await this.createAnsweringChain(
       llm,
       fileIds,
       embeddings,
       optimizationMode,
       systemInstructions,
+    );
+
+    const stream = answeringChain.streamEvents(
       {
         chat_history: history,
         query: message,
       },
-      emitter,
+      {
+        version: 'v1',
+      },
     );
 
+    this.handleStream(stream, emitter);
+
     return emitter;
   }
 }
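
For context on how the refactored agent is consumed: `searchAndAnswer` still returns an `eventEmitter`, and `handleStream` translates LangChain `streamEvents` output into the same `'data'`/`'end'` protocol the old manual streaming used. A hedged sketch of a listener for that protocol; only the event names and JSON payload types come from `handleStream` above, the rest is illustrative:

// Hedged sketch of a consumer for the emitter returned by searchAndAnswer.
import { EventEmitter } from 'events';

function attachSearchListeners(emitter: EventEmitter): void {
  emitter.on('data', (raw: string) => {
    const parsed = JSON.parse(raw) as { type: string; data: unknown };
    if (parsed.type === 'sources') {
      // Emitted once, when the FinalSourceRetriever chain ends.
      console.log('sources:', parsed.data);
    } else if (parsed.type === 'response') {
      // Emitted for each chunk streamed by FinalResponseGenerator.
      process.stdout.write(String(parsed.data));
    }
  });

  emitter.on('end', () => {
    // Emitted when FinalResponseGenerator finishes.
    console.log('\n[stream finished]');
  });
}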