feat(routes): update routes to handle new llm types
@@ -1,7 +1,6 @@
 import searchImages from '@/lib/agents/media/image';
 import ModelRegistry from '@/lib/models/registry';
 import { ModelWithProvider } from '@/lib/models/types';
-import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
 
 interface ImageSearchBody {
   query: string;
@@ -20,19 +19,9 @@ export const POST = async (req: Request) => {
     body.chatModel.key,
   );
 
-  const chatHistory = body.chatHistory
-    .map((msg: any) => {
-      if (msg.role === 'user') {
-        return new HumanMessage(msg.content);
-      } else if (msg.role === 'assistant') {
-        return new AIMessage(msg.content);
-      }
-    })
-    .filter((msg) => msg !== undefined) as BaseMessage[];
-
   const images = await searchImages(
     {
-      chatHistory: chatHistory,
+      chatHistory: body.chatHistory,
       query: body.query,
     },
     llm,
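The image route now hands body.chatHistory to searchImages unchanged instead of rebuilding LangChain HumanMessage/AIMessage objects. A minimal sketch of the request shape this implies — the field types are assumptions read off this hunk, not the project's actual declarations:

// Hypothetical shapes inferred from this hunk only.
type ChatTurn = { role: 'user' | 'assistant'; content: string };

interface ImageSearchBody {
  query: string;
  chatHistory: ChatTurn[];    // forwarded to searchImages as-is
  chatModel: { key: string }; // only the .key access is visible in this diff
}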
@@ -1,8 +1,8 @@
-import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
-import { MetaSearchAgentType } from '@/lib/search/metaSearchAgent';
-import { searchHandlers } from '@/lib/search';
 import ModelRegistry from '@/lib/models/registry';
 import { ModelWithProvider } from '@/lib/models/types';
+import SessionManager from '@/lib/session';
+import SearchAgent from '@/lib/agents/search';
+import { ChatTurnMessage } from '@/lib/types';
 
 interface ChatRequestBody {
   optimizationMode: 'speed' | 'balanced';
@@ -40,27 +40,26 @@ export const POST = async (req: Request) => {
     ),
   ]);
 
-  const history: BaseMessage[] = body.history.map((msg) => {
+  const history: ChatTurnMessage[] = body.history.map((msg) => {
     return msg[0] === 'human'
-      ? new HumanMessage({ content: msg[1] })
-      : new AIMessage({ content: msg[1] });
+      ? { role: 'user', content: msg[1] }
+      : { role: 'assistant', content: msg[1] };
   });
 
-  const searchHandler: MetaSearchAgentType = searchHandlers[body.focusMode];
+  const session = SessionManager.createSession();
 
-  if (!searchHandler) {
-    return Response.json({ message: 'Invalid focus mode' }, { status: 400 });
-  }
+  const agent = new SearchAgent();
 
-  const emitter = await searchHandler.searchAndAnswer(
-    body.query,
-    history,
-    llm,
-    embeddings,
-    body.optimizationMode,
-    [],
-    body.systemInstructions || '',
-  );
+  agent.searchAsync(session, {
+    chatHistory: history,
+    config: {
+      embedding: embeddings,
+      llm: llm,
+      sources: ['web', 'discussions', 'academic'],
+      mode: 'balanced',
+    },
+    followUp: body.query,
+  });
 
   if (!body.stream) {
     return new Promise(
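Here the BaseMessage[] history becomes ChatTurnMessage[] and the focus-mode handler is replaced by a session plus a SearchAgent. A minimal sketch of the assumed ChatTurnMessage shape and of the tuple mapping the route performs — the real type lives in @/lib/types and may carry more fields:

// Assumed shape, inferred from the role/content objects built above.
type ChatTurnMessage = { role: 'user' | 'assistant'; content: string };

// body.history arrives as ['human' | 'ai', string] tuples and is mapped like so:
const history: ChatTurnMessage[] = [
  ['human', 'What is Perplexica?'],
  ['ai', 'An open-source AI-powered search engine.'],
].map(([who, text]) =>
  who === 'human'
    ? { role: 'user', content: text }
    : { role: 'assistant', content: text },
);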
@@ -71,7 +70,7 @@ export const POST = async (req: Request) => {
        let message = '';
        let sources: any[] = [];
 
-        emitter.on('data', (data: string) => {
+        session.addListener('data', (data: string) => {
          try {
            const parsedData = JSON.parse(data);
            if (parsedData.type === 'response') {
@@ -89,11 +88,11 @@ export const POST = async (req: Request) => {
          }
        });
 
-        emitter.on('end', () => {
+        session.addListener('end', () => {
          resolve(Response.json({ message, sources }, { status: 200 }));
        });
 
-        emitter.on('error', (error: any) => {
+        session.addListener('error', (error: any) => {
          reject(
            Response.json(
              { message: 'Search error', error },
@@ -124,14 +123,14 @@ export const POST = async (req: Request) => {
      );
 
      signal.addEventListener('abort', () => {
-        emitter.removeAllListeners();
+        session.removeAllListeners();
 
        try {
          controller.close();
        } catch (error) {}
      });
 
-      emitter.on('data', (data: string) => {
+      session.addListener('data', (data: string) => {
        if (signal.aborted) return;
 
        try {
@@ -162,7 +161,7 @@ export const POST = async (req: Request) => {
        }
      });
 
-      emitter.on('end', () => {
+      session.addListener('end', () => {
        if (signal.aborted) return;
 
        controller.enqueue(
@@ -175,7 +174,7 @@ export const POST = async (req: Request) => {
        controller.close();
      });
 
-      emitter.on('error', (error: any) => {
+      session.addListener('error', (error: any) => {
        if (signal.aborted) return;
 
        controller.error(error);
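Throughout the streaming and non-streaming branches, emitter.on(...) becomes session.addListener(...) with the same 'data', 'end', and 'error' events, so the session created by SessionManager.createSession() is presumably EventEmitter-like. A rough sketch of that assumed contract — not the actual @/lib/session implementation:

// Assumed minimal surface of a session, inferred from the listener calls in this route.
import { EventEmitter } from 'node:events';

class Session extends EventEmitter {
  // emits 'data' with JSON string chunks (e.g. { type: 'response', ... }),
  // 'end' when the search finishes, and 'error' on failure
}

// Hypothetical consumption mirroring the non-streaming branch:
const session = new Session();
session.addListener('data', (data: string) => {
  const parsed = JSON.parse(data);
  if (parsed.type === 'response') {
    // accumulate the response chunk (the payload field name is not shown in this diff)
  }
});
session.addListener('end', () => { /* resolve with the accumulated message */ });
session.addListener('error', (err: any) => { /* reject with the error */ });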
@@ -7,6 +7,7 @@ import { DocxLoader } from '@langchain/community/document_loaders/fs/docx';
 import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
 import { Document } from '@langchain/core/documents';
 import ModelRegistry from '@/lib/models/registry';
+import { Chunk } from '@/lib/types';
 
 interface FileRes {
   fileName: string;
@@ -87,9 +88,17 @@ export async function POST(req: Request) {
        }),
      );
 
-      const embeddings = await model.embedDocuments(
-        splitted.map((doc) => doc.pageContent),
-      );
+      const chunks: Chunk[] = splitted.map((doc) => {
+        return {
+          content: doc.pageContent,
+          metadata: doc.metadata,
+        }
+      });
+
+      const embeddings = await model.embedChunks(
+        chunks
+      );
+
       const embeddingsDataPath = filePath.replace(
         /\.\w+$/,
         '-embeddings.json',
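The upload route now keeps document metadata by building Chunk objects and calling embedChunks instead of embedDocuments on bare strings. A small sketch of the shapes this implies — inferred from this hunk, not copied from @/lib/types or the model registry:

// Assumed shapes, inferred from this diff only.
import { Document } from '@langchain/core/documents';

interface Chunk {
  content: string;
  metadata: Record<string, any>;
}

interface EmbeddingModel {
  // presumably one vector per chunk, in input order
  embedChunks(chunks: Chunk[]): Promise<number[][]>;
}

// Mapping split Documents to chunks, as the route now does:
const toChunks = (docs: Document[]): Chunk[] =>
  docs.map((doc) => ({ content: doc.pageContent, metadata: doc.metadata }));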
@@ -1,7 +1,6 @@
 import handleVideoSearch from '@/lib/agents/media/video';
 import ModelRegistry from '@/lib/models/registry';
 import { ModelWithProvider } from '@/lib/models/types';
-import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
 
 interface VideoSearchBody {
   query: string;
@@ -20,19 +19,9 @@ export const POST = async (req: Request) => {
     body.chatModel.key,
   );
 
-  const chatHistory = body.chatHistory
-    .map((msg: any) => {
-      if (msg.role === 'user') {
-        return new HumanMessage(msg.content);
-      } else if (msg.role === 'assistant') {
-        return new AIMessage(msg.content);
-      }
-    })
-    .filter((msg) => msg !== undefined) as BaseMessage[];
-
   const videos = await handleVideoSearch(
     {
-      chatHistory: chatHistory,
+      chatHistory: body.chatHistory,
       query: body.query,
     },
     llm,