mirror of https://github.com/ItzCrazyKns/Perplexica.git
synced 2025-06-18 15:58:31 +00:00
feat(app): add search API
```diff
@@ -20,11 +20,11 @@ The API accepts a JSON object in the request body, where you define the focus mode
 {
   "chatModel": {
     "provider": "openai",
-    "model": "gpt-4o-mini"
+    "name": "gpt-4o-mini"
   },
   "embeddingModel": {
     "provider": "openai",
-    "model": "text-embedding-3-large"
+    "name": "text-embedding-3-large"
   },
   "optimizationMode": "speed",
   "focusMode": "webSearch",
```
```diff
@@ -38,18 +38,18 @@ The API accepts a JSON object in the request body, where you define the focus mode
 
 ### Request Parameters
 
-- **`chatModel`** (object, optional): Defines the chat model to be used for the query. For model details you can send a GET request at `http://localhost:3001/api/models`. Make sure to use the key value (For example "gpt-4o-mini" instead of the display name "GPT 4 omni mini").
+- **`chatModel`** (object, optional): Defines the chat model to be used for the query. For model details you can send a GET request at `http://localhost:3000/api/models`. Make sure to use the key value (For example "gpt-4o-mini" instead of the display name "GPT 4 omni mini").
 
   - `provider`: Specifies the provider for the chat model (e.g., `openai`, `ollama`).
-  - `model`: The specific model from the chosen provider (e.g., `gpt-4o-mini`).
+  - `name`: The specific model from the chosen provider (e.g., `gpt-4o-mini`).
   - Optional fields for custom OpenAI configuration:
     - `customOpenAIBaseURL`: If you’re using a custom OpenAI instance, provide the base URL.
     - `customOpenAIKey`: The API key for a custom OpenAI instance.
 
-- **`embeddingModel`** (object, optional): Defines the embedding model for similarity-based searching. For model details you can send a GET request at `http://localhost:3001/api/models`. Make sure to use the key value (For example "text-embedding-3-large" instead of the display name "Text Embedding 3 Large").
+- **`embeddingModel`** (object, optional): Defines the embedding model for similarity-based searching. For model details you can send a GET request at `http://localhost:3000/api/models`. Make sure to use the key value (For example "text-embedding-3-large" instead of the display name "Text Embedding 3 Large").
 
   - `provider`: The provider for the embedding model (e.g., `openai`).
-  - `model`: The specific embedding model (e.g., `text-embedding-3-large`).
+  - `name`: The specific embedding model (e.g., `text-embedding-3-large`).
 
 - **`focusMode`** (string, required): Specifies which focus mode to use. Available modes:
 
```
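To make the documented request concrete, a minimal client call might look like the sketch below. It assumes the app is reachable at `http://localhost:3000` and that the new handler is served at `/api/search` (the usual Next.js mapping for `src/app/api/search/route.ts`); the query text and model values are illustrative.

```ts
// Sketch of a client call against the updated request schema ("name" instead of "model").
async function searchExample() {
  const res = await fetch('http://localhost:3000/api/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      chatModel: { provider: 'openai', name: 'gpt-4o-mini' },
      embeddingModel: { provider: 'openai', name: 'text-embedding-3-large' },
      optimizationMode: 'speed',
      focusMode: 'webSearch',
      query: 'What is Perplexica?', // illustrative query
      history: [],
    }),
  });

  // On success the handler responds with { message, sources } and HTTP 200.
  const { message, sources } = await res.json();
  console.log(message, sources.length);
}
```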
```diff
@@ -20,67 +20,11 @@ import {
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '@/lib/config';
+import { searchHandlers } from '@/lib/search';
 
 export const runtime = 'nodejs';
 export const dynamic = 'force-dynamic';
 
-const searchHandlers: Record<string, MetaSearchAgent> = {
-  webSearch: new MetaSearchAgent({
-    activeEngines: [],
-    queryGeneratorPrompt: prompts.webSearchRetrieverPrompt,
-    responsePrompt: prompts.webSearchResponsePrompt,
-    rerank: true,
-    rerankThreshold: 0.3,
-    searchWeb: true,
-    summarizer: true,
-  }),
-  academicSearch: new MetaSearchAgent({
-    activeEngines: ['arxiv', 'google scholar', 'pubmed'],
-    queryGeneratorPrompt: prompts.academicSearchRetrieverPrompt,
-    responsePrompt: prompts.academicSearchResponsePrompt,
-    rerank: true,
-    rerankThreshold: 0,
-    searchWeb: true,
-    summarizer: false,
-  }),
-  writingAssistant: new MetaSearchAgent({
-    activeEngines: [],
-    queryGeneratorPrompt: '',
-    responsePrompt: prompts.writingAssistantPrompt,
-    rerank: true,
-    rerankThreshold: 0,
-    searchWeb: false,
-    summarizer: false,
-  }),
-  wolframAlphaSearch: new MetaSearchAgent({
-    activeEngines: ['wolframalpha'],
-    queryGeneratorPrompt: prompts.wolframAlphaSearchRetrieverPrompt,
-    responsePrompt: prompts.wolframAlphaSearchResponsePrompt,
-    rerank: false,
-    rerankThreshold: 0,
-    searchWeb: true,
-    summarizer: false,
-  }),
-  youtubeSearch: new MetaSearchAgent({
-    activeEngines: ['youtube'],
-    queryGeneratorPrompt: prompts.youtubeSearchRetrieverPrompt,
-    responsePrompt: prompts.youtubeSearchResponsePrompt,
-    rerank: true,
-    rerankThreshold: 0.3,
-    searchWeb: true,
-    summarizer: false,
-  }),
-  redditSearch: new MetaSearchAgent({
-    activeEngines: ['reddit'],
-    queryGeneratorPrompt: prompts.redditSearchRetrieverPrompt,
-    responsePrompt: prompts.redditSearchResponsePrompt,
-    rerank: true,
-    rerankThreshold: 0.3,
-    searchWeb: true,
-    summarizer: false,
-  }),
-};
-
 type Message = {
   messageId: string;
   chatId: string;
```

src/app/api/search/route.ts (new file, 164 lines)
```ts
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { Embeddings } from '@langchain/core/embeddings';
import { ChatOpenAI } from '@langchain/openai';
import {
  getAvailableChatModelProviders,
  getAvailableEmbeddingModelProviders,
} from '@/lib/providers';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
import { MetaSearchAgentType } from '@/lib/search/metaSearchAgent';
import {
  getCustomOpenaiApiKey,
  getCustomOpenaiApiUrl,
  getCustomOpenaiModelName,
} from '@/lib/config';
import { searchHandlers } from '@/lib/search';

interface chatModel {
  provider: string;
  name: string;
  customOpenAIKey?: string;
  customOpenAIBaseURL?: string;
}

interface embeddingModel {
  provider: string;
  name: string;
}

interface ChatRequestBody {
  optimizationMode: 'speed' | 'balanced';
  focusMode: string;
  chatModel?: chatModel;
  embeddingModel?: embeddingModel;
  query: string;
  history: Array<[string, string]>;
}

export const POST = async (req: Request) => {
  try {
    const body: ChatRequestBody = await req.json();

    if (!body.focusMode || !body.query) {
      return Response.json(
        { message: 'Missing focus mode or query' },
        { status: 400 },
      );
    }

    body.history = body.history || [];
    body.optimizationMode = body.optimizationMode || 'balanced';

    const history: BaseMessage[] = body.history.map((msg) => {
      return msg[0] === 'human'
        ? new HumanMessage({ content: msg[1] })
        : new AIMessage({ content: msg[1] });
    });

    const [chatModelProviders, embeddingModelProviders] = await Promise.all([
      getAvailableChatModelProviders(),
      getAvailableEmbeddingModelProviders(),
    ]);

    const chatModelProvider =
      body.chatModel?.provider || Object.keys(chatModelProviders)[0];
    const chatModel =
      body.chatModel?.name ||
      Object.keys(chatModelProviders[chatModelProvider])[0];

    const embeddingModelProvider =
      body.embeddingModel?.provider || Object.keys(embeddingModelProviders)[0];
    const embeddingModel =
      body.embeddingModel?.name ||
      Object.keys(embeddingModelProviders[embeddingModelProvider])[0];

    let llm: BaseChatModel | undefined;
    let embeddings: Embeddings | undefined;

    if (body.chatModel?.provider === 'custom_openai') {
      llm = new ChatOpenAI({
        modelName: body.chatModel?.name || getCustomOpenaiModelName(),
        openAIApiKey:
          body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
        temperature: 0.7,
        configuration: {
          baseURL:
            body.chatModel?.customOpenAIBaseURL || getCustomOpenaiApiUrl(),
        },
      }) as unknown as BaseChatModel;
    } else if (
      chatModelProviders[chatModelProvider] &&
      chatModelProviders[chatModelProvider][chatModel]
    ) {
      llm = chatModelProviders[chatModelProvider][chatModel]
        .model as unknown as BaseChatModel | undefined;
    }

    if (
      embeddingModelProviders[embeddingModelProvider] &&
      embeddingModelProviders[embeddingModelProvider][embeddingModel]
    ) {
      embeddings = embeddingModelProviders[embeddingModelProvider][
        embeddingModel
      ].model as Embeddings | undefined;
    }

    if (!llm || !embeddings) {
      return Response.json(
        { message: 'Invalid model selected' },
        { status: 400 },
      );
    }

    const searchHandler: MetaSearchAgentType = searchHandlers[body.focusMode];

    if (!searchHandler) {
      return Response.json({ message: 'Invalid focus mode' }, { status: 400 });
    }

    const emitter = await searchHandler.searchAndAnswer(
      body.query,
      history,
      llm,
      embeddings,
      body.optimizationMode,
      [],
    );

    return new Promise((resolve, reject) => {
      let message = '';
      let sources: any[] = [];

      emitter.on('data', (data) => {
        try {
          const parsedData = JSON.parse(data);
          if (parsedData.type === 'response') {
            message += parsedData.data;
          } else if (parsedData.type === 'sources') {
            sources = parsedData.data;
          }
        } catch (error) {
          reject(
            Response.json({ message: 'Error parsing data' }, { status: 500 }),
          );
        }
      });

      emitter.on('end', () => {
        resolve(Response.json({ message, sources }, { status: 200 }));
      });

      emitter.on('error', (error) => {
        reject(
          Response.json({ message: 'Search error', error }, { status: 500 }),
        );
      });
    });
  } catch (err: any) {
    console.error(`Error in getting search results: ${err.message}`);
    return Response.json(
      { message: 'An error has occurred.' },
      { status: 500 },
    );
  }
};
```
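The handler above bridges a streaming interface to a single JSON response: `searchAndAnswer` returns an event emitter, and the route buffers its `data` events until `end` fires. A stripped-down sketch of that buffering pattern, using only Node's built-in `EventEmitter` for illustration:

```ts
// Sketch: collect streamed 'data' events into one result object and settle a
// Promise on 'end' / 'error'. Uses Node's EventEmitter purely for illustration.
import { EventEmitter } from 'node:events';

const collect = (emitter: EventEmitter) =>
  new Promise<{ message: string; sources: any[] }>((resolve, reject) => {
    let message = '';
    let sources: any[] = [];

    emitter.on('data', (chunk: string) => {
      const parsed = JSON.parse(chunk);
      if (parsed.type === 'response') message += parsed.data;
      else if (parsed.type === 'sources') sources = parsed.data;
    });

    emitter.on('end', () => resolve({ message, sources }));
    emitter.on('error', (err) => reject(err));
  });
```

In the route, the resolved value is wrapped in `Response.json(...)`, while a parse failure or an `error` event rejects with a 500 response instead.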

src/lib/search/index.ts (new file, 59 lines)
```ts
import MetaSearchAgent from '@/lib/search/metaSearchAgent';
import prompts from '../prompts';

export const searchHandlers: Record<string, MetaSearchAgent> = {
  webSearch: new MetaSearchAgent({
    activeEngines: [],
    queryGeneratorPrompt: prompts.webSearchRetrieverPrompt,
    responsePrompt: prompts.webSearchResponsePrompt,
    rerank: true,
    rerankThreshold: 0.3,
    searchWeb: true,
    summarizer: true,
  }),
  academicSearch: new MetaSearchAgent({
    activeEngines: ['arxiv', 'google scholar', 'pubmed'],
    queryGeneratorPrompt: prompts.academicSearchRetrieverPrompt,
    responsePrompt: prompts.academicSearchResponsePrompt,
    rerank: true,
    rerankThreshold: 0,
    searchWeb: true,
    summarizer: false,
  }),
  writingAssistant: new MetaSearchAgent({
    activeEngines: [],
    queryGeneratorPrompt: '',
    responsePrompt: prompts.writingAssistantPrompt,
    rerank: true,
    rerankThreshold: 0,
    searchWeb: false,
    summarizer: false,
  }),
  wolframAlphaSearch: new MetaSearchAgent({
    activeEngines: ['wolframalpha'],
    queryGeneratorPrompt: prompts.wolframAlphaSearchRetrieverPrompt,
    responsePrompt: prompts.wolframAlphaSearchResponsePrompt,
    rerank: false,
    rerankThreshold: 0,
    searchWeb: true,
    summarizer: false,
  }),
  youtubeSearch: new MetaSearchAgent({
    activeEngines: ['youtube'],
    queryGeneratorPrompt: prompts.youtubeSearchRetrieverPrompt,
    responsePrompt: prompts.youtubeSearchResponsePrompt,
    rerank: true,
    rerankThreshold: 0.3,
    searchWeb: true,
    summarizer: false,
  }),
  redditSearch: new MetaSearchAgent({
    activeEngines: ['reddit'],
    queryGeneratorPrompt: prompts.redditSearchRetrieverPrompt,
    responsePrompt: prompts.redditSearchResponsePrompt,
    rerank: true,
    rerankThreshold: 0.3,
    searchWeb: true,
    summarizer: false,
  }),
};
```
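Because the API route resolves handlers by key (`searchHandlers[body.focusMode]`), a new focus mode only needs a new entry in this record. A hypothetical sketch of such an extension; the `newsSearch` key, its engine name, and the reuse of the web-search prompts are illustrative only and not part of this codebase:

```ts
// Hypothetical extension of src/lib/search/index.ts (sketch only).
import MetaSearchAgent from '@/lib/search/metaSearchAgent';
import prompts from '../prompts';

export const searchHandlers: Record<string, MetaSearchAgent> = {
  // ...existing handlers from the file above...
  newsSearch: new MetaSearchAgent({
    activeEngines: ['bing news'], // illustrative engine list
    queryGeneratorPrompt: prompts.webSearchRetrieverPrompt, // reused here only for the sketch
    responsePrompt: prompts.webSearchResponsePrompt,
    rerank: true,
    rerankThreshold: 0.3,
    searchWeb: true,
    summarizer: false,
  }),
};
```

A client would then pass `focusMode: 'newsSearch'` in the request body; no changes to the route handler itself would be required.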