Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-06-19 08:18:48 +00:00)

Compare commits: 10 commits, d892b9c0d4 ... v1.10.1
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 97e64aa65e |  |
|  | 90e303f737 |  |
|  | 7955d8e408 |  |
|  | b285cb4323 |  |
|  | 5d60ab1139 |  |
|  | 9095996356 |  |
|  | 310c8a75fd |  |
|  | 191d1dc25f |  |
|  | d3b2f8983d |  |
|  | 27286465a3 |  |
```diff
@@ -16,8 +16,6 @@ services:
       dockerfile: app.dockerfile
-    environment:
-      - SEARXNG_API_URL=http://searxng:8080
     env_file:
       - .env
     ports:
       - 3000:3000
     networks:
```
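Note: with the `environment:` entry removed, the container is expected to pick the SearXNG endpoint up from the `.env` file it already loads via `env_file:`. A minimal sketch of that file, reusing the value from the removed compose line (which variables your deployment actually needs is an assumption):

```
# .env (sketch): SEARXNG_API_URL reuses the default from the removed compose line
SEARXNG_API_URL=http://searxng:8080
```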
````diff
@@ -32,7 +32,8 @@ The API accepts a JSON object in the request body, where you define the focus mo
   "history": [
     ["human", "Hi, how are you?"],
     ["assistant", "I am doing well, how can I help you today?"]
-  ]
+  ],
+  "stream": false
 }
 ```
````
````diff
@@ -71,11 +72,13 @@ The API accepts a JSON object in the request body, where you define the focus mo
   ]
   ```
 
+- **`stream`** (boolean, optional): When set to `true`, enables streaming responses. Default is `false`.
+
 ### Response
 
 The response from the API includes both the final message and the sources used to generate that message.
 
-#### Example Response
+#### Standard Response (stream: false)
 
 ```json
 {
@@ -100,6 +103,28 @@ The response from the API includes both the final message and the sources used t
 }
 ```
 
+#### Streaming Response (stream: true)
+
+When streaming is enabled, the API returns a stream of newline-delimited JSON objects. Each line contains a complete, valid JSON object. The response has Content-Type: application/json.
+
+Example of streamed response objects:
+
+```
+{"type":"init","data":"Stream connected"}
+{"type":"sources","data":[{"pageContent":"...","metadata":{"title":"...","url":"..."}},...]}
+{"type":"response","data":"Perplexica is an "}
+{"type":"response","data":"innovative, open-source "}
+{"type":"response","data":"AI-powered search engine..."}
+{"type":"done"}
+```
+
+Clients should process each line as a separate JSON object. The different message types include:
+
+- **`init`**: Initial connection message
+- **`sources`**: All sources used for the response
+- **`response`**: Chunks of the generated answer text
+- **`done`**: Indicates the stream is complete
+
 ### Fields in the Response
 
 - **`message`** (string): The search result, generated based on the query and focus mode.
````
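The streaming contract documented above (one complete JSON object per line, terminated by a `done` event) can be consumed with a plain `fetch` body reader. A minimal client sketch in TypeScript, assuming a Node 18+ script, a server at `http://localhost:3000`, and an illustrative `focusMode` value; `streamSearch` is a hypothetical helper, not part of the project:

```ts
// Hypothetical NDJSON client for the streaming search API (a sketch, not the project's client).
async function streamSearch(query: string): Promise<void> {
  const res = await fetch('http://localhost:3000/api/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      focusMode: 'webSearch', // assumed focus mode value
      query,
      history: [],
      stream: true,
    }),
  });
  if (!res.body) throw new Error('response has no body to stream');

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';

  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    // Every complete line is one JSON object; keep a trailing partial line buffered.
    const lines = buffer.split('\n');
    buffer = lines.pop() ?? '';

    for (const line of lines) {
      if (!line.trim()) continue;
      const event = JSON.parse(line);
      if (event.type === 'response') process.stdout.write(event.data);
      else if (event.type === 'sources') console.log(`[${event.data.length} sources]`);
      else if (event.type === 'done') console.log('\n[stream complete]');
    }
  }
}

streamSearch('What is Perplexica?');
```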
```diff
@@ -1,6 +1,6 @@
 {
   "name": "perplexica-frontend",
-  "version": "1.10.0",
+  "version": "1.10.1",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {
```
```diff
@@ -295,9 +295,9 @@ export const POST = async (req: Request) => {
       },
     });
   } catch (err) {
-    console.error('An error ocurred while processing chat request:', err);
+    console.error('An error occurred while processing chat request:', err);
     return Response.json(
-      { message: 'An error ocurred while processing chat request' },
+      { message: 'An error occurred while processing chat request' },
       { status: 500 },
     );
   }
```
```diff
@@ -59,9 +59,9 @@ export const GET = async (req: Request) => {
 
     return Response.json({ ...config }, { status: 200 });
   } catch (err) {
-    console.error('An error ocurred while getting config:', err);
+    console.error('An error occurred while getting config:', err);
     return Response.json(
-      { message: 'An error ocurred while getting config' },
+      { message: 'An error occurred while getting config' },
       { status: 500 },
     );
   }
@@ -100,9 +100,9 @@ export const POST = async (req: Request) => {
 
     return Response.json({ message: 'Config updated' }, { status: 200 });
   } catch (err) {
-    console.error('An error ocurred while updating config:', err);
+    console.error('An error occurred while updating config:', err);
     return Response.json(
-      { message: 'An error ocurred while updating config' },
+      { message: 'An error occurred while updating config' },
       { status: 500 },
     );
   }
```
```diff
@@ -48,7 +48,7 @@ export const GET = async (req: Request) => {
       },
     );
   } catch (err) {
-    console.error(`An error ocurred in discover route: ${err}`);
+    console.error(`An error occurred in discover route: ${err}`);
     return Response.json(
       {
         message: 'An error has occurred',
```
```diff
@@ -74,9 +74,9 @@ export const POST = async (req: Request) => {
 
     return Response.json({ images }, { status: 200 });
   } catch (err) {
-    console.error(`An error ocurred while searching images: ${err}`);
+    console.error(`An error occurred while searching images: ${err}`);
     return Response.json(
-      { message: 'An error ocurred while searching images' },
+      { message: 'An error occurred while searching images' },
       { status: 500 },
     );
   }
```
```diff
@@ -34,7 +34,7 @@ export const GET = async (req: Request) => {
       },
     );
   } catch (err) {
-    console.error('An error ocurred while fetching models', err);
+    console.error('An error occurred while fetching models', err);
     return Response.json(
       {
         message: 'An error has occurred.',
```
```diff
@@ -33,6 +33,7 @@ interface ChatRequestBody {
   embeddingModel?: embeddingModel;
   query: string;
   history: Array<[string, string]>;
+  stream?: boolean;
 }
 
 export const POST = async (req: Request) => {
@@ -48,6 +49,7 @@ export const POST = async (req: Request) => {
 
     body.history = body.history || [];
     body.optimizationMode = body.optimizationMode || 'balanced';
+    body.stream = body.stream || false;
 
     const history: BaseMessage[] = body.history.map((msg) => {
       return msg[0] === 'human'
@@ -125,40 +127,137 @@ export const POST = async (req: Request) => {
       [],
     );
 
-    return new Promise(
-      (
-        resolve: (value: Response) => void,
-        reject: (value: Response) => void,
-      ) => {
-        let message = '';
-        let sources: any[] = [];
-
-        emitter.on('data', (data) => {
-          try {
-            const parsedData = JSON.parse(data);
-            if (parsedData.type === 'response') {
-              message += parsedData.data;
-            } else if (parsedData.type === 'sources') {
-              sources = parsedData.data;
-            }
-          } catch (error) {
-            reject(
-              Response.json({ message: 'Error parsing data' }, { status: 500 }),
-            );
-          }
-        });
-
-        emitter.on('end', () => {
-          resolve(Response.json({ message, sources }, { status: 200 }));
-        });
-
-        emitter.on('error', (error) => {
-          reject(
-            Response.json({ message: 'Search error', error }, { status: 500 }),
-          );
-        });
-      },
-    );
+    if (!body.stream) {
+      return new Promise(
+        (
+          resolve: (value: Response) => void,
+          reject: (value: Response) => void,
+        ) => {
+          let message = '';
+          let sources: any[] = [];
+
+          emitter.on('data', (data: string) => {
+            try {
+              const parsedData = JSON.parse(data);
+              if (parsedData.type === 'response') {
+                message += parsedData.data;
+              } else if (parsedData.type === 'sources') {
+                sources = parsedData.data;
+              }
+            } catch (error) {
+              reject(
+                Response.json(
+                  { message: 'Error parsing data' },
+                  { status: 500 },
+                ),
+              );
+            }
+          });
+
+          emitter.on('end', () => {
+            resolve(Response.json({ message, sources }, { status: 200 }));
+          });
+
+          emitter.on('error', (error: any) => {
+            reject(
+              Response.json(
+                { message: 'Search error', error },
+                { status: 500 },
+              ),
+            );
+          });
+        },
+      );
+    }
+
+    const encoder = new TextEncoder();
+
+    const abortController = new AbortController();
+    const { signal } = abortController;
+
+    const stream = new ReadableStream({
+      start(controller) {
+        let sources: any[] = [];
+
+        controller.enqueue(
+          encoder.encode(
+            JSON.stringify({
+              type: 'init',
+              data: 'Stream connected',
+            }) + '\n',
+          ),
+        );
+
+        signal.addEventListener('abort', () => {
+          emitter.removeAllListeners();
+
+          try {
+            controller.close();
+          } catch (error) {}
+        });
+
+        emitter.on('data', (data: string) => {
+          if (signal.aborted) return;
+
+          try {
+            const parsedData = JSON.parse(data);
+
+            if (parsedData.type === 'response') {
+              controller.enqueue(
+                encoder.encode(
+                  JSON.stringify({
+                    type: 'response',
+                    data: parsedData.data,
+                  }) + '\n',
+                ),
+              );
+            } else if (parsedData.type === 'sources') {
+              sources = parsedData.data;
+              controller.enqueue(
+                encoder.encode(
+                  JSON.stringify({
+                    type: 'sources',
+                    data: sources,
+                  }) + '\n',
+                ),
+              );
+            }
+          } catch (error) {
+            controller.error(error);
+          }
+        });
+
+        emitter.on('end', () => {
+          if (signal.aborted) return;
+
+          controller.enqueue(
+            encoder.encode(
+              JSON.stringify({
+                type: 'done',
+              }) + '\n',
+            ),
+          );
+          controller.close();
+        });
+
+        emitter.on('error', (error: any) => {
+          if (signal.aborted) return;
+
+          controller.error(error);
+        });
+      },
+      cancel() {
+        abortController.abort();
+      },
+    });
+
+    return new Response(stream, {
+      headers: {
+        'Content-Type': 'text/event-stream',
+        'Cache-Control': 'no-cache, no-transform',
+        Connection: 'keep-alive',
+      },
+    });
   } catch (err: any) {
     console.error(`Error in getting search results: ${err.message}`);
     return Response.json(
```
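The hunk above bridges an internal EventEmitter onto a web `ReadableStream` of newline-delimited JSON, with an `AbortController` so listeners are detached when the client disconnects. The same technique in isolation, as a reduced sketch (the `emitterToNdjsonStream` name is hypothetical, and the assumption that the emitter fires `data` with JSON strings plus `end`/`error` mirrors the route above):

```ts
import { EventEmitter } from 'events';

// Wrap an emitter that fires 'data' (JSON strings), 'end', and 'error'
// into a ReadableStream of newline-delimited JSON chunks.
function emitterToNdjsonStream(emitter: EventEmitter): ReadableStream<Uint8Array> {
  const encoder = new TextEncoder();
  const abortController = new AbortController();
  const { signal } = abortController;

  return new ReadableStream({
    start(controller) {
      // Client went away: stop listening and close the controller once.
      signal.addEventListener('abort', () => {
        emitter.removeAllListeners();
        try {
          controller.close();
        } catch {}
      });

      emitter.on('data', (data: string) => {
        if (signal.aborted) return;
        controller.enqueue(encoder.encode(data + '\n'));
      });

      emitter.on('end', () => {
        if (signal.aborted) return;
        controller.enqueue(encoder.encode(JSON.stringify({ type: 'done' }) + '\n'));
        controller.close();
      });

      emitter.on('error', (error: Error) => {
        if (signal.aborted) return;
        controller.error(error);
      });
    },
    // Called when the consumer cancels the stream (e.g. client disconnect).
    cancel() {
      abortController.abort();
    },
  });
}
```

The `cancel()` hook is what ties the consumer's disconnect back to the emitter; without it, the handlers would keep firing into a closed controller.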
```diff
@@ -72,9 +72,9 @@ export const POST = async (req: Request) => {
 
     return Response.json({ suggestions }, { status: 200 });
   } catch (err) {
-    console.error(`An error ocurred while generating suggestions: ${err}`);
+    console.error(`An error occurred while generating suggestions: ${err}`);
     return Response.json(
-      { message: 'An error ocurred while generating suggestions' },
+      { message: 'An error occurred while generating suggestions' },
       { status: 500 },
     );
   }
```
```diff
@@ -74,9 +74,9 @@ export const POST = async (req: Request) => {
 
     return Response.json({ videos }, { status: 200 });
   } catch (err) {
-    console.error(`An error ocurred while searching videos: ${err}`);
+    console.error(`An error occurred while searching videos: ${err}`);
     return Response.json(
-      { message: 'An error ocurred while searching videos' },
+      { message: 'An error occurred while searching videos' },
       { status: 500 },
     );
   }
```
```diff
@@ -40,70 +40,37 @@ type RecursivePartial<T> = {
   [P in keyof T]?: RecursivePartial<T[P]>;
 };
 
-const loadConfig = () => {
-  const configPath = path.join(process.cwd(), configFileName);
-  if (!fs.existsSync(configPath) || fs.lstatSync(configPath).isDirectory()) {
-    return {} as Config;
-  }
-  return toml.parse(fs.readFileSync(configPath, 'utf-8')) as any as Config;
-};
-
-const getEnvVar = (key: string): string | undefined => {
-  return process.env[key];
-};
-
-const getConfigValue = (path: string[], defaultValue: string): string => {
-  // Convert path to environment variable name (e.g., ['MODELS', 'GROQ', 'API_KEY'] -> 'GROQ_API_KEY')
-  const envKey = path.slice(1).join('_').toUpperCase();
-  const envValue = getEnvVar(envKey);
-
-  if (envValue !== undefined) {
-    return envValue;
-  }
-
-  // Fall back to config.toml
-  let value: any = loadConfig();
-  for (const key of path) {
-    value = value[key];
-    if (value === undefined) {
-      return defaultValue;
-    }
-  }
-  return value;
-};
+const loadConfig = () =>
+  toml.parse(
+    fs.readFileSync(path.join(process.cwd(), `${configFileName}`), 'utf-8'),
+  ) as any as Config;
 
 export const getSimilarityMeasure = () =>
-  getConfigValue(['GENERAL', 'SIMILARITY_MEASURE'], 'cosine');
+  loadConfig().GENERAL.SIMILARITY_MEASURE;
 
-export const getKeepAlive = () =>
-  getConfigValue(['GENERAL', 'KEEP_ALIVE'], '30s');
+export const getKeepAlive = () => loadConfig().GENERAL.KEEP_ALIVE;
 
-export const getOpenaiApiKey = () =>
-  getConfigValue(['MODELS', 'OPENAI', 'API_KEY'], '');
+export const getOpenaiApiKey = () => loadConfig().MODELS.OPENAI.API_KEY;
 
-export const getGroqApiKey = () =>
-  getConfigValue(['MODELS', 'GROQ', 'API_KEY'], '');
+export const getGroqApiKey = () => loadConfig().MODELS.GROQ.API_KEY;
 
-export const getAnthropicApiKey = () =>
-  getConfigValue(['MODELS', 'ANTHROPIC', 'API_KEY'], '');
+export const getAnthropicApiKey = () => loadConfig().MODELS.ANTHROPIC.API_KEY;
 
-export const getGeminiApiKey = () =>
-  getConfigValue(['MODELS', 'GEMINI', 'API_KEY'], '');
+export const getGeminiApiKey = () => loadConfig().MODELS.GEMINI.API_KEY;
 
 export const getSearxngApiEndpoint = () =>
-  process.env.SEARXNG_API_URL || getConfigValue(['API_ENDPOINTS', 'SEARXNG'], '');
+  process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;
 
-export const getOllamaApiEndpoint = () =>
-  getConfigValue(['MODELS', 'OLLAMA', 'API_URL'], 'http://localhost:11434');
+export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
 
 export const getCustomOpenaiApiKey = () =>
-  getConfigValue(['MODELS', 'CUSTOM_OPENAI', 'API_KEY'], '');
+  loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;
 
 export const getCustomOpenaiApiUrl = () =>
-  getConfigValue(['MODELS', 'CUSTOM_OPENAI', 'API_URL'], '');
+  loadConfig().MODELS.CUSTOM_OPENAI.API_URL;
 
 export const getCustomOpenaiModelName = () =>
-  getConfigValue(['MODELS', 'CUSTOM_OPENAI', 'MODEL_NAME'], '');
+  loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
 
 const mergeConfigs = (current: any, update: any): any => {
   if (update === null || update === undefined) {
```
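For reference, the `getConfigValue` helper removed above derived an environment variable name by dropping the first path segment and joining the rest with underscores, so `['MODELS', 'GROQ', 'API_KEY']` checked `GROQ_API_KEY` before falling back to `config.toml`. A standalone sketch of just that lookup order (the `toEnvKey`/`resolve` names and test values are illustrative, not part of the codebase):

```ts
// Mirrors the removed env-var lookup: config path -> ENV key, env wins over config.
const toEnvKey = (path: string[]): string =>
  path.slice(1).join('_').toUpperCase();

const resolve = (
  path: string[],
  fromConfig: string | undefined,
  fallback: string,
): string => process.env[toEnvKey(path)] ?? fromConfig ?? fallback;

console.log(toEnvKey(['MODELS', 'GROQ', 'API_KEY'])); // "GROQ_API_KEY"
console.log(resolve(['GENERAL', 'KEEP_ALIVE'], undefined, '30s')); // "30s" unless KEEP_ALIVE is set
```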
```diff
@@ -8,6 +8,10 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { Embeddings } from '@langchain/core/embeddings';
 
 const geminiChatModels: Record<string, string>[] = [
+  {
+    displayName: 'Gemini 2.5 Pro Experimental',
+    key: 'gemini-2.5-pro-exp-03-25',
+  },
   {
     displayName: 'Gemini 2.0 Flash',
     key: 'gemini-2.0-flash',
@@ -17,8 +21,8 @@ const geminiChatModels: Record<string, string>[] = [
     key: 'gemini-2.0-flash-lite',
   },
   {
-    displayName: 'Gemini 2.0 Pro Experimental',
-    key: 'gemini-2.0-pro-exp-02-05',
+    displayName: 'Gemini 2.0 Flash Thinking Experimental',
+    key: 'gemini-2.0-flash-thinking-exp-01-21',
   },
   {
     displayName: 'Gemini 1.5 Flash',
```