Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-06-18 07:48:35 +00:00)

Compare commits: 947ef2f85b...af237872a7 (10 commits)

Commits in this range:

- af237872a7
- 97e64aa65e
- 90e303f737
- 5d60ab1139
- 9095996356
- 191d1dc25f
- d3b2f8983d
- 590a52d38c
- ca3fad6632
- e0d5787c5d
@@ -26,9 +26,21 @@ services:
       - ./config.toml:/home/perplexica/config.toml
     restart: unless-stopped
 
+  redict:
+    image: registry.redict.io/redict:latest
+    container_name: perplexica-redict
+    ports:
+      - "6379:6379"
+    volumes:
+      - redict_data:/data
+    networks:
+      - perplexica-network
+    restart: unless-stopped
+
 networks:
   perplexica-network:
 
 volumes:
   backend-dbstore:
   uploads:
+  redict_data:
@@ -32,7 +32,8 @@ The API accepts a JSON object in the request body, where you define the focus mode
   "history": [
     ["human", "Hi, how are you?"],
     ["assistant", "I am doing well, how can I help you today?"]
-  ]
+  ],
+  "stream": false
 }
 ```
 
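To illustrate the request body shape added in the hunk above, here is a minimal TypeScript sketch of a non-streaming call. The `http://localhost:3000/api/search` URL and the `webSearch` focus mode are assumptions for illustration, not part of this change:

```ts
// Minimal sketch: POST the request body shown above with stream disabled.
// The URL and focusMode value are assumptions, not taken from this diff.
async function searchOnce(query: string): Promise<{ message: string; sources: unknown[] }> {
  const res = await fetch('http://localhost:3000/api/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      focusMode: 'webSearch',
      query,
      history: [
        ['human', 'Hi, how are you?'],
        ['assistant', 'I am doing well, how can I help you today?'],
      ],
      stream: false,
    }),
  });

  if (!res.ok) {
    throw new Error(`Search request failed: ${res.status}`);
  }

  // With stream: false the route resolves a single JSON body { message, sources }.
  return res.json();
}
```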
@@ -71,11 +72,13 @@ The API accepts a JSON object in the request body, where you define the focus mode
 ]
 ```
 
+- **`stream`** (boolean, optional): When set to `true`, enables streaming responses. Default is `false`.
+
 ### Response
 
 The response from the API includes both the final message and the sources used to generate that message.
 
-#### Example Response
+#### Standard Response (stream: false)
 
 ```json
 {
@@ -100,6 +103,28 @@ The response from the API includes both the final message and the sources used to generate that message.
 }
 ```
 
+#### Streaming Response (stream: true)
+
+When streaming is enabled, the API returns a stream of newline-delimited JSON objects. Each line contains a complete, valid JSON object. The response has Content-Type: application/json.
+
+Example of streamed response objects:
+
+```
+{"type":"init","data":"Stream connected"}
+{"type":"sources","data":[{"pageContent":"...","metadata":{"title":"...","url":"..."}},...]}
+{"type":"response","data":"Perplexica is an "}
+{"type":"response","data":"innovative, open-source "}
+{"type":"response","data":"AI-powered search engine..."}
+{"type":"done"}
+```
+
+Clients should process each line as a separate JSON object. The different message types include:
+
+- **`init`**: Initial connection message
+- **`sources`**: All sources used for the response
+- **`response`**: Chunks of the generated answer text
+- **`done`**: Indicates the stream is complete
+
 ### Fields in the Response
 
 - **`message`** (string): The search result, generated based on the query and focus mode.
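As a rough sketch of the client-side handling described above, the following TypeScript reads the newline-delimited stream and buffers partial lines between chunks. The endpoint URL and request body values are assumptions for illustration:

```ts
// Sketch of a streaming client for the NDJSON responses documented above.
// The endpoint URL and request body values are assumptions, not from this diff.
async function searchStreaming(query: string): Promise<void> {
  const res = await fetch('http://localhost:3000/api/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ focusMode: 'webSearch', query, history: [], stream: true }),
  });

  if (!res.body) throw new Error('Response has no body to stream');

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;

    // A chunk may contain several lines or end mid-line, so buffer until '\n'.
    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split('\n');
    buffer = lines.pop() ?? '';

    for (const line of lines) {
      if (!line.trim()) continue;
      const event = JSON.parse(line) as { type: string; data?: unknown };

      if (event.type === 'response') process.stdout.write(String(event.data));
      else if (event.type === 'sources') console.log('\nsources:', event.data);
      else if (event.type === 'done') console.log('\n[stream complete]');
    }
  }
}
```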
@@ -12,6 +12,11 @@ search:
 server:
   secret_key: 'a2fb23f1b02e6ee83875b09826990de0f6bd908b6638e8c10277d415f6ab852b' # Is overwritten by ${SEARXNG_SECRET}
 
+redis:
+  url: redis://redict:6379/0
+
 engines:
   - name: wolframalpha
     disabled: false
+  - name: qwant
+    disabled: true
@@ -33,6 +33,7 @@ interface ChatRequestBody {
   embeddingModel?: embeddingModel;
   query: string;
   history: Array<[string, string]>;
+  stream?: boolean;
 }
 
 export const POST = async (req: Request) => {
@@ -48,6 +49,7 @@ export const POST = async (req: Request) => {
 
     body.history = body.history || [];
     body.optimizationMode = body.optimizationMode || 'balanced';
+    body.stream = body.stream || false;
 
     const history: BaseMessage[] = body.history.map((msg) => {
       return msg[0] === 'human'
@@ -125,40 +127,137 @@ export const POST = async (req: Request) => {
       [],
     );
 
-    return new Promise(
-      (
-        resolve: (value: Response) => void,
-        reject: (value: Response) => void,
-      ) => {
-        let message = '';
-        let sources: any[] = [];
-
-        emitter.on('data', (data) => {
-          try {
-            const parsedData = JSON.parse(data);
-            if (parsedData.type === 'response') {
-              message += parsedData.data;
-            } else if (parsedData.type === 'sources') {
-              sources = parsedData.data;
-            }
-          } catch (error) {
-            reject(
-              Response.json({ message: 'Error parsing data' }, { status: 500 }),
-            );
-          }
-        });
-
-        emitter.on('end', () => {
-          resolve(Response.json({ message, sources }, { status: 200 }));
-        });
-
-        emitter.on('error', (error) => {
-          reject(
-            Response.json({ message: 'Search error', error }, { status: 500 }),
-          );
-        });
-      },
-    );
+    if (!body.stream) {
+      return new Promise(
+        (
+          resolve: (value: Response) => void,
+          reject: (value: Response) => void,
+        ) => {
+          let message = '';
+          let sources: any[] = [];
+
+          emitter.on('data', (data: string) => {
+            try {
+              const parsedData = JSON.parse(data);
+              if (parsedData.type === 'response') {
+                message += parsedData.data;
+              } else if (parsedData.type === 'sources') {
+                sources = parsedData.data;
+              }
+            } catch (error) {
+              reject(
+                Response.json(
+                  { message: 'Error parsing data' },
+                  { status: 500 },
+                ),
+              );
+            }
+          });
+
+          emitter.on('end', () => {
+            resolve(Response.json({ message, sources }, { status: 200 }));
+          });
+
+          emitter.on('error', (error: any) => {
+            reject(
+              Response.json(
+                { message: 'Search error', error },
+                { status: 500 },
+              ),
+            );
+          });
+        },
+      );
+    }
+
+    const encoder = new TextEncoder();
+
+    const abortController = new AbortController();
+    const { signal } = abortController;
+
+    const stream = new ReadableStream({
+      start(controller) {
+        let sources: any[] = [];
+
+        controller.enqueue(
+          encoder.encode(
+            JSON.stringify({
+              type: 'init',
+              data: 'Stream connected',
+            }) + '\n',
+          ),
+        );
+
+        signal.addEventListener('abort', () => {
+          emitter.removeAllListeners();
+
+          try {
+            controller.close();
+          } catch (error) {}
+        });
+
+        emitter.on('data', (data: string) => {
+          if (signal.aborted) return;
+
+          try {
+            const parsedData = JSON.parse(data);
+
+            if (parsedData.type === 'response') {
+              controller.enqueue(
+                encoder.encode(
+                  JSON.stringify({
+                    type: 'response',
+                    data: parsedData.data,
+                  }) + '\n',
+                ),
+              );
+            } else if (parsedData.type === 'sources') {
+              sources = parsedData.data;
+              controller.enqueue(
+                encoder.encode(
+                  JSON.stringify({
+                    type: 'sources',
+                    data: sources,
+                  }) + '\n',
+                ),
+              );
+            }
+          } catch (error) {
+            controller.error(error);
+          }
+        });
+
+        emitter.on('end', () => {
+          if (signal.aborted) return;
+
+          controller.enqueue(
+            encoder.encode(
+              JSON.stringify({
+                type: 'done',
+              }) + '\n',
+            ),
+          );
+          controller.close();
+        });
+
+        emitter.on('error', (error: any) => {
+          if (signal.aborted) return;
+
+          controller.error(error);
+        });
+      },
+      cancel() {
+        abortController.abort();
+      },
+    });
+
+    return new Response(stream, {
+      headers: {
+        'Content-Type': 'text/event-stream',
+        'Cache-Control': 'no-cache, no-transform',
+        Connection: 'keep-alive',
+      },
+    });
   } catch (err: any) {
     console.error(`Error in getting search results: ${err.message}`);
     return Response.json(
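The streaming branch above wires the ReadableStream's `cancel()` handler to an AbortController, so a client disconnect detaches the emitter listeners and closes the stream. Below is a rough TypeScript sketch of triggering that path from the client by aborting after a timeout; the endpoint URL and request body values are assumptions for illustration:

```ts
// Sketch: cancelling a streaming request from the client. Aborting the fetch
// cancels the server-side ReadableStream; its cancel() handler (in the diff
// above) calls abortController.abort(), which detaches the emitter listeners.
// The URL and body values are assumptions, not taken from this diff.
async function searchWithTimeout(query: string, timeoutMs: number): Promise<void> {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  const decoder = new TextDecoder();

  try {
    const res = await fetch('http://localhost:3000/api/search', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ focusMode: 'webSearch', query, history: [], stream: true }),
      signal: controller.signal,
    });

    if (!res.body) throw new Error('Response has no body to stream');

    // Read raw chunks until the server closes the stream or the timeout aborts it.
    const reader = res.body.getReader();
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      process.stdout.write(decoder.decode(value, { stream: true }));
    }
  } catch (err) {
    // An AbortError means the timeout fired and the stream was cancelled.
    if ((err as Error).name !== 'AbortError') throw err;
  } finally {
    clearTimeout(timer);
  }
}
```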