Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-09-20 08:11:33 +00:00)

Compare commits: v1.10.1...947ef2f85b (4 commits)
Author | SHA1 | Date
---|---|---
 | 947ef2f85b |
 | 590a52d38c |
 | ca3fad6632 |
 | e0d5787c5d |
@@ -26,9 +26,21 @@ services:
       - ./config.toml:/home/perplexica/config.toml
     restart: unless-stopped

+  redict:
+    image: registry.redict.io/redict:latest
+    container_name: perplexica-redict
+    ports:
+      - "6379:6379"
+    volumes:
+      - redict_data:/data
+    networks:
+      - perplexica-network
+    restart: unless-stopped
+
 networks:
   perplexica-network:

 volumes:
   backend-dbstore:
   uploads:
+  redict_data:
@@ -32,8 +32,7 @@ The API accepts a JSON object in the request body, where you define the focus mo
   "history": [
     ["human", "Hi, how are you?"],
     ["assistant", "I am doing well, how can I help you today?"]
-  ],
-  "stream": false
+  ]
 }
 ```

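For reference, a minimal client call matching the request body documented above might look like the TypeScript sketch below. The base URL, the /api/search path, and the 'webSearch' focus mode value are assumptions for illustration, not taken from this diff; a fetch-capable runtime (Node 18+ or a browser) is assumed.

```ts
// Hypothetical client for the documented request body.
// The base URL, path, and focus mode value are assumptions.
type SearchRequest = {
  query: string;
  focusMode: string;
  history: Array<[string, string]>;
};

async function search(
  body: SearchRequest,
): Promise<{ message: string; sources: unknown[] }> {
  const res = await fetch('http://localhost:3000/api/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  });
  if (!res.ok) throw new Error(`Search failed with status ${res.status}`);
  // The non-streaming handler resolves with { message, sources }.
  return res.json();
}

// Usage mirroring the example body above.
search({
  query: 'What is Perplexica?',
  focusMode: 'webSearch', // assumed focus mode value
  history: [
    ['human', 'Hi, how are you?'],
    ['assistant', 'I am doing well, how can I help you today?'],
  ],
}).then((r) => console.log(r.message));
```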
@@ -72,13 +71,11 @@ The API accepts a JSON object in the request body, where you define the focus mo
   ]
   ```

-- **`stream`** (boolean, optional): When set to `true`, enables streaming responses. Default is `false`.
-
 ### Response

 The response from the API includes both the final message and the sources used to generate that message.

-#### Standard Response (stream: false)
+#### Example Response

 ```json
 {
@@ -103,28 +100,6 @@ The response from the API includes both the final message and the sources used t
 }
 ```

-#### Streaming Response (stream: true)
-
-When streaming is enabled, the API returns a stream of newline-delimited JSON objects. Each line contains a complete, valid JSON object. The response has Content-Type: application/json.
-
-Example of streamed response objects:
-
-```
-{"type":"init","data":"Stream connected"}
-{"type":"sources","data":[{"pageContent":"...","metadata":{"title":"...","url":"..."}},...]}
-{"type":"response","data":"Perplexica is an "}
-{"type":"response","data":"innovative, open-source "}
-{"type":"response","data":"AI-powered search engine..."}
-{"type":"done"}
-```
-
-Clients should process each line as a separate JSON object. The different message types include:
-
-- **`init`**: Initial connection message
-- **`sources`**: All sources used for the response
-- **`response`**: Chunks of the generated answer text
-- **`done`**: Indicates the stream is complete
-
 ### Fields in the Response

 - **`message`** (string): The search result, generated based on the query and focus mode.
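The removed section above documents a newline-delimited JSON stream carrying init, sources, response, and done messages. A minimal sketch of a client for that protocol, assuming Node 18+ (for global fetch and process.stdout) and the same hypothetical /api/search path used earlier:

```ts
// Sketch of a client for the newline-delimited JSON stream described in the
// removed documentation above. The endpoint path and base URL are assumptions.
async function streamSearch(body: object): Promise<void> {
  const res = await fetch('http://localhost:3000/api/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ ...body, stream: true }),
  });
  if (!res.body) throw new Error('No response body to stream');

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';

  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    // Each complete line is one JSON object: init, sources, response, or done.
    const lines = buffer.split('\n');
    buffer = lines.pop() ?? '';
    for (const line of lines) {
      if (!line.trim()) continue;
      const event = JSON.parse(line);
      if (event.type === 'response') process.stdout.write(event.data);
      else if (event.type === 'sources') console.log('\nsources:', event.data);
      else if (event.type === 'done') console.log('\n[stream complete]');
    }
  }
}
```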
@@ -12,6 +12,11 @@ search:
 server:
   secret_key: 'a2fb23f1b02e6ee83875b09826990de0f6bd908b6638e8c10277d415f6ab852b' # Is overwritten by ${SEARXNG_SECRET}

+redis:
+  url: redis://redict:6379/0
+
 engines:
   - name: wolframalpha
     disabled: false
+  - name: qwant
+    disabled: true
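The settings change above points SearXNG at redis://redict:6379/0 inside the compose network. Redict speaks the Redis protocol, so a standard Redis client can confirm the container is reachable; the sketch below uses the npm redis package, which is an assumption and not a dependency shown in this diff. From the host, the service is exposed on localhost:6379 per the compose change.

```ts
// Sketch: verify the redict container answers on the Redis protocol.
// The npm 'redis' client is an assumption, not part of this repository.
import { createClient } from 'redis';

async function checkRedict(): Promise<void> {
  // From the host, the compose file maps the container to localhost:6379.
  const client = createClient({ url: 'redis://localhost:6379/0' });
  client.on('error', (err) => console.error('Redict connection error:', err));

  await client.connect();
  console.log('PING ->', await client.ping()); // Expect "PONG"
  await client.quit();
}

checkRedict();
```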
@@ -33,7 +33,6 @@ interface ChatRequestBody {
   embeddingModel?: embeddingModel;
   query: string;
   history: Array<[string, string]>;
-  stream?: boolean;
 }

 export const POST = async (req: Request) => {
@@ -49,7 +48,6 @@ export const POST = async (req: Request) => {

     body.history = body.history || [];
     body.optimizationMode = body.optimizationMode || 'balanced';
-    body.stream = body.stream || false;

     const history: BaseMessage[] = body.history.map((msg) => {
       return msg[0] === 'human'
@@ -127,137 +125,40 @@ export const POST = async (req: Request) => {
       [],
     );

-    if (!body.stream) {
-      return new Promise(
-        (
-          resolve: (value: Response) => void,
-          reject: (value: Response) => void,
-        ) => {
-          let message = '';
-          let sources: any[] = [];
-
-          emitter.on('data', (data: string) => {
-            try {
-              const parsedData = JSON.parse(data);
-              if (parsedData.type === 'response') {
-                message += parsedData.data;
-              } else if (parsedData.type === 'sources') {
-                sources = parsedData.data;
-              }
-            } catch (error) {
-              reject(
-                Response.json(
-                  { message: 'Error parsing data' },
-                  { status: 500 },
-                ),
-              );
-            }
-          });
-
-          emitter.on('end', () => {
-            resolve(Response.json({ message, sources }, { status: 200 }));
-          });
-
-          emitter.on('error', (error: any) => {
-            reject(
-              Response.json(
-                { message: 'Search error', error },
-                { status: 500 },
-              ),
-            );
-          });
-        },
-      );
-    }
-
-    const encoder = new TextEncoder();
-
-    const abortController = new AbortController();
-    const { signal } = abortController;
-
-    const stream = new ReadableStream({
-      start(controller) {
-        let sources: any[] = [];
-
-        controller.enqueue(
-          encoder.encode(
-            JSON.stringify({
-              type: 'init',
-              data: 'Stream connected',
-            }) + '\n',
-          ),
-        );
-
-        signal.addEventListener('abort', () => {
-          emitter.removeAllListeners();
-
-          try {
-            controller.close();
-          } catch (error) {}
-        });
-
-        emitter.on('data', (data: string) => {
-          if (signal.aborted) return;
-
-          try {
-            const parsedData = JSON.parse(data);
-
-            if (parsedData.type === 'response') {
-              controller.enqueue(
-                encoder.encode(
-                  JSON.stringify({
-                    type: 'response',
-                    data: parsedData.data,
-                  }) + '\n',
-                ),
-              );
-            } else if (parsedData.type === 'sources') {
-              sources = parsedData.data;
-              controller.enqueue(
-                encoder.encode(
-                  JSON.stringify({
-                    type: 'sources',
-                    data: sources,
-                  }) + '\n',
-                ),
-              );
-            }
-          } catch (error) {
-            controller.error(error);
-          }
-        });
-
-        emitter.on('end', () => {
-          if (signal.aborted) return;
-
-          controller.enqueue(
-            encoder.encode(
-              JSON.stringify({
-                type: 'done',
-              }) + '\n',
-            ),
-          );
-          controller.close();
-        });
-
-        emitter.on('error', (error: any) => {
-          if (signal.aborted) return;
-
-          controller.error(error);
-        });
-      },
-      cancel() {
-        abortController.abort();
-      },
-    });
-
-    return new Response(stream, {
-      headers: {
-        'Content-Type': 'text/event-stream',
-        'Cache-Control': 'no-cache, no-transform',
-        Connection: 'keep-alive',
-      },
-    });
+    return new Promise(
+      (
+        resolve: (value: Response) => void,
+        reject: (value: Response) => void,
+      ) => {
+        let message = '';
+        let sources: any[] = [];
+
+        emitter.on('data', (data) => {
+          try {
+            const parsedData = JSON.parse(data);
+            if (parsedData.type === 'response') {
+              message += parsedData.data;
+            } else if (parsedData.type === 'sources') {
+              sources = parsedData.data;
+            }
+          } catch (error) {
+            reject(
+              Response.json({ message: 'Error parsing data' }, { status: 500 }),
+            );
+          }
+        });
+
+        emitter.on('end', () => {
+          resolve(Response.json({ message, sources }, { status: 200 }));
+        });
+
+        emitter.on('error', (error) => {
+          reject(
+            Response.json({ message: 'Search error', error }, { status: 500 }),
+          );
+        });
+      },
+    );
   } catch (err: any) {
     console.error(`Error in getting search results: ${err.message}`);
     return Response.json(
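The retained handler above converts an EventEmitter into a single awaited Response: it buffers 'data' events and settles on 'end' or 'error'. A hypothetical standalone distillation of that pattern, assuming the same JSON-encoded event payloads:

```ts
// Hypothetical distillation of the emitter-to-Promise pattern used above:
// buffer 'data' events, then settle once on 'end' or 'error'.
import { EventEmitter } from 'events';

function collectEmitter(
  emitter: EventEmitter,
): Promise<{ message: string; sources: unknown[] }> {
  return new Promise((resolve, reject) => {
    let message = '';
    let sources: unknown[] = [];

    emitter.on('data', (data: string) => {
      try {
        const parsed = JSON.parse(data);
        if (parsed.type === 'response') message += parsed.data;
        else if (parsed.type === 'sources') sources = parsed.data;
      } catch (err) {
        reject(err);
      }
    });

    emitter.on('end', () => resolve({ message, sources }));
    emitter.on('error', (err) => reject(err));
  });
}
```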