Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-06-27 04:08:39 +00:00)

Compare commits: v1.10.1 ... f3e8fb5ef1 (1 commit)

Commit f3e8fb5ef1
@@ -32,8 +32,7 @@ The API accepts a JSON object in the request body, where you define the focus mo
   "history": [
     ["human", "Hi, how are you?"],
     ["assistant", "I am doing well, how can I help you today?"]
-  ],
-  "stream": false
+  ]
 }
 ```

@@ -72,13 +71,11 @@ The API accepts a JSON object in the request body, where you define the focus mo
   ]
   ```

-- **`stream`** (boolean, optional): When set to `true`, enables streaming responses. Default is `false`.
-
 ### Response

 The response from the API includes both the final message and the sources used to generate that message.

-#### Standard Response (stream: false)
+#### Example Response

 ```json
 {
@@ -103,28 +100,6 @@ The response from the API includes both the final message and the sources used t
 }
 ```

-#### Streaming Response (stream: true)
-
-When streaming is enabled, the API returns a stream of newline-delimited JSON objects. Each line contains a complete, valid JSON object. The response has Content-Type: application/json.
-
-Example of streamed response objects:
-
-```
-{"type":"init","data":"Stream connected"}
-{"type":"sources","data":[{"pageContent":"...","metadata":{"title":"...","url":"..."}},...]}
-{"type":"response","data":"Perplexica is an "}
-{"type":"response","data":"innovative, open-source "}
-{"type":"response","data":"AI-powered search engine..."}
-{"type":"done"}
-```
-
-Clients should process each line as a separate JSON object. The different message types include:
-
-- **`init`**: Initial connection message
-- **`sources`**: All sources used for the response
-- **`response`**: Chunks of the generated answer text
-- **`done`**: Indicates the stream is complete
-
 ### Fields in the Response

 - **`message`** (string): The search result, generated based on the query and focus mode.
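
The section removed above describes a newline-delimited JSON stream carrying `init`, `sources`, `response`, and `done` messages. A minimal client sketch for that protocol could look like the following; it assumes a fetch-compatible runtime and the same endpoint and request fields as the earlier sketch, none of which is confirmed by this diff.

```ts
// Sketch of a client for the newline-delimited JSON protocol documented above.
// The endpoint path, focusMode, and query fields are assumptions for illustration.
async function streamSearch(query: string): Promise<string> {
  const res = await fetch('http://localhost:3000/api/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ focusMode: 'webSearch', query, stream: true }),
  });

  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  let answer = '';

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    // Each complete line is one JSON object: init, sources, response, or done.
    let newline = buffer.indexOf('\n');
    while (newline !== -1) {
      const line = buffer.slice(0, newline).trim();
      buffer = buffer.slice(newline + 1);
      if (line) {
        const event = JSON.parse(line);
        if (event.type === 'response') answer += event.data;
        else if (event.type === 'sources') console.log('sources:', event.data);
        else if (event.type === 'done') return answer;
      }
      newline = buffer.indexOf('\n');
    }
  }

  return answer;
}
```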
@@ -295,9 +295,9 @@ export const POST = async (req: Request) => {
       },
     });
   } catch (err) {
-    console.error('An error occurred while processing chat request:', err);
+    console.error('An error ocurred while processing chat request:', err);
     return Response.json(
-      { message: 'An error occurred while processing chat request' },
+      { message: 'An error ocurred while processing chat request' },
       { status: 500 },
     );
   }
@@ -59,9 +59,9 @@ export const GET = async (req: Request) => {

     return Response.json({ ...config }, { status: 200 });
   } catch (err) {
-    console.error('An error occurred while getting config:', err);
+    console.error('An error ocurred while getting config:', err);
     return Response.json(
-      { message: 'An error occurred while getting config' },
+      { message: 'An error ocurred while getting config' },
       { status: 500 },
    );
   }
@@ -100,9 +100,9 @@ export const POST = async (req: Request) => {

     return Response.json({ message: 'Config updated' }, { status: 200 });
   } catch (err) {
-    console.error('An error occurred while updating config:', err);
+    console.error('An error ocurred while updating config:', err);
     return Response.json(
-      { message: 'An error occurred while updating config' },
+      { message: 'An error ocurred while updating config' },
       { status: 500 },
     );
   }
@@ -48,7 +48,7 @@ export const GET = async (req: Request) => {
       },
     );
   } catch (err) {
-    console.error(`An error occurred in discover route: ${err}`);
+    console.error(`An error ocurred in discover route: ${err}`);
     return Response.json(
       {
         message: 'An error has occurred',
@@ -74,9 +74,9 @@ export const POST = async (req: Request) => {

     return Response.json({ images }, { status: 200 });
   } catch (err) {
-    console.error(`An error occurred while searching images: ${err}`);
+    console.error(`An error ocurred while searching images: ${err}`);
     return Response.json(
-      { message: 'An error occurred while searching images' },
+      { message: 'An error ocurred while searching images' },
       { status: 500 },
     );
   }
@@ -34,7 +34,7 @@ export const GET = async (req: Request) => {
       },
     );
   } catch (err) {
-    console.error('An error occurred while fetching models', err);
+    console.error('An error ocurred while fetching models', err);
     return Response.json(
       {
         message: 'An error has occurred.',
@@ -146,10 +146,7 @@ export const POST = async (req: Request) => {
           }
         } catch (error) {
           reject(
-            Response.json(
-              { message: 'Error parsing data' },
-              { status: 500 },
-            ),
+            Response.json({ message: 'Error parsing data' }, { status: 500 }),
           );
         }
       });
@@ -160,10 +157,7 @@ export const POST = async (req: Request) => {

       emitter.on('error', (error: any) => {
         reject(
-          Response.json(
-            { message: 'Search error', error },
-            { status: 500 },
-          ),
+          Response.json({ message: 'Search error', error }, { status: 500 }),
         );
       });
     },
@@ -179,21 +173,18 @@ export const POST = async (req: Request) => {
       start(controller) {
         let sources: any[] = [];

-        controller.enqueue(
-          encoder.encode(
-            JSON.stringify({
-              type: 'init',
-              data: 'Stream connected',
-            }) + '\n',
-          ),
-        );
+        controller.enqueue(encoder.encode("data: " + JSON.stringify({
+          type: 'init',
+          data: 'Stream connected'
+        }) + "\n\n"));

         signal.addEventListener('abort', () => {
           emitter.removeAllListeners();

           try {
             controller.close();
-          } catch (error) {}
+          } catch (error) {
+          }
         });

         emitter.on('data', (data: string) => {
@@ -203,24 +194,16 @@ export const POST = async (req: Request) => {
             const parsedData = JSON.parse(data);

             if (parsedData.type === 'response') {
-              controller.enqueue(
-                encoder.encode(
-                  JSON.stringify({
-                    type: 'response',
-                    data: parsedData.data,
-                  }) + '\n',
-                ),
-              );
+              controller.enqueue(encoder.encode("data: " + JSON.stringify({
+                type: 'response',
+                data: parsedData.data
+              }) + "\n\n"));
             } else if (parsedData.type === 'sources') {
               sources = parsedData.data;
-              controller.enqueue(
-                encoder.encode(
-                  JSON.stringify({
-                    type: 'sources',
-                    data: sources,
-                  }) + '\n',
-                ),
-              );
+              controller.enqueue(encoder.encode("data: " + JSON.stringify({
+                type: 'sources',
+                data: sources
+              }) + "\n\n"));
             }
           } catch (error) {
             controller.error(error);
@@ -230,13 +213,9 @@ export const POST = async (req: Request) => {
         emitter.on('end', () => {
           if (signal.aborted) return;

-          controller.enqueue(
-            encoder.encode(
-              JSON.stringify({
-                type: 'done',
-              }) + '\n',
-            ),
-          );
+          controller.enqueue(encoder.encode("data: " + JSON.stringify({
+            type: 'done'
+          }) + "\n\n"));
           controller.close();
         });

@@ -246,16 +225,17 @@ export const POST = async (req: Request) => {
           controller.error(error);
         });
       },

       cancel() {
         abortController.abort();
-      },
+      }
     });

     return new Response(stream, {
       headers: {
         'Content-Type': 'text/event-stream',
         'Cache-Control': 'no-cache, no-transform',
-        Connection: 'keep-alive',
+        'Connection': 'keep-alive',
       },
     });
   } catch (err: any) {
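
Taken together, the streaming hunks above change the framing of each stream message: one side writes one JSON object per line, while the other wraps the same object in a `data:` field terminated by a blank line (the framing conventionally paired with `text/event-stream`). The sketch below contrasts the two encodings; it is an illustration, not code from the repository.

```ts
// Sketch contrasting the two wire framings visible in the hunks above.
const encoder = new TextEncoder();

type StreamEvent = { type: string; data?: unknown };

// Newline-delimited JSON: one complete JSON object per line.
function ndjsonFrame(event: StreamEvent): Uint8Array {
  return encoder.encode(JSON.stringify(event) + '\n');
}

// SSE-style framing: a `data:` field followed by a blank line.
function sseFrame(event: StreamEvent): Uint8Array {
  return encoder.encode('data: ' + JSON.stringify(event) + '\n\n');
}

// The init message from the diff, in both framings:
ndjsonFrame({ type: 'init', data: 'Stream connected' }); // {"type":"init",...}\n
sseFrame({ type: 'init', data: 'Stream connected' });    // data: {"type":"init",...}\n\n
```

A client parsing the `data:` framing would typically strip the `data: ` prefix before calling `JSON.parse`, whereas the newline-delimited form parses each non-empty line directly.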
@@ -72,9 +72,9 @@ export const POST = async (req: Request) => {

     return Response.json({ suggestions }, { status: 200 });
   } catch (err) {
-    console.error(`An error occurred while generating suggestions: ${err}`);
+    console.error(`An error ocurred while generating suggestions: ${err}`);
     return Response.json(
-      { message: 'An error occurred while generating suggestions' },
+      { message: 'An error ocurred while generating suggestions' },
       { status: 500 },
     );
   }
@@ -74,9 +74,9 @@ export const POST = async (req: Request) => {

     return Response.json({ videos }, { status: 200 });
   } catch (err) {
-    console.error(`An error occurred while searching videos: ${err}`);
+    console.error(`An error ocurred while searching videos: ${err}`);
     return Response.json(
-      { message: 'An error occurred while searching videos' },
+      { message: 'An error ocurred while searching videos' },
       { status: 500 },
     );
   }
@@ -8,10 +8,6 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { Embeddings } from '@langchain/core/embeddings';

 const geminiChatModels: Record<string, string>[] = [
-  {
-    displayName: 'Gemini 2.5 Pro Experimental',
-    key: 'gemini-2.5-pro-exp-03-25',
-  },
   {
     displayName: 'Gemini 2.0 Flash',
     key: 'gemini-2.0-flash',
@@ -21,8 +17,8 @@ const geminiChatModels: Record<string, string>[] = [
     key: 'gemini-2.0-flash-lite',
   },
   {
-    displayName: 'Gemini 2.0 Flash Thinking Experimental',
-    key: 'gemini-2.0-flash-thinking-exp-01-21',
+    displayName: 'Gemini 2.0 Pro Experimental',
+    key: 'gemini-2.0-pro-exp-02-05',
   },
   {
     displayName: 'Gemini 1.5 Flash',
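
The `geminiChatModels` entries diffed above are plain `displayName`/`key` records. As a hedged illustration of how such a list can be consumed (the actual provider wiring is not part of this diff), it reduces to a key-to-name lookup:

```ts
// Illustration only: building a lookup from the displayName/key records shown
// in the hunks above. The real provider code is not included in this diff.
const geminiChatModels: Record<string, string>[] = [
  { displayName: 'Gemini 2.0 Flash', key: 'gemini-2.0-flash' },
  { displayName: 'Gemini 2.0 Pro Experimental', key: 'gemini-2.0-pro-exp-02-05' },
];

const displayNameByKey: Record<string, string> = Object.fromEntries(
  geminiChatModels.map((m) => [m.key, m.displayName]),
);

console.log(displayNameByKey['gemini-2.0-pro-exp-02-05']); // "Gemini 2.0 Pro Experimental"
```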