Compare commits


2 Commits

SHA1        Message                            Date
086b8abdf4  Merge 8097610baf into 09661ae11d   2025-04-05 17:57:22 -07:00
8097610baf  Adding user session for history    2025-04-05 17:43:04 -07:00
13 changed files with 63 additions and 87 deletions

View File

@@ -33,7 +33,6 @@ The API accepts a JSON object in the request body, where you define the focus mo
["human", "Hi, how are you?"],
["assistant", "I am doing well, how can I help you today?"]
],
"systemInstructions": "Focus on providing technical details about Perplexica's architecture.",
"stream": false
}
```
@@ -64,8 +63,6 @@ The API accepts a JSON object in the request body, where you define the focus mo
- **`query`** (string, required): The search query or question.
- **`systemInstructions`** (string, optional): Custom instructions provided by the user to guide the AI's response. These instructions are treated as user preferences and have lower priority than the system's core instructions. For example, you can specify a particular writing style, format, or focus area.
- **`history`** (array, optional): An array of message pairs representing the conversation history. Each pair consists of a role (either 'human' or 'assistant') and the message content. This allows the system to use the context of the conversation to refine results. Example:
```json

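For orientation, here is a minimal sketch of calling the search endpoint with conversation history. The `/api/search` path is an assumption (file names are not shown in this view), and only the fields visible in the hunk above are included; the rest of the request body is truncated here.

```ts
// Sketch only: the endpoint path is an assumption; field shapes follow the hunk above.
const res = await fetch('/api/search', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    query: 'How does Perplexica work?',
    history: [
      ['human', 'Hi, how are you?'],
      ['assistant', 'I am doing well, how can I help you today?'],
    ],
    stream: false,
  }),
});
const data = await res.json();
```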
View File

@@ -22,8 +22,5 @@ MODEL_NAME = ""
[MODELS.OLLAMA]
API_URL = "" # Ollama API URL - http://host.docker.internal:11434
[MODELS.DEEPSEEK]
API_KEY = ""
[API_ENDPOINTS]
SEARXNG = "" # SearxNG API URL - http://localhost:32768

View File

@@ -29,6 +29,7 @@ type Message = {
messageId: string;
chatId: string;
content: string;
userSessionId: string;
};
type ChatModel = {
@@ -138,6 +139,7 @@ const handleHistorySave = async (
where: eq(chats.id, message.chatId),
});
let currentDate = new Date();
if (!chat) {
await db
.insert(chats)
@@ -147,6 +149,8 @@ const handleHistorySave = async (
createdAt: new Date().toString(),
focusMode: focusMode,
files: files.map(getFileDetails),
userSessionId: message.userSessionId,
timestamp: currentDate.toISOString(),
})
.execute();
}
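
One detail worth flagging: `createdAt` is stored with `new Date().toString()`, which produces a locale-style string that does not sort chronologically as text, while the new `timestamp` column uses `toISOString()`, whose lexicographic order matches chronological order. A quick illustration of the difference:

```ts
const d = new Date();
// e.g. "Sat Apr 05 2025 17:43:04 GMT-0700 (Pacific Daylight Time)" - not text-sortable
console.log(d.toString());
// e.g. "2025-04-06T00:43:04.000Z" - text-sortable, which the retention query below relies on
console.log(d.toISOString());
```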

View File

@@ -1,10 +1,47 @@
import db from '@/lib/db';
import { chats } from '@/lib/db/schema';
import { eq, sql } from 'drizzle-orm';
export const GET = async (req: Request) => {
try {
let chats = await db.query.chats.findMany();
chats = chats.reverse();
return Response.json({ chats: chats }, { status: 200 });
// Read the user session id from the request headers.
const headers = req.headers;
const userSessionId = headers.get('user-session-id') ?? '';
if (userSessionId === '') {
return Response.json({ chats: [] }, { status: 200 });
}
let chatsRes = await db.query.chats.findMany({
where: eq(chats.userSessionId, userSessionId),
});
chatsRes = chatsRes.reverse();
// Keep only the latest 20 chats for this session; delete older records.
const maxRecordLimit = 20;
if (chatsRes.length > maxRecordLimit) {
const deleteChatsQuery = sql`DELETE FROM chats
WHERE userSessionId = ${userSessionId} AND (
timestamp IS NULL OR
timestamp NOT IN (
SELECT timestamp FROM chats
WHERE userSessionId = ${userSessionId}
ORDER BY timestamp DESC
LIMIT ${maxRecordLimit}
)
)
`;
await db.run(deleteChatsQuery);
// Delete messages whose parent chat no longer exists.
const deleteMessagesQuery = sql`DELETE FROM messages
WHERE chatId NOT IN (
SELECT id FROM chats
)
`;
await db.run(deleteMessagesQuery);
}
return Response.json({ chats: chatsRes }, { status: 200 });
} catch (err) {
console.error('Error in getting chats: ', err);
return Response.json(

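Putting the pieces together: the route now keys everything off a `user-session-id` request header and silently trims each session to its 20 most recent chats. A minimal sketch of a matching client call, mirroring the library-page hunk further down in this comparison:

```ts
// The session id is generated client-side and persisted in localStorage,
// as shown in the library-page hunk later in this diff.
const userSessionId = localStorage.getItem('userSessionId') ?? '';
const res = await fetch('/api/chats', {
  method: 'GET',
  headers: {
    'Content-Type': 'application/json',
    'user-session-id': userSessionId,
  },
});
const { chats } = await res.json();
```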
View File

@@ -7,7 +7,6 @@ import {
getGroqApiKey,
getOllamaApiEndpoint,
getOpenaiApiKey,
getDeepseekApiKey,
updateConfig,
} from '@/lib/config';
import {
@@ -54,7 +53,6 @@ export const GET = async (req: Request) => {
config['anthropicApiKey'] = getAnthropicApiKey();
config['groqApiKey'] = getGroqApiKey();
config['geminiApiKey'] = getGeminiApiKey();
config['deepseekApiKey'] = getDeepseekApiKey();
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
config['customOpenaiModelName'] = getCustomOpenaiModelName();
@@ -90,9 +88,6 @@ export const POST = async (req: Request) => {
OLLAMA: {
API_URL: config.ollamaApiUrl,
},
DEEPSEEK: {
API_KEY: config.deepseekApiKey,
},
CUSTOM_OPENAI: {
API_URL: config.customOpenaiApiUrl,
API_KEY: config.customOpenaiApiKey,

View File

@@ -34,7 +34,6 @@ interface ChatRequestBody {
query: string;
history: Array<[string, string]>;
stream?: boolean;
systemInstructions?: string;
}
export const POST = async (req: Request) => {
@@ -126,7 +125,7 @@ export const POST = async (req: Request) => {
embeddings,
body.optimizationMode,
[],
body.systemInstructions || '',
'',
);
if (!body.stream) {

View File

@@ -1,5 +1,6 @@
'use client';
import crypto from 'crypto';
import DeleteChat from '@/components/DeleteChat';
import { cn, formatTimeDifference } from '@/lib/utils';
import { BookOpenText, ClockIcon, Delete, ScanEye } from 'lucide-react';
@@ -21,10 +22,17 @@ const Page = () => {
const fetchChats = async () => {
setLoading(true);
let userSessionId = localStorage.getItem('userSessionId');
if (!userSessionId) {
userSessionId = crypto.randomBytes(20).toString('hex');
localStorage.setItem('userSessionId', userSessionId);
}
const res = await fetch(`/api/chats`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
'user-session-id': userSessionId!,
},
});
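
The get-or-create pattern above also appears in `ChatWindow` further down in this comparison. A hypothetical shared helper both call sites could use (the name and module are assumptions; the diff inlines this logic at each call site):

```ts
import crypto from 'crypto';

// Hypothetical helper, not part of this diff: returns the persistent
// per-browser session id, creating and storing one on first use.
export const getOrCreateUserSessionId = (): string => {
  let id = localStorage.getItem('userSessionId');
  if (!id) {
    id = crypto.randomBytes(20).toString('hex');
    localStorage.setItem('userSessionId', id);
  }
  return id;
};
```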

View File

@@ -20,7 +20,6 @@ interface SettingsType {
anthropicApiKey: string;
geminiApiKey: string;
ollamaApiUrl: string;
deepseekApiKey: string;
customOpenaiApiKey: string;
customOpenaiApiUrl: string;
customOpenaiModelName: string;
@@ -839,25 +838,6 @@ const Page = () => {
onSave={(value) => saveConfig('geminiApiKey', value)}
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Deepseek API Key
</p>
<Input
type="text"
placeholder="Deepseek API Key"
value={config.deepseekApiKey}
isSaving={savingStates['deepseekApiKey']}
onChange={(e) => {
setConfig((prev) => ({
...prev!,
deepseekApiKey: e.target.value,
}));
}}
onSave={(value) => saveConfig('deepseekApiKey', value)}
/>
</div>
</div>
</SettingsSection>
</div>

View File

@@ -95,6 +95,12 @@ const checkConfig = async (
if (!embeddingModel || !embeddingModelProvider) {
const embeddingModelProviders = providers.embeddingModelProviders;
let userSessionId = localStorage.getItem('userSessionId');
if (!userSessionId) {
userSessionId = crypto.randomBytes(20).toString('hex');
localStorage.setItem('userSessionId', userSessionId);
}
if (
!embeddingModelProviders ||
Object.keys(embeddingModelProviders).length === 0
@@ -342,6 +348,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
let added = false;
messageId = messageId ?? crypto.randomBytes(7).toString('hex');
let userSessionId = localStorage.getItem('userSessionId');
setMessages((prevMessages) => [
...prevMessages,
@@ -466,6 +473,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
messageId: messageId,
chatId: chatId!,
content: message,
userSessionId: userSessionId,
},
chatId: chatId!,
files: fileIds,

View File

@@ -25,9 +25,6 @@ interface Config {
OLLAMA: {
API_URL: string;
};
DEEPSEEK: {
API_KEY: string;
};
CUSTOM_OPENAI: {
API_URL: string;
API_KEY: string;
@@ -66,8 +63,6 @@ export const getSearxngApiEndpoint = () =>
export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
export const getCustomOpenaiApiKey = () =>
loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;

View File

@@ -25,4 +25,6 @@ export const chats = sqliteTable('chats', {
files: text('files', { mode: 'json' })
.$type<File[]>()
.default(sql`'[]'`),
userSessionId: text('userSessionId'),
timestamp: text('timestamp'),
});
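
With the two new columns in place, here is a minimal sketch of querying them through drizzle-orm (assuming the same import paths used elsewhere in this diff; `recentChats` is a hypothetical helper):

```ts
import { desc, eq } from 'drizzle-orm';
import db from '@/lib/db';
import { chats } from '@/lib/db/schema';

// Latest chats for a session, newest first. Ordering by the text column
// works because ISO-8601 timestamps sort lexicographically in time order.
const recentChats = (sessionId: string) =>
  db.query.chats.findMany({
    where: eq(chats.userSessionId, sessionId),
    orderBy: [desc(chats.timestamp)],
    limit: 20,
  });
```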

View File

@@ -1,44 +0,0 @@
import { ChatOpenAI } from '@langchain/openai';
import { getDeepseekApiKey } from '../config';
import { ChatModel } from '.';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
const deepseekChatModels: Record<string, string>[] = [
{
displayName: 'Deepseek Chat (Deepseek V3)',
key: 'deepseek-chat',
},
{
displayName: 'Deepseek Reasoner (Deepseek R1)',
key: 'deepseek-reasoner',
},
];
export const loadDeepseekChatModels = async () => {
const deepseekApiKey = getDeepseekApiKey();
if (!deepseekApiKey) return {};
try {
const chatModels: Record<string, ChatModel> = {};
deepseekChatModels.forEach((model) => {
chatModels[model.key] = {
displayName: model.displayName,
model: new ChatOpenAI({
openAIApiKey: deepseekApiKey,
modelName: model.key,
temperature: 0.7,
configuration: {
baseURL: 'https://api.deepseek.com',
},
}) as unknown as BaseChatModel,
};
});
return chatModels;
} catch (err) {
console.error(`Error loading Deepseek models: ${err}`);
return {};
}
};

View File

@@ -12,7 +12,6 @@ import { loadGroqChatModels } from './groq';
import { loadAnthropicChatModels } from './anthropic';
import { loadGeminiChatModels, loadGeminiEmbeddingModels } from './gemini';
import { loadTransformersEmbeddingsModels } from './transformers';
import { loadDeepseekChatModels } from './deepseek';
export interface ChatModel {
displayName: string;
@@ -33,7 +32,6 @@ export const chatModelProviders: Record<
groq: loadGroqChatModels,
anthropic: loadAnthropicChatModels,
gemini: loadGeminiChatModels,
deepseek: loadDeepseekChatModels,
};
export const embeddingModelProviders: Record<