Compare commits


18 Commits

Author SHA1 Message Date
ItzCrazyKns
b8e4152e77 Merge pull request #857 from skoved/fix-light-mode
make file icon in attachment modal in chat page fit light theme better
2025-09-12 21:16:46 +05:30
ItzCrazyKns
c8ac9279bd Merge pull request #866 from agungbesti/feat/add-openai-models
feat: add new OpenAI models with proper temperature parameter handling
2025-09-12 21:15:09 +05:30
akubesti
6f367c34a8 feat: add gpt-5, gpt-5-mini, o3 models and remove gpt-5-chat-latest
- Add new OpenAI models: gpt-5, gpt-5-mini, and o3 series

- Fix temperature parameter handling for o3 models

- Update models list to ensure compatibility
2025-09-12 22:22:16 +07:00
akubesti
328b12ffbe feat: add new OpenAI models with proper temperature parameter handling
- Add GPT 4.1 series and o1/o3/o4 models with temperature compatibility fixes

- Remove gpt-5/gpt-5-mini models due to organization verification restrictions

- Fix 400 errors for models that only support default temperature values
2025-09-11 16:38:01 +07:00
skoved
d8486e90bb make file icon in attachment modal in chat page fit light theme better
make the file icon in the attachment modal for the chat page an off-white background so that it matches the light theme better and looks the same as the attachment modal on the home page
2025-08-27 09:43:09 -04:00
ItzCrazyKns
238bcaff2b Delete .github/FUNDING.yml 2025-08-27 16:23:39 +05:30
ItzCrazyKns
6f7c55b783 Update FUNDING.yml with Patreon username
Added Patreon username for funding support.
2025-08-27 16:19:53 +05:30
ItzCrazyKns
83a0cffe1b Merge pull request #828 from justinmayer/patch-1
docs: Add instructions for local OpenAI-API-compatible LLMs
2025-08-27 14:38:55 +05:30
ItzCrazyKns
829ae59944 Merge pull request #855 from skoved/fix-light-mode
fix text color for topic names on discover page in light mode
2025-08-27 14:38:13 +05:30
skoved
a546eb18a1 make file icon in attachment modal fit light theme better
make the file icon in the attachment modal black on an off-white background so that it matches the light theme better and looks stylistically equivalent to its look in dark mode
2025-08-26 21:53:51 -04:00
skoved
ff1ca56157 fix text color for topic names on discover page in light mode
make light mode color for topic text black so it is readable
2025-08-26 21:16:21 -04:00
ItzCrazyKns
30725b5d6d feat(attach): remove unused import 2025-08-25 21:48:08 +05:30
ItzCrazyKns
8dc54efbdd feat(chat-route): lint & beautify 2025-08-21 17:48:55 +05:30
ItzCrazyKns
72f26b4370 feat(upload): save files uploaded after chat created 2025-08-21 17:47:49 +05:30
ItzCrazyKns
f680188905 feat(ollama): add ability to provide api key 2025-08-20 20:32:13 +05:30
ItzCrazyKns
0b15bfbe32 feat(app): switch to useChat hook 2025-08-20 20:21:06 +05:30
ItzCrazyKns
8fc7808654 feat(hooks): implement useChat hook 2025-08-20 20:20:50 +05:30
Justin Mayer
65fc881356 docs: Add instructions for local OpenAI-API-compatible LLMs
Perplexica supports local OpenAI-API-compatible LLMs via the `[MODELS.CUSTOM_OPENAI]` header in `config.toml` but was missing documentation for configuring the necessary settings. This makes that support more explicit and visible, as well as helping end users set the required configuration values appropriately.
2025-07-17 19:10:12 +02:00
23 changed files with 816 additions and 782 deletions

View File

@@ -53,7 +53,7 @@ Want to know more about its architecture and how it works? You can read it [here
 ## Features

-- **Local LLMs**: You can make use local LLMs such as Llama3 and Mixtral using Ollama.
+- **Local LLMs**: You can utilize local LLMs such as Qwen, DeepSeek, Llama, and Mistral.
 - **Two Main Modes:**
   - **Copilot Mode:** (In development) Boosts search by generating different queries to find more relevant internet sources. Like normal search instead of just using the context by SearxNG, it visits the top matches and tries to find relevant sources to the user's query directly from the page.
   - **Normal Mode:** Processes your query and performs a web search.
@@ -87,6 +87,7 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
 4. Rename the `sample.config.toml` file to `config.toml`. For Docker setups, you need only fill in the following fields:
    - `OPENAI`: Your OpenAI API key. **You only need to fill this if you wish to use OpenAI's models**.
+   - `CUSTOM_OPENAI`: Your OpenAI-API-compliant local server URL, model name, and API key. You should run your local server with host set to `0.0.0.0`, take note of which port number it is running on, and then use that port number to set `API_URL = http://host.docker.internal:PORT_NUMBER`. You must specify the model name, such as `MODEL_NAME = "unsloth/DeepSeek-R1-0528-Qwen3-8B-GGUF:Q4_K_XL"`. Finally, set `API_KEY` to the appropriate value. If you have not defined an API key, just put anything you want in-between the quotation marks: `API_KEY = "whatever-you-want-but-not-blank"` **You only need to configure these settings if you want to use a local OpenAI-compliant server, such as Llama.cpp's [`llama-server`](https://github.com/ggml-org/llama.cpp/blob/master/tools/server/README.md)**.
    - `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**.
    - `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models**.
    - `ANTHROPIC`: Your Anthropic API key. **You only need to fill this if you wish to use Anthropic models**.
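For reference, the `CUSTOM_OPENAI` values described in the added line combine into a `config.toml` entry like the following; the `[MODELS.CUSTOM_OPENAI]` section name comes from the PR description, while the port and model name are placeholders, not defaults:

```toml
[MODELS.CUSTOM_OPENAI]
# Local OpenAI-API-compliant server, reachable from inside the Docker container
API_URL = "http://host.docker.internal:8080"
# Must match the model actually loaded by your local server
MODEL_NAME = "unsloth/DeepSeek-R1-0528-Qwen3-8B-GGUF:Q4_K_XL"
# Any non-empty string if the server does not enforce a key
API_KEY = "whatever-you-want-but-not-blank"
```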
@@ -120,7 +121,17 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
 See the [installation documentation](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/installation) for more information like updating, etc.

-### Ollama Connection Errors
+### Troubleshooting
+
+#### Local OpenAI-API-Compliant Servers
+
+If Perplexica tells you that you haven't configured any chat model providers, ensure that:
+
+1. Your server is running on `0.0.0.0` (not `127.0.0.1`) and on the same port you put in the API URL.
+2. You have specified the correct model name loaded by your local LLM server.
+3. You have specified the correct API key, or if one is not defined, you have put *something* in the API key field and not left it empty.
+
+#### Ollama Connection Errors

 If you're encountering an Ollama connection error, it is likely due to the backend being unable to connect to Ollama's API. To fix this issue you can:
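A quick way to verify point 1 of the local-server checklist above is to query the server's model listing from where the backend runs. This is only a sketch: it assumes the standard OpenAI-compatible `/v1/models` route, and the port and API key are placeholders for whatever you set in `config.toml`:

```ts
// Sanity check: can the backend reach your local OpenAI-compatible server?
const res = await fetch('http://host.docker.internal:8080/v1/models', {
  headers: { Authorization: 'Bearer whatever-you-want-but-not-blank' },
});
if (!res.ok) throw new Error(`Server not reachable: ${res.status}`);
const { data } = await res.json();
// MODEL_NAME in config.toml must match one of these IDs.
console.log(data.map((m: { id: string }) => m.id));
```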

View File

@@ -1,11 +1,7 @@
-import prompts from '@/lib/prompts';
-import MetaSearchAgent from '@/lib/search/metaSearchAgent';
 import crypto from 'crypto';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
 import { EventEmitter } from 'stream';
 import {
-  chatModelProviders,
-  embeddingModelProviders,
   getAvailableChatModelProviders,
   getAvailableEmbeddingModelProviders,
 } from '@/lib/providers';
@@ -138,6 +134,8 @@ const handleHistorySave = async (
     where: eq(chats.id, message.chatId),
   });

+  const fileData = files.map(getFileDetails);
+
   if (!chat) {
     await db
       .insert(chats)
@@ -146,9 +144,15 @@ const handleHistorySave = async (
         title: message.content,
         createdAt: new Date().toString(),
         focusMode: focusMode,
-        files: files.map(getFileDetails),
+        files: fileData,
       })
       .execute();
+  } else if (JSON.stringify(chat.files ?? []) != JSON.stringify(fileData)) {
+    db.update(chats)
+      .set({
+        files: files.map(getFileDetails),
+      })
+      .where(eq(chats.id, message.chatId));
   }

   const messageExists = await db.query.messages.findFirst({

View File

@@ -11,6 +11,7 @@ import {
   getAimlApiKey,
   getLMStudioApiEndpoint,
   updateConfig,
+  getOllamaApiKey,
 } from '@/lib/config';
 import {
   getAvailableChatModelProviders,
@@ -53,6 +54,7 @@ export const GET = async (req: Request) => {
     config['openaiApiKey'] = getOpenaiApiKey();
     config['ollamaApiUrl'] = getOllamaApiEndpoint();
+    config['ollamaApiKey'] = getOllamaApiKey();
     config['lmStudioApiUrl'] = getLMStudioApiEndpoint();
     config['anthropicApiKey'] = getAnthropicApiKey();
     config['groqApiKey'] = getGroqApiKey();
@@ -93,6 +95,7 @@ export const POST = async (req: Request) => {
       },
       OLLAMA: {
         API_URL: config.ollamaApiUrl,
+        API_KEY: config.ollamaApiKey,
       },
       DEEPSEEK: {
         API_KEY: config.deepseekApiKey,

View File

@@ -1,9 +1,17 @@
-import ChatWindow from '@/components/ChatWindow';
-import React from 'react';
+'use client';
+
+import ChatWindow from '@/components/ChatWindow';
+import { useParams } from 'next/navigation';
+import React from 'react';
+import { ChatProvider } from '@/lib/hooks/useChat';

-const Page = ({ params }: { params: Promise<{ chatId: string }> }) => {
-  const { chatId } = React.use(params);
-
-  return <ChatWindow id={chatId} />;
+const Page = () => {
+  const { chatId }: { chatId: string } = useParams();
+
+  return (
+    <ChatProvider id={chatId}>
+      <ChatWindow />
+    </ChatProvider>
+  );
 };

 export default Page;

View File

@@ -91,7 +91,7 @@ const Page = () => {
   'border-[0.1px] rounded-full text-sm px-3 py-1 text-nowrap transition duration-200 cursor-pointer',
   activeTopic === t.key
     ? 'text-cyan-300 bg-cyan-300/30 border-cyan-300/60'
-    : 'border-white/30 text-white/70 hover:text-white hover:border-white/40 hover:bg-white/5',
+    : 'border-black/30 dark:border-white/30 text-black/70 dark:text-white/70 hover:text-black dark:hover:text-white hover:border-black/40 dark:hover:border-white/40 hover:bg-black/5 dark:hover:bg-white/5',
 )}
 onClick={() => setActiveTopic(t.key)}
 >

View File

@@ -1,4 +1,5 @@
 import ChatWindow from '@/components/ChatWindow';
+import { ChatProvider } from '@/lib/hooks/useChat';
 import { Metadata } from 'next';
 import { Suspense } from 'react';
@@ -11,7 +12,9 @@ const Home = () => {
   return (
     <div>
       <Suspense>
-        <ChatWindow />
+        <ChatProvider>
+          <ChatWindow />
+        </ChatProvider>
       </Suspense>
     </div>
   );

View File

@@ -21,6 +21,7 @@ interface SettingsType {
   anthropicApiKey: string;
   geminiApiKey: string;
   ollamaApiUrl: string;
+  ollamaApiKey: string;
   lmStudioApiUrl: string;
   deepseekApiKey: string;
   aimlApiKey: string;
@@ -818,6 +819,25 @@ const Page = () => {
             />
           </div>
+          <div className="flex flex-col space-y-1">
+            <p className="text-black/70 dark:text-white/70 text-sm">
+              Ollama API Key (Can be left blank)
+            </p>
+            <Input
+              type="text"
+              placeholder="Ollama API Key"
+              value={config.ollamaApiKey}
+              isSaving={savingStates['ollamaApiKey']}
+              onChange={(e) => {
+                setConfig((prev) => ({
+                  ...prev!,
+                  ollamaApiKey: e.target.value,
+                }));
+              }}
+              onSave={(value) => saveConfig('ollamaApiKey', value)}
+            />
+          </div>
           <div className="flex flex-col space-y-1">
             <p className="text-black/70 dark:text-white/70 text-sm">
               GROQ API Key

View File

@@ -5,28 +5,11 @@ import MessageInput from './MessageInput';
 import { File, Message } from './ChatWindow';
 import MessageBox from './MessageBox';
 import MessageBoxLoading from './MessageBoxLoading';
+import { useChat } from '@/lib/hooks/useChat';

-const Chat = ({
-  loading,
-  messages,
-  sendMessage,
-  messageAppeared,
-  rewrite,
-  fileIds,
-  setFileIds,
-  files,
-  setFiles,
-}: {
-  messages: Message[];
-  sendMessage: (message: string) => void;
-  loading: boolean;
-  messageAppeared: boolean;
-  rewrite: (messageId: string) => void;
-  fileIds: string[];
-  setFileIds: (fileIds: string[]) => void;
-  files: File[];
-  setFiles: (files: File[]) => void;
-}) => {
+const Chat = () => {
+  const { messages, loading, messageAppeared } = useChat();
   const [dividerWidth, setDividerWidth] = useState(0);
   const dividerRef = useRef<HTMLDivElement | null>(null);
   const messageEnd = useRef<HTMLDivElement | null>(null);
@@ -72,12 +55,8 @@ const Chat = ({
   key={i}
   message={msg}
   messageIndex={i}
-  history={messages}
-  loading={loading}
   dividerRef={isLast ? dividerRef : undefined}
   isLast={isLast}
-  rewrite={rewrite}
-  sendMessage={sendMessage}
 />
 {!isLast && msg.role === 'assistant' && (
   <div className="h-px w-full bg-light-secondary dark:bg-dark-secondary" />
@@ -92,14 +71,7 @@ const Chat = ({
   className="bottom-24 lg:bottom-10 fixed z-40"
   style={{ width: dividerWidth }}
 >
-  <MessageInput
-    loading={loading}
-    sendMessage={sendMessage}
-    fileIds={fileIds}
-    setFileIds={setFileIds}
-    files={files}
-    setFiles={setFiles}
-  />
+  <MessageInput />
 </div>
 )}
 </div>

View File

@@ -1,17 +1,13 @@
 'use client';

-import { useEffect, useRef, useState } from 'react';
 import { Document } from '@langchain/core/documents';
 import Navbar from './Navbar';
 import Chat from './Chat';
 import EmptyChat from './EmptyChat';
-import crypto from 'crypto';
-import { toast } from 'sonner';
-import { useSearchParams } from 'next/navigation';
-import { getSuggestions } from '@/lib/actions';
 import { Settings } from 'lucide-react';
 import Link from 'next/link';
 import NextError from 'next/error';
+import { useChat } from '@/lib/hooks/useChat';

 export type Message = {
   messageId: string;
@@ -29,547 +25,8 @@ export interface File {
   fileId: string;
 }

+const ChatWindow = () => {
+  const { hasError, isReady, notFound, messages } = useChat();

-interface ChatModelProvider {
-  name: string;
-  provider: string;
-}
-
-interface EmbeddingModelProvider {
-  name: string;
-  provider: string;
-}
const checkConfig = async (
setChatModelProvider: (provider: ChatModelProvider) => void,
setEmbeddingModelProvider: (provider: EmbeddingModelProvider) => void,
setIsConfigReady: (ready: boolean) => void,
setHasError: (hasError: boolean) => void,
) => {
try {
let chatModel = localStorage.getItem('chatModel');
let chatModelProvider = localStorage.getItem('chatModelProvider');
let embeddingModel = localStorage.getItem('embeddingModel');
let embeddingModelProvider = localStorage.getItem('embeddingModelProvider');
const autoImageSearch = localStorage.getItem('autoImageSearch');
const autoVideoSearch = localStorage.getItem('autoVideoSearch');
if (!autoImageSearch) {
localStorage.setItem('autoImageSearch', 'true');
}
if (!autoVideoSearch) {
localStorage.setItem('autoVideoSearch', 'false');
}
const providers = await fetch(`/api/models`, {
headers: {
'Content-Type': 'application/json',
},
}).then(async (res) => {
if (!res.ok)
throw new Error(
`Failed to fetch models: ${res.status} ${res.statusText}`,
);
return res.json();
});
if (
!chatModel ||
!chatModelProvider ||
!embeddingModel ||
!embeddingModelProvider
) {
if (!chatModel || !chatModelProvider) {
const chatModelProviders = providers.chatModelProviders;
const chatModelProvidersKeys = Object.keys(chatModelProviders);
if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
return toast.error('No chat models available');
} else {
chatModelProvider =
chatModelProvidersKeys.find(
(provider) =>
Object.keys(chatModelProviders[provider]).length > 0,
) || chatModelProvidersKeys[0];
}
if (
chatModelProvider === 'custom_openai' &&
Object.keys(chatModelProviders[chatModelProvider]).length === 0
) {
toast.error(
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
);
return setHasError(true);
}
chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
}
if (!embeddingModel || !embeddingModelProvider) {
const embeddingModelProviders = providers.embeddingModelProviders;
if (
!embeddingModelProviders ||
Object.keys(embeddingModelProviders).length === 0
)
return toast.error('No embedding models available');
embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
embeddingModel = Object.keys(
embeddingModelProviders[embeddingModelProvider],
)[0];
}
localStorage.setItem('chatModel', chatModel!);
localStorage.setItem('chatModelProvider', chatModelProvider);
localStorage.setItem('embeddingModel', embeddingModel!);
localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
} else {
const chatModelProviders = providers.chatModelProviders;
const embeddingModelProviders = providers.embeddingModelProviders;
if (
Object.keys(chatModelProviders).length > 0 &&
(!chatModelProviders[chatModelProvider] ||
Object.keys(chatModelProviders[chatModelProvider]).length === 0)
) {
const chatModelProvidersKeys = Object.keys(chatModelProviders);
chatModelProvider =
chatModelProvidersKeys.find(
(key) => Object.keys(chatModelProviders[key]).length > 0,
) || chatModelProvidersKeys[0];
localStorage.setItem('chatModelProvider', chatModelProvider);
}
if (
chatModelProvider &&
!chatModelProviders[chatModelProvider][chatModel]
) {
if (
chatModelProvider === 'custom_openai' &&
Object.keys(chatModelProviders[chatModelProvider]).length === 0
) {
toast.error(
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
);
return setHasError(true);
}
chatModel = Object.keys(
chatModelProviders[
Object.keys(chatModelProviders[chatModelProvider]).length > 0
? chatModelProvider
: Object.keys(chatModelProviders)[0]
],
)[0];
localStorage.setItem('chatModel', chatModel);
}
if (
Object.keys(embeddingModelProviders).length > 0 &&
!embeddingModelProviders[embeddingModelProvider]
) {
embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
}
if (
embeddingModelProvider &&
!embeddingModelProviders[embeddingModelProvider][embeddingModel]
) {
embeddingModel = Object.keys(
embeddingModelProviders[embeddingModelProvider],
)[0];
localStorage.setItem('embeddingModel', embeddingModel);
}
}
setChatModelProvider({
name: chatModel!,
provider: chatModelProvider,
});
setEmbeddingModelProvider({
name: embeddingModel!,
provider: embeddingModelProvider,
});
setIsConfigReady(true);
} catch (err) {
console.error('An error occurred while checking the configuration:', err);
setIsConfigReady(false);
setHasError(true);
}
};
const loadMessages = async (
chatId: string,
setMessages: (messages: Message[]) => void,
setIsMessagesLoaded: (loaded: boolean) => void,
setChatHistory: (history: [string, string][]) => void,
setFocusMode: (mode: string) => void,
setNotFound: (notFound: boolean) => void,
setFiles: (files: File[]) => void,
setFileIds: (fileIds: string[]) => void,
) => {
const res = await fetch(`/api/chats/${chatId}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
});
if (res.status === 404) {
setNotFound(true);
setIsMessagesLoaded(true);
return;
}
const data = await res.json();
const messages = data.messages.map((msg: any) => {
return {
...msg,
...JSON.parse(msg.metadata),
};
}) as Message[];
setMessages(messages);
const history = messages.map((msg) => {
return [msg.role, msg.content];
}) as [string, string][];
console.debug(new Date(), 'app:messages_loaded');
document.title = messages[0].content;
const files = data.chat.files.map((file: any) => {
return {
fileName: file.name,
fileExtension: file.name.split('.').pop(),
fileId: file.fileId,
};
});
setFiles(files);
setFileIds(files.map((file: File) => file.fileId));
setChatHistory(history);
setFocusMode(data.chat.focusMode);
setIsMessagesLoaded(true);
};
const ChatWindow = ({ id }: { id?: string }) => {
const searchParams = useSearchParams();
const initialMessage = searchParams.get('q');
const [chatId, setChatId] = useState<string | undefined>(id);
const [newChatCreated, setNewChatCreated] = useState(false);
const [chatModelProvider, setChatModelProvider] = useState<ChatModelProvider>(
{
name: '',
provider: '',
},
);
const [embeddingModelProvider, setEmbeddingModelProvider] =
useState<EmbeddingModelProvider>({
name: '',
provider: '',
});
const [isConfigReady, setIsConfigReady] = useState(false);
const [hasError, setHasError] = useState(false);
const [isReady, setIsReady] = useState(false);
useEffect(() => {
checkConfig(
setChatModelProvider,
setEmbeddingModelProvider,
setIsConfigReady,
setHasError,
);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const [loading, setLoading] = useState(false);
const [messageAppeared, setMessageAppeared] = useState(false);
const [chatHistory, setChatHistory] = useState<[string, string][]>([]);
const [messages, setMessages] = useState<Message[]>([]);
const [files, setFiles] = useState<File[]>([]);
const [fileIds, setFileIds] = useState<string[]>([]);
const [focusMode, setFocusMode] = useState('webSearch');
const [optimizationMode, setOptimizationMode] = useState('speed');
const [isMessagesLoaded, setIsMessagesLoaded] = useState(false);
const [notFound, setNotFound] = useState(false);
useEffect(() => {
if (
chatId &&
!newChatCreated &&
!isMessagesLoaded &&
messages.length === 0
) {
loadMessages(
chatId,
setMessages,
setIsMessagesLoaded,
setChatHistory,
setFocusMode,
setNotFound,
setFiles,
setFileIds,
);
} else if (!chatId) {
setNewChatCreated(true);
setIsMessagesLoaded(true);
setChatId(crypto.randomBytes(20).toString('hex'));
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const messagesRef = useRef<Message[]>([]);
useEffect(() => {
messagesRef.current = messages;
}, [messages]);
useEffect(() => {
if (isMessagesLoaded && isConfigReady) {
setIsReady(true);
console.debug(new Date(), 'app:ready');
} else {
setIsReady(false);
}
}, [isMessagesLoaded, isConfigReady]);
const sendMessage = async (
message: string,
messageId?: string,
rewrite = false,
) => {
if (loading) return;
if (!isConfigReady) {
toast.error('Cannot send message before the configuration is ready');
return;
}
setLoading(true);
setMessageAppeared(false);
let sources: Document[] | undefined = undefined;
let recievedMessage = '';
let added = false;
messageId = messageId ?? crypto.randomBytes(7).toString('hex');
setMessages((prevMessages) => [
...prevMessages,
{
content: message,
messageId: messageId,
chatId: chatId!,
role: 'user',
createdAt: new Date(),
},
]);
const messageHandler = async (data: any) => {
if (data.type === 'error') {
toast.error(data.data);
setLoading(false);
return;
}
if (data.type === 'sources') {
sources = data.data;
if (!added) {
setMessages((prevMessages) => [
...prevMessages,
{
content: '',
messageId: data.messageId,
chatId: chatId!,
role: 'assistant',
sources: sources,
createdAt: new Date(),
},
]);
added = true;
}
setMessageAppeared(true);
}
if (data.type === 'message') {
if (!added) {
setMessages((prevMessages) => [
...prevMessages,
{
content: data.data,
messageId: data.messageId,
chatId: chatId!,
role: 'assistant',
sources: sources,
createdAt: new Date(),
},
]);
added = true;
}
setMessages((prev) =>
prev.map((message) => {
if (message.messageId === data.messageId) {
return { ...message, content: message.content + data.data };
}
return message;
}),
);
recievedMessage += data.data;
setMessageAppeared(true);
}
if (data.type === 'messageEnd') {
setChatHistory((prevHistory) => [
...prevHistory,
['human', message],
['assistant', recievedMessage],
]);
setLoading(false);
const lastMsg = messagesRef.current[messagesRef.current.length - 1];
const autoImageSearch = localStorage.getItem('autoImageSearch');
const autoVideoSearch = localStorage.getItem('autoVideoSearch');
if (autoImageSearch === 'true') {
document
.getElementById(`search-images-${lastMsg.messageId}`)
?.click();
}
if (autoVideoSearch === 'true') {
document
.getElementById(`search-videos-${lastMsg.messageId}`)
?.click();
}
if (
lastMsg.role === 'assistant' &&
lastMsg.sources &&
lastMsg.sources.length > 0 &&
!lastMsg.suggestions
) {
const suggestions = await getSuggestions(messagesRef.current);
setMessages((prev) =>
prev.map((msg) => {
if (msg.messageId === lastMsg.messageId) {
return { ...msg, suggestions: suggestions };
}
return msg;
}),
);
}
}
};
const messageIndex = messages.findIndex((m) => m.messageId === messageId);
const res = await fetch('/api/chat', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
content: message,
message: {
messageId: messageId,
chatId: chatId!,
content: message,
},
chatId: chatId!,
files: fileIds,
focusMode: focusMode,
optimizationMode: optimizationMode,
history: rewrite
? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex)
: chatHistory,
chatModel: {
name: chatModelProvider.name,
provider: chatModelProvider.provider,
},
embeddingModel: {
name: embeddingModelProvider.name,
provider: embeddingModelProvider.provider,
},
systemInstructions: localStorage.getItem('systemInstructions'),
}),
});
if (!res.body) throw new Error('No response body');
const reader = res.body?.getReader();
const decoder = new TextDecoder('utf-8');
let partialChunk = '';
while (true) {
const { value, done } = await reader.read();
if (done) break;
partialChunk += decoder.decode(value, { stream: true });
try {
const messages = partialChunk.split('\n');
for (const msg of messages) {
if (!msg.trim()) continue;
const json = JSON.parse(msg);
messageHandler(json);
}
partialChunk = '';
} catch (error) {
console.warn('Incomplete JSON, waiting for next chunk...');
}
}
};
const rewrite = (messageId: string) => {
const index = messages.findIndex((msg) => msg.messageId === messageId);
if (index === -1) return;
const message = messages[index - 1];
setMessages((prev) => {
return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
});
setChatHistory((prev) => {
return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
});
sendMessage(message.content, message.messageId, true);
};
useEffect(() => {
if (isReady && initialMessage && isConfigReady) {
sendMessage(initialMessage);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isConfigReady, isReady, initialMessage]);
   if (hasError) {
     return (
       <div className="relative">
@@ -594,31 +51,11 @@ const ChatWindow = ({ id }: { id?: string }) => {
     <div>
       {messages.length > 0 ? (
         <>
-          <Navbar chatId={chatId!} messages={messages} />
-          <Chat
-            loading={loading}
-            messages={messages}
-            sendMessage={sendMessage}
-            messageAppeared={messageAppeared}
-            rewrite={rewrite}
-            fileIds={fileIds}
-            setFileIds={setFileIds}
-            files={files}
-            setFiles={setFiles}
-          />
+          <Navbar />
+          <Chat />
         </>
       ) : (
-        <EmptyChat
-          sendMessage={sendMessage}
-          focusMode={focusMode}
-          setFocusMode={setFocusMode}
-          optimizationMode={optimizationMode}
-          setOptimizationMode={setOptimizationMode}
-          fileIds={fileIds}
-          setFileIds={setFileIds}
-          files={files}
-          setFiles={setFiles}
-        />
+        <EmptyChat />
       )}
     </div>
   )

View File

@@ -5,27 +5,7 @@ import Link from 'next/link';
 import WeatherWidget from './WeatherWidget';
 import NewsArticleWidget from './NewsArticleWidget';

-const EmptyChat = ({
-  sendMessage,
-  focusMode,
-  setFocusMode,
-  optimizationMode,
-  setOptimizationMode,
-  fileIds,
-  setFileIds,
-  files,
-  setFiles,
-}: {
-  sendMessage: (message: string) => void;
-  focusMode: string;
-  setFocusMode: (mode: string) => void;
-  optimizationMode: string;
-  setOptimizationMode: (mode: string) => void;
-  fileIds: string[];
-  setFileIds: (fileIds: string[]) => void;
-  files: File[];
-  setFiles: (files: File[]) => void;
-}) => {
+const EmptyChat = () => {
   return (
     <div className="relative">
       <div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5">
@@ -38,17 +18,7 @@ const EmptyChat = ({
   <h2 className="text-black/70 dark:text-white/70 text-3xl font-medium -mt-8">
     Research begins here.
   </h2>
-  <EmptyChatMessageInput
-    sendMessage={sendMessage}
-    focusMode={focusMode}
-    setFocusMode={setFocusMode}
-    optimizationMode={optimizationMode}
-    setOptimizationMode={setOptimizationMode}
-    fileIds={fileIds}
-    setFileIds={setFileIds}
-    files={files}
-    setFiles={setFiles}
-  />
+  <EmptyChatMessageInput />
 </div>
 <div className="flex flex-col w-full gap-4 mt-2 sm:flex-row sm:justify-center">
   <div className="flex-1 w-full">

View File

@@ -1,34 +1,15 @@
 import { ArrowRight } from 'lucide-react';
 import { useEffect, useRef, useState } from 'react';
 import TextareaAutosize from 'react-textarea-autosize';
-import CopilotToggle from './MessageInputActions/Copilot';
 import Focus from './MessageInputActions/Focus';
 import Optimization from './MessageInputActions/Optimization';
 import Attach from './MessageInputActions/Attach';
-import { File } from './ChatWindow';
+import { useChat } from '@/lib/hooks/useChat';

-const EmptyChatMessageInput = ({
-  sendMessage,
-  focusMode,
-  setFocusMode,
-  optimizationMode,
-  setOptimizationMode,
-  fileIds,
-  setFileIds,
-  files,
-  setFiles,
-}: {
-  sendMessage: (message: string) => void;
-  focusMode: string;
-  setFocusMode: (mode: string) => void;
-  optimizationMode: string;
-  setOptimizationMode: (mode: string) => void;
-  fileIds: string[];
-  setFileIds: (fileIds: string[]) => void;
-  files: File[];
-  setFiles: (files: File[]) => void;
-}) => {
-  const [copilotEnabled, setCopilotEnabled] = useState(false);
+const EmptyChatMessageInput = () => {
+  const { sendMessage } = useChat();
+
+  /* const [copilotEnabled, setCopilotEnabled] = useState(false); */
   const [message, setMessage] = useState('');
   const inputRef = useRef<HTMLTextAreaElement | null>(null);
@@ -84,20 +65,11 @@ const EmptyChatMessageInput = ({
   />
   <div className="flex flex-row items-center justify-between mt-4">
     <div className="flex flex-row items-center space-x-2 lg:space-x-4">
-      <Focus focusMode={focusMode} setFocusMode={setFocusMode} />
-      <Attach
-        fileIds={fileIds}
-        setFileIds={setFileIds}
-        files={files}
-        setFiles={setFiles}
-        showText
-      />
+      <Focus />
+      <Attach showText />
     </div>
     <div className="flex flex-row items-center space-x-1 sm:space-x-4">
-      <Optimization
-        optimizationMode={optimizationMode}
-        setOptimizationMode={setOptimizationMode}
-      />
+      <Optimization />
       <button
         disabled={message.trim().length === 0}
         className="bg-[#24A0ED] text-white disabled:text-black/50 dark:disabled:text-white/50 disabled:bg-[#e0e0dc] dark:disabled:bg-[#ececec21] hover:bg-opacity-85 transition duration-100 rounded-full p-2"

View File

@@ -20,6 +20,7 @@ import SearchImages from './SearchImages';
 import SearchVideos from './SearchVideos';
 import { useSpeech } from 'react-text-to-speech';
 import ThinkBox from './ThinkBox';
+import { useChat } from '@/lib/hooks/useChat';

 const ThinkTagProcessor = ({
   children,
@@ -36,22 +37,16 @@ const ThinkTagProcessor = ({
 const MessageBox = ({
   message,
   messageIndex,
-  history,
-  loading,
   dividerRef,
   isLast,
-  rewrite,
-  sendMessage,
 }: {
   message: Message;
   messageIndex: number;
-  history: Message[];
-  loading: boolean;
   dividerRef?: MutableRefObject<HTMLDivElement | null>;
   isLast: boolean;
-  rewrite: (messageId: string) => void;
-  sendMessage: (message: string) => void;
 }) => {
+  const { loading, messages: history, sendMessage, rewrite } = useChat();
   const [parsedMessage, setParsedMessage] = useState(message.content);
   const [speechMessage, setSpeechMessage] = useState(message.content);
   const [thinkingEnded, setThinkingEnded] = useState(false);

View File

@@ -6,22 +6,11 @@ import Attach from './MessageInputActions/Attach';
 import CopilotToggle from './MessageInputActions/Copilot';
 import { File } from './ChatWindow';
 import AttachSmall from './MessageInputActions/AttachSmall';
+import { useChat } from '@/lib/hooks/useChat';

-const MessageInput = ({
-  sendMessage,
-  loading,
-  fileIds,
-  setFileIds,
-  files,
-  setFiles,
-}: {
-  sendMessage: (message: string) => void;
-  loading: boolean;
-  fileIds: string[];
-  setFileIds: (fileIds: string[]) => void;
-  files: File[];
-  setFiles: (files: File[]) => void;
-}) => {
+const MessageInput = () => {
+  const { loading, sendMessage } = useChat();
   const [copilotEnabled, setCopilotEnabled] = useState(false);
   const [message, setMessage] = useState('');
   const [textareaRows, setTextareaRows] = useState(1);
@@ -79,14 +68,7 @@ const MessageInput = ({
     mode === 'multi' ? 'flex-col rounded-lg' : 'flex-row rounded-full',
   )}
 >
-  {mode === 'single' && (
-    <AttachSmall
-      fileIds={fileIds}
-      setFileIds={setFileIds}
-      files={files}
-      setFiles={setFiles}
-    />
-  )}
+  {mode === 'single' && <AttachSmall />}
   <TextareaAutosize
     ref={inputRef}
     value={message}
@@ -113,12 +95,7 @@ const MessageInput = ({
   )}
   {mode === 'multi' && (
     <div className="flex flex-row items-center justify-between w-full pt-2">
-      <AttachSmall
-        fileIds={fileIds}
-        setFileIds={setFileIds}
-        files={files}
-        setFiles={setFiles}
-      />
+      <AttachSmall />
       <div className="flex flex-row items-center space-x-4">
         <CopilotToggle
           copilotEnabled={copilotEnabled}

View File

@@ -7,21 +7,11 @@ import {
 } from '@headlessui/react';
 import { CopyPlus, File, LoaderCircle, Plus, Trash } from 'lucide-react';
 import { Fragment, useRef, useState } from 'react';
-import { File as FileType } from '../ChatWindow';
+import { useChat } from '@/lib/hooks/useChat';

-const Attach = ({
-  fileIds,
-  setFileIds,
-  showText,
-  files,
-  setFiles,
-}: {
-  fileIds: string[];
-  setFileIds: (fileIds: string[]) => void;
-  showText?: boolean;
-  files: FileType[];
-  setFiles: (files: FileType[]) => void;
-}) => {
+const Attach = ({ showText }: { showText?: boolean }) => {
+  const { files, setFiles, setFileIds, fileIds } = useChat();
   const [loading, setLoading] = useState(false);
   const fileInputRef = useRef<any>();
@@ -142,8 +132,8 @@ const Attach = ({
   key={i}
   className="flex flex-row items-center justify-start w-full space-x-3 p-3"
 >
-  <div className="bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
-    <File size={16} className="text-white/70" />
+  <div className="bg-light-100 dark:bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
+    <File size={16} className="text-black/70 dark:text-white/70" />
   </div>
   <p className="text-black/70 dark:text-white/70 text-sm">
     {file.fileName.length > 25

View File

@@ -8,18 +8,11 @@ import {
 import { CopyPlus, File, LoaderCircle, Plus, Trash } from 'lucide-react';
 import { Fragment, useRef, useState } from 'react';
-import { File as FileType } from '../ChatWindow';
+import { useChat } from '@/lib/hooks/useChat';

-const AttachSmall = ({
-  fileIds,
-  setFileIds,
-  files,
-  setFiles,
-}: {
-  fileIds: string[];
-  setFileIds: (fileIds: string[]) => void;
-  files: FileType[];
-  setFiles: (files: FileType[]) => void;
-}) => {
+const AttachSmall = () => {
+  const { files, setFiles, setFileIds, fileIds } = useChat();
   const [loading, setLoading] = useState(false);
   const fileInputRef = useRef<any>();
@@ -114,8 +107,8 @@ const AttachSmall = ({
   key={i}
   className="flex flex-row items-center justify-start w-full space-x-3 p-3"
 >
-  <div className="bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
-    <File size={16} className="text-white/70" />
+  <div className="bg-light-100 dark:bg-dark-100 flex items-center justify-center w-10 h-10 rounded-md">
+    <File size={16} className="text-black/70 dark:text-white/70" />
   </div>
   <p className="text-black/70 dark:text-white/70 text-sm">
     {file.fileName.length > 25

View File

@@ -15,6 +15,7 @@ import {
 } from '@headlessui/react';
 import { SiReddit, SiYoutube } from '@icons-pack/react-simple-icons';
 import { Fragment } from 'react';
+import { useChat } from '@/lib/hooks/useChat';

 const focusModes = [
   {
@@ -55,13 +56,9 @@ const focusModes = [
   },
 ];

-const Focus = ({
-  focusMode,
-  setFocusMode,
-}: {
-  focusMode: string;
-  setFocusMode: (mode: string) => void;
-}) => {
+const Focus = () => {
+  const { focusMode, setFocusMode } = useChat();
   return (
     <Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg mt-[6.5px]">
       <PopoverButton

View File

@@ -7,6 +7,7 @@ import {
   Transition,
 } from '@headlessui/react';
 import { Fragment } from 'react';
+import { useChat } from '@/lib/hooks/useChat';

 const OptimizationModes = [
   {
@@ -34,13 +35,9 @@ const OptimizationModes = [
   },
 ];

-const Optimization = ({
-  optimizationMode,
-  setOptimizationMode,
-}: {
-  optimizationMode: string;
-  setOptimizationMode: (mode: string) => void;
-}) => {
+const Optimization = () => {
+  const { optimizationMode, setOptimizationMode } = useChat();
   return (
     <Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
       <PopoverButton

View File

@@ -10,6 +10,7 @@ import {
   Transition,
 } from '@headlessui/react';
 import jsPDF from 'jspdf';
+import { useChat } from '@/lib/hooks/useChat';

 const downloadFile = (filename: string, content: string, type: string) => {
   const blob = new Blob([content], { type });
@@ -118,16 +119,12 @@ const exportAsPDF = (messages: Message[], title: string) => {
   doc.save(`${title || 'chat'}.pdf`);
 };

-const Navbar = ({
-  chatId,
-  messages,
-}: {
-  messages: Message[];
-  chatId: string;
-}) => {
+const Navbar = () => {
   const [title, setTitle] = useState<string>('');
   const [timeAgo, setTimeAgo] = useState<string>('');
+  const { messages, chatId } = useChat();

   useEffect(() => {
     if (messages.length > 0) {
       const newTitle =
@@ -206,7 +203,7 @@ const Navbar = ({
         </PopoverPanel>
       </Transition>
     </Popover>
-    <DeleteChat redirect chatId={chatId} chats={[]} setChats={() => {}} />
+    <DeleteChat redirect chatId={chatId!} chats={[]} setChats={() => {}} />
   </div>
 </div>
 );

View File

@@ -31,6 +31,7 @@ interface Config {
   };
   OLLAMA: {
     API_URL: string;
+    API_KEY: string;
   };
   DEEPSEEK: {
     API_KEY: string;
@@ -86,6 +87,8 @@ export const getSearxngApiEndpoint = () =>

 export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;

+export const getOllamaApiKey = () => loadConfig().MODELS.OLLAMA.API_KEY;
+
 export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;

 export const getAimlApiKey = () => loadConfig().MODELS.AIMLAPI.API_KEY;

src/lib/hooks/useChat.tsx (new file, 643 lines)

View File

@@ -0,0 +1,643 @@
'use client';
import { Message } from '@/components/ChatWindow';
import { createContext, useContext, useEffect, useRef, useState } from 'react';
import crypto from 'crypto';
import { useSearchParams } from 'next/navigation';
import { toast } from 'sonner';
import { Document } from '@langchain/core/documents';
import { getSuggestions } from '../actions';
type ChatContext = {
messages: Message[];
chatHistory: [string, string][];
files: File[];
fileIds: string[];
focusMode: string;
chatId: string | undefined;
optimizationMode: string;
isMessagesLoaded: boolean;
loading: boolean;
notFound: boolean;
messageAppeared: boolean;
isReady: boolean;
hasError: boolean;
setOptimizationMode: (mode: string) => void;
setFocusMode: (mode: string) => void;
setFiles: (files: File[]) => void;
setFileIds: (fileIds: string[]) => void;
sendMessage: (
message: string,
messageId?: string,
rewrite?: boolean,
) => Promise<void>;
rewrite: (messageId: string) => void;
};
export interface File {
fileName: string;
fileExtension: string;
fileId: string;
}
interface ChatModelProvider {
name: string;
provider: string;
}
interface EmbeddingModelProvider {
name: string;
provider: string;
}
const checkConfig = async (
setChatModelProvider: (provider: ChatModelProvider) => void,
setEmbeddingModelProvider: (provider: EmbeddingModelProvider) => void,
setIsConfigReady: (ready: boolean) => void,
setHasError: (hasError: boolean) => void,
) => {
try {
let chatModel = localStorage.getItem('chatModel');
let chatModelProvider = localStorage.getItem('chatModelProvider');
let embeddingModel = localStorage.getItem('embeddingModel');
let embeddingModelProvider = localStorage.getItem('embeddingModelProvider');
const autoImageSearch = localStorage.getItem('autoImageSearch');
const autoVideoSearch = localStorage.getItem('autoVideoSearch');
if (!autoImageSearch) {
localStorage.setItem('autoImageSearch', 'true');
}
if (!autoVideoSearch) {
localStorage.setItem('autoVideoSearch', 'false');
}
const providers = await fetch(`/api/models`, {
headers: {
'Content-Type': 'application/json',
},
}).then(async (res) => {
if (!res.ok)
throw new Error(
`Failed to fetch models: ${res.status} ${res.statusText}`,
);
return res.json();
});
if (
!chatModel ||
!chatModelProvider ||
!embeddingModel ||
!embeddingModelProvider
) {
if (!chatModel || !chatModelProvider) {
const chatModelProviders = providers.chatModelProviders;
const chatModelProvidersKeys = Object.keys(chatModelProviders);
if (!chatModelProviders || chatModelProvidersKeys.length === 0) {
return toast.error('No chat models available');
} else {
chatModelProvider =
chatModelProvidersKeys.find(
(provider) =>
Object.keys(chatModelProviders[provider]).length > 0,
) || chatModelProvidersKeys[0];
}
if (
chatModelProvider === 'custom_openai' &&
Object.keys(chatModelProviders[chatModelProvider]).length === 0
) {
toast.error(
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
);
return setHasError(true);
}
chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
}
if (!embeddingModel || !embeddingModelProvider) {
const embeddingModelProviders = providers.embeddingModelProviders;
if (
!embeddingModelProviders ||
Object.keys(embeddingModelProviders).length === 0
)
return toast.error('No embedding models available');
embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
embeddingModel = Object.keys(
embeddingModelProviders[embeddingModelProvider],
)[0];
}
localStorage.setItem('chatModel', chatModel!);
localStorage.setItem('chatModelProvider', chatModelProvider);
localStorage.setItem('embeddingModel', embeddingModel!);
localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
} else {
const chatModelProviders = providers.chatModelProviders;
const embeddingModelProviders = providers.embeddingModelProviders;
if (
Object.keys(chatModelProviders).length > 0 &&
(!chatModelProviders[chatModelProvider] ||
Object.keys(chatModelProviders[chatModelProvider]).length === 0)
) {
const chatModelProvidersKeys = Object.keys(chatModelProviders);
chatModelProvider =
chatModelProvidersKeys.find(
(key) => Object.keys(chatModelProviders[key]).length > 0,
) || chatModelProvidersKeys[0];
localStorage.setItem('chatModelProvider', chatModelProvider);
}
if (
chatModelProvider &&
!chatModelProviders[chatModelProvider][chatModel]
) {
if (
chatModelProvider === 'custom_openai' &&
Object.keys(chatModelProviders[chatModelProvider]).length === 0
) {
toast.error(
"Looks like you haven't configured any chat model providers. Please configure them from the settings page or the config file.",
);
return setHasError(true);
}
chatModel = Object.keys(
chatModelProviders[
Object.keys(chatModelProviders[chatModelProvider]).length > 0
? chatModelProvider
: Object.keys(chatModelProviders)[0]
],
)[0];
localStorage.setItem('chatModel', chatModel);
}
if (
Object.keys(embeddingModelProviders).length > 0 &&
!embeddingModelProviders[embeddingModelProvider]
) {
embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
localStorage.setItem('embeddingModelProvider', embeddingModelProvider);
}
if (
embeddingModelProvider &&
!embeddingModelProviders[embeddingModelProvider][embeddingModel]
) {
embeddingModel = Object.keys(
embeddingModelProviders[embeddingModelProvider],
)[0];
localStorage.setItem('embeddingModel', embeddingModel);
}
}
setChatModelProvider({
name: chatModel!,
provider: chatModelProvider,
});
setEmbeddingModelProvider({
name: embeddingModel!,
provider: embeddingModelProvider,
});
setIsConfigReady(true);
} catch (err) {
console.error('An error occurred while checking the configuration:', err);
setIsConfigReady(false);
setHasError(true);
}
};
const loadMessages = async (
chatId: string,
setMessages: (messages: Message[]) => void,
setIsMessagesLoaded: (loaded: boolean) => void,
setChatHistory: (history: [string, string][]) => void,
setFocusMode: (mode: string) => void,
setNotFound: (notFound: boolean) => void,
setFiles: (files: File[]) => void,
setFileIds: (fileIds: string[]) => void,
) => {
const res = await fetch(`/api/chats/${chatId}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
});
if (res.status === 404) {
setNotFound(true);
setIsMessagesLoaded(true);
return;
}
const data = await res.json();
const messages = data.messages.map((msg: any) => {
return {
...msg,
...JSON.parse(msg.metadata),
};
}) as Message[];
setMessages(messages);
const history = messages.map((msg) => {
return [msg.role, msg.content];
}) as [string, string][];
console.debug(new Date(), 'app:messages_loaded');
document.title = messages[0].content;
const files = data.chat.files.map((file: any) => {
return {
fileName: file.name,
fileExtension: file.name.split('.').pop(),
fileId: file.fileId,
};
});
setFiles(files);
setFileIds(files.map((file: File) => file.fileId));
setChatHistory(history);
setFocusMode(data.chat.focusMode);
setIsMessagesLoaded(true);
};
export const chatContext = createContext<ChatContext>({
chatHistory: [],
chatId: '',
fileIds: [],
files: [],
focusMode: '',
hasError: false,
isMessagesLoaded: false,
isReady: false,
loading: false,
messageAppeared: false,
messages: [],
notFound: false,
optimizationMode: '',
rewrite: () => {},
sendMessage: async () => {},
setFileIds: () => {},
setFiles: () => {},
setFocusMode: () => {},
setOptimizationMode: () => {},
});
export const ChatProvider = ({
children,
id,
}: {
children: React.ReactNode;
id?: string;
}) => {
const searchParams = useSearchParams();
const initialMessage = searchParams.get('q');
const [chatId, setChatId] = useState<string | undefined>(id);
const [newChatCreated, setNewChatCreated] = useState(false);
const [loading, setLoading] = useState(false);
const [messageAppeared, setMessageAppeared] = useState(false);
const [chatHistory, setChatHistory] = useState<[string, string][]>([]);
const [messages, setMessages] = useState<Message[]>([]);
const [files, setFiles] = useState<File[]>([]);
const [fileIds, setFileIds] = useState<string[]>([]);
const [focusMode, setFocusMode] = useState('webSearch');
const [optimizationMode, setOptimizationMode] = useState('speed');
const [isMessagesLoaded, setIsMessagesLoaded] = useState(false);
const [notFound, setNotFound] = useState(false);
const [chatModelProvider, setChatModelProvider] = useState<ChatModelProvider>(
{
name: '',
provider: '',
},
);
const [embeddingModelProvider, setEmbeddingModelProvider] =
useState<EmbeddingModelProvider>({
name: '',
provider: '',
});
const [isConfigReady, setIsConfigReady] = useState(false);
const [hasError, setHasError] = useState(false);
const [isReady, setIsReady] = useState(false);
const messagesRef = useRef<Message[]>([]);
useEffect(() => {
checkConfig(
setChatModelProvider,
setEmbeddingModelProvider,
setIsConfigReady,
setHasError,
);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
useEffect(() => {
if (
chatId &&
!newChatCreated &&
!isMessagesLoaded &&
messages.length === 0
) {
loadMessages(
chatId,
setMessages,
setIsMessagesLoaded,
setChatHistory,
setFocusMode,
setNotFound,
setFiles,
setFileIds,
);
} else if (!chatId) {
setNewChatCreated(true);
setIsMessagesLoaded(true);
setChatId(crypto.randomBytes(20).toString('hex'));
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
useEffect(() => {
messagesRef.current = messages;
}, [messages]);
useEffect(() => {
if (isMessagesLoaded && isConfigReady) {
setIsReady(true);
console.debug(new Date(), 'app:ready');
} else {
setIsReady(false);
}
}, [isMessagesLoaded, isConfigReady]);
const rewrite = (messageId: string) => {
const index = messages.findIndex((msg) => msg.messageId === messageId);
if (index === -1) return;
const message = messages[index - 1];
setMessages((prev) => {
return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
});
setChatHistory((prev) => {
return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
});
sendMessage(message.content, message.messageId, true);
};
useEffect(() => {
if (isReady && initialMessage && isConfigReady) {
if (!isConfigReady) {
toast.error('Cannot send message before the configuration is ready');
return;
}
sendMessage(initialMessage);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isConfigReady, isReady, initialMessage]);
const sendMessage: ChatContext['sendMessage'] = async (
message,
messageId,
rewrite = false,
) => {
if (loading) return;
setLoading(true);
setMessageAppeared(false);
let sources: Document[] | undefined = undefined;
let recievedMessage = '';
let added = false;
messageId = messageId ?? crypto.randomBytes(7).toString('hex');
setMessages((prevMessages) => [
...prevMessages,
{
content: message,
messageId: messageId,
chatId: chatId!,
role: 'user',
createdAt: new Date(),
},
]);
const messageHandler = async (data: any) => {
if (data.type === 'error') {
toast.error(data.data);
setLoading(false);
return;
}
if (data.type === 'sources') {
sources = data.data;
if (!added) {
setMessages((prevMessages) => [
...prevMessages,
{
content: '',
messageId: data.messageId,
chatId: chatId!,
role: 'assistant',
sources: sources,
createdAt: new Date(),
},
]);
added = true;
}
setMessageAppeared(true);
}
if (data.type === 'message') {
if (!added) {
setMessages((prevMessages) => [
...prevMessages,
{
content: data.data,
messageId: data.messageId,
chatId: chatId!,
role: 'assistant',
sources: sources,
createdAt: new Date(),
},
]);
added = true;
}
setMessages((prev) =>
prev.map((message) => {
if (message.messageId === data.messageId) {
return { ...message, content: message.content + data.data };
}
return message;
}),
);
recievedMessage += data.data;
setMessageAppeared(true);
}
if (data.type === 'messageEnd') {
setChatHistory((prevHistory) => [
...prevHistory,
['human', message],
['assistant', recievedMessage],
]);
setLoading(false);
const lastMsg = messagesRef.current[messagesRef.current.length - 1];
const autoImageSearch = localStorage.getItem('autoImageSearch');
const autoVideoSearch = localStorage.getItem('autoVideoSearch');
if (autoImageSearch === 'true') {
document
.getElementById(`search-images-${lastMsg.messageId}`)
?.click();
}
if (autoVideoSearch === 'true') {
document
.getElementById(`search-videos-${lastMsg.messageId}`)
?.click();
}
if (
lastMsg.role === 'assistant' &&
lastMsg.sources &&
lastMsg.sources.length > 0 &&
!lastMsg.suggestions
) {
const suggestions = await getSuggestions(messagesRef.current);
setMessages((prev) =>
prev.map((msg) => {
if (msg.messageId === lastMsg.messageId) {
return { ...msg, suggestions: suggestions };
}
return msg;
}),
);
}
}
};
const messageIndex = messages.findIndex((m) => m.messageId === messageId);
const res = await fetch('/api/chat', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
content: message,
message: {
messageId: messageId,
chatId: chatId!,
content: message,
},
chatId: chatId!,
files: fileIds,
focusMode: focusMode,
optimizationMode: optimizationMode,
history: rewrite
? chatHistory.slice(0, messageIndex === -1 ? undefined : messageIndex)
: chatHistory,
chatModel: {
name: chatModelProvider.name,
provider: chatModelProvider.provider,
},
embeddingModel: {
name: embeddingModelProvider.name,
provider: embeddingModelProvider.provider,
},
systemInstructions: localStorage.getItem('systemInstructions'),
}),
});
if (!res.body) throw new Error('No response body');
const reader = res.body?.getReader();
const decoder = new TextDecoder('utf-8');
let partialChunk = '';
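// The /api/chat response is a newline-delimited JSON stream: each line is one
// event object. A network chunk can end mid-object, so decoded text is
// accumulated in partialChunk and only cleared once every line in the buffer
// parses as valid JSON (the catch below simply waits for more data).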
while (true) {
const { value, done } = await reader.read();
if (done) break;
partialChunk += decoder.decode(value, { stream: true });
try {
const messages = partialChunk.split('\n');
for (const msg of messages) {
if (!msg.trim()) continue;
const json = JSON.parse(msg);
messageHandler(json);
}
partialChunk = '';
} catch (error) {
console.warn('Incomplete JSON, waiting for next chunk...');
}
}
};
return (
<chatContext.Provider
value={{
messages,
chatHistory,
files,
fileIds,
focusMode,
chatId,
hasError,
isMessagesLoaded,
isReady,
loading,
messageAppeared,
notFound,
optimizationMode,
setFileIds,
setFiles,
setFocusMode,
setOptimizationMode,
rewrite,
sendMessage,
}}
>
{children}
</chatContext.Provider>
);
};
export const useChat = () => {
const ctx = useContext(chatContext);
return ctx;
};
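Any component rendered below `ChatProvider` can now read chat state through `useChat`. A minimal consumer for illustration only (the component itself is hypothetical and not part of this changeset; the fields come from the `ChatContext` type above):

```tsx
'use client';

import { useChat } from '@/lib/hooks/useChat';

// Hypothetical example component, not part of this PR.
const QuickAsk = () => {
  const { loading, sendMessage } = useChat();

  return (
    <input
      disabled={loading}
      placeholder="Ask something..."
      onKeyDown={(e) => {
        // sendMessage generates a message ID and streams the response.
        if (e.key === 'Enter') sendMessage(e.currentTarget.value);
      }}
    />
  );
};

export default QuickAsk;
```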

View File

@@ -120,7 +120,11 @@ export const getAvailableChatModelProviders = async () => {
   model: new ChatOpenAI({
     apiKey: customOpenAiApiKey,
     modelName: customOpenAiModelName,
-    temperature: 0.7,
+    ...((() => {
+      const temperatureRestrictedModels = ['gpt-5-nano', 'gpt-5', 'gpt-5-mini', 'o1', 'o3', 'o3-mini', 'o4-mini'];
+      const isTemperatureRestricted = temperatureRestrictedModels.some((restrictedModel) => customOpenAiModelName.includes(restrictedModel));
+      return isTemperatureRestricted ? {} : { temperature: 0.7 };
+    })()),
     configuration: {
       baseURL: customOpenAiApiUrl,
     },
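The inline IIFE above conditionally spreads `temperature` so that restricted models, which per the commit message return 400 errors for anything but the default value, never receive the parameter at all. The same list reappears in the OpenAI provider further down; the shared logic could be factored into a small helper. A sketch, not part of the diff:

```ts
// Sketch: these models only accept the default temperature, so the key must
// be omitted entirely rather than set to some other value.
const TEMPERATURE_RESTRICTED = ['gpt-5-nano', 'gpt-5', 'gpt-5-mini', 'o1', 'o3', 'o3-mini', 'o4-mini'];

const temperatureFor = (modelName: string): { temperature?: number } =>
  TEMPERATURE_RESTRICTED.some((m) => modelName.includes(m))
    ? {}
    : { temperature: 0.7 };

// Usage: new ChatOpenAI({ apiKey, modelName, ...temperatureFor(modelName) })
```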

View File

@@ -1,5 +1,5 @@
 import axios from 'axios';
-import { getKeepAlive, getOllamaApiEndpoint } from '../config';
+import { getKeepAlive, getOllamaApiEndpoint, getOllamaApiKey } from '../config';
 import { ChatModel, EmbeddingModel } from '.';

 export const PROVIDER_INFO = {
@@ -11,6 +11,7 @@ import { OllamaEmbeddings } from '@langchain/ollama';

 export const loadOllamaChatModels = async () => {
   const ollamaApiEndpoint = getOllamaApiEndpoint();
+  const ollamaApiKey = getOllamaApiKey();

   if (!ollamaApiEndpoint) return {};
@@ -33,6 +34,9 @@ export const loadOllamaChatModels = async () => {
   model: model.model,
   temperature: 0.7,
   keepAlive: getKeepAlive(),
+  ...(ollamaApiKey
+    ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
+    : {}),
 }),
 };
 });
@@ -46,6 +50,7 @@ export const loadOllamaChatModels = async () => {
 export const loadOllamaEmbeddingModels = async () => {
   const ollamaApiEndpoint = getOllamaApiEndpoint();
+  const ollamaApiKey = getOllamaApiKey();

   if (!ollamaApiEndpoint) return {};
@@ -66,6 +71,9 @@ export const loadOllamaEmbeddingModels = async () => {
   model: new OllamaEmbeddings({
     baseUrl: ollamaApiEndpoint,
     model: model.model,
+    ...(ollamaApiKey
+      ? { headers: { Authorization: `Bearer ${ollamaApiKey}` } }
+      : {}),
   }),
 };
 });

View File

@@ -26,6 +26,10 @@ const openaiChatModels: Record<string, string>[] = [
   displayName: 'GPT-4 omni',
   key: 'gpt-4o',
 },
+{
+  displayName: 'GPT-4o (2024-05-13)',
+  key: 'gpt-4o-2024-05-13',
+},
 {
   displayName: 'GPT-4 omni mini',
   key: 'gpt-4o-mini',
@@ -47,12 +51,28 @@ const openaiChatModels: Record<string, string>[] = [
   key: 'gpt-5-nano',
 },
 {
-  displayName: 'GPT 5 mini',
+  displayName: 'GPT 5',
+  key: 'gpt-5',
+},
+{
+  displayName: 'GPT 5 Mini',
   key: 'gpt-5-mini',
 },
 {
-  displayName: 'GPT 5',
-  key: 'gpt-5',
+  displayName: 'o1',
+  key: 'o1',
+},
+{
+  displayName: 'o3',
+  key: 'o3',
+},
+{
+  displayName: 'o3 Mini',
+  key: 'o3-mini',
+},
+{
+  displayName: 'o4 Mini',
+  key: 'o4-mini',
 },
 ];
@@ -76,13 +96,23 @@ export const loadOpenAIChatModels = async () => {
   const chatModels: Record<string, ChatModel> = {};

   openaiChatModels.forEach((model) => {
+    // Models that only support temperature = 1
+    const temperatureRestrictedModels = ['gpt-5-nano', 'gpt-5', 'gpt-5-mini', 'o1', 'o3', 'o3-mini', 'o4-mini'];
+    const isTemperatureRestricted = temperatureRestrictedModels.some((restrictedModel) => model.key.includes(restrictedModel));
+
+    const modelConfig: any = {
+      apiKey: openaiApiKey,
+      modelName: model.key,
+    };
+
+    // Only add temperature if the model supports it
+    if (!isTemperatureRestricted) {
+      modelConfig.temperature = 0.7;
+    }
+
     chatModels[model.key] = {
       displayName: model.displayName,
-      model: new ChatOpenAI({
-        apiKey: openaiApiKey,
-        modelName: model.key,
-        temperature: model.key.includes('gpt-5') ? 1 : 0.7,
-      }) as unknown as BaseChatModel,
+      model: new ChatOpenAI(modelConfig) as unknown as BaseChatModel,
     };
   });