Apply context window everywhere. Ensure styling is good on all screen sizes. Clean up inconsistencies with upstream branch.

This commit is contained in:
Willie Zutz
2025-04-20 13:10:59 -06:00
parent 0bb860b154
commit 67ee9eff53
11 changed files with 30 additions and 54 deletions

View File

@@ -7,11 +7,13 @@ import {
import { getAvailableChatModelProviders } from '@/lib/providers'; import { getAvailableChatModelProviders } from '@/lib/providers';
import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages'; import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
import { ChatOllama } from '@langchain/ollama';
import { ChatOpenAI } from '@langchain/openai'; import { ChatOpenAI } from '@langchain/openai';
interface ChatModel { interface ChatModel {
provider: string; provider: string;
model: string; model: string;
ollamaContextWindow?: number;
} }
interface ImageSearchBody { interface ImageSearchBody {
@@ -58,6 +60,10 @@ export const POST = async (req: Request) => {
}) as unknown as BaseChatModel; }) as unknown as BaseChatModel;
} else if (chatModelProvider && chatModel) { } else if (chatModelProvider && chatModel) {
llm = chatModel.model; llm = chatModel.model;
// Set context window size for Ollama models
if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
}
} }
if (!llm) { if (!llm) {

View File

@@ -7,11 +7,13 @@ import {
import { getAvailableChatModelProviders } from '@/lib/providers'; import { getAvailableChatModelProviders } from '@/lib/providers';
import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages'; import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
import { ChatOllama } from '@langchain/ollama';
import { ChatOpenAI } from '@langchain/openai'; import { ChatOpenAI } from '@langchain/openai';
interface ChatModel { interface ChatModel {
provider: string; provider: string;
model: string; model: string;
ollamaContextWindow?: number;
} }
interface VideoSearchBody { interface VideoSearchBody {
@@ -58,6 +60,10 @@ export const POST = async (req: Request) => {
}) as unknown as BaseChatModel; }) as unknown as BaseChatModel;
} else if (chatModelProvider && chatModel) { } else if (chatModelProvider && chatModel) {
llm = chatModel.model; llm = chatModel.model;
// Set context window size for Ollama models
if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
}
} }
if (!llm) { if (!llm) {

View File

@@ -20,10 +20,7 @@ const Chat = ({
setOptimizationMode, setOptimizationMode,
}: { }: {
messages: Message[]; messages: Message[];
sendMessage: ( sendMessage: (message: string) => void;
message: string,
messageId?: string,
) => void;
loading: boolean; loading: boolean;
messageAppeared: boolean; messageAppeared: boolean;
rewrite: (messageId: string) => void; rewrite: (messageId: string) => void;
@@ -96,7 +93,7 @@ const Chat = ({
<div ref={messageEnd} className="h-0" /> <div ref={messageEnd} className="h-0" />
{dividerWidth > 0 && ( {dividerWidth > 0 && (
<div <div
className="bottom-24 lg:bottom-10 fixed" className="bottom-24 lg:bottom-10 fixed z-40"
style={{ width: dividerWidth }} style={{ width: dividerWidth }}
> >
<MessageInput <MessageInput

View File

@@ -367,20 +367,6 @@ const sendMessage = async (
} }
messageId = messageId ?? crypto.randomBytes(7).toString('hex'); messageId = messageId ?? crypto.randomBytes(7).toString('hex');
// let messageData = {
// type: 'message',
// message: {
// messageId: messageId,
// chatId: chatId!,
// content: message,
// },
// files: fileIds,
// focusMode: focusMode,
// optimizationMode: optimizationMode,
// history: [...messageChatHistory, ['human', message]],
// isCompact: options?.isCompact ?? isCompact,
// };
// ws.send(JSON.stringify(messageData));
setMessages((prevMessages) => [ setMessages((prevMessages) => [
...prevMessages, ...prevMessages,
@@ -563,7 +549,7 @@ const sendMessage = async (
useEffect(() => { useEffect(() => {
if (isReady && initialMessage && isConfigReady) { if (isReady && initialMessage && isConfigReady) {
sendMessage(initialMessage, undefined, { }); sendMessage(initialMessage);
} }
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, [isConfigReady, isReady, initialMessage]); }, [isConfigReady, isReady, initialMessage]);

View File

@@ -18,10 +18,7 @@ const EmptyChatMessageInput = ({
files, files,
setFiles, setFiles,
}: { }: {
sendMessage: ( sendMessage: (message: string) => void;
message: string,
messageId?: string,
) => void;
focusMode: string; focusMode: string;
setFocusMode: (mode: string) => void; setFocusMode: (mode: string) => void;
optimizationMode: string; optimizationMode: string;
@@ -64,13 +61,13 @@ const EmptyChatMessageInput = ({
<form <form
onSubmit={(e) => { onSubmit={(e) => {
e.preventDefault(); e.preventDefault();
sendMessage(message, undefined); sendMessage(message);
setMessage(''); setMessage('');
}} }}
onKeyDown={(e) => { onKeyDown={(e) => {
if (e.key === 'Enter' && !e.shiftKey) { if (e.key === 'Enter' && !e.shiftKey) {
e.preventDefault(); e.preventDefault();
sendMessage(message, undefined); sendMessage(message);
setMessage(''); setMessage('');
} }
}} }}

View File

@@ -42,10 +42,7 @@ const MessageBox = ({
dividerRef?: MutableRefObject<HTMLDivElement | null>; dividerRef?: MutableRefObject<HTMLDivElement | null>;
isLast: boolean; isLast: boolean;
rewrite: (messageId: string) => void; rewrite: (messageId: string) => void;
sendMessage: ( sendMessage: (message: string) => void;
message: string,
messageId?: string
) => void;
}) => { }) => {
const [parsedMessage, setParsedMessage] = useState(message.content); const [parsedMessage, setParsedMessage] = useState(message.content);
const [speechMessage, setSpeechMessage] = useState(message.content); const [speechMessage, setSpeechMessage] = useState(message.content);
@@ -110,10 +107,6 @@ const MessageBox = ({
const { speechStatus, start, stop } = useSpeech({ text: speechMessage }); const { speechStatus, start, stop } = useSpeech({ text: speechMessage });
const handleSuggestionClick = (suggestion: string) => {
sendMessage(suggestion, undefined);
};
const markdownOverrides: MarkdownToJSX.Options = { const markdownOverrides: MarkdownToJSX.Options = {
overrides: { overrides: {
think: { think: {
@@ -228,7 +221,7 @@ const MessageBox = ({
<div className="h-px w-full bg-light-secondary dark:bg-dark-secondary" /> <div className="h-px w-full bg-light-secondary dark:bg-dark-secondary" />
<div <div
onClick={() => { onClick={() => {
handleSuggestionClick(suggestion); sendMessage(suggestion);
}} }}
className="cursor-pointer flex flex-row justify-between font-medium space-x-2 items-center" className="cursor-pointer flex flex-row justify-between font-medium space-x-2 items-center"
> >

View File

@@ -18,10 +18,7 @@ const MessageInput = ({
optimizationMode, optimizationMode,
setOptimizationMode, setOptimizationMode,
}: { }: {
sendMessage: ( sendMessage: (message: string) => void;
message: string,
messageId?: string
) => void;
loading: boolean; loading: boolean;
fileIds: string[]; fileIds: string[];
setFileIds: (fileIds: string[]) => void; setFileIds: (fileIds: string[]) => void;
@@ -68,13 +65,13 @@ const MessageInput = ({
onSubmit={(e) => { onSubmit={(e) => {
if (loading) return; if (loading) return;
e.preventDefault(); e.preventDefault();
sendMessage(message, undefined); sendMessage(message);
setMessage(''); setMessage('');
}} }}
onKeyDown={(e) => { onKeyDown={(e) => {
if (e.key === 'Enter' && !e.shiftKey && !loading) { if (e.key === 'Enter' && !e.shiftKey && !loading) {
e.preventDefault(); e.preventDefault();
sendMessage(message, undefined); sendMessage(message);
setMessage(''); setMessage('');
} }
}} }}

View File

@@ -47,7 +47,7 @@ const Optimization = ({
}; };
return ( return (
<Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg"> <Popover className="relative">
<PopoverButton <PopoverButton
type="button" type="button"
className="p-2 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white" className="p-2 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
@@ -75,8 +75,8 @@
leaveFrom="opacity-100 translate-y-0" leaveFrom="opacity-100 translate-y-0"
leaveTo="opacity-0 translate-y-1" leaveTo="opacity-0 translate-y-1"
> >
<PopoverPanel className="absolute z-10 w-64 md:w-[250px] right-0 bottom-[100%] mb-2"> <PopoverPanel className="absolute z-10 bottom-[100%] mb-2 left-1/2 transform -translate-x-1/2">
<div className="flex flex-col gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-4 max-h-[200px] md:max-h-none overflow-y-auto"> <div className="flex flex-col gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-max max-w-[300px] p-4 max-h-[200px] md:max-h-none overflow-y-auto">
{OptimizationModes.map((mode, i) => ( {OptimizationModes.map((mode, i) => (
<PopoverButton <PopoverButton
onClick={() => handleOptimizationChange(mode.key)} onClick={() => handleOptimizationChange(mode.key)}

View File

@@ -6,10 +6,7 @@ import {
redditSearchResponsePrompt, redditSearchResponsePrompt,
redditSearchRetrieverPrompt, redditSearchRetrieverPrompt,
} from './redditSearch'; } from './redditSearch';
import { import { webSearchResponsePrompt, webSearchRetrieverPrompt } from './webSearch';
webSearchResponsePrompt,
webSearchRetrieverPrompt
} from './webSearch';
import { import {
wolframAlphaSearchResponsePrompt, wolframAlphaSearchResponsePrompt,
wolframAlphaSearchRetrieverPrompt, wolframAlphaSearchRetrieverPrompt,

View File

@@ -282,10 +282,7 @@ class MetaSearchAgent implements MetaSearchAgentType {
.pipe(this.processDocs), .pipe(this.processDocs),
}), }),
ChatPromptTemplate.fromMessages([ ChatPromptTemplate.fromMessages([
[ ['system', this.config.responsePrompt],
'system',
this.config.responsePrompt
],
new MessagesPlaceholder('chat_history'), new MessagesPlaceholder('chat_history'),
['user', '{query}'], ['user', '{query}'],
]), ]),
@@ -483,7 +480,7 @@ class MetaSearchAgent implements MetaSearchAgentType {
fileIds, fileIds,
embeddings, embeddings,
optimizationMode, optimizationMode,
systemInstructions systemInstructions,
); );
const stream = answeringChain.streamEvents( const stream = answeringChain.streamEvents(