Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-06-15 22:38:48 +00:00)
Apply context window everywhere. Ensure styling is good on all screen sizes. Clean up inconsistencies with the upstream branch.
@@ -7,11 +7,13 @@ import {
 import { getAvailableChatModelProviders } from '@/lib/providers';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
+import { ChatOllama } from '@langchain/ollama';
 import { ChatOpenAI } from '@langchain/openai';
 
 interface ChatModel {
   provider: string;
   model: string;
+  ollamaContextWindow?: number;
 }
 
 interface ImageSearchBody {
@@ -58,6 +60,10 @@ export const POST = async (req: Request) => {
       }) as unknown as BaseChatModel;
     } else if (chatModelProvider && chatModel) {
       llm = chatModel.model;
+      // Set context window size for Ollama models
+      if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
+        llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
+      }
     }
 
     if (!llm) {
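This context-window wiring is the core of the commit: the request body carries an optional ollamaContextWindow, and it is applied only when the resolved model is a ChatOllama instance served by the 'ollama' provider. A minimal standalone TypeScript sketch of the same pattern follows; the helper name and surrounding route code are illustrative, only ChatOllama, numCtx, and the 2048 fallback come from the hunk above.

import { ChatOllama } from '@langchain/ollama';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';

// Hypothetical helper mirroring the hunk: apply the per-request Ollama
// context window (numCtx), falling back to 2048 when none is supplied.
const applyOllamaContextWindow = (
  llm: BaseChatModel,
  provider: string | undefined,
  ollamaContextWindow: number | undefined,
): BaseChatModel => {
  if (llm instanceof ChatOllama && provider === 'ollama') {
    llm.numCtx = ollamaContextWindow || 2048;
  }
  return llm;
};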
@@ -7,11 +7,13 @@ import {
 import { getAvailableChatModelProviders } from '@/lib/providers';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
+import { ChatOllama } from '@langchain/ollama';
 import { ChatOpenAI } from '@langchain/openai';
 
 interface ChatModel {
   provider: string;
   model: string;
+  ollamaContextWindow?: number;
 }
 
 interface VideoSearchBody {
@@ -58,6 +60,10 @@ export const POST = async (req: Request) => {
       }) as unknown as BaseChatModel;
     } else if (chatModelProvider && chatModel) {
       llm = chatModel.model;
+      // Set context window size for Ollama models
+      if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
+        llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
+      }
     }
 
     if (!llm) {
@@ -20,10 +20,7 @@ const Chat = ({
   setOptimizationMode,
 }: {
   messages: Message[];
-  sendMessage: (
-    message: string,
-    messageId?: string,
-  ) => void;
+  sendMessage: (message: string) => void;
   loading: boolean;
   messageAppeared: boolean;
   rewrite: (messageId: string) => void;
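The same prop-type change repeats in ChatWindow, EmptyChatMessageInput, MessageBox, and MessageInput below: the optional messageId parameter is dropped, so every call site passes only the message text. As an illustrative sketch (the type alias name is not in the codebase, only the signature is):

// Before this commit: (message: string, messageId?: string) => void
// After: the simpler single-argument form shared by all chat components.
type SendMessage = (message: string) => void;

const sendMessage: SendMessage = (message) => {
  console.log('sending', message);
};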
@@ -96,7 +93,7 @@ const Chat = ({
       <div ref={messageEnd} className="h-0" />
       {dividerWidth > 0 && (
         <div
-          className="bottom-24 lg:bottom-10 fixed"
+          className="bottom-24 lg:bottom-10 fixed z-40"
           style={{ width: dividerWidth }}
         >
           <MessageInput
@@ -367,20 +367,6 @@ const sendMessage = async (
   }
 
   messageId = messageId ?? crypto.randomBytes(7).toString('hex');
-  // let messageData = {
-  // type: 'message',
-  // message: {
-  // messageId: messageId,
-  // chatId: chatId!,
-  // content: message,
-  // },
-  // files: fileIds,
-  // focusMode: focusMode,
-  // optimizationMode: optimizationMode,
-  // history: [...messageChatHistory, ['human', message]],
-  // isCompact: options?.isCompact ?? isCompact,
-  // };
-  // ws.send(JSON.stringify(messageData));
 
   setMessages((prevMessages) => [
     ...prevMessages,
@@ -563,7 +549,7 @@ const sendMessage = async (
 
   useEffect(() => {
     if (isReady && initialMessage && isConfigReady) {
-      sendMessage(initialMessage, undefined, { });
+      sendMessage(initialMessage);
     }
     // eslint-disable-next-line react-hooks/exhaustive-deps
   }, [isConfigReady, isReady, initialMessage]);
@@ -18,10 +18,7 @@ const EmptyChatMessageInput = ({
   files,
   setFiles,
 }: {
-  sendMessage: (
-    message: string,
-    messageId?: string,
-  ) => void;
+  sendMessage: (message: string) => void;
   focusMode: string;
   setFocusMode: (mode: string) => void;
   optimizationMode: string;
@@ -64,13 +61,13 @@ const EmptyChatMessageInput = ({
     <form
       onSubmit={(e) => {
         e.preventDefault();
-        sendMessage(message, undefined);
+        sendMessage(message);
         setMessage('');
       }}
      onKeyDown={(e) => {
        if (e.key === 'Enter' && !e.shiftKey) {
          e.preventDefault();
-          sendMessage(message, undefined);
+          sendMessage(message);
          setMessage('');
        }
      }}
@@ -42,10 +42,7 @@ const MessageBox = ({
   dividerRef?: MutableRefObject<HTMLDivElement | null>;
   isLast: boolean;
   rewrite: (messageId: string) => void;
-  sendMessage: (
-    message: string,
-    messageId?: string
-  ) => void;
+  sendMessage: (message: string) => void;
 }) => {
   const [parsedMessage, setParsedMessage] = useState(message.content);
   const [speechMessage, setSpeechMessage] = useState(message.content);
@@ -110,10 +107,6 @@ const MessageBox = ({
 
   const { speechStatus, start, stop } = useSpeech({ text: speechMessage });
 
-  const handleSuggestionClick = (suggestion: string) => {
-    sendMessage(suggestion, undefined);
-  };
-
   const markdownOverrides: MarkdownToJSX.Options = {
     overrides: {
       think: {
@@ -228,7 +221,7 @@ const MessageBox = ({
               <div className="h-px w-full bg-light-secondary dark:bg-dark-secondary" />
               <div
                 onClick={() => {
-                  handleSuggestionClick(suggestion);
+                  sendMessage(suggestion);
                 }}
                 className="cursor-pointer flex flex-row justify-between font-medium space-x-2 items-center"
               >
@@ -18,10 +18,7 @@ const MessageInput = ({
   optimizationMode,
   setOptimizationMode,
 }: {
-  sendMessage: (
-    message: string,
-    messageId?: string
-  ) => void;
+  sendMessage: (message: string) => void;
   loading: boolean;
   fileIds: string[];
   setFileIds: (fileIds: string[]) => void;
@@ -68,13 +65,13 @@ const MessageInput = ({
       onSubmit={(e) => {
         if (loading) return;
         e.preventDefault();
-        sendMessage(message, undefined);
+        sendMessage(message);
         setMessage('');
       }}
      onKeyDown={(e) => {
        if (e.key === 'Enter' && !e.shiftKey && !loading) {
          e.preventDefault();
-          sendMessage(message, undefined);
+          sendMessage(message);
          setMessage('');
        }
      }}
@@ -47,7 +47,7 @@ const Optimization = ({
   };
 
   return (
-    <Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
+    <Popover className="relative">
       <PopoverButton
         type="button"
         className="p-2 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
@@ -75,8 +75,8 @@ const Optimization = ({
         leaveFrom="opacity-100 translate-y-0"
         leaveTo="opacity-0 translate-y-1"
       >
-        <PopoverPanel className="absolute z-10 w-64 md:w-[250px] right-0 bottom-[100%] mb-2">
-          <div className="flex flex-col gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-4 max-h-[200px] md:max-h-none overflow-y-auto">
+        <PopoverPanel className="absolute z-10 bottom-[100%] mb-2 left-1/2 transform -translate-x-1/2">
+          <div className="flex flex-col gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-max max-w-[300px] p-4 max-h-[200px] md:max-h-none overflow-y-auto">
             {OptimizationModes.map((mode, i) => (
               <PopoverButton
                 onClick={() => handleOptimizationChange(mode.key)}
@@ -6,10 +6,7 @@ import {
   redditSearchResponsePrompt,
   redditSearchRetrieverPrompt,
 } from './redditSearch';
-import {
-  webSearchResponsePrompt,
-  webSearchRetrieverPrompt
-} from './webSearch';
+import { webSearchResponsePrompt, webSearchRetrieverPrompt } from './webSearch';
 import {
   wolframAlphaSearchResponsePrompt,
   wolframAlphaSearchRetrieverPrompt,
@@ -107,4 +107,4 @@ export const webSearchResponsePrompt = `
 </context>
 
 Current date & time in ISO format (UTC timezone) is: {date}.
-`;
+`;
@@ -282,10 +282,7 @@ class MetaSearchAgent implements MetaSearchAgentType {
           .pipe(this.processDocs),
       }),
       ChatPromptTemplate.fromMessages([
-        [
-          'system',
-          this.config.responsePrompt
-        ],
+        ['system', this.config.responsePrompt],
         new MessagesPlaceholder('chat_history'),
         ['user', '{query}'],
       ]),
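For context, the prompt the answering chain builds keeps the same three slots; only the system entry is collapsed onto one line. A self-contained sketch of that structure, with the system string as a stand-in for this.config.responsePrompt:

import { ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts';

// Same shape as the hunk above: system prompt, prior turns, current query.
const prompt = ChatPromptTemplate.fromMessages([
  ['system', 'You are a helpful search assistant.'], // stand-in for this.config.responsePrompt
  new MessagesPlaceholder('chat_history'),
  ['user', '{query}'],
]);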
@@ -483,7 +480,7 @@ class MetaSearchAgent implements MetaSearchAgentType {
       fileIds,
       embeddings,
       optimizationMode,
-      systemInstructions
+      systemInstructions,
     );
 
     const stream = answeringChain.streamEvents(