Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-06-19 16:28:42 +00:00)
Ran prettier formatting
@@ -234,7 +234,7 @@ export const POST = async (req: Request) => {
       }) as unknown as BaseChatModel;
     } else if (chatModelProvider && chatModel) {
       llm = chatModel.model;
 
       // Set context window size for Ollama models
       if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
         llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
@@ -98,7 +98,7 @@ export const POST = async (req: Request) => {
     llm = chatModelProviders[chatModelProvider][chatModel]
       .model as unknown as BaseChatModel | undefined;
   }
 
   if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
     llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
   }
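Both hunks above carry the same pattern: when the configured provider is Ollama, the request's ollamaContextWindow value is copied onto the ChatOllama instance's numCtx field, falling back to 2048. A minimal sketch of that pattern, assuming ChatOllama is imported from @langchain/ollama (the exact import path used in the repo may differ) and using a hypothetical ChatModelSettings shape for the relevant slice of the request body:

import { ChatOllama } from '@langchain/ollama';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';

// Hypothetical slice of the request body; the route reads body.chatModel.
interface ChatModelSettings {
  provider: string;
  ollamaContextWindow?: number;
}

const applyOllamaContextWindow = (
  llm: BaseChatModel,
  chatModel?: ChatModelSettings,
) => {
  // numCtx is Ollama's context window size in tokens; 2048 mirrors the diff's fallback.
  if (llm instanceof ChatOllama && chatModel?.provider === 'ollama') {
    llm.numCtx = chatModel.ollamaContextWindow || 2048;
  }
};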
@@ -337,7 +337,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
     }
   }, [isMessagesLoaded, isConfigReady]);
 
-  const sendMessage = async (
+  const sendMessage = async (
     message: string,
     messageId?: string,
     options?: { rewriteIndex?: number },
@@ -359,10 +359,13 @@ const sendMessage = async (
     if (options?.rewriteIndex !== undefined) {
       const rewriteIndex = options.rewriteIndex;
       setMessages((prev) => {
-        return [...prev.slice(0, messages.length > 2 ? rewriteIndex - 1 : 0)]
+        return [...prev.slice(0, messages.length > 2 ? rewriteIndex - 1 : 0)];
       });
 
-      messageChatHistory = chatHistory.slice(0, messages.length > 2 ? rewriteIndex - 1 : 0)
+      messageChatHistory = chatHistory.slice(
+        0,
+        messages.length > 2 ? rewriteIndex - 1 : 0,
+      );
       setChatHistory(messageChatHistory);
     }
 
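As context for the slice above: when a message is rewritten, everything from the preceding user prompt onward is dropped from both the visible messages and the chat history. A worked illustration with a hypothetical four-message list (the component's real Message objects carry more fields):

// Hypothetical minimal shape; the component's Message type has more fields.
type Msg = { role: 'user' | 'assistant'; content: string };

const messages: Msg[] = [
  { role: 'user', content: 'q1' },
  { role: 'assistant', content: 'a1' },
  { role: 'user', content: 'q2' },
  { role: 'assistant', content: 'a2' }, // the reply being rewritten, index 3
];

const rewriteIndex = 3;
// Keep everything before the user prompt that produced the rewritten reply.
const kept = messages.slice(0, messages.length > 2 ? rewriteIndex - 1 : 0);
console.log(kept.map((m) => m.content)); // ['q1', 'a1']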
@@ -480,7 +483,8 @@ const sendMessage = async (
       }
     };
 
-    const ollamaContextWindow = localStorage.getItem('ollamaContextWindow') || '2048';
+    const ollamaContextWindow =
+      localStorage.getItem('ollamaContextWindow') || '2048';
 
     const res = await fetch('/api/chat', {
       method: 'POST',
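The context-window setting is kept as a string in localStorage with '2048' as the fallback, and it is parsed to a number before being sent to the API (see the parseInt calls in the payload hunks further down). A small illustrative helper capturing that flow; the component itself does this inline rather than through a function:

// Illustrative only; mirrors the inline reads shown in the diff.
const getOllamaContextWindow = (): number => {
  const raw = localStorage.getItem('ollamaContextWindow') || '2048';
  const parsed = parseInt(raw, 10);
  // Guard against a non-numeric stored value; 2048 matches the diff's default.
  return Number.isNaN(parsed) ? 2048 : parsed;
};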
@@ -542,9 +546,13 @@ const sendMessage = async (
   };
 
   const rewrite = (messageId: string) => {
-    const messageIndex = messages.findIndex((msg) => msg.messageId === messageId);
-    if(messageIndex == -1) return;
-    sendMessage(messages[messageIndex - 1].content, messageId, { rewriteIndex: messageIndex });
+    const messageIndex = messages.findIndex(
+      (msg) => msg.messageId === messageId,
+    );
+    if (messageIndex == -1) return;
+    sendMessage(messages[messageIndex - 1].content, messageId, {
+      rewriteIndex: messageIndex,
+    });
   };
 
   useEffect(() => {
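The reformatted rewrite above looks up the message by id, then re-sends the user prompt that preceded it, passing rewriteIndex so sendMessage can truncate the history (see the -359,10 hunk earlier). A self-contained toy example of that index arithmetic; the ids and contents are made up:

// Hypothetical two-message history: a user prompt and the reply being rewritten.
const messages = [
  { messageId: 'u1', content: 'What is Perplexica?' },
  { messageId: 'a1', content: 'An open-source answer engine.' },
];

const messageIndex = messages.findIndex((msg) => msg.messageId === 'a1');
if (messageIndex !== -1) {
  // rewrite() would call sendMessage with these arguments.
  const resend = {
    message: messages[messageIndex - 1].content, // 'What is Perplexica?'
    messageId: 'a1',
    options: { rewriteIndex: messageIndex }, // 1
  };
  console.log(resend);
}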
@@ -77,7 +77,9 @@ const MessageInput = ({
       }}
       className={cn(
         'bg-light-secondary dark:bg-dark-secondary p-4 flex items-center border border-light-200 dark:border-dark-200',
-        mode === 'multi' ? 'flex-col rounded-lg' : 'flex-col md:flex-row rounded-lg md:rounded-full',
+        mode === 'multi'
+          ? 'flex-col rounded-lg'
+          : 'flex-col md:flex-row rounded-lg md:rounded-full',
       )}
     >
       {mode === 'single' && (
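The ternary that prettier split above feeds a mode-dependent class string into cn, which in this codebase appears to be a class-combining helper (commonly clsx plus tailwind-merge). A hedged stand-in showing the effect of the two branches; the helper below is not the repo's implementation:

// Simplified stand-in for cn; the real helper likely also merges conflicting Tailwind classes.
const cn = (...classes: Array<string | false | undefined>) =>
  classes.filter(Boolean).join(' ');

const classesFor = (mode: 'single' | 'multi') =>
  cn(
    'bg-light-secondary dark:bg-dark-secondary p-4 flex items-center border border-light-200 dark:border-dark-200',
    mode === 'multi'
      ? 'flex-col rounded-lg'
      : 'flex-col md:flex-row rounded-lg md:rounded-full',
  );

console.log(classesFor('multi'));  // ...flex-col rounded-lg
console.log(classesFor('single')); // ...flex-col md:flex-row rounded-lg md:rounded-full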
@@ -130,7 +132,7 @@ const MessageInput = ({
           </div>
         )}
       </div>
 
       {mode === 'multi' && (
         <div className="flex flex-col md:flex-row items-start md:items-center justify-between w-full pt-2">
           <div className="flex flex-row items-center justify-between w-full md:w-auto mb-2 md:mb-0">
@@ -35,12 +35,11 @@ const OptimizationModes = [
 
 const Optimization = ({
   optimizationMode,
-  setOptimizationMode
+  setOptimizationMode,
 }: {
   optimizationMode: string;
   setOptimizationMode: (mode: string) => void;
 }) => {
-
   const handleOptimizationChange = (mode: string) => {
     setOptimizationMode(mode);
     localStorage.setItem('optimizationMode', mode);
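handleOptimizationChange above persists the selected mode so it survives reloads. A hedged sketch of the matching read side; where this restore actually happens in the app, and what the default mode is, are assumptions here, not taken from the diff:

// Illustrative restore; 'balanced' is an assumed default value.
const storedOptimizationMode =
  localStorage.getItem('optimizationMode') ?? 'balanced';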
@@ -46,20 +46,20 @@ const SearchImages = ({
         'Content-Type': 'application/json',
       },
       body: JSON.stringify({
-        query: query,
-        chatHistory: chatHistory,
-        chatModel: {
-          provider: chatModelProvider,
-          model: chatModel,
-          ...(chatModelProvider === 'custom_openai' && {
-            customOpenAIBaseURL: customOpenAIBaseURL,
-            customOpenAIKey: customOpenAIKey,
-          }),
-          ...(chatModelProvider === 'ollama' && {
-            ollamaContextWindow: parseInt(ollamaContextWindow),
-          }),
-        },
-      }),
+        query: query,
+        chatHistory: chatHistory,
+        chatModel: {
+          provider: chatModelProvider,
+          model: chatModel,
+          ...(chatModelProvider === 'custom_openai' && {
+            customOpenAIBaseURL: customOpenAIBaseURL,
+            customOpenAIKey: customOpenAIKey,
+          }),
+          ...(chatModelProvider === 'ollama' && {
+            ollamaContextWindow: parseInt(ollamaContextWindow),
+          }),
+        },
+      }),
     });
 
     const data = await res.json();
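For readers of the payload above, a hedged TypeScript sketch of the body sent by SearchImages. The field names come straight from the diff; the interface itself and the chatHistory element type are assumptions, not definitions from the repo:

// Illustrative shape only; the API route defines and validates its own body type.
interface ImageSearchBody {
  query: string;
  chatHistory: unknown[]; // whatever history representation the client keeps
  chatModel: {
    provider: string;
    model: string;
    // Present only when provider === 'custom_openai'.
    customOpenAIBaseURL?: string;
    customOpenAIKey?: string;
    // Present only when provider === 'ollama'.
    ollamaContextWindow?: number;
  };
}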
@@ -56,25 +56,25 @@ const Searchvideos = ({
       localStorage.getItem('ollamaContextWindow') || '2048';
 
     const res = await fetch(`/api/videos`, {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
       },
-      body: JSON.stringify({
-        query: query,
-        chatHistory: chatHistory,
-        chatModel: {
-          provider: chatModelProvider,
-          model: chatModel,
-          ...(chatModelProvider === 'custom_openai' && {
-            customOpenAIBaseURL: customOpenAIBaseURL,
-            customOpenAIKey: customOpenAIKey,
-          }),
-          ...(chatModelProvider === 'ollama' && {
-            ollamaContextWindow: parseInt(ollamaContextWindow),
-          }),
-        },
-      }),
+      body: JSON.stringify({
+        query: query,
+        chatHistory: chatHistory,
+        chatModel: {
+          provider: chatModelProvider,
+          model: chatModel,
+          ...(chatModelProvider === 'custom_openai' && {
+            customOpenAIBaseURL: customOpenAIBaseURL,
+            customOpenAIKey: customOpenAIKey,
+          }),
+          ...(chatModelProvider === 'ollama' && {
+            ollamaContextWindow: parseInt(ollamaContextWindow),
+          }),
+        },
+      }),
     });
 
     const data = await res.json();