Compare commits

..

5 Commits

SHA1        Message                            Date
701819d018  Revert "Update README.md"          2025-05-13 20:14:08 +05:30
            (This reverts commit 68e151b2bd.)
68e151b2bd  Update README.md                   2025-04-29 17:13:30 +05:30
06ff272541  feat(openai): add GPT 4.1 models   2025-04-29 13:10:14 +05:30
4154d5e4b1  Merge branch 'pr/629'              2025-04-23 20:35:52 +05:30
8aaee2c40c  feat(app): support complex title   2025-02-15 16:48:21 +08:00
19 changed files with 1866 additions and 13352 deletions

package-lock.json (generated; 11024 lines changed)

File diff suppressed because it is too large.

View File

@@ -20,7 +20,6 @@
     "@langchain/core": "^0.3.42",
     "@langchain/google-genai": "^0.1.12",
     "@langchain/openai": "^0.0.25",
-    "@langchain/ollama": "^0.2.0",
     "@langchain/textsplitters": "^0.1.0",
     "@tailwindcss/typography": "^0.5.12",
     "@xenova/transformers": "^2.17.2",

View File

@@ -20,7 +20,6 @@ import {
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '@/lib/config';
-import { ChatOllama } from '@langchain/ollama';
 import { searchHandlers } from '@/lib/search';
 
 export const runtime = 'nodejs';
@@ -35,7 +34,6 @@ type Message = {
 type ChatModel = {
   provider: string;
   name: string;
-  ollamaContextWindow?: number;
 };
 
 type EmbeddingModel = {
@@ -234,11 +232,6 @@ export const POST = async (req: Request) => {
       }) as unknown as BaseChatModel;
     } else if (chatModelProvider && chatModel) {
       llm = chatModel.model;
-
-      // Set context window size for Ollama models
-      if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
-        llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
-      }
     }
 
     if (!llm) {

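The hunk above removes the Ollama context-window plumbing, and the same blocks recur in the route files below. For reference, a minimal TypeScript sketch of the pattern being dropped, assuming ChatOllama from '@langchain/ollama' (whose numCtx field the removed lines set); the helper name is hypothetical, not from the repo:

import { ChatOllama } from '@langchain/ollama';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';

// Hypothetical helper mirroring the removed blocks: widen the Ollama
// context window from a client-supplied value, falling back to the
// 2048-token default the removed code used.
const applyOllamaContextWindow = (
  llm: BaseChatModel,
  provider: string,
  ollamaContextWindow?: number,
): void => {
  if (llm instanceof ChatOllama && provider === 'ollama') {
    llm.numCtx = ollamaContextWindow || 2048;
  }
};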
View File

@@ -7,13 +7,11 @@ import {
 import { getAvailableChatModelProviders } from '@/lib/providers';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
-import { ChatOllama } from '@langchain/ollama';
 import { ChatOpenAI } from '@langchain/openai';
 
 interface ChatModel {
   provider: string;
   model: string;
-  ollamaContextWindow?: number;
 }
 
 interface ImageSearchBody {
@@ -60,10 +58,6 @@ export const POST = async (req: Request) => {
       }) as unknown as BaseChatModel;
     } else if (chatModelProvider && chatModel) {
       llm = chatModel.model;
-      // Set context window size for Ollama models
-      if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
-        llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
-      }
     }
 
     if (!llm) {

View File

@@ -13,14 +13,12 @@ import {
   getCustomOpenaiModelName,
 } from '@/lib/config';
 import { searchHandlers } from '@/lib/search';
-import { ChatOllama } from '@langchain/ollama';
 
 interface chatModel {
   provider: string;
   name: string;
   customOpenAIKey?: string;
   customOpenAIBaseURL?: string;
-  ollamaContextWindow?: number;
 }
 
 interface embeddingModel {
@@ -99,10 +97,6 @@ export const POST = async (req: Request) => {
         .model as unknown as BaseChatModel | undefined;
     }
 
-    if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
-      llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
-    }
-
     if (
       embeddingModelProviders[embeddingModelProvider] &&
       embeddingModelProviders[embeddingModelProvider][embeddingModel]

View File

@@ -8,12 +8,10 @@ import { getAvailableChatModelProviders } from '@/lib/providers';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
 import { ChatOpenAI } from '@langchain/openai';
-import { ChatOllama } from '@langchain/ollama';
 
 interface ChatModel {
   provider: string;
   model: string;
-  ollamaContextWindow?: number;
 }
 
 interface SuggestionsGenerationBody {
@@ -59,10 +57,6 @@ export const POST = async (req: Request) => {
       }) as unknown as BaseChatModel;
     } else if (chatModelProvider && chatModel) {
       llm = chatModel.model;
-      // Set context window size for Ollama models
-      if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
-        llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
-      }
     }
 
     if (!llm) {

View File

@@ -7,13 +7,11 @@ import {
 import { getAvailableChatModelProviders } from '@/lib/providers';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
-import { ChatOllama } from '@langchain/ollama';
 import { ChatOpenAI } from '@langchain/openai';
 
 interface ChatModel {
   provider: string;
   model: string;
-  ollamaContextWindow?: number;
 }
 
 interface VideoSearchBody {
@@ -60,10 +58,6 @@ export const POST = async (req: Request) => {
       }) as unknown as BaseChatModel;
     } else if (chatModelProvider && chatModel) {
       llm = chatModel.model;
-      // Set context window size for Ollama models
-      if (llm instanceof ChatOllama && body.chatModel?.provider === 'ollama') {
-        llm.numCtx = body.chatModel.ollamaContextWindow || 2048;
-      }
     }
 
     if (!llm) {

View File

@@ -26,7 +26,6 @@ interface SettingsType {
   customOpenaiApiKey: string;
   customOpenaiApiUrl: string;
   customOpenaiModelName: string;
-  ollamaContextWindow: number;
 }
 
 interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {
@@ -149,11 +148,6 @@ const Page = () => {
   const [automaticVideoSearch, setAutomaticVideoSearch] = useState(false);
   const [systemInstructions, setSystemInstructions] = useState<string>('');
   const [savingStates, setSavingStates] = useState<Record<string, boolean>>({});
-  const [contextWindowSize, setContextWindowSize] = useState(2048);
-  const [isCustomContextWindow, setIsCustomContextWindow] = useState(false);
-  const predefinedContextSizes = [
-    1024, 2048, 3072, 4096, 8192, 16384, 32768, 65536, 131072,
-  ];
 
   useEffect(() => {
     const fetchConfig = async () => {
@@ -165,7 +159,6 @@ const Page = () => {
       });
 
       const data = (await res.json()) as SettingsType;
       setConfig(data);
-
 
       const chatModelProvidersKeys = Object.keys(data.chatModelProviders || {});
@@ -214,13 +207,6 @@ const Page = () => {
       setAutomaticVideoSearch(
         localStorage.getItem('autoVideoSearch') === 'true',
       );
 
-      const storedContextWindow = parseInt(
-        localStorage.getItem('ollamaContextWindow') ?? '2048',
-      );
-      setContextWindowSize(storedContextWindow);
-      setIsCustomContextWindow(
-        !predefinedContextSizes.includes(storedContextWindow),
-      );
 
       setSystemInstructions(localStorage.getItem('systemInstructions')!);
@@ -380,8 +366,6 @@ const Page = () => {
       localStorage.setItem('embeddingModelProvider', value);
     } else if (key === 'embeddingModel') {
       localStorage.setItem('embeddingModel', value);
-    } else if (key === 'ollamaContextWindow') {
-      localStorage.setItem('ollamaContextWindow', value.toString());
     } else if (key === 'systemInstructions') {
       localStorage.setItem('systemInstructions', value);
     }
@@ -615,78 +599,6 @@ const Page = () => {
                       ];
                     })()}
                   />
-                  {selectedChatModelProvider === 'ollama' && (
-                    <div className="flex flex-col space-y-1">
-                      <p className="text-black/70 dark:text-white/70 text-sm">
-                        Chat Context Window Size
-                      </p>
-                      <Select
-                        value={
-                          isCustomContextWindow
-                            ? 'custom'
-                            : contextWindowSize.toString()
-                        }
-                        onChange={(e) => {
-                          const value = e.target.value;
-                          if (value === 'custom') {
-                            setIsCustomContextWindow(true);
-                          } else {
-                            setIsCustomContextWindow(false);
-                            const numValue = parseInt(value);
-                            setContextWindowSize(numValue);
-                            setConfig((prev) => ({
-                              ...prev!,
-                              ollamaContextWindow: numValue,
-                            }));
-                            saveConfig('ollamaContextWindow', numValue);
-                          }
-                        }}
-                        options={[
-                          ...predefinedContextSizes.map((size) => ({
-                            value: size.toString(),
-                            label: `${size.toLocaleString()} tokens`,
-                          })),
-                          { value: 'custom', label: 'Custom...' },
-                        ]}
-                      />
-                      {isCustomContextWindow && (
-                        <div className="mt-2">
-                          <Input
-                            type="number"
-                            min={512}
-                            value={contextWindowSize}
-                            placeholder="Custom context window size (minimum 512)"
-                            isSaving={savingStates['ollamaContextWindow']}
-                            onChange={(e) => {
-                              // Allow any value to be typed
-                              const value =
-                                parseInt(e.target.value) ||
-                                contextWindowSize;
-                              setContextWindowSize(value);
-                            }}
-                            onSave={(value) => {
-                              // Validate only when saving
-                              const numValue = Math.max(
-                                512,
-                                parseInt(value) || 2048,
-                              );
-                              setContextWindowSize(numValue);
-                              setConfig((prev) => ({
-                                ...prev!,
-                                ollamaContextWindow: numValue,
-                              }));
-                              saveConfig('ollamaContextWindow', numValue);
-                            }}
-                          />
-                        </div>
-                      )}
-                      <p className="text-xs text-black/60 dark:text-white/60 mt-0.5">
-                        {isCustomContextWindow
-                          ? 'Adjust the context window size for Ollama models (minimum 512 tokens)'
-                          : 'Adjust the context window size for Ollama models'}
-                      </p>
-                    </div>
-                  )}
                 </div>
               )}
             </div>

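The settings UI removed above let any value be typed but validated only on save, clamping to a 512-token floor with a 2048 fallback. That clamp in isolation, as a standalone TypeScript sketch (the function name is not from the repo):

// Accept any typed value, but clamp to a sane floor when persisting.
const clampContextWindow = (raw: string, fallback = 2048, floor = 512): number =>
  Math.max(floor, parseInt(raw) || fallback);

clampContextWindow('100');  // 512  (below the floor)
clampContextWindow('8192'); // 8192
clampContextWindow('oops'); // 2048 (unparsable, falls back)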
View File

@@ -16,8 +16,6 @@ const Chat = ({
   setFileIds,
   files,
   setFiles,
-  optimizationMode,
-  setOptimizationMode,
 }: {
   messages: Message[];
   sendMessage: (message: string) => void;
@@ -28,8 +26,6 @@
   setFileIds: (fileIds: string[]) => void;
   files: File[];
   setFiles: (files: File[]) => void;
-  optimizationMode: string;
-  setOptimizationMode: (mode: string) => void;
 }) => {
   const [dividerWidth, setDividerWidth] = useState(0);
   const dividerRef = useRef<HTMLDivElement | null>(null);
@@ -103,8 +99,6 @@
           setFileIds={setFileIds}
           files={files}
           setFiles={setFiles}
-          optimizationMode={optimizationMode}
-          setOptimizationMode={setOptimizationMode}
         />
       </div>
     )}

View File

@@ -287,16 +287,6 @@ const ChatWindow = ({ id }: { id?: string }) => {
 
   const [notFound, setNotFound] = useState(false);
 
-  useEffect(() => {
-    const savedOptimizationMode = localStorage.getItem('optimizationMode');
-
-    if (savedOptimizationMode !== null) {
-      setOptimizationMode(savedOptimizationMode);
-    } else {
-      localStorage.setItem('optimizationMode', optimizationMode);
-    }
-  }, []);
-
   useEffect(() => {
     if (
       chatId &&
@@ -337,11 +327,7 @@
     }
   }, [isMessagesLoaded, isConfigReady]);
 
-  const sendMessage = async (
-    message: string,
-    messageId?: string,
-    options?: { rewriteIndex?: number },
-  ) => {
+  const sendMessage = async (message: string, messageId?: string) => {
     if (loading) return;
     if (!isConfigReady) {
       toast.error('Cannot send message before the configuration is ready');
@@ -354,20 +340,6 @@
     let sources: Document[] | undefined = undefined;
     let recievedMessage = '';
     let added = false;
 
-    let messageChatHistory = chatHistory;
-
-    if (options?.rewriteIndex !== undefined) {
-      const rewriteIndex = options.rewriteIndex;
-
-      setMessages((prev) => {
-        return [...prev.slice(0, messages.length > 2 ? rewriteIndex - 1 : 0)];
-      });
-      messageChatHistory = chatHistory.slice(
-        0,
-        messages.length > 2 ? rewriteIndex - 1 : 0,
-      );
-      setChatHistory(messageChatHistory);
-    }
 
     messageId = messageId ?? crypto.randomBytes(7).toString('hex');
@@ -483,9 +455,6 @@
       }
     };
 
-    const ollamaContextWindow =
-      localStorage.getItem('ollamaContextWindow') || '2048';
-
     const res = await fetch('/api/chat', {
       method: 'POST',
       headers: {
@@ -502,13 +471,10 @@
           files: fileIds,
           focusMode: focusMode,
           optimizationMode: optimizationMode,
-          history: messageChatHistory,
+          history: chatHistory,
           chatModel: {
             name: chatModelProvider.name,
             provider: chatModelProvider.provider,
-            ...(chatModelProvider.provider === 'ollama' && {
-              ollamaContextWindow: parseInt(ollamaContextWindow),
-            }),
           },
           embeddingModel: {
             name: embeddingModelProvider.name,
@@ -546,13 +512,20 @@
   };
 
   const rewrite = (messageId: string) => {
-    const messageIndex = messages.findIndex(
-      (msg) => msg.messageId === messageId,
-    );
-    if (messageIndex == -1) return;
-    sendMessage(messages[messageIndex - 1].content, messageId, {
-      rewriteIndex: messageIndex,
+    const index = messages.findIndex((msg) => msg.messageId === messageId);
+
+    if (index === -1) return;
+    const message = messages[index - 1];
+
+    setMessages((prev) => {
+      return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
     });
+
+    setChatHistory((prev) => {
+      return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
+    });
+
+    sendMessage(message.content, message.messageId);
   };
 
   useEffect(() => {
@@ -597,8 +570,6 @@
             setFileIds={setFileIds}
             files={files}
             setFiles={setFiles}
-            optimizationMode={optimizationMode}
-            setOptimizationMode={setOptimizationMode}
           />
         </>
       ) : (

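One nuance separates the two rewrite variants above: React state setters do not change values already captured by the running closure, so in the added (+) variant the trailing sendMessage(message.content, message.messageId) still posts the pre-slice chatHistory, while the removed (-) variant threaded the sliced messageChatHistory into the request explicitly. A self-contained TypeScript sketch of that capture behavior (illustrative only, not code from the repo):

import { useState } from 'react';

function Demo({ send }: { send: (history: string[]) => void }) {
  const [history, setHistory] = useState<string[]>(['a', 'b', 'c']);

  const rewrite = () => {
    setHistory((prev) => prev.slice(0, -1));
    // `history` is still the array captured at render time, so send()
    // receives ['a', 'b', 'c'], not the sliced copy queued above.
    send(history);
  };

  return <button onClick={rewrite}>rewrite</button>;
}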
View File

@@ -4,7 +4,6 @@ import { useEffect, useRef, useState } from 'react';
 import TextareaAutosize from 'react-textarea-autosize';
 import Attach from './MessageInputActions/Attach';
 import CopilotToggle from './MessageInputActions/Copilot';
-import Optimization from './MessageInputActions/Optimization';
 import { File } from './ChatWindow';
 import AttachSmall from './MessageInputActions/AttachSmall';
 
@@ -15,8 +14,6 @@ const MessageInput = ({
   setFileIds,
   files,
   setFiles,
-  optimizationMode,
-  setOptimizationMode,
 }: {
   sendMessage: (message: string) => void;
   loading: boolean;
@@ -24,8 +21,6 @@
   setFileIds: (fileIds: string[]) => void;
   files: File[];
   setFiles: (files: File[]) => void;
-  optimizationMode: string;
-  setOptimizationMode: (mode: string) => void;
 }) => {
   const [copilotEnabled, setCopilotEnabled] = useState(false);
   const [message, setMessage] = useState('');
@@ -45,16 +40,20 @@
   useEffect(() => {
     const handleKeyDown = (e: KeyboardEvent) => {
       const activeElement = document.activeElement;
+
       const isInputFocused =
         activeElement?.tagName === 'INPUT' ||
         activeElement?.tagName === 'TEXTAREA' ||
         activeElement?.hasAttribute('contenteditable');
+
       if (e.key === '/' && !isInputFocused) {
         e.preventDefault();
         inputRef.current?.focus();
       }
     };
+
     document.addEventListener('keydown', handleKeyDown);
+
     return () => {
       document.removeEventListener('keydown', handleKeyDown);
     };
@@ -76,35 +75,18 @@
         }
       }}
       className={cn(
-        'bg-light-secondary dark:bg-dark-secondary p-4 flex items-center border border-light-200 dark:border-dark-200',
-        mode === 'multi'
-          ? 'flex-col rounded-lg'
-          : 'flex-col md:flex-row rounded-lg md:rounded-full',
+        'bg-light-secondary dark:bg-dark-secondary p-4 flex items-center overflow-hidden border border-light-200 dark:border-dark-200',
+        mode === 'multi' ? 'flex-col rounded-lg' : 'flex-row rounded-full',
       )}
     >
       {mode === 'single' && (
-        <div className="flex flex-row items-center justify-between w-full mb-2 md:mb-0 md:w-auto">
-          <div className="flex flex-row items-center space-x-2">
         <AttachSmall
           fileIds={fileIds}
           setFileIds={setFileIds}
           files={files}
           setFiles={setFiles}
         />
-            <Optimization
-              optimizationMode={optimizationMode}
-              setOptimizationMode={setOptimizationMode}
-            />
-          </div>
-          <div className="md:hidden">
-            <CopilotToggle
-              copilotEnabled={copilotEnabled}
-              setCopilotEnabled={setCopilotEnabled}
-            />
-          </div>
-        </div>
       )}
-      <div className="flex flex-row items-center w-full">
       <TextareaAutosize
         ref={inputRef}
         value={message}
@@ -117,12 +99,10 @@
       />
       {mode === 'single' && (
         <div className="flex flex-row items-center space-x-4">
-          <div className="hidden md:block">
           <CopilotToggle
             copilotEnabled={copilotEnabled}
             setCopilotEnabled={setCopilotEnabled}
           />
-          </div>
           <button
             disabled={message.trim().length === 0 || loading}
             className="bg-[#24A0ED] text-white disabled:text-black/50 dark:disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#e0e0dc79] dark:disabled:bg-[#ececec21] rounded-full p-2"
@@ -131,40 +111,22 @@
           </button>
         </div>
       )}
-    </div>
     {mode === 'multi' && (
-      <div className="flex flex-col md:flex-row items-start md:items-center justify-between w-full pt-2">
-        <div className="flex flex-row items-center justify-between w-full md:w-auto mb-2 md:mb-0">
-          <div className="flex flex-row items-center space-x-2">
+      <div className="flex flex-row items-center justify-between w-full pt-2">
         <AttachSmall
          fileIds={fileIds}
          setFileIds={setFileIds}
          files={files}
          setFiles={setFiles}
        />
-            <Optimization
-              optimizationMode={optimizationMode}
-              setOptimizationMode={setOptimizationMode}
-            />
-          </div>
-          <div className="md:hidden">
+        <div className="flex flex-row items-center space-x-4">
           <CopilotToggle
             copilotEnabled={copilotEnabled}
             setCopilotEnabled={setCopilotEnabled}
           />
-          </div>
-        </div>
-        <div className="flex flex-row items-center space-x-4 self-end">
-          <div className="hidden md:block">
-            <CopilotToggle
-              copilotEnabled={copilotEnabled}
-              setCopilotEnabled={setCopilotEnabled}
-            />
-          </div>
-
           <button
             disabled={message.trim().length === 0 || loading}
-            className="bg-[#24A0ED] text-white disabled:text-black/50 dark:disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#e0e0dc79] dark:disabled:bg-[#ececec21] rounded-full p-2"
+            className="bg-[#24A0ED] text-white text-black/50 dark:disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#e0e0dc79] dark:disabled:bg-[#ececec21] rounded-full p-2"
           >
             <ArrowUp className="bg-background" size={17} />
           </button>

View File

@@ -1,4 +1,4 @@
-import { ChevronDown, Minimize2, Sliders, Star, Zap } from 'lucide-react';
+import { ChevronDown, Sliders, Star, Zap } from 'lucide-react';
 import { cn } from '@/lib/utils';
 import {
   Popover,
@@ -7,6 +7,7 @@ import {
   Transition,
 } from '@headlessui/react';
 import { Fragment } from 'react';
+
 const OptimizationModes = [
   {
     key: 'speed',
@@ -40,13 +41,8 @@
   optimizationMode: string;
   setOptimizationMode: (mode: string) => void;
 }) => {
-  const handleOptimizationChange = (mode: string) => {
-    setOptimizationMode(mode);
-    localStorage.setItem('optimizationMode', mode);
-  };
-
   return (
-    <Popover className="relative">
+    <Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
       <PopoverButton
         type="button"
         className="p-2 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
@@ -74,11 +70,11 @@
         leaveFrom="opacity-100 translate-y-0"
         leaveTo="opacity-0 translate-y-1"
       >
-        <PopoverPanel className="absolute z-10 bottom-[100%] mb-2 left-1/2 transform -translate-x-1/2">
-          <div className="flex flex-col gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-max max-w-[300px] p-4 max-h-[200px] md:max-h-none overflow-y-auto">
+        <PopoverPanel className="absolute z-10 w-64 md:w-[250px] right-0">
+          <div className="flex flex-col gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-4 max-h-[200px] md:max-h-none overflow-y-auto">
           {OptimizationModes.map((mode, i) => (
             <PopoverButton
-              onClick={() => handleOptimizationChange(mode.key)}
+              onClick={() => setOptimizationMode(mode.key)}
               key={i}
               disabled={mode.key === 'quality'}
               className={cn(

View File

@@ -35,10 +35,9 @@
     const chatModelProvider = localStorage.getItem('chatModelProvider');
     const chatModel = localStorage.getItem('chatModel');
 
     const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
     const customOpenAIKey = localStorage.getItem('openAIApiKey');
-    const ollamaContextWindow =
-      localStorage.getItem('ollamaContextWindow') || '2048';
+
 
     const res = await fetch(`/api/images`, {
       method: 'POST',
@@ -55,9 +54,6 @@
             customOpenAIBaseURL: customOpenAIBaseURL,
             customOpenAIKey: customOpenAIKey,
           }),
-          ...(chatModelProvider === 'ollama' && {
-            ollamaContextWindow: parseInt(ollamaContextWindow),
-          }),
         },
       }),
     });

View File

@@ -50,10 +50,9 @@
     const chatModelProvider = localStorage.getItem('chatModelProvider');
     const chatModel = localStorage.getItem('chatModel');
 
     const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
     const customOpenAIKey = localStorage.getItem('openAIApiKey');
-    const ollamaContextWindow =
-      localStorage.getItem('ollamaContextWindow') || '2048';
+
 
     const res = await fetch(`/api/videos`, {
       method: 'POST',
@@ -70,9 +69,6 @@
             customOpenAIBaseURL: customOpenAIBaseURL,
             customOpenAIKey: customOpenAIKey,
           }),
-          ...(chatModelProvider === 'ollama' && {
-            ollamaContextWindow: parseInt(ollamaContextWindow),
-          }),
         },
       }),
     });

View File

@@ -6,8 +6,6 @@ export const getSuggestions = async (chatHisory: Message[]) => {
   const customOpenAIKey = localStorage.getItem('openAIApiKey');
   const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
 
-  const ollamaContextWindow =
-    localStorage.getItem('ollamaContextWindow') || '2048';
 
   const res = await fetch(`/api/suggestions`, {
     method: 'POST',
@@ -23,9 +21,6 @@ export const getSuggestions = async (chatHisory: Message[]) => {
         customOpenAIKey,
         customOpenAIBaseURL,
       }),
-      ...(chatModelProvider === 'ollama' && {
-        ollamaContextWindow: parseInt(ollamaContextWindow),
-      }),
     },
   }),
  });

View File

@@ -6,8 +6,8 @@ export const PROVIDER_INFO = {
   key: 'ollama',
   displayName: 'Ollama',
 };
-import { ChatOllama } from '@langchain/ollama';
-import { OllamaEmbeddings } from '@langchain/ollama';
+import { ChatOllama } from '@langchain/community/chat_models/ollama';
+import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
 
 export const loadOllamaChatModels = async () => {
   const ollamaApiEndpoint = getOllamaApiEndpoint();

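The import swap above moves ChatOllama and OllamaEmbeddings from the standalone '@langchain/ollama' package back to '@langchain/community'. Both packages export classes under these names with a similar constructor shape; a minimal usage sketch (the endpoint and model name are placeholders, not values from the repo):

import { ChatOllama } from '@langchain/community/chat_models/ollama';

// Placeholder configuration; the provider above reads the real
// endpoint from getOllamaApiEndpoint().
const llm = new ChatOllama({
  baseUrl: 'http://localhost:11434',
  model: 'llama3',
});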
View File

@@ -30,6 +30,18 @@ const openaiChatModels: Record<string, string>[] = [
     displayName: 'GPT-4 omni mini',
     key: 'gpt-4o-mini',
   },
+  {
+    displayName: 'GPT 4.1 nano',
+    key: 'gpt-4.1-nano',
+  },
+  {
+    displayName: 'GPT 4.1 mini',
+    key: 'gpt-4.1-mini',
+  },
+  {
+    displayName: 'GPT 4.1',
+    key: 'gpt-4.1',
+  },
 ];
 
 const openaiEmbeddingModels: Record<string, string>[] = [

View File

@@ -64,7 +64,7 @@ export const getDocumentsFromLinks = async ({ links }: { links: string[] }) => {
   const splittedText = await splitter.splitText(parsedText);
   const title = res.data
     .toString('utf8')
-    .match(/<title>(.*?)<\/title>/)?.[1];
+    .match(/<title.*>(.*?)<\/title>/)?.[1];
 
   const linkDocs = splittedText.map((text) => {
     return new Document({

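The one-line regex change above (evidently commit 8aaee2c40c, "feat(app): support complex title") lets the title extraction tolerate attributes on the <title> tag. A small TypeScript illustration:

const html = '<title data-rh="true">Example Domain</title>';

// Old pattern: matches only a bare <title> with no attributes.
html.match(/<title>(.*?)<\/title>/)?.[1]; // undefined

// New pattern: the `.*` after "title" skips past any attributes.
html.match(/<title.*>(.*?)<\/title>/)?.[1]; // 'Example Domain'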
yarn.lock (3826 lines changed)

File diff suppressed because it is too large.